automation-script-generator
8
Total installs
3
Weekly installs
#34946
Site-wide rank
Install command
npx skills add https://github.com/ntaksh42/agents --skill automation-script-generator
Agent install distribution
claude-code
3
mcpjam
2
roo
2
junie
2
windsurf
2
zencoder
2
Skill Documentation
Automation Script Generator Skill
A skill that generates scripts to automate repetitive tasks.
Overview
This skill generates scripts that automate routine, repetitive tasks. It streamlines recurring work such as file operations, data processing, API integration, backups, monitoring, and report generation. It supports multiple scripting languages and covers error handling, logging, and scheduled execution.
Key Features
- Multi-language support: Bash, Python, PowerShell, Node.js, Ruby
- File operations: bulk rename, copy, move, compress, delete
- Data processing: CSV/JSON conversion, filtering, aggregation, merging
- API integration: REST API calls, authentication, error handling
- Backup automation: files, databases, cloud storage
- Monitoring & alerts: resource monitoring, health checks, notifications
- Report generation: log analysis, metrics aggregation, HTML/PDF reports
- CI/CD scripts: build, test, and deploy automation
- Scheduled execution: cron and Task Scheduler setup
- Error handling: robust error handling with retries
Script Types
1. File Operations
Bulk rename (Bash)
#!/bin/bash
# Rename image files sequentially (sorted by file name)
# Usage: ./rename_images.sh /path/to/images
set -euo pipefail
SOURCE_DIR="${1:-.}"
PREFIX="photo"
EXTENSION="jpg"
# Initialize counter
counter=1
# Sort files by name (NUL-delimited); process substitution keeps the
# loop in the current shell, so the counter survives past the loop
while IFS= read -r -d '' file; do
    # Build the new zero-padded file name
    new_name=$(printf "%s_%04d.%s" "$PREFIX" "$counter" "$EXTENSION")
    new_path="$SOURCE_DIR/$new_name"
    # Rename
    if [ "$file" != "$new_path" ]; then
        mv -v "$file" "$new_path"
    fi
    counter=$((counter + 1))
done < <(find "$SOURCE_DIR" -maxdepth 1 -type f -name "*.$EXTENSION" -print0 | sort -z)
echo "✓ Rename complete: $((counter - 1)) files"
File organization (Python)
#!/usr/bin/env python3
"""
Organize the Downloads folder by file extension.
Usage: python organize_files.py ~/Downloads [--dry-run]
"""
import shutil
from pathlib import Path
from datetime import datetime
import logging

# Logging setup
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

# Mapping of folders to extensions
EXTENSION_MAPPING = {
    'Images': ['.jpg', '.jpeg', '.png', '.gif', '.bmp', '.svg'],
    'Documents': ['.pdf', '.doc', '.docx', '.txt', '.xlsx', '.pptx'],
    'Videos': ['.mp4', '.avi', '.mov', '.mkv', '.flv'],
    'Audio': ['.mp3', '.wav', '.flac', '.m4a'],
    'Archives': ['.zip', '.rar', '.7z', '.tar', '.gz'],
    'Code': ['.py', '.js', '.java', '.cpp', '.html', '.css'],
}

def organize_files(source_dir: str, dry_run: bool = False):
    """Sort files into folders by extension."""
    source_path = Path(source_dir)
    if not source_path.exists():
        logging.error(f"Directory does not exist: {source_dir}")
        return
    files_moved = 0
    for file_path in source_path.iterdir():
        # Skip directories
        if file_path.is_dir():
            continue
        # Get the extension
        extension = file_path.suffix.lower()
        # Find the matching folder
        target_folder = None
        for folder, extensions in EXTENSION_MAPPING.items():
            if extension in extensions:
                target_folder = folder
                break
        # Unmapped extensions go to "Others"
        if target_folder is None:
            target_folder = "Others"
        # Create the destination directory
        dest_dir = source_path / target_folder
        if not dry_run:
            dest_dir.mkdir(exist_ok=True)
        # Move the file
        dest_path = dest_dir / file_path.name
        # If a file with the same name already exists, append a timestamp
        if dest_path.exists():
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            stem = dest_path.stem
            suffix = dest_path.suffix
            dest_path = dest_dir / f"{stem}_{timestamp}{suffix}"
        if dry_run:
            logging.info(f"[DRY RUN] {file_path.name} -> {target_folder}/")
        else:
            shutil.move(str(file_path), str(dest_path))
            logging.info(f"Moved: {file_path.name} -> {target_folder}/")
        files_moved += 1
    logging.info(f"✓ Done: organized {files_moved} files")

if __name__ == "__main__":
    import sys
    if len(sys.argv) < 2:
        print("Usage: python organize_files.py <directory> [--dry-run]")
        sys.exit(1)
    source_dir = sys.argv[1]
    dry_run = "--dry-run" in sys.argv
    organize_files(source_dir, dry_run)
2. Data Processing
CSV to JSON conversion (Node.js)
#!/usr/bin/env node
/**
 * Convert a CSV file to JSON.
 * Usage: node csv_to_json.js input.csv output.json
 * Requires: npm install csv-parser
 */
const fs = require('fs');
const csv = require('csv-parser');

function csvToJson(inputFile, outputFile) {
  const results = [];
  fs.createReadStream(inputFile)
    .pipe(csv())
    .on('data', (data) => results.push(data))
    .on('end', () => {
      // Write the JSON file
      fs.writeFileSync(
        outputFile,
        JSON.stringify(results, null, 2),
        'utf-8'
      );
      console.log(`✓ Conversion complete: ${results.length} records`);
      console.log(`Output: ${outputFile}`);
    })
    .on('error', (error) => {
      console.error('Error:', error.message);
      process.exit(1);
    });
}

// Command-line arguments
const [inputFile, outputFile] = process.argv.slice(2);
if (!inputFile || !outputFile) {
  console.error('Usage: node csv_to_json.js <input.csv> <output.json>');
  process.exit(1);
}
csvToJson(inputFile, outputFile);
Data aggregation (Python)
#!/usr/bin/env python3
"""
Aggregate statistics from a log file.
Usage: python analyze_logs.py access.log
"""
import re
from collections import Counter, defaultdict
from datetime import datetime
import json

def analyze_access_log(log_file: str):
    """Analyze an access log."""
    # Apache/Nginx-style log pattern
    log_pattern = re.compile(
        r'(?P<ip>[\d.]+) - - \[(?P<datetime>[^\]]+)\] '
        r'"(?P<method>\w+) (?P<path>[^\s]+) HTTP/[\d.]+" '
        r'(?P<status>\d+) (?P<size>\d+)'
    )
    stats = {
        'total_requests': 0,
        'status_codes': Counter(),
        'methods': Counter(),
        'paths': Counter(),
        'ips': Counter(),
        'hourly_distribution': defaultdict(int),
    }
    with open(log_file, 'r') as f:
        for line in f:
            match = log_pattern.match(line)
            if not match:
                continue
            data = match.groupdict()
            stats['total_requests'] += 1
            stats['status_codes'][data['status']] += 1
            stats['methods'][data['method']] += 1
            stats['paths'][data['path']] += 1
            stats['ips'][data['ip']] += 1
            # Hourly aggregation
            try:
                dt = datetime.strptime(data['datetime'], '%d/%b/%Y:%H:%M:%S %z')
                stats['hourly_distribution'][dt.hour] += 1
            except ValueError:
                pass
    # Generate the report
    print("=" * 60)
    print("Access Log Analysis Report")
    print("=" * 60)
    print(f"\nTotal requests: {stats['total_requests']:,}")
    print("\n--- By status code ---")
    for status, count in stats['status_codes'].most_common():
        print(f"{status}: {count:,}")
    print("\n--- By HTTP method ---")
    for method, count in stats['methods'].most_common():
        print(f"{method}: {count:,}")
    print("\n--- Top 10 paths ---")
    for path, count in stats['paths'].most_common(10):
        print(f"{count:,} - {path}")
    print("\n--- Top 10 IP addresses ---")
    for ip, count in stats['ips'].most_common(10):
        print(f"{count:,} - {ip}")
    print("\n--- Hourly distribution ---")
    for hour in range(24):
        count = stats['hourly_distribution'][hour]
        bar = '█' * (count // 100)
        print(f"{hour:02d}:00 | {bar} {count:,}")
    # JSON output (derive the stats file name without clobbering the input)
    base = log_file[:-4] if log_file.endswith('.log') else log_file
    output_file = base + '_stats.json'
    with open(output_file, 'w') as f:
        # Convert Counters to plain dicts
        stats_dict = {
            'total_requests': stats['total_requests'],
            'status_codes': dict(stats['status_codes']),
            'methods': dict(stats['methods']),
            'top_paths': dict(stats['paths'].most_common(20)),
            'top_ips': dict(stats['ips'].most_common(20)),
            'hourly_distribution': dict(stats['hourly_distribution']),
        }
        json.dump(stats_dict, f, indent=2)
    print(f"\n✓ Detailed stats saved: {output_file}")

if __name__ == "__main__":
    import sys
    if len(sys.argv) < 2:
        print("Usage: python analyze_logs.py <log_file>")
        sys.exit(1)
    analyze_access_log(sys.argv[1])
3. API Integration
REST API automation (Python)
#!/usr/bin/env python3
"""
Fetch repository information via the GitHub API.
Usage: python github_stats.py <username>
Environment variable: GITHUB_TOKEN
"""
import os
import requests
from datetime import datetime
import json

GITHUB_API_BASE = "https://api.github.com"

def get_user_repos(username: str, token: str = None):
    """Fetch the user's repository list (first page, up to 100 repos)."""
    headers = {}
    if token:
        headers['Authorization'] = f'token {token}'
    url = f"{GITHUB_API_BASE}/users/{username}/repos"
    params = {'per_page': 100, 'sort': 'updated'}
    try:
        response = requests.get(url, headers=headers, params=params)
        response.raise_for_status()
        return response.json()
    except requests.exceptions.RequestException as e:
        print(f"Error: {e}")
        return []

def generate_report(username: str, repos: list):
    """Generate the report."""
    if not repos:
        print("No repositories found.")
        return
    # Compute statistics
    total_stars = sum(repo['stargazers_count'] for repo in repos)
    total_forks = sum(repo['forks_count'] for repo in repos)
    languages = {}
    for repo in repos:
        lang = repo.get('language')
        if lang:
            languages[lang] = languages.get(lang, 0) + 1
    # Print the report
    print("=" * 60)
    print(f"GitHub repository stats: {username}")
    print("=" * 60)
    print(f"\nTotal repositories: {len(repos)}")
    print(f"Total stars: {total_stars:,}")
    print(f"Total forks: {total_forks:,}")
    print("\n--- Languages ---")
    for lang, count in sorted(languages.items(), key=lambda x: x[1], reverse=True):
        print(f"{lang}: {count}")
    print("\n--- Top 10 by stars ---")
    top_repos = sorted(repos, key=lambda x: x['stargazers_count'], reverse=True)[:10]
    for repo in top_repos:
        print(f"{repo['stargazers_count']:,} ★ - {repo['name']}")
        print(f"  {repo['html_url']}")
    # JSON output
    output_file = f"{username}_github_stats.json"
    with open(output_file, 'w') as f:
        json.dump({
            'username': username,
            'total_repos': len(repos),
            'total_stars': total_stars,
            'total_forks': total_forks,
            'languages': languages,
            'top_repos': [
                {
                    'name': repo['name'],
                    'stars': repo['stargazers_count'],
                    'forks': repo['forks_count'],
                    'language': repo.get('language'),
                    'url': repo['html_url'],
                }
                for repo in top_repos
            ],
            'generated_at': datetime.now().isoformat(),
        }, f, indent=2)
    print(f"\n✓ Report saved: {output_file}")

if __name__ == "__main__":
    import sys
    if len(sys.argv) < 2:
        print("Usage: python github_stats.py <username>")
        sys.exit(1)
    username = sys.argv[1]
    token = os.environ.get('GITHUB_TOKEN')
    repos = get_user_repos(username, token)
    generate_report(username, repos)
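Note that the script above fetches only the first page (at most 100 repositories). A minimal pagination sketch, reusing requests and GITHUB_API_BASE from the script above; get_all_user_repos is an illustrative name, not part of the generated script:

def get_all_user_repos(username: str, token: str = None):
    """Page through /users/{username}/repos until an empty page comes back."""
    headers = {'Authorization': f'token {token}'} if token else {}
    repos, page = [], 1
    while True:
        response = requests.get(
            f"{GITHUB_API_BASE}/users/{username}/repos",
            headers=headers,
            params={'per_page': 100, 'page': page},
        )
        response.raise_for_status()
        batch = response.json()
        if not batch:
            # An empty page means we have seen every repository
            break
        repos.extend(batch)
        page += 1
    return repos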
4. Backup Automation
Database backup (Bash)
#!/bin/bash
# Automated backup of a PostgreSQL database
# Usage: ./backup_postgres.sh
set -euo pipefail
# Configuration
DB_NAME="mydb"
DB_USER="postgres"
BACKUP_DIR="/var/backups/postgres"
RETENTION_DAYS=7
# Timestamp
TIMESTAMP=$(date +%Y%m%d_%H%M%S)
BACKUP_FILE="$BACKUP_DIR/${DB_NAME}_${TIMESTAMP}.sql.gz"
# Log function
log() {
    echo "[$(date +'%Y-%m-%d %H:%M:%S')] $1"
}
# Create the backup directory
mkdir -p "$BACKUP_DIR"
# Run the backup; testing the pipeline directly works with `set -e`,
# whereas a separate `$? -eq 0` check would never see the failure
log "Backup started: $DB_NAME"
if pg_dump -U "$DB_USER" "$DB_NAME" | gzip > "$BACKUP_FILE"; then
    log "✓ Backup succeeded: $BACKUP_FILE"
    # Show the file size
    SIZE=$(du -h "$BACKUP_FILE" | cut -f1)
    log "File size: $SIZE"
else
    log "✗ Backup failed"
    exit 1
fi
# Delete old backups (past the retention period)
log "Deleting old backups (older than ${RETENTION_DAYS} days)"
find "$BACKUP_DIR" -name "${DB_NAME}_*.sql.gz" -type f -mtime +$RETENTION_DAYS -delete
# List current backups
log "Current backups:"
ls -lh "$BACKUP_DIR/${DB_NAME}_"*.sql.gz
log "Backup process complete"
Cloud storage sync (Python)
#!/usr/bin/env python3
"""
Back up local files to S3.
Usage: python backup_to_s3.py /path/to/local s3://bucket-name/prefix
Requires: pip install boto3
"""
import boto3
from pathlib import Path
import logging

logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(levelname)s - %(message)s'
)

def backup_to_s3(local_path: str, s3_uri: str):
    """Back up a local directory to S3."""
    # S3 client (credentials come from the standard boto3 chain:
    # environment variables, ~/.aws/credentials, or an instance role)
    s3 = boto3.client('s3')
    # Parse the S3 URI
    if not s3_uri.startswith('s3://'):
        raise ValueError("Invalid S3 URI")
    parts = s3_uri[5:].split('/', 1)
    bucket = parts[0]
    prefix = parts[1] if len(parts) > 1 else ''
    local_path = Path(local_path)
    if not local_path.exists():
        logging.error(f"Path does not exist: {local_path}")
        return
    files_uploaded = 0
    total_size = 0
    # Upload files recursively
    for file_path in local_path.rglob('*'):
        if file_path.is_file():
            # Build the S3 key (strip the leading slash left by an empty prefix)
            relative_path = file_path.relative_to(local_path)
            s3_key = f"{prefix}/{relative_path}".replace('\\', '/').lstrip('/')
            # File size
            file_size = file_path.stat().st_size
            try:
                # Upload
                s3.upload_file(
                    str(file_path),
                    bucket,
                    s3_key,
                    ExtraArgs={'StorageClass': 'STANDARD_IA'}
                )
                logging.info(f"Uploaded: {relative_path} ({file_size:,} bytes)")
                files_uploaded += 1
                total_size += file_size
            except Exception as e:
                logging.error(f"Upload failed {relative_path}: {e}")
    logging.info(f"✓ Done: {files_uploaded} files ({total_size:,} bytes)")

if __name__ == "__main__":
    import sys
    if len(sys.argv) < 3:
        print("Usage: python backup_to_s3.py <local_path> <s3_uri>")
        sys.exit(1)
    local_path = sys.argv[1]
    s3_uri = sys.argv[2]
    backup_to_s3(local_path, s3_uri)
5. Monitoring & Alerts
Server monitoring (PowerShell)
# Server resource monitoring script
# Usage: .\monitor_server.ps1
# Thresholds
$CPU_THRESHOLD = 80
$MEMORY_THRESHOLD = 85
$DISK_THRESHOLD = 90
# Log file
$LOG_FILE = "C:\Logs\server_monitor.log"
function Write-Log {
    param($Message)
    $timestamp = Get-Date -Format "yyyy-MM-dd HH:mm:ss"
    "$timestamp - $Message" | Out-File -FilePath $LOG_FILE -Append
    Write-Host "$timestamp - $Message"
}
function Send-Alert {
    param($Subject, $Body)
    # Implement email or Slack notification logic here
    Write-Log "ALERT: $Subject - $Body"
}
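# One possible Send-Alert body, sketched as comments; the Slack incoming
# webhook and $env:SLACK_WEBHOOK_URL are assumptions, not part of the
# original script:
#
#   $payload = @{ text = "$Subject - $Body" } | ConvertTo-Json
#   Invoke-RestMethod -Uri $env:SLACK_WEBHOOK_URL -Method Post `
#       -ContentType 'application/json' -Body $payload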
# CPU usage check
$cpu = (Get-Counter '\Processor(_Total)\% Processor Time').CounterSamples.CookedValue
if ($cpu -gt $CPU_THRESHOLD) {
    Send-Alert "High CPU usage" "CPU usage: $([math]::Round($cpu, 2))%"
}
# Memory usage check
$os = Get-CimInstance Win32_OperatingSystem
$totalMemory = $os.TotalVisibleMemorySize
$freeMemory = $os.FreePhysicalMemory
$usedMemoryPercent = (($totalMemory - $freeMemory) / $totalMemory) * 100
if ($usedMemoryPercent -gt $MEMORY_THRESHOLD) {
    Send-Alert "High memory usage" "Memory usage: $([math]::Round($usedMemoryPercent, 2))%"
}
# Disk usage check
Get-PSDrive -PSProvider FileSystem | Where-Object { $_.Used -gt 0 } | ForEach-Object {
    $usedPercent = ($_.Used / ($_.Used + $_.Free)) * 100
    if ($usedPercent -gt $DISK_THRESHOLD) {
        Send-Alert "High disk usage" "Drive $($_.Name): $([math]::Round($usedPercent, 2))%"
    }
}
# Status log
Write-Log "Monitoring run complete - CPU: $([math]::Round($cpu, 2))% | Memory: $([math]::Round($usedMemoryPercent, 2))%"
6. CI/CD Scripts
Build & deploy (Bash)
#!/bin/bash
# Build & deploy a Node.js app
# Usage: ./deploy.sh production
set -euo pipefail
ENV="${1:-staging}"
PROJECT_NAME="myapp"
BUILD_DIR="dist"
DEPLOY_SERVER="user@production-server.com"
DEPLOY_PATH="/var/www/$PROJECT_NAME"
log() {
    echo "[$(date +'%Y-%m-%d %H:%M:%S')] $1"
}
# Load environment variables (source the file so values with spaces survive)
if [ -f ".env.$ENV" ]; then
    log "Loading environment variables: .env.$ENV"
    set -a
    . ".env.$ENV"
    set +a
fi
# Install dependencies
log "Installing dependencies..."
npm ci
# Run tests; with `set -e`, a bare `npm test` would exit before any `$?` check
log "Running tests..."
if ! npm test; then
    log "✗ Tests failed - aborting deploy"
    exit 1
fi
# Build
log "Building..."
npm run build
if [ ! -d "$BUILD_DIR" ]; then
    log "✗ Build failed - $BUILD_DIR not found"
    exit 1
fi
# Backup
log "Creating backup on the deploy target..."
ssh "$DEPLOY_SERVER" "cd $DEPLOY_PATH && tar -czf backup_$(date +%Y%m%d_%H%M%S).tar.gz * || true"
# Deploy
log "Deploying: $ENV"
rsync -avz --delete "$BUILD_DIR/" "$DEPLOY_SERVER:$DEPLOY_PATH/"
# Restart the service
log "Restarting service..."
ssh "$DEPLOY_SERVER" "sudo systemctl restart $PROJECT_NAME"
# Health check (|| true keeps set -e from aborting on a connection failure)
log "Running health check..."
sleep 5
HEALTH_URL="https://production-server.com/health"
STATUS=$(curl -s -o /dev/null -w "%{http_code}" "$HEALTH_URL" || true)
if [ "$STATUS" == "200" ]; then
    log "✓ Deploy succeeded - health check OK"
else
    log "✗ Health check failed (HTTP $STATUS)"
    exit 1
fi
Scheduled Execution
cron configuration examples
# Run backup daily at 2:00 AM
0 2 * * * /path/to/backup_script.sh >> /var/log/backup.log 2>&1
# Analyze logs at the top of every hour
0 * * * * /usr/bin/python3 /path/to/analyze_logs.py
# Run monitoring script every 5 minutes
*/5 * * * * /path/to/monitor_server.sh
# Clean up every Monday at 3:00 AM
0 3 * * 1 /path/to/cleanup_old_files.sh
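For jobs that can outlast their interval (such as the 5-minute monitor), wrapping the command in flock keeps runs from overlapping; a sketch, assuming util-linux flock and an illustrative lock-file path:
# Skip this run if the previous one still holds the lock
*/5 * * * * /usr/bin/flock -n /tmp/monitor_server.lock /path/to/monitor_server.sh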
Windows Task Scheduler (PowerShell)
# Register a job with Task Scheduler
$action = New-ScheduledTaskAction -Execute "PowerShell.exe" `
    -Argument "-File C:\Scripts\backup.ps1"
$trigger = New-ScheduledTaskTrigger -Daily -At 2am
$principal = New-ScheduledTaskPrincipal -UserId "SYSTEM" `
    -LogonType ServiceAccount -RunLevel Highest
Register-ScheduledTask -TaskName "DailyBackup" `
    -Action $action `
    -Trigger $trigger `
    -Principal $principal `
    -Description "Run backup daily at 2:00 AM"
Error Handling
Retry logic (Python)
import time
from functools import wraps

def retry(max_attempts=3, delay=1, backoff=2):
    """Retry decorator with exponential backoff."""
    def decorator(func):
        @wraps(func)
        def wrapper(*args, **kwargs):
            attempts = 0
            current_delay = delay
            while attempts < max_attempts:
                try:
                    return func(*args, **kwargs)
                except Exception as e:
                    attempts += 1
                    if attempts >= max_attempts:
                        raise
                    print(f"Error: {e}")
                    print(f"Retry {attempts}/{max_attempts} - retrying in {current_delay}s...")
                    time.sleep(current_delay)
                    current_delay *= backoff
        return wrapper
    return decorator

@retry(max_attempts=3, delay=2, backoff=2)
def fetch_data_from_api(url):
    import requests
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    return response.json()
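A quick usage sketch (the URL is a placeholder): with max_attempts=3, each failed attempt prints the error and waits 2 s, then 4 s, and the third failure re-raises the exception to the caller.

if __name__ == "__main__":
    # Hypothetical endpoint; replace with a real URL
    data = fetch_data_from_api("https://api.example.com/items")
    print(data)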
Usage Examples
Basic usage
Generate a Bash script that renames image files in date order.
Specific task
Generate a Python script that meets the following requirements:
Task: organize the Downloads folder by extension
Requirements:
- Create a folder for each extension group (Images, Documents, Videos, etc.)
- Append a timestamp to files with duplicate names
- Logging
- Dry-run mode
Output: Python 3.8 or later
API integration script
Generate a script that uses the GitHub API to fetch a user's repository statistics:
Features:
- Fetch the repository list
- Aggregate star and fork counts
- Language statistics
- Report output in JSON format
- Auth token support
Language: Python
Backup automation
Generate a backup script for a PostgreSQL database:
Requirements:
- Compressed backups (gzip)
- Timestamped file names
- Auto-delete backups older than 7 days
- Logging
- Error handling
Language: Bash
Output: include a cron configuration example
Best Practices
- Error handling: handle errors on every external command and API call
- Logging: log execution status in detail
- Idempotency: safe to run multiple times
- Dry run: allow verification before the real operation
- Externalized configuration: use environment variables or config files instead of hardcoding values (see the sketch after this list)
- Backups: back up before destructive operations
- Notifications: notify on the success or failure of important jobs
- Documentation: document usage in comments and a README
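A minimal sketch of the externalized-configuration practice, assuming environment variables with safe fallbacks; the variable names mirror the hardcoded values in the PostgreSQL backup script above and are illustrative:

#!/usr/bin/env python3
# Read settings from the environment instead of hardcoding them;
# the second argument to os.environ.get is the fallback default.
import os

DB_NAME = os.environ.get('BACKUP_DB_NAME', 'mydb')
BACKUP_DIR = os.environ.get('BACKUP_DIR', '/var/backups/postgres')
RETENTION_DAYS = int(os.environ.get('BACKUP_RETENTION_DAYS', '7'))

print(f"Backing up {DB_NAME} to {BACKUP_DIR} (keeping {RETENTION_DAYS} days)")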
Version Information
- Skill version: 1.0.0
- Last updated: 2025-11-22
Usage Example Summary
Simple automation
Create a script that renames files.
Detailed requirements
Generate a script that automates the following task:
{detailed requirements}
Language: Python/Bash/PowerShell
Error handling: include
Logging: include
Automate your daily repetitive tasks with this skill!