Sanitized mirror from private repository - 2026-03-21 08:52:36 UTC
This commit is contained in:
213
scripts/add_disaster_recovery_comments.py
Normal file
213
scripts/add_disaster_recovery_comments.py
Normal file
@@ -0,0 +1,213 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Script to add basic disaster recovery comments to Docker Compose files
|
||||
that don't already have comprehensive documentation.
|
||||
"""
|
||||
|
||||
import os
|
||||
import re
|
||||
from pathlib import Path
|
||||
|
||||
def has_disaster_recovery_comments(file_path):
    """Return True if *file_path* already carries disaster recovery docs.

    Detection is a simple substring check for the two banner markers this
    script (and earlier passes) write into annotated files.

    Args:
        file_path: Path to the file to inspect (str or Path).

    Returns:
        bool: True when either marker is present; False when absent or the
        file cannot be read as text.
    """
    try:
        with open(file_path, 'r') as f:
            content = f.read()
        return 'DISASTER RECOVERY' in content or 'SERVICE OVERVIEW' in content
    except (OSError, UnicodeDecodeError):
        # Narrowed from a bare `except:` — only treat unreadable or
        # non-text files as "not annotated"; don't mask real bugs.
        return False
|
||||
def get_service_info(file_path):
    """Extract (service_name, image_name, container_name) from a compose file.

    The service name is the first bare `name:` key (a line ending in a
    colon) that is not a reserved compose top-level key. The previous
    implementation only looked at the FIRST such match — in any normal
    compose file that is `services:` (or `version:`), so valid files were
    rejected as (None, None, None). We now scan all matches.

    Args:
        file_path: Path to the Docker Compose file.

    Returns:
        tuple: (service_name, image_name, container_name); each falls back
        to 'unknown' / the service name when not found, or
        (None, None, None) when only reserved keys exist or parsing fails.
    """
    reserved = {'version', 'services', 'networks', 'volumes'}
    try:
        with open(file_path, 'r') as f:
            content = f.read()

        # All bare `key:` lines (candidate service names).
        candidates = re.findall(r'^\s*([a-zA-Z0-9_-]+):\s*$', content, re.MULTILINE)

        if not candidates:
            # Preserve the original fallback when no bare key exists at all.
            service_name = 'unknown'
        else:
            # First candidate that is not a compose top-level section.
            service_name = next((name for name in candidates if name not in reserved), None)
            if service_name is None:
                return None, None, None

        image_match = re.search(r'image:\s*([^\s\n]+)', content)
        container_match = re.search(r'container_name:\s*([^\s\n]+)', content)

        image_name = image_match.group(1) if image_match else 'unknown'
        container_name = container_match.group(1) if container_match else service_name

        return service_name, image_name, container_name

    except Exception as e:
        # Boundary handler: report and signal "unparseable" to the caller.
        print(f"Error parsing {file_path}: {e}")
        return None, None, None
||||
def generate_disaster_recovery_header(service_name, image_name, container_name, file_path):
    """Build the disaster recovery comment banner for one compose service.

    Classifies the service by keyword into a priority with RTO/RPO targets,
    then renders the standard header comment block.

    Args:
        service_name: Service key from the compose file.
        image_name: Docker image reference.
        container_name: Container name (falls back to the service name).
        file_path: Path to the compose file being annotated.

    Returns:
        str: The complete header comment text, ending in a blank line.
    """
    # Ordered classification table: the first row whose keywords appear in
    # the service or image name wins. Order matters — e.g. 'mariadb' also
    # contains 'db', and several image names would hit later rows too.
    classification = [
        (('vaultwarden', 'bitwarden', 'password'), 'MAXIMUM CRITICAL', '15 minutes', '1 hour'),
        (('plex', 'jellyfin', 'media'), 'HIGH', '30 minutes', '24 hours'),
        (('grafana', 'prometheus', 'monitoring', 'uptime'), 'HIGH', '30 minutes', '4 hours'),
        (('pihole', 'dns', 'adguard'), 'HIGH', '15 minutes', '24 hours'),
        (('nginx', 'proxy', 'traefik'), 'HIGH', '20 minutes', '24 hours'),
        (('database', 'postgres', 'mysql', 'mariadb', 'db'), 'CRITICAL', '20 minutes', '1 hour'),
    ]

    haystacks = (service_name.lower(), image_name.lower())
    # Default bucket when nothing matches.
    priority, rto, rpo = 'MEDIUM', '1 hour', '24 hours'
    for keywords, row_priority, row_rto, row_rpo in classification:
        if any(kw in hay for kw in keywords for hay in haystacks):
            priority, rto, rpo = row_priority, row_rto, row_rpo
            break

    # Path shown in the docs, relative to the homelab root.
    rel_path = str(file_path).replace('/workspace/project/homelab/', '')
    title = service_name.upper().replace('-', ' ').replace('_', ' ')

    return f"""# =============================================================================
# {title} - DOCKER SERVICE
# =============================================================================
#
# SERVICE OVERVIEW:
# - Container: {container_name}
# - Image: {image_name}
# - Configuration: {rel_path}
#
# DISASTER RECOVERY PRIORITY: {priority}
# - Recovery Time Objective (RTO): {rto}
# - Recovery Point Objective (RPO): {rpo}
#
# BACKUP REQUIREMENTS:
# - Configuration: Docker volumes and bind mounts
# - Data: Persistent volumes (if any)
# - Frequency: Daily for critical services, weekly for others
#
# DEPENDENCIES:
# - Docker daemon running
# - Network connectivity
# - Storage volumes accessible
# - Required environment variables set
#
# RECOVERY PROCEDURE:
# 1. Ensure dependencies are met
# 2. Restore configuration and data from backups
# 3. Deploy using: docker-compose -f {os.path.basename(file_path)} up -d
# 4. Verify service functionality
# 5. Update monitoring and documentation
#
# =============================================================================

"""
||||
def add_comments_to_file(file_path):
    """Wrap one compose file with a DR header banner and a DR command footer.

    Files that already carry disaster recovery markers, cannot be parsed
    into a service definition, or raise during processing are left
    untouched.

    Args:
        file_path: Path to the Docker Compose file to annotate.

    Returns:
        bool: True when the file was rewritten, False when it was skipped.
    """
    # Idempotence guard: never annotate a file twice.
    if has_disaster_recovery_comments(file_path):
        return False

    try:
        service, image, container = get_service_info(file_path)
        if not service:
            return False

        with open(file_path, 'r') as handle:
            body = handle.read()

        compose_name = os.path.basename(file_path)

        banner = generate_disaster_recovery_header(service, image, container, file_path)

        commands = f"""
# =============================================================================
# BASIC DISASTER RECOVERY COMMANDS
# =============================================================================
#
# BACKUP:
# docker-compose -f {compose_name} down
# tar -czf backup-{service}-$(date +%Y%m%d).tar.gz [volume-paths]
#
# RESTORE:
# tar -xzf backup-{service}-[date].tar.gz
# docker-compose -f {compose_name} up -d
#
# VERIFY:
# docker-compose -f {compose_name} ps
# docker logs {container}
#
# =============================================================================
"""

        # Header goes above the original content, command footer below.
        with open(file_path, 'w') as handle:
            handle.write(banner + body + commands)

        return True

    except Exception as e:
        # Boundary handler: report the failure and move on to the next file.
        print(f"Error processing {file_path}: {e}")
        return False
||||
def main():
    """Scan the homelab tree for Docker Compose files and annotate them.

    Globs every `*.yml` / `*.yaml` under the hard-coded homelab root,
    keeps those that look like compose files, runs add_comments_to_file()
    on each, and prints a processed/skipped summary.
    """
    homelab_root = Path('/workspace/project/homelab')

    # Collect every YAML file (both extensions) under the root.
    yaml_files = []
    for pattern in ['**/*.yml', '**/*.yaml']:
        yaml_files.extend(homelab_root.glob(pattern))

    # Keep only files that look like Docker Compose definitions.
    # NOTE(review): the cap is applied BEFORE filtering, so compose files
    # beyond the first 50 YAML files are never considered — kept as-is
    # from the original; confirm this is intentional.
    compose_files = []
    for file_path in yaml_files[:50]:  # Limit to first 50 files
        try:
            with open(file_path, 'r') as f:
                content = f.read()
        except (OSError, UnicodeDecodeError):
            # Narrowed from a bare `except:` — skip unreadable or binary
            # files, but don't silently mask programming errors.
            continue
        # Cheap heuristic: any compose-ish top-level keyword qualifies.
        if any(keyword in content for keyword in ['version:', 'services:', 'image:']):
            compose_files.append(file_path)

    print(f"Processing {len(compose_files)} Docker Compose files...")

    processed = 0
    skipped = 0

    for file_path in compose_files:
        if add_comments_to_file(file_path):
            processed += 1
            print(f"✓ Added DR comments to {file_path}")
        else:
            skipped += 1

    print(f"\nProcessing complete:")
    print(f"- Processed: {processed} files")
    print(f"- Skipped: {skipped} files")


if __name__ == '__main__':
    main()
|
||||
Reference in New Issue
Block a user