# --- Web-mirror page residue (not part of the playbook; kept as comments) ---
# Files
# homelab-optimized/ansible/automation/playbooks/backup_databases.yml
# Gitea Mirror Bot 8a947d9e36
# Some checks failed
# Documentation / Build Docusaurus (push) Failing after 5m3s
# Documentation / Deploy to GitHub Pages (push) Has been skipped
# Sanitized mirror from private repository - 2026-04-04 03:23:14 UTC
# 2026-04-04 03:23:14 +00:00
#
# 285 lines / 11 KiB / YAML

---
# Database Backup Playbook
# Automated backup of all PostgreSQL and MySQL databases across homelab.
#
# Usage:
#   ansible-playbook playbooks/backup_databases.yml
#   ansible-playbook playbooks/backup_databases.yml --limit atlantis
#   ansible-playbook playbooks/backup_databases.yml -e "backup_type=full"
#
# Optional extra vars (-e):
#   backup_type           - label recorded in logs/report ("incremental" default)
#   compress_backups      - gzip each dump (default: true)
#   verify_backups        - run integrity checks after dumping (default: true)
#   backup_retention_days - prune dumps older than N days; 0 disables (default: 30)

- name: Backup All Databases
  hosts: "{{ host_target | default('all') }}"
  gather_facts: true
  vars:
    backup_base_dir: "/volume1/backups/databases"  # Synology path (permanent storage)
    backup_local_dir: "/tmp/database_backups"      # per-host staging area
    # Database service mapping: inventory hostname -> list of databases to dump.
    # Each entry: name (backup label), type (postgresql|mysql), database,
    # container (docker container name), user (dump user inside the container).
    database_services:
      atlantis:
        - name: "immich-db"
          type: "postgresql"
          database: "immich"
          container: "immich-db"
          user: "postgres"
        - name: "vaultwarden-db"
          type: "postgresql"
          database: "vaultwarden"
          container: "vaultwarden-db"
          user: "postgres"
        - name: "joplin-db"
          type: "postgresql"
          database: "joplin"
          container: "joplin-stack-db"
          user: "postgres"
        - name: "firefly-db"
          type: "postgresql"
          database: "firefly"
          container: "firefly-db"
          user: "firefly"
      calypso:
        - name: "authentik-db"
          type: "postgresql"
          database: "authentik"
          container: "authentik-db"
          user: "postgres"
        - name: "paperless-db"
          type: "postgresql"
          database: "paperless"
          container: "paperless-db"
          user: "paperless"
      homelab_vm:
        - name: "mastodon-db"
          type: "postgresql"
          database: "mastodon"
          container: "mastodon-db"
          user: "postgres"
        - name: "matrix-db"
          type: "postgresql"
          database: "synapse"
          container: "synapse-db"
          user: "postgres"

  tasks:
    # The systemd module with only "name" queries unit state without changing it;
    # we fail fast if the Docker daemon is not active on this host.
    - name: Check if Docker is running
      ansible.builtin.systemd:
        name: docker
      register: docker_status
      failed_when: docker_status.status.ActiveState != "active"

    - name: Create backup directories
      ansible.builtin.file:
        path: "{{ item }}"
        state: directory
        mode: "0755"
      loop:
        - "{{ backup_base_dir }}/{{ inventory_hostname }}"
        - "{{ backup_local_dir }}/{{ inventory_hostname }}"
      # Permanent storage may not be mounted on every host; the local staging
      # directory is enough for the backup itself, so directory failures are soft.
      ignore_errors: true

    - name: Get current database services for this host
      ansible.builtin.set_fact:
        current_databases: "{{ database_services[inventory_hostname] | default([]) }}"

    - name: Display backup plan
      ansible.builtin.debug:
        msg: |
          📊 DATABASE BACKUP PLAN
          =======================
          🖥️ Host: {{ inventory_hostname }}
          📅 Date: {{ ansible_date_time.date }}
          🔄 Type: {{ backup_type | default('incremental') }}
          📦 Databases: {{ current_databases | length }}
          {% for db in current_databases %}
          - {{ db.name }} ({{ db.type }})
          {% endfor %}
          📁 Backup Dir: {{ backup_base_dir }}/{{ inventory_hostname }}
          🗜️ Compression: {{ compress_backups | default(true) }}

    # NOTE: the Go template must be wrapped in {% raw %} so Ansible's Jinja2
    # templating does not consume {{.Names}} before docker sees it (same
    # convention already used in the status-log task below).
    - name: Check database containers are running
      ansible.builtin.shell: docker ps --filter "name={{ item.container }}" --format '{% raw %}{{.Names}}{% endraw %}'
      register: container_check
      loop: "{{ current_databases }}"
      changed_when: false

    - name: Create pre-backup container status
      ansible.builtin.shell: |
        echo "=== PRE-BACKUP STATUS ===" > {{ backup_local_dir }}/{{ inventory_hostname }}/backup_status_{{ ansible_date_time.epoch }}.log
        echo "Host: {{ inventory_hostname }}" >> {{ backup_local_dir }}/{{ inventory_hostname }}/backup_status_{{ ansible_date_time.epoch }}.log
        echo "Date: {{ ansible_date_time.iso8601 }}" >> {{ backup_local_dir }}/{{ inventory_hostname }}/backup_status_{{ ansible_date_time.epoch }}.log
        echo "Type: {{ backup_type | default('incremental') }}" >> {{ backup_local_dir }}/{{ inventory_hostname }}/backup_status_{{ ansible_date_time.epoch }}.log
        echo "" >> {{ backup_local_dir }}/{{ inventory_hostname }}/backup_status_{{ ansible_date_time.epoch }}.log
        {% for db in current_databases %}
        echo "=== {{ db.name }} ===" >> {{ backup_local_dir }}/{{ inventory_hostname }}/backup_status_{{ ansible_date_time.epoch }}.log
        docker ps --filter "name={{ db.container }}" --format "Status: {% raw %}{{.Status}}{% endraw %}" >> {{ backup_local_dir }}/{{ inventory_hostname }}/backup_status_{{ ansible_date_time.epoch }}.log
        {% endfor %}

    - name: Backup PostgreSQL databases
      ansible.builtin.shell: |
        backup_file="{{ backup_local_dir }}/{{ inventory_hostname }}/{{ item.name }}_{{ ansible_date_time.date }}_{{ ansible_date_time.hour }}{{ ansible_date_time.minute }}.sql"
        echo "🔄 Backing up {{ item.name }}..."
        docker exec {{ item.container }} pg_dump -U {{ item.user }} {{ item.database }} > "$backup_file"
        if [ $? -eq 0 ]; then
          echo "✅ {{ item.name }} backup successful"
          {% if compress_backups | default(true) %}
          gzip "$backup_file"
          backup_file="${backup_file}.gz"
          {% endif %}
          # Get backup size
          backup_size=$(du -h "$backup_file" | cut -f1)
          echo "📦 Backup size: $backup_size"
          # Copy to permanent storage if available
          if [ -d "{{ backup_base_dir }}/{{ inventory_hostname }}" ]; then
            cp "$backup_file" "{{ backup_base_dir }}/{{ inventory_hostname }}/"
            echo "📁 Copied to permanent storage"
          fi
        else
          echo "❌ {{ item.name }} backup failed"
          exit 1
        fi
      register: postgres_backups
      loop: "{{ current_databases }}"
      when:
        - item.type == "postgresql"
        # Only dump containers the earlier `docker ps` check actually reported.
        - item.container in (container_check.results | map(attribute='stdout') | list)

    # BUGFIX: the original passed -p{{ item.password | default('') }}, which
    # degenerates to a bare `-p` when no password is configured and makes
    # mysqldump prompt interactively (hanging the play). The flag is now only
    # emitted when a password is actually defined.
    - name: Backup MySQL databases
      ansible.builtin.shell: |
        backup_file="{{ backup_local_dir }}/{{ inventory_hostname }}/{{ item.name }}_{{ ansible_date_time.date }}_{{ ansible_date_time.hour }}{{ ansible_date_time.minute }}.sql"
        echo "🔄 Backing up {{ item.name }}..."
        docker exec {{ item.container }} mysqldump -u {{ item.user }} {% if item.password is defined and item.password %}-p'{{ item.password }}'{% endif %} {{ item.database }} > "$backup_file"
        if [ $? -eq 0 ]; then
          echo "✅ {{ item.name }} backup successful"
          {% if compress_backups | default(true) %}
          gzip "$backup_file"
          backup_file="${backup_file}.gz"
          {% endif %}
          backup_size=$(du -h "$backup_file" | cut -f1)
          echo "📦 Backup size: $backup_size"
          if [ -d "{{ backup_base_dir }}/{{ inventory_hostname }}" ]; then
            cp "$backup_file" "{{ backup_base_dir }}/{{ inventory_hostname }}/"
            echo "📁 Copied to permanent storage"
          fi
        else
          echo "❌ {{ item.name }} backup failed"
          exit 1
        fi
      register: mysql_backups
      loop: "{{ current_databases }}"
      when:
        - item.type == "mysql"
        - item.container in (container_check.results | map(attribute='stdout') | list)
      no_log: true  # Hide passwords

    - name: Verify backup integrity
      ansible.builtin.shell: |
        backup_file="{{ backup_local_dir }}/{{ inventory_hostname }}/{{ item.name }}_{{ ansible_date_time.date }}_{{ ansible_date_time.hour }}{{ ansible_date_time.minute }}.sql{% if compress_backups | default(true) %}.gz{% endif %}"
        if [ -f "$backup_file" ]; then
          {% if compress_backups | default(true) %}
          # Test gzip integrity
          gzip -t "$backup_file"
          if [ $? -eq 0 ]; then
            echo "✅ {{ item.name }} backup integrity verified"
          else
            echo "❌ {{ item.name }} backup corrupted"
            exit 1
          fi
          {% else %}
          # Check if file is not empty and contains SQL
          if [ -s "$backup_file" ] && head -1 "$backup_file" | grep -q "SQL\|PostgreSQL\|MySQL"; then
            echo "✅ {{ item.name }} backup integrity verified"
          else
            echo "❌ {{ item.name }} backup appears invalid"
            exit 1
          fi
          {% endif %}
        else
          echo "❌ {{ item.name }} backup file not found"
          exit 1
        fi
      register: backup_verification
      loop: "{{ current_databases }}"
      when:
        - verify_backups | default(true) | bool
        - item.container in (container_check.results | map(attribute='stdout') | list)

    - name: Clean up old backups
      ansible.builtin.shell: |
        echo "🧹 Cleaning up backups older than {{ backup_retention_days | default(30) }} days..."
        # Clean local backups
        find "{{ backup_local_dir }}/{{ inventory_hostname }}" -name "*.sql*" -mtime +{{ backup_retention_days | default(30) }} -delete
        # Clean permanent storage backups
        if [ -d "{{ backup_base_dir }}/{{ inventory_hostname }}" ]; then
          find "{{ backup_base_dir }}/{{ inventory_hostname }}" -name "*.sql*" -mtime +{{ backup_retention_days | default(30) }} -delete
        fi
        echo "✅ Cleanup complete"
      when: backup_retention_days | default(30) | int > 0

    - name: Generate backup report
      ansible.builtin.shell: |
        report_file="{{ backup_local_dir }}/{{ inventory_hostname }}/backup_report_{{ ansible_date_time.date }}.txt"
        echo "📊 DATABASE BACKUP REPORT" > "$report_file"
        echo "=========================" >> "$report_file"
        echo "Host: {{ inventory_hostname }}" >> "$report_file"
        echo "Date: {{ ansible_date_time.iso8601 }}" >> "$report_file"
        echo "Type: {{ backup_type | default('incremental') }}" >> "$report_file"
        echo "Retention: {{ backup_retention_days | default(30) }} days" >> "$report_file"
        echo "" >> "$report_file"
        echo "📦 BACKUP RESULTS:" >> "$report_file"
        {% for db in current_databases %}
        backup_file="{{ backup_local_dir }}/{{ inventory_hostname }}/{{ db.name }}_{{ ansible_date_time.date }}_{{ ansible_date_time.hour }}{{ ansible_date_time.minute }}.sql{% if compress_backups | default(true) %}.gz{% endif %}"
        if [ -f "$backup_file" ]; then
          size=$(du -h "$backup_file" | cut -f1)
          echo "✅ {{ db.name }}: $size" >> "$report_file"
        else
          echo "❌ {{ db.name }}: FAILED" >> "$report_file"
        fi
        {% endfor %}
        echo "" >> "$report_file"
        echo "📁 BACKUP LOCATIONS:" >> "$report_file"
        echo "Local: {{ backup_local_dir }}/{{ inventory_hostname }}" >> "$report_file"
        echo "Permanent: {{ backup_base_dir }}/{{ inventory_hostname }}" >> "$report_file"
        # Copy report to permanent storage
        if [ -d "{{ backup_base_dir }}/{{ inventory_hostname }}" ]; then
          cp "$report_file" "{{ backup_base_dir }}/{{ inventory_hostname }}/"
        fi
        cat "$report_file"
      register: backup_report

    - name: Display backup summary
      ansible.builtin.debug:
        msg: |
          ✅ DATABASE BACKUP COMPLETE
          ===========================
          🖥️ Host: {{ inventory_hostname }}
          📅 Date: {{ ansible_date_time.date }}
          📦 Databases: {{ current_databases | length }}
          🔄 Type: {{ backup_type | default('incremental') }}
          {{ backup_report.stdout }}
          🔍 Next Steps:
          - Verify backups: ls -la {{ backup_local_dir }}/{{ inventory_hostname }}
          - Test restore: ansible-playbook playbooks/restore_from_backup.yml
          - Schedule regular backups via cron
          ===========================