#!/bin/bash
#
# Mattermost Automated Backup Script
# Dumps the PostgreSQL database running in the mattermost-postgres container,
# gzips it locally, uploads the archive to Backblaze B2 via the S3 API, and
# prunes old local (7 days) and remote (30 days) backups.
#
# Requirements: docker, gzip, aws CLI, GNU date/find.
# Credentials:  sourced from /opt/mattermost/.env; AWS credentials for B2 must
#               be configured for the aws CLI (env or ~/.aws).
#
# -e: abort on any unhandled error; -u: error on unset variables;
# -o pipefail: without it a failing pg_dump is masked by the succeeding gzip
# stage and an empty/truncated backup would be uploaded as "successful".
set -euo pipefail

readonly BACKUP_DIR="/opt/mattermost/backups"
readonly B2_BUCKET="s3://vk-mattermost/backups"
readonly B2_ENDPOINT="https://s3.us-west-004.backblazeb2.com"
readonly RETENTION_DAYS=30        # remote retention (Backblaze B2)
readonly LOCAL_RETENTION_DAYS=7   # local retention on this host

DATE=$(date +%Y%m%d_%H%M%S)
readonly DATE
readonly BACKUP_FILE="mattermost_backup_${DATE}.sql.gz"

# Timestamped log line to stdout.
log() { printf '[%s] %s\n' "$(date)" "$*"; }

# Create backup directory
mkdir -p -- "${BACKUP_DIR}"

log "Starting Mattermost backup..."

# Get PostgreSQL password and related settings.
# shellcheck disable=SC1091 -- file exists only on the deployment host
source /opt/mattermost/.env

# Backup PostgreSQL database. pipefail (above) ensures a pg_dump failure
# aborts the script instead of being hidden by gzip's exit status.
log "Backing up PostgreSQL database..."
docker exec mattermost-postgres pg_dump -U mmuser -d mattermost \
  | gzip > "${BACKUP_DIR}/${BACKUP_FILE}"

# Report backup size (du instead of parsing ls output).
BACKUP_SIZE=$(du -h -- "${BACKUP_DIR}/${BACKUP_FILE}" | awk '{print $1}')
log "Backup created: ${BACKUP_FILE} (${BACKUP_SIZE})"

# Upload to Backblaze B2 using the S3-compatible API. Note: under set -e the
# old "if [ $? -eq 0 ]" pattern was dead code (a failed aws exited the script
# before the check); testing the command directly keeps the else branch live.
log "Uploading to Backblaze B2..."
if /usr/local/bin/aws s3 cp "${BACKUP_DIR}/${BACKUP_FILE}" \
     "${B2_BUCKET}/${BACKUP_FILE}" --endpoint-url "${B2_ENDPOINT}"; then
  log "Upload successful!"
else
  log "Upload failed!"
  exit 1
fi

# Clean up old local backups.
log "Cleaning up old local backups..."
find "${BACKUP_DIR}" -name "mattermost_backup_*.sql.gz" \
  -mtime +"${LOCAL_RETENTION_DAYS}" -delete

# Clean up old remote backups. Listing runs in a pipeline subshell, which is
# fine: no variables set inside the loop are needed afterwards.
log "Cleaning up old remote backups..."
CUTOFF_DATE=$(date -d "-${RETENTION_DAYS} days" +%Y%m%d)  # GNU date
/usr/local/bin/aws s3 ls "${B2_BUCKET}/" --endpoint-url "${B2_ENDPOINT}" \
  | while read -r line; do
      # aws s3 ls columns: date time size name
      FILE_NAME=$(echo "$line" | awk '{print $4}')
      # '|| true' keeps set -e/pipefail from killing the loop on names
      # that contain no 8-digit date stamp.
      FILE_DATE=$(echo "$FILE_NAME" | grep -oP '\d{8}' | head -1 || true)
      # YYYYMMDD strings compare correctly with lexicographic '<'.
      if [[ -n "$FILE_DATE" && "$FILE_DATE" < "$CUTOFF_DATE" ]]; then
        log "Deleting old backup: ${FILE_NAME}"
        /usr/local/bin/aws s3 rm "${B2_BUCKET}/${FILE_NAME}" \
          --endpoint-url "${B2_ENDPOINT}"
      fi
    done

log "Backup completed successfully!"