A comprehensive backup strategy providing data protection, disaster recovery capability, and automated backup procedures across all homelab services and infrastructure.
┌───────────────────────────────────────────────────────────────┐
│                     Backup Architecture                       │
│                                                               │
│   Production Data  ──►   Local Backup   ──►  Offsite Backup   │
│                                                               │
│   ┌─────────────┐      ┌─────────────┐     ┌─────────────┐    │
│   │   Server    │      │    NAS02    │     │ Backblaze B2│    │
│   │   Storage   │      │  (30 days)  │     │  (90 days)  │    │
│   └─────────────┘      └─────────────┘     └─────────────┘    │
│                                                               │
│    Real-time            Hourly/Daily        Daily/Weekly      │
│    Replication          Local Backups       Cloud Backups     │
└───────────────────────────────────────────────────────────────┘
## Recovery Objectives

Recovery objectives, by recovery source (matching the three stages above):

| Recovery source                 | RTO        | RPO          |
|---------------------------------|------------|--------------|
| Real-time replication (server)  | < 1 hour   | < 15 minutes |
| Local backups (NAS02)           | < 4 hours  | < 1 hour     |
| Offsite backups (Backblaze B2)  | < 24 hours | < 24 hours   |
## NAS02 Storage Layout
/backup/
├── daily/            # Daily incremental backups
├── weekly/           # Weekly full backups
├── monthly/          # Monthly archive backups
├── photos/           # Family photo archive
├── documents/        # Document management backups
├── databases/        # Database dump files
├── configurations/   # System configuration backups
└── media/            # Media library backups
## Cron schedule on nas02
0 1 * * * /opt/scripts/daily-backup.sh # 1 AM daily
0 2 * * 0 /opt/scripts/weekly-backup.sh # 2 AM Sunday
0 3 1 * * /opt/scripts/monthly-backup.sh # 3 AM 1st of month
0 4 * * * /opt/scripts/offsite-sync.sh # 4 AM daily cloud sync
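If a daily run overruns, the 4 AM offsite sync can start while archives are still being written. One option is to serialize the jobs with flock; a sketch of modified entries (the lock path is an assumption):
## Serialized variants of the entries above - the sync waits for the lock
0 1 * * * flock /var/lock/homelab-backup.lock /opt/scripts/daily-backup.sh
0 4 * * * flock /var/lock/homelab-backup.lock /opt/scripts/offsite-sync.sh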
#!/bin/bash
## /opt/scripts/daily-backup.sh
set -euo pipefail
BACKUP_DATE=$(date +%Y%m%d)
BACKUP_ROOT="/backup/daily/$BACKUP_DATE"
mkdir -p "$BACKUP_ROOT"
## Service databases
echo "Backing up databases..."
docker exec authentik-db pg_dump -U authentik authentik | gzip > "$BACKUP_ROOT/authentik.sql.gz"
## sqlite's .backup runs inside the container, so write there and copy out
docker exec mealie sqlite3 /data/mealie.db ".backup /tmp/mealie.db"
docker cp mealie:/tmp/mealie.db "$BACKUP_ROOT/mealie.db"
docker exec mealie rm -f /tmp/mealie.db
docker exec paperless-db pg_dump -U paperless paperless | gzip > "$BACKUP_ROOT/paperless.sql.gz"
## Application data
echo "Backing up application data..."
tar czf "$BACKUP_ROOT/paperless-media.tar.gz" -C /opt/paperless media/
tar czf "$BACKUP_ROOT/n8n-data.tar.gz" -C /opt/n8n .n8n/
tar czf "$BACKUP_ROOT/home-assistant.tar.gz" -C /opt/homeassistant config/
## System configurations
echo "Backing up configurations..."
tar czf "$BACKUP_ROOT/docker-configs.tar.gz" -C /opt/docker-compose .
tar czf "$BACKUP_ROOT/caddy-config.tar.gz" -C /opt/caddy .
## Verify backups
echo "Verifying backup integrity..."
for file in "$BACKUP_ROOT"/*.{gz,db}; do
    [[ -e $file ]] || continue  # skip unmatched glob patterns
    if [[ $file == *.gz ]]; then
        gunzip -t "$file" && echo "✓ $file" || echo "✗ $file FAILED"
    elif [[ $file == *.db ]]; then
        sqlite3 "$file" "PRAGMA integrity_check;" | grep -q "ok" && echo "✓ $file" || echo "✗ $file FAILED"
    fi
done
## Clean old backups (keep 30 days)
find /backup/daily -maxdepth 1 -type d -name "20*" -mtime +30 -exec rm -rf {} +
echo "Daily backup completed: $BACKUP_ROOT"
#!/bin/bash
## /opt/scripts/offsite-sync.sh
set -euo pipefail
## Backblaze B2 configuration
## Read secrets separately so a missing file fails under set -e
B2_ACCOUNT_ID=$(cat /etc/secrets/b2-account-id)
B2_APPLICATION_KEY=$(cat /etc/secrets/b2-app-key)
export B2_ACCOUNT_ID B2_APPLICATION_KEY
B2_BUCKET="homelab-backups"
## Sync recent backups to cloud
echo "Syncing to Backblaze B2..."
## Critical data (daily sync)
b2 sync --keepDays 90 --threads 4 \
/backup/daily/ b2://$B2_BUCKET/daily/
## Family photos (irreplaceable - no keepDays, so nothing is ever pruned)
b2 sync --threads 8 \
/backup/photos/ b2://$B2_BUCKET/photos/
## Documents (daily sync)
b2 sync --keepDays 365 --threads 4 \
/backup/documents/ b2://$B2_BUCKET/documents/
## Weekly archives (weekly sync)
if [[ $(date +%u) -eq 7 ]]; then
b2 sync --keepDays 365 --threads 2 \
/backup/weekly/ b2://$B2_BUCKET/weekly/
fi
echo "Cloud sync completed"
## Home Assistant automation for backups
automation:
- alias: 'Daily Backup'
trigger:
- platform: time
at: '02:30:00'
action:
- service: hassio.backup_full
data:
name: 'Daily Backup {{ now().strftime("%Y%m%d") }}'
password: !secret backup_password
- delay: '00:30:00' # Wait for backup completion
- service: shell_command.sync_backup_to_nas
data:
backup_name: 'Daily Backup {{ now().strftime("%Y%m%d") }}'
- alias: 'Cleanup Old Backups'
trigger:
- platform: time
at: '03:00:00'
action:
- service: python_script.cleanup_backups
data:
keep_days: 7
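The automation above calls shell_command.sync_backup_to_nas, which must be declared in configuration.yaml and pointed at a script on disk. A minimal sketch of that script, assuming Home Assistant backups land in /backup and nas02 accepts SSH from a backup user:
#!/bin/bash
## /config/scripts/sync_backup_to_nas.sh (sketch - host, user, and paths are assumptions)
set -euo pipefail
## Copy the newest Home Assistant backup archive to the NAS
LATEST=$(ls -t /backup/*.tar | head -1)
rsync -a "$LATEST" backup@nas02:/backup/configurations/home-assistant/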
#!/bin/bash
## Comprehensive Paperless backup
set -euo pipefail
BACKUP_DATE=$(date +%Y%m%d_%H%M%S)
BACKUP_DIR="/backup/paperless/$BACKUP_DATE"
mkdir -p "$BACKUP_DIR"
## Database backup
docker exec paperless-db pg_dump -U paperless paperless | gzip > "$BACKUP_DIR/database.sql.gz"
## Media files (documents, thumbnails)
tar czf "$BACKUP_DIR/media.tar.gz" -C /opt/paperless media/
## Configuration and index
tar czf "$BACKUP_DIR/data.tar.gz" -C /opt/paperless data/
## Export document data (the exporter writes inside the container; the standard
## compose setup mounts ./export, assumed here at /opt/paperless/export)
docker exec paperless-webserver document_exporter ../export
mkdir -p "$BACKUP_DIR/export"
cp -a /opt/paperless/export/. "$BACKUP_DIR/export/"
echo "Paperless backup completed: $BACKUP_DIR"
#!/bin/bash
## Jellyfin metadata and configuration backup
set -euo pipefail
BACKUP_DATE=$(date +%Y%m%d)
BACKUP_DIR="/backup/jellyfin/$BACKUP_DATE"
mkdir -p "$BACKUP_DIR"
## Database backup (.backup writes inside the container, then copy out)
docker exec jellyfin sqlite3 /config/data/jellyfin.db ".backup /tmp/jellyfin.db"
docker cp jellyfin:/tmp/jellyfin.db "$BACKUP_DIR/jellyfin.db"
docker exec jellyfin rm -f /tmp/jellyfin.db
## Configuration (metadata excluded to avoid duplicating the archive below)
tar czf "$BACKUP_DIR/config.tar.gz" -C /opt/jellyfin --exclude=config/metadata config/
## Library metadata (not media files - too large)
tar czf "$BACKUP_DIR/metadata.tar.gz" -C /opt/jellyfin config/metadata/
echo "Jellyfin backup completed: $BACKUP_DIR"
#!/bin/bash
## Authentik complete backup
set -euo pipefail
BACKUP_DATE=$(date +%Y%m%d)
BACKUP_DIR="/backup/authentik/$BACKUP_DATE"
mkdir -p "$BACKUP_DIR"
## Database backup
docker exec authentik-db pg_dump -U authentik authentik | gzip > "$BACKUP_DIR/database.sql.gz"
## Media files (user avatars, branding)
tar czf "$BACKUP_DIR/media.tar.gz" -C /opt/authentik media/
## Custom templates and certificates
tar czf "$BACKUP_DIR/custom-data.tar.gz" -C /opt/authentik custom-templates/ certs/
## Export configuration
docker exec authentik-server ak export > "$BACKUP_DIR/authentik-config.json"
echo "Authentik backup completed: $BACKUP_DIR"
#!/bin/bash
## Full system recovery from backups
set -euo pipefail
RECOVERY_DATE=${1:-$(date +%Y%m%d)}
BACKUP_SOURCE="/backup/daily/$RECOVERY_DATE"
echo "Starting system recovery from $BACKUP_SOURCE"
## Stop all services
docker compose down
## Bring the database containers back up first so the restores can run
docker compose up -d authentik-db paperless-db
sleep 15
## Restore databases (drop and recreate so each dump loads into a clean schema)
echo "Restoring databases..."
docker exec authentik-db dropdb -U authentik authentik && docker exec authentik-db createdb -U authentik authentik
gunzip -c "$BACKUP_SOURCE/authentik.sql.gz" | docker exec -i authentik-db psql -U authentik authentik
docker exec paperless-db dropdb -U paperless paperless && docker exec paperless-db createdb -U paperless paperless
gunzip -c "$BACKUP_SOURCE/paperless.sql.gz" | docker exec -i paperless-db psql -U paperless paperless
cp "$BACKUP_SOURCE/mealie.db" /opt/mealie/data/mealie.db
## Restore application data
echo "Restoring application data..."
tar xzf "$BACKUP_SOURCE/paperless-media.tar.gz" -C /opt/paperless/
tar xzf "$BACKUP_SOURCE/n8n-data.tar.gz" -C /opt/n8n/
tar xzf "$BACKUP_SOURCE/home-assistant.tar.gz" -C /opt/homeassistant/
## Restore configurations
echo "Restoring configurations..."
tar xzf "$BACKUP_SOURCE/docker-configs.tar.gz" -C /opt/docker-compose/
tar xzf "$BACKUP_SOURCE/caddy-config.tar.gz" -C /opt/caddy/
## Set proper permissions
chown -R 1000:1000 /opt/paperless/media/
chown -R 1000:1000 /opt/n8n/.n8n/
chown -R 1000:1000 /opt/homeassistant/config/
## Start services
docker compose up -d
## Verify recovery
sleep 60
./scripts/verify-services.sh
echo "System recovery completed"
#!/bin/bash
## Recover individual service
set -euo pipefail
SERVICE=${1:?"Usage: $0 <service-name> [backup-date]"}
BACKUP_DATE=${2:-$(date +%Y%m%d)}
case $SERVICE in
"paperless")
echo "Recovering Paperless-ngx..."
docker compose stop paperless-webserver paperless-consumer paperless-scheduler
## Restore database (recreate it first so the dump loads cleanly) and files
docker exec paperless-db dropdb -U paperless paperless && docker exec paperless-db createdb -U paperless paperless
gunzip -c "/backup/paperless/$BACKUP_DATE/database.sql.gz" | docker exec -i paperless-db psql -U paperless paperless
tar xzf "/backup/paperless/$BACKUP_DATE/media.tar.gz" -C /opt/paperless/
docker compose start paperless-webserver paperless-consumer paperless-scheduler
;;
"authentik")
echo "Recovering Authentik..."
docker compose stop authentik-server authentik-worker
gunzip -c "/backup/authentik/$BACKUP_DATE/database.sql.gz" | docker exec -i authentik-db psql -U authentik authentik
tar xzf "/backup/authentik/$BACKUP_DATE/media.tar.gz" -C /opt/authentik/
docker compose start authentik-server authentik-worker
;;
*)
echo "Unknown service: $SERVICE"
exit 1
;;
esac
echo "$SERVICE recovery completed"
#!/bin/bash
## Monthly recovery test - restore to test environment
set -euo pipefail
TEST_ENV="/opt/recovery-test"
BACKUP_DATE=$(date -d "1 day ago" +%Y%m%d)
echo "Starting recovery test with backup from $BACKUP_DATE"
## Create isolated test environment
mkdir -p "$TEST_ENV"
cd "$TEST_ENV"
## Copy test docker-compose configuration
cp /opt/templates/recovery-test-compose.yml docker-compose.yml
## Start the test stack first so the database container exists
docker compose up -d
sleep 30
## Restore the test database, then exercise the applications
gunzip -c "/backup/daily/$BACKUP_DATE/paperless.sql.gz" | docker exec -i test-paperless-db psql -U paperless paperless
## Verify services respond
curl -f http://localhost:8001/admin/ || echo "✗ Paperless recovery test failed"
curl -f http://localhost:8002/if/admin/ || echo "✗ Authentik recovery test failed"
## Cleanup test environment
docker compose down -v
rm -rf "$TEST_ENV"
echo "Recovery test completed"
#!/bin/bash
## Verify backup integrity and completeness
BACKUP_DATE=${1:-$(date +%Y%m%d)}
BACKUP_DIR="/backup/daily/$BACKUP_DATE"
echo "Verifying backup integrity for $BACKUP_DATE"
## Check file integrity
for file in "$BACKUP_DIR"/*.gz; do
    [[ -e $file ]] || continue  # skip unmatched glob patterns
    if gunzip -t "$file" 2>/dev/null; then
        echo "✓ $(basename "$file") - integrity OK"
    else
        echo "✗ $(basename "$file") - corruption detected"
    fi
done
## Check database backups
for db in "$BACKUP_DIR"/*.db; do
    [[ -e $db ]] || continue  # skip unmatched glob patterns
    if sqlite3 "$db" "PRAGMA integrity_check;" | grep -q "ok"; then
        echo "✓ $(basename "$db") - database OK"
    else
        echo "✗ $(basename "$db") - database corrupted"
    fi
done
## Check backup completeness
EXPECTED_FILES=(
"authentik.sql.gz"
"paperless.sql.gz"
"mealie.db"
"paperless-media.tar.gz"
"n8n-data.tar.gz"
"home-assistant.tar.gz"
)
for file in "${EXPECTED_FILES[@]}"; do
    if [[ -f "$BACKUP_DIR/$file" ]]; then
        echo "✓ $file - present"
    else
        echo "✗ $file - missing"
    fi
done
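Verification only helps if failures reach someone. A small wrapper can feed the verifier's output into the notification script shown below (backup-notify.sh is an assumed name; the notification script is unnamed in this document):
## Sketch: report verification results through the notification script
BACKUP_DATE=${1:-$(date +%Y%m%d)}
if /opt/scripts/verify-backup.sh "$BACKUP_DATE" | grep -q "✗"; then
    /opt/scripts/backup-notify.sh failure daily
else
    /opt/scripts/backup-notify.sh success daily "$(du -sh "/backup/daily/$BACKUP_DATE" | cut -f1)"
fi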
#!/bin/bash
## Monitor backup sizes and growth
BACKUP_ROOT="/backup"
echo "Backup Size Report - $(date)"
echo "=================================="
## Daily backup sizes
echo "Daily Backups:"
du -sh "$BACKUP_ROOT"/daily/* | tail -7
## Weekly backup sizes
echo "Weekly Backups:"
du -sh "$BACKUP_ROOT"/weekly/* | tail -4
## Total usage
echo "Total Backup Usage:"
du -sh "$BACKUP_ROOT"/*
## Growth rate calculation (du -s reports 1K blocks)
CURRENT_SIZE=$(du -s "$BACKUP_ROOT" | cut -f1)
LAST_MONTH_SIZE=$(cat /var/log/backup-size-$(date -d "1 month ago" +%Y%m) 2>/dev/null || echo "0")
GROWTH=$((CURRENT_SIZE - LAST_MONTH_SIZE))
echo "Monthly Growth: ${GROWTH}KB"
echo "$CURRENT_SIZE" > "/var/log/backup-size-$(date +%Y%m)"
#!/bin/bash
## Backup notification system
BACKUP_STATUS=${1:?"Usage: $0 <success|failure>"}
BACKUP_TYPE=${2:-"daily"}
BACKUP_SIZE=${3:-"unknown"}
WEBHOOK_URL="https://discord.com/api/webhooks/YOUR_WEBHOOK"
case $BACKUP_STATUS in
"success")
MESSAGE="β
$BACKUP_TYPE backup completed successfully (Size: $BACKUP_SIZE)"
COLOR="3066993" # Green
;;
"failure")
MESSAGE="β $BACKUP_TYPE backup failed! Manual intervention required."
COLOR="15158332" # Red
;;
esac
## Send Discord notification
curl -X POST "$WEBHOOK_URL" \
-H "Content-Type: application/json" \
-d "{
\"embeds\": [{
\"title\": \"Backup Status\",
\"description\": \"$MESSAGE\",
\"color\": $COLOR,
\"timestamp\": \"$(date -u +%Y-%m-%dT%H:%M:%S.000Z)\"
}]
}"
## Log to Seq
logger -t backup "status=$BACKUP_STATUS type=$BACKUP_TYPE size=$BACKUP_SIZE"
## Encrypt sensitive backups before cloud upload
encrypt_backup() {
    local SOURCE_FILE="$1"
    local ENCRYPTED_FILE="$1.enc"
    ## Encrypt with AES-256 (PBKDF2 key derivation avoids openssl's weak default)
    openssl enc -aes-256-cbc -salt -pbkdf2 -in "$SOURCE_FILE" -out "$ENCRYPTED_FILE" -k "$(cat /etc/secrets/backup-key)"
    ## Verify by decrypting and comparing against the original
    if openssl enc -aes-256-cbc -d -pbkdf2 -in "$ENCRYPTED_FILE" -k "$(cat /etc/secrets/backup-key)" | cmp -s "$SOURCE_FILE" -; then
        echo "✓ Encryption verified for $SOURCE_FILE"
        rm "$SOURCE_FILE" # Keep only encrypted version
    else
        echo "✗ Encryption failed for $SOURCE_FILE"
        return 1
    fi
}
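Recovery needs the inverse operation; a matching sketch, assuming the same key file and cipher parameters as encrypt_backup above:
## Decrypt an encrypted backup during recovery (file.tar.gz.enc -> file.tar.gz)
decrypt_backup() {
    local ENCRYPTED_FILE="$1"
    local OUTPUT_FILE="${1%.enc}"
    openssl enc -aes-256-cbc -d -pbkdf2 -in "$ENCRYPTED_FILE" \
        -out "$OUTPUT_FILE" -k "$(cat /etc/secrets/backup-key)"
}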
## Backup directory permissions
chmod 700 /backup/
chown backup:backup /backup/
setfacl -m u:backup:rwx,g::---,o::--- /backup/
## Backup script permissions
chmod 750 /opt/scripts/backup-*.sh
chown root:backup /opt/scripts/backup-*.sh
-- Seq query for backup growth analysis
SELECT
DateTrunc('day', @Timestamp) as Date,
AVG(ToNumber(@Fields.backup_size_mb)) as AvgBackupSize,
MAX(ToNumber(@Fields.backup_size_mb)) as MaxBackupSize
FROM stream
WHERE @Fields.backup_type = 'daily'
  AND @Timestamp > Now() - 30d
GROUP BY DateTrunc('day', @Timestamp)
ORDER BY Date
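For this query to return rows, backup events must arrive in Seq with backup_type and backup_size_mb as structured properties; plain syslog text from logger will not parse into fields on its own. A sketch posting a CLEF-format event to Seq's raw ingestion endpoint (the Seq URL and property names are assumptions):
## Sketch: emit a structured backup event to Seq in CLEF format
SEQ_URL="http://seq.internal:5341"
SIZE_MB=$(du -sm "/backup/daily/$(date +%Y%m%d)" | cut -f1)
curl -s -X POST "$SEQ_URL/api/events/raw?clef" \
    -H "Content-Type: application/vnd.serilog.clef" \
    -d "{\"@t\":\"$(date -u +%Y-%m-%dT%H:%M:%SZ)\",\"@mt\":\"Backup completed\",\"backup_type\":\"daily\",\"backup_size_mb\":$SIZE_MB}"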
#!/bin/bash
## Optimize retention based on storage usage
set -euo pipefail
STORAGE_USAGE=$(df /backup | tail -1 | awk '{print $5}' | sed 's/%//')
if [[ $STORAGE_USAGE -gt 90 ]]; then
echo "Storage usage critical: ${STORAGE_USAGE}%"
## Reduce retention for non-critical backups
find /backup/daily -name "*.tar.gz" -mtime +14 -delete
find /backup/weekly -name "*.tar.gz" -mtime +60 -delete
elif [[ $STORAGE_USAGE -gt 80 ]]; then
echo "Storage usage high: ${STORAGE_USAGE}%"
## Standard cleanup
find /backup/daily -name "*.tar.gz" -mtime +21 -delete
fi