#!/bin/bash
# Physical backup script using mariabackup with S3 upload.
# Runs every 12 hours and keeps the last KEEP_BACKUPS local backups.
#
# Required environment:
#   MYSQL_ROOT_PASSWORD  DB password (a loud warning is printed if unset)
# Optional environment (defaults in parentheses):
#   DB_HOST (db), DB_USER (root), KEEP_BACKUPS (7),
#   S3_BUCKET (selectel:backup_db), S3_PATH (dot), ENVIRONMENT (prod)

# Fail on unset variables and broken pipelines. -e is intentionally NOT
# set: the main loop must survive individual backup failures and keep
# running as a daemon.
set -uo pipefail

readonly BACKUP_DIR="/backups"
DB_HOST="${DB_HOST:-db}"
DB_USER="${DB_USER:-root}"
# Explicit :- default so `set -u` does not kill the script when the
# password is absent; we warn instead of silently using "".
DB_PASSWORD="${MYSQL_ROOT_PASSWORD:-}"
KEEP_BACKUPS="${KEEP_BACKUPS:-7}"
S3_BUCKET="${S3_BUCKET:-selectel:backup_db}"
S3_PATH="${S3_PATH:-dot}"
ENVIRONMENT="${ENVIRONMENT:-prod}"

# Surface an obviously broken configuration immediately instead of
# retrying mariabackup with an empty password every 12 hours.
if [ -z "${DB_PASSWORD}" ]; then
  echo "WARNING: MYSQL_ROOT_PASSWORD is not set; using an empty password" >&2
fi
|
# ---------------------------------------------------------------------------
# Main daemon loop: take a full mariabackup, prepare it, compress it, upload
# it to S3 (when rclone is configured), prune old archives, then sleep 12h.
# A failure in any single cycle is logged but never aborts the loop.
# ---------------------------------------------------------------------------

# log MESSAGE... — timestamped log line, same format as the original echoes.
log() {
  echo "[$(date)] $*"
}

# upload_archive ARCHIVE — copy the archive to S3 when rclone is configured;
# upload failure is a warning only (the local copy is still kept).
upload_archive() {
  local archive="$1"
  if [ -f /root/.config/rclone/rclone.conf ]; then
    log "Uploading backup to S3..."
    if rclone copy "${archive}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress; then
      log "Successfully uploaded backup to S3"
    else
      log "WARNING: Failed to upload backup to S3"
    fi
  else
    log "Rclone not configured, keeping backup locally only"
  fi
}

# run_backup_cycle — one full backup/prepare/compress/upload pass.
# Returns non-zero (after cleaning up partial output) if any step fails.
run_backup_cycle() {
  local day_name backup_name backup_path backup_archive backup_size
  # Day-of-week naming gives a rolling 7-slot scheme: Monday's backup
  # overwrites last Monday's.
  day_name=$(date +%a)
  backup_name="full_backup.${day_name}"
  backup_path="${BACKUP_DIR}/${backup_name}"
  backup_archive="${backup_path}.tar.gz"

  # Remove last week's backup for the same weekday, if present.
  if [ -d "${backup_path}" ]; then
    log "Removing old backup: ${backup_path}"
    rm -rf -- "${backup_path}"
  fi
  [ -f "${backup_archive}" ] && rm -f -- "${backup_archive}"

  log "Creating full physical backup..."
  # NOTE(review): --password on the command line is visible in `ps` output;
  # consider moving credentials to a --defaults-extra-file instead.
  if ! mariabackup --backup \
      --target-dir="${backup_path}" \
      --host="${DB_HOST}" \
      --user="${DB_USER}" \
      --password="${DB_PASSWORD}" \
      --no-lock \
      --parallel=4; then
    log "ERROR: Failed to create backup!"
    [ -d "${backup_path}" ] && rm -rf -- "${backup_path}"
    return 1
  fi

  log "Backup created successfully, preparing..."
  # Apply the redo log so the copied datadir is consistent and restorable.
  if ! mariabackup --prepare --target-dir="${backup_path}"; then
    log "ERROR: Failed to prepare backup!"
    rm -rf -- "${backup_path}"
    return 1
  fi

  log "Backup prepared successfully, compressing..."
  if ! tar -czf "${backup_archive}" -C "${BACKUP_DIR}" "${backup_name}"; then
    log "ERROR: Failed to compress backup!"
    rm -rf -- "${backup_path}"
    return 1
  fi

  # Compression succeeded: the uncompressed directory is now redundant.
  rm -rf -- "${backup_path}"
  backup_size=$(du -h "${backup_archive}" | cut -f1)
  log "Backup compressed: ${backup_archive} (${backup_size})"

  upload_archive "${backup_archive}"
}

# prune_old_backups — keep only the newest KEEP_BACKUPS local archives,
# then list what remains.
prune_old_backups() {
  log "Cleaning old local backups..."
  # Archive names are generated as full_backup.<Day>.tar.gz — no whitespace
  # or glob characters — so parsing `ls -t` is safe here (SC2012 accepted).
  ls -t -- "${BACKUP_DIR}"/full_backup.*.tar.gz 2>/dev/null \
    | tail -n +$((KEEP_BACKUPS + 1)) \
    | xargs -r rm -f --

  log "Current local backups:"
  ls -lah -- "${BACKUP_DIR}"/*.tar.gz 2>/dev/null || echo "No backups found"
}

while true; do
  log "Starting physical backup process with mariabackup..."
  run_backup_cycle
  prune_old_backups
  log "Next backup will run in 12 hours..."
  echo "========================================="
  # Sleep for 12 hours (43200 seconds).
  sleep 43200
done