diff --git a/Dockerfile b/Dockerfile
index 69a80e7..472546d 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,14 +1,8 @@
 FROM mariadb:10.6.23
 
-# Install mariabackup and required tools
+# Install rclone (ignore apt update errors from old repos)
 RUN apt-get update || true && \
-    apt-get install -y --no-install-recommends \
-    mariadb-backup \
-    curl \
-    unzip \
-    ca-certificates \
-    tar \
-    gzip && \
+    apt-get install -y --no-install-recommends curl unzip ca-certificates && \
     curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \
     unzip rclone.zip && \
     cp rclone-*/rclone /usr/bin/ && \
@@ -19,11 +13,7 @@ RUN apt-get update || true && \
 
 COPY scripts/backup.sh /backup.sh
 # Create config directory
-RUN mkdir -p /root/.config/rclone && \
-    chmod +x /backup.sh
-
-# Create backup directory
-RUN mkdir -p /backups
+RUN mkdir -p /root/.config/rclone
 
 # Default command
 CMD ["sh", "/backup.sh"]
\ No newline at end of file
diff --git a/Dockerfile.old b/Dockerfile.old
deleted file mode 100644
index 472546d..0000000
--- a/Dockerfile.old
+++ /dev/null
@@ -1,19 +0,0 @@
-FROM mariadb:10.6.23
-
-# Install rclone (ignore apt update errors from old repos)
-RUN apt-get update || true && \
-    apt-get install -y --no-install-recommends curl unzip ca-certificates && \
-    curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \
-    unzip rclone.zip && \
-    cp rclone-*/rclone /usr/bin/ && \
-    chmod 755 /usr/bin/rclone && \
-    rm -rf rclone* && \
-    apt-get clean && \
-    rm -rf /var/lib/apt/lists/*
-
-COPY scripts/backup.sh /backup.sh
-# Create config directory
-RUN mkdir -p /root/.config/rclone
-
-# Default command
-CMD ["sh", "/backup.sh"]
\ No newline at end of file
diff --git a/scripts/backup.sh b/scripts/backup.sh
index fe470c0..311a9a4 100644
--- a/scripts/backup.sh
+++ b/scripts/backup.sh
@@ -1,6 +1,6 @@
 #!/bin/bash
 
-# Physical backup script using mariabackup with S3 upload
+# Database backup script with S3 upload
 # Runs every 12 hours and keeps last 7 local backups
 
 BACKUP_DIR="/backups"
@@ -13,90 +13,78 @@ S3_PATH="${S3_PATH:-dot}"
 ENVIRONMENT="${ENVIRONMENT:-prod}"
 
 while true; do
-    echo "[$(date)] Starting physical backup process with mariabackup..."
+    echo "[$(date)] Starting database backup process..."
 
-    TIMESTAMP=$(date +%Y%m%d_%H%M%S)
-    DAY_NAME=$(date +%a)
-    BACKUP_NAME="full_backup.${DAY_NAME}"
-    BACKUP_PATH="${BACKUP_DIR}/${BACKUP_NAME}"
-    BACKUP_ARCHIVE="${BACKUP_PATH}.tar.gz"
+    # Get list of databases (exclude system databases)
+    DATABASES=$(mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" -s -AN -e 'show databases' | grep -vE "information_schema|performance_schema|sys")
 
-    # Remove previous backup with same day name if exists
-    if [ -d "${BACKUP_PATH}" ]; then
-        echo "[$(date)] Removing old backup: ${BACKUP_PATH}"
-        rm -rf "${BACKUP_PATH}"
+    if [ $? -ne 0 ]; then
+        echo "[$(date)] ERROR: Failed to get database list!"
+        sleep 43200
+        continue
     fi
-    [ -f "${BACKUP_ARCHIVE}" ] && rm -f "${BACKUP_ARCHIVE}"
 
-    echo "[$(date)] Creating full physical backup..."
+    # Backup each database separately
+    for database in ${DATABASES}; do
+        TIMESTAMP=$(date +%Y%m%d_%H%M%S)
+        DAY_NAME=$(date +%a)
+        BACKUP_FILE="${BACKUP_DIR}/${database}.${DAY_NAME}.sql.gz"
 
-    # Create full backup using mariabackup
-    mariabackup --backup \
-        --target-dir="${BACKUP_PATH}" \
-        --host="${DB_HOST}" \
-        --user="${DB_USER}" \
-        --password="${DB_PASSWORD}" \
-        --no-lock \
-        --parallel=4
+        echo "[$(date)] Backing up database: ${database}..."
 
-    if [ $? -eq 0 ]; then
-        echo "[$(date)] Backup created successfully, preparing..."
-
-        # Prepare the backup (apply log)
-        mariabackup --prepare \
-            --target-dir="${BACKUP_PATH}"
+        # Create backup with compression (MariaDB compatible)
+        mysqldump -h "${DB_HOST}" \
+            -u"${DB_USER}" \
+            -p"${DB_PASSWORD}" \
+            --max-allowed-packet=1G \
+            --add-drop-table \
+            --single-transaction \
+            --extended-insert \
+            --quick \
+            --lock-tables=false \
+            --skip-add-locks \
+            --skip-comments \
+            "${database}" | gzip -c > "${BACKUP_FILE}"
 
         if [ $? -eq 0 ]; then
-            echo "[$(date)] Backup prepared successfully, compressing..."
+            # Get file size for logging
+            BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1)
+            echo "[$(date)] Backup completed: ${database} (${BACKUP_SIZE})"
 
-            # Compress the backup
-            tar -czf "${BACKUP_ARCHIVE}" -C "${BACKUP_DIR}" "${BACKUP_NAME}"
+            # Upload to S3 if rclone is configured
+            if [ -f /root/.config/rclone/rclone.conf ]; then
+                echo "[$(date)] Uploading ${database} to S3..."
+                rclone copy "${BACKUP_FILE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress
 
-            if [ $? -eq 0 ]; then
-                # Remove uncompressed backup to save space
-                rm -rf "${BACKUP_PATH}"
-
-                # Get file size for logging
-                BACKUP_SIZE=$(du -h "${BACKUP_ARCHIVE}" | cut -f1)
-                echo "[$(date)] Backup compressed: ${BACKUP_ARCHIVE} (${BACKUP_SIZE})"
-
-                # Upload to S3 if rclone is configured
-                if [ -f /root/.config/rclone/rclone.conf ]; then
-                    echo "[$(date)] Uploading backup to S3..."
-                    rclone copy "${BACKUP_ARCHIVE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress
-
-                    if [ $? -eq 0 ]; then
-                        echo "[$(date)] Successfully uploaded backup to S3"
-                    else
-                        echo "[$(date)] WARNING: Failed to upload backup to S3"
-                    fi
+                if [ $? -eq 0 ]; then
+                    echo "[$(date)] Successfully uploaded ${database} to S3"
+                    # Optional: remove local backup after successful upload
+                    # rm -f "${BACKUP_FILE}"
                 else
-                    echo "[$(date)] Rclone not configured, keeping backup locally only"
+                    echo "[$(date)] WARNING: Failed to upload ${database} to S3"
                 fi
             else
-                echo "[$(date)] ERROR: Failed to compress backup!"
-                rm -rf "${BACKUP_PATH}"
+                echo "[$(date)] Rclone not configured, keeping backup locally only"
            fi
         else
-            echo "[$(date)] ERROR: Failed to prepare backup!"
-            rm -rf "${BACKUP_PATH}"
+            echo "[$(date)] ERROR: Failed to backup ${database}!"
+            [ -f "${BACKUP_FILE}" ] && rm -f "${BACKUP_FILE}"
         fi
-    else
-        echo "[$(date)] ERROR: Failed to create backup!"
-        [ -d "${BACKUP_PATH}" ] && rm -rf "${BACKUP_PATH}"
-    fi
+    done
 
-    # Clean old local backups (keep last N backups)
+    # Clean old local backups (keep last N days for each database)
     echo "[$(date)] Cleaning old local backups..."
-    ls -t ${BACKUP_DIR}/full_backup.*.tar.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f
+    for database in ${DATABASES}; do
+        ls -t ${BACKUP_DIR}/${database}.*.sql.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f
+    done
 
     # List current backups
     echo "[$(date)] Current local backups:"
-    ls -lah ${BACKUP_DIR}/*.tar.gz 2>/dev/null || echo "No backups found"
+    ls -lah ${BACKUP_DIR}/*.sql.gz 2>/dev/null || echo "No backups found"
 
     echo "[$(date)] Next backup will run in 12 hours..."
echo "=========================================" # Sleep for 12 hours (43200 seconds) sleep 43200 -done \ No newline at end of file +done diff --git a/scripts/backup.sh.old b/scripts/backup.sh.old deleted file mode 100644 index 311a9a4..0000000 --- a/scripts/backup.sh.old +++ /dev/null @@ -1,90 +0,0 @@ -#!/bin/bash - -# Database backup script with S3 upload -# Runs every 12 hours and keeps last 7 local backups - -BACKUP_DIR="/backups" -DB_HOST="${DB_HOST:-db}" -DB_USER="${DB_USER:-root}" -DB_PASSWORD="${MYSQL_ROOT_PASSWORD}" -KEEP_BACKUPS="${KEEP_BACKUPS:-7}" -S3_BUCKET="${S3_BUCKET:-selectel:backup_db}" -S3_PATH="${S3_PATH:-dot}" -ENVIRONMENT="${ENVIRONMENT:-prod}" - -while true; do - echo "[$(date)] Starting database backup process..." - - # Get list of databases (exclude system databases) - DATABASES=$(mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" -s -AN -e 'show databases' | grep -vE "information_schema|performance_schema|sys") - - if [ $? -ne 0 ]; then - echo "[$(date)] ERROR: Failed to get database list!" - sleep 43200 - continue - fi - - # Backup each database separately - for database in ${DATABASES}; do - TIMESTAMP=$(date +%Y%m%d_%H%M%S) - DAY_NAME=$(date +%a) - BACKUP_FILE="${BACKUP_DIR}/${database}.${DAY_NAME}.sql.gz" - - echo "[$(date)] Backing up database: ${database}..." - - # Create backup with compression (MariaDB compatible) - mysqldump -h "${DB_HOST}" \ - -u"${DB_USER}" \ - -p"${DB_PASSWORD}" \ - --max-allowed-packet=1G \ - --add-drop-table \ - --single-transaction \ - --extended-insert \ - --quick \ - --lock-tables=false \ - --skip-add-locks \ - --skip-comments \ - "${database}" | gzip -c > "${BACKUP_FILE}" - - if [ $? -eq 0 ]; then - # Get file size for logging - BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1) - echo "[$(date)] Backup completed: ${database} (${BACKUP_SIZE})" - - # Upload to S3 if rclone is configured - if [ -f /root/.config/rclone/rclone.conf ]; then - echo "[$(date)] Uploading ${database} to S3..." - rclone copy "${BACKUP_FILE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress - - if [ $? -eq 0 ]; then - echo "[$(date)] Successfully uploaded ${database} to S3" - # Optional: remove local backup after successful upload - # rm -f "${BACKUP_FILE}" - else - echo "[$(date)] WARNING: Failed to upload ${database} to S3" - fi - else - echo "[$(date)] Rclone not configured, keeping backup locally only" - fi - else - echo "[$(date)] ERROR: Failed to backup ${database}!" - [ -f "${BACKUP_FILE}" ] && rm -f "${BACKUP_FILE}" - fi - done - - # Clean old local backups (keep last N days for each database) - echo "[$(date)] Cleaning old local backups..." - for database in ${DATABASES}; do - ls -t ${BACKUP_DIR}/${database}.*.sql.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f - done - - # List current backups - echo "[$(date)] Current local backups:" - ls -lah ${BACKUP_DIR}/*.sql.gz 2>/dev/null || echo "No backups found" - - echo "[$(date)] Next backup will run in 12 hours..." - echo "=========================================" - - # Sleep for 12 hours (43200 seconds) - sleep 43200 -done