From 22ad3682710cc22b9370a390d1ef3d9daf1693f5 Mon Sep 17 00:00:00 2001 From: Sergey Paramoshkin Date: Mon, 22 Sep 2025 23:42:44 +0300 Subject: [PATCH] migrate to mariabackup --- Dockerfile | 18 +++++-- Dockerfile.old | 19 ++++++++ scripts/backup.sh | 108 ++++++++++++++++++++++++------------------ scripts/backup.sh.old | 90 +++++++++++++++++++++++++++++++++++ 4 files changed, 184 insertions(+), 51 deletions(-) create mode 100644 Dockerfile.old create mode 100644 scripts/backup.sh.old diff --git a/Dockerfile b/Dockerfile index 472546d..ae8fd27 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,8 +1,14 @@ FROM mariadb:10.6.23 -# Install rclone (ignore apt update errors from old repos) +# Install mariabackup and required tools RUN apt-get update || true && \ - apt-get install -y --no-install-recommends curl unzip ca-certificates && \ + apt-get install -y --no-install-recommends \ + mariadb-backup \ + curl \ + unzip \ + ca-certificates \ + tar \ + gzip && \ curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \ unzip rclone.zip && \ cp rclone-*/rclone /usr/bin/ && \ @@ -11,9 +17,13 @@ RUN apt-get update || true && \ apt-get clean && \ rm -rf /var/lib/apt/lists/* -COPY scripts/backup.sh /backup.sh +COPY scripts/backup.sh /backup.sh # Create config directory -RUN mkdir -p /root/.config/rclone +RUN mkdir -p /root/.config/rclone && \ + chmod +x /backup.sh + +# Create backup directory +RUN mkdir -p /backups # Default command CMD ["sh", "/backup.sh"] \ No newline at end of file diff --git a/Dockerfile.old b/Dockerfile.old new file mode 100644 index 0000000..472546d --- /dev/null +++ b/Dockerfile.old @@ -0,0 +1,19 @@ +FROM mariadb:10.6.23 + +# Install rclone (ignore apt update errors from old repos) +RUN apt-get update || true && \ + apt-get install -y --no-install-recommends curl unzip ca-certificates && \ + curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \ + unzip rclone.zip && \ + cp rclone-*/rclone 
/usr/bin/ && \ + chmod 755 /usr/bin/rclone && \ + rm -rf rclone* && \ + apt-get clean && \ + rm -rf /var/lib/apt/lists/* + +COPY scripts/backup.sh /backup.sh +# Create config directory +RUN mkdir -p /root/.config/rclone + +# Default command +CMD ["sh", "/backup.sh"] \ No newline at end of file diff --git a/scripts/backup.sh b/scripts/backup.sh index e958e33..fe470c0 100644 --- a/scripts/backup.sh +++ b/scripts/backup.sh @@ -1,6 +1,6 @@ #!/bin/bash -# Database backup script with S3 upload +# Physical backup script using mariabackup with S3 upload # Runs every 12 hours and keeps last 7 local backups BACKUP_DIR="/backups" @@ -13,76 +13,90 @@ S3_PATH="${S3_PATH:-dot}" ENVIRONMENT="${ENVIRONMENT:-prod}" while true; do - echo "[$(date)] Starting database backup process..." + echo "[$(date)] Starting physical backup process with mariabackup..." - # Get list of databases (exclude system databases) - DATABASES=$(mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" -s -AN -e 'show databases' | grep -vE "information_schema|performance_schema|sys") + TIMESTAMP=$(date +%Y%m%d_%H%M%S) + DAY_NAME=$(date +%a) + BACKUP_NAME="full_backup.${DAY_NAME}" + BACKUP_PATH="${BACKUP_DIR}/${BACKUP_NAME}" + BACKUP_ARCHIVE="${BACKUP_PATH}.tar.gz" - if [ $? -ne 0 ]; then - echo "[$(date)] ERROR: Failed to get database list!" - sleep 43200 - continue + # Remove previous backup with same day name if exists + if [ -d "${BACKUP_PATH}" ]; then + echo "[$(date)] Removing old backup: ${BACKUP_PATH}" + rm -rf "${BACKUP_PATH}" fi + [ -f "${BACKUP_ARCHIVE}" ] && rm -f "${BACKUP_ARCHIVE}" - # Backup each database separately - for database in ${DATABASES}; do - TIMESTAMP=$(date +%Y%m%d_%H%M%S) - DAY_NAME=$(date +%a) - BACKUP_FILE="${BACKUP_DIR}/${database}.${DAY_NAME}.sql.gz" + echo "[$(date)] Creating full physical backup..." - echo "[$(date)] Backing up database: ${database}..." 
+ # Create full backup using mariabackup + mariabackup --backup \ + --target-dir="${BACKUP_PATH}" \ + --host="${DB_HOST}" \ + --user="${DB_USER}" \ + --password="${DB_PASSWORD}" \ + --no-lock \ + --parallel=4 - # Create backup with compression - mysqldump -h "${DB_HOST}" \ - -u"${DB_USER}" \ - -p"${DB_PASSWORD}" \ - --max-allowed-packet=1G \ - --add-drop-table \ - --single-transaction \ - --extended-insert \ - --quick \ - --lock-tables=false \ - "${database}" | gzip -c > "${BACKUP_FILE}" + if [ $? -eq 0 ]; then + echo "[$(date)] Backup created successfully, preparing..." + + # Prepare the backup (apply log) + mariabackup --prepare \ + --target-dir="${BACKUP_PATH}" if [ $? -eq 0 ]; then - # Get file size for logging - BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1) - echo "[$(date)] Backup completed: ${database} (${BACKUP_SIZE})" + echo "[$(date)] Backup prepared successfully, compressing..." - # Upload to S3 if rclone is configured - if [ -f /root/.config/rclone/rclone.conf ]; then - echo "[$(date)] Uploading ${database} to S3..." - rclone copy "${BACKUP_FILE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress + # Compress the backup + tar -czf "${BACKUP_ARCHIVE}" -C "${BACKUP_DIR}" "${BACKUP_NAME}" - if [ $? -eq 0 ]; then - echo "[$(date)] Successfully uploaded ${database} to S3" - # Optional: remove local backup after successful upload - # rm -f "${BACKUP_FILE}" + if [ $? -eq 0 ]; then + # Remove uncompressed backup to save space + rm -rf "${BACKUP_PATH}" + + # Get file size for logging + BACKUP_SIZE=$(du -h "${BACKUP_ARCHIVE}" | cut -f1) + echo "[$(date)] Backup compressed: ${BACKUP_ARCHIVE} (${BACKUP_SIZE})" + + # Upload to S3 if rclone is configured + if [ -f /root/.config/rclone/rclone.conf ]; then + echo "[$(date)] Uploading backup to S3..." + rclone copy "${BACKUP_ARCHIVE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress + + if [ $? 
-eq 0 ]; then + echo "[$(date)] Successfully uploaded backup to S3" + else + echo "[$(date)] WARNING: Failed to upload backup to S3" + fi else - echo "[$(date)] WARNING: Failed to upload ${database} to S3" + echo "[$(date)] Rclone not configured, keeping backup locally only" fi else - echo "[$(date)] Rclone not configured, keeping backup locally only" + echo "[$(date)] ERROR: Failed to compress backup!" + rm -rf "${BACKUP_PATH}" fi else - echo "[$(date)] ERROR: Failed to backup ${database}!" - [ -f "${BACKUP_FILE}" ] && rm -f "${BACKUP_FILE}" + echo "[$(date)] ERROR: Failed to prepare backup!" + rm -rf "${BACKUP_PATH}" fi - done + else + echo "[$(date)] ERROR: Failed to create backup!" + [ -d "${BACKUP_PATH}" ] && rm -rf "${BACKUP_PATH}" + fi - # Clean old local backups (keep last N days for each database) + # Clean old local backups (keep last N backups) echo "[$(date)] Cleaning old local backups..." - for database in ${DATABASES}; do - ls -t ${BACKUP_DIR}/${database}.*.sql.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f - done + ls -t ${BACKUP_DIR}/full_backup.*.tar.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f # List current backups echo "[$(date)] Current local backups:" - ls -lah ${BACKUP_DIR}/*.sql.gz 2>/dev/null || echo "No backups found" + ls -lah ${BACKUP_DIR}/*.tar.gz 2>/dev/null || echo "No backups found" echo "[$(date)] Next backup will run in 12 hours..." 
echo "=========================================" # Sleep for 12 hours (43200 seconds) sleep 43200 -done +done \ No newline at end of file diff --git a/scripts/backup.sh.old b/scripts/backup.sh.old new file mode 100644 index 0000000..311a9a4 --- /dev/null +++ b/scripts/backup.sh.old @@ -0,0 +1,90 @@ +#!/bin/bash + +# Database backup script with S3 upload +# Runs every 12 hours and keeps last 7 local backups + +BACKUP_DIR="/backups" +DB_HOST="${DB_HOST:-db}" +DB_USER="${DB_USER:-root}" +DB_PASSWORD="${MYSQL_ROOT_PASSWORD}" +KEEP_BACKUPS="${KEEP_BACKUPS:-7}" +S3_BUCKET="${S3_BUCKET:-selectel:backup_db}" +S3_PATH="${S3_PATH:-dot}" +ENVIRONMENT="${ENVIRONMENT:-prod}" + +while true; do + echo "[$(date)] Starting database backup process..." + + # Get list of databases (exclude system databases) + DATABASES=$(mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" -s -AN -e 'show databases' | grep -vE "information_schema|performance_schema|sys") + + if [ $? -ne 0 ]; then + echo "[$(date)] ERROR: Failed to get database list!" + sleep 43200 + continue + fi + + # Backup each database separately + for database in ${DATABASES}; do + TIMESTAMP=$(date +%Y%m%d_%H%M%S) + DAY_NAME=$(date +%a) + BACKUP_FILE="${BACKUP_DIR}/${database}.${DAY_NAME}.sql.gz" + + echo "[$(date)] Backing up database: ${database}..." + + # Create backup with compression (MariaDB compatible) + mysqldump -h "${DB_HOST}" \ + -u"${DB_USER}" \ + -p"${DB_PASSWORD}" \ + --max-allowed-packet=1G \ + --add-drop-table \ + --single-transaction \ + --extended-insert \ + --quick \ + --lock-tables=false \ + --skip-add-locks \ + --skip-comments \ + "${database}" | gzip -c > "${BACKUP_FILE}" + + if [ $? -eq 0 ]; then + # Get file size for logging + BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1) + echo "[$(date)] Backup completed: ${database} (${BACKUP_SIZE})" + + # Upload to S3 if rclone is configured + if [ -f /root/.config/rclone/rclone.conf ]; then + echo "[$(date)] Uploading ${database} to S3..." 
+ rclone copy "${BACKUP_FILE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress + + if [ $? -eq 0 ]; then + echo "[$(date)] Successfully uploaded ${database} to S3" + # Optional: remove local backup after successful upload + # rm -f "${BACKUP_FILE}" + else + echo "[$(date)] WARNING: Failed to upload ${database} to S3" + fi + else + echo "[$(date)] Rclone not configured, keeping backup locally only" + fi + else + echo "[$(date)] ERROR: Failed to backup ${database}!" + [ -f "${BACKUP_FILE}" ] && rm -f "${BACKUP_FILE}" + fi + done + + # Clean old local backups (keep last N days for each database) + echo "[$(date)] Cleaning old local backups..." + for database in ${DATABASES}; do + ls -t ${BACKUP_DIR}/${database}.*.sql.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f + done + + # List current backups + echo "[$(date)] Current local backups:" + ls -lah ${BACKUP_DIR}/*.sql.gz 2>/dev/null || echo "No backups found" + + echo "[$(date)] Next backup will run in 12 hours..." + echo "=========================================" + + # Sleep for 12 hours (43200 seconds) + sleep 43200 +done