migrate to mariadb
build (backupdb) TeamCity build failed
Details
build (backupdb) TeamCity build failed
Details
This commit is contained in:
parent
c23f48ca09
commit
22ad368271
18
Dockerfile
18
Dockerfile
|
|
@ -1,8 +1,14 @@
|
|||
FROM mariadb:10.6.23

# Install mariabackup plus the tools the backup script needs
# (ignore apt update errors from old repos; the install step below
# still fails the build if packages cannot be fetched)
RUN apt-get update || true && \
    apt-get install -y --no-install-recommends \
        mariadb-backup \
        curl \
        unzip \
        ca-certificates \
        tar \
        gzip && \
    curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \
    unzip rclone.zip && \
    cp rclone-*/rclone /usr/bin/ && \
    chmod 755 /usr/bin/rclone && \
    rm -rf rclone* && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

COPY scripts/backup-mariabackup.sh /backup.sh

# Create rclone config directory and make the script executable
RUN mkdir -p /root/.config/rclone && \
    chmod +x /backup.sh

# Create backup directory
RUN mkdir -p /backups

# Default command.
# Run under bash, not sh: the script declares #!/bin/bash, and
# "sh script" would override that shebang with dash on Debian images.
CMD ["bash", "/backup.sh"]
|
||||
|
|
@ -0,0 +1,19 @@
|
|||
FROM mariadb:10.6.23

# Install rclone (ignore apt update errors from old repos; the
# install step below still fails the build if packages are missing)
RUN apt-get update || true && \
    apt-get install -y --no-install-recommends curl unzip ca-certificates && \
    curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \
    unzip rclone.zip && \
    cp rclone-*/rclone /usr/bin/ && \
    chmod 755 /usr/bin/rclone && \
    rm -rf rclone* && \
    apt-get clean && \
    rm -rf /var/lib/apt/lists/*

COPY scripts/backup.sh /backup.sh
# Create rclone config directory
RUN mkdir -p /root/.config/rclone

# Default command.
# Run under bash, not sh: backup.sh declares #!/bin/bash and uses
# bash features; "sh script" overrides the shebang (dash on Debian).
CMD ["bash", "/backup.sh"]
|
||||
|
|
@ -1,6 +1,6 @@
|
|||
#!/bin/bash

# Physical backup script using mariabackup with S3 upload.
# Runs every 12 hours and keeps the last N local backups
# (one archive slot per weekday name, so at most 7 distinct files).
#
# NOTE(review): mariabackup must be able to read the server's datadir
# directly; pointing --host at a remote container is not enough on its
# own — the data volume must also be mounted into this container.
# Confirm the compose/volume setup before relying on these backups.

# Fail a pipeline if any stage fails (not just the last one),
# so `cmd | gzip` style checks below see the real producer status.
set -o pipefail

BACKUP_DIR="/backups"
DB_HOST="${DB_HOST:-db}"
DB_USER="${DB_USER:-root}"
DB_PASSWORD="${MYSQL_ROOT_PASSWORD}"
KEEP_BACKUPS="${KEEP_BACKUPS:-7}"
S3_BUCKET="${S3_BUCKET:-selectel:backup_db}"
S3_PATH="${S3_PATH:-dot}"
ENVIRONMENT="${ENVIRONMENT:-prod}"

while true; do
    echo "[$(date)] Starting physical backup process with mariabackup..."

    # One backup slot per weekday: full_backup.Mon, full_backup.Tue, ...
    DAY_NAME=$(date +%a)
    BACKUP_NAME="full_backup.${DAY_NAME}"
    BACKUP_PATH="${BACKUP_DIR}/${BACKUP_NAME}"
    BACKUP_ARCHIVE="${BACKUP_PATH}.tar.gz"

    # Remove previous backup with the same day name if it exists
    if [ -d "${BACKUP_PATH}" ]; then
        echo "[$(date)] Removing old backup: ${BACKUP_PATH}"
        rm -rf "${BACKUP_PATH}"
    fi
    [ -f "${BACKUP_ARCHIVE}" ] && rm -f "${BACKUP_ARCHIVE}"

    echo "[$(date)] Creating full physical backup..."

    # Create full backup using mariabackup
    mariabackup --backup \
        --target-dir="${BACKUP_PATH}" \
        --host="${DB_HOST}" \
        --user="${DB_USER}" \
        --password="${DB_PASSWORD}" \
        --no-lock \
        --parallel=4

    if [ $? -eq 0 ]; then
        echo "[$(date)] Backup created successfully, preparing..."

        # Prepare the backup (apply the redo log) so it is consistent
        mariabackup --prepare \
            --target-dir="${BACKUP_PATH}"

        if [ $? -eq 0 ]; then
            echo "[$(date)] Backup prepared successfully, compressing..."

            # Compress the backup directory into a single archive
            tar -czf "${BACKUP_ARCHIVE}" -C "${BACKUP_DIR}" "${BACKUP_NAME}"

            if [ $? -eq 0 ]; then
                # Remove the uncompressed backup to save space
                rm -rf "${BACKUP_PATH}"

                # Get file size for logging
                BACKUP_SIZE=$(du -h "${BACKUP_ARCHIVE}" | cut -f1)
                echo "[$(date)] Backup compressed: ${BACKUP_ARCHIVE} (${BACKUP_SIZE})"

                # Upload to S3 if rclone is configured
                if [ -f /root/.config/rclone/rclone.conf ]; then
                    echo "[$(date)] Uploading backup to S3..."
                    rclone copy "${BACKUP_ARCHIVE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress

                    if [ $? -eq 0 ]; then
                        echo "[$(date)] Successfully uploaded backup to S3"
                    else
                        echo "[$(date)] WARNING: Failed to upload backup to S3"
                    fi
                else
                    echo "[$(date)] Rclone not configured, keeping backup locally only"
                fi
            else
                echo "[$(date)] ERROR: Failed to compress backup!"
                rm -rf "${BACKUP_PATH}"
            fi
        else
            echo "[$(date)] ERROR: Failed to prepare backup!"
            rm -rf "${BACKUP_PATH}"
        fi
    else
        echo "[$(date)] ERROR: Failed to create backup!"
        [ -d "${BACKUP_PATH}" ] && rm -rf "${BACKUP_PATH}"
    fi

    # Clean old local backups (keep last N backups)
    echo "[$(date)] Cleaning old local backups..."
    ls -t ${BACKUP_DIR}/full_backup.*.tar.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f

    # List current backups
    echo "[$(date)] Current local backups:"
    ls -lah ${BACKUP_DIR}/*.tar.gz 2>/dev/null || echo "No backups found"

    echo "[$(date)] Next backup will run in 12 hours..."
    echo "========================================="

    # Sleep for 12 hours (43200 seconds)
    sleep 43200
done
|
||||
|
|
|
|||
|
|
@ -0,0 +1,90 @@
|
|||
#!/bin/bash

# Logical database backup script (mysqldump) with S3 upload.
# Runs every 12 hours and keeps the last N local backups per database
# (one file slot per weekday name, so at most 7 distinct files each).

# BUG FIX: without pipefail, `$?` after `mysqldump | gzip` reports
# gzip's status, so a failed dump produced a truncated .sql.gz that was
# then kept and uploaded. pipefail makes the pipeline fail if ANY stage
# fails. (Side effect: the database-list pipeline below also fails when
# grep matches nothing, i.e. only system databases exist — that is
# logged as an error instead of silently backing up nothing.)
set -o pipefail

BACKUP_DIR="/backups"
DB_HOST="${DB_HOST:-db}"
DB_USER="${DB_USER:-root}"
DB_PASSWORD="${MYSQL_ROOT_PASSWORD}"
KEEP_BACKUPS="${KEEP_BACKUPS:-7}"
S3_BUCKET="${S3_BUCKET:-selectel:backup_db}"
S3_PATH="${S3_PATH:-dot}"
ENVIRONMENT="${ENVIRONMENT:-prod}"

while true; do
    echo "[$(date)] Starting database backup process..."

    # Get list of databases (exclude system databases)
    DATABASES=$(mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" -s -AN -e 'show databases' | grep -vE "information_schema|performance_schema|sys")

    # With pipefail this now catches a mysql connection failure too,
    # not just a grep failure.
    if [ $? -ne 0 ]; then
        echo "[$(date)] ERROR: Failed to get database list!"
        sleep 43200
        continue
    fi

    # Backup each database separately
    for database in ${DATABASES}; do
        DAY_NAME=$(date +%a)
        BACKUP_FILE="${BACKUP_DIR}/${database}.${DAY_NAME}.sql.gz"

        echo "[$(date)] Backing up database: ${database}..."

        # Create backup with compression (MariaDB compatible)
        mysqldump -h "${DB_HOST}" \
            -u"${DB_USER}" \
            -p"${DB_PASSWORD}" \
            --max-allowed-packet=1G \
            --add-drop-table \
            --single-transaction \
            --extended-insert \
            --quick \
            --lock-tables=false \
            --skip-add-locks \
            --skip-comments \
            "${database}" | gzip -c > "${BACKUP_FILE}"

        # pipefail: non-zero here means mysqldump OR gzip failed
        if [ $? -eq 0 ]; then
            # Get file size for logging
            BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1)
            echo "[$(date)] Backup completed: ${database} (${BACKUP_SIZE})"

            # Upload to S3 if rclone is configured
            if [ -f /root/.config/rclone/rclone.conf ]; then
                echo "[$(date)] Uploading ${database} to S3..."
                rclone copy "${BACKUP_FILE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress

                if [ $? -eq 0 ]; then
                    echo "[$(date)] Successfully uploaded ${database} to S3"
                    # Optional: remove local backup after successful upload
                    # rm -f "${BACKUP_FILE}"
                else
                    echo "[$(date)] WARNING: Failed to upload ${database} to S3"
                fi
            else
                echo "[$(date)] Rclone not configured, keeping backup locally only"
            fi
        else
            echo "[$(date)] ERROR: Failed to backup ${database}!"
            # Remove the (possibly truncated) archive so a bad dump is
            # never mistaken for a good backup.
            [ -f "${BACKUP_FILE}" ] && rm -f "${BACKUP_FILE}"
        fi
    done

    # Clean old local backups (keep last N for each database)
    echo "[$(date)] Cleaning old local backups..."
    for database in ${DATABASES}; do
        ls -t ${BACKUP_DIR}/${database}.*.sql.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f
    done

    # List current backups
    echo "[$(date)] Current local backups:"
    ls -lah ${BACKUP_DIR}/*.sql.gz 2>/dev/null || echo "No backups found"

    echo "[$(date)] Next backup will run in 12 hours..."
    echo "========================================="

    # Sleep for 12 hours (43200 seconds)
    sleep 43200
done
|
||||
Loading…
Reference in New Issue