migrate to mariadb
build (backupdb) TeamCity build failed
This commit is contained in:
parent c23f48ca09
commit 22ad368271

Dockerfile | 18 ++++++++++++++----
@@ -1,8 +1,14 @@
 FROM mariadb:10.6.23

-# Install rclone (ignore apt update errors from old repos)
+# Install mariabackup and required tools
 RUN apt-get update || true && \
-    apt-get install -y --no-install-recommends curl unzip ca-certificates && \
+    apt-get install -y --no-install-recommends \
+        mariadb-backup \
+        curl \
+        unzip \
+        ca-certificates \
+        tar \
+        gzip && \
     curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \
     unzip rclone.zip && \
     cp rclone-*/rclone /usr/bin/ && \
@@ -11,9 +17,13 @@ RUN apt-get update || true && \
     apt-get clean && \
     rm -rf /var/lib/apt/lists/*

-COPY scripts/backup.sh /backup.sh
+COPY scripts/backup-mariabackup.sh /backup.sh
 # Create config directory
-RUN mkdir -p /root/.config/rclone
+RUN mkdir -p /root/.config/rclone && \
+    chmod +x /backup.sh
+
+# Create backup directory
+RUN mkdir -p /backups

 # Default command
 CMD ["sh", "/backup.sh"]
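
Note: the image only creates the /root/.config/rclone directory; the rclone.conf itself has to be supplied at runtime. A minimal sketch of bootstrapping one for an S3-compatible provider follows. Every value is a placeholder and not part of this commit; the remote name "selectel" simply matches the S3_BUCKET default used by the backup script.

# Hypothetical setup (not part of this commit)
mkdir -p /root/.config/rclone
cat > /root/.config/rclone/rclone.conf <<'EOF'
[selectel]
type = s3
provider = Other
access_key_id = YOUR_ACCESS_KEY
secret_access_key = YOUR_SECRET_KEY
endpoint = https://s3.example.com
EOF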
@@ -0,0 +1,19 @@
+FROM mariadb:10.6.23
+
+# Install rclone (ignore apt update errors from old repos)
+RUN apt-get update || true && \
+    apt-get install -y --no-install-recommends curl unzip ca-certificates && \
+    curl -L -o rclone.zip https://downloads.rclone.org/rclone-current-linux-amd64.zip && \
+    unzip rclone.zip && \
+    cp rclone-*/rclone /usr/bin/ && \
+    chmod 755 /usr/bin/rclone && \
+    rm -rf rclone* && \
+    apt-get clean && \
+    rm -rf /var/lib/apt/lists/*
+
+COPY scripts/backup.sh /backup.sh
+# Create config directory
+RUN mkdir -p /root/.config/rclone
+
+# Default command
+CMD ["sh", "/backup.sh"]
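
For context, a hypothetical usage sketch (not part of this commit): the container runs the backup loop via CMD, so it only needs database credentials plus, optionally, a mounted rclone config and a backup volume. The image tag "backupdb" is assumed from the CI build name.

# Hypothetical usage (not part of this commit)
docker build -t backupdb .
docker run -d \
    -e MYSQL_ROOT_PASSWORD=secret \
    -e DB_HOST=db \
    -v rclone_conf:/root/.config/rclone \
    -v backups:/backups \
    backupdb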
@@ -1,6 +1,6 @@
 #!/bin/bash

-# Database backup script with S3 upload
+# Physical backup script using mariabackup with S3 upload
 # Runs every 12 hours and keeps last 7 local backups

 BACKUP_DIR="/backups"
@@ -13,72 +13,86 @@ S3_PATH="${S3_PATH:-dot}"
 ENVIRONMENT="${ENVIRONMENT:-prod}"

 while true; do
-    echo "[$(date)] Starting database backup process..."
+    echo "[$(date)] Starting physical backup process with mariabackup..."

-    # Get list of databases (exclude system databases)
-    DATABASES=$(mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" -s -AN -e 'show databases' | grep -vE "information_schema|performance_schema|sys")
+    TIMESTAMP=$(date +%Y%m%d_%H%M%S)
+    DAY_NAME=$(date +%a)
+    BACKUP_NAME="full_backup.${DAY_NAME}"
+    BACKUP_PATH="${BACKUP_DIR}/${BACKUP_NAME}"
+    BACKUP_ARCHIVE="${BACKUP_PATH}.tar.gz"

-    if [ $? -ne 0 ]; then
-        echo "[$(date)] ERROR: Failed to get database list!"
-        sleep 43200
-        continue
+    # Remove previous backup with same day name if exists
+    if [ -d "${BACKUP_PATH}" ]; then
+        echo "[$(date)] Removing old backup: ${BACKUP_PATH}"
+        rm -rf "${BACKUP_PATH}"
     fi
+    [ -f "${BACKUP_ARCHIVE}" ] && rm -f "${BACKUP_ARCHIVE}"

-    # Backup each database separately
-    for database in ${DATABASES}; do
-        TIMESTAMP=$(date +%Y%m%d_%H%M%S)
-        DAY_NAME=$(date +%a)
-        BACKUP_FILE="${BACKUP_DIR}/${database}.${DAY_NAME}.sql.gz"
+    echo "[$(date)] Creating full physical backup..."

-        echo "[$(date)] Backing up database: ${database}..."
-
-        # Create backup with compression
-        mysqldump -h "${DB_HOST}" \
-            -u"${DB_USER}" \
-            -p"${DB_PASSWORD}" \
-            --max-allowed-packet=1G \
-            --add-drop-table \
-            --single-transaction \
-            --extended-insert \
-            --quick \
-            --lock-tables=false \
-            "${database}" | gzip -c > "${BACKUP_FILE}"
+    # Create full backup using mariabackup
+    mariabackup --backup \
+        --target-dir="${BACKUP_PATH}" \
+        --host="${DB_HOST}" \
+        --user="${DB_USER}" \
+        --password="${DB_PASSWORD}" \
+        --no-lock \
+        --parallel=4

-        if [ $? -eq 0 ]; then
-            # Get file size for logging
-            BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1)
-            echo "[$(date)] Backup completed: ${database} (${BACKUP_SIZE})"
+    if [ $? -eq 0 ]; then
+        echo "[$(date)] Backup created successfully, preparing..."

-            # Upload to S3 if rclone is configured
-            if [ -f /root/.config/rclone/rclone.conf ]; then
-                echo "[$(date)] Uploading ${database} to S3..."
-                rclone copy "${BACKUP_FILE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress
+        # Prepare the backup (apply log)
+        mariabackup --prepare \
+            --target-dir="${BACKUP_PATH}"

-                if [ $? -eq 0 ]; then
-                    echo "[$(date)] Successfully uploaded ${database} to S3"
-                    # Optional: remove local backup after successful upload
-                    # rm -f "${BACKUP_FILE}"
-                else
-                    echo "[$(date)] WARNING: Failed to upload ${database} to S3"
-                fi
-            else
-                echo "[$(date)] Rclone not configured, keeping backup locally only"
-            fi
-        else
-            echo "[$(date)] ERROR: Failed to backup ${database}!"
-            [ -f "${BACKUP_FILE}" ] && rm -f "${BACKUP_FILE}"
-        fi
-    done
+        if [ $? -eq 0 ]; then
+            echo "[$(date)] Backup prepared successfully, compressing..."
+
+            # Compress the backup
+            tar -czf "${BACKUP_ARCHIVE}" -C "${BACKUP_DIR}" "${BACKUP_NAME}"
+
+            if [ $? -eq 0 ]; then
+                # Remove uncompressed backup to save space
+                rm -rf "${BACKUP_PATH}"
+
+                # Get file size for logging
+                BACKUP_SIZE=$(du -h "${BACKUP_ARCHIVE}" | cut -f1)
+                echo "[$(date)] Backup compressed: ${BACKUP_ARCHIVE} (${BACKUP_SIZE})"
+
+                # Upload to S3 if rclone is configured
+                if [ -f /root/.config/rclone/rclone.conf ]; then
+                    echo "[$(date)] Uploading backup to S3..."
+                    rclone copy "${BACKUP_ARCHIVE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress
+
+                    if [ $? -eq 0 ]; then
+                        echo "[$(date)] Successfully uploaded backup to S3"
+                    else
+                        echo "[$(date)] WARNING: Failed to upload backup to S3"
+                    fi
+                else
+                    echo "[$(date)] Rclone not configured, keeping backup locally only"
+                fi
+            else
+                echo "[$(date)] ERROR: Failed to compress backup!"
+                rm -rf "${BACKUP_PATH}"
+            fi
+        else
+            echo "[$(date)] ERROR: Failed to prepare backup!"
+            rm -rf "${BACKUP_PATH}"
+        fi
+    else
+        echo "[$(date)] ERROR: Failed to create backup!"
+        [ -d "${BACKUP_PATH}" ] && rm -rf "${BACKUP_PATH}"
+    fi

-    # Clean old local backups (keep last N days for each database)
+    # Clean old local backups (keep last N backups)
     echo "[$(date)] Cleaning old local backups..."
-    for database in ${DATABASES}; do
-        ls -t ${BACKUP_DIR}/${database}.*.sql.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f
-    done
+    ls -t ${BACKUP_DIR}/full_backup.*.tar.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f

     # List current backups
     echo "[$(date)] Current local backups:"
-    ls -lah ${BACKUP_DIR}/*.sql.gz 2>/dev/null || echo "No backups found"
+    ls -lah ${BACKUP_DIR}/*.tar.gz 2>/dev/null || echo "No backups found"

     echo "[$(date)] Next backup will run in 12 hours..."
     echo "========================================="
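
Restore is not covered by this commit. Physical backups taken with mariabackup are restored with --copy-back into an empty datadir while the server is stopped; a minimal sketch, assuming an archive produced by the loop above (day name and paths are placeholders):

# Hypothetical restore (not part of this commit)
tar -xzf /backups/full_backup.Mon.tar.gz -C /backups
mariabackup --copy-back \
    --target-dir=/backups/full_backup.Mon \
    --datadir=/var/lib/mysql
chown -R mysql:mysql /var/lib/mysql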
@@ -0,0 +1,90 @@
+#!/bin/bash
+
+# Database backup script with S3 upload
+# Runs every 12 hours and keeps last 7 local backups
+
+BACKUP_DIR="/backups"
+DB_HOST="${DB_HOST:-db}"
+DB_USER="${DB_USER:-root}"
+DB_PASSWORD="${MYSQL_ROOT_PASSWORD}"
+KEEP_BACKUPS="${KEEP_BACKUPS:-7}"
+S3_BUCKET="${S3_BUCKET:-selectel:backup_db}"
+S3_PATH="${S3_PATH:-dot}"
+ENVIRONMENT="${ENVIRONMENT:-prod}"
+
+while true; do
+    echo "[$(date)] Starting database backup process..."
+
+    # Get list of databases (exclude system databases)
+    DATABASES=$(mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" -s -AN -e 'show databases' | grep -vE "information_schema|performance_schema|sys")
+
+    if [ $? -ne 0 ]; then
+        echo "[$(date)] ERROR: Failed to get database list!"
+        sleep 43200
+        continue
+    fi
+
+    # Backup each database separately
+    for database in ${DATABASES}; do
+        TIMESTAMP=$(date +%Y%m%d_%H%M%S)
+        DAY_NAME=$(date +%a)
+        BACKUP_FILE="${BACKUP_DIR}/${database}.${DAY_NAME}.sql.gz"
+
+        echo "[$(date)] Backing up database: ${database}..."
+
+        # Create backup with compression (MariaDB compatible)
+        mysqldump -h "${DB_HOST}" \
+            -u"${DB_USER}" \
+            -p"${DB_PASSWORD}" \
+            --max-allowed-packet=1G \
+            --add-drop-table \
+            --single-transaction \
+            --extended-insert \
+            --quick \
+            --lock-tables=false \
+            --skip-add-locks \
+            --skip-comments \
+            "${database}" | gzip -c > "${BACKUP_FILE}"
+
+        if [ $? -eq 0 ]; then
+            # Get file size for logging
+            BACKUP_SIZE=$(du -h "${BACKUP_FILE}" | cut -f1)
+            echo "[$(date)] Backup completed: ${database} (${BACKUP_SIZE})"
+
+            # Upload to S3 if rclone is configured
+            if [ -f /root/.config/rclone/rclone.conf ]; then
+                echo "[$(date)] Uploading ${database} to S3..."
+                rclone copy "${BACKUP_FILE}" "${S3_BUCKET}/${S3_PATH}/${ENVIRONMENT}/" --progress
+
+                if [ $? -eq 0 ]; then
+                    echo "[$(date)] Successfully uploaded ${database} to S3"
+                    # Optional: remove local backup after successful upload
+                    # rm -f "${BACKUP_FILE}"
+                else
+                    echo "[$(date)] WARNING: Failed to upload ${database} to S3"
+                fi
+            else
+                echo "[$(date)] Rclone not configured, keeping backup locally only"
+            fi
+        else
+            echo "[$(date)] ERROR: Failed to backup ${database}!"
+            [ -f "${BACKUP_FILE}" ] && rm -f "${BACKUP_FILE}"
+        fi
+    done
+
+    # Clean old local backups (keep last N days for each database)
+    echo "[$(date)] Cleaning old local backups..."
+    for database in ${DATABASES}; do
+        ls -t ${BACKUP_DIR}/${database}.*.sql.gz 2>/dev/null | tail -n +$((KEEP_BACKUPS + 1)) | xargs -r rm -f
+    done
+
+    # List current backups
+    echo "[$(date)] Current local backups:"
+    ls -lah ${BACKUP_DIR}/*.sql.gz 2>/dev/null || echo "No backups found"
+
+    echo "[$(date)] Next backup will run in 12 hours..."
+    echo "========================================="
+
+    # Sleep for 12 hours (43200 seconds)
+    sleep 43200
+done
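
The mysqldump-based dumps, by contrast, restore as plain SQL; a minimal sketch, with the database and day names as placeholders:

# Hypothetical restore for the logical backups (not part of this commit)
gunzip -c /backups/mydb.Mon.sql.gz | mysql -h "${DB_HOST}" -u"${DB_USER}" -p"${DB_PASSWORD}" mydb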