之前在 服务器重装设置指南 一文中分享了关于初装VPS后的一系列优化操作,其中分享了一个数据的备份脚本,通过丰富的配置项即可实现文件、mysql数据的多版本备份策略,备份策略参见下方列表:
用尽量少的空间备份足够容错的数据备份,并通过rclone等工具同步到其他网盘或储存空间之上。
但是最近忽然发现这个脚本似乎没有正常工作,当时是通过ChatGPT生成的备份脚本,可能存在某些问题,在排查日志时发现是因为路径问题导致脚本中断。
所以今天又用AI重新对脚本做了修复和优化,目前能正常工作了,如果有需要的朋友可以拿去体验。
#!/bin/bash
# Backup script: stages a data directory plus MySQL dumps (from a Docker
# container) into a password-protected zip, then prunes old archives
# according to a tiered retention policy.
set -euo pipefail # stop on errors, unset variables, and pipeline failures
set -x            # debug trace. WARNING: the trace prints ZIP_PASSWORD in
                  # clear text — disable (set +x) in production.

# User-adjustable settings
CUSTOM_HOME="/home/username"          # change to your own base path
BACKUP_DIR="$CUSTOM_HOME/backup"      # where finished backup archives are stored
TARGET_DIR="$CUSTOM_HOME/data"        # data directory to back up
DATABASES=( "database_prod" )         # list of MySQL databases to dump
ZIP_PASSWORD="password"               # archive password (prefer env var or file over hardcoding)
DATE=$(date +"%Y%m%d%H%M%S")          # timestamp used in backup file names
MYSQL_CONTAINER_NAME="mysql"          # name of the MySQL Docker container
TEMP_HOME_DIR="$CUSTOM_HOME/backup_tmp"            # scratch area root
TEMP_BACKUP_DIR="$TEMP_HOME_DIR/backup_temp_$DATE" # per-run staging directory
LOG_DIR="$CUSTOM_HOME/backup_logs"    # log directory
LOG_FILE="$LOG_DIR/backup_script.log" # log file path

# Ensure the log destination exists before the first log() call.
mkdir -p "$LOG_DIR"
touch "$LOG_FILE"
# Append a timestamped message to the log file and echo it to stdout.
# Arguments: $1 - message text
log() {
  local stamp
  stamp=$(date +"%Y-%m-%d %H:%M:%S")
  printf '[%s] %s\n' "$stamp" "$1" | tee -a "$LOG_FILE"
}
# Create a directory (including missing parents) and record the action.
# Arguments: $1 - directory path to create
create_dir() {
  local dir=$1
  mkdir -p "$dir"
  log "Created directory: $dir"
}
# Copy the MySQL credentials file (.my.cnf) into the Docker container so
# mysqldump can authenticate without credentials on the command line.
# Exits the script with status 1 if the copy fails.
copy_my_cnf() {
  if docker cp "$CUSTOM_HOME/.my.cnf" "$MYSQL_CONTAINER_NAME":/root/.my.cnf; then
    return 0
  fi
  log "Failed to copy .my.cnf to Docker container $MYSQL_CONTAINER_NAME"
  exit 1
}
# Stage a copy of the data directory into the per-run staging directory.
# Uses the "dir/." source form so hidden files are included and an empty
# source directory works — the previous `"$TARGET_DIR"/*` glob skipped
# dotfiles and, when the directory was empty, passed the literal pattern to
# cp, which failed and aborted the script under `set -e`.
# Exits the script with status 1 if the source directory is missing.
copy_data() {
  if [ -d "$TARGET_DIR" ]; then
    cp -r "$TARGET_DIR"/. "$TEMP_BACKUP_DIR"/
    log "Copied data from $TARGET_DIR to $TEMP_BACKUP_DIR"
  else
    log "Target directory $TARGET_DIR does not exist"
    exit 1
  fi
}
# Dump each configured database from the MySQL container into a .sql file
# inside the staging directory. Credentials come from /root/.my.cnf, which
# copy_my_cnf placed in the container beforehand.
# Exits the script with status 1 on the first failed dump.
export_databases() {
  local db sql_file
  for db in "${DATABASES[@]}"; do
    sql_file="$TEMP_BACKUP_DIR/${db}_backup_${DATE}.sql"
    if ! docker exec "$MYSQL_CONTAINER_NAME" sh -c "mysqldump --defaults-extra-file=/root/.my.cnf $db" > "$sql_file"; then
      log "Failed to export database $db"
      exit 1
    fi
    log "Exported database $db to $sql_file"
  done
}
# Bundle the staging directory into a password-protected zip in BACKUP_DIR.
# Zips from inside TEMP_HOME_DIR (in a subshell, so the caller's cwd is
# untouched) so the archive stores paths relative to the staging directory —
# the previous invocation embedded the full absolute path (e.g.
# /home/username/backup_tmp/...) inside every archive.
# NOTE: zip -P uses legacy ZipCrypto, which is weak; consider `7z -p` for
# real confidentiality.
# Exits the script with status 1 if archiving fails.
create_archive() {
  local archive_name="backup_${DATE}.zip"
  if ! (cd "$TEMP_HOME_DIR" && zip -r -P "$ZIP_PASSWORD" "$BACKUP_DIR/$archive_name" "$(basename "$TEMP_BACKUP_DIR")"); then
    log "Failed to create backup archive"
    exit 1
  fi
  log "Created encrypted archive $BACKUP_DIR/$archive_name"
}
# Prune expired backup archives from BACKUP_DIR, keyed on the 14-digit
# YYYYMMDDHHMMSS timestamp embedded in each zip's file name.
# Tiered retention (age in whole days):
#   < 3       : keep everything
#   3-29      : keep only ages that are exact multiples of 3
#   30-89     : keep only exact multiples of 30
#   90-149    : keep only exact multiples of 90
#   150-364   : keep only exact multiples of 150
#   >= 365    : delete unconditionally
# NOTE(review): ages advance daily, so a backup kept today can be deleted on
# a later run once its age stops being an exact multiple — confirm this
# rolling behavior matches the intended schedule.
cleanup_old_backups() {
log "Cleaning up old backups"
# The while loop runs in a pipeline subshell; that is fine here because it
# only deletes files and sets no variables the caller reads.
find "$BACKUP_DIR" -type f -name "*.zip" | while read -r backup_file; do
# Extract the timestamp from the file name; empty if none found.
# NOTE(review): under `set -e`, a zip name with no 14-digit run makes grep
# exit non-zero and this assignment aborts the subshell, so the "Could not
# extract date" branch below may be unreachable — verify against stray files.
backup_date=$(basename "$backup_file" | grep -o -E '[0-9]{14}')
if [ -n "$backup_date" ]; then
# Convert to epoch seconds (GNU `date -d`; not portable to BSD/macOS date).
backup_epoch=$(date -d "${backup_date:0:8} ${backup_date:8:2}:${backup_date:10:2}:${backup_date:12:2}" +%s)
current_epoch=$(date +%s)
diff_days=$(( (current_epoch - backup_epoch) / 86400 ))
if [ $diff_days -ge 365 ]; then
log "Deleting backup $backup_file older than 365 days"
rm "$backup_file"
elif [ $diff_days -ge 150 ] && [ $(( diff_days % 150 )) -ne 0 ]; then
log "Deleting backup $backup_file older than 150 days but less than 365 days"
rm "$backup_file"
elif [ $diff_days -ge 90 ] && [ $(( diff_days % 90 )) -ne 0 ]; then
log "Deleting backup $backup_file older than 90 days but less than 150 days"
rm "$backup_file"
elif [ $diff_days -ge 30 ] && [ $(( diff_days % 30 )) -ne 0 ]; then
log "Deleting backup $backup_file older than 30 days but less than 90 days"
rm "$backup_file"
elif [ $diff_days -ge 3 ] && [ $(( diff_days % 3 )) -ne 0 ]; then
log "Deleting backup $backup_file older than 3 days but less than 30 days"
rm "$backup_file"
fi
else
log "Could not extract date from $backup_file"
fi
done
}
# --- Main flow ---
log "Backup script started"
create_dir "$BACKUP_DIR"
create_dir "$TEMP_HOME_DIR"
create_dir "$TEMP_BACKUP_DIR"
copy_my_cnf
copy_data
export_databases
create_archive
log "Removing temporary backup directory $TEMP_BACKUP_DIR"
# "${VAR:?}" aborts instead of expanding to "" — guards against `rm -rf /`
# if the variable is ever unset; `--` stops option parsing on odd names.
rm -rf -- "${TEMP_HOME_DIR:?}"
cleanup_old_backups
log "Backup completed and expired backups cleaned"