总是忘记数据库的数据备份,所以写了一个脚本来备份。
最近发现表已经增长得比较大了,单进程的导入脚本耗时比较长,所以更新了脚本以支持并发导入。
并发数量设置时,请注意目标服务器的负载情况。
表特别多可以并发进行单表备份,以下为数据库导出、导入支持指定表的全功能脚本示例(注意替换变量后使用):
#--------------------------------- config -------------------------------------
# Source database connection settings (intranet read-only instance, production).
DB_HOST="demo"
DB_USER="demo"
DB_PASS="demo"
DB_NAME="demo"
# Target database connection settings (VPC intranet).
TARGET_DB_HOST="jeeinn"
TARGET_DB_USER="jeeinn"
TARGET_DB_PASS="jeeinn"
TARGET_DB_NAME="jeeinn"
# Working directory for per-table dump files and run logs.
BACKUP_DIR="/data/backup/tmp"
# Maximum number of concurrent export / import jobs.
# NOTE(review): mind the load on both DB servers when raising these.
MAX_EXPORT_JOBS=4
MAX_IMPORT_JOBS=6
# Tables to export (one name per line).
TABLES=(
tb1
tb2_jeeinn
)
#--------------------------------- start -------------------------------------
# Create the working directory and announce the run.
mkdir -p "${BACKUP_DIR}"
echo "[$(date "+%Y-%m-%d %H:%M:%S")] Start sync data..."
# Dump one table from the source DB into ${BACKUP_DIR}/<table>.sql.
# Globals:   DB_HOST, DB_USER, DB_PASS, DB_NAME, BACKUP_DIR (read)
# Arguments: $1 - table name
# Outputs:   progress lines appended to ${BACKUP_DIR}/<table>_run.log
# Returns:   0 on success, 1 on failure. The original version always
#            returned 0 and left a partial dump file behind, which the
#            import phase would then silently load; on failure we now
#            remove the partial file and report a non-zero status.
backup_table() {
    local table=$1
    local backup_file="${BACKUP_DIR}/${table}.sql"
    local run_log="${BACKUP_DIR}/${table}_run.log"
    echo "[$(date "+%Y-%m-%d %H:%M:%S")] Exporting table ${table}..." >> "${run_log}"
    # --single-transaction: consistent, non-blocking InnoDB snapshot.
    if /usr/bin/mysqldump -h"${DB_HOST}" -u"${DB_USER}" -p"${DB_PASS}" \
        --set-gtid-purged=off \
        --default-character-set=utf8 \
        --single-transaction "${DB_NAME}" "${table}" > "${backup_file}"; then
        echo "[$(date "+%Y-%m-%d %H:%M:%S")] Export succeeded: ${backup_file}" >> "${run_log}"
    else
        echo "[$(date "+%Y-%m-%d %H:%M:%S")] Export failed: ${table}" >> "${run_log}"
        rm -f -- "${backup_file}"   # drop the partial dump so it cannot be imported
        return 1
    fi
}
# Throttled fan-out: keep at most MAX_EXPORT_JOBS dumps running at once.
running=0
for tbl in "${TABLES[@]}"; do
    backup_table "$tbl" &
    running=$(( running + 1 ))
    if [ "$running" -ge "$MAX_EXPORT_JOBS" ]; then
        wait -n                      # reap one finished job before starting more
        running=$(( running - 1 ))
    fi
done
wait                                 # barrier: all remaining exports finish here
echo "[$(date "+%Y-%m-%d %H:%M:%S")] Export completed"
# Import one table's dump from ${BACKUP_DIR}/<table>.sql into the target DB.
# Globals:   TARGET_DB_HOST, TARGET_DB_USER, TARGET_DB_PASS,
#            TARGET_DB_NAME, BACKUP_DIR (read)
# Arguments: $1 - table name
# Outputs:   progress lines appended to ${BACKUP_DIR}/<table>_run.log
# Returns:   0 on success, 1 on failure (a missing dump file also fails,
#            since the input redirection cannot be opened). The original
#            version swallowed failures and always returned 0.
# NOTE(review): autocommit=0 without an explicit COMMIT may roll back on
# disconnect if the dump file itself never commits — TODO confirm the
# dump files end with a COMMIT before relying on this speed-up.
import_table() {
    local table=$1
    local backup_file="${BACKUP_DIR}/${table}.sql"
    local run_log="${BACKUP_DIR}/${table}_run.log"
    echo "[$(date "+%Y-%m-%d %H:%M:%S")] Importing table ${table}..." >> "${run_log}"
    if /usr/bin/mysql -h"${TARGET_DB_HOST}" -u"${TARGET_DB_USER}" -p"${TARGET_DB_PASS}" \
        --default-character-set=utf8 \
        --init-command="SET autocommit=0; SET foreign_key_checks=0;" "${TARGET_DB_NAME}" < "${backup_file}"; then
        echo "[$(date "+%Y-%m-%d %H:%M:%S")] Import succeeded: ${backup_file}" >> "${run_log}"
    else
        echo "[$(date "+%Y-%m-%d %H:%M:%S")] Import failed: ${table}" >> "${run_log}"
        return 1
    fi
}
# Throttled fan-out: keep at most MAX_IMPORT_JOBS imports running at once.
running=0
for tbl in "${TABLES[@]}"; do
    import_table "$tbl" &
    running=$(( running + 1 ))
    if [ "$running" -ge "$MAX_IMPORT_JOBS" ]; then
        wait -n                      # reap one finished job before starting more
        running=$(( running - 1 ))
    fi
done
wait                                 # barrier: all remaining imports finish here
echo "[$(date "+%Y-%m-%d %H:%M:%S")] Import completed"
# Merge all per-table run logs into a single dated log, then remove them.
# Fix: the original unquoted glob errored out (and produced a garbage
# merged file) when no *_run.log existed; we expand the glob into an
# array and check for a real match first.
echo "[$(date "+%Y-%m-%d %H:%M:%S")] Merging run log..."
run_logs=( "${BACKUP_DIR}"/*_run.log )
if [ -e "${run_logs[0]}" ]; then
    cat "${run_logs[@]}" > "${BACKUP_DIR}/db_sync_run_$(date "+%Y%m%d").log"
    rm -f -- "${run_logs[@]}"
else
    echo "No run logs found in ${BACKUP_DIR}" >&2
fi
echo "[$(date "+%Y-%m-%d %H:%M:%S")] Merge run log completed"
更新于2025-09-22 20:39:24
按注释提示设置相应的数据库连接选项和备份文件存放路径即可,支持排除指定的数据表。
#!/bin/bash
# Timestamp embedded in the backup file name (minute precision).
TODAY=$(date "+%Y%m%d_%H%M")
# Directory where backup archives are kept.
BACK_FOLDER="/mnt/db_back"
# Source database connection settings.
# NOTE(review): "USRENAME" is a typo for "USERNAME"; kept unchanged
# because the dump command below references it by this name.
HOST="localhost"
USRENAME="root"
PASSWORD="root"
DATABASE="test"
# Backup file name and its full path.
FILE_NAME="${DATABASE}_${TODAY}.sql"
BACK_FILENAME="${BACK_FOLDER}/${FILE_NAME}"
# Tables to exclude from the dump (one per line).
EXCLUDED_TABLES=(
db_log
)
# Build the --ignore-table options as one space-separated string; it is
# later expanded unquoted so each option becomes its own argument.
IGNORED_TABLES_STRING=''
for tbl in "${EXCLUDED_TABLES[@]}"; do
    IGNORED_TABLES_STRING+=" --ignore-table=${DATABASE}.${tbl}"
done
# Run the dump.
# FIX: the original put comments after the trailing "\" continuations
# ("--quick \ # ..."). A "\ <space>" escapes the space instead of
# continuing the line, so the command ended early and every following
# option line ran as a separate failing command, leaving an empty dump
# that still got archived. Comments now live above the command.
# Option notes:
#   --single-transaction : consistent, non-blocking InnoDB snapshot
#   --quick              : stream rows instead of buffering in memory
#   --skip-add-drop-table: optional; do not DROP existing tables on restore
#   --set-gtid-purged=OFF: skip GTID info (was duplicated in the original)
#   --hex-blob           : safe dump of binary columns
# IGNORED_TABLES_STRING is intentionally unquoted: it carries several
# space-separated --ignore-table options.
if /usr/bin/mysqldump -h"${HOST}" -u"${USRENAME}" -p"${PASSWORD}" \
    --single-transaction \
    --quick \
    --skip-add-drop-table \
    --set-gtid-purged=OFF \
    --hex-blob \
    --default-character-set=utf8 \
    ${IGNORED_TABLES_STRING} "${DATABASE}" > "${BACK_FILENAME}"; then
    # Compress the dump and delete the raw .sql (GNU tar --remove-files).
    cd "${BACK_FOLDER}" || exit 1
    tar -zcf "${FILE_NAME}.tar.gz" "${FILE_NAME}" --remove-files
    # Delete the archive taken exactly 15 days ago (minute precision).
    # NOTE(review): a missed run leaves old archives behind; consider
    #   find "${BACK_FOLDER}" -name "${DATABASE}_*.sql.tar.gz" -mtime +15 -delete
    BEFORE=$(date -d"15 day ago" +%Y%m%d_%H%M)
    rm -f "${DATABASE}_${BEFORE}.sql.tar.gz"
    echo "finish BACK mysql database ${DATABASE}."
else
    # Do not archive a failed/partial dump.
    rm -f -- "${BACK_FILENAME}"
    echo "[ERROR] mysqldump of ${DATABASE} failed; no archive created" >&2
fi
你也可以根据实际需求选择合适的备份方式,对脚本自行修改。
# Import the dump into the target database.
# FIX: the original had comments after the trailing "\" continuations
# (e.g. 'utf8 \ #设置字符集'), which breaks the continuation and splits
# the command into several invalid commands. Session settings relax
# checks/binlogging for a faster bulk load; sql_log_bin=0 requires
# SUPER — remove it if the user lacks the privilege (see note below).
/usr/bin/mysql -h"${TARGET_HOST}" -u"${TARGET_USERNAME}" -p"${TARGET_PASSWORD}" \
    --default-character-set=utf8 \
    --init-command="SET SESSION autocommit=0; SET SESSION unique_checks=0;
SET SESSION foreign_key_checks=0; SET SESSION sql_log_bin=0;" \
    "${TARGET_DATABASE}" < "${TMP_BACK_FILENAME}" \
    || echo "[ERROR] import into ${TARGET_DATABASE} failed" >&2
注:使用 --init-command 参数时,某些导入会报权限错误,将该参数中的 SET SESSION sql_log_bin=0; 移除即可。
示例错误:ERROR 1227 (42000): Access denied; you need (at least one of) the SUPER privilege(s) for this operation