# File: //bigscoots/wpo_backups_ovz.sh
#!/bin/bash
# exec &>> >(tee -a /var/log/myscript.log | xargs -I{} bash -c 'echo "$(date "+%Y-%m-%d %H:%M:%S") {}" >> /root/.bigscoots/logs/backups.log')
# Shared helpers: slack alert functions (send_slack_alert, send_slack_initial,
# send_slack_thread), JSON response helpers, wpcli wrapper, $serverip, etc.
# NOTE(review): exact contents of common.sh are not visible here — confirm.
source /bigscoots/includes/common.sh
# Detect whether this server runs the newer Kopia-based backup system.
# The manager's "check" subcommand prints JSON; '"success": true' means Kopia
# is healthy, so most of the legacy rsync preparation below is skipped.
KOPIA_STATUS=$(bash /bigscoots/wpo/manage/bsbackup_manager.sh check 2>/dev/null)
if echo "$KOPIA_STATUS" | grep -q '"success": true'; then
kopia=y
else
kopia=n
fi
# Timestamp used to name this run's backup directory (underscores instead of
# colons so it is filesystem/rsync safe).
date=$(date "+%Y-%m-%dT%H_%M_%S")
HOMEDIR=/home/nginx/domains/
BKSVR=
BSPATH=/root/.bigscoots
# NOTE(review): /root/bin appears twice in PATH — harmless but redundant.
PATH=/usr/lib64/ccache:/usr/local/sbin:/usr/local/bin:/sbin:/bin:/usr/sbin:/usr/bin:/root/bin:/root/bin
# Flags for the wpcli wrapper (defined in common.sh — presumably; verify).
WPCLIFLAGS="--allow-root --skip-plugins --skip-themes --require=/bigscoots/includes/err_report.php"
# Retention count reported to initial_client callers as "backuplimit".
BKLIMIT=30
# Legacy string form of the ssh transport, passed to rsync -e.
SSHOPTIONS="ssh -oStrictHostKeyChecking=no -i $HOME/.ssh/wpo_backups -oBatchMode=yes"
# UTC timestamp reported to the n8n webhook as "last_attempted".
NOW=$(date -u +"%Y-%m-%dT%H:%M:%SZ")
# Tables whose DATA is skipped during dumps (structure only): large log/stat
# tables that are safe to lose. Each entry is later prefixed with the site's
# actual table prefix before comparison.
EXCLUDED_TABLES=(
bwps_log
statpress
slim_stats
redirection_logs
Counterize
Counterize_Referers
Counterize_UserAgents
wbz404_logs
wbz404_redirects
tts_trafficstats
tts_referrer_stats
wponlinebackup_generations
svisitor_stat
simple_feed_stats
itsec_log
relevanssi_log
blc_instances
wysija_email_user_stat
woocommerce_sessions
et_bloom_stats
redirection_404
lbakut_activity_log
stream_meta
wfFileMods
wffilemods
wfBlockedIPLog
wfblockediplog
page_visit_history
strack_st
bb_background_job_queue
)
# Array form of the ssh options, used for direct ssh invocations (the string
# form above is kept for rsync -e).
SSH_OPTIONS=(-o StrictHostKeyChecking=no -o UserKnownHostsFile=/dev/null -o PasswordAuthentication=no -o BatchMode=yes)
# SLACK - post a warning-formatted message to the #wpo-backups channel.
# Arguments: $1 - free-form message text (may contain Slack markdown).
# Globals read: serverip, wpinstall (expected to be set by the caller/common.sh).
# Output/side effect: delegates delivery to /bigscoots/general/slack.sh.
SLACK() {
  local msg="$1"
  local body
  body=":warning: *WPO Backup*\n*Hostname:* $(hostname)\n *Server IP:* ${serverip} *WP Path:* ${wpinstall}\n *Message:* ${msg}"
  bash /bigscoots/general/slack.sh "#wpo-backups" "$body"
}
# send_webhook_data - report a backup attempt to the n8n inventory webhook.
# Arguments:
#   $1 - hostname of this server
#   $2 - server IP
#   $3 - last attempted timestamp (UTC ISO-8601)
#   $4 - backup system label ("legacy" or "kopia")
# Outputs: progress line on stdout, then curl's response.
send_webhook_data() {
  local hostname="$1" ip="$2" last_attempted="$3" backup_system="$4"
  # SECURITY NOTE(review): bearer token is hardcoded in the script and visible
  # in `ps` during the curl call — consider moving it to a root-only file.
  local auth_token="LKASDJFO#Fa3FI#AF(*3af(#ART(A#8ta7"
  echo "Sending data for $hostname..."
  # Build the JSON body first, then POST it.
  local payload="{
\"hostname\": \"$hostname\",
\"ip\": \"$ip\",
\"last_attempted\": \"$last_attempted\",
\"backup_system\": \"$backup_system\"
}"
  curl --location "https://n8n.bigscoots.dev/webhook/098148c8-42c8-4fa1-a657-944d86afb385" \
    --header "Authorization: Bearer $auth_token" \
    --header "Content-Type: application/json" \
    --data "$payload"
}
# Fetch the shared hash used later to authenticate the support-task API call.
# Failure is non-fatal: alert #wpo-errors and continue with an empty hash.
if ! VISITOR_HASH=$(curl -s https://www.bigscoots.com/downloads/uniquevisithash 2>/dev/null)
then
bash /bigscoots/general/slack.sh "#wpo-errors" ":warning: $(hostname) - ${serverip} - Failed to pull the hash using CURL for wpo_backups_ovz.sh"
fi
# Legacy (non-Kopia) preparation. Everything up to the matching fi only runs
# when the Kopia health check above failed.
if [[ $kopia == n ]]; then
mkdir -p "$BSPATH"
touch "$BSPATH"/backupinfo
# A backupinfo containing the wpo33392 marker is treated as stale and wiped so
# it is regenerated below. NOTE(review): the meaning of wpo33392 is not visible
# here — presumably a decommissioned backup user; also note the literal
# /root/.bigscoots path instead of $BSPATH.
if grep -q wpo33392 /root/.bigscoots/backupinfo
then
echo > /root/.bigscoots/backupinfo
fi
# Ensure jq exists: try the package manager, then fall back to a static binary
# if jq is still missing or broken (jq -Rn is a cheap self-test).
if ! rpm -q jq >/dev/null 2>&1 ; then
yum -q -y install jq >/dev/null 2>&1
fi
if ! jq -Rn >/dev/null 2>&1; then
wget -O /usr/bin/jq https://github.com/stedolan/jq/releases/download/jq-1.6/jq-linux64 >/dev/null 2>&1
chmod +x /usr/bin/jq
fi
# Allowlist the BigScoots backup-infrastructure IPs in CSF so backup servers
# can connect. Only applies when CSF is installed (csf.allow exists); each IP
# is added at most once. (grep uses the dotted IP as a regex — '.' matches any
# character, which is acceptable for this presence check.)
for bs_ip in 69.162.173.37 50.31.116.52 67.202.70.92 216.185.212.7 216.185.212.8; do
  if [ -f /etc/csf/csf.allow ] && ! grep -q "$bs_ip" /etc/csf/csf.allow; then
    csf -a "$bs_ip" >/dev/null 2>&1
  fi
done
# Seed the per-server rsync exclude list on first run: caches, logs, core
# dumps, and output directories of third-party backup plugins (no point
# backing up backups). Admins may append site-specific patterns afterwards.
if [ ! -f "$BSPATH"/rsync/exclude ]; then
mkdir -p "$BSPATH"/rsync
{
# Sentinel/marker line — presumably identifies the generated file version;
# NOTE(review): its meaning is not visible in this script.
echo "# 3F4duEDQ80b4a9gM4"
echo ".infected_*"
echo "log/access.log*"
echo "log/error.log*"
echo "*/core.[0-9]*"
echo "error_log"
echo "debug.log"
echo "plugin.log"
echo "*/wp-content/updraft"
echo "*/wp-content/cache"
echo "*/wp-content/wpbackitup_backups"
echo "*/wp-content/uploads/ithemes-security"
echo "*/wp-content/uploads/wpallimport"
echo "*/wp-content/uploads/ShortpixelBackups"
echo "backupbuddy_backups"
echo "*/wp-content/backupwordpress-*-backups"
echo "*/wp-content/backups-dup-pro"
echo "*/wp-content/wp-cloudflare-super-page-cache"
} > "$BSPATH"/rsync/exclude
fi
# No backup server configured AND not marked as a local-destination box:
# report the attempt to the inventory webhook and bail out.
if ! grep -q "bksvr" "$BSPATH/backupinfo" && ! grep -q "destination=local" "$BSPATH/backupinfo"
then
send_webhook_data "$HOSTNAME" "$serverip" "$NOW" "legacy" >/dev/null 2>&1
exit 1
fi
# Pull the configured backup server hostname (bksvr=<host>) if present.
if grep bksvr "$BSPATH"/backupinfo >/dev/null 2>&1
then
BKSVR=$(grep bksvr "$BSPATH"/backupinfo | sed 's/bksvr=//g')
fi
# Decide between remote and local backups, and derive the remote user name.
if ! grep -q destination=local /root/.bigscoots/backupinfo >/dev/null 2>&1
then
# OpenVZ container (/proc/vz/veinfo exists): remote backups, user derived
# from the configured bkuser= or from the container ID.
if [ -f /proc/vz/veinfo ] && ! grep -q destination=local /root/.bigscoots/backupinfo >/dev/null 2>&1
then
remote=y
if grep -q bkuser= "${BSPATH}"/backupinfo; then
BKUSER=$(grep bkuser= "${BSPATH}"/backupinfo | sed 's/=/ /g' | awk '{print $2}')
else
# First field of /proc/vz/veinfo — presumably the VE (container) ID.
BKUSER=wpo$(awk '{print $1}' /proc/vz/veinfo)
fi
# No /backup mount and not explicitly marked remote: misconfiguration —
# alert, report, and fall back to remote with a hostname-derived user.
elif ! grep -qs '/backup ' /proc/mounts && ! grep destination=remote "$BSPATH"/backupinfo >/dev/null 2>&1
then
send_slack_alert "#wpo-backups" ":warning:" "Backup drive not mounted in" "N/A" "Make sure to set destination=remote in ${BSPATH}/backupinfo if supposed to be remote backups."
send_webhook_data "$HOSTNAME" "$serverip" "$NOW" "legacy" >/dev/null 2>&1
remote=y
BKUSER=wpo"${HOSTNAME//./}"
# Explicitly remote: use the configured bkuser= or the hostname-derived one.
elif ! grep -qs '/backup ' /proc/mounts && grep destination=remote "$BSPATH"/backupinfo >/dev/null 2>&1
then
remote=y
if [[ -n $(grep bkuser= "${BSPATH}"/backupinfo | sed 's/=/ /g' | awk '{print $2}') ]]
then
BKUSER=$(grep bkuser= "${BSPATH}"/backupinfo | sed 's/=/ /g' | awk '{print $2}')
else
BKUSER=wpo"${HOSTNAME//./}"
fi
fi
fi
# Persist the derived user so later runs reuse it.
if ! grep -q bkuser= "${BSPATH}"/backupinfo
then
echo bkuser=${BKUSER} >> "${BSPATH}"/backupinfo
fi
fi
# Verify connectivity to the remote backup server before any backup work (the
# one-off subcommands initial_*, download and s2dropbox manage their own
# connections). On failure: alert Slack, report to the webhook, open a WPO
# support task, and exit. On a local-destination box just point rsync at
# /backup/.
if [[ $remote == y ]] && [[ ! $1 =~ (initial_*|download|s2dropbox) ]]
then
RSYNCLOCATION="$BKUSER@$BKSVR:"
# Try the backup server that is already defined; capture stderr for reporting.
SSH_ERROR_FILE=$(mktemp)
if ! ssh "${SSH_OPTIONS[@]}" -i "$HOME"/.ssh/wpo_backups "$BKUSER"@"$BKSVR" 'exit' > /dev/null 2> "$SSH_ERROR_FILE"
then
# Remove any carriage returns
sed -i 's/\r//g' "$SSH_ERROR_FILE"
# Read the error from the temporary file and store it in a variable
SSH_ERROR=$(<"$SSH_ERROR_FILE")
# If there are no domains, it means initial backup was never run yet so connection is going to fail; just exit.
if [ ! "$(ls -A /home/nginx/domains)" ]
then
rm "$SSH_ERROR_FILE"
exit
fi
# Escape double quotes so the status can be embedded in the Slack payload.
# FIX: the status was previously echoed unquoted, which word-split the JSON
# and glob-expanded any wildcard characters in it.
KOPIA_STATUS=$(printf '%s' "$KOPIA_STATUS" | sed 's/"/\\"/g')
send_slack_alert "#wpo-backups" ":warning:" "Check Backup Connection Failed" "N/A" "\`\`\`$SSH_ERROR\`\`\`\n Kopia Status: \`\`\`$KOPIA_STATUS\`\`\`"
send_webhook_data "$HOSTNAME" "$serverip" "$NOW" "kopia"
# SSH connection failed, will open up a support task in WPO.
WPOIP=$(ip route get 1 | awk '{print $NF;exit}')
# NOTE(review): these -H values use '=' instead of ':' and are therefore not
# valid HTTP headers — confirm whether the API actually receives them.
curl -H "X-SCOOTS-HASH=${VISITOR_HASH}" -H "X-SCOOTS-HOOK=true" -s -d "email=${WPOIP}&domain=${HOSTNAME}&type=Backup%20Failure" -X POST https://api-dev.bigscoots.com/alerts/generate-support-task >/dev/null 2>&1
# Clean up: remove the temporary file
rm "$SSH_ERROR_FILE"
# Exit with an error status
exit 1
fi
rm "$SSH_ERROR_FILE" # Clean up the temporary file in case of success as well
else
# Not a remote backup: target the locally mounted backup drive.
RSYNCLOCATION=/backup/
fi
# Dispatch on the first argument. No argument (or an unknown one) runs the
# scheduled daily backup in the *) arm.
case $1 in
# manual: on-demand backup of a single site into a manual-<name> folder.
# Usage: wpo_backups_ovz.sh manual manual-<name>, run from inside the site's
# public directory (the wpcli calls below rely on $(pwd)).
manual)
# Initialize JSON response
init_json_response
if [[ $2 == manual-* ]]
then
MANBACKUP="$2"
if wpcli core is-installed 2> /dev/null
then
add_json_message "Found valid WordPress install at $(pwd)"
if DB_NAME=$(wpcli config get DB_NAME 2> /dev/null)
then
add_json_message "Database is $DB_NAME"
# Configuration
DB_PREFIX="$(wpcli config get table_prefix 2> /dev/null)"
add_json_message "Database prefix is $DB_PREFIX"
DB_TABLES=$(mysql --skip-column-names --batch -e "SHOW TABLES FROM $DB_NAME")
# Add prefix to excluded tables
for i in "${!EXCLUDED_TABLES[@]}"; do
# Remove the prefix from the excluded table name
EXCLUDED_TABLE_NAME=${EXCLUDED_TABLES[i]#$DB_PREFIX}
# Concatenate the prefix with the table name (without the prefix)
EXCLUDED_TABLES[i]="${DB_PREFIX}${EXCLUDED_TABLE_NAME}"
add_json_message "Excluding table: ${EXCLUDED_TABLES[i]}"
done
# Include additional excluded tables from file if it exists
EXCLUDE_FILE="${BSPATH}/backups/${DB_NAME}.tbl.exclude"
if [ -f "$EXCLUDE_FILE" ]; then
ADDITIONAL_EXCLUDES=($(cat "$EXCLUDE_FILE"))
EXCLUDED_TABLES=("${EXCLUDED_TABLES[@]}" "${ADDITIONAL_EXCLUDES[@]}")
for table in "${ADDITIONAL_EXCLUDES[@]}"; do
add_json_message "Additional excluded table: $table"
done
fi
# Create an empty file to store the combined SQL
COMBINED_SQL="$DB_NAME.sql"
add_json_message "Creating empty $COMBINED_SQL"
if [ -f "$COMBINED_SQL" ]
then
rm "$COMBINED_SQL"
fi
touch "$COMBINED_SQL"
if [ -f "$COMBINED_SQL" ]
then
add_json_message "Successfully created $COMBINED_SQL"
else
add_json_error "Failed to create $COMBINED_SQL"
fi
# Backup each table (excluding the ones in EXCLUDE_TABLES). Excluded tables
# still contribute their schema via --no-data below.
for DB_TABLE in $DB_TABLES
do
add_json_message "Backing up database table: $DB_TABLE"
if [[ ! " ${EXCLUDED_TABLES[@]} " =~ " ${DB_TABLE} " ]]
then
RETRIES=0
while true; do
mysqldump --routines --events --single-transaction --max_allowed_packet=512M "$DB_NAME" "$DB_TABLE" >> "$COMBINED_SQL" 2>>database.err
EXIT_STATUS=$?
if [ $EXIT_STATUS -eq 0 ]; then
break
# Exit status 3 indicates table-level failure (corruption): attempt exactly
# one mysqlcheck --repair, then skip the table if the dump still fails.
elif [ $EXIT_STATUS -eq 3 ]; then
if [ $RETRIES -eq 1 ]; then
SLACK "Skipping table \`$DB_TABLE\` due to persistent corruption.\n *Related Errors:*\`\`\`$(cat database.err)\`\`\`"
add_json_error "Skipping table $DB_TABLE due to persistent corruption. Related Errors: $(cat database.err)"
break
else
mysqlcheck --repair "$DB_NAME" "$DB_TABLE" &>>database.err
RETRIES=1
fi
else
SLACK "Skipping table \`$DB_TABLE\` due to exit status: $EXIT_STATUS\n *Related Errors:*\`\`\`$(cat database.err)\`\`\`"
add_json_error "Skipping table $DB_TABLE due to exit status: $EXIT_STATUS. Related Errors: $(cat database.err)"
break
fi
done
else
mysqldump --no-data --routines --events --single-transaction --max_allowed_packet=512M "$DB_NAME" "$DB_TABLE" >> "$COMBINED_SQL" 2>>database.err
fi
done
# Compress the combined SQL file
gzip -c "$COMBINED_SQL" > "$DB_NAME.sql.gz"
if [ -f "$DB_NAME.sql.gz" ]
then
add_json_message "Successfully compressed $DB_NAME.sql.gz"
rm -f "$DB_NAME.sql"
else
send_slack_alert "#wpo-backups" ":warning:" "Backup failed" "$DOMAIN" "Failed to gzip $DB_NAME.sql.gz"
add_json_error "Failed to gzip $DB_NAME.sql.gz"
fi
else
send_slack_alert "#wpo-backups" ":warning:" "Backup failed" "$DOMAIN" "Unable to pull the database from wp-config.php"
add_json_error "Unable to pull the database from wp-config.php"
fi
else
send_slack_alert "#wpo-backups" ":warning:" "Backup failed" "$DOMAIN" "Not a valid WordPress install"
add_json_error "Not a valid WordPress install"
print_json_response
exit 1
fi
fi
# File sync: rsync the site's parent directory into incomplete_back-<ts>,
# hardlinked against "current" (--link-dest) so unchanged files cost nothing,
# then rename to the requested manual-* folder once the transfer completed.
if [[ $remote == y ]]
then
# Make sure a "current" symlink exists on the backup server to hardlink against.
ssh -n -oStrictHostKeyChecking=no -i "${HOME}/.ssh/wpo_backups" "${BKUSER}@${BKSVR}" "[ ! -e current ] && latest_backup=\$(ls -1 | grep ^back | tail -1) && [ -n \"\$latest_backup\" ] && ln -s \"\$latest_backup\" current"
bash /bigscoots/tools/rsync.sh -ah \
-e "${SSHOPTIONS}" \
--ignore-errors \
--delete \
--delete-excluded \
--exclude-from=/bigscoots/wpo/backups/rsync/exclude \
--exclude-from="${BSPATH}"/rsync/exclude \
--link-dest=../current \
"$(dirname ${PWD})" "${RSYNCLOCATION}incomplete_back-${date}"
# Rename incomplete_back-<ts> to the manual-* name now that rsync completed.
ssh -n -oStrictHostKeyChecking=no -i "${HOME}"/.ssh/wpo_backups "${BKUSER}"@"${BKSVR}" "mv incomplete_back-$date ${MANBACKUP}"
else
# Local equivalent of the remote branch above.
[ ! -e /backup/current ] && latest_backup=$(ls -1 /backup/ | grep ^back | tail -1) && [ -n "$latest_backup" ] && ln -s /backup/"$latest_backup" /backup/current
bash /bigscoots/tools/rsync.sh -ah \
-e "$SSHOPTIONS" \
--ignore-errors \
--delete \
--delete-excluded \
--exclude-from=/bigscoots/wpo/backups/rsync/exclude \
--exclude-from="$BSPATH"/rsync/exclude \
--link-dest=../current \
"$(dirname ${PWD})" "${RSYNCLOCATION}incomplete_back-${date}"
# Rename incomplete_back-<ts> to the manual-* name now that rsync completed.
mv /backup/incomplete_back-"${date}" /backup/${MANBACKUP}
fi
rm -f "$DB_NAME.sql.gz"
set_json_success
print_json_response
;;
# delete: remove a previously created manual-* backup folder for this site.
# Usage: wpo_backups_ovz.sh delete manual-<name>
delete)
if [[ -z $2 ]]; then
echo "Make sure to specify a manual backup folder name."
exit
fi
if [[ $2 == manual-* ]]; then
if [[ $remote == y ]]; then
# Empty the site's directory inside the manual backup by rsyncing an empty
# dir with --delete (fast remote recursive delete), then rmdir the husk.
mkdir -p "$HOMEDIR"/.empty
bash /bigscoots/tools/rsync.sh -a \
-e "$SSHOPTIONS" \
--ignore-errors \
--ignore-missing-args \
--delete \
"$HOMEDIR"/.empty/ "$BKUSER"@"$BKSVR":"$2"/"$(dirname "$PWD" | sed 's/\// /g' | awk '{print $4}')"
ssh -oStrictHostKeyChecking=no -i "$HOME"/.ssh/wpo_backups "$BKUSER"@"$BKSVR" "rmdir -p $2/$(dirname "$PWD" | sed 's/\// /g' | awk '{print $4}')"
# [ ! -e current ] is true when "current" is a DANGLING symlink (-e follows
# the link), so this repairs current if it pointed at the deleted backup.
ssh -oStrictHostKeyChecking=no -i "$HOME"/.ssh/wpo_backups "$BKUSER"@"$BKSVR" "if [ ! -e current ] ; then rm current ; latest_backup=\$(ls -1 | grep ^back | tail -1) ; ln -s \"\$latest_backup\" current ; fi"
else
# Local variant of the same empty-dir rsync trick, then remove the folder.
mkdir -p "$HOMEDIR"/.empty
bash /bigscoots/tools/rsync.sh -a \
--ignore-errors \
--ignore-missing-args \
--delete \
"$HOMEDIR"/.empty/ /backup/"$2"/"$(dirname $PWD | sed 's/\// /g' | awk '{print $4}')"
rm -rf "/backup/$2"
# Same dangling-symlink repair as above for the local /backup/current link.
if [ ! -e /backup/current ]
then
rm /backup/current
latest_backup=$(ls -1 /backup/| grep ^back | tail -1)
ln -s /backup/"$latest_backup" /backup/current
fi
fi
fi
;;
# initial_client: one-time setup on the web server — install the cron entry,
# pick/record a backup server, generate the ssh keypair, and print a JSON
# summary (consumed by the provisioning system) describing what still needs
# to happen (e.g. pushing the public key to the backup server).
initial_client)
# Replace any legacy dedi cron with this script's cron at a random minute/hour.
if crontab -l | grep -q /bigscoots/wpo_backups_dedi.sh
then
crontab -l | grep -v 'wpo_backups_dedi.sh' | crontab -
fi
if ! crontab -l | grep -q /bigscoots/wpo_backups_ovz.sh
then
crontab -l | { cat; echo "$(( ( RANDOM % 59 ) + 1 )) $(( ( RANDOM % 23 ) + 1 )) * * * /bigscoots/wpo_backups_ovz.sh"; } | crontab -
fi
# Record a backup server: pick one at random when no local /backup mount
# exists, otherwise mark it local.
if ! grep -q bksvr "${BSPATH}"/backupinfo && ! grep -qs '/backup ' /proc/mounts
then
BKSVR="$(shuf -e backup12.bigscoots.com backup21.bigscoots.com | head -1)"
echo bksvr="${BKSVR}" >> "${BSPATH}"/backupinfo
else
if ! grep -q bksvr "${BSPATH}"/backupinfo && grep -qs '/backup ' /proc/mounts
then
echo bksvr=local >> "${BSPATH}"/backupinfo
fi
fi
# pushkey=true means the caller must run initial_server on the backup server
# with our public key (reported as runSecondScript in the JSON below).
pushkey=false
if grep -qs destination=local /root/.bigscoots/backupinfo
then
pubkey=null
BKSVR=local
BKUSER=/backup
else
# Generate the dedicated backup keypair if missing.
if [ ! -s ~/.ssh/wpo_backups ]; then
ssh-keygen -t ed25519 -f ~/.ssh/wpo_backups -q -N '' <<< y >/dev/null 2>&1
fi
# If key auth to the backup server fails, regenerate and flag for key push.
if ! ssh -q -o BatchMode=yes -o StrictHostKeyChecking=no -o PasswordAuthentication=no -i "$HOME"/.ssh/wpo_backups "$BKUSER"@"$BKSVR" exit; then
ssh-keygen -t ed25519 -f ~/.ssh/wpo_backups -q -N '' <<< y >/dev/null 2>&1
pushkey=true
fi
if [ ! -s ~/.ssh/wpo_backups.pub ]; then
pubkey=null
else
# Second field of the .pub file: the base64 key material (without type/comment).
pubkey=$(awk '{print $2}' /root/.ssh/wpo_backups.pub)
fi
fi
# Two-line pipe-delimited header/value pairs, turned into a JSON object by jq.
backupinfo="runSecondScript|sshpubkey|backupserver|backupuser|backuplimit
$pushkey|$pubkey|$BKSVR|$BKUSER|$BKLIMIT"
jq -Rn '
( input | split("|") ) as $keys |
( inputs | split("|") ) as $vals |
[[$keys, $vals] | transpose[] | {key:.[0],value:.[1]}] | from_entries
' <<<"$backupinfo"
;;
# initial_server: run ON the backup server — create the per-client backup
# user and authorize the client's public key.
# Usage: wpo_backups_ovz.sh initial_server <bkuser> <base64-pubkey>
initial_server)
BKUSER="$2"
SSHPUBKEY="$3"
# If adduser fails (e.g. user already exists) and there is no home directory,
# remove the half-created user. NOTE(review): the user is not re-created
# afterwards, so the runuser/authorized_keys steps below would then fail —
# confirm whether a second run is expected to finish the job.
if ! adduser -b /home/wpo_users "$BKUSER" >/dev/null 2>&1; then
if [ ! -d /home/wpo_users/"$BKUSER" ]; then
userdel -r "$BKUSER" >/dev/null 2>&1
fi
fi
# Ensure the user's own keypair and a properly-permissioned authorized_keys.
runuser -l "$BKUSER" -c 'ssh-keygen -t ed25519 -f ~/.ssh/id_ed25519 -q -N "" <<< y >/dev/null 2>&1 ; touch ~/.ssh/authorized_keys ; chmod 600 ~/.ssh/authorized_keys'
echo "ssh-ed25519 $SSHPUBKEY" >> /home/wpo_users/"$BKUSER"/.ssh/authorized_keys
;;
# download: zip an existing backup of one site and hand it to backup_link.sh
# to produce a customer download link. Runs on the backup server (or locally
# when BKUSER is the literal '/backup').
# Usage: wpo_backups_ovz.sh download <bkuser|/backup> <domain> <backup-folder>
download)
BKUSER="$2"
DOMAIN="$3"
BACKUP="$4"
if [[ ${BKUSER} = '/backup' ]]
then
# Local backup drive: make sure 7z exists, then zip inside /backup/<folder>.
if ! rpm -q p7zip >/dev/null 2>&1
then
yum -q -y install p7zip p7zip-plugins >/dev/null 2>&1
fi
if ! cd /backup/"$BACKUP"
then
send_slack_alert "#wpo-backups" ":warning:" "Local download backup failed on $HOSTNAME" "N/A" "Tried to cd into /backup/$BACKUP on $HOSTNAME but failed during creating a backup for $DOMAIN"
exit
fi
# tar --warning=no-file-changed -zcf "$DOMAIN"-"$BACKUP".tar.gz "$DOMAIN"
7z a -tzip "$DOMAIN"-"$BACKUP".zip "$DOMAIN" 1> /dev/null
bash /bigscoots/wpo/backups/backup_link.sh "$DOMAIN"-"$BACKUP".zip local
else
# Remote backup server: zip inside the client user's backup folder.
if ! cd /home/wpo_users/"$BKUSER"/"$BACKUP"
then
send_slack_alert "#wpo-backups" ":warning:" "Tried to cd into /home/wpo_users/$BKUSER/$BACKUP on $HOSTNAME but failed during creating a backup for $DOMAIN" "N/A" "Download backup failed"
exit
fi
# tar --warning=no-file-changed -zcf "$DOMAIN"-"$BACKUP".tar.gz "$DOMAIN"
7z a -tzip "$DOMAIN"-"$BACKUP".zip "$DOMAIN" 1> /dev/null
bash /bigscoots/wpo/backups/backup_link.sh "$DOMAIN"-"$BACKUP".zip
fi
;;
# s2dropbox: zip one site's backup and upload it to Dropbox via
# Dropbox-Uploader. Only the local (/backup) destination is supported.
# Usage: wpo_backups_ovz.sh s2dropbox /backup <domain> <backup-folder>
s2dropbox)
# Only local backup supported at this time
BKUSER="$2"
DOMAIN="$3"
BACKUP="$4"
# NOTE(review): DATE is assigned but not used in this arm.
DATE=$(date "+%Y-%m-%d")
if [[ ${BKUSER} = '/backup' ]]
then
if ! rpm -q p7zip >/dev/null 2>&1
then
yum -q -y install p7zip p7zip-plugins >/dev/null 2>&1
fi
if ! cd /backup/"$BACKUP"
then
send_slack_alert "#wpo-backups" ":warning:" "Backup Not Found" "N/A" "Tried to cd into /backup/$BACKUP on $HOSTNAME but failed during creating a backup for $DOMAIN"
exit
fi
7z a -tzip "$DOMAIN"-"$BACKUP".zip "$DOMAIN" 1> /dev/null
bash /root/Dropbox-Uploader/dropbox_uploader.sh upload "$DOMAIN"-"$BACKUP".zip /
fi
;;
# Default arm: the scheduled daily backup run (no/unknown subcommand).
*)
LOCKFILE="/tmp/wpo_backups.lock"
# Check for existing lock; kill -0 verifies the recorded PID is still alive,
# so a stale lock from a crashed run does not block this one.
if [ -f "$LOCKFILE" ] && kill -0 "$(cat "$LOCKFILE")" 2>/dev/null; then
send_slack_alert "#wpo-backups" ":warning:" "WPO Backup" "-" "Daily Backup skipped because a backup is already running (PID: $(cat $LOCKFILE))."
exit 1
fi
# Create lock with current PID
echo $$ > "$LOCKFILE"
# Ensure lock is removed on script exit or failure
trap 'rm -f "$LOCKFILE"' EXIT
# Dump any extra (non-WordPress) databases registered with additional_dbs.sh.
bash /bigscoots/wpo/backups/additional_dbs.sh create
# Resolve optional backup host for mysqldump from JSON
DB_INFO_FILE="/root/.bigscoots/info/db"
DUMP_HOST_ARG=""
if [ -f "$DB_INFO_FILE" ]; then
if command -v jq >/dev/null 2>&1; then
# Prefer explicit connection_defaults.host_backups; fallback to a node with purpose "backups"
DB_BACKUP_HOST="$(jq -r '
.connection_defaults.host_backups // empty
// (.nodes[]? | select((.purposes? // []) | index("backups")) | .ip_private // .ip_public // empty)
' "$DB_INFO_FILE" 2>/dev/null | head -n1)"
else
# Fallback parser that survives small JSON formatting issues
DB_BACKUP_HOST="$(grep -oE '"host_backups"\s*:\s*"[^"]+"' "$DB_INFO_FILE" | sed -E 's/.*"host_backups"\s*:\s*"([^"]+)".*/\1/' | head -n1)"
if [ -z "$DB_BACKUP_HOST" ]; then
# Secondary fallback: first node that lists "backups" in purposes -> use
# ip_private (or ip_public).
# FIX: awk's sub()/gsub() replacement text supports only '&', not \1
# backreferences, so the previous gsub(...,"\\1") calls never extracted the
# IP. Strip the prefix up to the opening quote and everything from the
# closing quote instead.
DB_BACKUP_HOST="$(awk '
BEGIN{inNode=0;hasBackups=0;priv="";pub=""}
/\{/ {inNode++}
/\}/ {
if(inNode>0){
if(hasBackups && (priv!="" || pub!="")) { print (priv!=""?priv:pub); exit }
hasBackups=0; priv=""; pub=""
inNode--
}
}
/"purposes"[[:space:]]*:/ { if($0 ~ /backups/) hasBackups=1 }
/"ip_private"[[:space:]]*:/ { v=$0; sub(/.*"ip_private"[[:space:]]*:[[:space:]]*"/,"",v); sub(/".*/,"",v); priv=v }
/"ip_public"[[:space:]]*:/ { v=$0; sub(/.*"ip_public"[[:space:]]*:[[:space:]]*"/,"",v); sub(/".*/,"",v); pub=v }
' "$DB_INFO_FILE" 2>/dev/null)"
fi
fi
if [ -n "$DB_BACKUP_HOST" ]; then
DUMP_HOST_ARG="--host=$DB_BACKUP_HOST"
fi
fi
SKIPPED_DB_DOMAINS=()
KOPIA_EXCLUDE_ARGS=""
RSYNC_EXCLUDE_ARGS=""
# ------------------------------------------------------------------------------
# 1. DATABASE PHASE
# ------------------------------------------------------------------------------
# We use < <(find ...) at the end of the loop to prevent subshell variable loss
while read -r WPINSTALL
do
# Determine Canonical Name (e.g., example.com)
CANONICAL="${WPINSTALL%/public}"
CANONICAL="${CANONICAL##*/}"
# Check for existing API or Cron lock - If locked, skip the WHOLE site (DB + Files)
if [ -f "/opt/bs-backup/api-${CANONICAL}.lock" ] || [ -f "/opt/bs-backup/cron-${CANONICAL}.lock" ]; then
SKIPPED_DB_DOMAINS+=("$CANONICAL")
# Build exclusion strings
KOPIA_EXCLUDE_ARGS+="--exclude=/home/nginx/domains/${CANONICAL} "
RSYNC_EXCLUDE_ARGS+="--exclude=domains/${CANONICAL} "
continue
fi
# Process the Database Dump. The subshell scopes the lock trap (and any
# EXCLUDED_TABLES mutation) to this one site.
(
LOCK="/opt/bs-backup/cron-${CANONICAL}.lock"
touch "$LOCK"
trap 'rm -f "$LOCK"' EXIT
if wpcli core is-installed --path="$WPINSTALL" 2> /dev/null; then
if DB_NAME=$(wpcli config get DB_NAME --path="$WPINSTALL" 2> /dev/null); then
# Cleanup any stale SQL files
find "$WPINSTALL" -maxdepth 1 -type f -name "*.sql" -delete
DB_PREFIX="$(wpcli config get table_prefix --path="$WPINSTALL" 2> /dev/null)"
DB_TABLES=$(mysql --skip-column-names --batch -e "SHOW TABLES FROM $DB_NAME")
# Setup Excluded Tables (re-prefix each entry with this site's table prefix)
for i in "${!EXCLUDED_TABLES[@]}"; do
EXCLUDED_TABLE_NAME=${EXCLUDED_TABLES[i]#$DB_PREFIX}
EXCLUDED_TABLES[i]="${DB_PREFIX}${EXCLUDED_TABLE_NAME}"
done
# Include additional excluded tables from per-DB exclude file
EXCLUDE_FILE="${BSPATH}/backups/${DB_NAME}.tbl.exclude"
if [ -f "$EXCLUDE_FILE" ]; then
# Unquoted expansion is intentional here: the file is whitespace-separated.
ADDITIONAL_EXCLUDES=($(cat "$EXCLUDE_FILE"))
EXCLUDED_TABLES=("${EXCLUDED_TABLES[@]}" "${ADDITIONAL_EXCLUDES[@]}")
fi
COMBINED_SQL="$DB_NAME.sql"
: > "$WPINSTALL/$COMBINED_SQL"
# Excluded tables still get their schema dumped via --no-data.
for DB_TABLE in $DB_TABLES; do
if [[ ! " ${EXCLUDED_TABLES[@]} " =~ " ${DB_TABLE} " ]]; then
mysqldump $DUMP_HOST_ARG --routines --events --single-transaction --max_allowed_packet=512M "$DB_NAME" "$DB_TABLE" >> "$WPINSTALL/$COMBINED_SQL" 2>>"$WPINSTALL/database.err"
else
mysqldump $DUMP_HOST_ARG --no-data --routines --events --single-transaction --max_allowed_packet=512M "$DB_NAME" "$DB_TABLE" >> "$WPINSTALL/$COMBINED_SQL" 2>>"$WPINSTALL/database.err"
fi
done
# Compression for non-Kopia runs (Kopia snapshots the raw .sql instead)
if [ "$kopia" != "y" ]; then
gzip -c "$WPINSTALL/$COMBINED_SQL" > "$WPINSTALL/$DB_NAME.sql.gz"
[ -f "$WPINSTALL/$DB_NAME.sql.gz" ] && rm -f "$WPINSTALL/$COMBINED_SQL"
fi
fi
fi
)
done < <(find /home/nginx/domains/*/public/ -type d -name 'wp-includes' -exec dirname {} \;)
# ------------------------------------------------------------------------------
# 2. SLACK SUMMARY
# ------------------------------------------------------------------------------
# Post one summary message and list the skipped domains in a thread reply.
if [ ${#SKIPPED_DB_DOMAINS[@]} -gt 0 ]; then
SKIPPED_LIST=$(printf "%s\n" "${SKIPPED_DB_DOMAINS[@]}")
MSG="⚠️ *Backup Warning*: \`$(hostname)\` skipped ${#SKIPPED_DB_DOMAINS[@]} sites entirely (DB + Files) due to active API locks."
T_ID=$(send_slack_initial "$MSG" "#wpo-backups")
[ -n "$T_ID" ] && send_slack_thread "$T_ID" "Excluded domains: \n\`\`\`\n$SKIPPED_LIST\n\`\`\`" "#wpo-backups"
fi
# ------------------------------------------------------------------------------
# 3. KOPIA FILE BACKUP PHASE
# ------------------------------------------------------------------------------
# NOTE(review): initial_client has its own case arm, so $1 can never be
# "initial_client" inside this default arm — the extra check is redundant.
if [[ $kopia == y && $1 != "initial_client" ]]; then
# Move each site's fresh .sql dump from public/ into <site>/backup/db/ so
# Kopia snapshots it outside the web root.
find /home/nginx/domains/*/public/ -maxdepth 1 -type f -name '*.sql' | while read -r SQLDUMP; do
WP_BACKUP_DIR=$(dirname "$SQLDUMP" | sed 's/\/public$/\/backup\/db/')
mkdir -p "$WP_BACKUP_DIR"
mv "$SQLDUMP" "$WP_BACKUP_DIR/"
done
# Run Kopia with exclusions (unquoted on purpose: word-splits into
# individual --exclude=... arguments)
bash /bigscoots/wpo/manage/bsbackup_manager.sh run $KOPIA_EXCLUDE_ARGS
# Remove the dumps once snapshotted; Kopia retains them in the repository.
find /home/nginx/domains/*/backup/db/ -maxdepth 1 -type f -name '*.sql' -exec rm -f {} +
exit 0
fi
# ------------------------------------------------------------------------------
# 4. RSYNC BACKUP PHASE (Remote or Local)
# ------------------------------------------------------------------------------
# Incremental backup into incomplete_back-<ts>, hardlinked against "current"
# via --link-dest; on success rename to back-<ts> and repoint "current".
if [[ $remote == y ]]; then
# Remote rsync block: ensure a "current" symlink exists to hardlink against.
ssh -n -oStrictHostKeyChecking=no -i "${HOME}/.ssh/wpo_backups" "${BKUSER}@${BKSVR}" "[ ! -e current ] && latest_backup=\$(ls -1 | grep ^back | tail -1) && [ -n \"\$latest_backup\" ] && ln -s \"\$latest_backup\" current"
bash /bigscoots/tools/rsync.sh -ah \
-e "${SSHOPTIONS}" \
--ignore-errors \
--delete \
--delete-excluded \
$RSYNC_EXCLUDE_ARGS \
--exclude-from=/bigscoots/wpo/backups/rsync/exclude \
--exclude-from="${BSPATH}"/rsync/exclude \
--link-dest=../current \
"${HOMEDIR}" /usr/local/nginx/conf/wpincludes /usr/local/nginx/conf/conf.d "${RSYNCLOCATION}incomplete_back-${date}/"
ssh -n -oStrictHostKeyChecking=no -i "${HOME}"/.ssh/wpo_backups "${BKUSER}"@"${BKSVR}" "mv incomplete_back-$date back-${date} && rm -f current && ln -s back-${date} current"
else
# Local rsync block (same logic against the /backup mount)
[ ! -e /backup/current ] && latest_backup=$(ls -1 /backup/ | grep ^back | tail -1) && [ -n "$latest_backup" ] && ln -s /backup/"$latest_backup" /backup/current
bash /bigscoots/tools/rsync.sh -ah \
-e "$SSHOPTIONS" \
--ignore-errors \
--delete \
--delete-excluded \
$RSYNC_EXCLUDE_ARGS \
--exclude-from=/bigscoots/wpo/backups/rsync/exclude \
--exclude-from="$BSPATH"/rsync/exclude \
--link-dest=../current \
"${HOMEDIR}" /usr/local/nginx/conf/wpincludes /usr/local/nginx/conf/conf.d "${RSYNCLOCATION}incomplete_back-${date}/"
mv /backup/incomplete_back-"${date}" /backup/back-"${date}" && rm -f /backup/current && ln -s /backup/back-"${date}" /backup/current
fi
# Cleanup the additional (non-WordPress) database dumps created in step 1.
bash /bigscoots/wpo/backups/additional_dbs.sh cleanup
;;
esac
# Final cleanup: remove the per-site SQL dumps and mysqldump error logs left
# behind by the backup phases. Skipped for the one-off subcommands that never
# create dumps in the web roots (initial_*, download, s2dropbox).
if [[ ! $1 =~ (initial_*|download|s2dropbox) ]]
then
# FIX: read paths line-by-line instead of word-splitting $(find ...), so
# install paths containing spaces are handled correctly. Stripping the
# wp-config.php suffix leaves the install dir with its trailing slash, same
# as the old sed 's/wp-config.php//g'.
while IFS= read -r wpinstall
do
wpinstall="${wpinstall%wp-config.php}"
if wpcli config get DB_NAME --path="$wpinstall" > /dev/null 2>&1 ; then
dbname=$(wpcli config get DB_NAME --path="$wpinstall" 2> /dev/null)
rm -f "$wpinstall$dbname".sql "$wpinstall$dbname".sql.gz "$wpinstall"database.err
fi
done < <(find /home/nginx/domains/*/public/ -type f -name wp-config.php)
fi