File: //bigscoots/wpo/extras/les_audit.sh
#!/bin/bash
# BigScoots WPO Storage Audit
# Last modified by Les
# Check if this is a WPO server
if [ ! -d "/home/nginx/domains" ]; then
echo "This can only be used on WPO servers, exiting."
exit 1
fi
# Count the number of sites (directories) in /home/nginx/domains
SITE_COUNT=$(find /home/nginx/domains -maxdepth 1 -type d ! -path "/home/nginx/domains" | wc -l)
clear
echo -e "\033[1;36m🔍 QUICK STATUS:\033[0m"
CORES=$(nproc)
TOTAL_RAM_MB=$(free -m | awk 'NR==2{print $2}')
TOTAL_RAM_GB=$(echo "$TOTAL_RAM_MB" | awk '{printf "%.0fGB", $1/1024}')
SERVER_IP=$(ip a | grep 'inet ' | grep -v '127.0.0.1' | awk '{print $2}' | cut -d'/' -f1 | head -1)
CURRENT_LOAD=$(uptime | awk -F'load average:' '{print $2}' | awk '{print $1}' | sed 's/,//')
echo "Server: ${CORES} cores, ${TOTAL_RAM_GB} RAM"
echo "Hostname: $(hostname)"
echo "IP: ${SERVER_IP}"
# Function to calculate CPU usage percentage from load average
calculate_cpu_usage() {
local load_avg=$1
local cores=$2
if [ -n "$load_avg" ] && [ -n "$cores" ] && [ "$cores" -gt 0 ]; then
load_int=$(echo "$load_avg" | awk '{printf "%.0f", $1 * 100}')
cpu_percent=$((load_int / cores))
if [ "$cpu_percent" -lt 50 ]; then
echo "🟢 ${cpu_percent}%"
elif [ "$cpu_percent" -lt 80 ]; then
echo "🟡 ${cpu_percent}%"
elif [ "$cpu_percent" -lt 100 ]; then
echo "🟠 ${cpu_percent}%"
else
echo "🔴 ${cpu_percent}%"
fi
else
echo "Unknown"
fi
}
CPU_USAGE=$(calculate_cpu_usage "$CURRENT_LOAD" "$CORES")
echo "Load: $CURRENT_LOAD ($CPU_USAGE)"
echo "Memory: $(free | awk 'NR==2{printf "%.1f%%", $3*100/$2}')"
echo "MySQL Active: $(mysqladmin proc 2>/dev/null | grep -v Sleep | grep -v "show processlist" | wc -l)"
echo
echo -e "\033[1;34m📊 RESOURCE SUMMARY:\033[0m"
echo "Load: $(uptime | awk -F'load average:' '{print $2}')"
echo "Memory: $(free -h | grep Mem | awk '{print $3"/"$2}')"
DISK_INFO=$(df -h / | awk 'NR==2{print $3"/"$2" ("$5")"}')
DISK_PCT=$(df / | awk 'NR==2{gsub(/%/, "", $5); print $5}')
if [ "$DISK_PCT" -gt 80 ]; then
echo "Disk: $DISK_INFO ⚠️"
else
echo "Disk: $DISK_INFO"
fi
echo "Connections: HTTP=$(netstat -tun 2>/dev/null | grep :80 | wc -l) HTTPS=$(netstat -tun 2>/dev/null | grep :443 | wc -l)"
echo
echo -e "\033[1;33m💾 DISK ANALYSIS:\033[0m"
# Website Usage (Files and DB) - Calculate totals and sort by size
echo
echo -e "\033[1;32m🌐 WEBSITE USAGE (Files + Database):\033[0m"
ALL_WEBSITE_ENTRIES=""
TOTAL_STORAGE_BYTES=0
for domain_path in /home/nginx/domains/*/; do
if [ -d "$domain_path" ]; then
domain=$(basename "$domain_path")
# Get file size in bytes
file_size_bytes=$(du -sb "$domain_path" 2>/dev/null | awk '{print $1}')
if ! [[ "$file_size_bytes" =~ ^[0-9]+$ ]]; then
file_size_bytes=0
fi
# Check if it's a WordPress site and get DB size
db_size_bytes=0
if [ -f "$domain_path/public/wp-config.php" ]; then
DB_NAME=$(grep "define.*DB_NAME" "$domain_path/public/wp-config.php" 2>/dev/null | sed "s/.*['\"]DB_NAME['\"].*['\"]\\([^'\"]*\\)['\"].*/\\1/" | head -1)
DB_USER=$(grep "define.*DB_USER" "$domain_path/public/wp-config.php" 2>/dev/null | sed "s/.*['\"]DB_USER['\"].*['\"]\\([^'\"]*\\)['\"].*/\\1/" | head -1)
DB_PASSWORD=$(grep "define.*DB_PASSWORD" "$domain_path/public/wp-config.php" 2>/dev/null | sed "s/.*['\"]DB_PASSWORD['\"].*['\"]\\([^'\"]*\\)['\"].*/\\1/" | head -1)
if [ -n "$DB_NAME" ] && [ -n "$DB_USER" ] && [ -n "$DB_PASSWORD" ]; then
db_size_result=$(mysql -u "$DB_USER" -p"$DB_PASSWORD" -e "SELECT COALESCE(SUM(data_length + index_length), 0) AS size_bytes FROM information_schema.tables WHERE table_schema='$DB_NAME';" 2>/dev/null | tail -1)
if [[ "$db_size_result" =~ ^[0-9]+$ ]] && [ "$db_size_result" -gt 0 ]; then
db_size_bytes="$db_size_result"
fi
fi
fi
# Convert sizes to human readable format
file_size_human=$(echo "$file_size_bytes" | awk '{
if ($1 >= 1099511627776) printf "%.1fT", $1/1099511627776
else if ($1 >= 1073741824) printf "%.1fG", $1/1073741824
else if ($1 >= 1048576) printf "%.1fM", $1/1048576
else if ($1 >= 1024) printf "%.1fK", $1/1024
else printf "%dB", $1
}')
db_size_human=$(echo "$db_size_bytes" | awk '{
if ($1 >= 1099511627776) printf "%.1fT", $1/1099511627776
else if ($1 >= 1073741824) printf "%.1fG", $1/1073741824
else if ($1 >= 1048576) printf "%.1fM", $1/1048576
else if ($1 >= 1024) printf "%.1fK", $1/1024
else printf "%dB", $1
}')
# Calculate the combined total; awk prints "human|bytes" so one call yields both formats
total_calculation=$(awk -v file_b="$file_size_bytes" -v db_b="$db_size_bytes" 'BEGIN {
total = file_b + db_b
if (total >= 1099511627776) printf "%.1fT|%.0f", total/1099511627776, total
else if (total >= 1073741824) printf "%.1fG|%.0f", total/1073741824, total
else if (total >= 1048576) printf "%.1fM|%.0f", total/1048576, total
else if (total >= 1024) printf "%.1fK|%.0f", total/1024, total
else printf "%dB|%.0f", total, total
}')
total_size_human=$(echo "$total_calculation" | cut -d'|' -f1)
total_size_bytes=$(echo "$total_calculation" | cut -d'|' -f2)
TOTAL_STORAGE_BYTES=$((TOTAL_STORAGE_BYTES + total_size_bytes))
# Determine if staging
if echo "$domain" | grep -q "bigscoots-staging"; then
staging_prefix="(Staging) "
else
staging_prefix=""
fi
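# Prefix each entry with a zero-padded byte count: it acts as the numeric key for the final sort -nr and is stripped before display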
entry_line=$(printf "%020.0f %s %s%s → (%s Files + %s Database)" "$total_size_bytes" "$total_size_human" "$staging_prefix" "$domain" "$file_size_human" "$db_size_human")
# Store database info for large database analysis
if [ "$db_size_bytes" -gt 1610612736 ]; then # 1.5GB in bytes
echo "$domain|$db_name|$db_user|$db_password|$db_size_human" >> /tmp/large_dbs_$$.tmp
fi
if [ -n "$ALL_WEBSITE_ENTRIES" ]; then
ALL_WEBSITE_ENTRIES="$ALL_WEBSITE_ENTRIES
$entry_line"
else
ALL_WEBSITE_ENTRIES="$entry_line"
fi
fi
done
# Calculate and display total storage usage
TOTAL_STORAGE_HUMAN=$(echo "$TOTAL_STORAGE_BYTES" | awk '{
if ($1 >= 1099511627776) printf "%.1fT", $1/1099511627776
else if ($1 >= 1073741824) printf "%.1fG", $1/1073741824
else if ($1 >= 1048576) printf "%.1fM", $1/1048576
else if ($1 >= 1024) printf "%.1fK", $1/1024
else printf "%dB", $1
}')
echo "TOTAL STORAGE USAGE: $TOTAL_STORAGE_HUMAN"
echo
# Sort by total size (descending) and format for display
if [ -n "$ALL_WEBSITE_ENTRIES" ]; then
echo "$ALL_WEBSITE_ENTRIES" | sort -nr | awk '{$1=""; printf " %s\n", substr($0,2)}'
fi
# Show large database table analysis if any databases > 1.5GB found
if [ -f "/tmp/large_dbs_$$.tmp" ]; then
echo
echo -e "\033[1;31m🗄️ LARGE DATABASE TABLE ANALYSIS (>1.5GB):\033[0m"
while IFS='|' read -r domain db_name db_user db_password db_size_human; do
echo " Database: $domain ($db_size_human)"
domain_path="/home/nginx/domains/$domain/public"
if [ -d "$domain_path" ]; then
cd "$domain_path" 2>/dev/null && wp db query "
SELECT
table_name AS 'Table Name',
ROUND(((data_length + index_length) / 1024 / 1024), 2) AS 'Size (MB)'
FROM information_schema.tables
WHERE table_schema = DATABASE()
ORDER BY (data_length + index_length) DESC
LIMIT 5;
" --allow-root 2>/dev/null | tail -n +2 | awk '{printf " ├─ %-4s MB %s\n", $2, $1}'
fi
done < /tmp/large_dbs_$$.tmp
rm -f /tmp/large_dbs_$$.tmp
fi
# Collect all plugin backup/cache usage (filter >= 10M) and sort by size
ALL_PLUGIN_ENTRIES=""
# UpdraftPlus backups
UPDRAFT_USAGE=$(find /home/nginx/domains -type d -path "*/wp-content/updraft" -exec du -sh {} \; 2>/dev/null | awk '$1 ~ /[1-9][0-9]+M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ {print}')
if [ -n "$UPDRAFT_USAGE" ]; then
UPDRAFT_FORMATTED=$(echo "$UPDRAFT_USAGE" | awk '{
size = $1;
path = $2;
gsub(/\/home\/nginx\/domains\//, "", path);
domain = path;
gsub(/\/.*/, "", domain);
if (domain ~ /bigscoots-staging/) {
printf "%s %s (Staging) UpdraftPlus (%s)\n", size, size, path
} else {
printf "%s %s UpdraftPlus (%s)\n", size, size, path
}
}')
if [ -n "$ALL_PLUGIN_ENTRIES" ]; then
ALL_PLUGIN_ENTRIES="$ALL_PLUGIN_ENTRIES
$UPDRAFT_FORMATTED"
else
ALL_PLUGIN_ENTRIES="$UPDRAFT_FORMATTED"
fi
fi
# Imagify backup directories
IMAGIFY_USAGE=$(find /home/nginx/domains -type d -path "*/wp-content/uploads/backup" -exec du -sh {} \; 2>/dev/null | awk '$1 ~ /[1-9][0-9]+M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ {print}')
if [ -n "$IMAGIFY_USAGE" ]; then
IMAGIFY_FORMATTED=$(echo "$IMAGIFY_USAGE" | awk '{
size = $1;
path = $2;
gsub(/\/home\/nginx\/domains\//, "", path);
domain = path;
gsub(/\/.*/, "", domain);
if (domain ~ /bigscoots-staging/) {
printf "%s %s (Staging) Imagify backups (%s)\n", size, size, path
} else {
printf "%s %s Imagify backups (%s)\n", size, size, path
}
}')
if [ -n "$ALL_PLUGIN_ENTRIES" ]; then
ALL_PLUGIN_ENTRIES="$ALL_PLUGIN_ENTRIES
$IMAGIFY_FORMATTED"
else
ALL_PLUGIN_ENTRIES="$IMAGIFY_FORMATTED"
fi
fi
# ShortPixel directories
SHORTPIXEL_USAGE=$(find /home/nginx/domains -type d \( -path "*/wp-content/uploads/*" -name "*shortpixel*" -o -path "*/wp-content/uploads/*" -name "*ShortpixelBackups*" \) -exec du -sh {} \; 2>/dev/null | awk '$1 ~ /[1-9][0-9]+M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ {print}')
if [ -n "$SHORTPIXEL_USAGE" ]; then
SHORTPIXEL_FORMATTED=$(echo "$SHORTPIXEL_USAGE" | awk '{
size = $1;
path = $2;
gsub(/\/home\/nginx\/domains\//, "", path);
domain = path;
gsub(/\/.*/, "", domain);
if (domain ~ /bigscoots-staging/) {
printf "%s %s (Staging) ShortPixel (%s)\n", size, size, path
} else {
printf "%s %s ShortPixel (%s)\n", size, size, path
}
}')
if [ -n "$ALL_PLUGIN_ENTRIES" ]; then
ALL_PLUGIN_ENTRIES="$ALL_PLUGIN_ENTRIES
$SHORTPIXEL_FORMATTED"
else
ALL_PLUGIN_ENTRIES="$SHORTPIXEL_FORMATTED"
fi
fi
# WP Smush WebP cache directories
SMUSH_USAGE=$(find /home/nginx/domains -type d -path "*/wp-content/smush-webp" -exec du -sh {} \; 2>/dev/null | awk '$1 ~ /[1-9][0-9]+M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ {print}')
if [ -n "$SMUSH_USAGE" ]; then
SMUSH_FORMATTED=$(echo "$SMUSH_USAGE" | awk '{
size = $1;
path = $2;
gsub(/\/home\/nginx\/domains\//, "", path);
domain = path;
gsub(/\/.*/, "", domain);
if (domain ~ /bigscoots-staging/) {
printf "%s %s (Staging) WP Smush WebP cache (%s)\n", size, size, path
} else {
printf "%s %s WP Smush WebP cache (%s)\n", size, size, path
}
}')
if [ -n "$ALL_PLUGIN_ENTRIES" ]; then
ALL_PLUGIN_ENTRIES="$ALL_PLUGIN_ENTRIES
$SMUSH_FORMATTED"
else
ALL_PLUGIN_ENTRIES="$SMUSH_FORMATTED"
fi
fi
# WP Rocket and other cache directories
CACHE_USAGE=$(find /home/nginx/domains -type d -path "*/wp-content/cache" -exec du -sh {} \; 2>/dev/null | awk '$1 ~ /[1-9][0-9]+M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ {print}')
if [ -n "$CACHE_USAGE" ]; then
CACHE_FORMATTED=$(echo "$CACHE_USAGE" | while read -r line; do
size=$(echo "$line" | awk '{print $1}')
path=$(echo "$line" | awk '{print $2}' | sed 's|/home/nginx/domains/||g')
domain=$(echo "$path" | cut -d'/' -f1)
if echo "$domain" | grep -q "bigscoots-staging"; then
staging_prefix="(Staging) "
else
staging_prefix=""
fi
if find "$(echo "$line" | awk '{print $2}')" -type d -name "*wp-rocket*" -print -quit 2>/dev/null | grep -q .; then
echo "$size $size ${staging_prefix}WP Rocket cache ($domain)"
else
echo "$size $size ${staging_prefix}Cache ($domain)"
fi
done)
if [ -n "$ALL_PLUGIN_ENTRIES" ]; then
ALL_PLUGIN_ENTRIES="$ALL_PLUGIN_ENTRIES
$CACHE_FORMATTED"
else
ALL_PLUGIN_ENTRIES="$CACHE_FORMATTED"
fi
fi
# All-in-One WP Migration backups
AI1WM_USAGE=$(find /home/nginx/domains -type d -path "*/wp-content/ai1wm-backups" -exec du -sh {} \; 2>/dev/null | awk '$1 ~ /[1-9][0-9]+M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ {print}')
if [ -n "$AI1WM_USAGE" ]; then
AI1WM_FORMATTED=$(echo "$AI1WM_USAGE" | awk '{
size = $1;
path = $2;
gsub(/\/home\/nginx\/domains\//, "", path);
domain = path;
gsub(/\/.*/, "", domain);
if (domain ~ /bigscoots-staging/) {
printf "%s %s (Staging) ai1wm (%s)\n", size, size, path
} else {
printf "%s %s ai1wm (%s)\n", size, size, path
}
}')
if [ -n "$ALL_PLUGIN_ENTRIES" ]; then
ALL_PLUGIN_ENTRIES="$ALL_PLUGIN_ENTRIES
$AI1WM_FORMATTED"
else
ALL_PLUGIN_ENTRIES="$AI1WM_FORMATTED"
fi
fi
# Sort all plugin entries by size and format for display
if [ -n "$ALL_PLUGIN_ENTRIES" ]; then
echo
echo -e "\033[1;33m🗂️ PLUGIN BACKUP/CACHE USAGE (>10MB):\033[0m"
echo "$ALL_PLUGIN_ENTRIES" | sort -hr | awk '{$1=""; printf " %s\n", substr($0,2)}'
fi
# ManageWP and other backup plugins (>10MB only)
BACKUP_DIRS=$(find /home/nginx/domains -type d \( -iname "*backup*" -o -iname "*bkup*" -o -path "*/blogvault*" -o -path "*/worker*backup*" -o -path "*/managewp*" \) ! -path "*/wp-content/uploads/backup" ! -path "*ShortpixelBackups*" ! -path "*/wp-content/ai1wm-backups*" ! -path "*/ai1wm-backups" -exec du -sh {} \; 2>/dev/null | awk '$1 ~ /[0-9]+[0-9]M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ {if($1+0 >= 10 || $1 ~ /[GT]/) print}' | sort -hr | head -5)
if [ -n "$BACKUP_DIRS" ]; then
echo
echo -e "\033[1;35m📄 OTHER BACKUP DIRECTORIES (>10MB):\033[0m"
echo "$BACKUP_DIRS" | awk '{printf " %s (%s)\n", $1, $2}' | sed 's|/home/nginx/domains/||g'
fi
# Check for large nginx logs
LARGE_LOGS=$(find /usr/local/nginx/logs -type f -name "*.log" -size +300M -exec du -h {} \; 2>/dev/null | sort -hr)
if [ -n "$LARGE_LOGS" ]; then
echo
echo -e "\033[1;31m📋 LARGE NGINX LOGS (>300MB):\033[0m"
echo "$LARGE_LOGS" | awk '{printf " %s\n", $0}' | head -5
fi
# Find largest directories (>300MB), excluding year-based upload directories, month subdirectories, and already-shown plugin directories
echo
echo -e "\033[1;34m📁 LARGEST DIRECTORIES (>300MB, excluding uploads/YEAR/MONTH and plugin caches):\033[0m"
# Get all directories with their sizes
ALL_DIRS=$(du -h /home/nginx/domains 2>/dev/null | awk '$1 ~ /[3-9][0-9][0-9]M|[0-9]+(\.[0-9]+)?G|[0-9]+(\.[0-9]+)?T/ && $2 !~ /\/wp-content\/uploads\/[0-9]{4}(\/[0-9]{2})?\/?$/ && $2 !~ /\/wp-content\/uploads\/backup(\/.*)?$/ && $2 !~ /\/wp-content\/updraft(\/.*)?$/ && $2 !~ /\/wp-content\/smush-webp(\/.*)?$/ && $2 !~ /\/wp-content\/cache(\/.*)?$/ && $2 !~ /ShortpixelBackups/')
if [ -n "$ALL_DIRS" ]; then
# Convert to bytes for proper sorting, then display cleanly
echo "$ALL_DIRS" | while read -r size_human path; do
clean_path=$(echo "$path" | sed 's|/home/nginx/domains/||g')
# Convert human readable size to bytes for sorting
size_bytes=$(echo "$size_human" | awk '{
if ($1 ~ /T$/) { gsub(/T/, "", $1); print int($1 * 1099511627776) }
else if ($1 ~ /G$/) { gsub(/G/, "", $1); print int($1 * 1073741824) }
else if ($1 ~ /M$/) { gsub(/M/, "", $1); print int($1 * 1048576) }
else if ($1 ~ /K$/) { gsub(/K/, "", $1); print int($1 * 1024) }
else { print int($1) }
}')
echo "$size_bytes|$size_human|$clean_path"
done | sort -t'|' -k1,1nr > /tmp/hierarchy_dirs_$$.tmp
# Filter out parent directories - keep only directories that don't have a child directory with >= 80% of their size
while IFS='|' read -r size_bytes size_human path; do
is_parent=false
# Check if any subdirectory has >= 80% of this directory's size
while IFS='|' read -r child_size_bytes child_size_human child_path; do
if [[ "$child_path" == "$path"/* ]] && [ "$child_size_bytes" -ge $((size_bytes * 80 / 100)) ]; then
is_parent=true
break
fi
done < /tmp/hierarchy_dirs_$$.tmp
if [ "$is_parent" = false ]; then
echo "$size_human|$path"
fi
done < /tmp/hierarchy_dirs_$$.tmp > /tmp/filtered_dirs_$$.tmp
# Clean display of top largest directories globally
while IFS='|' read -r size_human clean_path; do
# Skip the root /home/nginx/domains entry
if [ "$clean_path" = "" ] || [ "$clean_path" = "/home/nginx/domains" ]; then
continue
fi
# Show the full path with consistent spacing
printf " %-4s %s\n" "$size_human" "$clean_path"
done < /tmp/filtered_dirs_$$.tmp | head -15
rm -f /tmp/hierarchy_dirs_$$.tmp /tmp/filtered_dirs_$$.tmp
fi
# Find largest individual files (>100MB, excluding backup files already shown)
LARGEST_FILES=$(find /home/nginx/domains -type f -size +100M ! -path "*/updraft/*" ! -path "*/backup*" ! -iname "*backup*" ! -iname "*.wpress" -exec du -h {} \; 2>/dev/null | sort -hr | head -15 | awk '{printf " %s\n", $0}' | sed 's|/home/nginx/domains/||g')
if [ -n "$LARGEST_FILES" ]; then
echo
echo -e "\033[1;36m📄 LARGEST INDIVIDUAL FILES (>100MB, excluding plugin backups):\033[0m"
echo "$LARGEST_FILES"
fi
echo
echo -e "\033[1;32m✅ Storage analysis complete!\033[0m"