Add cron job for disk usage snapshot

Author: Peca
Date: 2025-08-12 13:58:17 +02:00
Commit: 457e5c862e
5 changed files with 151 additions and 0 deletions

102  bin/v-df-snapshot-diff  Normal file

@@ -0,0 +1,102 @@
#!/bin/bash
# info: Make a diff between two snapshots of the disk usage
# options: FILE1 FILE2
whoami=$(whoami)
if [ "$whoami" != "root" ]; then
echo "You must be root to execute this script"
exit 1
fi
# Let's declare three associative arrays
declare -A FILE1
declare -A FILE2
declare -A FILED
file1=$1
file2=$2
if [[ ! "$file1" =~ ^/usr/local/vesta/data/df/snapshot-.*\.txt$ ]]; then
file1="/usr/local/vesta/data/df/$file1"
fi
if [[ ! "$file2" =~ ^/usr/local/vesta/data/df/snapshot-.*\.txt$ ]]; then
file2="/usr/local/vesta/data/df/$file2"
fi
if [ ! -f "$file1" ]; then
echo "File $file1 not found"
exit 1
fi
if [ ! -f "$file2" ]; then
echo "File $file2 not found"
exit 1
fi
timestamp=$(date +%Y-%m-%d-%H-%M-%S)
mkdir -p /usr/local/vesta/data/df-diff
file0="/usr/local/vesta/data/df-diff/diff-$timestamp.txt"
file0s="/usr/local/vesta/data/df-diff/diff-size-sorted-$timestamp.txt"
file0f="/usr/local/vesta/data/df-diff/diff-folder-sorted-$timestamp.txt"
touch "$file0"
# Let's load the first file and fill the array FILE1
while IFS=$'\t' read -r SIZE DIRECTORY; do
# Skip blank lines or lines that are not in the correct format
[[ -z "$DIRECTORY" ]] && continue
[[ "$DIRECTORY" = "total" ]] && continue
# Insert values into the array
FILE1["$DIRECTORY"]="$SIZE"
done < "$file1"
# Let's load the second file and fill the array FILE2
while IFS=$'\t' read -r SIZE DIRECTORY; do
# Skip blank lines or lines that are not in the correct format
[[ -z "$DIRECTORY" ]] && continue
[[ "$DIRECTORY" = "total" ]] && continue
# Insert values into the array
FILE2["$DIRECTORY"]="$SIZE"
done < "$file2"
# We iterate through FILE1 and look for the matching key in FILE2
for k in "${!FILE1[@]}"; do
if [[ -v FILE2["$k"] ]]; then
# If there is the same folder (KEY) in FILE2
DIFF=$(( ${FILE2[$k]} - ${FILE1[$k]} ))
FILED["$k"]=$DIFF
echo -e "${DIFF}\t${k}" >> $file0
else
# If the folder (KEY) is not found in FILE2
FILED["$k"]=${FILE1["$k"]}
echo -e "${FILE1["$k"]}\t${k}" >> $file0
fi
done
# sorted by size
sort -nr -k1,1 $file0 > $file0s
# sorted by folders
while IFS=$'\t' read -r SIZE DIRECTORY; do
[[ -z "$DIRECTORY" ]] && continue
[[ "$DIRECTORY" = "total" ]] && continue
# Skip folders that only appear in the second snapshot (no entry in FILED)
[[ -v FILED["$DIRECTORY"] ]] || continue
echo -e "$DIRECTORY\t${FILED["$DIRECTORY"]}" >> "$file0f"
done < "$file2"
chmod 600 "$file0" "$file0s" "$file0f"
chown root:root "$file0" "$file0s" "$file0f"
echo "Done."
echo "You can do:"
echo "mcview $file0"
echo "mcview $file0s"
echo "mcview $file0f"
echo "--------------------------------"
echo "Here is the first 30 lines of the diff, sorted by size (descending, in MB):"
head -n 30 $file0s
echo "--------------------------------"
echo "Here is the first 30 lines of the diff, sorted by folders (in MB):"
head -n 30 $file0f
echo "--------------------------------"
exit 0
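A usage sketch, assuming the scripts are installed under /usr/local/vesta/bin; the snapshot filenames below are illustrative examples, and bare filenames are resolved against /usr/local/vesta/data/df:

sudo /usr/local/vesta/bin/v-df-snapshot-diff snapshot-2025-08-11-04-05-01.txt snapshot-2025-08-12-04-05-01.txt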

11  bin/v-df-snapshot-logs-cleaner  Normal file

@@ -0,0 +1,11 @@
#!/bin/bash
# info: Clean up old snapshots of the disk usage
# options: NONE
folder="/usr/local/vesta/data/df"
mkdir -p $folder
find $folder -type f -mtime +30 -delete
folder="/usr/local/vesta/data/df-diff"
mkdir -p $folder
find $folder -type f -mtime +30 -delete
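To preview what the cleaner would remove without deleting anything, the same find expression can be run with -print instead of -delete (a dry-run sketch, using the directories from this script):

find /usr/local/vesta/data/df /usr/local/vesta/data/df-diff -type f -mtime +30 -print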

20  bin/v-df-snapshot-make  Normal file

@@ -0,0 +1,20 @@
#!/bin/bash
# info: Make a snapshot of the disk usage
# options: NONE
folder="/usr/local/vesta/data/df"
mkdir -p "$folder"
timestamp=$(date +%Y-%m-%d-%H-%M-%S)
snapshot="$folder/snapshot-$timestamp.txt"
du --max-depth=1 -c -m -x / > "$snapshot"
du --max-depth=1 -c -m -x /home >> "$snapshot"
du --max-depth=2 -c -m -x /home >> "$snapshot"
du --max-depth=3 -c -m -x /home >> "$snapshot"
du --max-depth=6 -c -m -x /home >> "$snapshot"
du --max-depth=1 -c -m -x /var/lib/mysql >> "$snapshot"
du --max-depth=1 -c -m -x /var/log >> "$snapshot"
chmod 600 "$snapshot"
chown root:root "$snapshot"
exit 0
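The snapshot files are plain du output in the format v-df-snapshot-diff parses: size in MB, a tab, then the directory, plus a "total" line per du invocation that the diff script skips. An illustrative excerpt with made-up sizes and paths:

2048	/home/user1
512	/home/user2
2560	total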


@@ -1896,6 +1896,10 @@ command="sudo $VESTA/bin/v-update-sys-rrd"
$VESTA/bin/v-add-cron-job 'admin' '*/5' '*' '*' '*' '*' "$command"
command="sudo $VESTA/bin/v-fix-website-permissions-for-all-websites-only-php"
$VESTA/bin/v-add-cron-job 'admin' '05' '03' '*' '*' '*' "$command"
command="sudo $VESTA/bin/v-df-snapshot-make"
$VESTA/bin/v-add-cron-job 'admin' '05' '04' '*' '*' '*' "$command"
command="sudo $VESTA/bin/v-df-snapshot-logs-cleaner"
$VESTA/bin/v-add-cron-job 'admin' '10' '04' '*' '*' '*' "$command"
systemctl restart cron.service
echo "== Building initial rrd images"


@@ -38,6 +38,20 @@ if grep -q "fix-website-permissions-for-all-websites" /usr/local/vesta/data/user
fi
fi
# Adding cron job for disk usage snapshot
if ! grep -q "v-df-snapshot-make" /usr/local/vesta/data/users/admin/cron.conf; then
echo "== Adding cron job for disk usage snapshot"
command="sudo $VESTA/bin/v-df-snapshot-make"
$VESTA/bin/v-add-cron-job 'admin' '05' '04' '*' '*' '*' "$command"
systemctl restart cron.service
fi
if ! grep -q "v-df-snapshot-logs-cleaner" /usr/local/vesta/data/users/admin/cron.conf; then
echo "== Adding cron job for disk usage snapshot logs cleaner"
command="sudo $VESTA/bin/v-df-snapshot-logs-cleaner"
$VESTA/bin/v-add-cron-job 'admin' '10' '04' '*' '*' '*' "$command"
systemctl restart cron.service
fi
# Fixing PHP and .env permissions and ownership for all websites
if ! grep -q "fix-website-permissions-for-all-websites-only-php" /usr/local/vesta/data/users/admin/cron.conf; then
echo "== Fixing PHP and .env permissions and ownership for all websites"