Mirror of https://github.com/myvesta/vesta
Calculate size of directories on /hdd too

commit 0d86e2ca40 (parent 8bdfade3d4)
5 changed files with 29 additions and 4 deletions
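In short, every place that measured only the $HOMEDIR path now also checks the matching path under /hdd and adds its size to the total. A condensed sketch of the pattern the hunks below apply (variable names follow the diff; hdd_dir is introduced here only for readability):

    # Measure the primary home directory, then add the /hdd counterpart when it
    # exists as a real directory (not a symlink), and keep the combined total.
    disk_usage=$(nice -n 19 du -shm "$HOMEDIR/$user/web/$domain/" | cut -f 1)
    hdd_dir="/hdd$HOMEDIR/$user/web/$domain/"
    if [ -e "$hdd_dir" ] && [[ ! -L "$hdd_dir" ]]; then
        disk_usage2=$(nice -n 19 du -shm "$hdd_dir" | cut -f 1)
        disk_usage=$(( disk_usage + disk_usage2 ))
    fi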
@@ -49,7 +49,8 @@ dom_diks=0
 for account in $(search_objects "mail/$domain" 'SUSPENDED' "no" 'ACCOUNT'); do
     home_dir=$HOMEDIR/$user/mail/$domain/$account
     if [ -e "$home_dir" ]; then
-        udisk=$(nice -n 19 du -shm $home_dir | cut -f 1 )
+        cd $home_dir
+        udisk=$(nice -n 19 du -shm ./ | cut -f 1 )
     else
         udisk=0
     fi
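A likely motivation for the cd change above: du does not dereference a symlink passed as an argument, so if a mail account directory has been moved to /hdd and replaced with a symlink, du -shm $home_dir reports only the link itself. Changing into the directory first resolves the link, and du then measures the real contents. Illustrative session (paths and sizes are examples, not taken from the commit):

    $ ls -ld /home/admin/mail/example.com/info
    lrwxrwxrwx 1 admin mail 38 ... /home/admin/mail/example.com/info -> /hdd/home/admin/mail/example.com/info
    $ du -shm /home/admin/mail/example.com/info | cut -f 1
    0
    $ cd /home/admin/mail/example.com/info && du -shm ./ | cut -f 1
    57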
@@ -35,9 +35,9 @@ fi
 #----------------------------------------------------------#

 # Starting loop
-for domain in $(search_objects 'mail' 'SUSPENDED' "no" 'DOMAIN'); do
+for domain in $(list_objects 'mail' 'DOMAIN'); do
     dom_diks=0
-    accounts=$(search_objects "mail/$domain" 'SUSPENDED' "no" 'ACCOUNT')
+    accounts=$(list_objects "mail/$domain" 'ACCOUNT')
     for account in $accounts; do
         home_dir=$HOMEDIR/$user/mail/$domain/$account
         if [ -e "$home_dir" ]; then
@@ -50,6 +50,14 @@ if [ -e "$home_dir" ]; then
     disk_usage=$(nice -n 19 du -shm $home_dir | cut -f 1 )
 fi

+# Defining hdd home directory
+home_dir="/hdd$HOMEDIR/$user/web/$domain/"
+
+# Checking home directory exist
+if [ -e "$home_dir" ] && [[ ! -L "$home_dir" ]]; then
+    disk_usage2=$(nice -n 19 du -shm $home_dir | cut -f 1 )
+    disk_usage=$(( disk_usage + disk_usage2 ))
+fi

 #----------------------------------------------------------#
 #                          Vesta                            #
@@ -32,11 +32,16 @@ is_object_valid 'user' 'USER' "$user"
 #----------------------------------------------------------#

 # Domain loop
-for domain in $(search_objects 'web' 'SUSPENDED' "no" 'DOMAIN'); do
+for domain in $(list_objects 'web' 'DOMAIN'); do
     home_dir="$HOMEDIR/$user/web/$domain/"
     if [ -e "$home_dir" ]; then
         disk_usage=$(nice -n 19 du -shm $home_dir | cut -f 1 )
     fi
+    home_dir="/hdd$HOMEDIR/$user/web/$domain/"
+    if [ -e "$home_dir" ] && [[ ! -L "$home_dir" ]]; then
+        disk_usage2=$(nice -n 19 du -shm $home_dir | cut -f 1 )
+        disk_usage=$(( disk_usage + disk_usage2 ))
+    fi
     update_object_value 'web' 'DOMAIN' "$domain" '$U_DISK' "$disk_usage"
 done

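For reference, the parsing used throughout these scripts: du -s prints one summary line per argument, with -m the size comes out in whole megabytes, and cut -f 1 keeps only the number before the tab-separated path, so disk_usage and disk_usage2 end up as plain integers that $(( ... )) can add before update_object_value stores the total. Illustrative run (the size shown is made up):

    $ du -shm /home/admin/web/example.com/
    184     /home/admin/web/example.com/
    $ du -shm /home/admin/web/example.com/ | cut -f 1
    184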
func/main.sh (11 changed lines):
@@ -359,6 +359,17 @@ search_objects() {
     IFS="$OLD_IFS"
 }

+# List objects
+list_objects() {
+    OLD_IFS="$IFS"
+    IFS=$'\n'
+    for line in $(cat $USER_DATA/$1.conf); do
+        eval $line
+        eval echo \$$2
+    done
+    IFS="$OLD_IFS"
+}
+
 # Get user value
 get_user_value() {
     grep "^${1//$/}=" $USER_DATA/user.conf |awk -F "'" '{print $2}'
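For context on the new helper: each line of a $USER_DATA/*.conf file is a series of KEY='value' assignments describing one object, so list_objects evals every line and echoes the requested key, without the SUSPENDED filtering that search_objects applies. A usage example (the conf line below is abbreviated and illustrative):

    # $USER_DATA/web.conf, one line per domain:
    # DOMAIN='example.com' IP='203.0.113.10' U_DISK='184' SUSPENDED='no' ...

    list_objects 'web' 'DOMAIN'
    # -> example.com        (one line per domain in web.conf)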