Update disk-check.sh

colin 2024-11-30 12:51:41 -05:00
parent cdd8e6e47b
commit d038f4a6d4
1 changed file with 67 additions and 92 deletions


@@ -1,116 +1,91 @@
 #!/bin/bash
-REPORT_FILE="/tmp/disk_report.txt"
-TMP_OUTPUT="/tmp/du_output.txt"
-HASTE_URL="https://haste.nixc.us/documents"
-echo "Starting disk space report... This may take a few minutes." > "$REPORT_FILE"
-# Check if Docker is installed
-is_docker_installed() {
-    if ! command -v docker &>/dev/null; then
-        echo "Docker is not installed. Skipping Docker-related checks." >> "$REPORT_FILE"
-        return 1
-    fi
-    return 0
+echo "Starting disk space report... This may take a few minutes."
+# Function to estimate the size of the apt cache
+estimate_apt_cache_size() {
+    echo "Estimating the size of the apt cache:"
+    sudo du -sh /var/cache/apt
+    echo
 }
-# Function to estimate directory sizes
-estimate_size() {
-    local description=$1
-    local path=$2
-    echo "Estimating the size of $description:" >> "$REPORT_FILE"
-    sudo du -sh "$path" 2>/dev/null >> "$REPORT_FILE" || echo "Error estimating $description size." >> "$REPORT_FILE"
-    echo >> "$REPORT_FILE"
+# Function to estimate the size of old installed packages
+estimate_old_packages_size() {
+    echo "Estimating the size of old installed packages:"
+    sudo du -sh /var/lib/apt/lists /var/lib/apt/lists/partial
+    echo
 }
-# Estimate unused Docker volumes
+# Function to estimate the size of journal logs
+estimate_journal_size() {
+    echo "Estimating the size of journal logs:"
+    sudo journalctl --disk-usage
+    echo
+}
+# Function to estimate the size of temporary files
+estimate_tmp_size() {
+    echo "Estimating the size of temporary files:"
+    sudo du -sh /tmp /var/tmp
+    echo
+}
+# Function to estimate the size of unused Docker volumes
 estimate_docker_volumes_size() {
-    if is_docker_installed; then
-        echo "Estimating the size of unused Docker volumes:" >> "$REPORT_FILE"
-        docker volume ls -qf dangling=true | \
-            xargs -I {} docker volume inspect --format '{{ .Mountpoint }}' {} 2>/dev/null | \
-            xargs -I {} sudo du -sh {} 2>/dev/null | \
-            awk '{ sum += $1 } END { print sum "B" }' >> "$REPORT_FILE" || echo "Error estimating Docker volumes." >> "$REPORT_FILE"
-        echo >> "$REPORT_FILE"
-    fi
+    echo "Estimating the size of unused Docker volumes:"
+    docker volume ls -qf dangling=true | xargs -I {} docker volume inspect --format '{{ .Mountpoint }}' {} | xargs -I {} sudo du -sh {} | awk '{ sum += $1 } END { print sum "B" }'
+    echo
 }
-# Check large Docker logs
+# Function to check and suggest logrotate for large Docker logs
 check_docker_logs() {
-    if is_docker_installed; then
-        echo "Checking Docker logs for large files..." >> "$REPORT_FILE"
-        large_logs=$(docker ps -q --filter "status=exited" | \
-            xargs -I {} docker inspect --format '{{.LogPath}}' {} 2>/dev/null | \
-            xargs -I {} sudo find {} -type f -size +1G 2>/dev/null)
-        if [ -n "$large_logs" ]; then
-            echo "The following Docker logs are larger than 1GB:" >> "$REPORT_FILE"
-            echo "$large_logs" >> "$REPORT_FILE"
-            echo "Consider setting up logrotate to manage Docker logs." >> "$REPORT_FILE"
-        else
-            echo "No large Docker logs found." >> "$REPORT_FILE"
-        fi
-        echo >> "$REPORT_FILE"
-    fi
+    echo "Checking Docker logs for large files..."
+    large_logs=$(docker ps -q --filter "status=exited" | xargs -I {} docker inspect --format '{{.LogPath}}' {} | xargs -I {} sudo find {} -type f -size +1G)
+    if [ -n "$large_logs" ]; then
+        echo "The following Docker logs are larger than 1GB:"
+        echo "$large_logs"
+        echo
+        echo "Consider setting up logrotate to manage Docker logs."
+        echo "To truncate all Docker logs, run:"
+        echo 'sudo find /var/lib/docker/containers/ -type f -name "*.log" -exec truncate -s 0 {} \;'
+        echo
+    else
+        echo "No large Docker logs found."
+        echo
+    fi
 }
-# List and categorize large directories
+# Function to list directories consuming more than a specified size
 list_large_directories() {
     local directory=$1
-    echo "Scanning $directory for large directories..." >> "$REPORT_FILE"
-    sudo du -ahx "$directory" --exclude=/home/virtfs 2>/dev/null | sort -hr > "$TMP_OUTPUT" || echo "Error scanning $directory." >> "$REPORT_FILE"
-    cat "$TMP_OUTPUT" >> "$REPORT_FILE"
-}
-categorize_large_directories() {
-    echo "Categorizing large directories..." >> "$REPORT_FILE"
-    awk '
-    {
-        size=$1; path=$2;
-        unit=substr(size, length(size));
+    local size_limit=$2
+    echo "Directories in $directory consuming more than ${size_limit}GB:"
+    sudo du -ahx $directory 2>/dev/null | awk -v limit=$size_limit '{
+        size=$1; unit=substr(size, length(size));
         size_val=substr(size, 1, length(size)-1);
-        if (unit == "G" || unit == "T") {
-            if (unit == "T") size_val *= 1024;
-            if (size_val > 50) print "> 50GB: " path;
-            else if (size_val > 20) print "> 20GB: " path;
-            else if (size_val > 10) print "> 10GB: " path;
-            else if (size_val > 5) print "> 5GB: " path;
-            else if (size_val > 1) print "> 1GB: " path;
+        if ((unit=="G" && size_val+0 > limit) || (unit=="T" && size_val*1024 > limit)) {
+            print
         }
-    }' "$TMP_OUTPUT" >> "$REPORT_FILE"
-    echo >> "$REPORT_FILE"
+    }'
+    echo
 }
-# Main workflow
-echo "Estimating potential storage savings..." >> "$REPORT_FILE"
-estimate_size "apt cache" "/var/cache/apt"
-estimate_size "old installed packages" "/var/lib/apt/lists /var/lib/apt/lists/partial"
-estimate_size "journal logs" "(journalctl --disk-usage)"
-estimate_size "temporary files" "/tmp /var/tmp"
+# Estimate storage savings
+echo "Estimating potential storage savings..."
+estimate_apt_cache_size
+estimate_old_packages_size
+estimate_journal_size
+estimate_tmp_size
 estimate_docker_volumes_size
+# Check Docker logs
 check_docker_logs
-list_large_directories "/"
-categorize_large_directories
-list_large_directories "/home"
-categorize_large_directories
-# Scan and report specifically for /home/virtfs
-echo "Scanning /home/virtfs for large directories..." >> "$REPORT_FILE"
-sudo du -ahx /home/virtfs 2>/dev/null | sort -hr > "$TMP_OUTPUT" || echo "Error scanning /home/virtfs." >> "$REPORT_FILE"
-categorize_large_directories
-echo "Storage savings estimation and large directory listing completed." >> "$REPORT_FILE"
-# Upload the report to hastebin
-echo "Uploading report to hastebin..." >> "$REPORT_FILE"
-response=$(curl -s -X POST -T "$REPORT_FILE" "$HASTE_URL")
-echo "Raw response from hastebin: $response" >> "$REPORT_FILE"
-if [[ $response == *"key"* ]]; then
-    key=$(echo $response | jq -r '.key')
-    echo "Report available at: $HASTE_URL/$key"
-else
-    echo "Failed to upload report to haste.nixc.us. Response: $response"
-fi
+# List large directories
+echo "Listing directories consuming more than 5GB and 10GB:"
+list_large_directories / 5
+list_large_directories /home 5
+list_large_directories / 10
+list_large_directories /home 10
+echo "Storage savings estimation and large directory listing completed."
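
Usage note: with this change the report goes to standard output instead of being written to /tmp/disk_report.txt and uploaded, so redirect or tee the output if you still want a file. A minimal sketch (the report path is just an example, mirroring the old script's location):

chmod +x disk-check.sh
./disk-check.sh | tee /tmp/disk_report.txt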
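
On the script's "Consider setting up logrotate to manage Docker logs" suggestion: rather than truncating log files by hand, Docker's json-file logging driver can rotate container logs itself via daemon settings. A minimal sketch, assuming the default json-file driver is in use; the 100m/3 values are illustrative, the settings only apply to containers created after the daemon restart, and this overwrites any existing /etc/docker/daemon.json:

# Write daemon-wide log rotation settings (illustrative values).
sudo tee /etc/docker/daemon.json >/dev/null <<'EOF'
{
  "log-driver": "json-file",
  "log-opts": {
    "max-size": "100m",
    "max-file": "3"
  }
}
EOF
# Restart the daemon so new containers pick up the log options.
sudo systemctl restart docker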