swap-increase/disk-check.sh

#!/bin/bash
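# Requirements (inferred from the commands used below): bash, sudo privileges,
# and curl plus jq for the hastebin upload step. Docker-related checks are
# skipped automatically if docker is not installed.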
REPORT_FILE="/tmp/disk_report.txt"          # Final report that gets uploaded
TMP_OUTPUT="/tmp/du_output.txt"             # Scratch file holding the raw du listing
HASTE_URL="https://haste.nixc.us/documents"
echo "Starting disk space report... This may take a few minutes." > "$REPORT_FILE"
# Check if Docker is installed
is_docker_installed() {
    if ! command -v docker &>/dev/null; then
        echo "Docker is not installed. Skipping Docker-related checks." >> "$REPORT_FILE"
        return 1
    fi
    return 0
}
# Function to estimate directory sizes (accepts one or more paths)
estimate_size() {
    local description=$1
    shift
    echo "Estimating the size of $description:" >> "$REPORT_FILE"
    # du -shc prints one line per path plus a combined total
    sudo du -shc "$@" 2>/dev/null >> "$REPORT_FILE" || echo "Error estimating $description size." >> "$REPORT_FILE"
    echo >> "$REPORT_FILE"
}
# Estimate unused (dangling) Docker volumes
estimate_docker_volumes_size() {
    if is_docker_installed; then
        echo "Estimating the size of unused Docker volumes:" >> "$REPORT_FILE"
        # Sum byte counts (du -sb) rather than human-readable sizes, then convert for display
        docker volume ls -qf dangling=true | \
            xargs -r -I {} docker volume inspect --format '{{ .Mountpoint }}' {} 2>/dev/null | \
            xargs -r -I {} sudo du -sb {} 2>/dev/null | \
            awk '{ sum += $1 } END { printf "%.0f\n", sum }' | numfmt --to=iec >> "$REPORT_FILE" || echo "Error estimating Docker volumes." >> "$REPORT_FILE"
        echo >> "$REPORT_FILE"
    fi
}
# Check for large Docker container logs
check_docker_logs() {
    if is_docker_installed; then
        echo "Checking Docker logs for large files..." >> "$REPORT_FILE"
        # Inspect every container (running or stopped), not just exited ones
        large_logs=$(docker ps -aq | \
            xargs -r -I {} docker inspect --format '{{.LogPath}}' {} 2>/dev/null | \
            xargs -r -I {} sudo find {} -type f -size +1G 2>/dev/null)
        if [ -n "$large_logs" ]; then
            echo "The following Docker logs are larger than 1GB:" >> "$REPORT_FILE"
            echo "$large_logs" >> "$REPORT_FILE"
            echo "Consider setting up logrotate to manage Docker logs." >> "$REPORT_FILE"
        else
            echo "No large Docker logs found." >> "$REPORT_FILE"
        fi
        echo >> "$REPORT_FILE"
    fi
}
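# Note: as an alternative to logrotate, Docker's default json-file log driver can
# cap log size itself via "log-opts" (e.g. {"max-size": "100m"}) in /etc/docker/daemon.json.
# This is only a suggestion; nothing in this script configures it.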
# List large directories and files under a given path
list_large_directories() {
    local directory=$1
    echo "Scanning $directory for large directories..." >> "$REPORT_FILE"
    sudo du -ahx "$directory" --exclude=/home/virtfs 2>/dev/null | sort -hr > "$TMP_OUTPUT" || echo "Error scanning $directory." >> "$REPORT_FILE"
    # Only the 50 largest entries go into the report; the full listing stays in $TMP_OUTPUT for categorization
    head -n 50 "$TMP_OUTPUT" >> "$REPORT_FILE"
}
categorize_large_directories() {
    echo "Categorizing large directories..." >> "$REPORT_FILE"
    awk '
    {
        size=$1;
        # Keep the full path even if it contains spaces
        path=$0; sub(/^[^[:space:]]+[[:space:]]+/, "", path);
        unit=substr(size, length(size));
        # Force a numeric comparison (string comparison would misorder e.g. "100" vs "50")
        size_val=substr(size, 1, length(size)-1) + 0;
        if (unit == "G" || unit == "T") {
            if (unit == "T") size_val *= 1024;
            if (size_val > 50) print "> 50GB: " path;
            else if (size_val > 20) print "> 20GB: " path;
            else if (size_val > 10) print "> 10GB: " path;
            else if (size_val > 5) print "> 5GB: " path;
            else if (size_val > 1) print "> 1GB: " path;
        }
    }' "$TMP_OUTPUT" >> "$REPORT_FILE"
    echo >> "$REPORT_FILE"
}
# Main workflow
echo "Estimating potential storage savings..." >> "$REPORT_FILE"
estimate_size "apt cache" /var/cache/apt
estimate_size "apt package lists" /var/lib/apt/lists
# journalctl reports its own disk usage; du is not needed here
echo "Estimating the size of journal logs:" >> "$REPORT_FILE"
journalctl --disk-usage >> "$REPORT_FILE" 2>&1
echo >> "$REPORT_FILE"
estimate_size "temporary files" /tmp /var/tmp
estimate_docker_volumes_size
check_docker_logs
list_large_directories "/"
categorize_large_directories
list_large_directories "/home"
categorize_large_directories
# Scan and report specifically for /home/virtfs
echo "Scanning /home/virtfs for large directories..." >> "$REPORT_FILE"
sudo du -ahx /home/virtfs 2>/dev/null | sort -hr > "$TMP_OUTPUT" || echo "Error scanning /home/virtfs." >> "$REPORT_FILE"
categorize_large_directories
echo "Storage savings estimation and large directory listing completed." >> "$REPORT_FILE"
# Upload the report to hastebin
echo "Uploading report to hastebin..."
# POST the report body; hastebin-style servers answer with JSON such as {"key":"abc123"}
response=$(curl -s -X POST --data-binary @"$REPORT_FILE" "$HASTE_URL")
echo "Raw response from hastebin: $response"
if [[ $response == *"key"* ]]; then
    key=$(echo "$response" | jq -r '.key')
    echo "Report available at: $HASTE_URL/$key"
else
    echo "Failed to upload report to haste.nixc.us. Response: $response"
fi
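# Usage sketch (assumes a host with sudo, curl, and jq available):
#   bash disk-check.sh
# The script writes /tmp/disk_report.txt and, on success, prints the hastebin URL.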