#!/bin/bash
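
# Scans a set of sites for performance regressions: expands each domain's
# sitemap.xml into a full URL list, measures mean time-to-first-byte with
# curl, and load-tests every URL with ApacheBench. Requires curl, ab, and bc.
#
# Example invocation (values and script name are illustrative):
#   TARGET_URLS="https://example.com" TTFB_MAX=300 TEST_MODE=fail-fast ./scan.sh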

# Tunable defaults; override any of these via environment variables.
DEFAULT_MAX_TIME=1          # acceptable mean request time, in seconds
DEFAULT_VARIANCE=2          # allowed variance above MAX_TIME, in seconds
DEFAULT_TTFB_MAX=500        # maximum acceptable mean TTFB, in milliseconds
DEFAULT_TEST_MODE="report"  # "report" tests everything; "fail-fast" exits on the first failure
DEFAULT_NUM_REQUESTS=100    # requests per ApacheBench run
DEFAULT_CONCURRENCY=10      # concurrent requests per ApacheBench run

TARGET_URLS=${TARGET_URLS:-""}
MAX_TIME=${MAX_TIME:-$DEFAULT_MAX_TIME}
VARIANCE=${VARIANCE:-$DEFAULT_VARIANCE}
TTFB_MAX=${TTFB_MAX:-$DEFAULT_TTFB_MAX}
TEST_MODE=${TEST_MODE:-$DEFAULT_TEST_MODE}
NUM_REQUESTS=${NUM_REQUESTS:-$DEFAULT_NUM_REQUESTS}
CONCURRENCY=${CONCURRENCY:-$DEFAULT_CONCURRENCY}

# TARGET_URLS is required; without it the sitemap fetch and the test loop
# would run against empty strings.
if [ -z "$TARGET_URLS" ]; then
    echo "TARGET_URLS must be set to a comma-separated list of URLs." >&2
    exit 1
fi

# Collects every URL that fails a test so a summary can be printed at the end.
declare -a failed_urls
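
# Expands TARGET_URLS in place: fetches ${domain}/sitemap.xml for each listed
# domain and appends every <loc> entry, resolving relative paths against the
# domain. For example, a sitemap entry of <loc>/about/</loc> under
# https://example.com is appended as https://example.com/about/.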
process_sitemap() {
    IFS=',' read -r -a domains <<< "$TARGET_URLS"
    for domain in "${domains[@]}"; do
        # Strip a trailing slash so paths can be joined predictably.
        domain="${domain%/}"
        sitemap=$(curl -s "${domain}/sitemap.xml")
        while IFS= read -r loc; do
            # Drop the <loc> tags and trim surrounding whitespace.
            url=$(echo "$loc" | sed -e 's|<loc>||g' -e 's|</loc>||g' | xargs)
            if [[ "$url" =~ ^https?:// ]]; then
                new_url="$url"
            else
                url="${url#/}"
                new_url="${domain}/${url}"
            fi
            # Collapse accidental double slashes in the path, then restore
            # the "//" that belongs to the scheme.
            new_url=$(echo "$new_url" | sed -e 's|//|/|g' -e 's|:/|://|')
            # Exact-match against the comma-separated list; a plain substring
            # check would let an existing URL that merely shares a prefix
            # suppress a new one.
            if [[ ",${TARGET_URLS}," != *",${new_url},"* ]]; then
                TARGET_URLS+=",${new_url}"
            fi
        # POSIX ERE has no lazy ".+?", so match non-"<" characters instead to
        # keep a match from spanning multiple <loc> entries on one line.
        done < <(echo "$sitemap" | grep -oE "<loc>[^<]+</loc>")
    done
}

process_sitemap
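
# Samples a URL five times with curl and prints the mean time-to-first-byte
# in milliseconds (curl reports time_starttransfer in seconds).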
measure_ttfb() {
    local url=$1
    local -a times=()
    local min max sum=0 mean ttfb

    for i in {1..5}; do
        # Convert curl's seconds to milliseconds.
        ttfb=$(curl -o /dev/null -s -w '%{time_starttransfer}\n' "$url" | awk '{print $1 * 1000}')
        times+=("$ttfb")
        sum=$(echo "$sum + $ttfb" | bc)
    done

    # min and max are computed for completeness; only the mean is reported.
    min=$(printf '%s\n' "${times[@]}" | sort -n | head -1)
    max=$(printf '%s\n' "${times[@]}" | sort -n | tail -1)
    mean=$(echo "scale=3; $sum / 5" | bc)

    echo "$mean"
}
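
# Load-tests a URL with ApacheBench and compares the mean time per request
# against a budget of (MAX_TIME + VARIANCE) seconds.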
run_ab() {
    local url=$1
    local request_url=$url
    # ab rejects a URL without a path component, so bare domains are
    # requested with a trailing slash while messages keep the original URL.
    case "$request_url" in
        http://*/*|https://*/*) ;;
        *) request_url="${request_url}/" ;;
    esac
    local result avg_time
    result=$(ab -n "$NUM_REQUESTS" -c "$CONCURRENCY" "$request_url" 2>&1)
    # The first "Time per request" line in ab's output is the mean per
    # request, in milliseconds.
    avg_time=$(echo "$result" | grep 'Time per request' | head -1 | awk '{print $4}')
    # MAX_TIME and VARIANCE are in seconds; convert the budget to ms.
    if [ -z "$avg_time" ] || [ "$(echo "$avg_time > ($MAX_TIME + $VARIANCE) * 1000" | bc)" -eq 1 ]; then
        echo "ApacheBench test failed for $url. Average time: ${avg_time:-N/A} milliseconds."
        return 1
    else
        echo "ApacheBench test passed for $url. Average time: $avg_time milliseconds."
        return 0
    fi
}
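
# Results are mirrored to a log file that parse_to_csv distills afterwards.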
log_file="/log/scan.log"
|
|
mkdir -p $(dirname "$log_file")
|
|
cat /dev/null > $"$log_file"
|
|
|
|
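
# Main loop: measure TTFB and run ApacheBench against every URL, recording
# failures; in fail-fast mode the first failure aborts the whole scan.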
IFS=',' read -ra URLS <<< "$TARGET_URLS"
for url in "${URLS[@]}"; do
    mean_ttfb=$(measure_ttfb "$url")
    if [ -z "$mean_ttfb" ] || [ "$(echo "$mean_ttfb > $TTFB_MAX" | bc)" -eq 1 ]; then
        echo "TTFB test failed for $url. Mean TTFB: ${mean_ttfb}ms exceeds maximum of ${TTFB_MAX}ms." | tee -a "$log_file"
        failed_urls+=("TTFB failure: $url")
    else
        echo "TTFB test passed for $url. Mean TTFB: ${mean_ttfb}ms." | tee -a "$log_file"
    fi

    # Use run_ab's exit status rather than scraping its message for the
    # word "failed".
    run_ab_result=$(run_ab "$url")
    ab_status=$?
    echo "$run_ab_result" | tee -a "$log_file"
    if [ "$ab_status" -ne 0 ]; then
        failed_urls+=("ApacheBench failure: $url")
    fi

    if [ "$TEST_MODE" = "fail-fast" ] && [ ${#failed_urls[@]} -gt 0 ]; then
        echo "Exiting due to fail-fast mode with failures." | tee -a "$log_file"
        for failed_url in "${failed_urls[@]}"; do
            echo "- $failed_url" | tee -a "$log_file"
        done
        exit 1
    fi
done
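
# Reduces the scan log to CSV: one row per URL with its mean TTFB and its
# mean ApacheBench request time, e.g. "https://example.com,123.4,210.5"
# (the example values are illustrative).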
parse_to_csv() {
    local input="$1"
    local output="$2"
    echo "url,mean_ttfb_ms,avg_request_ms" > "$output"
    # Log lines end the URL with a period, so strip it before de-duplicating.
    grep -Eo 'https?://[^ ]+' "$input" | sed 's/\.$//' | sort -u | while read -r url; do
        # Filter all lines for this URL first, then pick the first passing
        # entry of each kind; taking "grep -m 1" on the URL alone would stop
        # at whichever line happened to come first.
        local ttfb_line=$(grep -F "$url" "$input" | grep -m 1 'TTFB test passed' | awk -F'Mean TTFB: ' '{print $2}')
        local ab_line=$(grep -F "$url" "$input" | grep -m 1 'ApacheBench test passed' | awk -F'Average time: ' '{print $2}')
        local ttfb_test=${ttfb_line%ms.*}
        local ab_test=${ab_line% milliseconds.*}
        if [[ -n "$ttfb_test" || -n "$ab_test" ]]; then
            if [[ -z "$ttfb_test" ]]; then ttfb_test="N/A"; fi
            if [[ -z "$ab_test" ]]; then ab_test="N/A"; fi
            echo "$url,$ttfb_test,$ab_test" >> "$output"
        fi
    done
}
parse_to_csv "/logs/scan.log" "/logs/scan.csv"
|
|
|
|
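
# Publishes a file to the hastebin instance at haste.nixc.us; the service
# responds with JSON of the form {"key":"..."} which becomes the share URL.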
upload_to_hastebin() {
    local file=$1
    local haste_url="https://haste.nixc.us/documents"
    local response key
    response=$(curl -X POST -s --data-binary @"$file" "$haste_url")
    # Splitting {"key":"..."} on double quotes puts the key in field 4.
    key=$(echo "$response" | awk -F '"' '{print $4}')
    if [[ -n "$key" ]]; then
        echo "Uploaded to hastebin: https://haste.nixc.us/$key"
    else
        echo "Failed to upload to hastebin."
    fi
}
upload_to_hastebin "/logs/scan.csv"
|
|
|
|
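
# Exit non-zero when anything failed so CI pipelines can gate on this scan.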
if [ ${#failed_urls[@]} -gt 0 ]; then
    echo "Summary of failed URLs:" | tee -a "$log_file"
    for failed_url in "${failed_urls[@]}"; do
        echo "- $failed_url" | tee -a "$log_file"
    done
    exit 1
else
    echo "All URLs passed the performance and TTFB tests." | tee -a "$log_file"
    exit 0
fi