Initial stable.

colin 2023-12-13 20:30:38 +00:00 committed by Colin
parent 8fa11d79d3
commit 4815fa00a3
7 changed files with 362 additions and 6 deletions


@@ -1,6 +0,0 @@
version: "3.9"
services:
  well-known:
    build:
      context: docker/vault
    image: git.nixc.us/colin/vault:production

.woodpecker.yml Normal file

@@ -0,0 +1,143 @@
labels:
  hostname: "macmini7"

clone:
  git:
    image: woodpeckerci/plugin-git
    settings:
      partial: false
      depth: 1

steps:
  # Build Step for staging Branch
  build-staging:
    name: build-staging
    image: woodpeckerci/plugin-docker-buildx
    secrets: [REGISTRY_USER, REGISTRY_PASSWORD]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    commands:
      - echo "Building application for staging branch"
      - echo "$${REGISTRY_PASSWORD}" | docker login -u "$${REGISTRY_USER}" --password-stdin git.nixc.us
      - echo compose build
      - docker compose -f docker-compose.staging.yml build
    when:
      - branch: main
      - event: push
      - path:
          include: [ 'stack.production.yml', 'stack.staging.yml', 'docker-compose.staging.yml', 'docker-compose.production.yml', 'Dockerfile', '*.tests.ts' ]

  deploy-new:
    name: deploy-new
    when:
      - branch: main
      - path:
          include: [ 'stack.production.yml', 'stack.staging.yml', 'docker-compose.staging.yml', 'docker-compose.production.yml', 'Dockerfile', '*.tests.ts' ]
    image: woodpeckerci/plugin-docker-buildx
    secrets: [REGISTRY_USER, REGISTRY_PASSWORD]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    commands:
      - echo "$${REGISTRY_PASSWORD}" | docker login -u "$${REGISTRY_USER}" --password-stdin git.nixc.us
      - echo compose push
      - docker compose -f docker-compose.staging.yml push
      # - docker stack deploy --with-registry-auth -c ./stack.staging.yml $${CI_REPO_NAME}-staging

  # # Wait for Deploy to Complete
  # wait-for-deploy-staging:
  #   name: wait-for-deploy-staging
  #   image: woodpeckerci/plugin-git
  #   commands:
  #     - echo "Waiting for staging deploy step to complete rollout."
  #     - sleep 60
  #   when:
  #     - branch: main
  #     - event: push

  # # Run Automated Tests on staging Branch
  # test-staging:
  #   name: run-tests-staging
  #   image: git.nixc.us/colin/playwright:latest
  #   secrets: [ base_url ]
  #   when:
  #     - branch: main
  #     - event: push
  #     - path:
  #         include: [ 'tests/', 'src/', 'docker-compose.staging.yml', 'docker-compose.production.yml', '*.tests.ts' ] # Specify paths relevant to tests
  #   volumes:
  #     - /var/run/docker.sock:/var/run/docker.sock:ro

  cleanup-staging:
    name: cleanup-staging
    when:
      - branch: main
      - path:
          include: [ 'stack.production.yml', 'stack.staging.yml', 'docker-compose.staging.yml', 'docker-compose.production.yml', 'Dockerfile', '*.tests.ts' ]
    image: woodpeckerci/plugin-docker-buildx
    secrets: [REGISTRY_USER, REGISTRY_PASSWORD]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    commands:
      # - docker stack rm $${CI_REPO_NAME}-staging
      ## added fault tolerance for docker stack rm
      # - for i in {1..5}; do docker stack rm ${CI_REPO_NAME}-staging && break || sleep 10; done
      - docker compose -f docker-compose.staging.yml down
      - docker compose -f docker-compose.staging.yml rm -f

  # Build and Push Step for production Branch
  build-build-push-production:
    name: build-build-push-production
    image: woodpeckerci/plugin-docker-buildx
    secrets: [REGISTRY_USER, REGISTRY_PASSWORD]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    commands:
      - echo "Building application for production branch"
      - echo "$${REGISTRY_PASSWORD}" | docker login -u "$${REGISTRY_USER}" --password-stdin git.nixc.us
      - echo compose build
      - docker compose -f docker-compose.production.yml build
      - docker compose -f docker-compose.production.yml push
    when:
      - branch: production
      - event: push
      - path:
          include: [ 'stack.production.yml', 'stack.staging.yml', 'docker-compose.staging.yml', 'docker-compose.production.yml', 'Dockerfile', '*.tests.ts' ]

  # Deploy to Production Branch
  deploy-production:
    name: deploy-production
    image: woodpeckerci/plugin-docker-buildx
    secrets: [REGISTRY_USER, REGISTRY_PASSWORD]
    volumes:
      - /var/run/docker.sock:/var/run/docker.sock
    commands:
      - echo "$${REGISTRY_PASSWORD}" | docker login -u "$${REGISTRY_USER}" --password-stdin git.nixc.us
      # - docker stack deploy --with-registry-auth -c ./stack.production.yml $${CI_REPO_NAME}
    when:
      - branch: production
      - event: push
      # - path:
      #     include: [ 'stack.production.yml', 'stack.staging.yml', 'docker-compose.staging.yml', 'docker-compose.production.yml', 'Dockerfile', '*.tests.ts' ]

  # # Wait for Deploy to Complete
  # wait-for-deploy-production:
  #   name: wait-for-deploy-production
  #   image: woodpeckerci/plugin-git
  #   commands:
  #     - echo "Waiting for deploy step to complete rollout."
  #     - sleep 60
  #   when:
  #     - branch: production
  #     - event: push

  # # Run Post-Deployment Smoke Tests
  # post-deploy-smoke-tests-git-nixc-us:
  #   name: run-post-deploy-smoke-tests-git-nixc-us
  #   image: git.nixc.us/colin/playwright:latest
  #   # secrets: [TEST_USER, TEST_PASSWORD]
  #   environment:
  #     - BASE_URL=https://git.nixc.us
  #   when:
  #     - branch: production
  #     - event: push
  #     # - path:
  #     #     include: [ 'stack.production.yml', 'stack.staging.yml', 'docker-compose.staging.yml', 'docker-compose.production.yml', 'Dockerfile', '*.tests.ts' ]

README.md Normal file

@@ -0,0 +1,61 @@
<!-- #build4 -->
# bench plugin
### Docker Run
Run the Docker image with the `docker run` command. Below are examples for a few common scenarios:
1. **Testing a Single URL:**
```bash
docker run -e TARGET_URLS="http://example.com" -e MAX_TIME=1 -e VARIANCE=2 -e TEST_MODE="report" git.nixc.us/colin/bench:production
```
2. **Testing Multiple URLs:**
```bash
docker run -e TARGET_URLS="http://example.com,http://example2.com" -e MAX_TIME=1 -e VARIANCE=2 -e TEST_MODE="report" git.nixc.us/colin/bench:production
```
3. **Using Fail-Fast Mode:**
```bash
docker run -e TARGET_URLS="http://example.com" -e MAX_TIME=1 -e VARIANCE=2 -e TEST_MODE="fail-fast" git.nixc.us/colin/bench:production
```
### Docker Compose
An example `docker-compose.yml` file that sets up the same configuration:
```yaml
version: '3'
services:
  apachebench:
    image: git.nixc.us/colin/bench:production
    environment:
      TARGET_URLS: "http://example.com,http://example2.com" # Comma-separated list of URLs
      MAX_TIME: 1 # Maximum acceptable response time, in seconds
      VARIANCE: 2 # Acceptable time variance, in seconds
      TEST_MODE: "report" # Can be "report" or "fail-fast"
      NUM_REQUESTS: 500 # Number of requests to perform
      CONCURRENCY: 20 # Number of requests to make at a time
```
To run the service defined in the `docker-compose.yml` file, use:
```bash
docker-compose up
```
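Because `run-ab.sh` exits non-zero when any URL fails its checks, the Compose service can also gate a CI job directly on the benchmark result. A minimal sketch, assuming the `apachebench` service name from the example above:
```bash
# Abort when the benchmark container exits and propagate its exit code
# to the calling shell or CI step (--exit-code-from implies
# --abort-on-container-exit).
docker-compose up --exit-code-from apachebench
```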
### Customizing the Configuration
You can modify the `TARGET_URLS`, `MAX_TIME`, `VARIANCE`, `TEST_MODE`, `NUM_REQUESTS`, and `CONCURRENCY` environment variables to suit your specific testing needs. Here's what each variable represents:
- `TARGET_URLS`: A comma-separated list of URLs to test.
- `MAX_TIME`: The target maximum response time, in seconds.
- `VARIANCE`: The acceptable variance in response time, in seconds.
- `TEST_MODE`: The mode of operation: `report` tests all URLs regardless of individual results, while `fail-fast` stops testing as soon as a URL fails the performance criteria.
- `NUM_REQUESTS`: The number of requests ApacheBench sends to each URL.
- `CONCURRENCY`: The number of requests run concurrently.
This setup allows you to run performance tests in a Docker environment, providing an automated way to benchmark and analyze the performance of various web services.
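One more knob comes from `run-ab.sh` itself rather than the examples above: the script also reads `TTFB_MAX`, a ceiling on mean time-to-first-byte in milliseconds (default 500), and it writes its log and CSV output under `/logs` inside the container. A sketch of a fully customized run that keeps those results by bind-mounting the output directory; the values shown are illustrative, and `TTFB_MAX` plus the `/logs` path are assumptions taken from `run-ab.sh`, not documented options:
```bash
# Illustrative values only; TTFB_MAX and the /logs output path are
# assumptions based on run-ab.sh.
docker run \
  -e TARGET_URLS="http://example.com,http://example2.com" \
  -e MAX_TIME=1 \
  -e VARIANCE=2 \
  -e TTFB_MAX=500 \
  -e TEST_MODE="fail-fast" \
  -e NUM_REQUESTS=500 \
  -e CONCURRENCY=20 \
  -v "$(pwd)/logs:/logs" \
  git.nixc.us/colin/bench:production
```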


@@ -0,0 +1,6 @@
version: "3.9"
services:
  bench:
    build:
      context: docker/bench
    image: git.nixc.us/colin/bench:production


@@ -0,0 +1,6 @@
version: "3.9"
services:
  bench:
    build:
      context: docker/bench
    image: git.nixc.us/colin/bench:staging

docker/bench/Dockerfile Normal file

@@ -0,0 +1,5 @@
FROM alpine:latest
RUN apk --no-cache add apache2-utils curl ca-certificates bash
COPY run-ab.sh /run-ab.sh
RUN chmod +x /run-ab.sh
ENTRYPOINT ["/run-ab.sh"]

docker/bench/run-ab.sh Normal file

@@ -0,0 +1,141 @@
#!/bin/bash
# Benchmark a list of URLs with curl (TTFB) and ApacheBench, log the results,
# and upload a CSV summary to hastebin.

# Defaults; all of these can be overridden via environment variables.
DEFAULT_MAX_TIME=1        # seconds
DEFAULT_VARIANCE=2        # seconds
DEFAULT_TTFB_MAX=500      # milliseconds
DEFAULT_TEST_MODE="report"
DEFAULT_NUM_REQUESTS=100
DEFAULT_CONCURRENCY=10

TARGET_URLS=${TARGET_URLS:-""}
MAX_TIME=${MAX_TIME:-$DEFAULT_MAX_TIME}
VARIANCE=${VARIANCE:-$DEFAULT_VARIANCE}
TTFB_MAX=${TTFB_MAX:-$DEFAULT_TTFB_MAX}
TEST_MODE=${TEST_MODE:-$DEFAULT_TEST_MODE}
NUM_REQUESTS=${NUM_REQUESTS:-$DEFAULT_NUM_REQUESTS}
CONCURRENCY=${CONCURRENCY:-$DEFAULT_CONCURRENCY}

if [ -z "$TARGET_URLS" ]; then
    echo "TARGET_URLS is not set; nothing to test." >&2
    exit 1
fi

declare -a failed_urls

# Expand TARGET_URLS with every <loc> entry found in each domain's sitemap.xml.
process_sitemap() {
    IFS=',' read -r -a domains <<< "$TARGET_URLS"
    for domain in "${domains[@]}"; do
        domain="${domain%/}"
        sitemap=$(curl -s "${domain}/sitemap.xml")
        while IFS= read -r loc; do
            url=$(echo "$loc" | sed -e 's|<loc>||g' -e 's|</loc>||g' | xargs)
            if [[ "$url" =~ ^https?:// ]]; then
                new_url="$url"
            else
                url="${url#/}"
                new_url="${domain}/${url}"
            fi
            new_url=$(echo "$new_url" | sed -e 's|//|/|g' -e 's|:/|://|')
            if [[ ! "${TARGET_URLS}" =~ "${new_url}" ]]; then
                TARGET_URLS+=",${new_url}"
            fi
        done < <(echo "$sitemap" | grep -oE "<loc>[^<]+</loc>")
    done
}

process_sitemap

# Mean time-to-first-byte over 5 requests, in milliseconds.
measure_ttfb() {
    local url=$1
    local -a times=()
    local min max sum=0 mean ttfb
    for i in {1..5}; do
        ttfb=$(curl -o /dev/null -s -w '%{time_starttransfer}\n' "$url" | awk '{print $1 * 1000}')
        times+=("$ttfb")
        sum=$(echo "$sum + $ttfb" | bc)
    done
    min=$(printf '%s\n' "${times[@]}" | sort -n | head -1)
    max=$(printf '%s\n' "${times[@]}" | sort -n | tail -1)
    mean=$(echo "scale=3; $sum / 5" | bc)
    echo "$mean"
}

# Run ApacheBench against a URL and compare the mean request time (ms)
# against (MAX_TIME + VARIANCE) seconds.
run_ab() {
    local url=$1
    local result avg_time
    result=$(ab -n "$NUM_REQUESTS" -c "$CONCURRENCY" "$url" 2>&1)
    avg_time=$(echo "$result" | grep 'Time per request' | head -1 | awk '{print $4}')
    if [ -z "$avg_time" ] || [ "$(echo "$avg_time > ($MAX_TIME + $VARIANCE) * 1000" | bc)" -eq 1 ]; then
        echo "ApacheBench test failed for $url. Average time: ${avg_time:-N/A} milliseconds."
        return 1
    else
        echo "ApacheBench test passed for $url. Average time: $avg_time milliseconds."
        return 0
    fi
}

# Convert the plain-text log into a url,ttfb,avg_time CSV.
parse_to_csv() {
    local input="$1"
    local output="$2"
    : > "$output"
    grep -Eo 'https?://[^ ]+' "$input" | sed 's/\.$//' | sort -u | while read -r url; do
        local ttfb_line ab_line ttfb_test ab_test
        ttfb_line=$(grep "TTFB test passed for $url" "$input" | head -1 | awk -F'Mean TTFB: ' '{print $2}')
        ab_line=$(grep "ApacheBench test passed for $url" "$input" | head -1 | awk -F'Average time: ' '{print $2}')
        ttfb_test=${ttfb_line%ms.*}
        ab_test=${ab_line% milliseconds.*}
        if [[ -n "$ttfb_test" || -n "$ab_test" ]]; then
            [[ -z "$ttfb_test" ]] && ttfb_test="N/A"
            [[ -z "$ab_test" ]] && ab_test="N/A"
            echo "$url,$ttfb_test,$ab_test" >> "$output"
        fi
    done
}

# Upload a file to the hastebin instance and print the resulting URL.
upload_to_hastebin() {
    local file=$1
    local haste_url="https://haste.nixc.us/documents"
    local response key
    response=$(curl -X POST -s --data-binary @"$file" "$haste_url")
    key=$(echo "$response" | awk -F '"' '{print $4}')
    if [[ -n "$key" ]]; then
        echo "Uploaded to hastebin: https://haste.nixc.us/$key"
    else
        echo "Failed to upload to hastebin."
    fi
}

log_file="/logs/scan.log"
csv_file="/logs/scan.csv"
mkdir -p "$(dirname "$log_file")"
: > "$log_file"

IFS=',' read -ra URLS <<< "$TARGET_URLS"
for url in "${URLS[@]}"; do
    mean_ttfb=$(measure_ttfb "$url")
    if [ -z "$mean_ttfb" ] || [ "$(echo "$mean_ttfb > $TTFB_MAX" | bc)" -eq 1 ]; then
        echo "TTFB test failed for $url. Mean TTFB: ${mean_ttfb}ms exceeds maximum of ${TTFB_MAX}ms." | tee -a "$log_file"
        failed_urls+=("TTFB failure: $url")
    else
        echo "TTFB test passed for $url. Mean TTFB: ${mean_ttfb}ms." | tee -a "$log_file"
    fi

    run_ab_result=$(run_ab "$url")
    echo "$run_ab_result" | tee -a "$log_file"
    if [[ "$run_ab_result" == *"failed"* ]]; then
        failed_urls+=("ApacheBench failure: $url")
    fi

    # In fail-fast mode, stop at the first failure.
    if [ "$TEST_MODE" = "fail-fast" ] && [ ${#failed_urls[@]} -gt 0 ]; then
        echo "Exiting due to fail-fast mode with failures." | tee -a "$log_file"
        for failed_url in "${failed_urls[@]}"; do
            echo "- $failed_url" | tee -a "$log_file"
        done
        exit 1
    fi
done

# Summarise results and record the exit code, but only exit after the CSV
# has been generated and uploaded.
if [ ${#failed_urls[@]} -gt 0 ]; then
    echo "Summary of failed URLs:" | tee -a "$log_file"
    for failed_url in "${failed_urls[@]}"; do
        echo "- $failed_url" | tee -a "$log_file"
    done
    exit_code=1
else
    echo "All URLs passed the performance and TTFB tests." | tee -a "$log_file"
    exit_code=0
fi

parse_to_csv "$log_file" "$csv_file"
upload_to_hastebin "$csv_file"

exit "$exit_code"