|
| 1 | +#!/usr/bin/env bash |
| 2 | +# ============================================================================= |
| 3 | +# compress-bench.sh — Compression benchmark suite |
| 4 | +# |
| 5 | +# Benchmarks static-web serving pre-compressed files with different encodings. |
| 6 | +# Tests: no compression (baseline), gzip, brotli, zstd, and on-the-fly gzip. |
| 7 | +# |
| 8 | +# Usage: |
| 9 | +# ./benchmark/compress-bench.sh [OPTIONS] |
| 10 | +# |
| 11 | +# Options: |
| 12 | +# -c <int> Connections (default: 50) |
| 13 | +# -n <int> Total requests (default: 100000) |
| 14 | +# -d <int> Duration in seconds — overrides -n when set |
| 15 | +# -k Keep containers running after benchmark (default: tear down) |
| 16 | +# -h Show this help |
| 17 | +# |
| 18 | +# Requirements: |
| 19 | +# - docker + docker compose |
| 20 | +# - bombardier (https://github.com/codesenberg/bombardier) |
| 21 | +# Install: brew install bombardier OR go install github.com/codesenberg/bombardier@latest |
| 22 | +# - Pre-compressed files in public/ (index.html.gz, index.html.br, index.html.zst) |
| 23 | +# Run: gzip -k -9 public/index.html && brotli -k -9 public/index.html && zstd -k public/index.html |
| 24 | +# ============================================================================= |
# =============================================================================
# Global configuration. Path constants and colour codes never change after
# initialisation, so they are marked readonly; the benchmark knobs are left
# writable on purpose because the getopts loop below overrides them.
# =============================================================================
set -euo pipefail

# Absolute directory containing this script (works regardless of caller's cwd).
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
readonly SCRIPT_DIR
readonly COMPOSE_FILE="${SCRIPT_DIR}/docker-compose.compression.yml"
readonly RESULTS_DIR="${SCRIPT_DIR}/results"

# ---------- defaults ---------------------------------------------------------
CONNECTIONS=50   # -c: concurrent connections
REQUESTS=100000  # -n: total requests (request-count mode)
DURATION=""      # -d: empty = use -n mode; set seconds e.g. 30 to use -d mode
KEEP=false       # -k: leave containers running after the benchmark

# ---------- colours ----------------------------------------------------------
readonly RED='\033[0;31m' YELLOW='\033[1;33m' GREEN='\033[0;32m'
readonly CYAN='\033[0;36m' BOLD='\033[1m' RESET='\033[0m'
readonly BLUE='\033[0;34m'
| 42 | +# ---------- arg parse -------------------------------------------------------- |
# Print this script's header comment block as help text, then exit 0.
# Reads $0 itself: skips the shebang, emits successive leading '#' lines with
# the comment prefix (1-2 following spaces) stripped, and stops at the first
# non-comment line. The previous grep-based version dumped EVERY '#' line in
# the whole file — including internal section-divider comments — into -h.
usage() {
  awk 'NR == 1 { next }                                     # skip the shebang
       /^#/    { sub(/^#/, ""); sub(/^  ?/, ""); print; next }
               { exit }' "$0"
  exit 0
}
| 47 | + |
# Parse CLI flags. Silent mode (leading ':') is required here: in non-silent
# mode an invalid option leaves OPTARG unset, so the `*)` message would abort
# with an "unbound variable" error under `set -u` instead of printing.
while getopts ":c:n:d:kh" opt; do
  case "$opt" in
    c) CONNECTIONS="$OPTARG" ;;  # concurrent connections
    n) REQUESTS="$OPTARG" ;;     # total request count
    d) DURATION="$OPTARG" ;;     # duration in seconds — overrides -n
    k) KEEP=true ;;              # keep containers after the run
    h) usage ;;                  # print help and exit
    :) echo "Option -$OPTARG requires an argument" >&2; exit 1 ;;
    *) echo "Unknown option -${OPTARG:-}" >&2; exit 1 ;;
  esac
done
| 58 | + |
| 59 | +# ---------- dependency checks ------------------------------------------------ |
# Verify required external tools are installed; exit 1 with install hints
# when any are missing. curl is included because wait_for_server depends on
# it (the original list omitted curl and the script would fail later).
check_deps() {
  local missing=""
  local dep
  for dep in docker bombardier curl; do
    command -v "$dep" >/dev/null 2>&1 || missing="$missing $dep"
  done

  if [ -n "$missing" ]; then
    echo -e "${RED}Missing dependencies:${missing}${RESET}"
    echo ""
    echo "Install bombardier: brew install bombardier"
    echo " OR go install github.com/codesenberg/bombardier@latest"
    exit 1
  fi
}
| 73 | + |
# ---------- servers (parallel indexed arrays — bash 3 compatible) -------------
# Each server is configured to serve a specific encoding type.
# The Accept-Encoding request header selects which encoding to benchmark.
SERVER_NAMES=( "no-compress" "gzip-precompressed" "brotli-precompressed" "zstd-precompressed" "gzip-onthefly" "zstd-onthefly" )
SERVER_URLS=( "http://localhost:9001/index.html" "http://localhost:9002/index.html" "http://localhost:9003/index.html" "http://localhost:9004/index.html" "http://localhost:9005/index.html" "http://localhost:9006/index.html" )
# Accept-Encoding header value per server ("" = send no header at all)
ACCEPT_ENCODING=( "" "gzip" "br" "zstd" "gzip" "zstd" )
# Derived from the array so the lists can grow without updating a
# hand-maintained constant (the old hard-coded 6 invited drift).
SERVER_COUNT=${#SERVER_NAMES[@]}
| 82 | + |
| 83 | +# ---------- helpers ---------------------------------------------------------- |
# Poll a URL with curl until it answers successfully, printing a progress dot
# per failed attempt. Gives up after 30 failed attempts (~30 s).
#   $1 - display name, $2 - URL to probe
# Returns 0 when the server responds, 1 on timeout.
wait_for_server() {
  local name=$1
  local url=$2
  local limit=30
  local attempts=0
  printf " Waiting for %-22s" "${name}..."
  until curl -sf -o /dev/null "$url" 2>/dev/null; do
    sleep 1
    attempts=$((attempts + 1))
    if [ "$attempts" -ge "$limit" ]; then
      echo -e " ${RED}TIMEOUT${RESET}"
      return 1
    fi
    printf "."
  done
  echo -e " ${GREEN}ready${RESET}"
}
| 101 | + |
# Run one bombardier load test against a URL.
#   $1 - target URL
#   $2 - Accept-Encoding header value ("" = send no header)
# Mode: duration (-d) when DURATION is set, otherwise request count (-n).
# Builds the command line in an array instead of the previous four
# near-duplicate invocations, so flag changes happen in one place.
# stderr is discarded because bombardier's progress bar pollutes captures.
run_bombardier() {
  local url=$1
  local accept_enc=$2
  local args=(-c "$CONNECTIONS")

  if [ -n "$DURATION" ]; then
    args+=(-d "${DURATION}s")
  else
    args+=(-n "$REQUESTS")
  fi
  args+=(-l --print r)
  if [ -n "$accept_enc" ]; then
    args+=(-H "Accept-Encoding: $accept_enc")
  fi

  bombardier "${args[@]}" "$url" 2>/dev/null
}
| 120 | + |
# Extract the mean requests/sec — the 2nd field of the first "Reqs/sec" row —
# from bombardier output supplied on stdin. Prints nothing if the row is absent.
parse_rps() {
  awk '$0 ~ /Reqs\/sec/ { print $2; exit }'
}
| 125 | + |
# Extract the p50 latency — 2nd field of the "50%" latency-distribution row —
# from bombardier output on stdin. Prints nothing if the row is absent.
# NB: '%' needs no escaping in an awk ERE; the old '\%' escape is undefined
# behaviour (gawk --posix warns about it).
parse_p50() {
  awk '/50%/{print $2; exit}'
}
| 130 | + |
# Extract the p99 latency — 2nd field of the "99%" latency-distribution row —
# from bombardier output on stdin. Prints nothing if the row is absent.
# NB: '%' needs no escaping in an awk ERE; the old '\%' escape is undefined
# behaviour (gawk --posix warns about it).
parse_p99() {
  awk '/99%/{print $2; exit}'
}
| 135 | + |
# Extract the transferred-data figure — 4th field of the "Total data" row —
# from load-tool output on stdin. Prints nothing if the row is absent.
# NOTE(review): assumes the installed bombardier build emits a "Total data"
# row with the size in field 4 — confirm against the tool version in use.
parse_bytes() {
  awk '$0 ~ /Total data/ { print $4; exit }'
}
| 140 | + |
| 141 | +# ---------- main ------------------------------------------------------------- |
# Orchestrate the whole benchmark: dependency check, container startup,
# readiness polling, warmup, one timed bombardier run per server, ranking by
# req/s, summary table, compression-ratio report, and optional teardown.
# Globals read: CONNECTIONS REQUESTS DURATION KEEP, SERVER_NAMES SERVER_URLS
#   ACCEPT_ENCODING SERVER_COUNT, COMPOSE_FILE RESULTS_DIR SCRIPT_DIR, colours.
# Globals written: RPS P50 P99 BYTES SORTED_IDX plus loop scratch vars.
main() {
  check_deps

  mkdir -p "$RESULTS_DIR"

  echo ""
  echo -e "${BOLD}╔════════════════════════════════════════════════════════════════════╗${RESET}"
  echo -e "${BOLD}║ Compression Benchmark Suite ║${RESET}"
  echo -e "${BOLD}╚════════════════════════════════════════════════════════════════════╝${RESET}"
  echo ""

  # Report the run configuration up front so saved terminal logs are
  # self-describing.
  if [ -n "$DURATION" ]; then
    echo -e " ${CYAN}Mode: duration ${DURATION}s${RESET}"
  else
    echo -e " ${CYAN}Mode: ${REQUESTS} requests${RESET}"
  fi
  echo -e " ${CYAN}Connections: ${CONNECTIONS}${RESET}"
  echo -e " ${CYAN}Tool: $(bombardier --version 2>&1 | head -1)${RESET}"
  echo -e " ${CYAN}Date: $(date -u '+%Y-%m-%d %H:%M:%S UTC')${RESET}"
  echo ""

  # ---- start containers -----------------------------------------------------
  # grep filters compose's noisy output to lifecycle lines only; `|| true`
  # keeps set -e/pipefail from aborting when nothing matches.
  echo -e "${BOLD}→ Starting containers...${RESET}"
  docker compose -f "$COMPOSE_FILE" up -d --build 2>&1 | \
    grep -E '(building|built|pulling|pulled|started|created|Built|Started|Created)' || true
  echo ""

  # ---- wait for readiness ---------------------------------------------------
  # Under set -e a wait_for_server timeout (return 1) aborts the whole run.
  echo -e "${BOLD}→ Waiting for servers to be ready...${RESET}"
  i=0
  while [ $i -lt $SERVER_COUNT ]; do
    wait_for_server "${SERVER_NAMES[$i]}" "${SERVER_URLS[$i]}"
    i=$((i + 1))
  done
  echo ""

  # ---- warmup pass ----------------------------------------------------------
  # Prime caches/JITs/connection pools so the measured pass is steady-state.
  echo -e "${BOLD}→ Warming up (10 000 requests each)...${RESET}"
  i=0
  while [ $i -lt $SERVER_COUNT ]; do
    printf " %-22s" "${SERVER_NAMES[$i]}"
    if [ -n "${ACCEPT_ENCODING[$i]}" ]; then
      bombardier -c "$CONNECTIONS" -n 10000 --print i -H "Accept-Encoding: ${ACCEPT_ENCODING[$i]}" "${SERVER_URLS[$i]}" >/dev/null 2>&1
    else
      bombardier -c "$CONNECTIONS" -n 10000 --print i "${SERVER_URLS[$i]}" >/dev/null 2>&1
    fi
    echo -e " ${GREEN}done${RESET}"
    i=$((i + 1))
  done
  echo ""

  # ---- benchmark each server ------------------------------------------------
  echo -e "${BOLD}→ Running compression benchmarks...${RESET}"
  echo ""

  # Parallel indexed result arrays
  RPS=()
  P50=()
  P99=()
  BYTES=()

  i=0
  while [ $i -lt $SERVER_COUNT ]; do
    name="${SERVER_NAMES[$i]}"
    url="${SERVER_URLS[$i]}"
    accept="${ACCEPT_ENCODING[$i]}"
    out_file="${RESULTS_DIR}/compress-${name}.txt"

    echo -e " ${BOLD}[ ${name} ]${RESET} ${url}"
    if [ -n "$accept" ]; then
      echo -e " ${BLUE}Accept-Encoding: ${accept}${RESET}"
    fi
    echo -e " ─────────────────────────────────────────────"

    # tee shows live output on the terminal while keeping a copy for parsing.
    raw=$(run_bombardier "$url" "$accept" | tee "$out_file")

    rps=$(echo "$raw" | parse_rps)
    p50=$(echo "$raw" | parse_p50)
    p99=$(echo "$raw" | parse_p99)
    bytes=$(echo "$raw" | parse_bytes)

    # Fallbacks keep later arithmetic/printing safe when a field is missing.
    RPS[$i]="${rps:-0}"
    P50[$i]="${p50:-N/A}"
    P99[$i]="${p99:-N/A}"
    BYTES[$i]="${bytes:-0}"

    echo ""
    i=$((i + 1))
  done

  # ---- rank by req/s (simple insertion sort, bash 3 compatible) -------------
  # Build a sorted index array (descending by RPS)
  SORTED_IDX=()
  i=0
  while [ $i -lt $SERVER_COUNT ]; do
    SORTED_IDX[$i]=$i
    i=$((i + 1))
  done
  n=${#SORTED_IDX[@]}
  i=1
  while [ $i -lt $n ]; do
    key_idx=${SORTED_IDX[$i]}
    key_rps=${RPS[$key_idx]}
    j=$((i - 1))
    while [ $j -ge 0 ]; do
      cmp_idx=${SORTED_IDX[$j]}
      cmp_rps=${RPS[$cmp_idx]}
      # Compare floats via awk
      # (values are interpolated into the awk program text — assumed to be
      # plain numerics from bombardier output; 2>/dev/null hides awk syntax
      # errors if a value is ever malformed, making the compare return false)
      if awk "BEGIN{exit !($cmp_rps < $key_rps)}" 2>/dev/null; then
        SORTED_IDX[$((j + 1))]=${SORTED_IDX[$j]}
        j=$((j - 1))
      else
        break
      fi
    done
    SORTED_IDX[$((j + 1))]=$key_idx
    i=$((i + 1))
  done

  echo -e "${BOLD}╔══════════════════════════════════════════════════════════════════════════════════════════╗${RESET}"
  echo -e "${BOLD}║ Results Summary ║${RESET}"
  echo -e "${BOLD}╠══════════════════════════════════════════════════════════════════════════════════════════╣${RESET}"
  printf "${BOLD}║ %-4s %-22s %10s %10s %10s %12s ║${RESET}\n" \
    "#" "Server" "Req/sec" "p50 lat" "p99 lat" "Transferred"
  echo -e "${BOLD}╠══════════════════════════════════════════════════════════════════════════════════════════╣${RESET}"

  # Print one row per server in rank order, colouring the top three.
  rank=1
  for idx in "${SORTED_IDX[@]}"; do
    name="${SERVER_NAMES[$idx]}"
    rps="${RPS[$idx]}"
    p50="${P50[$idx]}"
    p99="${P99[$idx]}"
    bytes="${BYTES[$idx]}"

    if [ "$rank" -eq 1 ]; then
      colour="$GREEN"; medal="1st"
    elif [ "$rank" -eq 2 ]; then
      colour="$YELLOW"; medal="2nd"
    elif [ "$rank" -eq 3 ]; then
      colour="$YELLOW"; medal="3rd"
    else
      colour="$RESET"; medal="${rank}th"
    fi

    printf "${colour}║ %-4s %-22s %10s %10s %10s %12s ║${RESET}\n" \
      "$medal" "$name" "$rps" "$p50" "$p99" "$bytes"
    rank=$((rank + 1))
  done

  echo -e "${BOLD}╚══════════════════════════════════════════════════════════════════════════════════════════╝${RESET}"
  echo ""
  echo -e " Full results saved to: ${CYAN}${RESULTS_DIR}/compress-*.txt${RESET}"
  echo ""

  # ---- compression ratio summary --------------------------------------------
  echo -e "${BOLD}→ Compression effectiveness:${RESET}"
  echo ""

  # Get uncompressed file size
  # (BSD stat -f%z first, GNU stat -c%s fallback, "0" if both fail)
  if [ -f "${SCRIPT_DIR}/../public/index.html" ]; then
    uncompressed_size=$(stat -f%z "${SCRIPT_DIR}/../public/index.html" 2>/dev/null || stat -c%s "${SCRIPT_DIR}/../public/index.html" 2>/dev/null || echo "0")
    echo -e " ${CYAN}Uncompressed: ${uncompressed_size} bytes${RESET}"

    for ext in gz br zst; do
      if [ -f "${SCRIPT_DIR}/../public/index.html.${ext}" ]; then
        compressed_size=$(stat -f%z "${SCRIPT_DIR}/../public/index.html.${ext}" 2>/dev/null || stat -c%s "${SCRIPT_DIR}/../public/index.html.${ext}" 2>/dev/null || echo "0")
        # Percentage saved relative to the uncompressed size, 1 decimal place.
        ratio=$(awk "BEGIN {printf \"%.1f\", ($uncompressed_size - $compressed_size) / $uncompressed_size * 100}")
        echo -e " ${CYAN}.${ext} compressed: ${compressed_size} bytes (${ratio}% reduction)${RESET}"
      fi
    done
  fi
  echo ""

  # ---- teardown -------------------------------------------------------------
  if [ "$KEEP" = "false" ]; then
    echo -e "${BOLD}→ Tearing down containers...${RESET}"
    docker compose -f "$COMPOSE_FILE" down --remove-orphans 2>&1 | \
      grep -E '(Stopped|Removed|Removing|error)' || true
    echo ""
  else
    echo -e " ${YELLOW}Containers left running (-k flag). Stop with:${RESET}"
    echo -e " docker compose -f benchmark/docker-compose.compression.yml down"
    echo ""
  fi
}
| 327 | + |
| 328 | +main "$@" |
0 commit comments