Skip to content

Commit d2f6358

Browse files
committed
Changes from testing across environments
1 parent 14d25f9 commit d2f6358

2 files changed

Lines changed: 93 additions & 56 deletions

File tree

cache/__tests__/cache-metrics-worst-case.sh

Lines changed: 43 additions & 24 deletions
Original file line numberDiff line numberDiff line change
@@ -102,6 +102,18 @@ log_warning() {
102102
echo -e "${YELLOW}[WARN]${NC} $1"
103103
}
104104

105+
# Print an overhead measurement with a color cue: green when the measured
# overhead is zero or negative (no cost), yellow when positive.
# Both branches report [PASS] — positive overhead is expected in a
# worst-case test; yellow merely flags it for human attention.
# NOTE(review): confirm yellow "[PASS]" (rather than "[WARN]") is intentional.
# Globals:   GREEN, YELLOW, NC (color escape codes, read)
# Arguments: $1 - overhead in ms (integer; may be negative)
#            $* - remaining arguments form the log message
# Outputs:   one colorized line to stdout
log_overhead() {
    local overhead=$1
    shift # Remove first argument, rest is the message
    local message="$*" # SC2124: "$*" (not "$@") when assigning to a scalar

    # Quote the expansion so an empty/whitespace argument fails loudly in
    # the test rather than producing a malformed [ ] expression (SC2086).
    if [ "$overhead" -le 0 ]; then
        echo -e "${GREEN}[PASS]${NC} $message"
    else
        echo -e "${YELLOW}[PASS]${NC} $message"
    fi
}
116+
105117
# Check server connectivity
106118
check_server() {
107119
log_info "Checking server connectivity at ${BASE_URL}..."
@@ -225,6 +237,7 @@ fill_cache() {
225237
log_info "Filling cache to $target_size entries with diverse query patterns..."
226238

227239
# Strategy: Use parallel requests for much faster cache filling
240+
# Create truly unique queries by varying the query content itself
228241
# Process in batches of 100 parallel requests (good balance of speed vs server load)
229242
local batch_size=100
230243
local completed=0
@@ -240,18 +253,20 @@ fill_cache() {
240253
(
241254
local pattern=$((count % 3))
242255

256+
# Create truly unique cache entries by varying query parameters
257+
# Use unique type values so each creates a distinct cache key
243258
if [ $pattern -eq 0 ]; then
244259
curl -s -X POST "${API_BASE}/api/query" \
245260
-H "Content-Type: application/json" \
246-
-d "{\"type\":\"PerfTest\",\"limit\":10,\"skip\":$count}" > /dev/null 2>&1
261+
-d "{\"type\":\"WorstCaseFill_$count\",\"limit\":100}" > /dev/null 2>&1
247262
elif [ $pattern -eq 1 ]; then
248-
curl -s -X POST "${API_BASE}/api/query" \
263+
curl -s -X POST "${API_BASE}/api/search" \
249264
-H "Content-Type: application/json" \
250-
-d "{\"type\":\"Annotation\",\"limit\":10,\"skip\":$count}" > /dev/null 2>&1
265+
-d "{\"searchText\":\"worst_case_$count\",\"limit\":100}" > /dev/null 2>&1
251266
else
252-
curl -s -X POST "${API_BASE}/api/query" \
267+
curl -s -X POST "${API_BASE}/api/search/phrase" \
253268
-H "Content-Type: application/json" \
254-
-d "{\"limit\":10,\"skip\":$count}" > /dev/null 2>&1
269+
-d "{\"searchText\":\"worst fill $count\",\"limit\":100}" > /dev/null 2>&1
255270
fi
256271
) &
257272
done
@@ -274,13 +289,17 @@ fill_cache() {
274289
echo "[INFO] Cache stats - Actual size: ${final_size}, Max allowed: ${max_length}, Target: ${target_size}"
275290

276291
if [ "$final_size" -lt "$target_size" ] && [ "$final_size" -eq "$max_length" ]; then
277-
log_warning "Cache is full at max capacity (${max_length}). Unable to fill to ${target_size} entries."
278-
log_warning "To test with ${target_size} entries, set CACHE_MAX_LENGTH=${target_size} in .env and restart server."
292+
log_failure "Cache is full at max capacity (${max_length}) but target was ${target_size}"
293+
log_info "To test with ${target_size} entries, set CACHE_MAX_LENGTH=${target_size} in .env and restart server."
294+
exit 1
279295
elif [ "$final_size" -lt "$target_size" ]; then
280-
log_warning "Cache size (${final_size}) is less than target (${target_size})"
296+
log_failure "Cache size (${final_size}) is less than target (${target_size})"
297+
log_info "This may indicate TTL expiration, cache eviction, or non-unique queries."
298+
log_info "Current CACHE_TTL: $(echo "$final_stats" | jq -r '.ttl' 2>/dev/null || echo 'unknown')ms"
299+
exit 1
281300
fi
282301

283-
log_success "Cache filled to ${final_size} entries (~33% matching test type)"
302+
log_success "Cache filled to ${final_size} entries (non-matching for worst case testing)"
284303
}
285304

286305
# Warm up the system (JIT compilation, connection pools, OS caches)
@@ -610,11 +629,11 @@ test_create_endpoint() {
610629
local overhead=$((full_avg - empty_avg))
611630
local overhead_pct=$((overhead * 100 / empty_avg))
612631
if [ $overhead -gt 0 ]; then
613-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) per operation"
632+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) per operation"
614633
log_info " Empty cache: ${empty_avg}ms avg, ${empty_median}ms median"
615634
log_info " Full cache: ${full_avg}ms avg, ${full_median}ms median"
616635
else
617-
log_info "No measurable overhead"
636+
log_overhead 0 "No measurable overhead"
618637
fi
619638
fi
620639
}
@@ -730,7 +749,7 @@ test_update_endpoint() {
730749

731750
local overhead=$((full_avg - empty_avg))
732751
local overhead_pct=$((overhead * 100 / empty_avg))
733-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
752+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
734753
log_info " Empty cache: ${empty_avg}ms avg, ${empty_median}ms median"
735754
log_info " Full cache: ${full_avg}ms avg, ${full_median}ms median"
736755
}
@@ -859,7 +878,7 @@ test_delete_endpoint() {
859878

860879
local overhead=$((full_avg - empty_avg))
861880
local overhead_pct=$((overhead * 100 / empty_avg))
862-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
881+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
863882
log_info " Empty cache: ${empty_avg}ms avg, ${empty_median}ms median (deleted: $empty_success)"
864883
log_info " Full cache: ${full_avg}ms avg, ${full_median}ms median (deleted: $full_success)"
865884
}
@@ -1059,7 +1078,7 @@ test_patch_endpoint() {
10591078

10601079
local overhead=$((full_avg - empty_avg))
10611080
local overhead_pct=$((overhead * 100 / empty_avg))
1062-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
1081+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
10631082
log_info " Empty cache: ${empty_avg}ms avg, ${empty_median}ms median"
10641083
log_info " Full cache: ${full_avg}ms avg, ${full_median}ms median"
10651084
}
@@ -1163,7 +1182,7 @@ test_set_endpoint() {
11631182

11641183
local overhead=$((full_avg - empty_avg))
11651184
local overhead_pct=$((overhead * 100 / empty_avg))
1166-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
1185+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
11671186
log_info " Empty cache: ${empty_avg}ms avg, ${empty_median}ms median"
11681187
log_info " Full cache: ${full_avg}ms avg, ${full_median}ms median"
11691188
}
@@ -1283,7 +1302,7 @@ test_unset_endpoint() {
12831302

12841303
local overhead=$((full_avg - empty_avg))
12851304
local overhead_pct=$((overhead * 100 / empty_avg))
1286-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
1305+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
12871306
log_info " Empty cache: ${empty_avg}ms avg, ${empty_median}ms median"
12881307
log_info " Full cache: ${full_avg}ms avg, ${full_median}ms median"
12891308
}
@@ -1387,7 +1406,7 @@ test_overwrite_endpoint() {
13871406

13881407
local overhead=$((full_avg - empty_avg))
13891408
local overhead_pct=$((overhead * 100 / empty_avg))
1390-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
1409+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%)"
13911410
log_info " Empty cache: ${empty_avg}ms avg, ${empty_median}ms median"
13921411
log_info " Full cache: ${full_avg}ms avg, ${full_median}ms median"
13931412
}
@@ -1793,7 +1812,7 @@ test_create_endpoint_full() {
17931812

17941813
# WORST-CASE TEST: Always show actual overhead (including negative)
17951814
# Negative values indicate DB variance, not cache efficiency
1796-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) [Empty: ${empty_avg}ms → Full: ${full_avg}ms]"
1815+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) [Empty: ${empty_avg}ms → Full: ${full_avg}ms]"
17971816
if [ $overhead -lt 0 ]; then
17981817
log_info " ⚠️ Negative overhead due to DB performance variance between runs"
17991818
fi
@@ -1923,7 +1942,7 @@ test_update_endpoint_full() {
19231942
local overhead_pct=$((overhead * 100 / empty_avg))
19241943

19251944
# WORST-CASE TEST: Always show actual overhead (including negative)
1926-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) [Empty: ${empty_avg}ms → Full: ${full_avg}ms]"
1945+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) [Empty: ${empty_avg}ms → Full: ${full_avg}ms]"
19271946
if [ $overhead -lt 0 ]; then
19281947
log_info " ⚠️ Negative overhead due to DB performance variance between runs"
19291948
fi
@@ -1997,7 +2016,7 @@ test_patch_endpoint_full() {
19972016
local overhead_pct=$((overhead * 100 / empty))
19982017

19992018
# WORST-CASE TEST: Always show actual overhead (including negative)
2000-
log_info "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) [Empty: ${empty}ms → Full: ${avg}ms]"
2019+
log_overhead $overhead "Cache invalidation overhead: ${overhead}ms (${overhead_pct}%) [Empty: ${empty}ms → Full: ${avg}ms]"
20012020
if [ $overhead -lt 0 ]; then
20022021
log_info " ⚠️ Negative overhead due to DB performance variance between runs"
20032022
fi
@@ -2057,7 +2076,7 @@ test_set_endpoint_full() {
20572076
local full=${ENDPOINT_WARM_TIMES["set"]}
20582077

20592078
# WORST-CASE TEST: Always show actual overhead (including negative)
2060-
log_info "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms]"
2079+
log_overhead $overhead "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms]"
20612080
if [ $overhead -lt 0 ]; then
20622081
log_info " ⚠️ Negative overhead due to DB performance variance between runs"
20632082
fi
@@ -2119,7 +2138,7 @@ test_unset_endpoint_full() {
21192138
local full=${ENDPOINT_WARM_TIMES["unset"]}
21202139

21212140
# WORST-CASE TEST: Always show actual overhead (including negative)
2122-
log_info "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms]"
2141+
log_overhead $overhead "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms]"
21232142
if [ $overhead -lt 0 ]; then
21242143
log_info " ⚠️ Negative overhead due to DB performance variance between runs"
21252144
fi
@@ -2179,7 +2198,7 @@ test_overwrite_endpoint_full() {
21792198
local full=${ENDPOINT_WARM_TIMES["overwrite"]}
21802199

21812200
# WORST-CASE TEST: Always show actual overhead (including negative)
2182-
log_info "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms]"
2201+
log_overhead $overhead "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms]"
21832202
if [ $overhead -lt 0 ]; then
21842203
log_info " ⚠️ Negative overhead due to DB performance variance between runs"
21852204
fi
@@ -2259,7 +2278,7 @@ test_delete_endpoint_full() {
22592278
local full=${ENDPOINT_WARM_TIMES["delete"]}
22602279

22612280
# WORST-CASE TEST: Always show actual overhead (including negative)
2262-
log_info "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms] (deleted: $success)"
2281+
log_overhead $overhead "Overhead: ${overhead}ms [Empty: ${empty}ms → Full: ${full}ms] (deleted: $success)"
22632282
if [ $overhead -lt 0 ]; then
22642283
log_info " ⚠️ Negative overhead due to DB performance variance between runs"
22652284
fi

0 commit comments

Comments
 (0)