Skip to content

Commit ef67fde

Browse files
committed
Redo for the clustering
1 parent 0c7eba9 commit ef67fde

11 files changed

Lines changed: 1137 additions & 1674 deletions

cache/__tests__/cache-limits.test.js

Lines changed: 252 additions & 297 deletions
Large diffs are not rendered by default.

cache/__tests__/cache-metrics-worst-case.sh

Lines changed: 3 additions & 19 deletions
Original file line numberDiff line numberDiff line change
@@ -3,60 +3,44 @@
33
################################################################################
44
# RERUM Cache WORST-CASE Scenario Performance Test
55
#
6-
# Tests the absolute worst-case scenario for cache performance:
7-
# - Read operations: Query for data NOT in cache (cache miss, full scan)
8-
# - Write operations: Invalidate data NOT matching cache (full scan, no invalidations)
9-
#
10-
# This measures maximum overhead when cache provides NO benefit.
6+
# Tests worst-case cache performance (cache misses, full scans, no invalidations)
7+
# Measures maximum overhead when cache provides NO benefit
118
#
129
# Produces: /cache/docs/CACHE_METRICS_WORST_CASE_REPORT.md
1310
#
1411
# Author: thehabes
1512
# Date: October 23, 2025
1613
################################################################################
1714

18-
# Exit on error (disabled for better error reporting)
19-
# set -e
20-
21-
# Configuration
2215
BASE_URL="${BASE_URL:-http://localhost:3001}"
2316
API_BASE="${BASE_URL}/v1"
24-
# Auth token will be prompted from user
2517
AUTH_TOKEN=""
2618

27-
# Test configuration
2819
CACHE_FILL_SIZE=1000
2920
WARMUP_ITERATIONS=20
3021
NUM_WRITE_TESTS=100
3122

32-
# Colors for output
3323
RED='\033[0;31m'
3424
GREEN='\033[0;32m'
3525
YELLOW='\033[1;33m'
3626
BLUE='\033[0;34m'
3727
CYAN='\033[0;36m'
3828
MAGENTA='\033[0;35m'
39-
NC='\033[0m' # No Color
29+
NC='\033[0m'
4030

41-
# Test counters
4231
TOTAL_TESTS=0
4332
PASSED_TESTS=0
4433
FAILED_TESTS=0
4534
SKIPPED_TESTS=0
4635

47-
# Performance tracking arrays
4836
declare -A ENDPOINT_COLD_TIMES
4937
declare -A ENDPOINT_WARM_TIMES
5038
declare -A ENDPOINT_STATUS
5139
declare -A ENDPOINT_DESCRIPTIONS
5240

53-
# Array to store created object IDs for cleanup
5441
declare -a CREATED_IDS=()
55-
56-
# Associative array to store full created objects (to avoid unnecessary GET requests)
5742
declare -A CREATED_OBJECTS
5843

59-
# Report file - go up to repo root first
6044
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
6145
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
6246
REPORT_FILE="$REPO_ROOT/cache/docs/CACHE_METRICS_WORST_CASE_REPORT.md"

cache/__tests__/cache-metrics.sh

Lines changed: 39 additions & 40 deletions
Original file line numberDiff line numberDiff line change
@@ -3,59 +3,43 @@
33
################################################################################
44
# RERUM Cache Comprehensive Metrics & Functionality Test
55
#
6-
# Combines:
7-
# - Integration testing (endpoint functionality with cache)
8-
# - Performance testing (read/write speed with/without cache)
9-
# - Limit enforcement testing (cache boundaries)
10-
#
6+
# Combines integration, performance, and limit enforcement testing
117
# Produces: /cache/docs/CACHE_METRICS_REPORT.md
128
#
139
# Author: thehabes
1410
# Date: October 22, 2025
1511
################################################################################
1612

17-
# Exit on error (disabled for better error reporting)
18-
# set -e
19-
2013
# Configuration
2114
BASE_URL="${BASE_URL:-http://localhost:3001}"
2215
API_BASE="${BASE_URL}/v1"
23-
# Auth token will be prompted from user
2416
AUTH_TOKEN=""
2517

26-
# Test configuration
2718
CACHE_FILL_SIZE=1000
2819
WARMUP_ITERATIONS=20
2920
NUM_WRITE_TESTS=100
3021

31-
# Colors for output
3222
RED='\033[0;31m'
3323
GREEN='\033[0;32m'
3424
YELLOW='\033[1;33m'
3525
BLUE='\033[0;34m'
3626
CYAN='\033[0;36m'
3727
MAGENTA='\033[0;35m'
38-
NC='\033[0m' # No Color
28+
NC='\033[0m'
3929

40-
# Test counters
4130
TOTAL_TESTS=0
4231
PASSED_TESTS=0
4332
FAILED_TESTS=0
4433
SKIPPED_TESTS=0
4534

46-
# Performance tracking arrays
4735
declare -A ENDPOINT_COLD_TIMES
4836
declare -A ENDPOINT_WARM_TIMES
4937
declare -A ENDPOINT_STATUS
5038
declare -A ENDPOINT_DESCRIPTIONS
5139

52-
# Array to store created object IDs for cleanup
5340
declare -a CREATED_IDS=()
54-
55-
# Associative array to store full created objects (to avoid unnecessary GET requests)
5641
declare -A CREATED_OBJECTS
5742

58-
# Report file - go up to repo root first
5943
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
6044
REPO_ROOT="$(cd "$SCRIPT_DIR/../.." && pwd)"
6145
REPORT_FILE="$REPO_ROOT/cache/docs/CACHE_METRICS_REPORT.md"
@@ -116,7 +100,6 @@ log_overhead() {
116100
fi
117101
}
118102

119-
# Check server connectivity
120103
check_server() {
121104
log_info "Checking server connectivity at ${BASE_URL}..."
122105
if ! curl -s -f "${BASE_URL}" > /dev/null 2>&1; then
@@ -127,7 +110,6 @@ check_server() {
127110
log_success "Server is running at ${BASE_URL}"
128111
}
129112

130-
# Get bearer token from user
131113
get_auth_token() {
132114
log_header "Authentication Setup"
133115

@@ -150,17 +132,14 @@ get_auth_token() {
150132
exit 1
151133
fi
152134

153-
# Validate JWT format (3 parts separated by dots)
154135
log_info "Validating token..."
155136
if ! echo "$AUTH_TOKEN" | grep -qE '^[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+\.[A-Za-z0-9_-]+$'; then
156137
echo -e "${RED}ERROR: Token is not a valid JWT format${NC}"
157138
echo "Expected format: header.payload.signature"
158139
exit 1
159140
fi
160141

161-
# Extract and decode payload (second part of JWT)
162142
local payload=$(echo "$AUTH_TOKEN" | cut -d. -f2)
163-
# Add padding if needed for base64 decoding
164143
local padded_payload="${payload}$(printf '%*s' $((4 - ${#payload} % 4)) '' | tr ' ' '=')"
165144
local decoded_payload=$(echo "$padded_payload" | base64 -d 2>/dev/null)
166145

@@ -169,7 +148,6 @@ get_auth_token() {
169148
exit 1
170149
fi
171150

172-
# Extract expiration time (exp field in seconds since epoch)
173151
local exp=$(echo "$decoded_payload" | grep -o '"exp":[0-9]*' | cut -d: -f2)
174152

175153
if [ -z "$exp" ]; then
@@ -192,14 +170,13 @@ get_auth_token() {
192170
fi
193171
}
194172

195-
# Measure endpoint performance
196173
measure_endpoint() {
197174
local endpoint=$1
198175
local method=$2
199176
local data=$3
200177
local description=$4
201178
local needs_auth=${5:-false}
202-
local timeout=${6:-30} # Allow custom timeout, default 30 seconds
179+
local timeout=${6:-30}
203180

204181
local start=$(date +%s%3N)
205182
if [ "$needs_auth" == "true" ]; then
@@ -246,8 +223,8 @@ clear_cache() {
246223
while [ $attempt -le $max_attempts ]; do
247224
curl -s -X POST "${API_BASE}/api/cache/clear" > /dev/null 2>&1
248225

249-
# Wait for cache clear to complete and stabilize
250-
sleep 2
226+
# Wait longer for cache clear to complete and stats sync to stabilize (5s interval)
227+
sleep 6
251228

252229
# Sanity check: Verify cache is actually empty
253230
local stats=$(get_cache_stats)
@@ -278,7 +255,7 @@ fill_cache() {
278255

279256
# Strategy: Use parallel requests for faster cache filling
280257
# Requests are issued in batches with short delays to avoid overwhelming the server
281-
local batch_size=20 # Reduced from 100 to prevent connection exhaustion
258+
local batch_size=100 # Parallel requests per batch when filling the cache
282259
local completed=0
283260
local successful_requests=0
284261
local failed_requests=0
@@ -615,14 +592,15 @@ test_query_endpoint_cold() {
615592
ENDPOINT_DESCRIPTIONS["query"]="Query database with filters"
616593

617594
log_info "Testing query with cold cache..."
618-
local result=$(measure_endpoint "${API_BASE}/api/query" "POST" '{"type":"Annotation","limit":5}' "Query for Annotations")
595+
# Use the same query that will be cached in Phase 3 and tested in Phase 4
596+
local result=$(measure_endpoint "${API_BASE}/api/query" "POST" '{"type":"CreatePerfTest"}' "Query for CreatePerfTest")
619597
local cold_time=$(echo "$result" | cut -d'|' -f1)
620598
local cold_code=$(echo "$result" | cut -d'|' -f2)
621599

622600
ENDPOINT_COLD_TIMES["query"]=$cold_time
623601

624602
if [ "$cold_code" == "200" ]; then
625-
log_success "Query endpoint functional"
603+
log_success "Query endpoint functional (${cold_time}ms)"
626604
ENDPOINT_STATUS["query"]="✅ Functional"
627605
else
628606
log_failure "Query endpoint failed (HTTP $cold_code)"
@@ -659,16 +637,16 @@ test_search_endpoint() {
659637

660638
clear_cache
661639

662-
# Test search functionality
640+
# Test search functionality with the same query that will be cached in Phase 3 and tested in Phase 4
663641
log_info "Testing search with cold cache..."
664-
local result=$(measure_endpoint "${API_BASE}/api/search" "POST" '{"searchText":"annotation","limit":5}' "Search for 'annotation'")
642+
local result=$(measure_endpoint "${API_BASE}/api/search" "POST" '{"searchText":"annotation"}' "Search for 'annotation'")
665643
local cold_time=$(echo "$result" | cut -d'|' -f1)
666644
local cold_code=$(echo "$result" | cut -d'|' -f2)
667645

668646
ENDPOINT_COLD_TIMES["search"]=$cold_time
669647

670648
if [ "$cold_code" == "200" ]; then
671-
log_success "Search endpoint functional"
649+
log_success "Search endpoint functional (${cold_time}ms)"
672650
ENDPOINT_STATUS["search"]="✅ Functional"
673651
elif [ "$cold_code" == "501" ]; then
674652
log_skip "Search endpoint not implemented or requires MongoDB Atlas Search indexes"
@@ -944,16 +922,16 @@ test_search_phrase_endpoint() {
944922

945923
clear_cache
946924

947-
# Test search phrase functionality
925+
# Test search phrase functionality with the same query that will be cached in Phase 3 and tested in Phase 4
948926
log_info "Testing search phrase with cold cache..."
949-
local result=$(measure_endpoint "${API_BASE}/api/search/phrase" "POST" '{"searchText":"test phrase","limit":5}' "Phrase search")
927+
local result=$(measure_endpoint "${API_BASE}/api/search/phrase" "POST" '{"searchText":"test annotation"}' "Phrase search")
950928
local cold_time=$(echo "$result" | cut -d'|' -f1)
951929
local cold_code=$(echo "$result" | cut -d'|' -f2)
952930

953931
ENDPOINT_COLD_TIMES["searchPhrase"]=$cold_time
954932

955933
if [ "$cold_code" == "200" ]; then
956-
log_success "Search phrase endpoint functional"
934+
log_success "Search phrase endpoint functional (${cold_time}ms)"
957935
ENDPOINT_STATUS["searchPhrase"]="✅ Functional"
958936
elif [ "$cold_code" == "501" ]; then
959937
log_skip "Search phrase endpoint not implemented or requires MongoDB Atlas Search indexes"
@@ -1989,15 +1967,36 @@ main() {
19891967
# IMPORTANT: Queries must match cache fill patterns (default limit=100, skip=0) to get cache hits
19901968
log_info "Testing /api/query with full cache..."
19911969
local result=$(measure_endpoint "${API_BASE}/api/query" "POST" '{"type":"CreatePerfTest"}' "Query with full cache")
1992-
log_success "Query with full cache"
1970+
local warm_time=$(echo "$result" | cut -d'|' -f1)
1971+
local warm_code=$(echo "$result" | cut -d'|' -f2)
1972+
ENDPOINT_WARM_TIMES["query"]=$warm_time
1973+
if [ "$warm_code" == "200" ]; then
1974+
log_success "Query with full cache (${warm_time}ms)"
1975+
else
1976+
log_warning "Query failed with code $warm_code"
1977+
fi
19931978

19941979
log_info "Testing /api/search with full cache..."
19951980
result=$(measure_endpoint "${API_BASE}/api/search" "POST" '{"searchText":"annotation"}' "Search with full cache")
1996-
log_success "Search with full cache"
1981+
warm_time=$(echo "$result" | cut -d'|' -f1)
1982+
warm_code=$(echo "$result" | cut -d'|' -f2)
1983+
ENDPOINT_WARM_TIMES["search"]=$warm_time
1984+
if [ "$warm_code" == "200" ]; then
1985+
log_success "Search with full cache (${warm_time}ms)"
1986+
else
1987+
log_warning "Search failed with code $warm_code"
1988+
fi
19971989

19981990
log_info "Testing /api/search/phrase with full cache..."
19991991
result=$(measure_endpoint "${API_BASE}/api/search/phrase" "POST" '{"searchText":"test annotation"}' "Search phrase with full cache")
2000-
log_success "Search phrase with full cache"
1992+
warm_time=$(echo "$result" | cut -d'|' -f1)
1993+
warm_code=$(echo "$result" | cut -d'|' -f2)
1994+
ENDPOINT_WARM_TIMES["searchPhrase"]=$warm_time
1995+
if [ "$warm_code" == "200" ]; then
1996+
log_success "Search phrase with full cache (${warm_time}ms)"
1997+
else
1998+
log_warning "Search phrase failed with code $warm_code"
1999+
fi
20012000

20022001
# For ID, history, since - use objects created in Phase 1/2 if available
20032002
# Use object index 100+ to avoid objects that will be deleted by DELETE tests (indices 0-99)

0 commit comments

Comments
 (0)