|
| 1 | +/** |
| 2 | + * Cache limit enforcement tests |
| 3 | + * Verifies that the cache properly enforces maxLength and maxBytes limits |
| 4 | + * @author thehabes |
| 5 | + */ |
| 6 | + |
| 7 | +import { jest } from '@jest/globals' |
| 8 | +import cache from '../index.js' |
| 9 | + |
/**
 * Configure the shared singleton cache with explicit limits for one test run.
 * Clears any existing entries and zeroes every stat counter so assertions
 * only observe activity produced by the current test.
 *
 * @param {number} maxLength - maximum number of entries the cache may hold
 * @param {number} maxBytes - maximum serialized size of the cache, in bytes
 * @param {number} [ttl=300000] - entry time-to-live in milliseconds
 * @returns {object} the configured singleton cache
 */
function setupTestCache(maxLength, maxBytes, ttl = 300000) {
  cache.clear()
  Object.assign(cache, { maxLength, maxBytes, ttl })
  // Fresh counters: hits/misses/evictions/sets/invalidations all start at zero
  cache.stats = { hits: 0, misses: 0, evictions: 0, sets: 0, invalidations: 0 }
  return cache
}
| 29 | + |
/**
 * Restore the singleton cache to its production defaults (environment
 * variables with documented fallbacks) and reset all stat counters.
 * Invoked from afterEach so one test's altered limits never leak into another.
 */
function restoreDefaultCache() {
  cache.clear()
  // Env vars are strings; defaults are given as strings so Number.parseInt
  // always receives a string. Explicit radix 10 avoids legacy-octal surprises.
  cache.maxLength = Number.parseInt(process.env.CACHE_MAX_LENGTH ?? '1000', 10)
  cache.maxBytes = Number.parseInt(process.env.CACHE_MAX_BYTES ?? '1000000000', 10)
  cache.ttl = Number.parseInt(process.env.CACHE_TTL ?? '300000', 10)
  cache.stats = { hits: 0, misses: 0, evictions: 0, sets: 0, invalidations: 0 }
}
| 46 | + |
describe('Cache Length Limit Enforcement', () => {
  let testCache

  beforeEach(() => {
    testCache = setupTestCache(10, 1000000000, 300000)
  })

  afterEach(() => {
    restoreDefaultCache()
  })

  it('should not exceed maxLength when adding entries', () => {
    const maxLength = 10

    // Insert twice as many entries as the cache is allowed to hold
    for (let n = 0; n < 20; n++) {
      testCache.set(testCache.generateKey('id', `test${n}`), { data: `value${n}` })
    }

    // The cache must sit exactly at its configured ceiling, never above it
    expect(testCache.cache.size).toBeLessThanOrEqual(maxLength)
    expect(testCache.cache.size).toBe(maxLength)

    // Half of the inserts displaced an older entry
    expect(testCache.stats.evictions).toBe(10)
  })

  it('should evict least recently used entries when limit is reached', () => {
    testCache = setupTestCache(5, 1000000000, 300000)

    // Fill the cache exactly to capacity
    for (let n = 0; n < 5; n++) {
      testCache.set(testCache.generateKey('id', `test${n}`), { data: `value${n}` })
    }
    expect(testCache.cache.size).toBe(5)

    // One more insert should push out test0, the oldest entry
    const newestKey = testCache.generateKey('id', 'test5')
    testCache.set(newestKey, { data: 'value5' })
    expect(testCache.cache.size).toBe(5)

    // test0 was the least recently used, so it must be gone
    const oldestKey = testCache.generateKey('id', 'test0')
    expect(testCache.get(oldestKey)).toBeNull()

    // The newest entry must be retrievable
    expect(testCache.get(newestKey)).toEqual({ data: 'value5' })
  })

  it('should maintain LRU order when accessing entries', () => {
    testCache = setupTestCache(3, 1000000000, 300000)

    const keyA = testCache.generateKey('id', 'test1')
    const keyB = testCache.generateKey('id', 'test2')
    const keyC = testCache.generateKey('id', 'test3')

    testCache.set(keyA, { data: 'value1' })
    testCache.set(keyB, { data: 'value2' })
    testCache.set(keyC, { data: 'value3' })

    // Touch keyA so it becomes the most recently used entry
    testCache.get(keyA)

    // Inserting a fourth entry should evict keyB, now the oldest untouched
    const keyD = testCache.generateKey('id', 'test4')
    testCache.set(keyD, { data: 'value4' })

    // keyB evicted; keyA survived because it was read recently
    expect(testCache.get(keyB)).toBeNull()
    expect(testCache.get(keyA)).toEqual({ data: 'value1' })

    // keyC and keyD both remain
    expect(testCache.get(keyC)).toEqual({ data: 'value3' })
    expect(testCache.get(keyD)).toEqual({ data: 'value4' })
  })
})
| 132 | + |
describe('Cache Size (Bytes) Limit Enforcement', () => {
  let testCache

  beforeEach(() => {
    testCache = setupTestCache(1000, 500, 300000) // 500 bytes limit
  })

  afterEach(() => {
    restoreDefaultCache()
  })

  it('should not exceed maxBytes when adding entries', () => {
    // Each entry is roughly 50-60 bytes when serialized, so 20 of them
    // comfortably overflow the 500-byte budget
    const largeValue = { data: 'x'.repeat(50) }

    for (let i = 0; i < 20; i++) {
      const key = testCache.generateKey('id', `test${i}`)
      testCache.set(key, largeValue)
    }

    // BUG FIX: JSON.stringify on a Map always yields "{}" (2 bytes), so the
    // old Buffer.byteLength(JSON.stringify(testCache.cache)) check could never
    // fail. Assert against the cache's own byte accounting instead.
    expect(testCache.getStats().bytes).toBeLessThanOrEqual(500)

    // Overflow must have forced evictions
    expect(testCache.stats.evictions).toBeGreaterThan(0)
  })

  it('should evict multiple entries if needed to stay under byte limit', () => {
    testCache = setupTestCache(1000, 200, 300000) // Very small limit

    // Add a few small entries
    for (let i = 0; i < 3; i++) {
      const key = testCache.generateKey('id', `small${i}`)
      testCache.set(key, { data: 'tiny' })
    }

    const initialSize = testCache.cache.size
    expect(initialSize).toBeGreaterThan(0)

    // Add a large entry that will force multiple evictions
    const largeKey = testCache.generateKey('id', 'large')
    const largeValue = { data: 'x'.repeat(100) }
    testCache.set(largeKey, largeValue)

    // Enough entries must have been evicted to stay within the byte budget
    // (measured via the cache's own ledger — see BUG FIX note above)
    expect(testCache.getStats().bytes).toBeLessThanOrEqual(200)
  })

  it('should handle byte limit with realistic cache entries', () => {
    testCache = setupTestCache(1000, 5000, 300000) // 5KB limit

    // Simulate realistic query cache entries
    const sampleQuery = {
      type: 'Annotation',
      body: {
        value: 'Sample annotation text',
        format: 'text/plain'
      }
    }

    const sampleResults = Array.from({ length: 10 }, (_, i) => ({
      '@id': `http://example.org/annotation/${i}`,
      '@type': 'Annotation',
      body: {
        value: `Annotation content ${i}`,
        format: 'text/plain'
      },
      target: `http://example.org/target/${i}`
    }))

    // Add multiple query results under distinct page keys
    for (let i = 0; i < 10; i++) {
      const key = testCache.generateKey('query', { ...sampleQuery, page: i })
      testCache.set(key, sampleResults)
    }

    // Byte limit enforced (cache's own accounting; stringifying the Map
    // directly would always report 2 bytes and pass vacuously)
    expect(testCache.getStats().bytes).toBeLessThanOrEqual(5000)

    // Should still have some entries cached
    expect(testCache.cache.size).toBeGreaterThan(0)
  })
})
| 221 | + |
describe('Combined Length and Size Limits', () => {
  let testCache

  beforeEach(() => {
    testCache = setupTestCache(10, 2000, 300000)
  })

  afterEach(() => {
    restoreDefaultCache()
  })

  it('should enforce both length and byte limits', () => {
    // Add entries with varying sizes
    for (let i = 0; i < 20; i++) {
      const key = testCache.generateKey('id', `test${i}`)
      const size = i * 10 // Varying sizes
      testCache.set(key, { data: 'x'.repeat(size) })
    }

    // Length ceiling respected
    expect(testCache.cache.size).toBeLessThanOrEqual(10)

    // BUG FIX: JSON.stringify on a Map yields "{}" (2 bytes), so the old
    // Buffer.byteLength check could never fail. Use the cache's own accounting.
    expect(testCache.getStats().bytes).toBeLessThanOrEqual(2000)
  })

  it('should prioritize byte limit over length limit when necessary', () => {
    testCache = setupTestCache(100, 500, 300000) // High length limit, low byte limit

    // Large entries hit the byte limit long before the length limit
    const largeValue = { data: 'x'.repeat(50) }

    for (let i = 0; i < 20; i++) {
      const key = testCache.generateKey('id', `test${i}`)
      testCache.set(key, largeValue)
    }

    // Fewer entries than maxLength because the byte budget ran out first
    expect(testCache.cache.size).toBeLessThan(100)
    expect(testCache.cache.size).toBeGreaterThan(0)

    // Byte budget respected (cache's own ledger — see BUG FIX note above)
    expect(testCache.getStats().bytes).toBeLessThanOrEqual(500)
  })
})
| 268 | + |
describe('Edge Cases', () => {
  let testCache

  beforeEach(() => {
    testCache = setupTestCache(5, 1000000000, 300000)
  })

  afterEach(() => {
    restoreDefaultCache()
  })

  it('should handle updating existing entries without exceeding limits', () => {
    // Bring the cache exactly to its capacity
    for (let n = 0; n < 5; n++) {
      testCache.set(testCache.generateKey('id', `test${n}`), { data: `value${n}` })
    }
    expect(testCache.cache.size).toBe(5)

    // Overwriting an existing key must not evict anything
    const existingKey = testCache.generateKey('id', 'test2')
    testCache.set(existingKey, { data: 'updated value' })

    expect(testCache.cache.size).toBe(5)
    expect(testCache.get(existingKey)).toEqual({ data: 'updated value' })
  })

  it('should handle single large entry that fits within limits', () => {
    testCache = setupTestCache(1000, 1000, 300000)

    // One sizeable entry that still fits inside the byte budget
    const bigKey = testCache.generateKey('id', 'large')
    const bigValue = { data: 'x'.repeat(200) }
    testCache.set(bigKey, bigValue)

    expect(testCache.cache.size).toBe(1)
    expect(testCache.get(bigKey)).toEqual(bigValue)
  })

  it('should handle empty cache when checking limits', () => {
    testCache = setupTestCache(10, 1000, 300000)

    expect(testCache.cache.size).toBe(0)

    // Stats must reflect the configured limits even with nothing cached
    const stats = testCache.getStats()
    expect(stats.length).toBe(0)
    expect(stats.maxLength).toBe(10)
    expect(stats.maxBytes).toBe(1000)
  })
})
| 320 | + |
describe('Real-world Simulation', () => {
  let testCache

  beforeEach(() => {
    // Mirror the production default limits
    testCache = setupTestCache(1000, 1000000000, 300000)
  })

  afterEach(() => {
    restoreDefaultCache()
  })

  it('should handle realistic RERUM API cache usage', () => {
    // 2000 write operations, but the page grouping below means only 200
    // distinct keys are ever produced
    for (let writeIndex = 0; writeIndex < 2000; writeIndex++) {
      const key = testCache.generateKey('query', {
        type: 'Annotation',
        '@context': 'http://www.w3.org/ns/anno.jsonld',
        page: Math.floor(writeIndex / 10)
      })

      // A realistic 100-item result page
      const results = Array.from({ length: 100 }, (_, itemIndex) => ({
        '@id': `http://store.rerum.io/v1/id/${writeIndex}_${itemIndex}`,
        '@type': 'Annotation'
      }))

      testCache.set(key, results)
    }

    // Never exceeds the configured length ceiling
    expect(testCache.cache.size).toBeLessThanOrEqual(1000)

    // 2000 writes / 10 per page = 200 unique keys; repeat writes overwrite
    // in place rather than adding entries
    expect(testCache.cache.size).toBe(200)

    // Overwrites are not evictions, so none should have occurred
    expect(testCache.stats.evictions).toBe(0)

    // Every write — including overwrites — increments the set counter
    const stats = testCache.getStats()
    expect(stats.sets).toBe(2000)
    expect(stats.length).toBe(200)

    // Byte usage stays within the configured budget
    expect(stats.bytes).toBeLessThanOrEqual(1000000000)
  })
})
| 372 | + |
0 commit comments