From 834ace3d91b5e687bfe6e407195bdc54a1258643 Mon Sep 17 00:00:00 2001 From: Satyaki Ghosh Date: Sun, 26 Apr 2026 00:30:15 -0600 Subject: [PATCH 1/2] Create a key based file database so each key has its own file --- src/datastore/DataStore.ts | 4 +- .../{FileStore.ts => FileStoreFactory.ts} | 8 +- .../{LMDB.ts => LMDBStoreFactory.ts} | 0 src/datastore/file/EncryptedFile.ts | 120 ++++++++++++++ src/datastore/file/EncryptedFileStore.ts | 128 --------------- src/datastore/file/Encryption.ts | 1 + src/datastore/file/KeyedFileStore.ts | 155 ++++++++++++++++++ .../datastore/FileStore.encryption.test.ts | 106 ++++++++++++ tst/unit/datastore/FileStore.test.ts | 102 ++++++------ tst/unit/datastore/FilestoreWorker.ts | 4 +- tst/unit/datastore/LMDB.corruption.test.ts | 2 +- tst/unit/datastore/LMDB.recovery.test.ts | 2 +- tst/unit/datastore/LMDB.startup.test.ts | 2 +- tst/unit/datastore/LMDB.test.ts | 2 +- 14 files changed, 444 insertions(+), 192 deletions(-) rename src/datastore/{FileStore.ts => FileStoreFactory.ts} (93%) rename src/datastore/{LMDB.ts => LMDBStoreFactory.ts} (100%) create mode 100644 src/datastore/file/EncryptedFile.ts delete mode 100644 src/datastore/file/EncryptedFileStore.ts create mode 100644 src/datastore/file/KeyedFileStore.ts create mode 100644 tst/unit/datastore/FileStore.encryption.test.ts diff --git a/src/datastore/DataStore.ts b/src/datastore/DataStore.ts index be20188..17d041a 100644 --- a/src/datastore/DataStore.ts +++ b/src/datastore/DataStore.ts @@ -1,8 +1,8 @@ import { Closeable } from '../utils/Closeable'; import { isWindows } from '../utils/Environment'; import { pathToStorage } from '../utils/Storage'; -import { FileStoreFactory } from './FileStore'; -import { LMDBStoreFactory } from './LMDB'; +import { FileStoreFactory } from './FileStoreFactory'; +import { LMDBStoreFactory } from './LMDBStoreFactory'; import { MemoryStoreFactory } from './MemoryStore'; export enum Persistence { diff --git a/src/datastore/FileStore.ts 
b/src/datastore/FileStoreFactory.ts similarity index 93% rename from src/datastore/FileStore.ts rename to src/datastore/FileStoreFactory.ts index 4356472..2f40b45 100644 --- a/src/datastore/FileStore.ts +++ b/src/datastore/FileStoreFactory.ts @@ -6,14 +6,14 @@ import { ScopedTelemetry } from '../telemetry/ScopedTelemetry'; import { Telemetry } from '../telemetry/TelemetryDecorator'; import { formatNumber } from '../utils/String'; import { DataStore, DataStoreFactory, PersistedStores, StoreName } from './DataStore'; -import { EncryptedFileStore } from './file/EncryptedFileStore'; import { encryptionKey } from './file/Encryption'; +import { KeyedFileStore } from './file/KeyedFileStore'; export class FileStoreFactory implements DataStoreFactory { private readonly log: Logger; @Telemetry({ scope: 'FileStore.Global' }) private readonly telemetry!: ScopedTelemetry; - private readonly stores = new Map(); + private readonly stores = new Map(); private readonly fileDbRoot: string; private readonly fileDbDir: string; @@ -35,7 +35,7 @@ export class FileStoreFactory implements DataStoreFactory { } for (const store of storeNames) { - this.stores.set(store, new EncryptedFileStore(encryptionKey(VersionNumber), store, this.fileDbDir)); + this.stores.set(store, new KeyedFileStore(encryptionKey(VersionNumber), store, this.fileDbDir)); } this.metricsInterval = setInterval(() => { @@ -116,5 +116,5 @@ export class FileStoreFactory implements DataStoreFactory { } } -const VersionNumber = 2; +const VersionNumber = 3; const Version = `v${VersionNumber}`; diff --git a/src/datastore/LMDB.ts b/src/datastore/LMDBStoreFactory.ts similarity index 100% rename from src/datastore/LMDB.ts rename to src/datastore/LMDBStoreFactory.ts diff --git a/src/datastore/file/EncryptedFile.ts b/src/datastore/file/EncryptedFile.ts new file mode 100644 index 0000000..bcac281 --- /dev/null +++ b/src/datastore/file/EncryptedFile.ts @@ -0,0 +1,120 @@ +import { existsSync, readFileSync, statSync, unlinkSync } from 
'fs'; // eslint-disable-line no-restricted-imports -- files being checked +import { rename, unlink, writeFile } from 'fs/promises'; +import { join } from 'path'; +import { Logger } from 'pino'; +import { lock, LockOptions, lockSync } from 'proper-lockfile'; +import { LoggerFactory } from '../../telemetry/LoggerFactory'; +import { TelemetryService } from '../../telemetry/TelemetryService'; +import { decrypt, encrypt } from './Encryption'; + +const LOCK_OPTIONS_SYNC: LockOptions = { stale: 10_000 }; +const LOCK_OPTIONS: LockOptions = { ...LOCK_OPTIONS_SYNC, retries: { retries: 20, minTimeout: 50, maxTimeout: 1000 } }; + +/** + * Encrypted on-disk envelope. Stores the original key alongside the value + * so the key can be recovered from the file during startup. + */ +export type EncryptedEntry = { + readonly key: string; + readonly value: T; +}; + +export class EncryptedFile { + private readonly log: Logger; + private readonly file: string; + private key: string | undefined; + private content: EncryptedEntry | undefined = undefined; + + constructor( + private readonly KEY: Buffer, + storeName: string, + fileName: string, + fileDbDir: string, + ) { + this.log = LoggerFactory.getLogger(`EncryptedFile.${storeName}`); + this.file = join(fileDbDir, fileName); + + if (this.exists()) { + const release = lockSync(this.file, LOCK_OPTIONS_SYNC); + try { + this.content = this.readFile(); + } catch (error) { + this.log.error(error, 'Failed to decrypt file store, deleting store'); + TelemetryService.instance.get(`FileStore.${storeName}`).count('filestore.recreate', 1); + unlinkSync(this.file); + } finally { + release(); + } + } + } + + setKey(key: string) { + if (this.key !== undefined) { + throw new Error('File key was already set'); + } + this.key = key; + } + + exists() { + return existsSync(this.file); + } + + entry(): EncryptedEntry | undefined { + return this.content; + } + + get(): T | undefined { + return this.content?.value as T | undefined; + } + + async put(value: T): 
Promise { + if (this.key === undefined) { + throw new Error('File key is not set'); + } + + this.content = { key: this.key, value }; + + if (!this.exists()) { + await this.save(); + return true; + } + + const release = await lock(this.file, LOCK_OPTIONS); + try { + await this.save(); + return true; + } finally { + await release(); + } + } + + async remove() { + this.content = undefined; + + if (!this.exists()) { + return true; + } + + const release = await lock(this.file, LOCK_OPTIONS); + try { + await unlink(this.file); + return true; + } finally { + await release(); + } + } + + fileSize(): number { + return existsSync(this.file) ? statSync(this.file).size : 0; + } + + private readFile(): EncryptedEntry { + return JSON.parse(decrypt(this.KEY, readFileSync(this.file))) as EncryptedEntry; + } + + private async save() { + const tmp = `${this.file}.${process.pid}.tmp`; + await writeFile(tmp, encrypt(this.KEY, JSON.stringify(this.content))); + await rename(tmp, this.file); + } +} diff --git a/src/datastore/file/EncryptedFileStore.ts b/src/datastore/file/EncryptedFileStore.ts deleted file mode 100644 index fc4b71c..0000000 --- a/src/datastore/file/EncryptedFileStore.ts +++ /dev/null @@ -1,128 +0,0 @@ -import { existsSync, readFileSync, renameSync, statSync, writeFileSync } from 'fs'; // eslint-disable-line no-restricted-imports -- files being checked -import { rename, writeFile } from 'fs/promises'; -import { join } from 'path'; -import { Logger } from 'pino'; -import { lock, LockOptions, lockSync } from 'proper-lockfile'; -import { LoggerFactory } from '../../telemetry/LoggerFactory'; -import { ScopedTelemetry } from '../../telemetry/ScopedTelemetry'; -import { TelemetryService } from '../../telemetry/TelemetryService'; -import { DataStore } from '../DataStore'; -import { decrypt, encrypt } from './Encryption'; - -const LOCK_OPTIONS_SYNC: LockOptions = { stale: 10_000 }; -const LOCK_OPTIONS: LockOptions = { ...LOCK_OPTIONS_SYNC, retries: { retries: 20, minTimeout: 50, 
maxTimeout: 1000 } }; - -export class EncryptedFileStore implements DataStore { - private readonly log: Logger; - private readonly file: string; - private content: Record = {}; - private readonly telemetry: ScopedTelemetry; - - constructor( - private readonly KEY: Buffer, - name: string, - fileDbDir: string, - ) { - this.log = LoggerFactory.getLogger(`FileStore.${name}`); - this.file = join(fileDbDir, `${name}.enc`); - this.telemetry = TelemetryService.instance.get(`FileStore.${name}`); - - if (existsSync(this.file)) { - const release = lockSync(this.file, LOCK_OPTIONS_SYNC); - try { - this.content = this.readFile(); - } catch (error) { - this.log.error(error, 'Failed to decrypt file store, recreating store'); - this.telemetry.count('filestore.recreate', 1); - this.saveSync(); - } finally { - release(); - } - } else { - this.saveSync(); - } - } - - get(key: string): T | undefined { - return this.telemetry.countExecution('get', () => this.content[key] as T | undefined, { - captureErrorAttributes: true, - }); - } - - put(key: string, value: T): Promise { - return this.withLock('put', async () => { - this.content[key] = value; - await this.save(); - return true; - }); - } - - remove(key: string): Promise { - return this.withLock('remove', async () => { - if (!(key in this.content)) { - return false; - } - - delete this.content[key]; - await this.save(); - return true; - }); - } - - clear(): Promise { - return this.withLock('clear', async () => { - this.content = {}; - await this.save(); - }); - } - - keys(limit: number): ReadonlyArray { - return this.telemetry.countExecution('keys', () => Object.keys(this.content).slice(0, limit), { - captureErrorAttributes: true, - }); - } - - stats(): FileStoreStats { - return { - entries: Object.keys(this.content).length, - totalSize: existsSync(this.file) ? 
statSync(this.file).size : 0, - }; - } - - private async withLock(operation: string, fn: () => Promise): Promise { - return await this.telemetry.measureAsync( - operation, - async () => { - const release = await lock(this.file, LOCK_OPTIONS); - try { - this.content = this.readFile(); - return await fn(); - } finally { - await release(); - } - }, - { captureErrorAttributes: true }, - ); - } - - private readFile(): Record { - return JSON.parse(decrypt(this.KEY, readFileSync(this.file))) as Record; - } - - private saveSync() { - const tmp = `${this.file}.${process.pid}.tmp`; - writeFileSync(tmp, encrypt(this.KEY, JSON.stringify(this.content))); - renameSync(tmp, this.file); - } - - private async save() { - const tmp = `${this.file}.${process.pid}.tmp`; - await writeFile(tmp, encrypt(this.KEY, JSON.stringify(this.content))); - await rename(tmp, this.file); - } -} - -export type FileStoreStats = { - entries: number; - totalSize: number; -}; diff --git a/src/datastore/file/Encryption.ts b/src/datastore/file/Encryption.ts index a3cefcb..84110df 100644 --- a/src/datastore/file/Encryption.ts +++ b/src/datastore/file/Encryption.ts @@ -3,6 +3,7 @@ import { stableMachineSpecificKey } from '../../utils/MachineKey'; export function encryptionKey(version: number): Buffer { switch (version) { + case 3: case 2: case 1: { return stableMachineSpecificKey('filedb-static-salt', 'filedb-encryption-key-derivation', 32); diff --git a/src/datastore/file/KeyedFileStore.ts b/src/datastore/file/KeyedFileStore.ts new file mode 100644 index 0000000..32d3558 --- /dev/null +++ b/src/datastore/file/KeyedFileStore.ts @@ -0,0 +1,155 @@ +import { readdirSync } from 'fs'; +import { Logger } from 'pino'; +import { LoggerFactory } from '../../telemetry/LoggerFactory'; +import { ScopedTelemetry } from '../../telemetry/ScopedTelemetry'; +import { TelemetryService } from '../../telemetry/TelemetryService'; +import { stableHashCode } from '../../utils/StableHash'; +import { DataStore } from '../DataStore'; 
+import { EncryptedFile } from './EncryptedFile'; + +export class KeyedFileStore implements DataStore { + private readonly log: Logger; + private readonly keysToFiles = new Map(); + private readonly telemetry: ScopedTelemetry; + + constructor( + private readonly encryptionKey: Buffer, + private readonly storeName: string, + private readonly fileDbDir: string, + ) { + this.log = LoggerFactory.getLogger(`KeyedFileStore.${storeName}`); + this.telemetry = TelemetryService.instance.get(`FileStore.${storeName}`); + this.loadAllKeys(); + } + + get(key: string): T | undefined { + return this.telemetry.measure('get', () => this.keysToFiles.get(key)?.get(), { + captureErrorAttributes: true, + }); + } + + put(key: string, value: T): Promise { + return this.telemetry.measureAsync( + 'put', + async () => { + await this.getOrCreate(key).put(value); + return true; + }, + { captureErrorAttributes: true }, + ); + } + + remove(key: string): Promise { + return this.telemetry.measureAsync( + 'remove', + async () => { + const file = this.keysToFiles.get(key); + if (!file) { + return false; + } + + this.keysToFiles.delete(key); + await file.remove(); + return true; + }, + { captureErrorAttributes: true }, + ); + } + + clear(): Promise { + return this.telemetry.measureAsync( + 'clear', + async () => { + this.loadAllKeys(); + const files = [...this.keysToFiles.values()]; + this.keysToFiles.clear(); + for (const file of files) { + await file.remove(); + } + }, + { captureErrorAttributes: true }, + ); + } + + keys(limit: number): ReadonlyArray { + return this.telemetry.measure( + 'keys', + () => { + this.loadAllKeys(); + return [...this.keysToFiles.keys()].slice(0, limit); + }, + { + captureErrorAttributes: true, + }, + ); + } + + stats(): FileStoreStats { + this.loadAllKeys(); + let entries = 0; + let totalSize = 0; + for (const store of this.keysToFiles.values()) { + entries++; + totalSize += store.fileSize(); + } + return { entries, totalSize }; + } + + private getOrCreate(key: string): 
EncryptedFile { + let store = this.keysToFiles.get(key); + if (!store) { + store = new EncryptedFile( + this.encryptionKey, + this.storeName, + fileName(this.storeName, key), + this.fileDbDir, + ); + + const existing = store.entry(); + if (existing && existing.key !== key) { + throw new Error( + `Hash collision in ${this.storeName}: key "${key}" maps to same file as "${existing.key}"`, + ); + } + + store.setKey(key); + this.keysToFiles.set(key, store); + } + return store; + } + + private loadAllKeys(): void { + const prefix = `${this.storeName}.`; + try { + for (const entry of readdirSync(this.fileDbDir)) { + if (entry.startsWith(prefix) && entry.endsWith('.enc')) { + this.recoverKey(entry); + } + } + } catch (error) { + this.log.warn(error, 'Failed to scan existing keyed files'); + } + } + + private recoverKey(filename: string): void { + try { + const store = new EncryptedFile(this.encryptionKey, this.storeName, filename, this.fileDbDir); + const entry = store.entry(); + if (entry?.key) { + store.setKey(entry.key); + this.keysToFiles.set(entry.key, store); + } + } catch (error) { + this.log.warn(error, `Failed to recover key from ${filename}`); + } + } +} + +type FileStoreStats = { + entries: number; + totalSize: number; +}; + +function fileName(storeName: string, key: string) { + return `${storeName}.${stableHashCode(key)}.enc`; +} diff --git a/tst/unit/datastore/FileStore.encryption.test.ts b/tst/unit/datastore/FileStore.encryption.test.ts new file mode 100644 index 0000000..a89bae6 --- /dev/null +++ b/tst/unit/datastore/FileStore.encryption.test.ts @@ -0,0 +1,106 @@ +import { randomBytes } from 'crypto'; +import { describe, it, expect } from 'vitest'; +import { decrypt, encrypt, encryptionKey } from '../../../src/datastore/file/Encryption'; + +describe('Encryption', () => { + const key = encryptionKey(3); + + describe('encrypt and decrypt', () => { + it('should round-trip a simple string', () => { + const plaintext = 'hello world'; + const encrypted = 
encrypt(key, plaintext); + expect(decrypt(key, encrypted)).toBe(plaintext); + }); + + it('should round-trip JSON with unicode', () => { + const data = JSON.stringify({ region: 'us-east-1', emoji: '🔑', tags: ['α', 'β'] }); + const encrypted = encrypt(key, data); + expect(decrypt(key, encrypted)).toBe(data); + }); + + it('should round-trip an empty string', () => { + const encrypted = encrypt(key, ''); + expect(decrypt(key, encrypted)).toBe(''); + }); + + it('should round-trip large payloads', () => { + const data = 'x'.repeat(1_000_000); + const encrypted = encrypt(key, data); + expect(decrypt(key, encrypted)).toBe(data); + }); + + it('should produce different ciphertext for the same plaintext', () => { + const plaintext = 'deterministic?'; + const a = encrypt(key, plaintext); + const b = encrypt(key, plaintext); + expect(a.equals(b)).toBe(false); + expect(decrypt(key, a)).toBe(plaintext); + expect(decrypt(key, b)).toBe(plaintext); + }); + + it('should not contain plaintext in the ciphertext', () => { + const secret = 'super-secret-password-12345'; + const encrypted = encrypt(key, secret); + expect(encrypted.toString('utf8')).not.toContain(secret); + }); + }); + + describe('tamper detection', () => { + it('should reject decryption with a wrong key', () => { + const encrypted = encrypt(key, 'data'); + const wrongKey = randomBytes(32); + expect(() => decrypt(wrongKey, encrypted)).toThrow(); + }); + + it('should reject a flipped byte in the ciphertext payload', () => { + const encrypted = encrypt(key, 'integrity check'); + const tampered = Buffer.from(encrypted); + // Flip a byte in the encrypted payload area (after IV + auth tag = 28 bytes) + tampered[28] ^= 0xff; + expect(() => decrypt(key, tampered)).toThrow(); + }); + + it('should reject a modified auth tag', () => { + const encrypted = encrypt(key, 'auth tag check'); + const tampered = Buffer.from(encrypted); + // Auth tag starts at byte 12 + tampered[12] ^= 0xff; + expect(() => decrypt(key, tampered)).toThrow(); 
+ }); + + it('should reject a modified IV', () => { + const encrypted = encrypt(key, 'iv check'); + const tampered = Buffer.from(encrypted); + tampered[0] ^= 0xff; + expect(() => decrypt(key, tampered)).toThrow(); + }); + + it('should reject truncated data', () => { + const encrypted = encrypt(key, 'truncation check'); + expect(() => decrypt(key, encrypted.subarray(0, 10))).toThrow(); + }); + }); + + describe('encryptionKey', () => { + it('should return the same key for versions 1, 2, and 3', () => { + const k1 = encryptionKey(1); + const k2 = encryptionKey(2); + const k3 = encryptionKey(3); + expect(k1.equals(k2)).toBe(true); + expect(k2.equals(k3)).toBe(true); + }); + + it('should return a 32-byte key', () => { + expect(encryptionKey(3)).toHaveLength(32); + }); + + it('should throw for unknown versions', () => { + expect(() => encryptionKey(0)).toThrow('Unknown FileDB version 0'); + expect(() => encryptionKey(99)).toThrow('Unknown FileDB version 99'); + }); + + it('should return deterministic keys across calls', () => { + expect(encryptionKey(3).equals(encryptionKey(3))).toBe(true); + }); + }); +}); diff --git a/tst/unit/datastore/FileStore.test.ts b/tst/unit/datastore/FileStore.test.ts index aea50dc..a12126a 100644 --- a/tst/unit/datastore/FileStore.test.ts +++ b/tst/unit/datastore/FileStore.test.ts @@ -1,13 +1,14 @@ import { execFile } from 'child_process'; import { randomUUID as v4 } from 'crypto'; import { rmSync, mkdirSync, writeFileSync, existsSync, readdirSync, readFileSync } from 'fs'; -import { join } from 'path'; +import { basename, join } from 'path'; import { promisify } from 'util'; import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; import { DataStore, StoreName } from '../../../src/datastore/DataStore'; -import { EncryptedFileStore } from '../../../src/datastore/file/EncryptedFileStore'; -import { decrypt, encrypt, encryptionKey } from '../../../src/datastore/file/Encryption'; -import { FileStoreFactory } from 
'../../../src/datastore/FileStore'; +import { decrypt, encryptionKey } from '../../../src/datastore/file/Encryption'; +import { KeyedFileStore } from '../../../src/datastore/file/KeyedFileStore'; +import { FileStoreFactory } from '../../../src/datastore/FileStoreFactory'; +import { stableHashCode } from '../../../src/utils/StableHash'; describe('FileStore', () => { let fileFactory: FileStoreFactory; @@ -40,21 +41,22 @@ describe('FileStore', () => { mkdirSync(encTestDir, { recursive: true }); const key = encryptionKey(2); - const store1 = new EncryptedFileStore(key, 'test', encTestDir); + const store1 = new KeyedFileStore(key, 'test', encTestDir); await store1.put('key1', 'value1'); // store2 loads key1 from disk in constructor - const store2 = new EncryptedFileStore(key, 'test', encTestDir); + const store2 = new KeyedFileStore(key, 'test', encTestDir); expect(store2.get('key1')).toBe('value1'); // store1 writes key2 to disk — store2 doesn't see it via get() - // because get() reads from in-memory cache only + // because each key is loaded at construction time, not on every read await store1.put('key2', 'value2'); expect(store2.get('key2')).toBeUndefined(); - // But after store2 does a write (which re-reads disk under lock), it sees key2 - await store2.put('key3', 'value3'); - expect(store2.get('key2')).toBe('value2'); + // A new instance sees both keys from disk + const store3 = new KeyedFileStore(key, 'test', encTestDir); + expect(store3.get('key1')).toBe('value1'); + expect(store3.get('key2')).toBe('value2'); }); }); @@ -76,11 +78,11 @@ describe('FileStore', () => { mkdirSync(encTestDir, { recursive: true }); const key = encryptionKey(2); - const store1 = new EncryptedFileStore(key, 'test', encTestDir); + const store1 = new KeyedFileStore(key, 'test', encTestDir); await store1.put('key1', 'from-store1'); // store2 loads from disk in constructor, sees key1 - const store2 = new EncryptedFileStore(key, 'test', encTestDir); + const store2 = new KeyedFileStore(key, 
'test', encTestDir); // store1 writes key2 — this goes to disk await store1.put('key2', 'from-store1'); @@ -89,7 +91,7 @@ describe('FileStore', () => { await store2.put('key3', 'from-store2'); // Verify store2's file has all three keys - const store3 = new EncryptedFileStore(key, 'test', encTestDir); + const store3 = new KeyedFileStore(key, 'test', encTestDir); expect(store3.get('key1')).toBe('from-store1'); expect(store3.get('key2')).toBe('from-store1'); expect(store3.get('key3')).toBe('from-store2'); @@ -168,11 +170,11 @@ describe('FileStore', () => { describe('stats', () => { it('should report entries and file size', async () => { - const store = fileFactory.get(StoreName.public_schemas) as EncryptedFileStore; + const store = fileFactory.get(StoreName.public_schemas) as KeyedFileStore; const emptyStats = store.stats(); expect(emptyStats.entries).toBe(0); - expect(emptyStats.totalSize).toBeGreaterThan(0); // file exists with encrypted {} + expect(emptyStats.totalSize).toBe(0); await fileStore.put('key1', 'value1'); await fileStore.put('key2', 'value2'); @@ -206,11 +208,11 @@ describe('FileStore', () => { mkdirSync(encTestDir, { recursive: true }); const key = encryptionKey(2); - const store1 = new EncryptedFileStore(key, 'test', encTestDir); + const store1 = new KeyedFileStore(key, 'test', encTestDir); await store1.put('key1', 'value1'); // Fresh instance writes key2 — key1 must survive - const store2 = new EncryptedFileStore(key, 'test', encTestDir); + const store2 = new KeyedFileStore(key, 'test', encTestDir); await store2.put('key2', 'value2'); expect(store2.get('key1')).toBe('value1'); @@ -243,16 +245,16 @@ describe('FileStore', () => { mkdirSync(encTestDir, { recursive: true }); const key = encryptionKey(2); - const store = new EncryptedFileStore(key, 'test', encTestDir); - const filePath = join(encTestDir, 'test.enc'); + const store = new KeyedFileStore(key, 'test', encTestDir); for (let i = 0; i < 5; i++) { await store.put(`key${i}`, `value${i}`); // After 
every write, the file on disk must be valid encrypted JSON - const raw = readFileSync(filePath); + const raw = readFileSync(encodedFilePath(encTestDir, 'test', `key${i}`).filePath); const decrypted = JSON.parse(decrypt(key, raw)); - expect(decrypted[`key${i}`]).toBe(`value${i}`); + expect(decrypted['value']).toBe(`value${i}`); + expect(store.get(`key${i}`)).toBe(`value${i}`); } // No leftover temp files @@ -260,15 +262,17 @@ describe('FileStore', () => { expect(files.filter((f) => f.endsWith('.tmp'))).toHaveLength(0); }); - it('should not leave temp files after constructor creates a new store', () => { + it('should not leave temp files after constructor creates a new store', async () => { const encTestDir = join(testDir, 'no-tmp-test'); mkdirSync(encTestDir, { recursive: true }); const key = encryptionKey(2); - new EncryptedFileStore(key, 'test', encTestDir); + const store = new KeyedFileStore(key, 'test', encTestDir); - const files = readdirSync(encTestDir); - expect(files).toEqual(['test.enc']); + expect(readdirSync(encTestDir)).toEqual([]); + + await store.put('key', 'someValue'); + expect(readdirSync(encTestDir)).toEqual([encodedFilePath(encTestDir, 'test', 'key').relPath]); }); }); @@ -281,7 +285,7 @@ describe('FileStore', () => { const corruptedFile = join(encTestDir, 'test.enc'); writeFileSync(corruptedFile, 'corrupted-not-encrypted-data'); - const store = new EncryptedFileStore(key, 'test', encTestDir); + const store = new KeyedFileStore(key, 'test', encTestDir); // Should start empty after recovery expect(store.get('anyKey')).toBeUndefined(); @@ -292,7 +296,7 @@ describe('FileStore', () => { expect(store.get('newKey')).toBe('newValue'); // Data persists after reload - const store2 = new EncryptedFileStore(key, 'test', encTestDir); + const store2 = new KeyedFileStore(key, 'test', encTestDir); expect(store2.get('newKey')).toBe('newValue'); }); @@ -302,16 +306,16 @@ describe('FileStore', () => { const key = encryptionKey(2); // Write valid data first - const 
store1 = new EncryptedFileStore(key, 'test', encTestDir); + const store1 = new KeyedFileStore(key, 'test', encTestDir); await store1.put('key', 'value'); // Truncate the file to simulate crash mid-write (pre-atomic-write scenario) - const filePath = join(encTestDir, 'test.enc'); + const filePath = encodedFilePath(encTestDir, 'test', 'key').filePath; const original = readFileSync(filePath); writeFileSync(filePath, original.subarray(0, 10)); // Should recover gracefully - const store2 = new EncryptedFileStore(key, 'test', encTestDir); + const store2 = new KeyedFileStore(key, 'test', encTestDir); expect(store2.get('key')).toBeUndefined(); await store2.put('recovered', 'yes'); @@ -324,7 +328,7 @@ describe('FileStore', () => { const key = encryptionKey(2); writeFileSync(join(encTestDir, 'test.enc'), 'garbage'); - new EncryptedFileStore(key, 'test', encTestDir); + new KeyedFileStore(key, 'test', encTestDir); const files = readdirSync(encTestDir); expect(files.filter((f) => f.endsWith('.tmp'))).toHaveLength(0); @@ -388,7 +392,7 @@ describe('FileStore', () => { // Verify all writes from all workers are present — no data lost const key = encryptionKey(2); - const store = new EncryptedFileStore(key, 'test', encTestDir); + const store = new KeyedFileStore(key, 'test', encTestDir); for (let w = 0; w < numWorkers; w++) { for (let k = 0; k < numWrites; k++) { @@ -440,15 +444,15 @@ describe('FileStore', () => { mkdirSync(join(fileDbRoot, 'v1'), { recursive: true }); writeFileSync(join(fileDbRoot, 'v1', 'data.enc'), 'old'); - // Current version (v2) should exist from factory constructor - expect(existsSync(join(fileDbRoot, 'v2'))).toBe(true); + // Current version should exist from factory constructor + expect(existsSync(join(fileDbRoot, 'v3'))).toBe(true); expect(existsSync(join(fileDbRoot, 'v1'))).toBe(true); // Trigger cleanup directly (normally runs after 2min timeout) (fileFactory as any).cleanupOldVersions(); expect(existsSync(join(fileDbRoot, 'v1'))).toBe(false); - 
expect(existsSync(join(fileDbRoot, 'v2'))).toBe(true); + expect(existsSync(join(fileDbRoot, 'v3'))).toBe(true); }); it('should handle cleanup when directory does not exist', async () => { @@ -470,10 +474,10 @@ describe('FileStore', () => { mkdirSync(encTestDir, { recursive: true }); const key = encryptionKey(2); - const store = new EncryptedFileStore(key, 'test', encTestDir); + const store = new KeyedFileStore(key, 'test', encTestDir); await store.put('secret', 'sensitive-data'); - const raw = readFileSync(join(encTestDir, 'test.enc')); + const raw = readFileSync(encodedFilePath(encTestDir, 'test', 'secret').filePath); // Raw bytes should not contain the plaintext expect(raw.toString('utf8')).not.toContain('sensitive-data'); @@ -481,21 +485,7 @@ describe('FileStore', () => { // But decrypting with the correct key should work const decrypted = JSON.parse(decrypt(key, raw)); - expect(decrypted['secret']).toBe('sensitive-data'); - }); - - it('should fail to decrypt with wrong key', () => { - const encTestDir = join(testDir, 'wrong-key-test'); - mkdirSync(encTestDir, { recursive: true }); - const key = encryptionKey(2); - - // Write with correct key - const data = encrypt(key, JSON.stringify({ key: 'value' })); - writeFileSync(join(encTestDir, 'test.enc'), data); - - // Decrypt with wrong key should throw - const wrongKey = Buffer.alloc(32, 0xff); - expect(() => decrypt(wrongKey, data)).toThrow(); + expect(decrypted['value']).toBe('sensitive-data'); }); }); @@ -530,3 +520,11 @@ describe('FileStore', () => { }); }); }); + +function encodedFilePath(dir: string, store: string, key: string) { + const filePath = join(dir, `${store}.${stableHashCode(key)}.enc`); + return { + filePath, + relPath: basename(filePath), + }; +} diff --git a/tst/unit/datastore/FilestoreWorker.ts b/tst/unit/datastore/FilestoreWorker.ts index 5948c2d..4e79888 100644 --- a/tst/unit/datastore/FilestoreWorker.ts +++ b/tst/unit/datastore/FilestoreWorker.ts @@ -1,8 +1,8 @@ import { randomUUID as v4 } 
from 'crypto'; import { join } from 'path'; import { staticInitialize } from '../../../src/app/initialize'; -import { EncryptedFileStore } from '../../../src/datastore/file/EncryptedFileStore'; import { encryptionKey } from '../../../src/datastore/file/Encryption'; +import { KeyedFileStore } from '../../../src/datastore/file/KeyedFileStore'; // Worker script for multiprocess FileStore testing staticInitialize(undefined, { @@ -15,7 +15,7 @@ const [encTestDir, workerId, numWrites] = process.argv.slice(2); const key = encryptionKey(2); async function main() { - const store = new EncryptedFileStore(key, 'test', encTestDir); + const store = new KeyedFileStore(key, 'test', encTestDir); for (let i = 0; i < Number.parseInt(numWrites); i++) { await store.put(`worker${workerId}_key${i}`, `worker${workerId}_value${i}`); diff --git a/tst/unit/datastore/LMDB.corruption.test.ts b/tst/unit/datastore/LMDB.corruption.test.ts index 8791645..14e7540 100644 --- a/tst/unit/datastore/LMDB.corruption.test.ts +++ b/tst/unit/datastore/LMDB.corruption.test.ts @@ -1,7 +1,7 @@ import fs from 'fs'; import { join } from 'path'; import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { LMDBStoreFactory } from '../../../src/datastore/LMDB'; +import { LMDBStoreFactory } from '../../../src/datastore/LMDBStoreFactory'; describe('LMDB corruption error detection', () => { it('should identify MDB_CORRUPTED error message', () => { diff --git a/tst/unit/datastore/LMDB.recovery.test.ts b/tst/unit/datastore/LMDB.recovery.test.ts index 3598ed5..8f45dd4 100644 --- a/tst/unit/datastore/LMDB.recovery.test.ts +++ b/tst/unit/datastore/LMDB.recovery.test.ts @@ -3,7 +3,7 @@ import fs from 'fs'; import { join } from 'path'; import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest'; import { StoreName } from '../../../src/datastore/DataStore'; -import { LMDBStoreFactory } from '../../../src/datastore/LMDB'; +import { LMDBStoreFactory } from 
'../../../src/datastore/LMDBStoreFactory'; describe('LMDB fork detection and recovery', () => { let testDir: string; diff --git a/tst/unit/datastore/LMDB.startup.test.ts b/tst/unit/datastore/LMDB.startup.test.ts index 95cb440..04d2dea 100644 --- a/tst/unit/datastore/LMDB.startup.test.ts +++ b/tst/unit/datastore/LMDB.startup.test.ts @@ -3,7 +3,7 @@ import { join } from 'path'; import { open } from 'lmdb'; import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; import { StoreName } from '../../../src/datastore/DataStore'; -import { LMDBStoreFactory } from '../../../src/datastore/LMDB'; +import { LMDBStoreFactory } from '../../../src/datastore/LMDBStoreFactory'; vi.mock('lmdb', async () => { const actual = await vi.importActual('lmdb'); diff --git a/tst/unit/datastore/LMDB.test.ts b/tst/unit/datastore/LMDB.test.ts index 4998961..904c47e 100644 --- a/tst/unit/datastore/LMDB.test.ts +++ b/tst/unit/datastore/LMDB.test.ts @@ -3,7 +3,7 @@ import fs from 'fs'; import { join } from 'path'; import { describe, it, expect, beforeEach, afterEach } from 'vitest'; import { DataStore, StoreName } from '../../../src/datastore/DataStore'; -import { LMDBStoreFactory } from '../../../src/datastore/LMDB'; +import { LMDBStoreFactory } from '../../../src/datastore/LMDBStoreFactory'; describe('LMDB', () => { let lmdbFactory: LMDBStoreFactory; From 4b141d78507478f07893df62af33225da853de9d Mon Sep 17 00:00:00 2001 From: Satyaki Ghosh Date: Sun, 26 Apr 2026 01:06:55 -0600 Subject: [PATCH 2/2] Add filedb feature flag, cleanup code and add tests --- assets/featureFlag/alpha.json | 98 ++++++++++--------- assets/featureFlag/beta.json | 96 +++++++++--------- assets/featureFlag/prod.json | 96 +++++++++--------- src/datastore/DataStore.ts | 5 +- src/featureFlag/DynamicFeatureFlag.ts | 2 +- src/featureFlag/FeatureFlagProvider.ts | 9 +- src/featureFlag/FeatureFlagSupplier.ts | 21 ++-- src/server/CfnExternal.ts | 21 +--- src/server/CfnInfraCore.ts | 21 +++- 
src/services/RelationshipSchemaService.ts | 6 +- src/utils/File.ts | 2 +- src/utils/RemoteDownload.ts | 3 + src/utils/Retry.ts | 2 +- tools/telemetry-generator.ts | 11 +-- tst/unit/featureFlag/FeatureFlag.test.ts | 69 +++++++++++++ .../featureFlag/FeatureFlagProvider.test.ts | 30 +++++- .../featureFlag/FeatureFlagSupplier.test.ts | 6 +- tst/unit/utils/File.test.ts | 60 ++++++++++++ tst/unit/utils/Retry.test.ts | 11 ++- tst/utils/MockServerComponents.ts | 1 + tst/utils/TestExtension.ts | 13 +-- 21 files changed, 391 insertions(+), 192 deletions(-) create mode 100644 tst/unit/utils/File.test.ts diff --git a/assets/featureFlag/alpha.json b/assets/featureFlag/alpha.json index e61838e..f8a6cbe 100644 --- a/assets/featureFlag/alpha.json +++ b/assets/featureFlag/alpha.json @@ -1,49 +1,53 @@ { - "version": 1, - "description": "Feature flags for alpha environment", - "features": { - "Constants": { - "enabled": false - }, - "EnhancedDryRun": { - "enabled": true, - "fleetPercentage": 100, - "allowlistedRegions": [ - "us-east-1", - "us-east-2", - "us-west-1", - "us-west-2", - "ca-central-1", - "ca-west-1", - "sa-east-1", - "mx-central-1", - "eu-north-1", - "eu-west-1", - "eu-west-2", - "eu-west-3", - "eu-central-1", - "eu-central-2", - "eu-south-1", - "eu-south-2", - "ap-east-1", - "ap-east-2", - "ap-south-1", - "ap-south-2", - "ap-northeast-1", - "ap-northeast-2", - "ap-northeast-3", - "ap-southeast-1", - "ap-southeast-2", - "ap-southeast-3", - "ap-southeast-4", - "ap-southeast-5", - "ap-southeast-7", - "me-south-1", - "me-central-1", - "af-south-1", - "il-central-1", - "test-region" - ] + "version": 1, + "description": "Feature flags for alpha environment", + "features": { + "Constants": { + "enabled": false + }, + "EnhancedDryRun": { + "enabled": true, + "fleetPercentage": 100, + "allowlistedRegions": [ + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", + "ca-central-1", + "ca-west-1", + "sa-east-1", + "mx-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + 
"eu-west-3", + "eu-central-1", + "eu-central-2", + "eu-south-1", + "eu-south-2", + "ap-east-1", + "ap-east-2", + "ap-south-1", + "ap-south-2", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-southeast-1", + "ap-southeast-2", + "ap-southeast-3", + "ap-southeast-4", + "ap-southeast-5", + "ap-southeast-7", + "me-south-1", + "me-central-1", + "af-south-1", + "il-central-1", + "test-region" + ] + }, + "FileDb": { + "enabled": true, + "fleetPercentage": 100 + } } - } -} \ No newline at end of file +} diff --git a/assets/featureFlag/beta.json b/assets/featureFlag/beta.json index 7539720..4469780 100644 --- a/assets/featureFlag/beta.json +++ b/assets/featureFlag/beta.json @@ -1,48 +1,52 @@ { - "version": 1, - "description": "Feature flags for beta environment", - "features": { - "Constants": { - "enabled": false - }, - "EnhancedDryRun": { - "enabled": true, - "fleetPercentage": 100, - "allowlistedRegions": [ - "us-east-1", - "us-east-2", - "us-west-1", - "us-west-2", - "ca-central-1", - "ca-west-1", - "sa-east-1", - "mx-central-1", - "eu-north-1", - "eu-west-1", - "eu-west-2", - "eu-west-3", - "eu-central-1", - "eu-central-2", - "eu-south-1", - "eu-south-2", - "ap-east-1", - "ap-east-2", - "ap-south-1", - "ap-south-2", - "ap-northeast-1", - "ap-northeast-2", - "ap-northeast-3", - "ap-southeast-1", - "ap-southeast-2", - "ap-southeast-3", - "ap-southeast-4", - "ap-southeast-5", - "ap-southeast-7", - "me-south-1", - "me-central-1", - "af-south-1", - "il-central-1" - ] + "version": 1, + "description": "Feature flags for beta environment", + "features": { + "Constants": { + "enabled": false + }, + "EnhancedDryRun": { + "enabled": true, + "fleetPercentage": 100, + "allowlistedRegions": [ + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", + "ca-central-1", + "ca-west-1", + "sa-east-1", + "mx-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "eu-central-1", + "eu-central-2", + "eu-south-1", + "eu-south-2", + "ap-east-1", + 
"ap-east-2", + "ap-south-1", + "ap-south-2", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + "ap-southeast-1", + "ap-southeast-2", + "ap-southeast-3", + "ap-southeast-4", + "ap-southeast-5", + "ap-southeast-7", + "me-south-1", + "me-central-1", + "af-south-1", + "il-central-1" + ] + }, + "FileDb": { + "enabled": false, + "fleetPercentage": 100 + } } - } -} \ No newline at end of file +} diff --git a/assets/featureFlag/prod.json b/assets/featureFlag/prod.json index 9499347..3ef2fde 100644 --- a/assets/featureFlag/prod.json +++ b/assets/featureFlag/prod.json @@ -1,48 +1,52 @@ { - "version": 1, - "description": "Feature flags for prod environment", - "features": { - "Constants": { - "enabled": false - }, - "EnhancedDryRun": { - "enabled": true, - "fleetPercentage": 100, - "allowlistedRegions": [ - "us-east-1", - "us-east-2", - "us-west-1", - "us-west-2", - "ca-central-1", - "ca-west-1", - "sa-east-1", - "mx-central-1", - "eu-north-1", - "eu-west-1", - "eu-west-2", - "eu-west-3", - "eu-central-1", - "eu-central-2", - "eu-south-1", - "eu-south-2", - "ap-east-1", - "ap-east-2", - "ap-south-1", - "ap-south-2", - "ap-northeast-1", - "ap-northeast-2", - "ap-northeast-3", - "ap-southeast-1", - "ap-southeast-2", - "ap-southeast-3", - "ap-southeast-4", - "ap-southeast-5", - "ap-southeast-7", - "me-south-1", - "me-central-1", - "af-south-1", - "il-central-1" - ] + "version": 1, + "description": "Feature flags for prod environment", + "features": { + "Constants": { + "enabled": false + }, + "EnhancedDryRun": { + "enabled": true, + "fleetPercentage": 100, + "allowlistedRegions": [ + "us-east-1", + "us-east-2", + "us-west-1", + "us-west-2", + "ca-central-1", + "ca-west-1", + "sa-east-1", + "mx-central-1", + "eu-north-1", + "eu-west-1", + "eu-west-2", + "eu-west-3", + "eu-central-1", + "eu-central-2", + "eu-south-1", + "eu-south-2", + "ap-east-1", + "ap-east-2", + "ap-south-1", + "ap-south-2", + "ap-northeast-1", + "ap-northeast-2", + "ap-northeast-3", + 
"ap-southeast-1", + "ap-southeast-2", + "ap-southeast-3", + "ap-southeast-4", + "ap-southeast-5", + "ap-southeast-7", + "me-south-1", + "me-central-1", + "af-south-1", + "il-central-1" + ] + }, + "FileDb": { + "enabled": false, + "fleetPercentage": 0 + } } - } -} \ No newline at end of file +} diff --git a/src/datastore/DataStore.ts b/src/datastore/DataStore.ts index 17d041a..ed26663 100644 --- a/src/datastore/DataStore.ts +++ b/src/datastore/DataStore.ts @@ -1,3 +1,4 @@ +import { FeatureFlag } from '../featureFlag/FeatureFlagI'; import { Closeable } from '../utils/Closeable'; import { isWindows } from '../utils/Environment'; import { pathToStorage } from '../utils/Storage'; @@ -62,8 +63,8 @@ export class MultiDataStoreFactoryProvider implements DataStoreFactoryProvider { private readonly memoryStoreFactory: MemoryStoreFactory; private readonly persistedStore: DataStoreFactory; - constructor() { - if (isWindows) { + constructor(fileDbFeatureFlag: FeatureFlag) { + if (fileDbFeatureFlag.isEnabled() || isWindows) { this.persistedStore = new FileStoreFactory(pathToStorage()); } else { this.persistedStore = new LMDBStoreFactory(pathToStorage()); diff --git a/src/featureFlag/DynamicFeatureFlag.ts b/src/featureFlag/DynamicFeatureFlag.ts index 5ad6a77..04c743c 100644 --- a/src/featureFlag/DynamicFeatureFlag.ts +++ b/src/featureFlag/DynamicFeatureFlag.ts @@ -2,7 +2,7 @@ import { Closeable } from '../utils/Closeable'; import { FeatureFlagBuilderType, FeatureFlagConfigType, TargetedFeatureFlagBuilderType } from './FeatureFlagBuilder'; import { FeatureFlag, TargetedFeatureFlag } from './FeatureFlagI'; -export const DynamicRefreshIntervalMs = 60 * 1000; +export const DynamicRefreshIntervalMs = 2 * 60 * 1000; export class DynamicFeatureFlag implements FeatureFlag, Closeable { private flag: FeatureFlag; diff --git a/src/featureFlag/FeatureFlagProvider.ts b/src/featureFlag/FeatureFlagProvider.ts index 926f220..8675f49 100644 --- a/src/featureFlag/FeatureFlagProvider.ts +++ 
b/src/featureFlag/FeatureFlagProvider.ts @@ -15,7 +15,7 @@ import { FeatureFlagSupplier, FeatureFlagConfigKey, TargetedFeatureFlagConfigKey const log = LoggerFactory.getLogger('FeatureFlagProvider'); -const RefreshIntervalMs = 5 * 60 * 1000; +const RefreshIntervalMs = 15 * 60 * 1000; export class FeatureFlagProvider implements Closeable { @Telemetry() @@ -28,7 +28,7 @@ export class FeatureFlagProvider implements Closeable { constructor( private readonly getLatestFeatureFlags: (env: string) => Promise, - private readonly localFile = join(__dirname, 'assets', 'featureFlag', `${AwsEnv.toLowerCase()}.json`), + private readonly localFile = featureFlagLocalFile(), refreshIntervalMs: number = RefreshIntervalMs, dynamicRefreshIntervalMs: number = DynamicRefreshIntervalMs, ) { @@ -78,6 +78,7 @@ export class FeatureFlagProvider implements Closeable { this.config = newConfig; writeFileSync(this.localFile, JSON.stringify(newConfig, undefined, 2)); this.telemetry.count('refresh.local.update', 1); + log.info('Updated and saved feature flags'); this.log(); } @@ -135,3 +136,7 @@ function defaultConfig(configFile: string, telemetry: ScopedTelemetry): FeatureF return { version: 1, description: 'Default empty config', features: {} }; } } + +export function featureFlagLocalFile(baseDir: string = __dirname) { + return join(baseDir, 'assets', 'featureFlag', `${AwsEnv.toLowerCase()}.json`); +} diff --git a/src/featureFlag/FeatureFlagSupplier.ts b/src/featureFlag/FeatureFlagSupplier.ts index cb9b502..68441fe 100644 --- a/src/featureFlag/FeatureFlagSupplier.ts +++ b/src/featureFlag/FeatureFlagSupplier.ts @@ -33,7 +33,10 @@ export class FeatureFlagSupplier implements Closeable { defaultConfig: () => unknown, dynamicRefreshIntervalMs: number = DynamicRefreshIntervalMs, ) { - for (const [key, builder] of Object.entries(FeatureBuilders)) { + for (const [key, builder] of Object.entries(FeatureBuilders) as [ + FeatureFlagConfigKey, + FeatureFlagBuilderType, + ][]) { const ff = new 
DynamicFeatureFlag( key, () => featureConfigSupplier(key, configSupplier, defaultConfig, this.telemetry), @@ -43,7 +46,10 @@ export class FeatureFlagSupplier implements Closeable { this._featureFlags.set(key, ff); } - for (const [key, builder] of Object.entries(TargetedFeatureBuilders)) { + for (const [key, builder] of Object.entries(TargetedFeatureBuilders) as [ + TargetedFeatureFlagConfigKey, + TargetedFeatureFlagBuilderType, + ][]) { const ff = new DynamicTargetedFeatureFlag( key, () => featureConfigSupplier(key, configSupplier, defaultConfig, this.telemetry), @@ -95,19 +101,20 @@ function featureConfigSupplier( return FeatureFlagConfigSchema.parse(configSupplier()).features[key]; } catch (err) { telemetry.count('used.config.default', 1); - log.warn(err, `Failed to parse feature flag config: \n${toString(configSupplier())}. Using defaults instead`); + log.error(err, `Failed to parse feature flag config: \n${toString(configSupplier())}. Using defaults instead`); return FeatureFlagConfigSchema.parse(defaultConfig()).features[key]; } } -const FeatureBuilders: Record = { +const FeatureBuilders = { Constants: buildStatic, -} as const; -const TargetedFeatureBuilders: Record> = { + FileDb: buildLocalHost, +} as const satisfies Record; +const TargetedFeatureBuilders = { EnhancedDryRun: (name: string, config?: FeatureFlagConfigType) => { return new CompoundFeatureFlag(buildLocalHost(name, config), buildRegional(name, config)); }, -} as const; +} as const satisfies Record>; export type FeatureFlagConfigKey = keyof typeof FeatureBuilders; export type TargetedFeatureFlagConfigKey = keyof typeof TargetedFeatureBuilders; diff --git a/src/server/CfnExternal.ts b/src/server/CfnExternal.ts index 8840af7..36aad85 100644 --- a/src/server/CfnExternal.ts +++ b/src/server/CfnExternal.ts @@ -1,4 +1,4 @@ -import { FeatureFlagProvider, getFromGitHub } from '../featureFlag/FeatureFlagProvider'; +import { FeatureFlagProvider } from '../featureFlag/FeatureFlagProvider'; import { 
LspComponents } from '../protocol/LspComponents'; import { getSamSchemas } from '../schema/GetSamSchemaTask'; import { getRemotePrivateSchemas, getRemotePublicSchemas } from '../schema/GetSchemaTask'; @@ -41,7 +41,7 @@ export class CfnExternal implements Configurables, Closeable { readonly featureFlags: FeatureFlagProvider; readonly onlineFeatureGuard: OnlineFeatureGuard; - constructor(lsp: LspComponents, core: CfnInfraCore, overrides: Partial = {}) { + constructor(lsp: LspComponents, core: CfnInfraCore, overrides: Omit, 'featureFlags'> = {}) { this.awsClient = overrides.awsClient ?? new AwsClient(core.awsCredentials, core.awsMetadata?.cloudformation?.endpoint); @@ -71,14 +71,7 @@ export class CfnExternal implements Configurables, Closeable { new GuardService(core.documentManager, core.diagnosticCoordinator, core.syntaxTreeManager); this.onlineStatus = overrides.onlineStatus ?? new OnlineStatus(); - this.featureFlags = - overrides.featureFlags ?? - new FeatureFlagProvider( - getFromGitHub, - undefined, - validatePositiveOrUndefined(core.awsMetadata?.featureFlags?.refreshIntervalMs), - validatePositiveOrUndefined(core.awsMetadata?.featureFlags?.dynamicRefreshIntervalMs), - ); + this.featureFlags = core.featureFlags; this.onlineFeatureGuard = overrides.onlineFeatureGuard ?? 
new OnlineFeatureGuard(core.awsCredentials); } @@ -87,12 +80,6 @@ export class CfnExternal implements Configurables, Closeable { } async close() { - return await closeSafely( - this.cfnLintService, - this.guardService, - this.schemaRetriever, - this.onlineStatus, - this.featureFlags, - ); + return await closeSafely(this.cfnLintService, this.guardService, this.schemaRetriever, this.onlineStatus); } } diff --git a/src/server/CfnInfraCore.ts b/src/server/CfnInfraCore.ts index 7f2422f..44a5288 100644 --- a/src/server/CfnInfraCore.ts +++ b/src/server/CfnInfraCore.ts @@ -5,6 +5,7 @@ import { SyntaxTreeManager } from '../context/syntaxtree/SyntaxTreeManager'; import { DataStoreFactoryProvider, MultiDataStoreFactoryProvider } from '../datastore/DataStore'; import { DocumentManager } from '../document/DocumentManager'; import { DocumentMetadata } from '../document/DocumentProtocol'; +import { featureFlagLocalFile, FeatureFlagProvider, getFromGitHub } from '../featureFlag/FeatureFlagProvider'; import { LspComponents } from '../protocol/LspComponents'; import { DiagnosticCoordinator } from '../services/DiagnosticCoordinator'; import { SettingsManager } from '../settings/SettingsManager'; @@ -15,6 +16,7 @@ import { UsageTracker } from '../usageTracker/UsageTracker'; import { UsageTrackerMetrics } from '../usageTracker/UsageTrackerMetrics'; import { Closeable, closeSafely } from '../utils/Closeable'; import { Configurable, Configurables } from '../utils/Configurable'; +import { validatePositiveOrUndefined } from '../utils/Number'; import { AwsMetadata, ExtendedInitializeParams } from './InitParams'; /** @@ -24,6 +26,7 @@ import { AwsMetadata, ExtendedInitializeParams } from './InitParams'; */ export class CfnInfraCore implements Configurables, Closeable { readonly awsMetadata?: AwsMetadata; + readonly featureFlags: FeatureFlagProvider; readonly dataStoreFactory: DataStoreFactoryProvider; readonly clientMessage: ClientMessage; readonly settingsManager: SettingsManager; @@ -45,7 
+48,16 @@ export class CfnInfraCore implements Configurables, Closeable { overrides: Partial = {}, ) { this.awsMetadata = initializeParams.initializationOptions?.aws; - this.dataStoreFactory = overrides.dataStoreFactory ?? new MultiDataStoreFactoryProvider(); + this.featureFlags = + overrides.featureFlags ?? + new FeatureFlagProvider( + getFromGitHub, + featureFlagLocalFile(), + validatePositiveOrUndefined(this.awsMetadata?.featureFlags?.refreshIntervalMs), + validatePositiveOrUndefined(this.awsMetadata?.featureFlags?.dynamicRefreshIntervalMs), + ); + this.dataStoreFactory = + overrides.dataStoreFactory ?? new MultiDataStoreFactoryProvider(this.featureFlags.get('FileDb')); this.clientMessage = overrides.clientMessage ?? new ClientMessage(lspComponents.communication); this.settingsManager = overrides.settingsManager ?? new SettingsManager(lspComponents.workspace, this.awsMetadata?.settings); @@ -82,6 +94,11 @@ export class CfnInfraCore implements Configurables, Closeable { } async close() { - return await closeSafely(this.documentManager, this.dataStoreFactory, TelemetryService.instance); + return await closeSafely( + this.documentManager, + this.dataStoreFactory, + this.featureFlags, + TelemetryService.instance, + ); } } diff --git a/src/services/RelationshipSchemaService.ts b/src/services/RelationshipSchemaService.ts index 1e2a33a..bdb91e2 100644 --- a/src/services/RelationshipSchemaService.ts +++ b/src/services/RelationshipSchemaService.ts @@ -26,7 +26,7 @@ export type RelationshipGroupData = Record; export class RelationshipSchemaService { private readonly relationshipCache: Map = new Map(); - constructor(private readonly schemaFilePath: string = join(__dirname, 'assets', 'relationship_schemas.json')) { + constructor(private readonly schemaFilePath: string = relationshipLocalFile()) { this.loadAllSchemas(); } @@ -152,3 +152,7 @@ export class RelationshipSchemaService { return [...resourceTypes]; } } + +export function relationshipLocalFile(baseDir: string = 
__dirname) { + return join(baseDir, 'assets', 'relationship_schemas.json'); +} diff --git a/src/utils/File.ts b/src/utils/File.ts index 9c78ca7..b7021e4 100644 --- a/src/utils/File.ts +++ b/src/utils/File.ts @@ -44,7 +44,7 @@ export function readBufferIfExists( encoding?: null | undefined; flag?: string | undefined; } | null, -): NonSharedBuffer { +): Buffer { try { if (existsSync(path)) { return readFileSync(path, options); diff --git a/src/utils/RemoteDownload.ts b/src/utils/RemoteDownload.ts index 27cf26e..f46d36c 100644 --- a/src/utils/RemoteDownload.ts +++ b/src/utils/RemoteDownload.ts @@ -1,4 +1,5 @@ import axios from 'axios'; +import { LoggerFactory } from '../telemetry/LoggerFactory'; export async function downloadFile(url: string): Promise { const response = await axios({ @@ -7,10 +8,12 @@ export async function downloadFile(url: string): Promise { responseType: 'arraybuffer', }); + LoggerFactory.getLogger('Remote').info(`Fetching ${url}`); return Buffer.from(response.data); } export async function downloadJson(url: string): Promise { + LoggerFactory.getLogger('Remote').info(`Fetching ${url}`); const response = await axios({ method: 'get', url: url, diff --git a/src/utils/Retry.ts b/src/utils/Retry.ts index 211eee6..6be88ff 100644 --- a/src/utils/Retry.ts +++ b/src/utils/Retry.ts @@ -11,7 +11,7 @@ export type RetryOptions = { totalTimeoutMs: number; }; -function sleep(ms: number): Promise { +export function sleep(ms: number): Promise { return new Promise((resolve) => { setTimeout(resolve, ms); }); diff --git a/tools/telemetry-generator.ts b/tools/telemetry-generator.ts index 5822243..fd92636 100644 --- a/tools/telemetry-generator.ts +++ b/tools/telemetry-generator.ts @@ -120,9 +120,8 @@ import { LspS3Handlers } from '../src/protocol/LspS3Handlers'; import { LspSystemHandlers } from '../src/protocol/LspSystemHandlers'; import { RelationshipSchemaService } from '../src/services/RelationshipSchemaService'; import { LspCfnEnvironmentHandlers } from 
'../src/protocol/LspCfnEnvironmentHandlers'; -import { FeatureFlagProvider, getFromGitHub } from '../src/featureFlag/FeatureFlagProvider'; -import { AwsEnv } from '../src/utils/Environment'; import { TelemetryService } from '../src/telemetry/TelemetryService'; +import { featureFlagLocalFile, FeatureFlagProvider, getFromGitHub } from '../src/featureFlag/FeatureFlagProvider'; const textDocuments = new TextDocuments(TextDocument); @@ -193,7 +192,8 @@ function main() { stubInterface(), ); - const dataStoreFactory = new MultiDataStoreFactoryProvider(); + const featureFlags = new FeatureFlagProvider(getFromGitHub, featureFlagLocalFile(join(__dirname, '..'))); + const dataStoreFactory = new MultiDataStoreFactoryProvider(featureFlags.get('FileDb')); const core = new CfnInfraCore( lsp, { @@ -207,16 +207,13 @@ function main() { { dataStoreFactory, documentManager: new DocumentManager(textDocuments), + featureFlags, }, ); const schemaStore = new SchemaStore(dataStoreFactory); const external = new CfnExternal(lsp, core, { schemaStore, - featureFlags: new FeatureFlagProvider( - getFromGitHub, - join(__dirname, '..', 'assets', 'featureFlag', `${AwsEnv}.json`), - ), }); const providers = new CfnLspProviders(core, external, { diff --git a/tst/unit/featureFlag/FeatureFlag.test.ts b/tst/unit/featureFlag/FeatureFlag.test.ts index 13a4a01..5ba3238 100644 --- a/tst/unit/featureFlag/FeatureFlag.test.ts +++ b/tst/unit/featureFlag/FeatureFlag.test.ts @@ -1,9 +1,11 @@ import { describe, it, expect } from 'vitest'; +import { AndFeatureFlag, LocalHostTargetedFeatureFlag } from '../../../src/featureFlag/CombinedFeatureFlags'; import { StaticFeatureFlag, FleetTargetedFeatureFlag, RegionAllowlistFeatureFlag, } from '../../../src/featureFlag/FeatureFlag'; +import { buildLocalHost } from '../../../src/featureFlag/FeatureFlagBuilder'; import { AwsRegion } from '../../../src/utils/Region'; describe('StaticFeatureFlag', () => { @@ -80,3 +82,70 @@ describe('RegionAllowlistFeatureFlag', () => { 
expect(description).toContain('eu-west-1'); }); }); + +describe('AndFeatureFlag', () => { + it('should return true when all flags are enabled', () => { + const flag = new AndFeatureFlag(new StaticFeatureFlag('a', true), new StaticFeatureFlag('b', true)); + expect(flag.isEnabled()).toBe(true); + }); + + it('should return false when any flag is disabled', () => { + const flag = new AndFeatureFlag(new StaticFeatureFlag('a', true), new StaticFeatureFlag('b', false)); + expect(flag.isEnabled()).toBe(false); + }); + + it('should throw when constructed with no flags', () => { + expect(() => new AndFeatureFlag()).toThrow('1 or more feature flags required'); + }); + + it('should describe all child flags', () => { + const flag = new AndFeatureFlag(new StaticFeatureFlag('a', true), new StaticFeatureFlag('b', false)); + expect(flag.describe()).toContain('a'); + expect(flag.describe()).toContain('b'); + }); +}); + +describe('LocalHostTargetedFeatureFlag', () => { + it('should be enabled at 100% fleet percentage', () => { + const flag = new LocalHostTargetedFeatureFlag(new FleetTargetedFeatureFlag('test', 100)); + expect(flag.isEnabled()).toBe(true); + }); + + it('should be disabled at 0% fleet percentage', () => { + const flag = new LocalHostTargetedFeatureFlag(new FleetTargetedFeatureFlag('test', 0)); + expect(flag.isEnabled()).toBe(false); + }); + + it('should return consistent results across calls', () => { + const flag = new LocalHostTargetedFeatureFlag(new FleetTargetedFeatureFlag('test', 50)); + expect(flag.isEnabled()).toBe(flag.isEnabled()); + }); + + it('should describe itself with fleet info', () => { + const flag = new LocalHostTargetedFeatureFlag(new FleetTargetedFeatureFlag('test', 75)); + expect(flag.describe()).toContain('LocalHostTargetedFeatureFlag'); + expect(flag.describe()).toContain('75'); + }); +}); + +describe('buildLocalHost', () => { + it('should return enabled flag when enabled with 100% fleet', () => { + const flag = buildLocalHost('FileDb', { 
enabled: true, fleetPercentage: 100 }); + expect(flag.isEnabled()).toBe(true); + }); + + it('should return disabled flag when enabled is false', () => { + const flag = buildLocalHost('FileDb', { enabled: false, fleetPercentage: 100 }); + expect(flag.isEnabled()).toBe(false); + }); + + it('should return disabled flag when fleet percentage is 0', () => { + const flag = buildLocalHost('FileDb', { enabled: true, fleetPercentage: 0 }); + expect(flag.isEnabled()).toBe(false); + }); + + it('should default to disabled with no config', () => { + const flag = buildLocalHost('FileDb'); + expect(flag.isEnabled()).toBe(false); + }); +}); diff --git a/tst/unit/featureFlag/FeatureFlagProvider.test.ts b/tst/unit/featureFlag/FeatureFlagProvider.test.ts index 2209ca8..b622858 100644 --- a/tst/unit/featureFlag/FeatureFlagProvider.test.ts +++ b/tst/unit/featureFlag/FeatureFlagProvider.test.ts @@ -1,8 +1,8 @@ -import { readFileSync } from 'fs'; +import { existsSync, readFileSync } from 'fs'; import { join } from 'path'; import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; import { FeatureFlagConfigSchema } from '../../../src/featureFlag/FeatureFlagBuilder'; -import { FeatureFlagProvider } from '../../../src/featureFlag/FeatureFlagProvider'; +import { featureFlagLocalFile, FeatureFlagProvider } from '../../../src/featureFlag/FeatureFlagProvider'; import { ScopedTelemetry } from '../../../src/telemetry/ScopedTelemetry'; describe('FeatureFlagProvider', () => { @@ -143,4 +143,30 @@ describe('FeatureFlagProvider', () => { await expect((provider as any).getFeatureFlags('alpha')).rejects.toThrow('status code 500'); }); }); + + describe('featureFlagLocalFile', () => { + const projectRoot = join(__dirname, '..', '..', '..'); + + it('should resolve to an existing file with project root as baseDir', () => { + const path = featureFlagLocalFile(projectRoot); + expect(existsSync(path)).toBe(true); + }); + + it('should produce a parseable feature flag config', () => { + const 
path = featureFlagLocalFile(projectRoot); + const content = JSON.parse(readFileSync(path, 'utf8')); + expect(FeatureFlagConfigSchema.parse(content)).toBeDefined(); + }); + + it('should build path with assets/featureFlag/.json structure', () => { + const path = featureFlagLocalFile('/some/base'); + expect(path).toMatch(/\/some\/base\/assets\/featureFlag\/\w+\.json$/); + }); + + it('should default baseDir to __dirname of the source module', () => { + const defaultPath = featureFlagLocalFile(); + expect(defaultPath).toContain(join('assets', 'featureFlag')); + expect(defaultPath).toMatch(/\.json$/); + }); + }); }); diff --git a/tst/unit/featureFlag/FeatureFlagSupplier.test.ts b/tst/unit/featureFlag/FeatureFlagSupplier.test.ts index 71142d1..e46af18 100644 --- a/tst/unit/featureFlag/FeatureFlagSupplier.test.ts +++ b/tst/unit/featureFlag/FeatureFlagSupplier.test.ts @@ -25,7 +25,7 @@ describe('FeatureFlagSupplier', () => { it('should initialize with feature flags', () => { const supplier = new FeatureFlagSupplier(configSupplier, throwError); - expect([...supplier.featureFlags.keys()]).toEqual(['Constants']); + expect([...supplier.featureFlags.keys()]).toEqual(['Constants', 'FileDb']); expect(supplier.featureFlags.get('Constants')?.isEnabled()).toBe(false); expect([...supplier.targetedFeatureFlags.keys()]).toEqual(['EnhancedDryRun']); @@ -52,7 +52,7 @@ describe('FeatureFlagSupplier', () => { it('should handle invalid config and fallback to default', () => { const supplier = new FeatureFlagSupplier(() => 'invalid', configSupplier); - expect([...supplier.featureFlags.keys()]).toEqual(['Constants']); + expect([...supplier.featureFlags.keys()]).toEqual(['Constants', 'FileDb']); expect([...supplier.targetedFeatureFlags.keys()]).toEqual(['EnhancedDryRun']); supplier.close(); @@ -61,7 +61,7 @@ describe('FeatureFlagSupplier', () => { it('should handle undefined config', () => { const supplier = new FeatureFlagSupplier(() => undefined, configSupplier); - 
expect([...supplier.featureFlags.keys()]).toEqual(['Constants']); + expect([...supplier.featureFlags.keys()]).toEqual(['Constants', 'FileDb']); expect([...supplier.targetedFeatureFlags.keys()]).toEqual(['EnhancedDryRun']); supplier.close(); diff --git a/tst/unit/utils/File.test.ts b/tst/unit/utils/File.test.ts new file mode 100644 index 0000000..cf42eb6 --- /dev/null +++ b/tst/unit/utils/File.test.ts @@ -0,0 +1,60 @@ +import { randomUUID as v4 } from 'crypto'; +import { mkdirSync, rmSync, writeFileSync } from 'fs'; +import { join } from 'path'; +import { describe, it, expect, beforeAll, afterAll } from 'vitest'; +import { readFileIfExists, readFileIfExistsAsync, readBufferIfExists } from '../../../src/utils/File'; + +describe('File', () => { + const testDir = join(process.cwd(), 'node_modules', '.cache', 'file-tests', v4()); + const textFile = join(testDir, 'text.txt'); + const binaryFile = join(testDir, 'binary.bin'); + const textContent = 'hello world 🌍'; + const binaryContent = Buffer.from([0x00, 0x01, 0x02, 0xff]); + const nonexistentPath = join(testDir, 'does-not-exist.txt'); + + beforeAll(() => { + mkdirSync(testDir, { recursive: true }); + writeFileSync(textFile, textContent, 'utf8'); + writeFileSync(binaryFile, binaryContent); + }); + + afterAll(() => { + rmSync(testDir, { recursive: true, force: true }); + }); + + describe('readFileIfExists', () => { + it('should return file content as string', () => { + expect(readFileIfExists(textFile)).toBe(textContent); + }); + + it('should accept encoding options object', () => { + expect(readFileIfExists(textFile, { encoding: 'utf8' })).toBe(textContent); + }); + + it('should throw for nonexistent path', () => { + expect(() => readFileIfExists(nonexistentPath)).toThrow('does not exist'); + }); + }); + + describe('readFileIfExistsAsync', () => { + it('should return file content as string', async () => { + await expect(readFileIfExistsAsync(textFile)).resolves.toBe(textContent); + }); + + it('should throw for 
nonexistent path', async () => { + await expect(readFileIfExistsAsync(nonexistentPath)).rejects.toThrow('does not exist'); + }); + }); + + describe('readBufferIfExists', () => { + it('should return file content as buffer', () => { + const result = readBufferIfExists(binaryFile); + expect(Buffer.isBuffer(result)).toBe(true); + expect(Buffer.compare(result, binaryContent)).toBe(0); + }); + + it('should throw for nonexistent path', () => { + expect(() => readBufferIfExists(nonexistentPath)).toThrow('does not exist'); + }); + }); +}); diff --git a/tst/unit/utils/Retry.test.ts b/tst/unit/utils/Retry.test.ts index 8931a7c..599f09c 100644 --- a/tst/unit/utils/Retry.test.ts +++ b/tst/unit/utils/Retry.test.ts @@ -1,7 +1,16 @@ import { Logger } from 'pino'; import * as sinon from 'sinon'; import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest'; -import { RetryOptions, retryWithExponentialBackoff } from '../../../src/utils/Retry'; +import { RetryOptions, retryWithExponentialBackoff, sleep } from '../../../src/utils/Retry'; + +describe('sleep', () => { + it('should resolve after the specified duration', async () => { + const start = performance.now(); + await sleep(50); + const elapsed = performance.now() - start; + expect(elapsed).toBeGreaterThanOrEqual(40); + }); +}); describe('retryWithExponentialBackoff', () => { const options: RetryOptions = { diff --git a/tst/utils/MockServerComponents.ts b/tst/utils/MockServerComponents.ts index 33ceb5a..ea60d4e 100644 --- a/tst/utils/MockServerComponents.ts +++ b/tst/utils/MockServerComponents.ts @@ -362,6 +362,7 @@ export function createMockComponents(o: Partial = {} const core: MockInfraCoreComponents = { dataStoreFactory, + featureFlags: overrides.featureFlags ?? stubInterface(), clientMessage: overrides.clientMessage ?? createMockClientMessage(), settingsManager: overrides.settingsManager ?? createMockSettingsManager(), syntaxTreeManager: overrides.syntaxTreeManager ?? 
createMockSyntaxTreeManager(), diff --git a/tst/utils/TestExtension.ts b/tst/utils/TestExtension.ts index e746a57..5714a39 100644 --- a/tst/utils/TestExtension.ts +++ b/tst/utils/TestExtension.ts @@ -51,7 +51,7 @@ import { IamCredentialsUpdateRequest, IamCredentialsDeleteNotification } from '. import { AwsCredentials } from '../../src/auth/AwsCredentials'; import { UpdateCredentialsParams } from '../../src/auth/AwsLspAuthTypes'; import { MultiDataStoreFactoryProvider } from '../../src/datastore/DataStore'; -import { FeatureFlagProvider } from '../../src/featureFlag/FeatureFlagProvider'; +import { featureFlagLocalFile, FeatureFlagProvider } from '../../src/featureFlag/FeatureFlagProvider'; import { LspCapabilities } from '../../src/protocol/LspCapabilities'; import { LspConnection } from '../../src/protocol/LspConnection'; import { SchemaRetriever } from '../../src/schema/SchemaRetriever'; @@ -133,9 +133,14 @@ export class TestExtension implements Closeable { const lsp = this.serverConnection.components; LoggerFactory.reconfigure('warn'); - const dataStoreFactory = new MultiDataStoreFactoryProvider(); + const ffFile = featureFlagLocalFile(join(__dirname, '..', '..')); + const featureFlags = new FeatureFlagProvider((_env) => { + return Promise.resolve(JSON.parse(readFileSync(ffFile, 'utf8'))); + }, ffFile); + const dataStoreFactory = new MultiDataStoreFactoryProvider(featureFlags.get('FileDb')); this.core = new CfnInfraCore(lsp, params, { dataStoreFactory, + featureFlags, }); const schemaStore = new SchemaStore(dataStoreFactory); @@ -150,14 +155,10 @@ export class TestExtension implements Closeable { }, ); - const ffFile = join(__dirname, '..', '..', 'assets', 'featureFlag', 'alpha.json'); this.external = new CfnExternal(lsp, this.core, { schemaStore, schemaRetriever, cfnLintService: createMockCfnLintService(), - featureFlags: new FeatureFlagProvider((_env) => { - return Promise.resolve(JSON.parse(readFileSync(ffFile, 'utf8'))); - }, ffFile), awsClient: 
config.awsClientFactory?.( this.core.awsCredentials, this.core.awsMetadata?.cloudformation?.endpoint,