Skip to content

Commit 17347be

Browse files
Skiipy11claude authored
and committed
fix: tenant-scoped dedup, import validation, query-string auth removal
- Scope dedup check by client_id + type to prevent cross-client collisions - Add validation + credential scrubbing to import path (was bypassed) - Remove query-string API key support (leaks secrets in logs/referrers) - Add client_id filter to listStatuses in both SQLite and Postgres - Add missing idx_statuses_client index (events/facts had one, statuses didn't) - Remove duplicate entity_relationships schema block in SQLite init - Pass task type to embed() in import path for correct Gemini embeddings Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
1 parent 3185e4c commit 17347be

5 files changed

Lines changed: 34 additions & 25 deletions

File tree

api/src/middleware/auth.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -59,7 +59,7 @@ export function authMiddleware(req, res, next) {
5959
return res.status(429).json({ error: 'Too many failed attempts. Try again later.' });
6060
}
6161

62-
const key = req.headers['x-api-key'] || req.query.key;
62+
const key = req.headers['x-api-key'];
6363
if (!key) {
6464
recordFailure(ip);
6565
return res.status(401).json({ error: 'Missing API key' });

api/src/routes/export.js

Lines changed: 28 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,8 @@ import crypto from 'crypto';
33
import { scrollPoints, upsertPoint, findByPayload } from '../services/qdrant.js';
44
import { embed } from '../services/embedders/interface.js';
55
import { isStoreAvailable, createEvent, upsertFact, upsertStatus } from '../services/stores/interface.js';
6+
import { scrubCredentials } from '../services/scrub.js';
7+
import { validateMemoryInput } from '../middleware/validate.js';
68

79
export const exportRouter = Router();
810

@@ -96,24 +98,44 @@ exportRouter.post('/import', async (req, res) => {
9698
// Process each record in the batch sequentially
9799
for (const record of batch) {
98100
try {
99-
const content = record.content || record.text || '';
100-
if (!content) {
101+
const rawContent = record.content || record.text || '';
102+
if (!rawContent) {
101103
errors++;
102104
continue;
103105
}
104106

105-
// Compute content hash (SHA-256, first 16 hex chars — matches memory.js pattern)
107+
// Validate input (same rules as POST /memory)
108+
const validationError = validateMemoryInput({
109+
type: record.type || 'event',
110+
content: rawContent,
111+
source_agent: record.source_agent || 'import',
112+
importance: record.importance,
113+
client_id: record.client_id,
114+
});
115+
if (validationError) {
116+
errors++;
117+
continue;
118+
}
119+
120+
// Scrub credentials (same as POST /memory)
121+
const content = scrubCredentials(rawContent);
122+
123+
// Compute content hash from scrubbed content
106124
const contentHash = crypto.createHash('sha256').update(content).digest('hex').slice(0, 16);
107125

108-
// Check for existing memory with same content hash
109-
const existing = await findByPayload('content_hash', contentHash);
126+
// Check for existing memory with same content hash, scoped by tenant + type
127+
const existing = await findByPayload('content_hash', contentHash, {
128+
active: true,
129+
client_id: record.client_id || 'global',
130+
type: record.type || 'event',
131+
});
110132
if (existing.length > 0) {
111133
skipped++;
112134
continue;
113135
}
114136

115137
// Embed and generate ID
116-
const vector = await embed(content);
138+
const vector = await embed(content, 'store');
117139
const pointId = record.id || crypto.randomUUID();
118140
const now = new Date().toISOString();
119141

api/src/routes/memory.js

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -56,7 +56,7 @@ memoryRouter.post('/', async (req, res) => {
5656
const contentHash = crypto.createHash('sha256').update(cleanContent).digest('hex').slice(0, 16);
5757

5858
// --- Deduplication check ---
59-
const duplicates = await findByPayload('content_hash', contentHash, { active: true });
59+
const duplicates = await findByPayload('content_hash', contentHash, { active: true, client_id: client_id || 'global', type });
6060
if (duplicates.length > 0) {
6161
const existing = duplicates[0];
6262
const existingObservedBy = existing.payload.observed_by || [existing.payload.source_agent];

api/src/services/stores/postgres.js

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -59,6 +59,7 @@ export class PostgresStore {
5959
CREATE INDEX IF NOT EXISTS idx_facts_key ON facts(key);
6060
CREATE INDEX IF NOT EXISTS idx_facts_client ON facts(client_id);
6161
CREATE INDEX IF NOT EXISTS idx_statuses_subject ON statuses(subject);
62+
CREATE INDEX IF NOT EXISTS idx_statuses_client ON statuses(client_id);
6263
`);
6364

6465
// Migrate: add knowledge_category to existing tables (safe if already exists)
@@ -241,6 +242,7 @@ export class PostgresStore {
241242

242243
if (filters.source_agent) { sql += ` AND source_agent = $${i++}`; params.push(filters.source_agent); }
243244
if (filters.category) { sql += ` AND category = $${i++}`; params.push(filters.category); }
245+
if (filters.client_id) { sql += ` AND client_id = $${i++}`; params.push(filters.client_id); }
244246
if (filters.subject) { sql += ` AND subject ILIKE $${i++}`; params.push(`%${filters.subject}%`); }
245247

246248
sql += ' ORDER BY updated_at DESC LIMIT 50';

api/src/services/stores/sqlite.js

Lines changed: 2 additions & 17 deletions
Original file line numberDiff line numberDiff line change
@@ -67,6 +67,7 @@ export class SQLiteStore {
6767
CREATE INDEX IF NOT EXISTS idx_facts_key ON facts(key);
6868
CREATE INDEX IF NOT EXISTS idx_facts_client ON facts(client_id);
6969
CREATE INDEX IF NOT EXISTS idx_statuses_subject ON statuses(subject);
70+
CREATE INDEX IF NOT EXISTS idx_statuses_client ON statuses(client_id);
7071
7172
CREATE TABLE IF NOT EXISTS entities (
7273
id INTEGER PRIMARY KEY AUTOINCREMENT,
@@ -126,23 +127,6 @@ export class SQLiteStore {
126127
}
127128
}
128129

129-
// Entity relationships table
130-
this.db.exec(`
131-
CREATE TABLE IF NOT EXISTS entity_relationships (
132-
id INTEGER PRIMARY KEY AUTOINCREMENT,
133-
source_entity_id INTEGER REFERENCES entities(id),
134-
target_entity_id INTEGER REFERENCES entities(id),
135-
relationship_type TEXT NOT NULL DEFAULT 'co_occurrence',
136-
strength INTEGER DEFAULT 1,
137-
created_at TEXT DEFAULT (datetime('now')),
138-
updated_at TEXT DEFAULT (datetime('now')),
139-
UNIQUE(source_entity_id, target_entity_id, relationship_type)
140-
);
141-
142-
CREATE INDEX IF NOT EXISTS idx_er_source ON entity_relationships(source_entity_id);
143-
CREATE INDEX IF NOT EXISTS idx_er_target ON entity_relationships(target_entity_id);
144-
`);
145-
146130
// FTS5 virtual table for keyword search (BM25)
147131
try {
148132
this.db.exec(`
@@ -294,6 +278,7 @@ export class SQLiteStore {
294278

295279
if (filters.source_agent) { sql += ' AND source_agent = @source_agent'; params.source_agent = filters.source_agent; }
296280
if (filters.category) { sql += ' AND category = @category'; params.category = filters.category; }
281+
if (filters.client_id) { sql += ' AND client_id = @client_id'; params.client_id = filters.client_id; }
297282
if (filters.subject) { sql += ' AND subject LIKE @subject'; params.subject = `%${filters.subject}%`; }
298283

299284
sql += ' ORDER BY updated_at DESC LIMIT 50';

0 commit comments

Comments (0)