Skip to content

Commit da7c027

Browse files
committed
2 parents b514d55 + 5705ac0 commit da7c027

5 files changed

Lines changed: 31 additions & 31 deletions

File tree

packages/openmemory-js/src/core/models.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -9,13 +9,13 @@ export const load_models = (): model_cfg => {
99
if (cfg) return cfg;
1010
const p = join(__dirname, "../../../models.yml");
1111
if (!existsSync(p)) {
12-
console.warn("[MODELS] models.yml not found, using defaults");
12+
console.error("[MODELS] models.yml not found, using defaults");
1313
return get_defaults();
1414
}
1515
try {
1616
const yml = readFileSync(p, "utf-8");
1717
cfg = parse_yaml(yml);
18-
console.log(
18+
console.error(
1919
`[MODELS] Loaded models.yml (${Object.keys(cfg).length} sectors)`,
2020
);
2121
return cfg;

packages/openmemory-js/src/core/vector/postgres.ts

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -15,7 +15,7 @@ export class PostgresVectorStore implements VectorStore {
1515
}
1616

1717
async storeVector(id: string, sector: string, vector: number[], dim: number, user_id?: string): Promise<void> {
18-
console.log(`[Vector] Storing ID: ${id}, Sector: ${sector}, Dim: ${dim}`);
18+
console.error(`[Vector] Storing ID: ${id}, Sector: ${sector}, Dim: ${dim}`);
1919
const v = vectorToBuffer(vector);
2020
const sql = `insert into ${this.table}(id,sector,user_id,v,dim) values($1,$2,$3,$4,$5) on conflict(id,sector) do update set user_id=excluded.user_id,v=excluded.v,dim=excluded.dim`;
2121
await this.db.run_async(sql, [id, sector, user_id || "anonymous", v, dim]);
@@ -32,7 +32,7 @@ export class PostgresVectorStore implements VectorStore {
3232
async searchSimilar(sector: string, queryVec: number[], topK: number): Promise<Array<{ id: string; score: number }>> {
3333
// Postgres implementation (in-memory cosine sim for now, as per original)
3434
const rows = await this.db.all_async(`select id,v,dim from ${this.table} where sector=$1`, [sector]);
35-
console.log(`[Vector] Search Sector: ${sector}, Found ${rows.length} rows.`);
35+
console.error(`[Vector] Search Sector: ${sector}, Found ${rows.length} rows.`);
3636
const sims: Array<{ id: string; score: number }> = [];
3737
for (const row of rows) {
3838
const vec = bufferToVector(row.v);

packages/openmemory-js/src/memory/embed.ts

Lines changed: 13 additions & 13 deletions
Original file line numberDiff line numberDiff line change
@@ -77,7 +77,7 @@ const fuse_vecs = (syn: number[], sem: number[]): number[] => {
7777
};
7878

7979
export async function embedForSector(t: string, s: string): Promise<number[]> {
80-
console.log(`[EMBED] Provider: ${env.emb_kind}, Tier: ${tier}, Sector: ${s}`);
80+
console.error(`[EMBED] Provider: ${env.emb_kind}, Tier: ${tier}, Sector: ${s}`);
8181
if (!sector_configs[s]) throw new Error(`Unknown sector: ${s}`);
8282
if (tier === "hybrid") return gen_syn_emb(t, s);
8383
if (tier === "smart" && env.emb_kind !== "synthetic") {
@@ -113,7 +113,7 @@ export async function embedQueryForAllSectors(
113113
for (const s of sectors) txts[s] = query;
114114
return await emb_gemini(txts);
115115
} catch (e) {
116-
console.warn(`[EMBED] Gemini batch failed, falling back to sequential: ${e}`);
116+
console.error(`[EMBED] Gemini batch failed, falling back to sequential: ${e}`);
117117
}
118118
}
119119

@@ -157,7 +157,7 @@ async function get_sem_emb(t: string, s: string): Promise<number[]> {
157157
try {
158158
const result = await embed_with_provider(provider, t, s);
159159
if (i > 0) {
160-
console.log(
160+
console.error(
161161
`[EMBED] Fallback to ${provider} succeeded for sector: ${s}`,
162162
);
163163
}
@@ -167,7 +167,7 @@ async function get_sem_emb(t: string, s: string): Promise<number[]> {
167167
const nextProvider = providers[i + 1];
168168

169169
if (nextProvider) {
170-
console.warn(
170+
console.error(
171171
`[EMBED] ${provider} failed: ${errMsg}, trying ${nextProvider}`,
172172
);
173173
} else {
@@ -208,7 +208,7 @@ async function emb_batch_with_fallback(
208208
}
209209
}
210210
if (i > 0) {
211-
console.log(
211+
console.error(
212212
`[EMBED] Fallback to ${provider} succeeded for batch`,
213213
);
214214
}
@@ -218,7 +218,7 @@ async function emb_batch_with_fallback(
218218
const nextProvider = providers[i + 1];
219219

220220
if (nextProvider) {
221-
console.warn(
221+
console.error(
222222
`[EMBED] ${provider} batch failed: ${errMsg}, trying ${nextProvider}`,
223223
);
224224
} else {
@@ -325,7 +325,7 @@ async function emb_gemini(
325325
1000,
326326
1000 * Math.pow(2, a),
327327
);
328-
console.warn(
328+
console.error(
329329
`[EMBED] Gemini rate limit (${a + 1}/3), waiting ${d}ms`,
330330
);
331331
await new Promise((x) => setTimeout(x, d));
@@ -350,7 +350,7 @@ async function emb_gemini(
350350
`Gemini failed after 3 attempts: ${errMsg}`,
351351
);
352352
}
353-
console.warn(`[EMBED] Gemini error (${a + 1}/3): ${errMsg}`);
353+
console.error(`[EMBED] Gemini error (${a + 1}/3): ${errMsg}`);
354354
await new Promise((x) => setTimeout(x, 1000 * Math.pow(2, a)));
355355
}
356356
}
@@ -394,15 +394,15 @@ async function emb_aws(t: string, s: string): Promise<number[]> {
394394

395395
const jsonString = new TextDecoder().decode(response.body);
396396
const parsedResponse = JSON.parse(jsonString);
397-
return resize_vec(parsedResponse, env.vec_dim);
397+
return resize_vec(parsedResponse.embedding, env.vec_dim);
398398
} catch (error) {
399399
throw new Error(`AWS: ${error}`);
400400
}
401401
}
402402

403403
async function emb_local(t: string, s: string): Promise<number[]> {
404404
if (!env.local_model_path) {
405-
console.warn("[EMBED] Local model missing, using synthetic");
405+
console.error("[EMBED] Local model missing, using synthetic");
406406
return gen_syn_emb(t, s);
407407
}
408408
try {
@@ -419,7 +419,7 @@ async function emb_local(t: string, s: string): Promise<number[]> {
419419
const n = Math.sqrt(e.reduce((sum, v) => sum + v * v, 0));
420420
return e.map((v) => v / n);
421421
} catch {
422-
console.warn("[EMBED] Local embedding failed, using synthetic");
422+
console.error("[EMBED] Local embedding failed, using synthetic");
423423
return gen_syn_emb(t, s);
424424
}
425425
}
@@ -556,7 +556,7 @@ export async function embedMultiSector(
556556
simp &&
557557
(env.emb_kind === "gemini" || env.emb_kind === "openai")
558558
) {
559-
console.log(
559+
console.error(
560560
`[EMBED] Simple mode (1 batch for ${secs.length} sectors)`,
561561
);
562562
const tb: Record<string, string> = {};
@@ -567,7 +567,7 @@ export async function embedMultiSector(
567567
r.push({ sector: s, vector: v, dim: v.length }),
568568
);
569569
} else {
570-
console.log(`[EMBED] Advanced mode (${secs.length} calls)`);
570+
console.error(`[EMBED] Advanced mode (${secs.length} calls)`);
571571
const par = env.adv_embed_parallel && env.emb_kind !== "gemini";
572572
if (par) {
573573
const p = secs.map(async (s) => {

packages/openmemory-js/src/memory/hsg.ts

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -577,7 +577,7 @@ export async function create_contextual_waypoints(
577577
const existing = await q.get_waypoint.get(mem_id, rel_id);
578578
if (existing) {
579579
const new_wt = Math.min(1.0, existing.weight + 0.1);
580-
await q.upd_waypoint.run(new_wt, now, mem_id, rel_id);
580+
await q.upd_waypoint.run(mem_id, new_wt, now, rel_id);
581581
} else {
582582
await q.ins_waypoint.run(
583583
mem_id,
@@ -635,7 +635,7 @@ export async function reinforce_waypoints(trav_path: string[]): Promise<void> {
635635
reinforcement.max_waypoint_weight,
636636
wp.weight + reinforcement.waypoint_boost,
637637
);
638-
await q.upd_waypoint.run(new_wt, now, src_id, dst_id);
638+
await q.upd_waypoint.run(src_id, new_wt, now, dst_id);
639639
}
640640
}
641641
}
@@ -945,7 +945,7 @@ export async function hsg_query(
945945
r.id,
946946
r.salience,
947947
);
948-
await q.upd_seen.run(Date.now(), rsal, Date.now(), r.id);
948+
await q.upd_seen.run(r.id, Date.now(), rsal, Date.now());
949949
if (r.path.length > 1) {
950950
await reinforce_waypoints(r.path);
951951
const wps = await q.get_waypoints_by_src.all(r.id);
@@ -974,10 +974,10 @@ export async function hsg_query(
974974
Math.min(1, linked_mem.salience + ctx_boost),
975975
);
976976
await q.upd_seen.run(
977+
u.node_id,
977978
Date.now(),
978979
new_sal,
979980
Date.now(),
980-
u.node_id,
981981
);
982982
}
983983
}
@@ -1008,7 +1008,7 @@ export async function run_decay_process(): Promise<{
10081008
const ds = (Date.now() - m.last_seen_at) / 86400000;
10091009
const ns = calc_decay(m.primary_sector, m.salience, ds);
10101010
if (ns !== m.salience) {
1011-
await q.upd_seen.run(m.last_seen_at, ns, Date.now(), m.id);
1011+
await q.upd_seen.run(m.id, m.last_seen_at, ns, Date.now());
10121012
d++;
10131013
}
10141014
p++;
@@ -1053,7 +1053,7 @@ export async function add_hsg_memory(
10531053
if (existing && hamming_dist(simhash, existing.simhash) <= 3) {
10541054
const now = Date.now();
10551055
const boosted_sal = Math.min(1, existing.salience + 0.15);
1056-
await q.upd_seen.run(now, boosted_sal, now, existing.id);
1056+
await q.upd_seen.run(existing.id, now, boosted_sal, now);
10571057
return {
10581058
id: existing.id,
10591059
primary_sector: existing.primary_sector,
@@ -1162,7 +1162,7 @@ export async function reinforce_memory(
11621162
const mem = await q.get_mem.get(id);
11631163
if (!mem) throw new Error(`Memory ${id} not found`);
11641164
const new_sal = Math.min(reinforcement.max_salience, mem.salience + boost);
1165-
await q.upd_seen.run(Date.now(), new_sal, Date.now(), id);
1165+
await q.upd_seen.run(id, Date.now(), new_sal, Date.now());
11661166
if (new_sal > 0.8) await log_maint_op("consolidate", 1);
11671167
}
11681168
export async function update_memory(

packages/openmemory-js/src/memory/reflect.ts

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -102,18 +102,18 @@ const boost = async (ids: string[]) => {
102102
};
103103

104104
export const run_reflection = async () => {
105-
console.log("[REFLECT] Starting reflection job...");
105+
console.error("[REFLECT] Starting reflection job...");
106106
const min = env.reflect_min || 20;
107107
const mems = await q.all_mem.all(100, 0);
108-
console.log(
108+
console.error(
109109
`[REFLECT] Fetched ${mems.length} memories (min required: ${min})`,
110110
);
111111
if (mems.length < min) {
112-
console.log("[REFLECT] Not enough memories, skipping");
112+
console.error("[REFLECT] Not enough memories, skipping");
113113
return { created: 0, reason: "low" };
114114
}
115115
const cls = cluster(mems);
116-
console.log(`[REFLECT] Clustered into ${cls.length} groups`);
116+
console.error(`[REFLECT] Clustered into ${cls.length} groups`);
117117
let n = 0;
118118
for (const c of cls) {
119119
const txt = summ(c);
@@ -125,7 +125,7 @@ export const run_reflection = async () => {
125125
freq: c.n,
126126
at: new Date().toISOString(),
127127
};
128-
console.log(
128+
console.error(
129129
`[REFLECT] Creating reflection: ${c.n} memories, salience=${s.toFixed(3)}, sector=${c.mem[0].primary_sector}`,
130130
);
131131
await add_hsg_memory(txt, j(["reflect:auto"]), meta);
@@ -134,7 +134,7 @@ export const run_reflection = async () => {
134134
n++;
135135
}
136136
if (n > 0) await log_maint_op("reflect", n);
137-
console.log(`[REFLECT] Job complete: created ${n} reflections`);
137+
console.error(`[REFLECT] Job complete: created ${n} reflections`);
138138
return { created: n, clusters: cls.length };
139139
};
140140

@@ -147,7 +147,7 @@ export const start_reflection = () => {
147147
() => run_reflection().catch((e) => console.error("[REFLECT]", e)),
148148
int,
149149
);
150-
console.log(`[REFLECT] Started: every ${env.reflect_interval || 10}m`);
150+
console.error(`[REFLECT] Started: every ${env.reflect_interval || 10}m`);
151151
};
152152

153153
export const stop_reflection = () => {

0 commit comments

Comments (0)