Skip to content

Commit 7bef26d

Browse files
committed
fix(memory): add IGNORE action for identical memories, make the onboarding memory route idempotent, and exempt logo images (.png/.svg) from auth in the middleware
1 parent 41f8b26 commit 7bef26d

5 files changed

Lines changed: 69 additions & 28 deletions

File tree

src/client/middleware.js

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -39,7 +39,8 @@ export const config = {
3939
* - favicon.ico, sitemap.xml, robots.txt (metadata files)
4040
* - api (API routes)
4141
* - PWA files (manifest, icons, service worker, workbox)
42+
* - .png and .svg files (static images)
4243
*/
43-
"/((?!_next/static|_next/image|favicon.ico|sitemap.xml|robots.txt|api|manifest.json|manifest.webmanifest|sw.js|workbox-.*\\.js$|.*\\.png$).*)"
44+
"/((?!_next/static|_next/image|favicon.ico|sitemap.xml|robots.txt|api|manifest.json|manifest.webmanifest|sw.js|workbox-.*\\.js$|.*\\.png$|.*\\.svg$).*)"
4445
]
45-
}
46+
}

src/server/main/misc/routes.py

Lines changed: 8 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -45,6 +45,14 @@ async def save_onboarding_data_endpoint(
4545
user_email = payload.get("email")
4646
plan = payload.get("plan", "free")
4747

48+
# --- IDEMPOTENCY CHECK ---
49+
# First, check if onboarding is already marked as complete to prevent re-processing.
50+
existing_profile = await mongo_manager.get_user_profile(user_id)
51+
if existing_profile and existing_profile.get("userData", {}).get("onboardingComplete", False):
52+
logger.warning(f"User {user_id} attempted to submit onboarding data again, but it's already complete. Ignoring request.")
53+
return JSONResponse(content={"message": "Onboarding already completed.", "status": 200})
54+
# --- END CHECK ---
55+
4856
logger.info(f"[{datetime.datetime.now()}] [ONBOARDING] User {user_id}, Data keys: {list(request_body.data.keys())}")
4957
try:
5058
default_privacy_filters = {

src/server/mcp_hub/memory/formats.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -28,7 +28,7 @@
2828
cud_decision_required_format = {
2929
"type": "object",
3030
"properties": {
31-
"action": {"type": "string", "enum": ["ADD", "UPDATE", "DELETE"]},
31+
"action": {"type": "string", "enum": ["ADD", "UPDATE", "DELETE", "IGNORE"]},
3232
"fact_id": {"type": ["integer", "null"], "description": "The ID of the fact to be updated or deleted. This should be null if the action is ADD."},
3333
"content": {"type": ["string", "null"], "description": "The new, full content of the fact if the action is ADD or UPDATE. Should be null for DELETE."},
3434
"analysis": {

src/server/mcp_hub/memory/prompts.py

Lines changed: 6 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -28,14 +28,16 @@
2828
You are a memory management reasoning engine. Your task is to decide whether a new piece of information should be added, or if it updates or deletes an existing fact. You must also perform a full analysis for any new or updated content. Adhere strictly to the provided JSON schema.
2929
3030
Actions:
31-
- **ADD**: The user's request is entirely new information. The `content` should be the new fact, and `analysis` must be completed. `fact_id` is null.
32-
- **UPDATE**: The user's request is a modification of an existing fact. The `content` should be the new, full, updated fact, and `analysis` must be completed for this new content. `fact_id` is the ID of the original fact.
33-
- **DELETE**: The user's request is an explicit or implicit instruction to remove an existing fact. The `fact_id` is the ID of the fact to remove. `content` and `analysis` must be null.
31+
- ADD: The user's request is entirely new information not covered by existing facts.
32+
- UPDATE: The user's request is a modification or refinement of an existing fact.
33+
- DELETE: The user's request is an explicit or implicit instruction to remove an existing fact.
34+
- IGNORE: The new information is an exact or near-exact duplicate of an existing fact, providing no new details.
3435
3536
Instructions:
3637
1. **Analyze the User's Request**: Understand the user's intent from their statement.
3738
2. **Compare with Existing Facts**: Review the list of similar facts provided. Is the user's request about one of them?
38-
3. **Decide the Action**: Choose ADD, UPDATE, or DELETE.
39+
- If the request is an EXACT or SEMANTICALLY IDENTICAL duplicate of an existing fact, choose IGNORE.
40+
3. **Decide the Action**: Choose ADD, UPDATE, DELETE, or IGNORE.
3941
4. **Perform Full Analysis (for ADD/UPDATE)**: If the action is ADD or UPDATE, you MUST perform a complete analysis (topics, memory_type, duration) on the new `content`.
4042
5. **Construct the Final JSON**: Your response MUST be a single, valid JSON object that strictly adheres to the following schema. Do not include any other text or explanations.
4143

src/server/mcp_hub/memory/utils.py

Lines changed: 51 additions & 21 deletions
Original file line numberDiff line numberDiff line change
@@ -253,6 +253,46 @@ async def _insert_fact_with_analysis(conn, user_id: str, content: str, source: O
253253
message += f" This is a short-term memory and will be forgotten around {expires_at.strftime('%Y-%m-%d %H:%M %Z')}."
254254
return message
255255

256+
async def _update_fact_with_analysis(conn, user_id: str, fact_id: int, content: str, analysis: dict) -> str:
    """Update a fact's content, embedding, expiry, and topic links in one transaction.

    Args:
        conn: Open asyncpg connection (transaction is opened here).
        user_id: Owner of the fact; scopes the UPDATE so a user cannot modify
            another user's facts.
        fact_id: Primary key of the fact row to update.
        content: The new, full text of the fact.
        analysis: Analysis dict; ``memory_type``/``duration`` drive the expiry
            timestamp and ``topics`` drives the fact_topics links.

    Returns:
        A human-readable status message describing what happened.
    """
    logger.info(f"Executing _update_fact_with_analysis for fact_id {fact_id}.")

    # Short-term memories get an expiry derived from the analysed duration;
    # long-term memories keep expires_at = NULL.
    expires_at = None
    if analysis.get("memory_type") == "short-term":
        expires_at = parse_duration(analysis.get("duration"))

    async with conn.transaction():
        embedding = _get_normalized_embedding(content, task_type="RETRIEVAL_DOCUMENT")

        result = await conn.execute(
            """
            UPDATE facts SET
                content = $1,
                embedding = $2,
                expires_at = $3,
                updated_at = NOW()
            WHERE id = $4 AND user_id = $5
            """,
            content, embedding, expires_at, fact_id, user_id
        )
        # FIX: the original reported success unconditionally. asyncpg returns a
        # command status tag like "UPDATE 1"; verify a row was actually touched
        # so a wrong fact_id (or another user's fact) is not reported as updated.
        if result and result.split(" ")[-1] == "0":
            logger.warning(f"No fact with ID {fact_id} found for user {user_id}; nothing updated.")
            return f"Fact {fact_id} not found; nothing was updated."
        logger.info(f"Updated fact record for ID: {fact_id}.")

        # Replace topic links: delete the old set, then re-link from the analysis.
        await conn.execute("DELETE FROM fact_topics WHERE fact_id = $1", fact_id)

        topic_names = analysis.get("topics", ["Miscellaneous"])
        for topic_name in topic_names:
            topic_id = await conn.fetchval("SELECT id FROM topics WHERE name = $1", topic_name)
            if topic_id:
                # Unknown topic names are skipped silently; ON CONFLICT guards
                # against duplicate links within the same fact.
                await conn.execute(
                    "INSERT INTO fact_topics (fact_id, topic_id) VALUES ($1, $2) ON CONFLICT DO NOTHING",
                    fact_id, topic_id
                )

        logger.info(f"Updated topics for fact ID: {fact_id}.")

    message = f"Fact {fact_id} updated."
    if expires_at:
        message += f" It will now be forgotten around {expires_at.strftime('%Y-%m-%d %H:%M %Z')}."
    return message
256296
async def _process_single_fact_cud(conn, user_id: str, fact_content: str, source: Optional[str] = None) -> str:
257297
"""
258298
Processes a single atomic fact by deciding to ADD, UPDATE, or DELETE it.
@@ -291,29 +331,25 @@ async def _process_single_fact_cud(conn, user_id: str, fact_content: str, source
291331
logger.info(f"Step 3/3: Executing action '{decision.get('action')}'.")
292332
action = decision.get("action")
293333

294-
if action == "ADD" and decision.get("content") and decision.get("analysis"):
334+
if action == "IGNORE":
335+
fact_id = decision.get("fact_id")
336+
logger.info(f"Action is IGNORE for fact '{fact_content[:50]}...'. It is a duplicate of fact {fact_id}. No changes made.")
337+
return f"Fact already exists (ID: {fact_id}) and was ignored."
338+
elif action == "ADD" and decision.get("content") and decision.get("analysis"):
295339
logger.info("Action is ADD. Inserting new fact with full analysis.")
296340
return await _insert_fact_with_analysis(conn, user_id, decision["content"], source, decision["analysis"])
297341

298342
elif action == "UPDATE" and decision.get("fact_id") and decision.get("content") and decision.get("analysis"):
299343
fact_id = decision["fact_id"]
300-
logger.info(f"Action is UPDATE for fact_id {fact_id}. Replacing fact.")
301-
async with conn.transaction():
302-
await conn.execute("DELETE FROM facts WHERE id = $1 AND user_id = $2", fact_id, user_id)
303-
logger.info(f"Original fact {fact_id} deleted.")
304-
return await _insert_fact_with_analysis(conn, user_id, decision["content"], source, decision["analysis"])
344+
logger.info(f"Action is UPDATE for fact_id {fact_id}. Updating fact in place.")
345+
return await _update_fact_with_analysis(conn, user_id, fact_id, decision["content"], decision["analysis"])
305346

306347
elif action == "DELETE" and decision.get("fact_id"):
307348
fact_id = decision["fact_id"]
308349
logger.info(f"Action is DELETE for fact_id {fact_id}.")
309350
result = await conn.execute("DELETE FROM facts WHERE id = $1 AND user_id = $2", fact_id, user_id)
310351

311-
deleted_count = 0
312-
try:
313-
# Format is "DELETE 1", so we split and take the number.
314-
deleted_count = int(result.split(" ")[1])
315-
except (IndexError, ValueError):
316-
pass # Keep deleted_count as 0
352+
deleted_count = int(result.split(" ")[1]) if result and " " in result else 0
317353

318354
if deleted_count == 1:
319355
logger.info(f"Successfully deleted fact {fact_id}.")
@@ -433,10 +469,7 @@ async def delete_memory_by_source(user_id: str, source_name: str) -> str:
433469
async with pool.acquire() as conn:
434470
result = await conn.execute("DELETE FROM facts WHERE user_id = $1 AND source = $2", user_id, source_name)
435471

436-
try:
437-
deleted_count = int(result.split(" ")[1])
438-
except (IndexError, ValueError):
439-
deleted_count = 0
472+
deleted_count = int(result.split(" ")[1]) if result and " " in result else 0
440473
logger.info(f"Deleted {deleted_count} facts from source: {source_name}")
441474
return f"Deleted {deleted_count} facts from source: {source_name}"
442475

@@ -447,8 +480,5 @@ async def purge_expired_facts():
447480
async with pool.acquire() as conn:
448481
# The index on expires_at makes this query very efficient.
449482
result = await conn.execute("DELETE FROM facts WHERE expires_at IS NOT NULL AND expires_at < NOW()")
450-
try:
451-
deleted_count = int(result.split(" ")[1])
452-
logger.info(f"Purge job: Purged {deleted_count} expired short-term memories.")
453-
except (IndexError, ValueError):
454-
logger.info("Purge job: No expired memories found to purge.")
483+
deleted_count = int(result.split(" ")[1]) if result and " " in result else 0
484+
logger.info(f"Purge job: Purged {deleted_count} expired short-term memories.")

0 commit comments

Comments
 (0)