Skip to content

Commit aa73e5b

Browse files
authored
🐛 fix: recover when duplication lands on parent (#189)
1 parent 84a547c commit aa73e5b

1 file changed

Lines changed: 85 additions & 12 deletions

File tree

src/mcp_services/notion/notion_state_manager.py

Lines changed: 85 additions & 12 deletions
Original file line number | Diff line number | Diff line change
@@ -520,19 +520,92 @@ def _duplicate_current_initial_state(
520520
duplicated_url = page.url
521521
# Validate that the resulting URL is a genuine duplicate of the original template.
522522
if not self._is_valid_duplicate_url(original_url, duplicated_url):
523-
logger.error(
524-
"| ✗ Unexpected URL after duplication – URL does not match expected duplicate pattern.\n Original: %s\n Observed: %s",
525-
original_url,
526-
duplicated_url,
527-
)
528-
# Attempt to clean up stray duplicate before propagating error.
529-
self._cleanup_orphan_duplicate(
530-
original_initial_state_id, original_initial_state_title
531-
)
532-
raise RuntimeError(
533-
"Duplicate URL pattern mismatch – duplication likely failed"
523+
# Sometimes duplication succeeds but UI navigates to parent instead of the new page.
524+
# In that case, try to find the most recently created page named exactly "<title> (1)".
525+
logger.warning(
526+
"| ✗ Duplicate URL pattern mismatch. Attempting recovery by searching for latest '%s (1)' page...",
527+
original_initial_state_title,
534528
)
535529

530+
target_title = f"{original_initial_state_title} (1)"
531+
try:
532+
# Wait 5 seconds before the first search to allow Notion to index the new page
533+
time.sleep(5)
534+
535+
attempts = 3
536+
for retry_idx in range(attempts):
537+
response = self.source_notion_client.search(
538+
query=target_title,
539+
filter={"property": "object", "value": "page"},
540+
)
541+
542+
candidates = []
543+
for res in response.get("results", []):
544+
props = res.get("properties", {})
545+
title_prop = props.get("title", {}).get("title") or props.get(
546+
"Name", {}
547+
).get("title")
548+
title_plain = "".join(
549+
t.get("plain_text", "") for t in (title_prop or [])
550+
).strip()
551+
if title_plain == target_title:
552+
created_time = res.get("created_time") or res.get(
553+
"last_edited_time"
554+
)
555+
candidates.append((created_time, res))
556+
557+
if candidates:
558+
# Pick the most recently created/edited candidate (ISO8601 strings are lexicographically comparable)
559+
latest_res = max(candidates, key=lambda x: x[0])[1]
560+
fallback_url = latest_res.get("url")
561+
if fallback_url:
562+
logger.info(
563+
"| ○ Navigating directly to latest '%s' duplicate via API result...",
564+
target_title,
565+
)
566+
page.goto(fallback_url, wait_until="load", timeout=60_000)
567+
time.sleep(5)
568+
duplicated_url = page.url
569+
break
570+
571+
if retry_idx < attempts - 1:
572+
logger.debug(
573+
"| ○ '%s' not visible yet via search. Waiting 5s before retry %d/%d...",
574+
target_title,
575+
retry_idx + 1,
576+
attempts - 1,
577+
)
578+
time.sleep(5)
579+
580+
# Re-validate after attempted recovery
581+
if not self._is_valid_duplicate_url(original_url, duplicated_url):
582+
logger.error(
583+
"| ✗ Could not locate a valid '%s' duplicate after recovery attempt.\n Original: %s\n Observed: %s",
584+
target_title,
585+
original_url,
586+
duplicated_url,
587+
)
588+
# Attempt to clean up stray duplicate before propagating error.
589+
self._cleanup_orphan_duplicate(
590+
original_initial_state_id, original_initial_state_title
591+
)
592+
raise RuntimeError(
593+
"Duplicate URL pattern mismatch – duplication likely failed"
594+
)
595+
except Exception as search_exc:
596+
logger.error(
597+
"| ✗ Failed during recovery search for '%s': %s",
598+
target_title,
599+
search_exc,
600+
)
601+
# Attempt to clean up stray duplicate before propagating error.
602+
self._cleanup_orphan_duplicate(
603+
original_initial_state_id, original_initial_state_title
604+
)
605+
raise RuntimeError(
606+
"Duplicate URL pattern mismatch – duplication likely failed"
607+
) from search_exc
608+
536609
duplicated_initial_state_id = self._extract_initial_state_id_from_url(
537610
duplicated_url
538611
)
@@ -688,7 +761,7 @@ def _duplicate_initial_state_for_task(
688761
last_exc = e
689762
if attempt < max_retries:
690763
logger.warning(
691-
"| ⚠️ Duplication attempt %d failed: %s. Retrying...",
764+
"| Duplication attempt %d failed: %s. Retrying...",
692765
attempt + 1,
693766
e,
694767
)

0 commit comments

Comments (0)