1111from loguru import logger
1212from sqlalchemy .exc import IntegrityError
1313
14+ from basic_memory .config import ProjectConfig
15+ from basic_memory .file_utils import has_frontmatter
1416from basic_memory .markdown import EntityParser
1517from basic_memory .models import Entity
1618from basic_memory .repository import EntityRepository , RelationRepository
@@ -65,13 +67,15 @@ class SyncService:
6567
6668 def __init__ (
6769 self ,
70+ config : ProjectConfig ,
6871 entity_service : EntityService ,
6972 entity_parser : EntityParser ,
7073 entity_repository : EntityRepository ,
7174 relation_repository : RelationRepository ,
7275 search_service : SearchService ,
7376 file_service : FileService ,
7477 ):
78+ self .config = config
7579 self .entity_service = entity_service
7680 self .entity_parser = entity_parser
7781 self .entity_repository = entity_repository
@@ -327,36 +331,40 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona
327331 """
328332 # Parse markdown first to get any existing permalink
329333 logger .debug ("Parsing markdown file" , path = path )
330- entity_markdown = await self .entity_parser .parse_file (path )
331334
332- # Resolve permalink - this handles all the cases including conflicts
333- permalink = await self .entity_service .resolve_permalink (path , markdown = entity_markdown )
335+ file_path = self .entity_parser .base_path / path
336+ file_content = file_path .read_text ()
337+ file_contains_frontmatter = has_frontmatter (file_content )
334338
335- # If permalink changed, update the file
336- if permalink != entity_markdown .frontmatter .permalink :
337- logger .info (
338- "Updating permalink" ,
339- path = path ,
340- old_permalink = entity_markdown .frontmatter .permalink ,
341- new_permalink = permalink ,
342- )
339+ # entity markdown will always contain front matter, so it can be used to create/update the entity
340+ entity_markdown = await self .entity_parser .parse_file (path )
343341
344- entity_markdown .frontmatter .metadata ["permalink" ] = permalink
345- checksum = await self .file_service .update_frontmatter (path , {"permalink" : permalink })
346- else :
347- checksum = await self .file_service .compute_checksum (path )
342+ # if the file contains frontmatter, resolve a permalink
343+ if file_contains_frontmatter :
344+ # Resolve permalink - this handles all the cases including conflicts
345+ permalink = await self .entity_service .resolve_permalink (path , markdown = entity_markdown )
346+
347+ # If permalink changed, update the file
348+ if permalink != entity_markdown .frontmatter .permalink :
349+ logger .info (
350+ "Updating permalink" ,
351+ path = path ,
352+ old_permalink = entity_markdown .frontmatter .permalink ,
353+ new_permalink = permalink ,
354+ )
355+
356+ entity_markdown .frontmatter .metadata ["permalink" ] = permalink
357+ await self .file_service .update_frontmatter (path , {"permalink" : permalink })
348358
349359 # if the file is new, create an entity
350360 if new :
351361 # Create entity with final permalink
352- logger .debug ("Creating new entity from markdown" , path = path , permalink = permalink )
353-
362+ logger .debug ("Creating new entity from markdown" , path = path )
354363 await self .entity_service .create_entity_from_markdown (Path (path ), entity_markdown )
355364
356365 # otherwise we need to update the entity and observations
357366 else :
358- logger .debug ("Updating entity from markdown" , path = path , permalink = permalink )
359-
367+ logger .debug ("Updating entity from markdown" , path = path )
360368 await self .entity_service .update_entity_and_observations (Path (path ), entity_markdown )
361369
362370 # Update relations and search index
@@ -366,10 +374,10 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona
366374 # This is necessary for files with wikilinks to ensure consistent checksums
367375 # after relation processing is complete
368376 final_checksum = await self .file_service .compute_checksum (path )
369-
377+
370378 # set checksum
371379 await self .entity_repository .update (entity .id , {"checksum" : final_checksum })
372-
380+
373381 logger .debug (
374382 "Markdown sync completed" ,
375383 path = path ,
@@ -378,7 +386,7 @@ async def sync_markdown_file(self, path: str, new: bool = True) -> Tuple[Optiona
378386 relation_count = len (entity .relations ),
379387 checksum = final_checksum ,
380388 )
381-
389+
382390 # Return the final checksum to ensure everything is consistent
383391 return entity , final_checksum
384392
@@ -475,8 +483,30 @@ async def handle_move(self, old_path, new_path):
475483
476484 entity = await self .entity_repository .get_by_file_path (old_path )
477485 if entity :
478- # Update file_path but keep the same permalink for link stability
479- updated = await self .entity_repository .update (entity .id , {"file_path" : new_path })
486+ # Update file_path in all cases
487+ updates = {"file_path" : new_path }
488+
489+ # If configured, also update permalink to match new path
490+ if self .config .update_permalinks_on_move :
491+ # generate new permalink value
492+ new_permalink = await self .entity_service .resolve_permalink (new_path )
493+
494+ # write to file and get new checksum
495+ new_checksum = await self .file_service .update_frontmatter (
496+ new_path , {"permalink" : new_permalink }
497+ )
498+
499+ updates ["permalink" ] = new_permalink
500+ updates ["checksum" ] = new_checksum
501+
502+ logger .info (
503+ "Updating permalink on move" ,
504+ old_permalink = entity .permalink ,
505+ new_permalink = new_permalink ,
506+ new_checksum = new_checksum ,
507+ )
508+
509+ updated = await self .entity_repository .update (entity .id , updates )
480510
481511 if updated is None : # pragma: no cover
482512 logger .error (
0 commit comments