-
Notifications
You must be signed in to change notification settings - Fork 188
Expand file tree
/
Copy pathfile_service.py
More file actions
293 lines (228 loc) · 9.7 KB
/
file_service.py
File metadata and controls
293 lines (228 loc) · 9.7 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
"""Service for file operations with checksum tracking."""
import mimetypes
from os import stat_result
from pathlib import Path
from typing import Any, Dict, Tuple, Union
from basic_memory import file_utils
from basic_memory.file_utils import FileError
from basic_memory.markdown.markdown_processor import MarkdownProcessor
from basic_memory.models import Entity as EntityModel
from basic_memory.schemas import Entity as EntitySchema
from basic_memory.services.exceptions import FileOperationError
from basic_memory.utils import FilePath
from loguru import logger
class FileService:
    """Service for handling file operations.

    All paths are handled as Path objects internally. Strings are converted to
    Path objects when passed in. Relative paths are assumed to be relative to
    base_path.

    Features:
    - Consistent file writing with checksums
    - Frontmatter management
    - Atomic operations
    - Error handling
    """

    def __init__(
        self,
        base_path: Path,
        markdown_processor: MarkdownProcessor,
    ):
        self.base_path = base_path.resolve()  # Get absolute path
        self.markdown_processor = markdown_processor

    def _resolve_path(self, path: FilePath) -> Path:
        """Normalize a path argument to an absolute filesystem Path.

        Strings are converted to Path objects; relative paths are resolved
        against base_path, absolute paths are returned unchanged. This
        centralizes the conversion that every public method needs.

        Args:
            path: Path to normalize (Path or string)

        Returns:
            Absolute Path
        """
        path_obj = Path(path) if isinstance(path, str) else path
        return path_obj if path_obj.is_absolute() else self.base_path / path_obj

    def get_entity_path(self, entity: Union[EntityModel, EntitySchema]) -> Path:
        """Generate absolute filesystem path for entity.

        Args:
            entity: Entity model or schema with file_path attribute

        Returns:
            Absolute Path to the entity file
        """
        return self.base_path / entity.file_path

    async def read_entity_content(self, entity: EntityModel) -> str:
        """Get entity's content without frontmatter or structured sections.

        Used to index for search. Returns raw content without frontmatter,
        observations, or relations.

        Args:
            entity: Entity to read content for

        Returns:
            Raw content string without metadata sections
        """
        logger.debug("Reading entity content", entity_id=entity.id, permalink=entity.permalink)

        file_path = self.get_entity_path(entity)
        markdown = await self.markdown_processor.read_file(file_path)
        # A file with only frontmatter/sections has no content; normalize to "".
        return markdown.content or ""

    async def delete_entity_file(self, entity: EntityModel) -> None:
        """Delete entity file from filesystem.

        Args:
            entity: Entity model whose file should be deleted

        Raises:
            FileOperationError: If deletion fails
        """
        path = self.get_entity_path(entity)
        await self.delete_file(path)

    async def exists(self, path: FilePath) -> bool:
        """Check if file exists at the provided path.

        If path is relative, it is assumed to be relative to base_path.

        Args:
            path: Path to check (Path or string)

        Returns:
            True if file exists, False otherwise

        Raises:
            FileOperationError: If check fails
        """
        try:
            return self._resolve_path(path).exists()
        except Exception as e:
            logger.error("Failed to check file existence", path=str(path), error=str(e))
            raise FileOperationError(f"Failed to check file existence: {e}")

    async def write_file(self, path: FilePath, content: str) -> str:
        """Write content to file and return checksum.

        Handles both absolute and relative paths. Relative paths are resolved
        against base_path.

        Args:
            path: Where to write (Path or string)
            content: Content to write

        Returns:
            Checksum of written content

        Raises:
            FileOperationError: If write fails
        """
        full_path = self._resolve_path(path)
        try:
            # Ensure parent directory exists
            await file_utils.ensure_directory(full_path.parent)

            # Write content atomically
            logger.info(
                "Writing file",
                operation="write_file",
                path=str(full_path),
                content_length=len(content),
                is_markdown=full_path.suffix.lower() == ".md",
            )
            await file_utils.write_file_atomic(full_path, content)

            # Compute and return checksum
            checksum = await file_utils.compute_checksum(content)
            logger.debug("File write completed", path=str(full_path), checksum=checksum)
            return checksum

        except Exception as e:
            logger.exception("File write error", path=str(full_path), error=str(e))
            raise FileOperationError(f"Failed to write file: {e}")

    # TODO remove read_file
    async def read_file(self, path: FilePath) -> Tuple[str, str]:
        """Read file and compute checksum.

        Handles both absolute and relative paths. Relative paths are resolved
        against base_path.

        Args:
            path: Path to read (Path or string)

        Returns:
            Tuple of (content, checksum)

        Raises:
            FileOperationError: If read fails
        """
        full_path = self._resolve_path(path)
        try:
            logger.debug("Reading file", operation="read_file", path=str(full_path))
            content = full_path.read_text(encoding="utf-8")
            checksum = await file_utils.compute_checksum(content)
            logger.debug(
                "File read completed",
                path=str(full_path),
                checksum=checksum,
                content_length=len(content),
            )
            return content, checksum

        except Exception as e:
            logger.exception("File read error", path=str(full_path), error=str(e))
            raise FileOperationError(f"Failed to read file: {e}")

    async def delete_file(self, path: FilePath) -> None:
        """Delete file if it exists.

        Handles both absolute and relative paths. Relative paths are resolved
        against base_path. Missing files are ignored (no error raised).

        Args:
            path: Path to delete (Path or string)
        """
        self._resolve_path(path).unlink(missing_ok=True)

    async def update_frontmatter(self, path: FilePath, updates: Dict[str, Any]) -> str:
        """
        Update frontmatter fields in a file while preserving all content.

        Args:
            path: Path to the file (Path or string)
            updates: Dictionary of frontmatter fields to update

        Returns:
            Checksum of updated file
        """
        full_path = self._resolve_path(path)
        return await file_utils.update_frontmatter(full_path, updates)

    async def compute_checksum(self, path: FilePath) -> str:
        """Compute checksum for a file.

        Markdown files are read as text (utf-8); all other files are read
        as raw bytes before hashing.

        Args:
            path: Path to the file (Path or string)

        Returns:
            Checksum of the file content

        Raises:
            FileError: If checksum computation fails
        """
        full_path = self._resolve_path(path)
        try:
            if self.is_markdown(path):
                # read str
                content = full_path.read_text(encoding="utf-8")
            else:
                # read bytes
                content = full_path.read_bytes()
            return await file_utils.compute_checksum(content)

        except Exception as e:  # pragma: no cover
            logger.error("Failed to compute checksum", path=str(full_path), error=str(e))
            raise FileError(f"Failed to compute checksum for {path}: {e}")

    def file_stats(self, path: FilePath) -> stat_result:
        """Return file stats for a given path.

        Args:
            path: Path to the file (Path or string)

        Returns:
            File statistics (size, timestamps, etc.)
        """
        # get file timestamps
        return self._resolve_path(path).stat()

    def content_type(self, path: FilePath) -> str:
        """Return content_type for a given path.

        Guesses MIME type from the file name; falls back to "text/plain"
        when the type cannot be determined.

        Args:
            path: Path to the file (Path or string)

        Returns:
            MIME type of the file
        """
        full_path = self._resolve_path(path)
        # guess MIME type from the file name extension
        mime_type, _ = mimetypes.guess_type(full_path.name)

        # .canvas files are json
        if full_path.suffix == ".canvas":
            mime_type = "application/json"

        content_type = mime_type or "text/plain"
        return content_type

    def is_markdown(self, path: FilePath) -> bool:
        """Check if a file is a markdown file.

        Args:
            path: Path to the file (Path or string)

        Returns:
            True if the file is a markdown file, False otherwise
        """
        return self.content_type(path) == "text/markdown"