Skip to content

Commit b9351fc

Browse files
committed
feat(robot): add advisory file locking to SQLite cache
Add shared/exclusive advisory file locking (fcntl.flock) to prevent `prune` from deleting a cache database while another process (language server, analyze run) is using it.
1 parent 1421107 commit b9351fc

File tree

3 files changed

+241
-3
lines changed

3 files changed

+241
-3
lines changed

packages/analyze/src/robotcode/analyze/cache/cli.py

Lines changed: 24 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -6,7 +6,14 @@
66
from robotcode.plugin import Application, OutputFormat, pass_application
77
from robotcode.robot.config.loader import load_robot_config_from_path
88
from robotcode.robot.config.utils import get_config_files
9-
from robotcode.robot.diagnostics.data_cache import CACHE_DIR_NAME, CacheSection, SqliteDataCache, build_cache_dir
9+
from robotcode.robot.diagnostics.data_cache import (
10+
_LOCK_FILE_NAME,
11+
CACHE_DIR_NAME,
12+
CacheSection,
13+
SqliteDataCache,
14+
build_cache_dir,
15+
exclusive_cache_lock,
16+
)
1017

1118
from ..config import AnalyzeConfig
1219

@@ -371,11 +378,12 @@ def _resolve_cache_root(
371378

372379

373380
@cache_group.command(name="prune")
381+
@click.option("--force", is_flag=True, help="Force prune even if cache is in use by another process.")
374382
@click.argument(
375383
"paths", nargs=-1, type=click.Path(exists=True, dir_okay=True, file_okay=True, readable=True, path_type=Path)
376384
)
377385
@pass_application
378-
def cache_prune(app: Application, paths: Tuple[Path, ...]) -> None:
386+
def cache_prune(app: Application, force: bool, paths: Tuple[Path, ...]) -> None:
379387
"""\
380388
Remove the entire cache directory.
381389
@@ -390,5 +398,19 @@ def cache_prune(app: Application, paths: Tuple[Path, ...]) -> None:
390398
app.echo(f"Cache directory does not exist: {cache_root}")
391399
return
392400

401+
if not force:
402+
locked_dirs = []
403+
for lock_file in cache_root.rglob(_LOCK_FILE_NAME):
404+
with exclusive_cache_lock(lock_file.parent) as acquired:
405+
if not acquired:
406+
locked_dirs.append(lock_file.parent)
407+
408+
if locked_dirs:
409+
app.echo("Cannot prune: cache is in use by another process.")
410+
for d in locked_dirs:
411+
app.echo(f" Locked: {d}")
412+
app.echo("Use --force to override.")
413+
return
414+
393415
shutil.rmtree(cache_root)
394416
app.echo(f"Removed {cache_root}")

packages/robot/src/robotcode/robot/diagnostics/data_cache.py

Lines changed: 75 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1,10 +1,12 @@
1+
import os
12
import pickle
23
import sqlite3
34
import sys
5+
from contextlib import contextmanager
46
from dataclasses import dataclass
57
from enum import Enum
68
from pathlib import Path
7-
from typing import Any, Generic, List, Optional, Tuple, Type, TypeVar, Union, cast
9+
from typing import Any, Generic, Iterator, List, Optional, Tuple, Type, TypeVar, Union, cast
810

911
from ..utils import get_robot_version_str
1012

@@ -80,6 +82,74 @@ def data(self) -> _D:
8082
_TABLE_NAMES = [s.value for s in CacheSection]
8183

8284
CACHE_DIR_NAME = ".robotcode_cache"
85+
_LOCK_FILE_NAME = "cache.lock"


def _acquire_shared_lock(cache_dir: Path) -> Optional[int]:
    """Acquire a shared advisory lock on the cache directory.

    Creates ``cache.lock`` in *cache_dir* if necessary and takes a shared
    (read) ``flock`` on it, marking the cache as "in use" so that
    ``exclusive_cache_lock`` (used by ``prune``) fails while it is held.

    Returns the open file descriptor on Unix, or ``None`` on Windows
    (Windows already prevents deletion of open files, so no advisory
    lock is needed there).

    Note: this call blocks while another process holds the exclusive
    lock (e.g. a prune in progress).
    """
    if sys.platform == "win32":
        return None

    import fcntl

    lock_path = cache_dir / _LOCK_FILE_NAME
    # 0o666 so caches shared between users stay lockable; the effective
    # mode is still reduced by the process umask.
    fd = os.open(str(lock_path), os.O_RDWR | os.O_CREAT, 0o666)
    try:
        fcntl.flock(fd, fcntl.LOCK_SH)
    except Exception:
        # Don't leak the descriptor if locking fails; re-raise unchanged.
        os.close(fd)
        raise
    return fd


def _release_lock(fd: Optional[int]) -> None:
    """Release an advisory lock acquired via :func:`_acquire_shared_lock`.

    Accepts ``None`` (the Windows no-op case). The descriptor is always
    closed, even if unlocking raises.
    """
    if fd is None:
        return
    try:
        if sys.platform != "win32":
            import fcntl

            fcntl.flock(fd, fcntl.LOCK_UN)
    finally:
        os.close(fd)


@contextmanager
def exclusive_cache_lock(cache_dir: Path) -> Iterator[bool]:
    """Context manager that tries to acquire an exclusive lock on a cache directory.

    Yields True if the lock was acquired (cache is not in use),
    False if another process holds a shared lock (cache is in use).
    The lock is held until the context manager exits.

    On Windows, or when no lock file exists yet, True is yielded without
    taking any lock.
    """
    if sys.platform == "win32":
        yield True
        return

    import fcntl

    lock_path = cache_dir / _LOCK_FILE_NAME
    try:
        # Open without O_CREAT: a missing lock file means no process has
        # ever opened this cache, so there is nothing to wait for.
        # Handling FileNotFoundError here (instead of checking exists()
        # first) avoids a race with concurrent deletion of the file.
        fd = os.open(str(lock_path), os.O_RDWR)
    except FileNotFoundError:
        yield True
        return

    try:
        fcntl.flock(fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
    except OSError:
        # Another process holds a shared lock: report "in use". No lock
        # was taken, so the descriptor can be closed immediately.
        os.close(fd)
        yield False
        return

    # The yield is deliberately OUTSIDE any exception-catching try:
    # exceptions raised by the caller's body are delivered at this yield,
    # and catching them (as an `except OSError` around the yield would)
    # both swallows the caller's error and makes the generator yield a
    # second time, which @contextmanager turns into a RuntimeError.
    try:
        yield True
    finally:
        try:
            fcntl.flock(fd, fcntl.LOCK_UN)
        except OSError:
            pass
        os.close(fd)
83153

84154

85155
def build_cache_dir(base_path: Path) -> Path:
@@ -109,6 +179,8 @@ def __init__(self, cache_dir: Path, app_version: str = "") -> None:
109179
"utf-8",
110180
)
111181

182+
self._lock_fd = _acquire_shared_lock(cache_dir)
183+
112184
db_path = cache_dir / "cache.db"
113185
self._conn = sqlite3.connect(str(db_path), check_same_thread=False)
114186
self._conn.execute("PRAGMA journal_mode=WAL")
@@ -177,6 +249,8 @@ def save_entry(
177249

178250
def close(self) -> None:
    """Close the database connection and release the shared cache lock.

    Safe to call more than once: after the first call ``self._lock_fd``
    is ``None`` and ``_release_lock`` treats that as a no-op.
    """
    try:
        self._conn.close()
    finally:
        # Release the advisory lock even if closing the connection
        # raises, so a `prune` in another process is never blocked
        # by a dead handle.
        _release_lock(self._lock_fd)
        self._lock_fd = None
180254

181255
@property
182256
def db_path(self) -> Path:
Lines changed: 142 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,142 @@
1+
"""Tests for SqliteDataCache advisory file locking."""
2+
3+
import sys
4+
from pathlib import Path
5+
6+
import pytest
7+
8+
from robotcode.robot.diagnostics.data_cache import (
9+
_LOCK_FILE_NAME,
10+
CacheSection,
11+
SqliteDataCache,
12+
exclusive_cache_lock,
13+
)
14+
15+
pytestmark = pytest.mark.skipif(sys.platform == "win32", reason="Advisory file locking is Unix-only")
16+
17+
18+
class TestSharedLocking:
    """Shared (reader) locks: taken by every open SqliteDataCache."""

    def test_lock_file_created_on_open(self, tmp_path: Path) -> None:
        # Opening a cache must materialize the lock file in its directory.
        cache_dir = tmp_path / "cache"
        data_cache = SqliteDataCache(cache_dir)
        lock_file = cache_dir / _LOCK_FILE_NAME
        assert lock_file.exists()
        data_cache.close()

    def test_two_caches_can_open_same_dir(self, tmp_path: Path) -> None:
        # Shared locks are compatible with each other: two handles on the
        # same directory can both read and write.
        cache_dir = tmp_path / "cache"
        first = SqliteDataCache(cache_dir)
        second = SqliteDataCache(cache_dir)

        first.save_entry(CacheSection.LIBRARY, "e1", None, "data1")
        second.save_entry(CacheSection.LIBRARY, "e2", None, "data2")

        entry = second.read_entry(CacheSection.LIBRARY, "e1", str, str)
        assert entry is not None
        assert entry.data == "data1"

        first.close()
        second.close()

    def test_lock_released_on_close(self, tmp_path: Path) -> None:
        # After close(), nothing holds the shared lock any more.
        cache_dir = tmp_path / "cache"
        SqliteDataCache(cache_dir).close()

        with exclusive_cache_lock(cache_dir) as acquired:
            assert acquired, "Exclusive lock should succeed after cache is closed"
48+
49+
50+
class TestExclusiveLocking:
    """Exclusive (writer) lock semantics used by `prune` to detect caches in use."""

    def test_exclusive_blocked_by_shared(self, tmp_path: Path) -> None:
        # An open SqliteDataCache holds a shared lock, so exclusive must fail.
        cache_dir = tmp_path / "cache"
        cache = SqliteDataCache(cache_dir)

        with exclusive_cache_lock(cache_dir) as acquired:
            assert not acquired, "Exclusive lock should fail while cache is open"

        cache.close()

    def test_exclusive_blocked_by_multiple_shared(self, tmp_path: Path) -> None:
        # Exclusive acquisition must fail until EVERY shared holder closes.
        cache_dir = tmp_path / "cache"
        cache1 = SqliteDataCache(cache_dir)
        cache2 = SqliteDataCache(cache_dir)

        with exclusive_cache_lock(cache_dir) as acquired:
            assert not acquired

        cache1.close()

        # Still blocked by cache2
        with exclusive_cache_lock(cache_dir) as acquired:
            assert not acquired

        cache2.close()

        # Now should succeed
        with exclusive_cache_lock(cache_dir) as acquired:
            assert acquired

    def test_exclusive_succeeds_when_no_cache_open(self, tmp_path: Path) -> None:
        # Lock file exists (created by the cache) but no one holds it.
        cache_dir = tmp_path / "cache"
        cache = SqliteDataCache(cache_dir)
        cache.close()

        with exclusive_cache_lock(cache_dir) as acquired:
            assert acquired

    def test_exclusive_succeeds_when_no_lock_file(self, tmp_path: Path) -> None:
        # A directory that never had a cache opened in it has no lock file;
        # exclusive_cache_lock treats that as "not in use".
        cache_dir = tmp_path / "cache"
        cache_dir.mkdir(parents=True)
        # No lock file exists

        with exclusive_cache_lock(cache_dir) as acquired:
            assert acquired

    def test_exclusive_lock_held_during_context(self, tmp_path: Path) -> None:
        cache_dir = tmp_path / "cache"
        cache = SqliteDataCache(cache_dir)
        cache.close()

        with exclusive_cache_lock(cache_dir) as acquired:
            assert acquired
            # While we hold the exclusive lock, a shared lock on a new file
            # descriptor must be refused (flock locks belong to the open
            # file description, so a fresh fd conflicts even in the same
            # process). We use a raw NON-BLOCKING flock here instead of
            # opening a SqliteDataCache: the cache takes its shared lock
            # in blocking mode and would deadlock this single-threaded test.
            import fcntl
            import os

            fd = os.open(str(cache_dir / _LOCK_FILE_NAME), os.O_RDWR)
            try:
                with pytest.raises(BlockingIOError):
                    fcntl.flock(fd, fcntl.LOCK_SH | fcntl.LOCK_NB)
            finally:
                os.close(fd)
116+
117+
118+
class TestLockingWithPrune:
    """End-to-end checks of the lock protocol that `prune` relies on."""

    def test_prune_safe_when_cache_closed(self, tmp_path: Path) -> None:
        import shutil

        cache_dir = tmp_path / "cache"
        closed_cache = SqliteDataCache(cache_dir)
        closed_cache.save_entry(CacheSection.LIBRARY, "e1", None, "data")
        closed_cache.close()

        # Nothing holds the shared lock, so prune may delete the tree
        # while holding the exclusive lock.
        with exclusive_cache_lock(cache_dir) as acquired:
            assert acquired
            shutil.rmtree(cache_dir)

        assert not cache_dir.exists()

    def test_prune_blocked_when_cache_open(self, tmp_path: Path) -> None:
        cache_dir = tmp_path / "cache"
        open_cache = SqliteDataCache(cache_dir)

        with exclusive_cache_lock(cache_dir) as acquired:
            # An open cache holds the shared lock; prune must back off
            # and the cache dir must survive.
            assert not acquired
            assert cache_dir.exists()

        open_cache.close()

0 commit comments

Comments
 (0)