@@ -1179,6 +1179,41 @@ def test_sqlite_cache_rejects_path_backed_object_code(tmp_path):
11791179 cache [b"k" ] = path_backed
11801180
11811181
@needs_sqlite3
def test_sqlite_cache_treats_path_backed_payload_as_miss_and_prunes(tmp_path):
    """A pickled path-backed ObjectCode can end up in the DB via an older
    version, a corrupted/injected row, or a direct write to the file. Since
    pickling records only the path (the bytes would be stale or gone), a
    reopened cache must report a miss for that key and delete the row so the
    bad entry cannot be served again."""
    import pickle
    import sqlite3
    import time as _time

    from cuda.core._module import ObjectCode
    from cuda.core.utils import SQLiteProgramCache

    db = tmp_path / "cache.db"

    # Open and close once so the schema exists before we inject a row.
    with SQLiteProgramCache(db):
        pass  # materialise schema

    # Build the poisoned payload: an ObjectCode backed by a missing file.
    stale = ObjectCode.from_cubin(str(tmp_path / "nonexistent.cubin"), name="x")
    blob = pickle.dumps(stale)
    stamp = _time.time()

    # Inject it behind the cache's back with a raw sqlite3 connection.
    raw = sqlite3.connect(db)
    try:
        raw.execute(
            "INSERT INTO entries(key, payload, size_bytes, created_at, accessed_at) VALUES (?, ?, ?, ?, ?)",
            (b"k", blob, len(blob), stamp, stamp),
        )
        raw.commit()
    finally:
        raw.close()

    # Reopening must treat the entry as absent and prune it from the DB.
    with SQLiteProgramCache(db) as cache:
        assert cache.get(b"k") is None
        assert b"k" not in cache
        assert len(cache) == 0
1216+
11821217@needs_sqlite3
11831218def test_sqlite_cache_accepts_str_keys (tmp_path ):
11841219 from cuda .core .utils import SQLiteProgramCache
@@ -1694,6 +1729,33 @@ def test_filestream_cache_rejects_path_backed_object_code(tmp_path):
16941729 cache [b"k" ] = path_backed
16951730
16961731
def test_filestream_cache_treats_path_backed_payload_as_miss_and_prunes(tmp_path):
    """Mirror of the SQLite-backend guarantee: if a pickled path-backed
    ObjectCode record lands on disk (older version, corruption, direct
    injection), a reopened cache must report a miss and remove the file so
    the entry never hits again."""
    import pickle
    import time as _time

    from cuda.core._module import ObjectCode
    from cuda.core.utils import FileStreamProgramCache
    from cuda.core.utils._program_cache import _FILESTREAM_SCHEMA_VERSION

    root = tmp_path / "fc"

    # Ask a live cache where the record for b"k" would live on disk.
    with FileStreamProgramCache(root) as cache:
        target = cache._path_for_key(b"k")

    # Hand-craft a record wrapping an ObjectCode backed by a missing file
    # and drop it directly at that path, bypassing the cache API.
    stale = ObjectCode.from_cubin(str(tmp_path / "nonexistent.cubin"), name="x")
    record = pickle.dumps(
        (_FILESTREAM_SCHEMA_VERSION, b"k", pickle.dumps(stale), _time.time())
    )
    target.parent.mkdir(parents=True, exist_ok=True)
    target.write_bytes(record)

    # On reopen the entry must be invisible and the file pruned.
    with FileStreamProgramCache(root) as cache:
        assert cache.get(b"k") is None
        assert b"k" not in cache
        assert len(cache) == 0
        assert not target.exists()
1758+
16971759def test_filestream_cache_rejects_negative_size_cap (tmp_path ):
16981760 from cuda .core .utils import FileStreamProgramCache
16991761
0 commit comments