diff --git a/README.rst b/README.rst index 5ec99e37..947a9622 100644 --- a/README.rst +++ b/README.rst @@ -138,6 +138,7 @@ The following parameters will only be applied to decorators defined after `set_d * `cache_dir` * `pickle_reload` * `separate_files` +* `entry_size_limit` These parameters can be changed at any time and they will apply to all decorators: @@ -269,6 +270,22 @@ You can specify a maximum allowed age for a cached value on a per-call basis usi - If the cached value is older than this threshold, a new calculation is triggered and the cache is updated. - If not, the cached value is returned as usual. +Entry Size Limit +~~~~~~~~~~~~~~~~ +You can prevent very large return values from being cached by specifying +``entry_size_limit`` on the decorator. Values larger than this limit are +returned but not stored. The limit accepts an integer number of bytes or a +human readable string like ``"200MB"``. + +.. code-block:: python + + @cachier(entry_size_limit="10KB") + def load_data(): + ... + +When ``cachier__verbose=True`` is passed to a call that returns a value +exceeding the limit, an informative message is printed. + Ignore Cache ~~~~~~~~~~~~ diff --git a/pyproject.toml b/pyproject.toml index 85bb1135..e04af960 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -45,6 +45,7 @@ dynamic = [ ] dependencies = [ "portalocker>=2.3.2", + "pympler>=1", "watchdog>=2.3.1", ] urls.Source = "https://github.com/python-cachier/cachier" diff --git a/scripts/test-local.sh b/scripts/test-local.sh index 9c9b9b8e..f5d784a0 100755 --- a/scripts/test-local.sh +++ b/scripts/test-local.sh @@ -168,16 +168,40 @@ validate_cores "$SELECTED_CORES" # Function to check if Docker is available check_docker() { if ! command -v docker &> /dev/null; then - print_message $RED "Error: Docker is required but not installed." + print_message $RED "═══════════════════════════════════════════════════════════════" + print_message $RED "ERROR: Docker is not installed!" 
+ print_message $RED "═══════════════════════════════════════════════════════════════" + echo "" + echo "This script requires Docker to run external backend tests (MongoDB, Redis, PostgreSQL)." echo "Please install Docker from: https://www.docker.com/products/docker-desktop" + echo "" exit 1 fi - if ! docker ps &> /dev/null; then - print_message $RED "Error: Docker daemon is not running." - echo "Please start Docker and try again." + # Try to run docker ps and capture the actual error + if ! docker ps > /dev/null 2>&1; then + print_message $RED "═══════════════════════════════════════════════════════════════" + print_message $RED "ERROR: Docker daemon is not running!" + print_message $RED "═══════════════════════════════════════════════════════════════" + echo "" + echo "Docker is installed but the Docker daemon is not running." + echo "" + echo "To fix this:" + echo " • On macOS: Start Docker Desktop from Applications" + echo " • On Linux: Run 'sudo systemctl start docker' or 'sudo service docker start'" + echo " • On Windows: Start Docker Desktop from the Start Menu" + echo "" + echo "After starting Docker, wait a few seconds and try running this script again." 
+ echo "" + + # Show the actual docker error for debugging + echo "Technical details:" + docker ps 2>&1 | sed 's/^/ /' + echo "" exit 1 fi + + print_message $GREEN "✓ Docker is installed and running" } # Function to check and install dependencies diff --git a/src/cachier/__init__.py b/src/cachier/__init__.py index cfaeaea3..922ab021 100644 --- a/src/cachier/__init__.py +++ b/src/cachier/__init__.py @@ -8,6 +8,7 @@ set_global_params, ) from .core import cachier +from .util import parse_bytes __all__ = [ "cachier", @@ -15,6 +16,7 @@ "get_default_params", "set_global_params", "get_global_params", + "parse_bytes", "enable_caching", "disable_caching", "__version__", diff --git a/src/cachier/config.py b/src/cachier/config.py index 53dfbe82..4c7bb1d7 100644 --- a/src/cachier/config.py +++ b/src/cachier/config.py @@ -65,6 +65,7 @@ class Params: allow_none: bool = False cleanup_stale: bool = False cleanup_interval: timedelta = timedelta(days=1) + entry_size_limit: Optional[int] = None _global_params = Params() diff --git a/src/cachier/core.py b/src/cachier/core.py index 4db5e329..8c56d960 100644 --- a/src/cachier/core.py +++ b/src/cachier/core.py @@ -19,18 +19,14 @@ from warnings import warn from ._types import RedisClient -from .config import ( - Backend, - HashFunc, - Mongetter, - _update_with_defaults, -) +from .config import Backend, HashFunc, Mongetter, _update_with_defaults from .cores.base import RecalculationNeeded, _BaseCore from .cores.memory import _MemoryCore from .cores.mongo import _MongoCore from .cores.pickle import _PickleCore from .cores.redis import _RedisCore from .cores.sql import _SQLCore +from .util import parse_bytes MAX_WORKERS_ENVAR_NAME = "CACHIER_MAX_WORKERS" DEFAULT_MAX_WORKERS = 8 @@ -60,11 +56,15 @@ def _function_thread(core, key, func, args, kwds): print(f"Function call failed with the following exception:\n{exc}") -def _calc_entry(core, key, func, args, kwds) -> Optional[Any]: +def _calc_entry( + core, key, func, args, kwds, printer=lambda 
*_: None +) -> Optional[Any]: core.mark_entry_being_calculated(key) try: func_res = func(*args, **kwds) - core.set_entry(key, func_res) + stored = core.set_entry(key, func_res) + if not stored: + printer("Result exceeds entry_size_limit; not cached") return func_res finally: core.mark_entry_not_calculated(key) @@ -123,6 +123,7 @@ def cachier( allow_none: Optional[bool] = None, cleanup_stale: Optional[bool] = None, cleanup_interval: Optional[timedelta] = None, + entry_size_limit: Optional[Union[int, str]] = None, ): """Wrap as a persistent, stale-free memoization decorator. @@ -191,6 +192,10 @@ def cachier( thread. Defaults to False. cleanup_interval: datetime.timedelta, optional Minimum time between automatic cleanup runs. Defaults to one day. + entry_size_limit: int or str, optional + Maximum serialized size of a cached value. Values exceeding the limit + are returned but not cached. Human readable strings like ``"10MB"`` are + allowed. """ # Check for deprecated parameters @@ -204,6 +209,9 @@ def cachier( # Update parameters with defaults if input is None backend = _update_with_defaults(backend, "backend") mongetter = _update_with_defaults(mongetter, "mongetter") + size_limit_bytes = parse_bytes( + _update_with_defaults(entry_size_limit, "entry_size_limit") + ) # Override the backend parameter if a mongetter is provided. 
if callable(mongetter): backend = "mongo" @@ -215,28 +223,34 @@ def cachier( cache_dir=cache_dir, separate_files=separate_files, wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=size_limit_bytes, ) elif backend == "mongo": core = _MongoCore( hash_func=hash_func, mongetter=mongetter, wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=size_limit_bytes, ) elif backend == "memory": core = _MemoryCore( - hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout + hash_func=hash_func, + wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=size_limit_bytes, ) elif backend == "sql": core = _SQLCore( hash_func=hash_func, sql_engine=sql_engine, wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=size_limit_bytes, ) elif backend == "redis": core = _RedisCore( hash_func=hash_func, redis_client=redis_client, wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=size_limit_bytes, ) else: raise ValueError("specified an invalid core: %s" % backend) @@ -324,12 +338,12 @@ def _call(*args, max_age: Optional[timedelta] = None, **kwds): ) key, entry = core.get_entry((), kwargs) if overwrite_cache: - return _calc_entry(core, key, func, args, kwds) + return _calc_entry(core, key, func, args, kwds, _print) if entry is None or ( not entry._completed and not entry._processing ): _print("No entry found. No current calc. 
Calling like a boss.") - return _calc_entry(core, key, func, args, kwds) + return _calc_entry(core, key, func, args, kwds, _print) _print("Entry found.") if _allow_none or entry.value is not None: _print("Cached result found.") @@ -362,7 +376,7 @@ def _call(*args, max_age: Optional[timedelta] = None, **kwds): try: return core.wait_on_entry_calc(key) except RecalculationNeeded: - return _calc_entry(core, key, func, args, kwds) + return _calc_entry(core, key, func, args, kwds, _print) if _next_time: _print("Async calc and return stale") core.mark_entry_being_calculated(key) @@ -374,15 +388,15 @@ def _call(*args, max_age: Optional[timedelta] = None, **kwds): core.mark_entry_not_calculated(key) return entry.value _print("Calling decorated function and waiting") - return _calc_entry(core, key, func, args, kwds) + return _calc_entry(core, key, func, args, kwds, _print) if entry._processing: _print("No value but being calculated. Waiting.") try: return core.wait_on_entry_calc(key) except RecalculationNeeded: - return _calc_entry(core, key, func, args, kwds) + return _calc_entry(core, key, func, args, kwds, _print) _print("No entry found. No current calc. Calling like a boss.") - return _calc_entry(core, key, func, args, kwds) + return _calc_entry(core, key, func, args, kwds, _print) # MAINTAINER NOTE: The main function wrapper is now a standard function # that passes *args and **kwargs to _call. 
This ensures that user diff --git a/src/cachier/cores/base.py b/src/cachier/cores/base.py index edb8e7ed..ef631850 100644 --- a/src/cachier/cores/base.py +++ b/src/cachier/cores/base.py @@ -9,9 +9,12 @@ import abc # for the _BaseCore abstract base class import inspect +import sys import threading from datetime import timedelta -from typing import Callable, Optional, Tuple +from typing import Any, Callable, Optional, Tuple + +from pympler import asizeof # type: ignore from .._types import HashFunc from ..config import CacheEntry, _update_with_defaults @@ -34,10 +37,12 @@ def __init__( self, hash_func: Optional[HashFunc], wait_for_calc_timeout: Optional[int], + entry_size_limit: Optional[int] = None, ): self.hash_func = _update_with_defaults(hash_func, "hash_func") self.wait_for_calc_timeout = wait_for_calc_timeout self.lock = threading.RLock() + self.entry_size_limit = entry_size_limit def set_func(self, func): """Set the function this core will use. @@ -90,8 +95,22 @@ def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]: """ + def _estimate_size(self, value: Any) -> int: + try: + return asizeof.asizeof(value) + except Exception: + return sys.getsizeof(value) + + def _should_store(self, value: Any) -> bool: + if self.entry_size_limit is None: + return True + try: + return self._estimate_size(value) <= self.entry_size_limit + except Exception: + return True + @abc.abstractmethod - def set_entry(self, key: str, func_res): + def set_entry(self, key: str, func_res: Any) -> bool: """Map the given result to the given key in this core's cache.""" @abc.abstractmethod diff --git a/src/cachier/cores/memory.py b/src/cachier/cores/memory.py index ddd0acdb..21386b4b 100644 --- a/src/cachier/cores/memory.py +++ b/src/cachier/cores/memory.py @@ -16,8 +16,9 @@ def __init__( self, hash_func: Optional[HashFunc], wait_for_calc_timeout: Optional[int], + entry_size_limit: Optional[int] = None, ): - super().__init__(hash_func, wait_for_calc_timeout) + 
super().__init__(hash_func, wait_for_calc_timeout, entry_size_limit) self.cache: Dict[str, CacheEntry] = {} def _hash_func_key(self, key: str) -> str: @@ -29,7 +30,9 @@ def get_entry_by_key( with self.lock: return key, self.cache.get(self._hash_func_key(key), None) - def set_entry(self, key: str, func_res: Any) -> None: + def set_entry(self, key: str, func_res: Any) -> bool: + if not self._should_store(func_res): + return False hash_key = self._hash_func_key(key) with self.lock: try: @@ -47,6 +50,7 @@ def set_entry(self, key: str, func_res: Any) -> None: _condition=cond, _completed=True, ) + return True def mark_entry_being_calculated(self, key: str) -> None: with self.lock: diff --git a/src/cachier/cores/mongo.py b/src/cachier/cores/mongo.py index fbc93711..9a28dd1c 100644 --- a/src/cachier/cores/mongo.py +++ b/src/cachier/cores/mongo.py @@ -40,6 +40,7 @@ def __init__( hash_func: Optional[HashFunc], mongetter: Optional[Mongetter], wait_for_calc_timeout: Optional[int], + entry_size_limit: Optional[int] = None, ): if "pymongo" not in sys.modules: warnings.warn( @@ -49,7 +50,9 @@ def __init__( ) # pragma: no cover super().__init__( - hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout + hash_func=hash_func, + wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=entry_size_limit, ) if mongetter is None: raise MissingMongetter( @@ -87,7 +90,9 @@ def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]: ) return key, entry - def set_entry(self, key: str, func_res: Any) -> None: + def set_entry(self, key: str, func_res: Any) -> bool: + if not self._should_store(func_res): + return False thebytes = pickle.dumps(func_res) self.mongo_collection.update_one( filter={"func": self._func_str, "key": key}, @@ -104,6 +109,7 @@ def set_entry(self, key: str, func_res: Any) -> None: }, upsert=True, ) + return True def mark_entry_being_calculated(self, key: str) -> None: self.mongo_collection.update_one( diff --git a/src/cachier/cores/pickle.py 
b/src/cachier/cores/pickle.py index 344fcba7..6a49cb2e 100644 --- a/src/cachier/cores/pickle.py +++ b/src/cachier/cores/pickle.py @@ -12,7 +12,7 @@ import time from contextlib import suppress from datetime import datetime, timedelta -from typing import Any, Dict, Optional, Tuple, Union +from typing import IO, Any, Dict, Optional, Tuple, Union, cast import portalocker # to lock on pickle cache IO from watchdog.events import PatternMatchingEventHandler @@ -78,8 +78,9 @@ def __init__( cache_dir: Optional[Union[str, os.PathLike]], separate_files: Optional[bool], wait_for_calc_timeout: Optional[int], + entry_size_limit: Optional[int] = None, ): - super().__init__(hash_func, wait_for_calc_timeout) + super().__init__(hash_func, wait_for_calc_timeout, entry_size_limit) self._cache_dict: Dict[str, CacheEntry] = {} self.reload = _update_with_defaults(pickle_reload, "pickle_reload") self.cache_dir = os.path.expanduser( @@ -119,7 +120,7 @@ def _convert_legacy_cache_entry( def _load_cache_dict(self) -> Dict[str, CacheEntry]: try: with portalocker.Lock(self.cache_fpath, mode="rb") as cf: - cache = pickle.load(cf) + cache = pickle.load(cast(IO[bytes], cf)) self._cache_used_fpath = str(self.cache_fpath) except (FileNotFoundError, EOFError): cache = {} @@ -146,7 +147,7 @@ def _load_cache_by_key( fpath += f"_{hash_str or key}" try: with portalocker.Lock(fpath, mode="rb") as cache_file: - entry = pickle.load(cache_file) + entry = pickle.load(cast(IO[bytes], cache_file)) return _PickleCore._convert_legacy_cache_entry(entry) except (FileNotFoundError, EOFError): return None @@ -185,7 +186,7 @@ def _save_cache( fpath += f"_{hash_str}" with self.lock: with portalocker.Lock(fpath, mode="wb") as cf: - pickle.dump(cache, cf, protocol=4) + pickle.dump(cache, cast(IO[bytes], cf), protocol=4) # the same as check for separate_file, but changed for typing if isinstance(cache, dict): self._cache_dict = cache @@ -198,7 +199,9 @@ def get_entry_by_key( return key, self._load_cache_by_key(key) return 
key, self.get_cache_dict(reload).get(key) - def set_entry(self, key: str, func_res: Any) -> None: + def set_entry(self, key: str, func_res: Any) -> bool: + if not self._should_store(func_res): + return False key_data = CacheEntry( value=func_res, time=datetime.now(), @@ -208,12 +211,13 @@ def set_entry(self, key: str, func_res: Any) -> None: ) if self.separate_files: self._save_cache(key_data, key) - return # pragma: no cover + return True # pragma: no cover with self.lock: cache = self.get_cache_dict() cache[key] = key_data self._save_cache(cache) + return True def mark_entry_being_calculated_separate_files(self, key: str) -> None: self._save_cache( diff --git a/src/cachier/cores/redis.py b/src/cachier/cores/redis.py index ccd0ffe0..ff4d8fd0 100644 --- a/src/cachier/cores/redis.py +++ b/src/cachier/cores/redis.py @@ -35,6 +35,7 @@ def __init__( ], wait_for_calc_timeout: Optional[int] = None, key_prefix: str = "cachier", + entry_size_limit: Optional[int] = None, ): if not REDIS_AVAILABLE: warnings.warn( @@ -45,7 +46,9 @@ ) super().__init__( - hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout + hash_func=hash_func, + wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=entry_size_limit, ) if redis_client is None: raise MissingRedisClient( @@ -122,7 +125,9 @@ def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]: warnings.warn(f"Redis get_entry_by_key failed: {e}", stacklevel=2) return key, None - def set_entry(self, key: str, func_res: Any) -> None: + def set_entry(self, key: str, func_res: Any) -> bool: """Map the given result to the given key in Redis.""" + if not self._should_store(func_res): + return False redis_client = self._resolve_redis_client() redis_key = self._get_redis_key(key) @@ -143,8 +148,10 @@ def set_entry(self, key: str, func_res: Any) -> None: "completed": "true", }, ) + return True except Exception as e: warnings.warn(f"Redis set_entry failed: {e}", stacklevel=2) + return False def 
mark_entry_being_calculated(self, key: str) -> None: """Mark the entry mapped by the given key as being calculated.""" diff --git a/src/cachier/cores/sql.py b/src/cachier/cores/sql.py index 543531ef..16de020f 100644 --- a/src/cachier/cores/sql.py +++ b/src/cachier/cores/sql.py @@ -3,7 +3,7 @@ import pickle import threading from datetime import datetime, timedelta -from typing import Any, Callable, Optional, Tuple, Union +from typing import Any, Callable, Optional, Tuple, Union, cast try: from sqlalchemy import ( @@ -63,6 +63,7 @@ def __init__( hash_func: Optional[HashFunc], sql_engine: Optional[Union[str, "Engine", Callable[[], "Engine"]]], wait_for_calc_timeout: Optional[int] = None, + entry_size_limit: Optional[int] = None, ): if not SQLALCHEMY_AVAILABLE: raise ImportError( @@ -70,7 +71,9 @@ def __init__( "Install with `pip install SQLAlchemy`." ) super().__init__( - hash_func=hash_func, wait_for_calc_timeout=wait_for_calc_timeout + hash_func=hash_func, + wait_for_calc_timeout=wait_for_calc_timeout, + entry_size_limit=entry_size_limit, ) self._engine = self._resolve_engine(sql_engine) self._Session = sessionmaker(bind=self._engine) @@ -109,14 +112,16 @@ def get_entry_by_key(self, key: str) -> Tuple[str, Optional[CacheEntry]]: value = pickle.loads(row.value) if row.value is not None else None entry = CacheEntry( value=value, - time=row.timestamp, - stale=row.stale, - _processing=row.processing, - _completed=row.completed, + time=cast(datetime, row.timestamp), + stale=cast(bool, row.stale), + _processing=cast(bool, row.processing), + _completed=cast(bool, row.completed), ) return key, entry - def set_entry(self, key: str, func_res: Any) -> None: + def set_entry(self, key: str, func_res: Any) -> bool: + if not self._should_store(func_res): + return False with self._lock, self._Session() as session: thebytes = pickle.dumps(func_res) now = datetime.now() @@ -187,6 +192,7 @@ def set_entry(self, key: str, func_res: Any) -> None: ) ) session.commit() + return True def 
mark_entry_being_calculated(self, key: str) -> None: with self._lock, self._Session() as session: diff --git a/src/cachier/util.py b/src/cachier/util.py new file mode 100644 index 00000000..dda540bf --- /dev/null +++ b/src/cachier/util.py @@ -0,0 +1,25 @@ +"""Utility helpers for Cachier.""" + +import re +from typing import Optional, Union + + +def parse_bytes(size: Union[int, str, None]) -> Optional[int]: + """Convert a human friendly size string to bytes.""" + if size is None: + return None + if isinstance(size, int): + return size + match = re.fullmatch(r"(?i)\s*(\d+(?:\.\d+)?)\s*([kmgt]?b)?\s*", str(size)) + if not match: + raise ValueError(f"Invalid size value: {size}") + number = float(match.group(1)) + unit = (match.group(2) or "b").upper() + factor = { + "B": 1, + "KB": 1024, + "MB": 1024**2, + "GB": 1024**3, + "TB": 1024**4, + }[unit] + return int(number * factor) diff --git a/tests/requirements.txt b/tests/requirements.txt index 23c73edc..5aa12f1a 100644 --- a/tests/requirements.txt +++ b/tests/requirements.txt @@ -9,3 +9,4 @@ collective.checkdocs pygments # the memory core tests dataframe caching pandas +pympler diff --git a/tests/test_core_lookup.py b/tests/test_core_lookup.py index 2b2a9191..c39b653d 100644 --- a/tests/test_core_lookup.py +++ b/tests/test_core_lookup.py @@ -14,6 +14,7 @@ def test_get_default_params(): "caching_enabled", "cleanup_interval", "cleanup_stale", + "entry_size_limit", "hash_func", "mongetter", "next_time", diff --git a/tests/test_entry_size_limit.py b/tests/test_entry_size_limit.py new file mode 100644 index 00000000..f2784967 --- /dev/null +++ b/tests/test_entry_size_limit.py @@ -0,0 +1,37 @@ +import pytest + +import cachier + + +@pytest.mark.memory +def test_entry_size_limit_not_cached(): + call_count = 0 + + @cachier.cachier(backend="memory", entry_size_limit="10B") + def func(x): + nonlocal call_count + call_count += 1 + return "a" * 50 + + func.clear_cache() + val1 = func(1) + val2 = func(1) + assert val1 == val2 + assert 
call_count == 2 + + +@pytest.mark.memory +def test_entry_size_limit_cached(): + call_count = 0 + + @cachier.cachier(backend="memory", entry_size_limit="1KB") + def func(x): + nonlocal call_count + call_count += 1 + return "small" + + func.clear_cache() + val1 = func(1) + val2 = func(1) + assert val1 == val2 + assert call_count == 1 diff --git a/uv.lock b/uv.lock index eaef5993..9ac89397 100644 --- a/uv.lock +++ b/uv.lock @@ -7,12 +7,14 @@ name = "cachier" source = { editable = "." } dependencies = [ { name = "portalocker" }, + { name = "pympler" }, { name = "watchdog" }, ] [package.metadata] requires-dist = [ { name = "portalocker", specifier = ">=2.3.2" }, + { name = "pympler", specifier = ">=1.0" }, { name = "watchdog", specifier = ">=2.3.1" }, ] @@ -28,6 +30,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/4b/a6/38c8e2f318bf67d338f4d629e93b0b4b9af331f455f0390ea8ce4a099b26/portalocker-3.2.0-py3-none-any.whl", hash = "sha256:3cdc5f565312224bc570c49337bd21428bba0ef363bbcf58b9ef4a9f11779968", size = 22424, upload-time = "2025-06-14T13:20:38.083Z" }, ] +[[package]] +name = "pympler" +version = "1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pywin32", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/37/c384631908029676d8e7213dd956bb686af303a80db7afbc9be36bc49495/pympler-1.1.tar.gz", hash = "sha256:1eaa867cb8992c218430f1708fdaccda53df064144d1c5656b1e6f1ee6000424", size = 179954, upload-time = "2024-06-28T19:56:06.563Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/4f/a6a2e2b202d7fd97eadfe90979845b8706676b41cbd3b42ba75adf329d1f/Pympler-1.1-py3-none-any.whl", hash = "sha256:5b223d6027d0619584116a0cbc28e8d2e378f7a79c1e5e024f9ff3b673c58506", size = 165766, upload-time = "2024-06-28T19:56:05.087Z" }, +] + [[package]] name = "pywin32" version = "310"