Skip to content
Draft
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 2 additions & 0 deletions environment.yml
Original file line number Diff line number Diff line change
Expand Up @@ -11,6 +11,8 @@ dependencies:
- simpeg==0.25.2
- pymatsolver==0.4.0
- discretize==0.12.0
# Optional dependencies
- python-xxhash
# Required by notebooks
- harmonica
- verde
Expand Down
3 changes: 3 additions & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -14,6 +14,9 @@ dependencies = [
"discretize==0.12.0",
]

[project.optional-dependencies]
xxhash = ["xxhash"]

[build-system]
requires = ["setuptools>=61", "wheel", "setuptools_scm[toml]>=6.2"]
build-backend = "setuptools.build_meta"
Expand Down
1 change: 1 addition & 0 deletions requirements-dev.txt
Original file line number Diff line number Diff line change
Expand Up @@ -10,3 +10,4 @@ scipy-stubs
discretize==0.12.0
simpeg==0.25.2
pymatsolver==0.4.0
xxhash
16 changes: 13 additions & 3 deletions src/inversion_ideas/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,6 @@
"""

import functools
import hashlib
import logging

import numpy as np
Expand All @@ -12,6 +11,16 @@
from ._utils import array_to_str
from .typing import SparseArray

# Choose the function used to hash model arrays for caching.  xxHash is a
# fast non-cryptographic hash and is preferred when the optional ``xxhash``
# package is installed; otherwise fall back to the stdlib SHA-256.  Either
# callable returns a hasher object exposing ``hexdigest()`` and ``name``.
try:
    import xxhash

    HASHING_FUNCTION = xxhash.xxh32
except ImportError:  # optional dependency not installed
    import hashlib

    HASHING_FUNCTION = hashlib.sha256

__all__ = [
"cache_on_model",
"get_logger",
Expand Down Expand Up @@ -110,7 +119,7 @@ def wrapper(self, model, *args, **kwargs):
raise AttributeError(msg)

if self.cache:
model_hash = hashlib.sha256(model)
model_hash = HASHING_FUNCTION(model)

# Return cached object if the model hash matches with the cached one
if hasattr(self, cache_attr):
Expand All @@ -136,7 +145,8 @@ def wrapper(self, model, *args, **kwargs):
# -- Debug log --
msg = (
f"Computed new result '{array_to_str(result)}' after "
f"calling '{func}' with model with hash '{model_hash.hexdigest()}'. "
f"calling '{func}' with model with hash "
f"'{model_hash.name}:{model_hash.hexdigest()}'. "
"Cached the result into the object."
)
if args:
Expand Down
Loading