|
49 | 49 | T = TypeVar("T", "Root", "Timestamp", "Snapshot", "Targets") |
50 | 50 |
|
51 | 51 |
|
def _get_digest(algo: str) -> Any:  # noqa: ANN401
    """Return a fresh hash object for *algo*.

    Accepts every algorithm name that ``hashlib.new`` understands, plus
    the custom "blake2b-256" name (blake2b with a 32-byte digest).
    """
    if algo != _BLAKE_HASH_ALGORITHM:
        return hashlib.new(algo)

    return hashlib.blake2b(digest_size=32)
58 | 58 |
|
59 | 59 |
|
def _hash_bytes(data: bytes, algo: str) -> str:
    """Return the hex digest of *data* computed with *algo*."""
    hasher = _get_digest(algo)
    hasher.update(data)
    return hasher.hexdigest()
def _hash_file(f: IO[bytes], algo: str) -> str:
    """Return the hex digest of file object *f* computed with *algo*.

    The file is rewound to the start before hashing.
    """
    f.seek(0)

    if sys.version_info < (3, 11):
        # Fallback for older Pythons. Chunk size is taken from the previously
        # used and now deprecated `securesystemslib.hash.digest_fileobject`.
        hasher = _get_digest(algo)
        while chunk := f.read(4096):
            hasher.update(chunk)
        return hasher.hexdigest()

    hasher = hashlib.file_digest(f, lambda: _get_digest(algo))  # type: ignore[arg-type]
    return hasher.hexdigest()
74 | 82 |
|
75 | 83 |
|
76 | 84 | class Signed(metaclass=abc.ABCMeta): |
@@ -695,17 +703,15 @@ def _verify_hashes( |
695 | 703 | for algo, exp_hash in expected_hashes.items(): |
696 | 704 | try: |
697 | 705 | if isinstance(data, bytes): |
698 | | - digest_object = _hash(algo) |
699 | | - digest_object.update(data) |
| 706 | + observed_hash = _hash_bytes(data, algo) |
700 | 707 | else: |
701 | 708 | # if data is not bytes, assume it is a file object |
702 | | - digest_object = _file_hash(data, algo) |
| 709 | + observed_hash = _hash_file(data, algo) |
703 | 710 | except (ValueError, TypeError) as e: |
704 | 711 | raise LengthOrHashMismatchError( |
705 | 712 | f"Unsupported algorithm '{algo}'" |
706 | 713 | ) from e |
707 | 714 |
|
708 | | - observed_hash = digest_object.hexdigest() |
709 | 715 | if observed_hash != exp_hash: |
710 | 716 | raise LengthOrHashMismatchError( |
711 | 717 | f"Observed hash {observed_hash} does not match " |
@@ -760,15 +766,12 @@ def _get_length_and_hashes( |
760 | 766 | for algorithm in hash_algorithms: |
761 | 767 | try: |
762 | 768 | if isinstance(data, bytes): |
763 | | - digest_object = _hash(algorithm) |
764 | | - digest_object.update(data) |
| 769 | + hashes[algorithm] = _hash_bytes(data, algorithm) |
765 | 770 | else: |
766 | | - digest_object = _file_hash(data, algorithm) |
| 771 | + hashes[algorithm] = _hash_file(data, algorithm) |
767 | 772 | except (ValueError, TypeError) as e: |
768 | 773 | raise ValueError(f"Unsupported algorithm '{algorithm}'") from e |
769 | 774 |
|
770 | | - hashes[algorithm] = digest_object.hexdigest() |
771 | | - |
772 | 775 | return (length, hashes) |
773 | 776 |
|
774 | 777 |
|
|
0 commit comments