|
| 1 | +from __future__ import annotations |
| 2 | + |
| 3 | +import io |
| 4 | +import shutil |
| 5 | +import tempfile |
| 6 | +from pathlib import Path |
| 7 | +from typing import TYPE_CHECKING, Literal, cast |
| 8 | + |
| 9 | +from ..errors import InvalidCompressionSchemeError |
| 10 | +from .archive_extraction import WorkspaceArchiveExtractor, safe_zip_member_rel_path |
| 11 | + |
| 12 | +if TYPE_CHECKING: |
| 13 | + from .base_sandbox_session import BaseSandboxSession |
| 14 | + |
| 15 | + |
async def extract_archive(
    session: BaseSandboxSession,
    path: Path | str,
    data: io.IOBase,
    *,
    compression_scheme: Literal["tar", "zip"] | None = None,
) -> None:
    """Write *data* to *path* inside the sandbox, then extract it in place.

    The archive format is taken from ``compression_scheme`` when given;
    otherwise it is inferred from the file extension of *path*
    (``"bundle.zip"`` -> ``"zip"``).

    Args:
        session: Sandbox session providing filesystem access.
        path: Destination path of the archive inside the workspace.
        data: Readable binary stream containing the archive bytes.
        compression_scheme: Explicit archive format, or ``None`` to infer
            it from the path's suffix.

    Raises:
        InvalidCompressionSchemeError: If the scheme is neither ``"tar"``
            nor ``"zip"``, including when *path* has no suffix to infer from.
    """
    if isinstance(path, str):
        path = Path(path)

    if compression_scheme is None:
        suffix = path.suffix.removeprefix(".")
        compression_scheme = cast(Literal["tar", "zip"], suffix) if suffix else None

    # `None` fails this membership test too, so one check covers both a
    # failed inference and a bogus explicit value.
    if compression_scheme not in ("tar", "zip"):
        raise InvalidCompressionSchemeError(path=path, scheme=compression_scheme)

    normalized_path = await session._validate_path_access(path, for_write=True)
    destination_root = normalized_path.parent

    # Materialize the archive into a local spool once because both `write()` and the
    # extraction step consume the stream, and zip extraction may require seeking.
    # SpooledTemporaryFile is a context manager; `with` guarantees the cleanup the
    # old try/finally did by hand.
    with tempfile.SpooledTemporaryFile(max_size=16 * 1024 * 1024, mode="w+b") as spool:
        shutil.copyfileobj(data, spool)
        spool.seek(0)
        await session.write(normalized_path, spool)
        spool.seek(0)

        if compression_scheme == "tar":
            await session._extract_tar_archive(
                archive_path=normalized_path,
                destination_root=destination_root,
                data=spool,
            )
        else:
            await session._extract_zip_archive(
                archive_path=normalized_path,
                destination_root=destination_root,
                data=spool,
            )
| 59 | + |
| 60 | + |
async def extract_tar_archive(
    session: BaseSandboxSession,
    *,
    archive_path: Path,
    destination_root: Path,
    data: io.IOBase,
) -> None:
    """Unpack the tar archive in *data* beneath *destination_root*.

    Delegates to a ``WorkspaceArchiveExtractor`` wired to *session*'s
    filesystem primitives.
    """
    workspace_extractor = _build_workspace_archive_extractor(session)
    await workspace_extractor.extract_tar_archive(
        archive_path=archive_path, destination_root=destination_root, data=data
    )
| 74 | + |
| 75 | + |
async def extract_zip_archive(
    session: BaseSandboxSession,
    *,
    archive_path: Path,
    destination_root: Path,
    data: io.IOBase,
) -> None:
    """Unpack the zip archive in *data* beneath *destination_root*.

    Delegates to a ``WorkspaceArchiveExtractor`` wired to *session*'s
    filesystem primitives.
    """
    workspace_extractor = _build_workspace_archive_extractor(session)
    await workspace_extractor.extract_zip_archive(
        archive_path=archive_path, destination_root=destination_root, data=data
    )
| 89 | + |
| 90 | + |
def _build_workspace_archive_extractor(session: BaseSandboxSession) -> WorkspaceArchiveExtractor:
    """Adapt *session*'s filesystem operations to the extractor's callback interface."""

    def _mkdir_with_parents(path: Path):
        # The extractor hands over a single path; parent directories are
        # always created on its behalf.
        return session.mkdir(path, parents=True)

    return WorkspaceArchiveExtractor(
        mkdir=_mkdir_with_parents,
        write=session.write,
        ls=session.ls,
    )
| 97 | + |
| 98 | + |
# Public surface of this module; `safe_zip_member_rel_path` is re-exported
# from `.archive_extraction` so callers need only import from here.
__all__ = [
    "extract_archive",
    "extract_tar_archive",
    "extract_zip_archive",
    "safe_zip_member_rel_path",
]
0 commit comments