Skip to content
Open
54 changes: 50 additions & 4 deletions src/fastapi_cloud_cli/commands/deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -108,15 +108,19 @@ def _should_exclude_entry(path: Path) -> bool:
return False


def archive(path: Path, tar_path: Path) -> Path:
logger.debug("Starting archive creation for path: %s", path)
files = rignore.walk(
def _rignore_walk(path: Path) -> rignore.Walker:
    """Return a walker over *path* using the project's shared exclusion rules.

    The walker applies the default exclusions (via ``_should_exclude_entry``),
    honors a ``.fastapicloudignore`` file, and does not skip hidden files.
    """
    walker = rignore.walk(
        path,
        should_exclude_entry=_should_exclude_entry,
        additional_ignore_paths=[".fastapicloudignore"],
        ignore_hidden=False,
    )
    return walker


def archive(path: Path, tar_path: Path) -> Path:
logger.debug("Starting archive creation for path: %s", path)
files = _rignore_walk(path)

logger.debug("Archive will be created at: %s", tar_path)

file_count = 0
Expand All @@ -134,6 +138,20 @@ def archive(path: Path, tar_path: Path) -> Path:
return tar_path


def _get_large_files(path: Path, threshold_mb: int) -> list[tuple[Path, int]]:
    """Return ``(relative_path, size_in_bytes)`` for every non-ignored file
    under *path* whose size is at least *threshold_mb* megabytes.

    Results are sorted largest-first.  The same walker as ``archive`` is used,
    so only files that would actually be uploaded are considered.
    """
    threshold_bytes = threshold_mb * 1024 * 1024
    oversized = [
        (entry.relative_to(path), size)
        for entry in _rignore_walk(path)
        if not entry.is_dir()
        and (size := entry.stat().st_size) >= threshold_bytes
    ]
    oversized.sort(key=lambda item: item[1], reverse=True)
    return oversized


class Team(BaseModel):
id: str
slug: str
Expand Down Expand Up @@ -679,6 +697,10 @@ def deploy(
envvar="FASTAPI_CLOUD_APP_ID",
),
] = None,
large_file_threshold: Annotated[
int,
typer.Option(help="File size threshold in MB for warning about large files"),
] = 10, # 10 MB
) -> Any:
"""
Deploy a [bold]FastAPI[/bold] app to FastAPI Cloud. 🚀
Expand Down Expand Up @@ -804,10 +826,34 @@ def deploy(
)
raise typer.Exit(1)

app_path = path or Path.cwd()

large_files = _get_large_files(app_path, threshold_mb=large_file_threshold)
Comment thread
YuriiMotov marked this conversation as resolved.
Outdated
if large_files:
toolkit.print(
f"⚠️ Some uploaded files are larger than {large_file_threshold} MB ⚖️ :",
tag="warning",
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

I think this additional directory traversal will not add significant overhead.
The alternative is to embed this logic into the archive function, but, in my opinion, that would make the code less clear.

)
for fname, fsize in large_files[:3]:
fsize_mb = fsize // (1024 * 1024)
toolkit.print(f" • {fname} [yellow]({fsize_mb} MB)[/yellow]")
Comment on lines +842 to +843
Copy link
Copy Markdown
Contributor Author

Choose a reason for hiding this comment

The reason will be displayed to describe this comment to others. Learn more.

_format_size rounds the value, so 10 MB plus 1 byte would be printed as 10.00 MB, which would also be confusing.
I think it's fine as it is.

is_more = len(large_files) > 3
if is_more:
toolkit.print(f" [dim]...and {len(large_files) - 3} more[/dim]")

large_files_docs_url = (
"https://fastapicloud.com/docs/deployment#control-what-is-uploaded"
)
toolkit.print(
f"Read more: [link={large_files_docs_url}]{large_files_docs_url}[/link]",
tag="tip",
)
toolkit.print_line()

with tempfile.TemporaryDirectory() as temp_dir:
logger.debug("Creating archive for deployment")
archive_path = Path(temp_dir) / "archive.tar"
archive(path or Path.cwd(), archive_path)
archive(app_path, archive_path)

with (
toolkit.progress(
Expand Down
119 changes: 119 additions & 0 deletions tests/test_cli_deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -2194,3 +2194,122 @@ def test_ctrl_c_during_build_streaming_shows_cancelled(

assert "🟡" in result.output
assert "Cancelled." in result.output


def _create_file(path: Path, size_bytes: int) -> None:
"""Create a file of the given size."""
path.parent.mkdir(parents=True, exist_ok=True)
with open(path, "wb") as f:
if size_bytes > 0:
f.seek(size_bytes - 1)
f.write(b"\0")


@pytest.mark.respx
def test_large_file_threshold_warning(
    logged_in_cli: None, tmp_path: Path, respx_mock: respx.MockRouter
) -> None:
    """Files above the default 10 MB threshold are listed with their sizes."""
    app_payload = _get_random_app()
    app_id = app_payload["id"]
    deployment = _get_random_deployment(app_id=app_id)

    _setup_deployment_mocks(respx_mock, app_id, "some-team-id", deployment, tmp_path)
    respx_mock.get(f"/apps/{app_id}/deployments/{deployment['id']}").mock(
        return_value=Response(200, json={**deployment, "status": "success"})
    )

    # Two files above the default 10 MB threshold.
    for name, megabytes in (("model.bin", 12), ("data.csv", 11)):
        _create_file(tmp_path / name, megabytes * 1024 * 1024)

    with changing_dir(tmp_path):
        result = runner.invoke(app, ["deploy"])

    assert result.exit_code == 0
    assert "Some uploaded files are larger than 10 MB" in result.output
    for expected in ("model.bin", "12 MB", "data.csv", "11 MB"):
        assert expected in result.output


@pytest.mark.respx
def test_large_file_threshold_only_top_three_files_with_more_indicator(
    logged_in_cli: None, tmp_path: Path, respx_mock: respx.MockRouter
) -> None:
    """Only the three largest files are listed; the rest are summarized."""
    app_payload = _get_random_app()
    app_id = app_payload["id"]
    deployment = _get_random_deployment(app_id=app_id)

    _setup_deployment_mocks(respx_mock, app_id, "some-team-id", deployment, tmp_path)
    respx_mock.get(f"/apps/{app_id}/deployments/{deployment['id']}").mock(
        return_value=Response(200, json={**deployment, "status": "success"})
    )

    # Five large files; only the top three should appear in the output.
    sizes_mb = {
        "huge.bin": 50,
        "big.bin": 40,
        "medium.bin": 30,
        "smaller.bin": 20,
        "smallest.bin": 15,
    }
    for name, megabytes in sizes_mb.items():
        _create_file(tmp_path / name, megabytes * 1024 * 1024)

    with changing_dir(tmp_path):
        result = runner.invoke(app, ["deploy"])

    assert result.exit_code == 0
    for shown in ("huge.bin", "big.bin", "medium.bin"):
        assert shown in result.output
    for hidden in ("smaller.bin", "smallest.bin"):
        assert hidden not in result.output
    assert "...and 2 more" in result.output


@pytest.mark.respx
def test_large_file_threshold_does_not_warn_when_no_large_files(
    logged_in_cli: None, tmp_path: Path, respx_mock: respx.MockRouter
) -> None:
    """No warning is printed when every file is below the default threshold."""
    app_payload = _get_random_app()
    app_id = app_payload["id"]
    deployment = _get_random_deployment(app_id=app_id)

    _setup_deployment_mocks(respx_mock, app_id, "some-team-id", deployment, tmp_path)
    respx_mock.get(f"/apps/{app_id}/deployments/{deployment['id']}").mock(
        return_value=Response(200, json={**deployment, "status": "success"})
    )

    # A single 5 MB file: below the default 10 MB threshold.
    _create_file(tmp_path / "data.bin", 5 * 1024 * 1024)

    with changing_dir(tmp_path):
        result = runner.invoke(app, ["deploy"])

    assert result.exit_code == 0
    assert "Some uploaded files are larger than" not in result.output
    assert "data.bin" not in result.output


@pytest.mark.respx
def test_large_file_threshold_custom_threshold(
    logged_in_cli: None, tmp_path: Path, respx_mock: respx.MockRouter
) -> None:
    """The --large-file-threshold option overrides the default 10 MB limit."""
    app_payload = _get_random_app()
    app_id = app_payload["id"]
    deployment = _get_random_deployment(app_id=app_id)

    _setup_deployment_mocks(respx_mock, app_id, "some-team-id", deployment, tmp_path)
    respx_mock.get(f"/apps/{app_id}/deployments/{deployment['id']}").mock(
        return_value=Response(200, json={**deployment, "status": "success"})
    )

    # A 5 MB file: above a 1 MB threshold, below the default 10 MB one.
    _create_file(tmp_path / "data.bin", 5 * 1024 * 1024)

    with changing_dir(tmp_path):
        result = runner.invoke(app, ["deploy", "--large-file-threshold", "1"])

    assert result.exit_code == 0
    assert "Some uploaded files are larger than 1 MB" in result.output
    assert "data.bin" in result.output
52 changes: 52 additions & 0 deletions tests/test_deploy_utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,12 +3,21 @@
import pytest

from fastapi_cloud_cli.commands.deploy import (
_get_large_files,
_should_exclude_entry,
validate_app_directory,
)
from fastapi_cloud_cli.utils.api import DeploymentStatus


def _create_file(path: Path, size_bytes: int) -> None:
path.parent.mkdir(parents=True, exist_ok=True)
with open(path, "wb") as f:
if size_bytes > 0:
f.seek(size_bytes - 1)
f.write(b"\0")


@pytest.mark.parametrize(
"path",
[
Expand Down Expand Up @@ -135,3 +144,46 @@ def test_validate_app_directory_invalid(value: str, expected_message: str) -> No
validate_app_directory(value)

assert str(exc_info.value) == expected_message


def test_get_large_files_no_files_above_threshold(tmp_path: Path) -> None:
    """Files smaller than the threshold are not reported."""
    _create_file(tmp_path / "small.bin", 512 * 1024)  # 0.5 MB

    result = _get_large_files(tmp_path, threshold_mb=1)

    assert result == []


def test_get_large_files_returns_files_at_or_above_threshold(tmp_path: Path) -> None:
    """Qualifying files come back largest-first, with sizes and paths
    relative to the scanned directory."""
    _create_file(tmp_path / "big.bin", 2 * 1024 * 1024)  # 2 MB
    _create_file(tmp_path / "subdir" / "huge.bin", 5 * 1024 * 1024)  # 5 MB
    _create_file(tmp_path / "small.bin", 100 * 1024)  # 0.1 MB

    expected = [
        (Path("subdir") / "huge.bin", 5 * 1024 * 1024),
        (Path("big.bin"), 2 * 1024 * 1024),
    ]
    assert _get_large_files(tmp_path, threshold_mb=1) == expected


def test_get_large_files_excludes_default_exclusions(tmp_path: Path) -> None:
    """Files inside always-excluded directories (.venv, __pycache__) are skipped."""
    five_mb = 5 * 1024 * 1024
    _create_file(tmp_path / ".venv" / "lib" / "huge.so", five_mb)
    _create_file(tmp_path / "__pycache__" / "module.cpython-311.pyc", five_mb)
    _create_file(tmp_path / "main.py", five_mb)

    assert _get_large_files(tmp_path, threshold_mb=1) == [(Path("main.py"), five_mb)]


def test_get_large_files_respects_fastapicloudignore(tmp_path: Path) -> None:
    """Files matched by .fastapicloudignore patterns are skipped."""
    five_mb = 5 * 1024 * 1024
    (tmp_path / ".fastapicloudignore").write_text("data/\n")
    _create_file(tmp_path / "data" / "huge.bin", five_mb)
    _create_file(tmp_path / "main.bin", five_mb)

    assert _get_large_files(tmp_path, threshold_mb=1) == [(Path("main.bin"), five_mb)]
Loading