diff --git a/.gitignore b/.gitignore index 94fad6d..ebe3cf9 100644 --- a/.gitignore +++ b/.gitignore @@ -1,2 +1,3 @@ docs/_build -scratch \ No newline at end of file +scratch +.idea/ \ No newline at end of file diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..3c84a66 --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,224 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository. + +## Project Overview + +This is a **fork** of [py-pkgs/py-pkgs-cookiecutter](https://github.com/py-pkgs/py-pkgs-cookiecutter), a cookiecutter template for generating Python packages. This fork extends the original with: + +- Clean Architecture / DDD patterns (domain, application, infrastructure layers) +- FastAPI REST API with versioned endpoints +- Docker and docker-compose support +- Automatic setup and dependency installation via cookiecutter hooks +- Makefile with common development commands +- Semantic Release for automated versioning +- Pre-commit hooks for code quality +- CI workflows (GitHub Actions) + +## Repository Structure + +- **Root level**: The cookiecutter template configuration +- **`{{ cookiecutter.__package_slug }}/`**: The template directory that gets rendered when creating a new project +- **`tests/`**: Tests for the cookiecutter template itself (not the generated package) +- **`docs/`**: Documentation for the cookiecutter template (Sphinx-based) +- **`hooks/`**: Pre- and post-generation hooks for template initialization + +## Development Commands + +### Setting Up the Cookiecutter Development Environment + +```bash +# Install template dependencies +pip install -r requirements.txt + +# This installs: cookiecutter >= 2.0.0, pytest +``` + +### Testing the Cookiecutter Template + +```bash +# Run all template tests (parametrized across license/CI combinations) +pytest tests/ + +# Run a specific test +pytest tests/test_cookiecutter.py::test_cookiecutter_default_options -v + +# Generate a test 
package with default options +cookiecutter . --no-input --output-dir /tmp/test + +# Generate a test package with specific options +cookiecutter . --no-input --output-dir /tmp/test \ + package_name="test_pkg" \ + open_source_license="MIT" \ + include_github_actions="ci" +``` + +### Building Documentation + +```bash +cd docs +make html +# Generated docs will be in docs/_build/html +``` + +## Template Configuration + +The template options are defined in `cookiecutter.json`: +- `author_name`: Author name and email +- `package_name`: Name of the generated package +- `package_short_description`: Package description +- `package_version`: Initial version (default: 0.1.0) +- `python_version`: Python version to target (default: 3.12.8) +- `open_source_license`: License choice (MIT, Apache, GPL, BSD, CC0, Proprietary, None) +- `include_github_actions`: CI/CD options (no, ci) + +## Generated Package Architecture + +The template generates a package following clean architecture/DDD patterns: + +``` +src// +├── domain/ # Business logic layer +│ ├── entities/ # Domain entities (Task, TaskList) +│ ├── repositories/ # Repository interfaces +│ └── services/ # Domain services +├── application/ # Use cases/application services +└── infrastructure/ # External concerns + ├── persistence/ # Database models and repository implementations + │ ├── models/ # SQLAlchemy models + │ └── repositories/ # RDS repository implementations + └── web/ # FastAPI web layer + ├── api/v1/ # REST API endpoints + └── ui/ # UI routes +``` + +### Generated Package Makefile Targets + +The generated package includes a Makefile with these commands: + +**Development Setup:** +- `make setup` - Setup Python environment (pyenv, Python, virtualenv, Poetry) +- `make deps` - Install dependencies from pyproject.toml +- `make install` - Install package in development mode + +**Testing:** +- `make test` - Run unit tests with pytest +- `make test-cov` - Run tests with coverage report + +**Running the Application:** +- `make 
start` - Start FastAPI app (localhost only) +- `make dev-server` - Start FastAPI app (network accessible) +- `make prod-server` - Start FastAPI app in production mode + +**Docker:** +- `make docker-build` - Build Docker image +- `make docker-up` - Build and start Docker compose stack +- `make docker-down` - Stop Docker compose stack +- `make docker-logs` - Tail Docker logs + +**Utilities:** +- `make clean` - Clean build artifacts +- `make help` - Show available commands + +## Cookiecutter Hooks + +### Pre-Generation Hook (`hooks/pre_gen_project.py`) + +Validates that cookiecutter >= 2.0.0 is installed. Exits with error message if requirement not met. + +### Post-Generation Hook (`hooks/post_gen_project.py`) + +Runs after template generation to: +1. **Setup Environment**: Runs `make setup` to initialize Python environment (pyenv, virtualenv, Poetry) +2. **Initialize Git**: Creates git repository and makes initial commit with all generated files +3. **Install Dependencies** (Optional): Prompts user to select dependency groups (core, test, linting) and installs them with `poetry add` +4. **Open in VS Code** (Optional): If VS Code is installed, offers to open the project in the editor +5. **Ready to Code**: Displays next steps and available commands + +The hook automatically configures git with the author information from the template. 
+ +**Interactive Prompts:** +- Dependency groups selection (core, test, linting) - defaults to Yes, can skip with no +- VS Code opening - defaults to Yes, can skip with no +- Only shows in interactive mode (terminals, not CI/CD) + +## Generated Package Features + +Each generated project includes: + +**Clean Architecture Structure:** +- Domain layer (entities, repositories, services) +- Application layer (use cases) +- Infrastructure layer (persistence, web, DI) + +**FastAPI REST API:** +- Versioned endpoints (v1) +- Pydantic schemas for validation +- Dependency injection with dependency-injector +- OpenAPI/Swagger documentation at `/docs` + +**Database Support:** +- SQLAlchemy ORM +- Alembic migrations +- Support for SQLite (default) and PostgreSQL + +**Testing:** +- pytest with fixtures (conftest.py) +- CRUD flow tests +- API integration tests +- Repository tests + +**Development Tools:** +- Makefile with 20+ commands +- Pre-commit hooks (ruff, black, mypy, bandit) +- Docker and docker-compose support +- Semantic Release for versioning + +## Dependency Groups + +The post-generation hook prompts users to optionally install dependency groups: + +**Core Dependencies** - Production dependencies: +- FastAPI, SQLAlchemy, Pydantic, Click, Python-dotenv +- Database support: Alembic, databases, aiosqlite, psycopg +- Development tools: pre-commit, semantic-release, ipython, ipdb, twine + +**Test Dependencies** - Testing and QA: +- pytest, pytest-cov, pytest-asyncio, coverage, pytest-html, tox, httpx + +**Linting Dependencies** - Code quality: +- ruff, black + +Users can select any combination of these groups during project generation, and they will be automatically installed using `poetry add`. 
+ +## VS Code Configuration + +The generated project includes VS Code settings for optimal development experience: + +- **Python Interpreter**: Automatically detected from pyenv environment +- **Terminal Integration**: Shells configured to activate the pyenv environment with login shell (`-l` flag) +- **Pytest Integration**: Configured for running tests directly from VS Code +- **Type Checking**: Set to "basic" mode for Python analysis + +The `.vscode/settings.json` file includes: +- `python.venvPath`: Points to `~/.pyenv/versions/{python_version}/envs` where pyenv creates virtual environments +- `python.terminal.activateEnvironment`: Disabled (pyenv handles activation via `.python-version`) +- Task automation: Set to manual/on-demand execution + +**How VS Code detects the Python environment:** +1. `.python-version` file tells pyenv which environment to activate (e.g., "myproject") +2. `python.venvPath` tells VS Code where to look for venvs (pyenv's envs folder) +3. VS Code matches the project name with the venv name to detect the correct interpreter +4. 
Terminal automatically uses login shell which activates pyenv + +## Key Files + +- `cookiecutter.json`: Template variables and available options +- `tests/test_cookiecutter.py`: Parametrized tests covering all license/CI combinations +- `hooks/post_gen_project.py`: Post-generation hook for setup, git init, and optional dependency installation +- `{{ cookiecutter.__package_slug }}/pyproject.toml`: Poetry configuration with semantic-release setup +- `{{ cookiecutter.__package_slug }}/Makefile`: Generated package's development commands +- `{{ cookiecutter.__package_slug }}/scripts/setup_env.sh`: Environment setup script (pyenv, Python, virtualenv, Poetry) +- `{{ cookiecutter.__package_slug }}/.python-version`: Python version specification for pyenv +- `{{ cookiecutter.__package_slug }}/.pre-commit-config.yaml`: Pre-commit hooks configuration +- `{{ cookiecutter.__package_slug }}/.vscode/settings.json`: VS Code configuration for Python development diff --git a/README.md b/README.md index 01f817e..29a65cb 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,7 @@ # Py-Pkgs-Cookiecutter: A cookiecutter template for Python packages -[![Documentation Status](https://readthedocs.org/projects/py-pkgs-cookiecutter/badge/?version=latest)](https://py-pkgs-cookiecutter.readthedocs.io/en/latest/) -![tests](https://github.com/py-pkgs/py-pkgs-cookiecutter/workflows/test/badge.svg) -[![release](https://img.shields.io/github/release/py-pkgs/py-pkgs-cookiecutter.svg)](https://github.com/py-pkgs/py-pkgs-cookiecutter/releases) +![tests](https://github.com/dctx-ztocker/py-pkgs-cookiecutter/workflows/test/badge.svg) +[![release](https://img.shields.io/github/release/dctx-ztocker/py-pkgs-cookiecutter.svg)](https://github.com/dctx-ztocker/py-pkgs-cookiecutter/releases) [![python](https://img.shields.io/badge/python-%5E3.8-blue)]() [![os](https://img.shields.io/badge/OS-Ubuntu%2C%20Mac%2C%20Windows-purple)]() @@ -24,10 +23,10 @@ Please see the 
[documentation](https://py-pkgs-cookiecutter.readthedocs.io/en/la pip install cookiecutter ``` -2. Generate a Python package structure using [`py-pkgs-cookiecutter`](https://github.com/py-pkgs/py-pkgs-cookiecutter): +2. Generate a Python package structure using [`py-pkgs-cookiecutter`](https://github.com/dctx-ztocker/py-pkgs-cookiecutter): ```bash - cookiecutter https://github.com/py-pkgs/py-pkgs-cookiecutter.git + cookiecutter https://github.com/dctx-ztocker/py-pkgs-cookiecutter.git ``` 3. After responding to the prompts you should have a directory structure similar to that shown below. To learn more about the contents of this directory structure, please see the `py-pkgs-cookiecutter` [documentation](https://py-pkgs-cookiecutter.readthedocs.io/en/latest/). diff --git a/cookiecutter.json b/cookiecutter.json index a9af49c..17992e5 100644 --- a/cookiecutter.json +++ b/cookiecutter.json @@ -1,10 +1,10 @@ { - "author_name": "Monty Python", + "author_name": "Jose Miguel ", "package_name": "mypkg", "__package_slug": "{{ cookiecutter.package_name|lower|replace(' ', '_')|replace('-', '_') }}", "package_short_description": "A package for doing great things!", "package_version": "0.1.0", - "python_version": "3.12", + "python_version": "3.12.8", "open_source_license": [ "MIT", "Apache License 2.0", @@ -16,7 +16,6 @@ ], "include_github_actions": [ "no", - "ci", - "ci+cd" + "ci" ] -} \ No newline at end of file +} diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py old mode 100644 new mode 100755 index 59f95a3..4cfd437 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -1,34 +1,336 @@ +#!/usr/bin/env python +""" +Post-generation hook for cookiecutter template. + +This hook: +1. Sets up the Python environment (pyenv, virtualenv, Poetry) +2. Initializes git repository +3. 
Asks user which dependencies to install and installs them +""" + +import subprocess +import sys import os -import shutil - -############################################################################## -# Utilities -############################################################################## - -def remove(filepath): - if os.path.isfile(filepath): - os.remove(filepath) - elif os.path.isdir(filepath): - shutil.rmtree(filepath) - - -############################################################################## -# Cookiecutter clean-up -############################################################################## - -# Directive flags -no_github_actions = "{{cookiecutter.include_github_actions}}" == "no" -github_actions_ci = "{{cookiecutter.include_github_actions}}" == "ci" -no_license = "{{cookiecutter.open_source_license}}" == "None" - -# Remove workflow files (if specified) -if no_github_actions: - remove(".github/") -elif github_actions_ci: - remove(".github/workflows/ci-cd.yml") -else: - remove(".github/workflows/ci.yml") - -# Remove license (if specified) -if no_license: - remove("LICENSE") + + +# Available dependency groups with their packages +DEPENDENCY_GROUPS = [ + { + "name": "core", + "description": "Core dependencies (FastAPI, SQLAlchemy, etc.)", + "packages": [ + "click^8.2.1", + "python-dotenv", + "pydantic", + "pydantic-settings", + "email-validator", + "sqlalchemy", + "fastapi", + "uvicorn", + "alembic", + "dependency-injector", + "databases", + "aiosqlite", + "python-multipart", + "greenlet", + "requests", + "psycopg[binary]", + ], + "dev_packages": [ + "twine", + "pre-commit", + "python-semantic-release^9.21.1", + "ipython", + "ipdb", + ], + }, + { + "name": "test", + "description": "Testing tools (pytest, pytest-cov, etc.)", + "packages": [], + "dev_packages": [ + "pytest", + "pytest-cov", + "pytest-asyncio", + "coverage[toml]", + "pytest-html", + "tox", + "httpx", + ], + }, + { + "name": "linting", + "description": "Linting & formatting (ruff, 
black, etc.)", + "packages": [], + "dev_packages": [ + "ruff", + "black", + ], + }, +] + + +def run_command(cmd, description, verbose=False): + """Run a shell command and handle errors.""" + if verbose: + print(f"\n▶ {description}") + try: + result = subprocess.run( + cmd, + shell=True, + check=True, + capture_output=not verbose, + text=True + ) + return True + except subprocess.CalledProcessError as e: + if verbose: + print(f"✗ Error during {description}") + return False + + +def is_interactive(): + """Check if stdin is available for interactive input.""" + import sys + return sys.stdin.isatty() + + +def ask_yes_no(question, default=True): + """Ask user a yes/no question. Default is yes.""" + while True: + try: + default_str = "[Y/n]" if default else "[y/N]" + response = input(f"{question} {default_str}: ").strip().lower() + + if response == "": + return default + elif response in ["y", "yes"]: + return True + elif response in ["n", "no"]: + return False + else: + print("⚠ Please enter 'y', 'n', or press Enter for default") + except EOFError: + # No stdin available, return default + return default + + +def ask_for_dependency_groups(): + """Ask user which dependency groups to install (after setup).""" + if not is_interactive(): + return [] + + print("\n" + "="*60) + print("📦 OPTIONAL DEPENDENCIES") + print("="*60) + print("\nWould you like to install optional dependencies now?") + print("(You can also run 'make deps' later to install all)") + print("") + + selected_groups = [] + + for group in DEPENDENCY_GROUPS: + if ask_yes_no(f" {group['name']:<8} - {group['description']}?"): + selected_groups.append(group["name"]) + + return selected_groups + + +def get_python_version_from_pyenv(): + """Get the Python version from .python-version file.""" + try: + with open(".python-version", "r") as f: + return f.read().strip() + except FileNotFoundError: + return None + + +def activate_pyenv_and_add_deps(groups): + """Add dependencies using poetry in the activated pyenv 
environment.""" + if not groups: + return True + + # Get the Python version/env name + env_name = get_python_version_from_pyenv() + if not env_name: + print("⚠ Could not determine Python version from .python-version") + return False + + # Collect all packages to install + all_packages = [] + all_dev_packages = [] + + for group_name in groups: + group = next((g for g in DEPENDENCY_GROUPS if g["name"] == group_name), None) + if group: + all_packages.extend(group["packages"]) + all_dev_packages.extend(group["dev_packages"]) + + success = True + + # Get poetry path - use the version from .python-version + poetry_path = os.path.expanduser(f"~/.pyenv/versions/{env_name}/bin/poetry") + + # Verify poetry exists + if not os.path.exists(poetry_path): + # Try to find poetry in current environment + result = subprocess.run("which poetry", shell=True, capture_output=True, text=True) + if result.returncode != 0: + print("⚠ Poetry not found in the environment. Skipping dependency installation.") + print("You can install dependencies later with: make deps") + return False + poetry_path = result.stdout.strip() + + # Build all package strings + if all_packages: + print(f"\n▶ Installing runtime dependencies...") + packages_str = " ".join(all_packages) + cmd = f'bash -l -c "cd {os.getcwd()} && {poetry_path} add {packages_str}"' + if not run_command(cmd, "Installing runtime dependencies", verbose=True): + success = False + + # Install development dependencies + if all_dev_packages: + print(f"\n▶ Installing development dependencies...") + dev_packages_str = " ".join(all_dev_packages) + cmd = f'bash -l -c "cd {os.getcwd()} && {poetry_path} add --group dev {dev_packages_str}"' + if not run_command(cmd, "Installing development dependencies", verbose=True): + success = False + + return success + + +def install_dependency_groups(groups): + """Install specified dependency groups using poetry add.""" + if not groups: + return True + + print(f"\nInstalling: {', '.join(groups)}") + success = 
activate_pyenv_and_add_deps(groups) + + if success: + print("\n✓ Dependencies installed successfully!") + else: + print("\n⚠ Some dependencies failed to install.") + print("You can retry later with: make deps") + + return success + + +def run_setup(): + """Run the setup process.""" + # Try to use make if available + if subprocess.run("which make", shell=True, capture_output=True).returncode == 0: + return run_command("make setup", "Setting up environment", verbose=True) + else: + # Fallback: install poetry and run basic setup + success = True + success = run_command("pip install poetry", "Installing poetry", verbose=True) and success + success = run_command("poetry install", "Installing dependencies", verbose=True) and success + return success + + +def init_git_repo(): + """Initialize git repository and make initial commit.""" + # Check if git is already initialized + if subprocess.run("git rev-parse --git-dir", shell=True, capture_output=True).returncode == 0: + return True # Already a git repo + + # Initialize git + if not run_command("git init", "Initializing git repository", verbose=False): + return False + + # Configure git for this repo (use template author info) + author_name = "{{ cookiecutter.author_name }}" + # Extract email if present, otherwise use a default + if "<" in author_name and ">" in author_name: + email = author_name[author_name.index("<")+1:author_name.index(">")] + name = author_name[:author_name.index("<")].strip() + else: + name = author_name + email = "dev@example.com" + + run_command(f'git config user.name "{name}"', "Configuring git name", verbose=False) + run_command(f'git config user.email "{email}"', "Configuring git email", verbose=False) + + # Stage all files + if not run_command("git add .", "Staging files", verbose=False): + return False + + # Make initial commit + commit_message = "Initial commit: Project setup with template" + cmd = f'git commit -m "{commit_message}"' + if not run_command(cmd, "Creating initial commit", 
verbose=False): + return False + + return True + + +def open_in_vscode(): + """Check if VS Code is available and ask user if they want to open the project.""" + # Check if code command exists + if subprocess.run("which code", shell=True, capture_output=True).returncode != 0: + return True # code not available, but that's ok + + if not is_interactive(): + return True # non-interactive mode, skip + + if ask_yes_no("\n🔧 Open project in VS Code?", default=True): + try: + subprocess.Popen(["code", "."], stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL) + return True + except Exception as e: + print(f"⚠ Could not open VS Code: {e}") + return False + + return True + + +def main(): + """Main hook function.""" + print("\n" + "="*70) + print("PROJECT SETUP") + print("="*70) + + # Step 1: Setup environment (Python, Poetry, venv) + print("\n[1/3] Setting up environment (pyenv, Python, virtualenv, Poetry)...") + setup_ok = run_setup() + + if not setup_ok: + print("\n⚠ Setup had issues. You may need to run 'make setup' manually.") + + # Step 2: Initialize git repository + print("[2/3] Initializing git repository...") + init_git_repo() + + # Step 3: Ask for optional dependencies + print("[3/3] Installing optional dependencies...") + selected_groups = ask_for_dependency_groups() + if selected_groups: + install_dependency_groups(selected_groups) + + # Final message + print("\n" + "="*70) + print("✓ READY TO CODE") + print("="*70) + print("\nYour environment is configured! 
Next steps:") + print("\n $ cd {{ cookiecutter.__package_slug }}") + print(" $ make help # See all available commands") + print("\nQuick start:") + print(" $ make test # Run tests") + print(" $ make start # Start FastAPI server") + print("") + + # Optional: Open in VS Code + open_in_vscode() + + +if __name__ == "__main__": + try: + main() + except KeyboardInterrupt: + print("\n\nSetup cancelled by user.") + sys.exit(1) + except Exception as e: + print(f"\n\nUnexpected error: {e}") + sys.exit(1) diff --git a/tests/test_cookiecutter.py b/tests/test_cookiecutter.py index e76ba27..90decae 100644 --- a/tests/test_cookiecutter.py +++ b/tests/test_cookiecutter.py @@ -50,13 +50,13 @@ def test_cookiecutter_all_options( assert num_items(path, ["docs"]) == 9 print(f"Checking pair: {open_source_license}, {include_github_actions}") if open_source_license == "None": - if include_github_actions in ["ci", "ci+cd"]: + if include_github_actions == "ci": assert num_items(path, [".github", "workflows"]) == 1 assert num_items(path) == 11 else: assert num_items(path) == 10 else: - if include_github_actions in ["ci", "ci+cd"]: + if include_github_actions == "ci": assert num_items(path, [".github", "workflows"]) == 1 assert num_items(path) == 12 else: diff --git a/{{ cookiecutter.__package_slug }}/.build/Dockerfile b/{{ cookiecutter.__package_slug }}/.build/Dockerfile new file mode 100644 index 0000000..d0374c3 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/.build/Dockerfile @@ -0,0 +1,42 @@ +# Use official Python image matching the version in pyproject.toml +FROM python:{{ cookiecutter.python_version }}-slim + +# Set environment variables +ENV PYTHONUNBUFFERED=1 \ + PYTHONDONTWRITEBYTECODE=1 \ + PIP_NO_CACHE_DIR=1 \ + PIP_DISABLE_PIP_VERSION_CHECK=1 \ + POETRY_HOME="/opt/poetry" \ + POETRY_CACHE_DIR="/tmp/poetry_cache" \ + POETRY_VENV_IN_PROJECT=1 \ + POETRY_NO_INTERACTION=1 + +# Install Poetry +RUN pip install poetry + +# Create application directory +WORKDIR /app + +# Copy 
poetry configuration files +COPY pyproject.toml poetry.lock* ./ + +# Configure poetry and install dependencies +RUN poetry config virtualenvs.create false \ + && poetry install --no-root \ + && rm -rf $POETRY_CACHE_DIR + +# Copy application code +COPY . . + +# Build the package using poetry +RUN poetry build + +# Install the built package using pip +RUN pip install dist/*.whl + +# Create a non-root user +RUN useradd --create-home --shell /bin/bash appuser +USER appuser + +# Default command +CMD ["python", "-m", "{{ cookiecutter.__package_slug }}"] diff --git a/{{ cookiecutter.__package_slug }}/.build/docker-compose.yml b/{{ cookiecutter.__package_slug }}/.build/docker-compose.yml new file mode 100644 index 0000000..77023fd --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/.build/docker-compose.yml @@ -0,0 +1,35 @@ +services: + db: + image: postgres:16-alpine + environment: + - POSTGRES_DB=db + - POSTGRES_USER=user + - POSTGRES_PASSWORD=password + ports: + - "5432:5432" + healthcheck: + test: ["CMD-SHELL", "pg_isready -U user -d db"] + interval: 5s + timeout: 5s + retries: 10 + restart: unless-stopped + + app: + image: {{ cookiecutter.__package_slug }} + build: + context: .. 
+ dockerfile: .build/Dockerfile + environment: + - PYTHONUNBUFFERED=1 + - DATABASE_URL=postgresql+psycopg://user:password@db:5432/db + depends_on: + db: + condition: service_healthy + command: + ["uvicorn", "{{ cookiecutter.__package_slug }}.{{ cookiecutter.__package_slug }}:app", "--host", "0.0.0.0", "--port", "8000"] + ports: + - "8000:8000" + restart: "no" + +volumes: + pgdata: diff --git a/{{ cookiecutter.__package_slug }}/.dockerignore b/{{ cookiecutter.__package_slug }}/.dockerignore new file mode 100644 index 0000000..66c3f9e --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/.dockerignore @@ -0,0 +1,19 @@ +.git +.gitignore +.env +.venv +__pycache__ +*.pyc +*.pyo +*.pyd +.Python +.pytest_cache +.coverage +htmlcov +dist +build +*.egg-info +.idea +.vscode +*.swp +.DS_Store diff --git a/{{ cookiecutter.__package_slug }}/.github/workflows/ci-cd.yml b/{{ cookiecutter.__package_slug }}/.github/workflows/ci-cd.yml deleted file mode 100644 index 5140d19..0000000 --- a/{{ cookiecutter.__package_slug }}/.github/workflows/ci-cd.yml +++ /dev/null @@ -1,95 +0,0 @@ -name: ci-cd - -on: [push, pull_request] - -jobs: - ci: - # Set up operating system - runs-on: ubuntu-latest - - # Define job steps - steps: - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "{{ cookiecutter.python_version }}" - - - name: Check-out repository - uses: actions/checkout@v3 - - - name: Install poetry - uses: snok/install-poetry@v1 - - - name: Install package - run: poetry install - - - name: Test with pytest - run: poetry run pytest tests/ --cov={{ cookiecutter.__package_slug }} --cov-report=xml - - - name: Use Codecov to track coverage - uses: codecov/codecov-action@v5 - with: - token: {% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %} - files: ./coverage.xml # coverage report - - - name: Build documentation - run: poetry run make html --directory docs/ - - cd: - permissions: - id-token: write - contents: write - - # Only run this job if the "ci" job passes - 
needs: ci - - # Only run this job if new work is pushed to "main" - if: github.event_name == 'push' && github.ref == 'refs/heads/main' - - # Set up operating system - runs-on: ubuntu-latest - - # Define job steps - steps: - - name: Set up Python - uses: actions/setup-python@v4 - with: - python-version: "{{ cookiecutter.python_version }}" - - - name: Check-out repository - uses: actions/checkout@v3 - with: - fetch-depth: 0 - - - name: Use Python Semantic Release to prepare release - id: release - uses: python-semantic-release/python-semantic-release@v8.3.0 - with: - github_token: {% raw %}${{ secrets.GITHUB_TOKEN }}{% endraw %} - - - name: Publish to TestPyPI - uses: pypa/gh-action-pypi-publish@release/v1 - if: steps.release.outputs.released == 'true' - with: - repository-url: https://test.pypi.org/legacy/ - password: {% raw %}${{ secrets.TEST_PYPI_API_TOKEN }}{% endraw %} - - - name: Test install from TestPyPI - if: steps.release.outputs.released == 'true' - run: | - pip install \ - --index-url https://test.pypi.org/simple/ \ - --extra-index-url https://pypi.org/simple \ - {{ cookiecutter.__package_slug }} - - - name: Publish to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 - if: steps.release.outputs.released == 'true' - with: - password: {% raw %}${{ secrets.PYPI_API_TOKEN }}{% endraw %} - - - name: Publish package distributions to GitHub Releases - uses: python-semantic-release/upload-to-gh-release@main - if: steps.release.outputs.released == 'true' - with: - github_token: {% raw %}${{ secrets.GITHUB_TOKEN }}{% endraw %} diff --git a/{{ cookiecutter.__package_slug }}/.github/workflows/ci.yml b/{{ cookiecutter.__package_slug }}/.github/workflows/ci.yml index 4274924..7799755 100644 --- a/{{ cookiecutter.__package_slug }}/.github/workflows/ci.yml +++ b/{{ cookiecutter.__package_slug }}/.github/workflows/ci.yml @@ -1,36 +1,69 @@ +############################### +# CI for {{ cookiecutter.__package_slug }} +# +# Overview +# - CI: Install deps, run tests, upload 
coverage +# +# Required repository configuration +# - Secrets: +# - CODECOV_TOKEN (for CI coverage upload) +############################### + name: ci -on: [push, pull_request] +on: + push: + branches: + - "main" + paths: + - "src/**" + - ".github/**" + - ".build/**" + - poetry.lock + - pyproject.toml + - tests/** + - .github/workflows/ci.yml + +env: + # General + PACKAGE_NAME: "{{ cookiecutter.__package_slug }}" + PYTHON_VERSION: "{{ cookiecutter.python_version }}" + MIN_COVERAGE: 0 jobs: - ci: + continuous-integration: # Set up operating system runs-on: ubuntu-latest # Define job steps steps: + - name: Check-out repository + uses: actions/checkout@v4 + - name: Set up Python - uses: actions/setup-python@v4 + uses: actions/setup-python@v5 with: - python-version: "{{ cookiecutter.python_version }}" - - - name: Check-out repository - uses: actions/checkout@v3 + python-version: "{% raw %}${{ env.PYTHON_VERSION }}{% endraw %}" - name: Install poetry uses: snok/install-poetry@v1 + with: + virtualenvs-create: true - - name: Install package + - name: Install package dependencies run: poetry install - - name: Test with pytest - run: poetry run pytest tests/ --cov={{ cookiecutter.__package_slug }} --cov-report=xml + - name: Run tests with pytest + run: | + poetry run pytest tests/ \ + --cov={{ cookiecutter.__package_slug }} \ + --cov-report=xml \ + --cov-report=term-missing \ + --cov-fail-under={% raw %}${{ env.MIN_COVERAGE }}{% endraw %} - - name: Use Codecov to track coverage + - name: Upload coverage to Codecov uses: codecov/codecov-action@v5 with: - token: {% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %} - files: ./coverage.xml # coverage report - - - name: Build documentation - run: poetry run make html --directory docs/ + files: ./coverage.xml + env: + CODECOV_TOKEN: "{% raw %}${{ secrets.CODECOV_TOKEN }}{% endraw %}" diff --git a/{{ cookiecutter.__package_slug }}/.gitignore b/{{ cookiecutter.__package_slug }}/.gitignore index b07c663..02246e4 100644 --- a/{{ 
cookiecutter.__package_slug }}/.gitignore +++ b/{{ cookiecutter.__package_slug }}/.gitignore @@ -1,3 +1,8 @@ +.venv/ +.python-version +__pycache__/ +*.pyc +.DS_Store # Byte-compiled / optimized / DLL files __pycache__/ *.py[cod] @@ -60,6 +65,7 @@ coverage.xml local_settings.py db.sqlite3 db.sqlite3-journal +{{ cookiecutter.__package_slug }}.db # Flask stuff: instance/ @@ -132,6 +138,9 @@ dmypy.json # PyCharm .idea/ +# Ruff cache +.ruff_cache/ + # RStudio project files **.Rproj.user/ **.Rproj.user* @@ -139,4 +148,4 @@ dmypy.json **.Rhistory # MacOS -.DS_Store \ No newline at end of file +.DS_Store diff --git a/{{ cookiecutter.__package_slug }}/.pre-commit-config.yaml b/{{ cookiecutter.__package_slug }}/.pre-commit-config.yaml new file mode 100644 index 0000000..25b9af5 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/.pre-commit-config.yaml @@ -0,0 +1,66 @@ +repos: + # ============================================================================= + # General pre-commit hooks (file format, syntax) + # ============================================================================= + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v5.0.0 + hooks: + - id: check-yaml + - id: check-json + - id: check-toml + - id: check-added-large-files + - id: check-docstring-first + - id: check-merge-conflict + - id: check-case-conflict + - id: trailing-whitespace + - id: end-of-file-fixer + - id: debug-statements + + # ============================================================================= + # Security hooks + # ============================================================================= + - repo: https://github.com/Yelp/detect-secrets + rev: v1.5.0 + hooks: + - id: detect-secrets + name: detect-secrets + + - repo: https://github.com/PyCQA/bandit + rev: 1.8.0 + hooks: + - id: bandit + name: bandit-security-linter + entry: bandit + language: python + types: [python] + args: ["-c", "pyproject.toml"] + exclude: tests/ + + # 
============================================================================= + # Python code style and formatting hooks + # ============================================================================= + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.8.0 + hooks: + - id: ruff + args: [--fix] + - id: ruff-format + + # ============================================================================= + # Python-specific hooks (type checking) + # ============================================================================= + - repo: https://github.com/pre-commit/mirrors-mypy + rev: v1.13.0 + hooks: + - id: mypy + exclude: ^(tests/) + additional_dependencies: + - types-requests + + # ============================================================================= + # Commit message hooks + # ============================================================================= + - repo: https://github.com/commitizen-tools/commitizen + rev: v4.1.0 + hooks: + - id: commitizen diff --git a/{{ cookiecutter.__package_slug }}/.readthedocs.yml b/{{ cookiecutter.__package_slug }}/.readthedocs.yml index 39d01e9..3a3ea65 100644 --- a/{{ cookiecutter.__package_slug }}/.readthedocs.yml +++ b/{{ cookiecutter.__package_slug }}/.readthedocs.yml @@ -17,7 +17,3 @@ build: - pip install poetry post_install: - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH poetry install --all-groups - -# Build documentation in the "docs/" directory with Sphinx -sphinx: - configuration: docs/conf.py diff --git a/{{ cookiecutter.__package_slug }}/.vscode/settings.json b/{{ cookiecutter.__package_slug }}/.vscode/settings.json new file mode 100644 index 0000000..c562ace --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/.vscode/settings.json @@ -0,0 +1,28 @@ +{ + "python.defaultInterpreterPath": "${env:HOME}/.pyenv/versions/${workspaceFolderBasename}/bin/python", + "python.analysis.typeCheckingMode": "basic", + "python.terminal.activateEnvironment": false, + "python.venvPath": "${env:HOME}/.pyenv/versions", + 
"[python]": { + "editor.defaultFormatter": "ms-python.python", + "editor.formatOnSave": false + }, + "terminal.integrated.profiles.osx": { + "{{ cookiecutter.__package_slug }}": { + "path": "zsh", + "args": ["-l", "-c", "cd '${workspaceFolder}'; eval \"$(pyenv init -)\"; ./scripts/setup_env.sh; pyenv shell {{ cookiecutter.__package_slug }}; exec zsh -l"] + } + }, + "terminal.integrated.defaultProfile.osx": "{{ cookiecutter.__package_slug }}", + "terminal.integrated.profiles.linux": { + "{{ cookiecutter.__package_slug }}": { + "path": "zsh", + "args": ["-l", "-c", "cd '${workspaceFolder}'; eval \"$(pyenv init -)\"; ./scripts/setup_env.sh; pyenv shell {{ cookiecutter.__package_slug }}; exec zsh -l"] + } + }, + "terminal.integrated.defaultProfile.linux": "{{ cookiecutter.__package_slug }}", + "task.allowAutomaticTasks": "on", + "python.testing.pytestArgs": ["tests"], + "python.testing.unittestEnabled": false, + "python.testing.pytestEnabled": true +} diff --git a/{{ cookiecutter.__package_slug }}/.vscode/tasks.json b/{{ cookiecutter.__package_slug }}/.vscode/tasks.json new file mode 100644 index 0000000..3fc8032 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/.vscode/tasks.json @@ -0,0 +1,106 @@ +{ + "version": "2.0.0", + "tasks": [ + { + "label": "🔧 Setup Python Environment", + "type": "process", + "command": "bash", + "args": ["-lc", "./scripts/setup_env.sh"], + "options": { + "cwd": "${workspaceFolder}", + "env": { + "PATH": "${env:PATH}:/usr/local/bin:/opt/homebrew/bin" + } + }, + "group": { + "kind": "build", + "isDefault": false + }, + "detail": "Sets up the Python development environment on demand", + "presentation": { + "echo": true, + "reveal": "always", + "focus": true, + "panel": "new", + "showReuseMessage": false, + "clear": true + }, + "problemMatcher": [] + }, + { + "label": "📊 Run Tests with Coverage", + "type": "process", + "command": "make", + "args": ["test-cov"], + "options": { + "cwd": "${workspaceFolder}" + }, + "group": { + "kind": "test", 
+ "isDefault": true + }, + "detail": "Runs unit tests with comprehensive coverage report (terminal + HTML + XML)", + "presentation": { + "echo": true, + "reveal": "always", + "focus": true, + "panel": "new", + "showReuseMessage": false, + "clear": true, + "close": false, + "group": "test" + }, + "problemMatcher": [] + }, + { + "label": "🐳 Build Docker Image", + "type": "process", + "command": "make", + "args": ["docker-build"], + "options": { + "cwd": "${workspaceFolder}" + }, + "group": { + "kind": "build", + "isDefault": false + }, + "detail": "Builds the project Docker image using the build script", + "presentation": { + "echo": true, + "reveal": "always", + "focus": true, + "panel": "new", + "showReuseMessage": false, + "clear": true, + "close": false, + "group": "docker" + }, + "problemMatcher": [] + }, + { + "label": "🐳 Docker Up (compose)", + "type": "process", + "command": "make", + "args": ["docker-up"], + "options": { + "cwd": "${workspaceFolder}" + }, + "group": { + "kind": "build", + "isDefault": false + }, + "detail": "Build and start docker-compose stack via Makefile (uses .build/docker-compose.yml)", + "presentation": { + "echo": true, + "reveal": "always", + "focus": true, + "panel": "new", + "showReuseMessage": false, + "clear": true, + "close": false, + "group": "docker" + }, + "problemMatcher": [] + } + ] +} diff --git a/{{ cookiecutter.__package_slug }}/CLAUDE.md b/{{ cookiecutter.__package_slug }}/CLAUDE.md new file mode 100644 index 0000000..92c4881 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/CLAUDE.md @@ -0,0 +1,285 @@ +# CLAUDE.md + +This file provides guidance to Claude Code (claude.ai/code) when working with this project. + +## Project Overview + +**{{ cookiecutter.package_name }}** is a REST API project built with FastAPI implementing Clean Architecture / Domain-Driven Design (DDD) patterns. + +**Status**: This is a basic example template. 
Features, endpoints, and business logic should be expanded and customized for your specific use case. + +## Project Structure + +The project follows Clean Architecture with three main layers: + +``` +src/{{ cookiecutter.__package_slug }}/ +├── domain/ # Business logic (entities, repositories interfaces, services) +│ ├── entities/ # Domain models (e.g., Task, TaskList) +│ ├── repositories/ # Repository interfaces (contracts) +│ ├── services/ # Domain services (business rules) +│ └── shared/ # Shared domain types/exceptions +├── application/ # Use cases and application services +├── infrastructure/ # External integrations (database, web, DI) +│ ├── persistence/ # Database models and repository implementations +│ │ ├── models/ # SQLAlchemy ORM models +│ │ └── repositories/ # Repository implementations +│ └── web/ # FastAPI web layer +│ ├── api/v1/ # REST API endpoints (v1) +│ │ └── schemas/ # Pydantic request/response schemas +│ └── dependencies/ # FastAPI dependency injection +└── {{ cookiecutter.__package_slug }}.py # FastAPI app factory +``` + +## File Organization + +- **`__init__.py` files are intentionally blank** - Explicit imports prevent circular dependencies +- **All imports must be explicit** - Use full paths from the package root +- **Type hints are required** - All functions should have type annotations +- **Pydantic models for validation** - Request/response schemas in `infrastructure/web/api/v1/schemas/` + +## Development Commands + +### Environment Setup + +```bash +# The environment is automatically set up during project generation +# If you need to re-run setup: +make setup + +# Install/update dependencies: +make deps +``` + +### Testing + +```bash +# Run all tests +make test + +# Run tests with coverage report +make test-cov + +# Run only infrastructure tests (database, API) +poetry run pytest tests/{{ cookiecutter.__package_slug }}/infrastructure/ -v + +# Run only API tests +poetry run pytest tests/{{ cookiecutter.__package_slug 
}}/infrastructure/web/ -v +``` + +### Running the Application + +```bash +# Start FastAPI in development mode (localhost only) +make start + +# Start FastAPI accessible from other devices +make dev-server + +# Start FastAPI in production mode +make prod-server + +# API documentation will be available at: +# - Swagger UI: http://localhost:8000/docs +# - ReDoc: http://localhost:8000/redoc +``` + +### Testing the API + +```bash +# Run complete CRUD flow tests (requires app running with 'make start') +make test-crud + +# Run API tests (requires app running) +make test-api + +# Run API tests with verbose output +make test-api-verbose +``` + +## Code Style and Conventions + +### Imports +- Use explicit imports: `from {{ cookiecutter.__package_slug }}.domain.entities import Task` +- Avoid wildcard imports: ❌ `from module import *` +- Group imports: stdlib → third-party → local + +### Type Hints +- All function parameters must have type hints +- All return types must be specified +- Use `Optional[T]` for nullable values: `def get_task(id: int) -> Optional[Task]:` + +### Naming +- **Classes**: PascalCase (`Task`, `TaskRepository`) +- **Functions/methods**: snake_case (`create_task`, `get_by_id`) +- **Constants**: UPPER_SNAKE_CASE (`MAX_RETRIES`, `DEFAULT_TIMEOUT`) +- **Private methods**: prefix with underscore (`_validate_input`) + +### Example Function + +```python +from typing import Optional +from {{ cookiecutter.__package_slug }}.domain.entities import Task + +def create_task(title: str, description: Optional[str] = None) -> Task: + """Create a new task with validation. 
+ + Args: + title: Task title (required) + description: Optional task description + + Returns: + The created Task instance + + Raises: + ValueError: If title is empty + """ + if not title.strip(): + raise ValueError("Title cannot be empty") + + return Task(title=title, description=description) +``` + +## Architecture Layers Explained + +### Domain Layer (`domain/`) +- **Entities**: Core business models (Task, TaskList) +- **Repositories**: Interfaces defining data access contracts +- **Services**: Business logic that doesn't fit in entities +- **Dependencies**: Domain layer imports nothing else from the application + +### Application Layer (`application/`) +- **Use Cases**: Orchestrate domain entities and repositories +- **Application Services**: Coordinate business operations +- **Dependencies**: Only imports from domain layer + +### Infrastructure Layer (`infrastructure/`) +- **Persistence**: Database models, ORM mappings, repository implementations +- **Web**: FastAPI routes, schemas, dependency injection +- **Dependencies**: Can import from domain and application + +## Testing Strategy + +### Test Organization +``` +tests/{{ cookiecutter.__package_slug }}/ +├── infrastructure/ +│ ├── persistence/repositories/ +│ │ ├── test_task_repository_rds.py +│ │ └── test_task_list_repository_rds.py +│ └── web/ +│ └── test_api.py +``` + +### Running Tests +1. **Unit tests**: `make test` +2. **With coverage**: `make test-cov` +3. **Specific test file**: `poetry run pytest tests/{{ cookiecutter.__package_slug }}/infrastructure/web/test_api.py -v` +4. **Specific test**: `poetry run pytest tests/{{ cookiecutter.__package_slug }}/infrastructure/web/test_api.py::test_create_task -v` + +### Writing Tests +- Use pytest fixtures for common setup +- Mock external dependencies (database, external APIs) +- Test both success and error paths +- Keep tests focused and readable + +## Database + +The project uses **SQLAlchemy** with **SQLite** by default (can be switched to PostgreSQL). 
+ +### Running Migrations +```bash +# Create initial migration +poetry run alembic revision --autogenerate -m "Initial migration" + +# Apply migrations +poetry run alembic upgrade head + +# Rollback last migration +poetry run alembic downgrade -1 +``` + +## Pre-commit Hooks + +The project includes pre-commit hooks for code quality: + +```bash +# Install hooks +pre-commit install + +# Run manually on all files +pre-commit run --all-files + +# Bypass hooks (not recommended) +git commit --no-verify +``` + +## Key Files + +- **`pyproject.toml`**: Project metadata and dependencies +- **`.python-version`**: Python version for pyenv +- **`Makefile`**: Common development commands +- **`scripts/setup_env.sh`**: Environment initialization script +- **`.vscode/settings.json`**: VS Code configuration (Python interpreter, testing) +- **`.pre-commit-config.yaml`**: Pre-commit hooks configuration + +## Customizing the Project + +This template provides a basic REST API structure. To customize: + +1. **Add domain entities**: Create new files in `src/{{ cookiecutter.__package_slug }}/domain/entities/` +2. **Add repositories**: Define interfaces in `domain/repositories/` and implementations in `infrastructure/persistence/repositories/` +3. **Add use cases**: Create application services in `application/` +4. **Add API endpoints**: Create routes in `infrastructure/web/api/v1/` +5. **Add database models**: SQLAlchemy models in `infrastructure/persistence/models/` +6. **Add tests**: Follow the existing test structure + +## Common Tasks with Claude + +### Ask Claude to explain the codebase +``` +"Explain the architecture of this project and how the three layers interact" +``` + +### Ask Claude to add a new feature +``` +"Add a new endpoint to list all tasks with pagination. Follow the existing architecture pattern." +``` + +### Ask Claude to fix a bug +``` +"The task creation is failing with a validation error. Debug and fix the issue." 
+``` + +### Ask Claude to add tests +``` +"Add comprehensive tests for the task repository, including edge cases." +``` + +### Ask Claude to refactor code +``` +"Refactor the task service to improve readability and follow SOLID principles." +``` + +## Useful Commands Reference + +| Command | Purpose | +|---------|---------| +| `make help` | Show all available commands | +| `make setup` | Setup Python environment | +| `make deps` | Install dependencies | +| `make test` | Run tests | +| `make test-cov` | Tests with coverage | +| `make start` | Start development server | +| `make clean` | Clean build artifacts | +| `make docker-build` | Build Docker image | +| `make docker-up` | Start Docker services | + +## Resources + +- **FastAPI Documentation**: https://fastapi.tiangolo.com/ +- **SQLAlchemy ORM**: https://docs.sqlalchemy.org/ +- **Pydantic**: https://docs.pydantic.dev/ +- **pytest**: https://docs.pytest.org/ +- **Clean Architecture**: https://blog.cleancoder.com/uncle-bob/2012/08/13/the-clean-architecture.html +- **Domain-Driven Design**: https://martinfowler.com/bliki/DomainDrivenDesign.html diff --git a/{{ cookiecutter.__package_slug }}/Makefile b/{{ cookiecutter.__package_slug }}/Makefile new file mode 100644 index 0000000..a3b672c --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/Makefile @@ -0,0 +1,156 @@ +.PHONY: help setup deps docker docker-up docker-down docker-logs test test-cov clean install start dev prod test-crud test-api test-api-verbose + +# Default target +help: + @echo "Available commands:" + @echo "" + @echo "🚀 Application Commands:" + @echo " start - Start FastAPI app (localhost only)" + @echo " dev-server - Start FastAPI app in dev mode (accessible from network)" + @echo " prod-server - Start FastAPI app in production mode" + @echo "" + @echo "🔧 Development Commands:" + @echo " setup - Setup Python development environment" + @echo " deps - Install dependencies from pyproject.toml" + @echo " install - Install package in development mode" + 
@echo " test - Run unit tests with pytest" + @echo " test-cov - Run tests with coverage report" + @echo " test-api - Run API CRUD tests" + @echo " test-api-verbose - Run API tests with verbose output" + @echo " clean - Clean build artifacts" + @echo "" + @echo "🐳 Docker Commands:" + @echo " docker-build - Build Docker image" + @echo " docker-up - Build and start compose stack (.build/docker-compose.yml)" + @echo " docker-down - Stop and remove compose stack" + @echo " docker-logs - Tail logs from compose stack" + @echo "" + @echo "📚 Other Commands:" + @echo " help - Show this help message" + @echo " dev - Quick development workflow (setup + install + test)" + @echo " full-setup - Full setup including dependencies" + +# Setup Python development environment +setup: + @echo "🔧 Setting up Python development environment..." + @bash -c "./scripts/setup_env.sh" + +# Install dependencies from pyproject.toml +deps: + @echo "📦 Installing dependencies..." + poetry install + +# Build Docker image +docker-build: + @echo "🐳 Building Docker image..." + @bash -c "./scripts/build_docker.sh" + +# Docker Compose helpers (keep Docker files under .build/) +docker-up: docker-down + @echo "🐳 Building and starting docker-compose stack..." + @bash -c "docker compose -f .build/docker-compose.yml up --build $(ARGS)" + + +docker-down: + @echo "🛑 Stopping docker-compose stack..." + @bash -c "docker compose -f .build/docker-compose.yml down" + +docker-logs: + @echo "📜 Tailing docker-compose logs... (Ctrl+C to stop)" + @bash -c "docker compose -f .build/docker-compose.yml logs -f" + +# Run unit tests +test: + @echo "🧪 Running unit tests..." + @echo "📁 Test directory: $(PWD)/tests/" + @echo "🔍 Verbose output enabled" + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run pytest tests/ -v + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + @echo "✅ Unit tests completed!" 
+ +# Run tests with coverage +test-cov: + @echo "🧪 Running tests with coverage..." + @echo "📁 Test directory: $(PWD)/tests/" + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run pytest tests/ --cov=src/{{ cookiecutter.__package_slug }} --cov-report=term-missing + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + @echo "✅ Coverage tests completed!" + + +# Install package in development mode +install: + @echo "📦 Installing package in development mode..." + poetry install + +# Clean build artifacts +clean: + @echo "🧹 Cleaning build artifacts..." + rm -rf build/ + rm -rf dist/ + rm -rf *.egg-info/ + rm -rf .coverage + rm -rf htmlcov/ + rm -rf .pytest_cache/ + find . -type d -name "__pycache__" -exec rm -rf {} + + find . -type f -name "*.pyc" -delete + @echo "✅ Cleanup completed!" + +# Quick development workflow +dev: setup install test + @echo "✅ Development environment ready!" + +# Full setup including dependencies +full-setup: setup install + @echo "✅ Full setup completed!" + +# Start the FastAPI application +start: + @echo "🚀 Starting FastAPI application..." + @echo "📖 API Documentation: http://127.0.0.1:8000/docs" + @echo "🔍 ReDoc: http://127.0.0.1:8000/redoc" + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run uvicorn {{ cookiecutter.__package_slug }}.{{ cookiecutter.__package_slug }}:app --reload --host 127.0.0.1 --port 8000 + +# Start in development mode (accessible from other devices) +dev-server: + @echo "🌐 Starting FastAPI application in development mode..." 
+ @echo "📖 API Documentation: http://0.0.0.0:8000/docs" + @echo "🔍 ReDoc: http://0.0.0.0:8000/redoc" + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run uvicorn {{ cookiecutter.__package_slug }}.{{ cookiecutter.__package_slug }}:app --reload --host 0.0.0.0 --port 8000 + +# Start in production mode +prod-server: + @echo "🚀 Starting FastAPI application in production mode..." + @echo "📖 API Documentation: http://0.0.0.0:8000/docs" + @echo "🔍 ReDoc: http://0.0.0.0:8000/redoc" + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run uvicorn {{ cookiecutter.__package_slug }}.{{ cookiecutter.__package_slug }}:app --host 0.0.0.0 --port 8000 + +# Run CRUD flow tests +test-crud: + @echo "🧪 Running complete CRUD flow tests..." + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run python scripts/test_crud_flow.py + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + @echo "✅ CRUD flow tests completed!" + +# Run API CRUD tests +test-api: + @echo "🧪 Running complete API CRUD tests..." + @echo "⚠️ Make sure the API is running with 'make start' in another terminal" + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run python scripts/test_api_full_crud.py + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + @echo "✅ API CRUD tests completed!" + +# Run API CRUD tests with verbose output +test-api-verbose: + @echo "🧪 Running complete API CRUD tests (verbose)..." + @echo "⚠️ Make sure the API is running with 'make start' in another terminal" + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + poetry run python scripts/test_api_full_crud.py --verbose + @echo "━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━" + @echo "✅ API CRUD tests completed!" 
diff --git a/{{ cookiecutter.__package_slug }}/README.md b/{{ cookiecutter.__package_slug }}/README.md index a45172d..6ea3019 100644 --- a/{{ cookiecutter.__package_slug }}/README.md +++ b/{{ cookiecutter.__package_slug }}/README.md @@ -1,25 +1,38 @@ # {{ cookiecutter.__package_slug }} -{{ cookiecutter.package_short_description }} +A package for doing great things! ## Installation -```bash -$ pip install {{ cookiecutter.__package_slug }} -``` +This project automatically sets up the Python environment when opened in VS Code. +For other installation methods, see the [installation guide](docs/installation.md). ## Usage -- TODO +- For a complete usage guide, see [`docs/USAGE.md`](docs/USAGE.md). + +Quick summary: + +- Installation and setup: see [`docs/installation.md`](docs/installation.md) +- Common commands: use the `Makefile` (`make setup`, `make deps`, `make test`) +- Repository usage guide: [`docs/USAGE.md`](docs/USAGE.md) + +### Docker + +- Build the base image (uses `.build/Dockerfile`): ## Contributing -Interested in contributing? Check out the contributing guidelines. Please note that this project is released with a Code of Conduct. By contributing to this project, you agree to abide by its terms. +Interested in contributing? Check out the contributing guidelines. Please note that +this project is released with a Code of Conduct. By contributing to this project, +you agree to abide by its terms. ## License -`{{ cookiecutter.__package_slug }}` was created by {{ cookiecutter.author_name }}. {% if cookiecutter.open_source_license != 'None' -%}It is licensed under the terms of the {{ cookiecutter.open_source_license }} license.{% else %}{{ cookiecutter.author_name }} retains all rights to the source and it may not be reproduced, distributed, or used to create derivative works.{% endif %} +`{{ cookiecutter.__package_slug }}` was created by Jose Miguel Resendiz . +It is licensed under the terms of the MIT license. 
## Credits -`{{ cookiecutter.__package_slug }}` was created with [`cookiecutter`](https://cookiecutter.readthedocs.io/en/latest/) and the `py-pkgs-cookiecutter` [template](https://github.com/py-pkgs/py-pkgs-cookiecutter). +`{{ cookiecutter.__package_slug }}` was created with [`cookiecutter`](https://cookiecutter.readthedocs.io/en/latest/) +and the `py-pkgs-cookiecutter` [template](https://github.com/ztocker/py-pkgs-cookiecutter). diff --git a/{{ cookiecutter.__package_slug }}/docs/Makefile b/{{ cookiecutter.__package_slug }}/docs/Makefile deleted file mode 100755 index 195282b..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/Makefile +++ /dev/null @@ -1,19 +0,0 @@ -# Minimal makefile for Sphinx documentation - -# You can set these variables from the command line. -SPHINXOPTS = -SPHINXBUILD = python -msphinx -SPHINXPROJ = {{ cookiecutter.__package_slug }} -SOURCEDIR = . -BUILDDIR = _build - -# Put it first so that "make" without argument is like "make help". -help: - @$(SPHINXBUILD) -M help "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) - -.PHONY: help Makefile - -# Catch-all target: route all unknown targets to Sphinx using the new -# "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). -%: Makefile - @$(SPHINXBUILD) -M $@ "$(SOURCEDIR)" "$(BUILDDIR)" $(SPHINXOPTS) $(O) \ No newline at end of file diff --git a/{{ cookiecutter.__package_slug }}/docs/USAGE.md b/{{ cookiecutter.__package_slug }}/docs/USAGE.md new file mode 100644 index 0000000..52e80b3 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/docs/USAGE.md @@ -0,0 +1,118 @@ +# Repository usage + +This guide covers day-to-day usage of the project: commands, workflow, and best practices. + +Note: for installation and environment setup, see [`docs/installation.md`](installation.md). + +## Typical workflow + +1. Create or edit code in `src/{{ cookiecutter.__package_slug }}/` +2. Add or update tests in `tests/` +3. Run linters and formatters +4. Run tests and coverage +5. 
Commit using Conventional Commits + +## Key commands (Makefile) + +- `make test`: run tests (`pytest -v`) +- `make test-cov`: run tests with coverage (`--cov={{ cookiecutter.__package_slug }}`) +- `make deps`, `make deps-core`, `make deps-test`: manage base dependencies via scripts +- `make docker`: build the Docker image +- `make clean`: remove build artifacts and caches + +## Dependency management + +- Important: always add or remove dependencies using Poetry commands. Do not edit `pyproject.toml` by hand; Poetry will + update it for you. + +- Helper script: + +```bash +./scripts/add_deps.sh --core # runtime and essential dev dependencies +./scripts/add_deps.sh --test # testing tools +./scripts/add_deps.sh --all # everything above +``` + +- Directly with Poetry: + +```bash +poetry add +poetry add --group dev +``` + +## Development and testing + +- Unit tests: + +```bash +make test +``` + +- With coverage: + +```bash +make test-cov +``` + +- Matrix/isolated via tox: + +```bash +poetry run tox +``` + +## Code quality + +Configured hooks: `ruff`, `black`, `markdownlint`, `mypy`, `bandit`, `detect-secrets`, `interrogate`. + +Run all hooks manually: + +```bash +poetry run pre-commit run -a +``` + +## Package usage + +Minimal example (current API): + +```python +import {{ cookiecutter.__package_slug }} + +print({{ cookiecutter.__package_slug }}.__version__) +``` + +Tip: export public functions/classes from `src/{{ cookiecutter.__package_slug }}/__init__.py` for a clean API and add tests in `tests/{{ cookiecutter.__package_slug }}/`. + +## Versioning and releases + +- Versioning managed by `python-semantic-release` (configured in `pyproject.toml`) +- Use Conventional Commits: `feat: ...`, `fix: ...`, `chore: ...`, etc. + +Manual publish (if applicable): + +```bash +poetry build +poetry publish +``` + +For automated pipelines, configure required tokens in your CI. 
+ +## Docker + +Build the image: + +```bash +make docker +``` + +Print package version with Docker Compose (files under `.build/`): + +```bash +make docker-up # builds if needed and runs compose +make docker-logs # show output +make docker-down # stop and clean +``` + +## Next steps + +- Implement features in `src/{{ cookiecutter.__package_slug }}/` and test them in `tests/` +- Document the API under `docs/` diff --git a/{{ cookiecutter.__package_slug }}/docs/changelog.md b/{{ cookiecutter.__package_slug }}/docs/changelog.md deleted file mode 100755 index 8261b35..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/changelog.md +++ /dev/null @@ -1,2 +0,0 @@ -```{include} ../CHANGELOG.md -``` \ No newline at end of file diff --git a/{{ cookiecutter.__package_slug }}/docs/conduct.md b/{{ cookiecutter.__package_slug }}/docs/conduct.md deleted file mode 100755 index 0568705..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/conduct.md +++ /dev/null @@ -1,2 +0,0 @@ -```{include} ../CONDUCT.md -``` \ No newline at end of file diff --git a/{{ cookiecutter.__package_slug }}/docs/conf.py b/{{ cookiecutter.__package_slug }}/docs/conf.py deleted file mode 100755 index a2da593..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/conf.py +++ /dev/null @@ -1,36 +0,0 @@ -# Configuration file for the Sphinx documentation builder. -# -# This file only contains a selection of the most common options. For a full -# list see the documentation: -# https://www.sphinx-doc.org/en/master/usage/configuration.html - -# -- Project information ----------------------------------------------------- - -project = u"{{ cookiecutter.__package_slug }}" -copyright = u"{% now 'local', '%Y' %}, {{ cookiecutter.author_name }}" -author = u"{{ cookiecutter.author_name }}" - -# -- General configuration --------------------------------------------------- - -# Add any Sphinx extension module names here, as strings. 
They can be -# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom -# ones. -extensions = [ - "myst_nb", - "autoapi.extension", - "sphinx.ext.napoleon", - "sphinx.ext.viewcode", -] -autoapi_dirs = ["../src"] - -# List of patterns, relative to source directory, that match files and -# directories to ignore when looking for source files. -# This pattern also affects html_static_path and html_extra_path. -exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"] - -# -- Options for HTML output ------------------------------------------------- - -# The theme to use for HTML and HTML Help pages. See the documentation for -# a list of builtin themes. -# -html_theme = "sphinx_rtd_theme" diff --git a/{{ cookiecutter.__package_slug }}/docs/contributing.md b/{{ cookiecutter.__package_slug }}/docs/contributing.md deleted file mode 100755 index 435d357..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/contributing.md +++ /dev/null @@ -1,2 +0,0 @@ -```{include} ../CONTRIBUTING.md -``` \ No newline at end of file diff --git a/{{ cookiecutter.__package_slug }}/docs/example.ipynb b/{{ cookiecutter.__package_slug }}/docs/example.ipynb index 3dab327..962000e 100644 --- a/{{ cookiecutter.__package_slug }}/docs/example.ipynb +++ b/{{ cookiecutter.__package_slug }}/docs/example.ipynb @@ -2,23 +2,23 @@ "cells": [ { "cell_type": "markdown", + "metadata": {}, "source": [ "# Example usage\n", "\n", "To use `{{ cookiecutter.__package_slug }}` in a project:" - ], - "metadata": {} + ] }, { "cell_type": "code", "execution_count": null, + "metadata": {}, + "outputs": [], "source": [ "import {{ cookiecutter.__package_slug }}\n", "\n", "print({{ cookiecutter.__package_slug }}.__version__)" - ], - "outputs": [], - "metadata": {} + ] } ], "metadata": { @@ -42,4 +42,4 @@ }, "nbformat": 4, "nbformat_minor": 4 -} \ No newline at end of file +} diff --git a/{{ cookiecutter.__package_slug }}/docs/index.md b/{{ cookiecutter.__package_slug }}/docs/index.md deleted file mode 100755 index 
6fcdc0e..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/index.md +++ /dev/null @@ -1,13 +0,0 @@ -```{include} ../README.md -``` - -```{toctree} -:maxdepth: 1 -:hidden: - -example.ipynb -changelog.md -contributing.md -conduct.md -autoapi/index -``` \ No newline at end of file diff --git a/{{ cookiecutter.__package_slug }}/docs/installation.md b/{{ cookiecutter.__package_slug }}/docs/installation.md new file mode 100644 index 0000000..1bf370d --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/docs/installation.md @@ -0,0 +1,198 @@ +# Installation Guide + +## Overview + +This project supports multiple installation methods: + +1. **Automatic (VS Code/Cursor)**: Environment setup happens automatically when opening the project +2. **Script-based**: Using the provided setup script +3. **Manual**: Step-by-step manual installation + +## Prerequisites + +- [pyenv installed](https://github.com/pyenv/pyenv#installation) +- [pyenv-virtualenv plugin installed](https://github.com/pyenv/pyenv-virtualenv#installation) + +## Method 1: Automatic Setup (VS Code/Cursor) + +### VS Code Setup Steps + +1. **Open the project in VS Code** + - The project will automatically run the setup script + - A notification will appear showing the setup progress + +2. **Verify the setup** + - Check the bottom-left corner for the Python interpreter + - It should display `{{ cookiecutter.__package_slug }}` as the active environment + - The integrated terminal will automatically use the correct environment + +3. **Start coding** + - Your environment is ready to use + - All dependencies will be automatically installed + +## Method 2: Script-based Setup + +### Script Execution Steps + +1. **Run the setup script** + + ```bash + ./scripts/setup_env.sh + ``` + +2. **For verbose output** + + ```bash + ./scripts/setup_env.sh --verbose + ``` + +3. 
**For quiet mode** + + ```bash + ./scripts/setup_env.sh --quiet + ``` + +The script will: + +- Check if pyenv and pyenv-virtualenv are available +- Verify Python {{ cookiecutter.python_version }} is installed +- Create a virtual environment named `{{ cookiecutter.__package_slug }}` +- Install Poetry 2.1.3 +- Configure the local Python version + +## Method 3: Manual Setup + +### Manual Setup Steps + +#### 1. Virtual Environment Setup + +First, remove any previous installation: + +```bash +pyenv virtualenv-delete -f {{ cookiecutter.__package_slug }} +rm .python-version +pyenv uninstall -f {{ cookiecutter.python_version }} +``` + +Then, create and set up the new virtual environment: + +```bash +pyenv install {{ cookiecutter.python_version }} -f +pyenv virtualenv {{ cookiecutter.python_version }} {{ cookiecutter.__package_slug }} +pyenv local {{ cookiecutter.__package_slug }} +pip install --upgrade pip +pip install poetry==2.1.3 +``` + +#### 2. Dependencies Installation + +Configure Poetry and install dependencies: + +```bash +poetry config virtualenvs.create true +poetry install +``` + +Set up development tools: + +```bash +# Install pre-commit hooks +poetry run pre-commit install + +# Get virtual environment path for IDEs +poetry env info -e | pbcopy +``` + +## VS Code Configuration + +The project includes pre-configured VS Code settings: + +### Automatic Tasks + +- **Setup Python Env (auto)**: Runs automatically when opening the project +- **Terminal Profile**: Automatically activates the environment in new terminals + +### Settings + +- Python interpreter path is automatically configured +- Virtual environment activation is enabled +- Type checking is set to basic mode + +### Manual Task Execution + +If you need to run the setup manually: + +1. Press `Ctrl+Shift+P` (or `Cmd+Shift+P` on macOS) +2. Type "Tasks: Run Task" +3. Select "Setup Python Env (auto)" + +## Troubleshooting + +### Common Issues + +1. 
**pyenv not found** + + ```bash + # Install pyenv (macOS) + brew install pyenv pyenv-virtualenv + + # Add to your shell profile + echo 'export PYENV_ROOT="$HOME/.pyenv"' >> ~/.zshrc + echo 'command -v pyenv >/dev/null || export PATH="$PYENV_ROOT/bin:$PATH"' >> ~/.zshrc + echo 'eval "$(pyenv init -)"' >> ~/.zshrc + ``` + +2. **Virtual environment not activating** + + ```bash + # Check current pyenv version + pyenv version + + # Set local version + pyenv local {{ cookiecutter.__package_slug }} + ``` + +3. **Poetry installation fails** + + ```bash + # Upgrade pip first + pip install --upgrade pip + + # Install poetry with specific version + pip install poetry==2.1.3 + ``` + +## Verification + +After completing any installation method, verify your setup with these commands: + +```bash +# Verify Python version +python --version + +# Verify pyenv environment +pyenv version + +# Verify Poetry +poetry --version + +# Check installed packages +poetry show + +# Verify environment activation +pyenv shell {{ cookiecutter.__package_slug }} +``` + +**Expected output for successful setup:** + +- Python version should show {{ cookiecutter.python_version }} +- pyenv version should show `{{ cookiecutter.__package_slug }}` +- Poetry version should show 2.1.3 +- Environment should activate without errors + +## Environment Details + +- **Python Version**: {{ cookiecutter.python_version }} +- **Package Manager**: Poetry 2.1.3 +- **Virtual Environment**: {{ cookiecutter.__package_slug }} (managed by pyenv) +- **Development Tools**: pre-commit, pytest diff --git a/{{ cookiecutter.__package_slug }}/docs/make.bat b/{{ cookiecutter.__package_slug }}/docs/make.bat deleted file mode 100755 index 2373439..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/make.bat +++ /dev/null @@ -1,36 +0,0 @@ -@ECHO OFF - -pushd %~dp0 - -REM Command file for Sphinx documentation - -if "%SPHINXBUILD%" == "" ( - set SPHINXBUILD=python -msphinx -) -set SOURCEDIR=. 
-set BUILDDIR=_build -set SPHINXPROJ={{ cookiecutter.__package_slug }} - -if "%1" == "" goto help - -%SPHINXBUILD% >NUL 2>NUL -if errorlevel 9009 ( - echo. - echo.The Sphinx module was not found. Make sure you have Sphinx installed, - echo.then set the SPHINXBUILD environment variable to point to the full - echo.path of the 'sphinx-build' executable. Alternatively you may add the - echo.Sphinx directory to PATH. - echo. - echo.If you don't have Sphinx installed, grab it from - echo.http://sphinx-doc.org/ - exit /b 1 -) - -%SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% -goto end - -:help -%SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% - -:end -popd diff --git a/{{ cookiecutter.__package_slug }}/docs/requirements.txt b/{{ cookiecutter.__package_slug }}/docs/requirements.txt deleted file mode 100644 index 7b5bd2a..0000000 --- a/{{ cookiecutter.__package_slug }}/docs/requirements.txt +++ /dev/null @@ -1,3 +0,0 @@ -myst-nb -sphinx-autoapi -sphinx-rtd-theme \ No newline at end of file diff --git a/{{ cookiecutter.__package_slug }}/pyproject.toml b/{{ cookiecutter.__package_slug }}/pyproject.toml index 4884a21..c7f900b 100644 --- a/{{ cookiecutter.__package_slug }}/pyproject.toml +++ b/{{ cookiecutter.__package_slug }}/pyproject.toml @@ -4,20 +4,39 @@ version = "{{ cookiecutter.package_version }}" description = "{{ cookiecutter.package_short_description }}" authors = ["{{ cookiecutter.author_name }}"] license = "{{ cookiecutter.open_source_license }}" -readme = "README.md" +packages = [ + { include = "{{ cookiecutter.__package_slug }}", from = "src" }, +] [tool.poetry.dependencies] -python = "^{{ cookiecutter.python_version }}" +python = "{{ cookiecutter.python_version }}" -{% if cookiecutter.include_github_actions == 'ci+cd' -%} -[tool.semantic_release] -version_toml = [ - "pyproject.toml:tool.poetry.version", -] # version location -branch = "main" # branch to make releases of -changelog_file = "CHANGELOG.md" # changelog file -build_command = "pip 
install poetry && poetry build" # build dists -{% endif %} [build-system] requires = ["poetry-core>=1.0.0"] build-backend = "poetry.core.masonry.api" + +#################### +# Semantic-release # +#################### + +[tool.semantic_release] +version_toml = [ + "pyproject.toml:tool.poetry.version", +] +branch = "main" +changelog_file = "CHANGELOG.md" +build_command = "pip install poetry && poetry build" + +[tool.semantic_release.branches.main] +match = "main" +prerelease = false + +# [tool.semantic_release.branches.develop] +# match = "develop" +# prerelease = true +# prerelease_token = "dev" + +# [tool.semantic_release.branches.staging] +# match = "staging" +# prerelease = true +# prerelease_token = "rc" diff --git a/{{ cookiecutter.__package_slug }}/scripts/build_docker.sh b/{{ cookiecutter.__package_slug }}/scripts/build_docker.sh new file mode 100755 index 0000000..58ae24d --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/scripts/build_docker.sh @@ -0,0 +1,21 @@ +#!/bin/bash + +# Build Docker image for {{ cookiecutter.__package_slug }} +# Usage: ./scripts/build_docker.sh [tag] + +# Get version from pyproject.toml +VERSION=$(grep "^version = " pyproject.toml | cut -d'"' -f2) +TAG=${1:-$VERSION} +IMAGE="{{ cookiecutter.__package_slug }}:${TAG}" + +echo "Building ${IMAGE}..." + +# Check requirements +[ -f "pyproject.toml" ] || { echo "Error: Run from project root"; exit 1; } +[ -f ".build/Dockerfile" ] || { echo "Error: Dockerfile not found"; exit 1; } +command -v docker >/dev/null || { echo "Error: Docker not found"; exit 1; } + +# Build image +docker build -t "${IMAGE}" -f .build/Dockerfile . 
+ +echo "Done: ${IMAGE}" diff --git a/{{ cookiecutter.__package_slug }}/scripts/setup_env.sh b/{{ cookiecutter.__package_slug }}/scripts/setup_env.sh new file mode 100755 index 0000000..7e9d922 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/scripts/setup_env.sh @@ -0,0 +1,276 @@ +#!/usr/bin/env bash + +set -euo pipefail + +# Output mode flags +QUIET=1 +case "${1:-}" in + --verbose) QUIET=0 ;; + --quiet) QUIET=1 ;; + *) : ;; +esac + +# Track whether we performed any action (e.g., created env, installed tools) +DID_ACTION=0 + +# Preserve a handle to the original stdout and optionally silence regular output +if [[ "$QUIET" -eq 1 ]]; then + exec 3>&1 + exec 1>/dev/null +else + exec 3>&1 +fi + +supports_color() { [[ -t 3 ]] && command -v tput >/dev/null 2>&1; } +color_green() { supports_color && tput setaf 2 || true; } +color_yellow() { supports_color && tput setaf 3 || true; } +color_red() { supports_color && tput setaf 1 || true; } +color_reset() { supports_color && tput sgr0 || true; } + +print_status_line() { + local level="$1"; shift || true + local msg="$*" + case "$level" in + OK) + { color_green; printf "%s\n" "$msg"; color_reset; } >&3 + ;; + WARN) + { color_yellow; printf "%s\n" "$msg"; color_reset; } >&3 + ;; + ERR) + { color_red; printf "%s\n" "$msg"; color_reset; } >&3 + ;; + *) + printf "%s\n" "$msg" >&3 + ;; + esac +} + +# Print compact status: static prefix uncolored, status+emoji colored +print_status_compact() { + local level="$1"; shift || true + local status_text="$1"; shift || true + local emoji="$1"; shift || true + local STATUS_UPPER + STATUS_UPPER=$(printf "%s" "$status_text" | tr '[:lower:]' '[:upper:]') + case "$level" in + OK) + { + printf "Environment status: " + color_green; printf "%s %s\n" "$STATUS_UPPER" "$emoji"; color_reset + } >&3 + ;; + WARN) + { + printf "Environment status: " + color_yellow; printf "%s %s\n" "$STATUS_UPPER" "$emoji"; color_reset + } >&3 + ;; + ERR) + { + printf "Environment status: " + color_red; printf "%s 
%s\n" "$STATUS_UPPER" "$emoji"; color_reset + } >&3 + ;; + *) + printf "Environment status: %s %s\n" "$STATUS_UPPER" "$emoji" >&3 + ;; + esac +} + +has_pyenv() { + command -v pyenv >/dev/null 2>&1 +} + +has_pyenv_virtualenv() { + has_pyenv && pyenv commands 2>/dev/null | grep -Eq '^[[:space:]]*virtualenv([[:space:]]|$)' +} + +read_pyproject_python_constraint() { + if [[ ! -f pyproject.toml ]]; then + return 1 + fi + sed -n '/^\[tool\.poetry\.dependencies\]/,/^\[/p' pyproject.toml \ + | grep -E '^[[:space:]]*python[[:space:]]*=' | head -n1 \ + | sed -E 's/^[^=]*=[[:space:]]*"?([^"#]+)"?.*/\1/' \ + | tr -d '[:space:]' +} + +resolve_exact_python_version() { + echo "$1" | sed -n 's/^[^0-9]*//p' | grep -Eo '[0-9]+(\.[0-9]+){2}' | head -n1 +} + +read_project_name() { + if [[ ! -f pyproject.toml ]]; then + return 1 + fi + sed -n '/^\[tool\.poetry\]/,/^\[/p' pyproject.toml \ + | grep -E '^[[:space:]]*name[[:space:]]*=' | head -n1 \ + | sed -E 's/^[^=]*=[[:space:]]*"?([^"#]+)"?.*/\1/' \ + | tr -d '[:space:]' +} + +read_local_env_name() { + if [[ -f .python-version ]]; then + head -n1 .python-version | tr -d '[:space:]' + fi +} + +check_project_virtualenv() { + local version="$1" + local env_name local_name project_name + local_name=$(read_local_env_name || true) + project_name=$(read_project_name || true) + + if [[ -n "${local_name:-}" ]]; then + env_name="$local_name" + elif [[ -n "${project_name:-}" ]]; then + env_name="$project_name" + echo "project name: $env_name" + else + echo "env name: MISSING" + return 1 + fi + + # Prefer explicit match: version/envs/name should appear in pyenv list + local env_entry="${version}/envs/${env_name}" + if pyenv versions --bare | grep -Fxq "$env_entry"; then + echo "pyenv entry ${env_entry}: OK" + return 0 + fi + # Fallback: some pyenv setups require prefix check for env entries + if pyenv prefix "$env_entry" >/dev/null 2>&1; then + echo "pyenv entry ${env_entry}: OK" + return 0 + fi + echo "pyenv entry ${env_entry}: MISSING" + return 1 
+} + +create_project_virtualenv() { + local version="$1" + local env_name local_name project_name + local_name=$(read_local_env_name || true) + project_name=$(read_project_name || true) + + if [[ -n "${local_name:-}" ]]; then + env_name="$local_name" + elif [[ -n "${project_name:-}" ]]; then + env_name="$project_name" + else + echo "env name: MISSING" + return 1 + fi + + local env_entry="${version}/envs/${env_name}" + print_status_compact WARN "missing -> installing ${env_entry}" "🟡" + pyenv virtualenv "$version" "$env_name" + pyenv local "$env_name" + local prefix + prefix=$(pyenv prefix "$env_name") + "$prefix/bin/python" -m pip install --upgrade pip + "$prefix/bin/python" -m pip install "poetry==2.1.3" + + # Install project dependencies + print_status_compact WARN "installing dependencies" "📦" + "$prefix/bin/poetry" install + DID_ACTION=1 + + # Verify + if pyenv versions --bare | grep -Fxq "$env_entry" || pyenv prefix "$env_entry" >/dev/null 2>&1; then + echo "pyenv entry ${env_entry}: OK" + return 0 + fi + echo "pyenv entry ${env_entry}: MISSING" + return 1 +} + +check_python_version_in_pyenv() { + local constraint + constraint=$(read_pyproject_python_constraint || true) + if [[ -z "${constraint:-}" ]]; then + echo "pyproject/python: MISSING" + return 1 + fi + echo "python constraint: $constraint" + local version + version=$(resolve_exact_python_version "$constraint" || true) + if [[ -z "${version:-}" ]]; then + echo "python version: UNRESOLVED (need x.y.z in constraint)" + return 1 + fi + # Check presence via pyenv list (base), any env under that version, or prefix fallback + if pyenv versions --bare | grep -Fxq "$version"; then + echo "pyenv entry $version: OK" + return 0 + fi + if pyenv versions --bare | grep -Eq "^${version}/envs/"; then + echo "pyenv entry ${version} (via envs): OK" + return 0 + fi + if pyenv prefix "$version" >/dev/null 2>&1; then + echo "pyenv entry $version: OK" + return 0 + fi + echo "pyenv entry $version: MISSING" + return 1 +} + 
+main() { + local missing=0 + + if ! has_pyenv; then + missing=1 + fi + if ! has_pyenv_virtualenv; then + missing=1 + fi + + # Conditional next step: only verify Python version in pyenv if basics are present + if [[ "$missing" -eq 0 ]]; then + if check_python_version_in_pyenv; then + # If Python base version exists, verify project virtualenv association + local constraint version env_name local_name project_name + constraint=$(read_pyproject_python_constraint || true) + version=$(resolve_exact_python_version "$constraint" || true) + if ! check_project_virtualenv "$version"; then + # Attempt to create the missing env + if ! create_project_virtualenv "$version"; then + missing=1 + fi + fi + + # Always ensure pyenv local is set (creates/updates .python-version) + if [[ "$missing" -eq 0 ]]; then + local_name=$(read_local_env_name || true) + project_name=$(read_project_name || true) + if [[ -n "${local_name:-}" ]]; then + env_name="$local_name" + elif [[ -n "${project_name:-}" ]]; then + env_name="$project_name" + fi + + if [[ -n "${env_name:-}" ]]; then + pyenv local "$env_name" + fi + fi + else + missing=1 + fi + fi + + # Emit a single concise status line to original stdout (fd 3) + if [[ "$missing" -eq 0 ]]; then + if [[ "$DID_ACTION" -eq 1 ]]; then + print_status_compact OK "ready (actions performed)" "🟢" + else + print_status_compact OK "healthy" "🟢" + fi + else + print_status_compact ERR "issues detected" "🔴" + fi + + exit "$missing" +} + +main "$@" diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/__init__.py index a9d86d4..d888249 100644 --- a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/__init__.py +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/__init__.py @@ -1,3 +1,4 @@ # read version from installed package from importlib.metadata import version -__version__ = version("{{ 
cookiecutter.__package_slug }}") \ No newline at end of file + +__version__ = version("{{ cookiecutter.__package_slug }}") diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/task_lists.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/task_lists.py new file mode 100644 index 0000000..a1ebb1b --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/task_lists.py @@ -0,0 +1,23 @@ +from typing import List + +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList +from {{ cookiecutter.__package_slug }}.domain.services.task_list_service import ( + TaskListService as DomainTaskListService, +) + + +class TaskListUseCases: + """Application use cases for TaskList. + + Coordinates request/response boundaries and delegates domain logic to + the `TaskListService`. 
+ """ + + def __init__(self, service: DomainTaskListService) -> None: + self._service = service + + def create(self, name: str) -> TaskList: + return self._service.create(name) + + def list(self, *, offset: int = 0, limit: int = 100) -> List[TaskList]: + return self._service.list(offset=offset, limit=limit) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/tasks.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/tasks.py new file mode 100644 index 0000000..414f14e --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/application/tasks.py @@ -0,0 +1,29 @@ +from typing import List +from uuid import UUID + +from {{ cookiecutter.__package_slug }}.domain.entities.task import Task +from {{ cookiecutter.__package_slug }}.domain.services.task_service import TaskService as DomainTaskService + + +class TaskUseCases: + """Application use cases for Task. + + Coordinates request/response boundaries and delegates domain logic to + the `TaskService`. 
+ """ + + def __init__(self, service: DomainTaskService) -> None: + self._service = service + + def add( + self, task_list_id: UUID, title: str, description: str | None = None + ) -> Task: + return self._service.add(task_list_id, title, description) + + def complete(self, task_id: UUID) -> Task: + return self._service.complete(task_id) + + def list( + self, task_list_id: UUID, *, offset: int = 0, limit: int = 100 + ) -> List[Task]: + return self._service.list(task_list_id, offset=offset, limit=limit) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/task.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/task.py new file mode 100644 index 0000000..521806c --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/task.py @@ -0,0 +1,32 @@ +from datetime import datetime, timezone +from typing import Optional +from uuid import UUID, uuid4 + +from pydantic import BaseModel, Field, model_validator + + +class Task(BaseModel): + """Domain entity representing a single task within a task list.""" + + id: UUID = Field(default_factory=uuid4) + task_list_id: UUID + title: str = Field(min_length=1, max_length=200) + description: Optional[str] = Field(default=None, max_length=1000) + is_completed: bool = False + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + completed_at: Optional[datetime] = None + + 
@model_validator(mode="after") + def _sync_completed_at(self) -> "Task": + """Ensure completed_at is aligned with is_completed state.""" + if self.is_completed and self.completed_at is None: + self.completed_at = datetime.now(timezone.utc) + if not self.is_completed: + self.completed_at = None + return self + + def mark_completed(self) -> None: + """Mark the task as completed and set completion timestamp if needed.""" + if not self.is_completed: + self.is_completed = True + self.completed_at = datetime.now(timezone.utc) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/task_list.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/task_list.py new file mode 100644 index 0000000..0ed11fb --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/entities/task_list.py @@ -0,0 +1,21 @@ +from datetime import datetime, timezone +from typing import Optional +from uuid import UUID, uuid4 + +from pydantic import BaseModel, Field + + +class TaskList(BaseModel): + """Domain entity representing a list of tasks.""" + + id: UUID = Field(default_factory=uuid4) + name: str = Field(min_length=1, max_length=120) + created_at: datetime = Field(default_factory=lambda: datetime.now(timezone.utc)) + updated_at: Optional[datetime] = None + + def rename(self, new_name: str) -> None: + """Rename the task list ensuring non-empty name and update timestamp.""" + if not new_name or not new_name.strip(): + raise ValueError("name cannot be empty") + self.name = new_name + self.updated_at = datetime.now(timezone.utc) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/repositories/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/repositories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug 
}}/domain/repositories/task_list_repository.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/repositories/task_list_repository.py new file mode 100644 index 0000000..30a4ca8 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/repositories/task_list_repository.py @@ -0,0 +1,19 @@ +from typing import List, Optional, Protocol, runtime_checkable +from uuid import UUID + +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList + + +@runtime_checkable +class TaskListRepository(Protocol): + """Abstraction for persisting and retrieving TaskList aggregates.""" + + def get(self, task_list_id: UUID) -> Optional[TaskList]: ... + + def list(self, *, offset: int = 0, limit: int = 100) -> List[TaskList]: ... + + def create(self, task_list: TaskList) -> TaskList: ... + + def update(self, task_list: TaskList) -> TaskList: ... + + def delete(self, task_list_id: UUID) -> bool: ... diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/repositories/task_repository.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/repositories/task_repository.py new file mode 100644 index 0000000..8e5d319 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/repositories/task_repository.py @@ -0,0 +1,21 @@ +from typing import List, Optional, Protocol, runtime_checkable +from uuid import UUID + +from {{ cookiecutter.__package_slug }}.domain.entities.task import Task + + +@runtime_checkable +class TaskRepository(Protocol): + """Abstraction for persisting and retrieving Task entities.""" + + def get(self, task_id: UUID) -> Optional[Task]: ... + + def list_by_task_list( + self, task_list_id: UUID, *, offset: int = 0, limit: int = 100 + ) -> List[Task]: ... + + def create(self, task: Task) -> Task: ... + + def update(self, task: Task) -> Task: ... 
+ + def delete(self, task_id: UUID) -> bool: ... diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/task_list_service.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/task_list_service.py new file mode 100644 index 0000000..cc15eed --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/task_list_service.py @@ -0,0 +1,28 @@ +from datetime import datetime, timezone +from typing import List + +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList +from {{ cookiecutter.__package_slug }}.domain.repositories.task_list_repository import TaskListRepository + + +class TaskListService: + """Domain service for operations on TaskList aggregates. + + This encapsulates business rules around creating and listing task lists, + relying only on the repository interface. + """ + + def __init__(self, repo: TaskListRepository) -> None: + self._repo = repo + + def create(self, name: str) -> TaskList: + """Create a new task list with the provided name. + + Ensures timestamps are set using UTC. 
+ """ + entity = TaskList(name=name, created_at=datetime.now(timezone.utc)) + return self._repo.create(entity) + + def list(self, *, offset: int = 0, limit: int = 100) -> List[TaskList]: + """Return a paginated collection of task lists.""" + return self._repo.list(offset=offset, limit=limit) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/task_service.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/task_service.py new file mode 100644 index 0000000..e652036 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/services/task_service.py @@ -0,0 +1,46 @@ +from datetime import datetime, timezone +from typing import List +from uuid import UUID + +from {{ cookiecutter.__package_slug }}.domain.entities.task import Task +from {{ cookiecutter.__package_slug }}.domain.repositories.task_repository import TaskRepository + + +class TaskService: + """Domain service for operations on Task entities. + + Provides orchestration for adding, completing and listing tasks while + deferring persistence to the repository interface. + """ + + def __init__(self, repo: TaskRepository) -> None: + self._repo = repo + + def add( + self, task_list_id: UUID, title: str, description: str | None = None + ) -> Task: + """Create and persist a new task within a given task list.""" + entity = Task( + task_list_id=task_list_id, + title=title, + description=description, + created_at=datetime.now(timezone.utc), + ) + return self._repo.create(entity) + + def complete(self, task_id: UUID) -> Task: + """Mark a task as completed and persist the change. + + Raises KeyError if the task does not exist. 
+ """ + existing = self._repo.get(task_id) + if existing is None: + raise KeyError("Task not found") + existing.mark_completed() + return self._repo.update(existing) + + def list( + self, task_list_id: UUID, *, offset: int = 0, limit: int = 100 + ) -> List[Task]: + """Return a paginated collection of tasks for a given list.""" + return self._repo.list_by_task_list(task_list_id, offset=offset, limit=limit) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/shared/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/domain/shared/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/container.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/container.py new file mode 100644 index 0000000..64dcaf0 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/container.py @@ -0,0 +1,42 @@ +from dependency_injector import containers, providers +from sqlalchemy import create_engine +from sqlalchemy.orm import Session, sessionmaker + +from {{ cookiecutter.__package_slug }}.infrastructure.settings import Settings +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.database import Base + + +def _create_engine_for_url(database_url: str): + """Create a SQLAlchemy engine with dialect-specific options. + + - For sqlite, we must pass check_same_thread=False. + - For Postgres (and others), use default options. 
+ """ + if database_url.startswith("sqlite"): + return create_engine(database_url, connect_args={"check_same_thread": False}) + return create_engine(database_url) + + +class Container(containers.DeclarativeContainer): + """Application IoC container for engine, sessions and configuration.""" + + wiring_config = containers.WiringConfiguration(modules=[]) + + settings = providers.Singleton(Settings) + + engine = providers.Singleton( + _create_engine_for_url, + database_url=providers.Callable(lambda s: s.DATABASE_URL, settings), + ) + + session_factory = providers.Singleton( + sessionmaker, + autocommit=False, + autoflush=False, + bind=engine, + ) + + session = providers.Factory(Session, bind=engine) + init_database = providers.Callable( + lambda e: Base.metadata.create_all(bind=e), engine + ) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/database.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/database.py new file mode 100644 index 0000000..6d6e58c --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/database.py @@ -0,0 +1,7 @@ +from __future__ import annotations + +from sqlalchemy.orm import DeclarativeBase + + +class Base(DeclarativeBase): + pass diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/models/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/models/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ 
cookiecutter.__package_slug }}/infrastructure/persistence/models/task.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/models/task.py new file mode 100644 index 0000000..95fd6f4 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/models/task.py @@ -0,0 +1,54 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Optional +from uuid import UUID + +from sqlalchemy import Boolean, DateTime, ForeignKey, String +from sqlalchemy.orm import Mapped, mapped_column + +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.database import Base +from {{ cookiecutter.__package_slug }}.domain.entities.task import Task + + +class TaskModel(Base): + """SQLAlchemy ORM model for Task entity.""" + + __tablename__ = "tasks" + + id: Mapped[UUID] = mapped_column(primary_key=True) + task_list_id: Mapped[UUID] = mapped_column( + ForeignKey("task_lists.id"), nullable=False + ) + title: Mapped[str] = mapped_column(String(200), nullable=False) + description: Mapped[Optional[str]] = mapped_column(String(1000), nullable=True) + is_completed: Mapped[bool] = mapped_column(Boolean, default=False, nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False + ) + completed_at: Mapped[Optional[datetime]] = mapped_column( + DateTime(timezone=True), nullable=True + ) + + @staticmethod + def from_domain(entity: Task) -> "TaskModel": + return TaskModel( + id=entity.id, + task_list_id=entity.task_list_id, + title=entity.title, + description=entity.description, + is_completed=entity.is_completed, + created_at=entity.created_at, + completed_at=entity.completed_at, + ) + + def to_domain(self) -> Task: + return Task( + id=self.id, + task_list_id=self.task_list_id, + title=self.title, + description=self.description, + is_completed=self.is_completed, + created_at=self.created_at, + 
completed_at=self.completed_at, + ) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/models/task_list.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/models/task_list.py new file mode 100644 index 0000000..c7c8996 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/models/task_list.py @@ -0,0 +1,43 @@ +from __future__ import annotations + +from datetime import datetime +from typing import Optional +from uuid import UUID + +from sqlalchemy import DateTime, String +from sqlalchemy.orm import Mapped, mapped_column + +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.database import Base +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList + + +class TaskListModel(Base): + """SQLAlchemy ORM model for TaskList aggregate.""" + + __tablename__ = "task_lists" + + id: Mapped[UUID] = mapped_column(primary_key=True) + name: Mapped[str] = mapped_column(String(120), nullable=False) + created_at: Mapped[datetime] = mapped_column( + DateTime(timezone=True), nullable=False + ) + updated_at: Mapped[Optional[datetime]] = mapped_column( + DateTime(timezone=True), nullable=True + ) + + @staticmethod + def from_domain(entity: TaskList) -> "TaskListModel": + return TaskListModel( + id=entity.id, + name=entity.name, + created_at=entity.created_at, + updated_at=entity.updated_at, + ) + + def to_domain(self) -> TaskList: + return TaskList( + id=self.id, + name=self.name, + created_at=self.created_at, + updated_at=self.updated_at, + ) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ 
cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/task_list_repository_rds.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/task_list_repository_rds.py new file mode 100644 index 0000000..91cd417 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/task_list_repository_rds.py @@ -0,0 +1,57 @@ +from typing import List, Optional +from uuid import UUID + +from sqlalchemy import select +from sqlalchemy.orm import Session + +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList +from {{ cookiecutter.__package_slug }}.domain.repositories.task_list_repository import TaskListRepository +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.models.task_list import TaskListModel + + +class TaskListRepositoryRds(TaskListRepository): + """Relational DB repository for TaskList using SQLAlchemy Session.""" + + def __init__(self, session: Session) -> None: + self._session = session + + def get(self, task_list_id: UUID) -> Optional[TaskList]: + model = self._session.get(TaskListModel, task_list_id) + return model.to_domain() if model else None + + def list(self, *, offset: int = 0, limit: int = 100) -> List[TaskList]: + stmt = select(TaskListModel).offset(offset).limit(limit) + rows = self._session.execute(stmt).scalars().all() + return [m.to_domain() for m in rows] + + def create(self, task_list: TaskList) -> TaskList: + """Persist a new TaskList and return the stored entity.""" + model = TaskListModel.from_domain(task_list) + self._session.add(model) + self._session.flush() + self._session.refresh(model) + return model.to_domain() + + def update(self, task_list: TaskList) -> TaskList: + """Update an existing TaskList; raises KeyError if not found.""" + existing = self._session.get(TaskListModel, task_list.id) + if existing is None: + 
raise KeyError("TaskList not found") + existing.name = task_list.name + existing.created_at = task_list.created_at + existing.updated_at = task_list.updated_at + self._session.flush() + self._session.refresh(existing) + return existing.to_domain() + + def delete(self, task_list_id: UUID) -> bool: + """Delete a TaskList by id and return True if it existed.""" + model = self._session.get(TaskListModel, task_list_id) + if model is None: + return False + self._session.delete(model) + # Ensure deletion is visible within the same session + self._session.flush() + # Remove from identity map so subsequent get() won't return cached instance + self._session.expunge(model) + return True diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/task_repository_rds.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/task_repository_rds.py new file mode 100644 index 0000000..49da422 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/task_repository_rds.py @@ -0,0 +1,67 @@ +from typing import List, Optional +from uuid import UUID + +from sqlalchemy import select +from sqlalchemy.orm import Session + +from {{ cookiecutter.__package_slug }}.domain.entities.task import Task +from {{ cookiecutter.__package_slug }}.domain.repositories.task_repository import TaskRepository +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.models.task import TaskModel + + +class TaskRepositoryRds(TaskRepository): + """Relational DB repository for Task using SQLAlchemy Session.""" + + def __init__(self, session: Session) -> None: + self._session = session + + def get(self, task_id: UUID) -> Optional[Task]: + model = self._session.get(TaskModel, task_id) + return model.to_domain() if model else None + + def list_by_task_list( + self, task_list_id: UUID, *, offset: int = 0, limit: 
int = 100 + ) -> List[Task]: + stmt = ( + select(TaskModel) + .where(TaskModel.task_list_id == task_list_id) + .offset(offset) + .limit(limit) + ) + rows = self._session.execute(stmt).scalars().all() + return [m.to_domain() for m in rows] + + def create(self, task: Task) -> Task: + """Persist a new Task and return the stored entity.""" + model = TaskModel.from_domain(task) + self._session.add(model) + self._session.flush() + self._session.refresh(model) + return model.to_domain() + + def update(self, task: Task) -> Task: + """Update an existing Task; raises KeyError if not found.""" + existing = self._session.get(TaskModel, task.id) + if existing is None: + raise KeyError("Task not found") + existing.task_list_id = task.task_list_id + existing.title = task.title + existing.description = task.description + existing.is_completed = task.is_completed + existing.created_at = task.created_at + existing.completed_at = task.completed_at + self._session.flush() + self._session.refresh(existing) + return existing.to_domain() + + def delete(self, task_id: UUID) -> bool: + """Delete a Task by id and return True if it existed.""" + model = self._session.get(TaskModel, task_id) + if model is None: + return False + self._session.delete(model) + # Ensure deletion is visible within the same session + self._session.flush() + # Remove from identity map so subsequent get() won't return cached instance + self._session.expunge(model) + return True diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/settings.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/settings.py new file mode 100644 index 0000000..e8c2ca2 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/settings.py @@ -0,0 +1,11 @@ +from pydantic_settings import BaseSettings, SettingsConfigDict + + +class Settings(BaseSettings): + """Application settings loaded from environment (.env 
supported).""" + + model_config = SettingsConfigDict( + env_file=".env", env_prefix="{{ cookiecutter.__package_slug | upper }}_", case_sensitive=False + ) + + DATABASE_URL: str = "sqlite:///./{{ cookiecutter.__package_slug }}.db" diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/__init__.py new file mode 100644 index 0000000..c4fd56b --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/__init__.py @@ -0,0 +1 @@ +# API v1 endpoints diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_create_in.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_create_in.py new file mode 100644 index 0000000..37c1e21 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_create_in.py @@ -0,0 +1,11 @@ +from uuid import UUID + +from pydantic import BaseModel, Field + + +class TaskCreateIn(BaseModel): + """Input payload to create a Task within a list.""" + + task_list_id: UUID + title: str = Field(min_length=1, max_length=200) + description: str | None = Field(default=None, max_length=1000) diff --git a/{{ 
cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_list_create_in.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_list_create_in.py new file mode 100644 index 0000000..17cf836 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_list_create_in.py @@ -0,0 +1,7 @@ +from pydantic import BaseModel, Field + + +class TaskListCreateIn(BaseModel): + """Input payload to create a TaskList.""" + + name: str = Field(min_length=1, max_length=120) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_list_out.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_list_out.py new file mode 100644 index 0000000..3b5ba4f --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_list_out.py @@ -0,0 +1,14 @@ +from pydantic import BaseModel + +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList + + +class TaskListOut(BaseModel): + """Output model for TaskList endpoints.""" + + id: str + name: str + + @staticmethod + def from_domain(entity: TaskList) -> "TaskListOut": + return TaskListOut(id=str(entity.id), name=entity.name) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_out.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_out.py new file mode 100644 index 0000000..67a9fcc --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/schemas/task_out.py @@ -0,0 +1,21 @@ +from pydantic import BaseModel + +from {{ cookiecutter.__package_slug 
}}.domain.entities.task import Task + + +class TaskOut(BaseModel): + """Output model for Task endpoints.""" + + id: str + task_list_id: str + title: str + is_completed: bool + + @staticmethod + def from_domain(entity: Task) -> "TaskOut": + return TaskOut( + id=str(entity.id), + task_list_id=str(entity.task_list_id), + title=entity.title, + is_completed=entity.is_completed, + ) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/task_lists.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/task_lists.py new file mode 100644 index 0000000..56a294b --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/task_lists.py @@ -0,0 +1,30 @@ +from typing import List + +from fastapi import APIRouter, Depends + +from {{ cookiecutter.__package_slug }}.application.task_lists import TaskListUseCases +from {{ cookiecutter.__package_slug }}.infrastructure.web.api.v1.schemas.task_list_create_in import TaskListCreateIn +from {{ cookiecutter.__package_slug }}.infrastructure.web.api.v1.schemas.task_list_out import TaskListOut +from {{ cookiecutter.__package_slug }}.infrastructure.web.dependencies.services import get_task_list_use_cases + + +router = APIRouter(prefix="/task-lists", tags=["task-lists"]) + + +@router.post("/", response_model=TaskListOut, summary="Create a task list") +def create( + payload: TaskListCreateIn, + use_cases: TaskListUseCases = Depends(get_task_list_use_cases), +) -> TaskListOut: + created = use_cases.create(payload.name) + return TaskListOut.from_domain(created) + + +@router.get("/", response_model=List[TaskListOut], summary="List task lists") +def list_( + offset: int = 0, + limit: int = 100, + use_cases: TaskListUseCases = Depends(get_task_list_use_cases), +) -> List[TaskListOut]: + items = use_cases.list(offset=offset, limit=limit) + return [TaskListOut.from_domain(x) for x in items] 
diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/tasks.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/tasks.py new file mode 100644 index 0000000..2e3a24f --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/api/v1/tasks.py @@ -0,0 +1,48 @@ +from typing import List +from uuid import UUID + +from fastapi import APIRouter, Depends + +from {{ cookiecutter.__package_slug }}.application.tasks import TaskUseCases +from {{ cookiecutter.__package_slug }}.infrastructure.web.api.v1.schemas.task_create_in import TaskCreateIn +from {{ cookiecutter.__package_slug }}.infrastructure.web.api.v1.schemas.task_out import TaskOut +from {{ cookiecutter.__package_slug }}.infrastructure.web.dependencies.services import get_task_use_cases + + +router = APIRouter(prefix="/tasks", tags=["tasks"]) + + +# Removed local DTOs; importing from schemas instead + + +@router.post("/", response_model=TaskOut, summary="Create a task") +def create( + payload: TaskCreateIn, + use_cases: TaskUseCases = Depends(get_task_use_cases), +) -> TaskOut: + created = use_cases.add(payload.task_list_id, payload.title, payload.description) + return TaskOut.from_domain(created) + + +@router.post("/{task_id}/complete", response_model=TaskOut, summary="Complete a task") +def complete( + task_id: UUID, + use_cases: TaskUseCases = Depends(get_task_use_cases), +) -> TaskOut: + updated = use_cases.complete(task_id) + return TaskOut.from_domain(updated) + + +@router.get( + "/by-list/{task_list_id}", + response_model=List[TaskOut], + summary="List tasks by list id", +) +def list_by_list( + task_list_id: UUID, + offset: int = 0, + limit: int = 100, + use_cases: TaskUseCases = Depends(get_task_use_cases), +) -> List[TaskOut]: + items = use_cases.list(task_list_id, offset=offset, limit=limit) + return [TaskOut.from_domain(x) for x in items] diff --git 
a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/db.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/db.py new file mode 100644 index 0000000..37e14e9 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/db.py @@ -0,0 +1,18 @@ +from collections.abc import Generator + +from fastapi import Request +from sqlalchemy.orm import Session + + +def get_session(request: Request) -> Generator[Session, None, None]: + """FastAPI dependency that yields a SQLAlchemy Session from app container.""" + session_factory = request.app.container.session_factory() # type: ignore[attr-defined] + session: Session = session_factory() + try: + yield session + session.commit() + except Exception: + session.rollback() + raise + finally: + session.close() diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/services.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/services.py new file mode 100644 index 0000000..44cf68c --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/dependencies/services.py @@ -0,0 +1,32 @@ +from fastapi import Depends +from sqlalchemy.orm import Session + +from {{ cookiecutter.__package_slug }}.application.task_lists import TaskListUseCases +from {{ cookiecutter.__package_slug }}.application.tasks import TaskUseCases +from {{ cookiecutter.__package_slug }}.domain.services.task_list_service import ( + TaskListService as 
DomainTaskListService, +) +from {{ cookiecutter.__package_slug }}.domain.services.task_service import TaskService as DomainTaskService +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.repositories.task_list_repository_rds import ( + TaskListRepositoryRds, +) +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.repositories.task_repository_rds import ( + TaskRepositoryRds, +) +from {{ cookiecutter.__package_slug }}.infrastructure.web.dependencies.db import get_session + + +def get_task_list_use_cases( + session: Session = Depends(get_session), +) -> TaskListUseCases: + """Build TaskList use cases with RDS repository and domain service.""" + repo = TaskListRepositoryRds(session) + svc = DomainTaskListService(repo) + return TaskListUseCases(svc) + + +def get_task_use_cases(session: Session = Depends(get_session)) -> TaskUseCases: + """Build Task use cases with RDS repository and domain service.""" + repo = TaskRepositoryRds(session) + svc = DomainTaskService(repo) + return TaskUseCases(svc) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/__init__.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/routes.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/routes.py new file mode 100644 index 0000000..cdc498f --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/routes.py @@ -0,0 +1,15 @@ +from pathlib import Path + +from fastapi import APIRouter, Request +from fastapi.responses import HTMLResponse +from fastapi.templating import Jinja2Templates + + +router = APIRouter(tags=["ui"]) + +templates = Jinja2Templates(directory=str(Path(__file__).parent / "templates")) + + 
+@router.get("/", response_class=HTMLResponse, include_in_schema=False) +def ui_page(request: Request) -> HTMLResponse: + return templates.TemplateResponse("ui.html", {"request": request}) diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/templates/ui.html b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/templates/ui.html new file mode 100644 index 0000000..0426ab5 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/infrastructure/web/ui/templates/ui.html @@ -0,0 +1,241 @@ + + + + + + Mypkg Tasks UI + + + + + + +
+
+
+
+
+
Nueva lista
+
+ + +
+
+ Crea una lista para organizar tus tareas. +
+
+
+
+ +
+
+
+
+
Tareas
+
+ + +
+
+ +
+
+ +
+
+ +
+
+ +
+
+ +
    +
    +
    +
    +
    +
    + + + + + diff --git a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/{{ cookiecutter.__package_slug }}.py b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/{{ cookiecutter.__package_slug }}.py index e69de29..77ed78d 100644 --- a/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/{{ cookiecutter.__package_slug }}.py +++ b/{{ cookiecutter.__package_slug }}/src/{{ cookiecutter.__package_slug }}/{{ cookiecutter.__package_slug }}.py @@ -0,0 +1,37 @@ +from fastapi import FastAPI +from importlib.metadata import version, metadata + +from {{ cookiecutter.__package_slug }}.infrastructure.container import Container +from {{ cookiecutter.__package_slug }}.infrastructure.web.api.v1.task_lists import router as task_lists_router +from {{ cookiecutter.__package_slug }}.infrastructure.web.api.v1.tasks import router as tasks_router +from {{ cookiecutter.__package_slug }}.infrastructure.web.ui.routes import router as ui_router + + +def create_app() -> FastAPI: + """FastAPI application factory with container-based initialization.""" + container = Container() + container.init_database() + + # Get package metadata + package_version = version("{{ cookiecutter.__package_slug }}") + package_metadata = metadata("{{ cookiecutter.__package_slug }}") + package_name = package_metadata.get("Name", "{{ cookiecutter.__package_slug }}") + package_description = package_metadata.get( + "Summary", "A package for doing great things!" 
+ ) + + app = FastAPI( + title=package_name.title(), + version=package_version, + description=package_description, + ) + app.container = container # type: ignore[attr-defined] + + app.include_router(task_lists_router) + app.include_router(tasks_router) + app.include_router(ui_router) + + return app + + +app = create_app() diff --git a/{{ cookiecutter.__package_slug }}/tests/test_{{ cookiecutter.__package_slug }}.py b/{{ cookiecutter.__package_slug }}/tests/test_{{ cookiecutter.__package_slug }}.py deleted file mode 100644 index 0d81e18..0000000 --- a/{{ cookiecutter.__package_slug }}/tests/test_{{ cookiecutter.__package_slug }}.py +++ /dev/null @@ -1 +0,0 @@ -from {{ cookiecutter.__package_slug }} import {{ cookiecutter.__package_slug }} diff --git a/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/test_task_list_repository_rds.py b/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/test_task_list_repository_rds.py new file mode 100644 index 0000000..5ff062a --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/test_task_list_repository_rds.py @@ -0,0 +1,52 @@ +from datetime import datetime, timezone + +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.database import Base +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.repositories.task_list_repository_rds import ( + TaskListRepositoryRds, +) + + +def setup_in_memory_db(): + engine = create_engine( + "sqlite+pysqlite:///:memory:", connect_args={"check_same_thread": False} + ) + Base.metadata.create_all(bind=engine) + SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + return engine, 
SessionLocal + + +def test_task_list_crud_and_pagination(): + engine, SessionLocal = setup_in_memory_db() + with SessionLocal() as session: # type: Session + repo = TaskListRepositoryRds(session) + + a = repo.create(TaskList(name="A", created_at=datetime.now(timezone.utc))) + b = repo.create(TaskList(name="B", created_at=datetime.now(timezone.utc))) + _c = repo.create(TaskList(name="C", created_at=datetime.now(timezone.utc))) + + all_lists = repo.list() + assert {x.name for x in all_lists} == {"A", "B", "C"} + + page1 = repo.list(offset=0, limit=2) + page2 = repo.list(offset=2, limit=2) + assert len(page1) == 2 + assert len(page2) == 1 + + # get + got = repo.get(a.id) + assert got is not None and got.name == "A" + + # update (rename) + b.rename("B2") + b2 = repo.update(b) + assert b2.name == "B2" + assert b2.updated_at is not None + + # delete existing then try again + assert repo.delete(a.id) is True + assert repo.delete(a.id) is False + assert repo.get(a.id) is None diff --git a/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/test_task_repository_rds.py b/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/test_task_repository_rds.py new file mode 100644 index 0000000..1ecbc86 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/persistence/repositories/test_task_repository_rds.py @@ -0,0 +1,59 @@ +from datetime import datetime, timezone + +from sqlalchemy import create_engine +from sqlalchemy.orm import sessionmaker + +from {{ cookiecutter.__package_slug }}.domain.entities.task import Task +from {{ cookiecutter.__package_slug }}.domain.entities.task_list import TaskList +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.database import Base +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.repositories.task_list_repository_rds import ( + 
TaskListRepositoryRds, +) +from {{ cookiecutter.__package_slug }}.infrastructure.persistence.repositories.task_repository_rds import ( + TaskRepositoryRds, +) + + +def setup_in_memory_db(): + engine = create_engine( + "sqlite+pysqlite:///:memory:", connect_args={"check_same_thread": False} + ) + Base.metadata.create_all(bind=engine) + SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine) + return engine, SessionLocal + + +def test_task_crud_pagination_and_post_delete_get(): + engine, SessionLocal = setup_in_memory_db() + with SessionLocal() as session: # type: Session + task_list_repo = TaskListRepositoryRds(session) + repo = TaskRepositoryRds(session) + + tl = task_list_repo.create( + TaskList(name="Inbox", created_at=datetime.now(timezone.utc)) + ) + t1 = repo.create( + Task(task_list_id=tl.id, title="t1", created_at=datetime.now(timezone.utc)) + ) + t2 = repo.create( + Task(task_list_id=tl.id, title="t2", created_at=datetime.now(timezone.utc)) + ) + _t3 = repo.create( + Task(task_list_id=tl.id, title="t3", created_at=datetime.now(timezone.utc)) + ) + + page1 = repo.list_by_task_list(tl.id, offset=0, limit=2) + page2 = repo.list_by_task_list(tl.id, offset=2, limit=2) + assert len(page1) == 2 + assert len(page2) == 1 + + got = repo.get(t1.id) + assert got is not None and got.title == "t1" + + t1.mark_completed() + t1u = repo.update(t1) + assert t1u.is_completed and t1u.completed_at is not None + + ok = repo.delete(t2.id) + assert ok is True + assert repo.get(t2.id) is None diff --git a/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/web/test_api.py b/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/web/test_api.py new file mode 100644 index 0000000..9a4441f --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/tests/{{ cookiecutter.__package_slug }}/infrastructure/web/test_api.py @@ -0,0 +1,33 @@ +from fastapi.testclient import TestClient + +from {{ 
cookiecutter.__package_slug }}.{{ cookiecutter.__package_slug }} import create_app + + +def test_task_list_and_tasks_endpoints(): + app = create_app() + client = TestClient(app) + + # create list + r = client.post("/task-lists/", json={"name": "Inbox"}) + assert r.status_code == 200 + tl = r.json() + + # list lists + r = client.get("/task-lists/") + assert r.status_code == 200 + assert any(x["name"] == "Inbox" for x in r.json()) + + # create task + r = client.post("/tasks/", json={"task_list_id": tl["id"], "title": "t1"}) + assert r.status_code == 200 + t = r.json() + + # list by list + r = client.get(f"/tasks/by-list/{tl['id']}") + assert r.status_code == 200 + assert len(r.json()) >= 1 + + # complete + r = client.post(f"/tasks/{t['id']}/complete") + assert r.status_code == 200 + assert r.json()["is_completed"] is True diff --git a/{{ cookiecutter.__package_slug }}/tox.ini b/{{ cookiecutter.__package_slug }}/tox.ini new file mode 100644 index 0000000..e491eb1 --- /dev/null +++ b/{{ cookiecutter.__package_slug }}/tox.ini @@ -0,0 +1,12 @@ +[tox] +envlist = py312 +isolated_build = True + +[testenv] +deps = poetry +commands = + poetry install + poetry run pytest tests/ -v --cov=src/{{ cookiecutter.__package_slug }} --cov-report=term-missing + +[testenv:py312] +basepython = python3.12