From 349033edb3af6f3fe6172997de9557644ba81946 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 8 Apr 2026 20:37:10 +0000 Subject: [PATCH 1/2] Initial plan From 3d9395f891564c9b959164048996a8e00b416968 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Wed, 8 Apr 2026 20:48:17 +0000 Subject: [PATCH 2/2] feat: add integration catalog system with catalog files, IntegrationCatalog class, list --catalog flag, upgrade command, integration.yml descriptor, and tests Agent-Logs-Url: https://github.com/github/spec-kit/sessions/bbcd44e8-c69c-4735-adc1-bdf1ce109184 Co-authored-by: mnriem <15701806+mnriem@users.noreply.github.com> --- integrations/CONTRIBUTING.md | 135 ++++ integrations/README.md | 110 +++ integrations/catalog.community.json | 6 + integrations/catalog.json | 250 +++++++ src/specify_cli/__init__.py | 150 +++- src/specify_cli/integrations/catalog.py | 516 ++++++++++++++ .../integrations/test_integration_catalog.py | 642 ++++++++++++++++++ 7 files changed, 1808 insertions(+), 1 deletion(-) create mode 100644 integrations/CONTRIBUTING.md create mode 100644 integrations/README.md create mode 100644 integrations/catalog.community.json create mode 100644 integrations/catalog.json create mode 100644 src/specify_cli/integrations/catalog.py create mode 100644 tests/integrations/test_integration_catalog.py diff --git a/integrations/CONTRIBUTING.md b/integrations/CONTRIBUTING.md new file mode 100644 index 000000000..70944bbc7 --- /dev/null +++ b/integrations/CONTRIBUTING.md @@ -0,0 +1,135 @@ +# Contributing to the Integration Catalog + +This guide covers adding integrations to both the **built-in** and **community** catalogs. + +## Adding a Built-In Integration + +Built-in integrations are maintained by the Spec Kit core team and ship with the CLI. + +### Checklist + +1. **Create the integration subpackage** under `src/specify_cli/integrations//` +2. 
**Implement the integration class** extending `MarkdownIntegration`, `TomlIntegration`, or `SkillsIntegration` +3. **Register the integration** in `src/specify_cli/integrations/__init__.py` +4. **Add tests** under `tests/integrations/test_integration_.py` +5. **Add a catalog entry** in `integrations/catalog.json` +6. **Update documentation** in `AGENTS.md` and `README.md` + +### Catalog Entry Format + +Add your integration to `integrations/catalog.json`: + +```json +{ + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + } +} +``` + +## Adding a Community Integration + +Community integrations are contributed by external developers and listed in `integrations/catalog.community.json` for discovery. + +### Prerequisites + +1. **Working integration** — tested with `specify integration install` +2. **Public repository** — hosted on GitHub or similar +3. **`integration.yml` descriptor** — valid descriptor file (see below) +4. **Documentation** — README with usage instructions +5. 
**License** — open source license file + +### `integration.yml` Descriptor + +Every community integration must include an `integration.yml`: + +```yaml +schema_version: "1.0" +integration: + id: "my-agent" + name: "My Agent" + version: "1.0.0" + description: "Integration for My Agent" + author: "your-name" + repository: "https://github.com/your-name/speckit-my-agent" + license: "MIT" +requires: + speckit_version: ">=0.6.0" + tools: + - name: "my-agent" + version: ">=1.0.0" + required: true +provides: + commands: + - name: "speckit.specify" + file: "templates/speckit.specify.md" + scripts: + - update-context.sh +``` + +### Descriptor Validation Rules + +| Field | Rule | +|-------|------| +| `schema_version` | Must be `"1.0"` | +| `integration.id` | Lowercase alphanumeric + hyphens (`^[a-z0-9-]+$`) | +| `integration.version` | Valid semantic version | +| `requires.speckit_version` | PEP 440 version specifier | +| `provides` | Must include at least one command or script | +| `provides.commands[].name` | String identifier | +| `provides.commands[].file` | Relative path to template file | + +### Submitting to the Community Catalog + +1. **Fork** the [spec-kit repository](https://github.com/github/spec-kit) +2. **Add your entry** to `integrations/catalog.community.json`: + +```json +{ + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "your-name", + "repository": "https://github.com/your-name/speckit-my-agent", + "tags": ["cli"] + } +} +``` + +3. **Open a pull request** with: + - Your catalog entry + - Link to your integration repository + - Confirmation that `integration.yml` is valid + +### Version Updates + +To update your integration version in the catalog: + +1. Release a new version of your integration +2. Open a PR updating the `version` field in `catalog.community.json` +3. 
Ensure backward compatibility or document breaking changes + +## Upgrade Workflow + +The `specify integration upgrade` command supports diff-aware upgrades: + +1. **Hash comparison** — the manifest records SHA-256 hashes of all installed files +2. **Modified file detection** — files changed since installation are flagged +3. **Safe default** — modified files are preserved unless `--force` is used +4. **Clean reinstall** — unmodified files are replaced with the latest version + +```bash +# Upgrade current integration (blocks if files are modified) +specify integration upgrade + +# Force upgrade (overwrites modified files) +specify integration upgrade --force +``` diff --git a/integrations/README.md b/integrations/README.md new file mode 100644 index 000000000..5c7c6ea1e --- /dev/null +++ b/integrations/README.md @@ -0,0 +1,110 @@ +# Spec Kit Integration Catalog + +The integration catalog enables discovery, versioning, and distribution of AI agent integrations for Spec Kit. + +## Catalog Files + +### Built-In Catalog (`catalog.json`) + +Contains integrations that ship with Spec Kit. These are maintained by the core team and always installable. + +### Community Catalog (`catalog.community.json`) + +Community-contributed integrations. Listed for discovery only — users install from the source repositories. 
+ +## CLI Commands + +```bash +# List built-in integrations (default) +specify integration list + +# Browse full catalog (built-in + community) +specify integration list --catalog + +# Install an integration +specify integration install copilot + +# Upgrade the current integration (diff-aware) +specify integration upgrade + +# Upgrade with force (overwrite modified files) +specify integration upgrade --force +``` + +## Integration Descriptor (`integration.yml`) + +Each integration can include an `integration.yml` descriptor that documents its metadata, requirements, and provided commands/scripts: + +```yaml +schema_version: "1.0" +integration: + id: "my-agent" + name: "My Agent" + version: "1.0.0" + description: "Integration for My Agent" + author: "my-org" + repository: "https://github.com/my-org/speckit-my-agent" + license: "MIT" +requires: + speckit_version: ">=0.6.0" + tools: + - name: "my-agent" + version: ">=1.0.0" + required: true +provides: + commands: + - name: "speckit.specify" + file: "templates/speckit.specify.md" + - name: "speckit.plan" + file: "templates/speckit.plan.md" + scripts: + - update-context.sh + - update-context.ps1 +``` + +## Catalog Schema + +Both catalog files follow the same JSON schema: + +```json +{ + "schema_version": "1.0", + "updated_at": "2026-04-08T00:00:00Z", + "catalog_url": "https://...", + "integrations": { + "my-agent": { + "id": "my-agent", + "name": "My Agent", + "version": "1.0.0", + "description": "Integration for My Agent", + "author": "my-org", + "repository": "https://github.com/my-org/speckit-my-agent", + "tags": ["cli"] + } + } +} +``` + +### Required Fields + +| Field | Type | Description | +|-------|------|-------------| +| `schema_version` | string | Must be `"1.0"` | +| `updated_at` | string | ISO 8601 timestamp | +| `integrations` | object | Map of integration ID → metadata | + +### Integration Entry Fields + +| Field | Type | Required | Description | +|-------|------|----------|-------------| +| `id` | string 
| Yes | Unique ID (lowercase alphanumeric + hyphens) | +| `name` | string | Yes | Human-readable display name | +| `version` | string | Yes | Semantic version | +| `description` | string | Yes | One-line description | +| `author` | string | No | Author name or organization | +| `repository` | string | No | Source repository URL | +| `tags` | array | No | Searchable tags (e.g., `["cli", "ide"]`) | + +## Contributing + +See [CONTRIBUTING.md](CONTRIBUTING.md) for how to add integrations to the community catalog. diff --git a/integrations/catalog.community.json b/integrations/catalog.community.json new file mode 100644 index 000000000..47eb6d550 --- /dev/null +++ b/integrations/catalog.community.json @@ -0,0 +1,6 @@ +{ + "schema_version": "1.0", + "updated_at": "2026-04-08T00:00:00Z", + "catalog_url": "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.community.json", + "integrations": {} +} diff --git a/integrations/catalog.json b/integrations/catalog.json new file mode 100644 index 000000000..17955ef00 --- /dev/null +++ b/integrations/catalog.json @@ -0,0 +1,250 @@ +{ + "schema_version": "1.0", + "updated_at": "2026-04-08T00:00:00Z", + "catalog_url": "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.json", + "integrations": { + "claude": { + "id": "claude", + "name": "Claude Code", + "version": "1.0.0", + "description": "Anthropic Claude Code CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "anthropic"] + }, + "copilot": { + "id": "copilot", + "name": "GitHub Copilot", + "version": "1.0.0", + "description": "GitHub Copilot IDE integration with agent commands and prompt files", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide", "github"] + }, + "gemini": { + "id": "gemini", + "name": "Gemini CLI", + "version": "1.0.0", + "description": "Google Gemini CLI integration with TOML command format", + 
"author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "google"] + }, + "cursor-agent": { + "id": "cursor-agent", + "name": "Cursor", + "version": "1.0.0", + "description": "Cursor IDE integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "windsurf": { + "id": "windsurf", + "name": "Windsurf", + "version": "1.0.0", + "description": "Windsurf IDE workflow integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "amp": { + "id": "amp", + "name": "Amp", + "version": "1.0.0", + "description": "Amp CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "codex": { + "id": "codex", + "name": "Codex CLI", + "version": "1.0.0", + "description": "Codex CLI skills-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "skills"] + }, + "qwen": { + "id": "qwen", + "name": "Qwen Code", + "version": "1.0.0", + "description": "Alibaba Qwen Code CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "alibaba"] + }, + "opencode": { + "id": "opencode", + "name": "opencode", + "version": "1.0.0", + "description": "opencode CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "forge": { + "id": "forge", + "name": "Forge", + "version": "1.0.0", + "description": "Forge CLI integration with parameter-based commands", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "kiro-cli": { + "id": "kiro-cli", + "name": "Kiro CLI", + "version": "1.0.0", + "description": "Kiro CLI prompt-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": 
["cli"] + }, + "junie": { + "id": "junie", + "name": "Junie", + "version": "1.0.0", + "description": "Junie by JetBrains CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "jetbrains"] + }, + "auggie": { + "id": "auggie", + "name": "Auggie CLI", + "version": "1.0.0", + "description": "Auggie CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "shai": { + "id": "shai", + "name": "SHAI", + "version": "1.0.0", + "description": "SHAI CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "tabnine": { + "id": "tabnine", + "name": "Tabnine CLI", + "version": "1.0.0", + "description": "Tabnine CLI integration with TOML command format", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "kilocode": { + "id": "kilocode", + "name": "Kilo Code", + "version": "1.0.0", + "description": "Kilo Code IDE workflow integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "roo": { + "id": "roo", + "name": "Roo Code", + "version": "1.0.0", + "description": "Roo Code IDE integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "bob": { + "id": "bob", + "name": "IBM Bob", + "version": "1.0.0", + "description": "IBM Bob IDE integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide", "ibm"] + }, + "trae": { + "id": "trae", + "name": "Trae", + "version": "1.0.0", + "description": "Trae IDE rules-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide"] + }, + "codebuddy": { + "id": "codebuddy", + "name": "CodeBuddy", + "version": "1.0.0", + "description": "CodeBuddy CLI 
integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "qodercli": { + "id": "qodercli", + "name": "Qoder CLI", + "version": "1.0.0", + "description": "Qoder CLI integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "kimi": { + "id": "kimi", + "name": "Kimi Code", + "version": "1.0.0", + "description": "Kimi Code CLI skills-based integration by Moonshot AI", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "skills"] + }, + "pi": { + "id": "pi", + "name": "Pi Coding Agent", + "version": "1.0.0", + "description": "Pi terminal coding agent prompt-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "iflow": { + "id": "iflow", + "name": "iFlow CLI", + "version": "1.0.0", + "description": "iFlow CLI integration by iflow-ai", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli"] + }, + "vibe": { + "id": "vibe", + "name": "Mistral Vibe", + "version": "1.0.0", + "description": "Mistral Vibe CLI prompt-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["cli", "mistral"] + }, + "agy": { + "id": "agy", + "name": "Antigravity", + "version": "1.0.0", + "description": "Antigravity IDE skills-based integration", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["ide", "skills"] + }, + "generic": { + "id": "generic", + "name": "Generic (bring your own agent)", + "version": "1.0.0", + "description": "Generic integration for any agent via --ai-commands-dir", + "author": "spec-kit-core", + "repository": "https://github.com/github/spec-kit", + "tags": ["generic"] + } + } +} diff --git a/src/specify_cli/__init__.py b/src/specify_cli/__init__.py index 11b6e0eda..0494acd58 100644 
--- a/src/specify_cli/__init__.py +++ b/src/specify_cli/__init__.py @@ -1624,7 +1624,9 @@ def _resolve_script_type(project_root: Path, script_type: str | None) -> str: @integration_app.command("list") -def integration_list(): +def integration_list( + catalog: bool = typer.Option(False, "--catalog", help="Browse full catalog (built-in + community)"), +): """List available integrations and installed status.""" from .integrations import INTEGRATION_REGISTRY @@ -1639,6 +1641,47 @@ def integration_list(): current = _read_integration_json(project_root) installed_key = current.get("integration") + if catalog: + from .integrations.catalog import IntegrationCatalog, IntegrationCatalogError + + ic = IntegrationCatalog(project_root) + try: + entries = ic.search() + except IntegrationCatalogError as exc: + console.print(f"[red]Error:[/red] {exc}") + raise typer.Exit(1) + + if not entries: + console.print("[yellow]No integrations found in catalog.[/yellow]") + return + + table = Table(title="Integration Catalog (built-in + community)") + table.add_column("ID", style="cyan") + table.add_column("Name") + table.add_column("Version") + table.add_column("Source") + table.add_column("Status") + + for entry in sorted(entries, key=lambda e: e["id"]): + eid = entry["id"] + cat_name = entry.get("_catalog_name", "") + if eid == installed_key: + status = "[green]installed[/green]" + elif eid in INTEGRATION_REGISTRY: + status = "built-in" + else: + status = "" + table.add_row( + eid, + entry.get("name", eid), + entry.get("version", ""), + cat_name, + status, + ) + + console.print(table) + return + table = Table(title="AI Agent Integrations") table.add_column("Key", style="cyan") table.add_column("Name") @@ -2025,6 +2068,111 @@ def integration_switch( console.print(f"\n[green]✓[/green] Switched to integration '{name}'") +@integration_app.command("upgrade") +def integration_upgrade( + key: str = typer.Argument(None, help="Integration key to upgrade (default: current integration)"), + force: 
bool = typer.Option(False, "--force", help="Force upgrade even if files are modified"), + script: str | None = typer.Option(None, "--script", help="Script type: sh or ps (default: from init-options.json or platform default)"), + integration_options: str | None = typer.Option(None, "--integration-options", help="Options for the integration"), +): + """Upgrade an integration by reinstalling with diff-aware file handling. + + Compares manifest hashes to detect locally modified files and + preserves them unless --force is used. + """ + from .integrations import get_integration + from .integrations.manifest import IntegrationManifest + + project_root = Path.cwd() + + specify_dir = project_root / ".specify" + if not specify_dir.exists(): + console.print("[red]Error:[/red] Not a spec-kit project (no .specify/ directory)") + console.print("Run this command from a spec-kit project root") + raise typer.Exit(1) + + current = _read_integration_json(project_root) + installed_key = current.get("integration") + + if key is None: + if not installed_key: + console.print("[yellow]No integration is currently installed.[/yellow]") + raise typer.Exit(0) + key = installed_key + + if installed_key and installed_key != key: + console.print( + f"[red]Error:[/red] Integration '{key}' is not the currently installed integration ('{installed_key}')." + ) + console.print(f"Use [cyan]specify integration switch {key}[/cyan] instead.") + raise typer.Exit(1) + + integration = get_integration(key) + if integration is None: + console.print(f"[red]Error:[/red] Unknown integration '{key}'") + raise typer.Exit(1) + + manifest_path = project_root / ".specify" / "integrations" / f"{key}.manifest.json" + if not manifest_path.exists(): + console.print(f"[yellow]No manifest found for integration '{key}'. 
Nothing to upgrade.[/yellow]") + console.print(f"Run [cyan]specify integration install {key}[/cyan] to perform a fresh install.") + raise typer.Exit(0) + + try: + old_manifest = IntegrationManifest.load(key, project_root) + except (ValueError, FileNotFoundError) as exc: + console.print(f"[red]Error:[/red] Integration manifest for '{key}' is unreadable: {exc}") + raise typer.Exit(1) + + # Detect modified files via manifest hashes + modified = old_manifest.check_modified() + if modified and not force: + console.print(f"[yellow]⚠[/yellow] {len(modified)} file(s) have been modified since installation:") + for rel in modified: + console.print(f" {rel}") + console.print("\nUse [cyan]--force[/cyan] to overwrite modified files, or resolve manually.") + raise typer.Exit(1) + + selected_script = _resolve_script_type(project_root, script) + + # Phase 1: Teardown old files + console.print(f"Upgrading integration: [cyan]{key}[/cyan]") + removed, skipped = old_manifest.uninstall(project_root, force=force) + if removed: + console.print(f" Removed {len(removed)} old file(s)") + if skipped: + console.print(f" [yellow]Preserved {len(skipped)} modified file(s)[/yellow]") + + # Phase 2: Reinstall + new_manifest = IntegrationManifest(key, project_root, version=get_speckit_version()) + + parsed_options: dict[str, Any] | None = None + if integration_options: + parsed_options = _parse_integration_options(integration, integration_options) + + try: + integration.setup( + project_root, + new_manifest, + parsed_options=parsed_options, + script_type=selected_script, + raw_options=integration_options, + ) + new_manifest.save() + _write_integration_json(project_root, key, selected_script) + _update_init_options_for_integration(project_root, integration, script_type=selected_script) + except Exception as exc: + try: + integration.teardown(project_root, new_manifest, force=True) + except Exception: + pass + console.print(f"[red]Error:[/red] Failed to upgrade integration: {exc}") + raise 
typer.Exit(1) + + name = (integration.config or {}).get("name", key) + console.print(f"\n[green]✓[/green] Integration '{name}' upgraded successfully") + + # ===== Preset Commands ===== diff --git a/src/specify_cli/integrations/catalog.py b/src/specify_cli/integrations/catalog.py new file mode 100644 index 000000000..02a4a480c --- /dev/null +++ b/src/specify_cli/integrations/catalog.py @@ -0,0 +1,516 @@ +"""Integration catalog — discovery, validation, and upgrade support. + +Provides: +- ``IntegrationCatalogEntry`` — single catalog source metadata. +- ``IntegrationCatalog`` — fetches, caches, and searches integration + catalogs (built-in + community). +- ``IntegrationDescriptor`` — loads and validates ``integration.yml``. +""" + +from __future__ import annotations + +import hashlib +import json +import os +import re +from dataclasses import dataclass +from datetime import datetime, timezone +from pathlib import Path +from typing import Any, Dict, List, Optional + +import yaml +from packaging import version as pkg_version + + +# --------------------------------------------------------------------------- +# Errors +# --------------------------------------------------------------------------- + +class IntegrationCatalogError(Exception): + """Raised when a catalog operation fails.""" + + +class IntegrationDescriptorError(Exception): + """Raised when an integration.yml descriptor is invalid.""" + + +# --------------------------------------------------------------------------- +# IntegrationCatalogEntry +# --------------------------------------------------------------------------- + +@dataclass +class IntegrationCatalogEntry: + """Represents a single catalog source in the catalog stack.""" + + url: str + name: str + priority: int + install_allowed: bool + description: str = "" + + +# --------------------------------------------------------------------------- +# IntegrationCatalog +# --------------------------------------------------------------------------- + +class 
IntegrationCatalog: + """Manages integration catalog fetching, caching, and searching.""" + + DEFAULT_CATALOG_URL = ( + "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.json" + ) + COMMUNITY_CATALOG_URL = ( + "https://raw.githubusercontent.com/github/spec-kit/main/integrations/catalog.community.json" + ) + CACHE_DURATION = 3600 # 1 hour + + def __init__(self, project_root: Path) -> None: + self.project_root = project_root + self.cache_dir = project_root / ".specify" / "integrations" / ".cache" + self.cache_file = self.cache_dir / "catalog.json" + self.cache_metadata_file = self.cache_dir / "catalog-metadata.json" + + # -- URL validation --------------------------------------------------- + + @staticmethod + def _validate_catalog_url(url: str) -> None: + from urllib.parse import urlparse + + parsed = urlparse(url) + is_localhost = parsed.hostname in ("localhost", "127.0.0.1", "::1") + if parsed.scheme != "https" and not (parsed.scheme == "http" and is_localhost): + raise IntegrationCatalogError( + f"Catalog URL must use HTTPS (got {parsed.scheme}://). " + "HTTP is only allowed for localhost." + ) + if not parsed.netloc: + raise IntegrationCatalogError( + "Catalog URL must be a valid URL with a host." + ) + + # -- Catalog stack ---------------------------------------------------- + + def _load_catalog_config( + self, config_path: Path + ) -> Optional[List[IntegrationCatalogEntry]]: + """Load catalog stack from a YAML file. + + Returns None when the file does not exist. 
+ + Raises: + IntegrationCatalogError: on invalid content + """ + if not config_path.exists(): + return None + try: + data = yaml.safe_load(config_path.read_text(encoding="utf-8")) or {} + except (yaml.YAMLError, OSError, UnicodeError) as exc: + raise IntegrationCatalogError( + f"Failed to read catalog config {config_path}: {exc}" + ) + catalogs_data = data.get("catalogs", []) + if not catalogs_data: + raise IntegrationCatalogError( + f"Catalog config {config_path} exists but contains no 'catalogs' entries. " + f"Remove the file to use built-in defaults, or add valid catalog entries." + ) + if not isinstance(catalogs_data, list): + raise IntegrationCatalogError( + f"Invalid catalog config: 'catalogs' must be a list, " + f"got {type(catalogs_data).__name__}" + ) + entries: List[IntegrationCatalogEntry] = [] + skipped: List[int] = [] + for idx, item in enumerate(catalogs_data): + if not isinstance(item, dict): + raise IntegrationCatalogError( + f"Invalid catalog entry at index {idx}: " + f"expected a mapping, got {type(item).__name__}" + ) + url = str(item.get("url", "")).strip() + if not url: + skipped.append(idx) + continue + self._validate_catalog_url(url) + try: + priority = int(item.get("priority", idx + 1)) + except (TypeError, ValueError): + raise IntegrationCatalogError( + f"Invalid priority for catalog '{item.get('name', idx + 1)}': " + f"expected integer, got {item.get('priority')!r}" + ) + raw_install = item.get("install_allowed", False) + if isinstance(raw_install, str): + install_allowed = raw_install.strip().lower() in ("true", "yes", "1") + else: + install_allowed = bool(raw_install) + entries.append( + IntegrationCatalogEntry( + url=url, + name=str(item.get("name", f"catalog-{idx + 1}")), + priority=priority, + install_allowed=install_allowed, + description=str(item.get("description", "")), + ) + ) + entries.sort(key=lambda e: e.priority) + if not entries: + raise IntegrationCatalogError( + f"Catalog config {config_path} contains {len(catalogs_data)} 
" + f"entries but none have valid URLs (entries at indices {skipped} " + f"were skipped). Each catalog entry must have a 'url' field." + ) + return entries + + def get_active_catalogs(self) -> List[IntegrationCatalogEntry]: + """Return the ordered list of active integration catalogs. + + Resolution: + 1. ``SPECKIT_INTEGRATION_CATALOG_URL`` env var + 2. Project ```.specify/integration-catalogs.yml``` + 3. User ``~/.specify/integration-catalogs.yml`` + 4. Built-in defaults (built-in + community) + """ + import sys + + env_value = os.environ.get("SPECKIT_INTEGRATION_CATALOG_URL", "").strip() + if env_value: + self._validate_catalog_url(env_value) + if env_value != self.DEFAULT_CATALOG_URL: + print( + "Warning: Using non-default integration catalog. " + "Only use catalogs from sources you trust.", + file=sys.stderr, + ) + return [ + IntegrationCatalogEntry( + url=env_value, + name="custom", + priority=1, + install_allowed=True, + description="Custom catalog via SPECKIT_INTEGRATION_CATALOG_URL", + ) + ] + + project_cfg = self.project_root / ".specify" / "integration-catalogs.yml" + catalogs = self._load_catalog_config(project_cfg) + if catalogs is not None: + return catalogs + + user_cfg = Path.home() / ".specify" / "integration-catalogs.yml" + catalogs = self._load_catalog_config(user_cfg) + if catalogs is not None: + return catalogs + + return [ + IntegrationCatalogEntry( + url=self.DEFAULT_CATALOG_URL, + name="default", + priority=1, + install_allowed=True, + description="Built-in catalog of installable integrations", + ), + IntegrationCatalogEntry( + url=self.COMMUNITY_CATALOG_URL, + name="community", + priority=2, + install_allowed=False, + description="Community-contributed integrations (discovery only)", + ), + ] + + # -- Fetching --------------------------------------------------------- + + def _fetch_single_catalog( + self, + entry: IntegrationCatalogEntry, + force_refresh: bool = False, + ) -> Dict[str, Any]: + """Fetch one catalog, with per-URL caching.""" + 
import urllib.error + import urllib.request + + url_hash = hashlib.sha256(entry.url.encode()).hexdigest()[:16] + cache_file = self.cache_dir / f"catalog-{url_hash}.json" + cache_meta = self.cache_dir / f"catalog-{url_hash}-metadata.json" + + if not force_refresh and cache_file.exists() and cache_meta.exists(): + try: + meta = json.loads(cache_meta.read_text()) + cached_at = datetime.fromisoformat(meta.get("cached_at", "")) + if cached_at.tzinfo is None: + cached_at = cached_at.replace(tzinfo=timezone.utc) + age = (datetime.now(timezone.utc) - cached_at).total_seconds() + if age < self.CACHE_DURATION: + return json.loads(cache_file.read_text()) + except (json.JSONDecodeError, ValueError, KeyError, TypeError): + pass + + try: + with urllib.request.urlopen(entry.url, timeout=10) as resp: + catalog_data = json.loads(resp.read()) + + if ( + "schema_version" not in catalog_data + or "integrations" not in catalog_data + ): + raise IntegrationCatalogError( + f"Invalid catalog format from {entry.url}" + ) + + self.cache_dir.mkdir(parents=True, exist_ok=True) + cache_file.write_text(json.dumps(catalog_data, indent=2)) + cache_meta.write_text( + json.dumps( + { + "cached_at": datetime.now(timezone.utc).isoformat(), + "catalog_url": entry.url, + }, + indent=2, + ) + ) + return catalog_data + + except urllib.error.URLError as exc: + raise IntegrationCatalogError( + f"Failed to fetch catalog from {entry.url}: {exc}" + ) + except json.JSONDecodeError as exc: + raise IntegrationCatalogError( + f"Invalid JSON in catalog from {entry.url}: {exc}" + ) + + def _get_merged_integrations( + self, force_refresh: bool = False + ) -> List[Dict[str, Any]]: + """Fetch and merge integrations from all active catalogs. + + Higher-priority catalogs win on conflicts. Each dict is annotated + with ``_catalog_name`` and ``_install_allowed``. 
+ """ + import sys + + active = self.get_active_catalogs() + merged: Dict[str, Dict[str, Any]] = {} + any_success = False + + for entry in active: + try: + data = self._fetch_single_catalog(entry, force_refresh) + any_success = True + except IntegrationCatalogError as exc: + print( + f"Warning: Could not fetch catalog '{entry.name}': {exc}", + file=sys.stderr, + ) + continue + + for integ_id, integ_data in data.get("integrations", {}).items(): + if integ_id not in merged: + merged[integ_id] = { + **integ_data, + "id": integ_id, + "_catalog_name": entry.name, + "_install_allowed": entry.install_allowed, + } + + if not any_success and active: + raise IntegrationCatalogError( + "Failed to fetch any integration catalog" + ) + + return list(merged.values()) + + # -- Search / info ---------------------------------------------------- + + def search( + self, + query: Optional[str] = None, + tag: Optional[str] = None, + author: Optional[str] = None, + ) -> List[Dict[str, Any]]: + """Search catalogs for integrations matching the given filters.""" + results: List[Dict[str, Any]] = [] + for item in self._get_merged_integrations(): + if author and item.get("author", "").lower() != author.lower(): + continue + if tag and tag.lower() not in [ + t.lower() for t in item.get("tags", []) + ]: + continue + if query: + haystack = " ".join( + [ + item.get("name", ""), + item.get("description", ""), + item.get("id", ""), + ] + + item.get("tags", []) + ).lower() + if query.lower() not in haystack: + continue + results.append(item) + return results + + def get_integration_info( + self, integration_id: str + ) -> Optional[Dict[str, Any]]: + """Return catalog metadata for a single integration, or None.""" + for item in self._get_merged_integrations(): + if item["id"] == integration_id: + return item + return None + + # -- Cache management ------------------------------------------------- + + def clear_cache(self) -> None: + """Remove all cached catalog files.""" + if self.cache_dir.exists(): 
+ for f in self.cache_dir.glob("catalog-*.json"): + f.unlink(missing_ok=True) + + +# --------------------------------------------------------------------------- +# IntegrationDescriptor (integration.yml) +# --------------------------------------------------------------------------- + +class IntegrationDescriptor: + """Loads and validates an ``integration.yml`` descriptor. + + The descriptor mirrors ``extension.yml`` and ``preset.yml``:: + + schema_version: "1.0" + integration: + id: "my-agent" + name: "My Agent" + version: "1.0.0" + description: "Integration for My Agent" + author: "my-org" + requires: + speckit_version: ">=0.6.0" + tools: [...] + provides: + commands: [...] + scripts: [...] + """ + + SCHEMA_VERSION = "1.0" + REQUIRED_TOP_LEVEL = ["schema_version", "integration", "requires", "provides"] + + def __init__(self, descriptor_path: Path) -> None: + self.path = descriptor_path + self.data = self._load(descriptor_path) + self._validate() + + # -- Loading ---------------------------------------------------------- + + @staticmethod + def _load(path: Path) -> dict: + try: + with open(path, "r", encoding="utf-8") as fh: + return yaml.safe_load(fh) or {} + except yaml.YAMLError as exc: + raise IntegrationDescriptorError(f"Invalid YAML in {path}: {exc}") + except FileNotFoundError: + raise IntegrationDescriptorError(f"Descriptor not found: {path}") + + # -- Validation ------------------------------------------------------- + + def _validate(self) -> None: + for field in self.REQUIRED_TOP_LEVEL: + if field not in self.data: + raise IntegrationDescriptorError( + f"Missing required field: {field}" + ) + + if self.data["schema_version"] != self.SCHEMA_VERSION: + raise IntegrationDescriptorError( + f"Unsupported schema version: {self.data['schema_version']} " + f"(expected {self.SCHEMA_VERSION})" + ) + + integ = self.data["integration"] + for field in ("id", "name", "version", "description"): + if field not in integ: + raise IntegrationDescriptorError( + f"Missing 
integration.{field}" + ) + + if not re.match(r"^[a-z0-9-]+$", integ["id"]): + raise IntegrationDescriptorError( + f"Invalid integration ID '{integ['id']}': " + "must be lowercase alphanumeric with hyphens only" + ) + + try: + pkg_version.Version(integ["version"]) + except pkg_version.InvalidVersion: + raise IntegrationDescriptorError( + f"Invalid version: {integ['version']}" + ) + + requires = self.data["requires"] + if "speckit_version" not in requires: + raise IntegrationDescriptorError( + "Missing requires.speckit_version" + ) + + provides = self.data["provides"] + commands = provides.get("commands", []) + scripts = provides.get("scripts", []) + if "commands" in provides and not isinstance(commands, list): + raise IntegrationDescriptorError( + "Invalid provides.commands: expected a list" + ) + if "scripts" in provides and not isinstance(scripts, list): + raise IntegrationDescriptorError( + "Invalid provides.scripts: expected a list" + ) + if not commands and not scripts: + raise IntegrationDescriptorError( + "Integration must provide at least one command or script" + ) + for cmd in commands: + if "name" not in cmd or "file" not in cmd: + raise IntegrationDescriptorError( + "Command entry missing 'name' or 'file'" + ) + + # -- Property accessors ----------------------------------------------- + + @property + def id(self) -> str: + return self.data["integration"]["id"] + + @property + def name(self) -> str: + return self.data["integration"]["name"] + + @property + def version(self) -> str: + return self.data["integration"]["version"] + + @property + def description(self) -> str: + return self.data["integration"]["description"] + + @property + def requires_speckit_version(self) -> str: + return self.data["requires"]["speckit_version"] + + @property + def commands(self) -> List[Dict[str, Any]]: + return self.data.get("provides", {}).get("commands", []) + + @property + def scripts(self) -> List[str]: + return self.data.get("provides", {}).get("scripts", []) + + 
# (continuation of class IntegrationDescriptor — remaining accessors)
    @property
    def tools(self) -> List[Dict[str, Any]]:
        # Tool requirements are optional; default to an empty list.
        return self.data.get("requires", {}).get("tools", [])

    def get_hash(self) -> str:
        """SHA-256 hash of the descriptor file."""
        with open(self.path, "rb") as fh:
            return f"sha256:{hashlib.sha256(fh.read()).hexdigest()}"


# === patch boundary (verbatim diff metadata, preserved as comments) =========
# diff --git a/tests/integrations/test_integration_catalog.py b/tests/integrations/test_integration_catalog.py
# new file mode 100644
# index 000000000..2049e7c7f
# --- /dev/null
# +++ b/tests/integrations/test_integration_catalog.py
# @@ -0,0 +1,642 @@
# ============================================================================

"""Tests for the integration catalog system (catalog.py)."""

import json
import os
from pathlib import Path

import pytest
import yaml

from specify_cli.integrations.catalog import (
    IntegrationCatalog,
    IntegrationCatalogEntry,
    IntegrationCatalogError,
    IntegrationDescriptor,
    IntegrationDescriptorError,
)


# ---------------------------------------------------------------------------
# IntegrationCatalogEntry
# ---------------------------------------------------------------------------


class TestIntegrationCatalogEntry:
    # Plain-dataclass behavior: fields round-trip, description defaults to "".

    def test_create_entry(self):
        entry = IntegrationCatalogEntry(
            url="https://example.com/catalog.json",
            name="test",
            priority=1,
            install_allowed=True,
            description="Test catalog",
        )
        assert entry.url == "https://example.com/catalog.json"
        assert entry.name == "test"
        assert entry.priority == 1
        assert entry.install_allowed is True
        assert entry.description == "Test catalog"

    def test_default_description(self):
        entry = IntegrationCatalogEntry(
            url="https://example.com/catalog.json",
            name="test",
            priority=1,
            install_allowed=False,
        )
        assert entry.description == ""


# ---------------------------------------------------------------------------
# IntegrationCatalog — URL validation
# ---------------------------------------------------------------------------


class TestCatalogURLValidation:
    # _validate_catalog_url: HTTPS required, except for localhost loopback.

    def test_https_allowed(self):
        IntegrationCatalog._validate_catalog_url("https://example.com/catalog.json")

    def test_http_rejected(self):
        with pytest.raises(IntegrationCatalogError, match="HTTPS"):
            IntegrationCatalog._validate_catalog_url("http://example.com/catalog.json")

    def test_http_localhost_allowed(self):
        # Loopback hosts are exempt so local catalog servers work in dev.
        IntegrationCatalog._validate_catalog_url("http://localhost:8080/catalog.json")
        IntegrationCatalog._validate_catalog_url("http://127.0.0.1/catalog.json")

    def test_missing_host_rejected(self):
        with pytest.raises(IntegrationCatalogError, match="valid URL"):
            IntegrationCatalog._validate_catalog_url("https:///no-host")


# ---------------------------------------------------------------------------
# IntegrationCatalog — active catalogs
# ---------------------------------------------------------------------------


class TestActiveCatalogs:
    # Resolution order: env var > project config > user config > defaults.

    def test_defaults_when_no_config(self, tmp_path):
        (tmp_path / ".specify").mkdir()
        cat = IntegrationCatalog(tmp_path)
        active = cat.get_active_catalogs()
        assert len(active) == 2
        assert active[0].name == "default"
        assert active[1].name == "community"

    def test_env_var_override(self, tmp_path, monkeypatch):
        (tmp_path / ".specify").mkdir()
        monkeypatch.setenv(
            "SPECKIT_INTEGRATION_CATALOG_URL",
            "https://custom.example.com/catalog.json",
        )
        cat = IntegrationCatalog(tmp_path)
        active = cat.get_active_catalogs()
        assert len(active) == 1
        assert active[0].name == "custom"

    def test_project_config_overrides_defaults(self, tmp_path):
        specify = tmp_path / ".specify"
        specify.mkdir()
        cfg = specify / "integration-catalogs.yml"
        cfg.write_text(yaml.dump({
            "catalogs": [
                {"url": "https://my.example.com/cat.json", "name": "mine", "priority": 1, "install_allowed": True},
            ]
        }))
        cat = IntegrationCatalog(tmp_path)
        active = cat.get_active_catalogs()
        assert len(active) == 1
        assert active[0].name == "mine"

    def test_empty_config_raises(self, tmp_path):
        # A config file that exists but lists no catalogs is a hard error,
        # not a silent fall-through to the defaults.
        specify = tmp_path / ".specify"
        specify.mkdir()
        cfg = specify / "integration-catalogs.yml"
        cfg.write_text(yaml.dump({"catalogs": []}))
        cat = IntegrationCatalog(tmp_path)
        with pytest.raises(IntegrationCatalogError, match="no 'catalogs' entries"):
            cat.get_active_catalogs()


# ---------------------------------------------------------------------------
# IntegrationCatalog — fetch & search (using local file:// catalog)
# ---------------------------------------------------------------------------


def _write_catalog(path: Path, integrations: dict) -> None:
    """Helper: write a catalog JSON file."""
    path.parent.mkdir(parents=True, exist_ok=True)
    path.write_text(json.dumps({
        "schema_version": "1.0",
        "updated_at": "2026-01-01T00:00:00Z",
        "integrations": integrations,
    }, indent=2))


class TestCatalogFetch:
    """Tests that use a local HTTP server stub via monkeypatch."""

    def _patch_urlopen(self, monkeypatch, catalog_data):
        """Patch urllib.request.urlopen to return *catalog_data*."""

        class FakeResponse:
            # Minimal context-manager stand-in for an HTTP response.
            def __init__(self, data):
                self._data = json.dumps(data).encode()

            def read(self):
                return self._data

            def __enter__(self):
                return self

            def __exit__(self, *a):
                pass

        def fake_urlopen(url, timeout=10):
            return FakeResponse(catalog_data)

        import urllib.request
        monkeypatch.setattr(urllib.request, "urlopen", fake_urlopen)

    def test_fetch_and_search_all(self, tmp_path, monkeypatch):
        (tmp_path / ".specify").mkdir()
        cat = IntegrationCatalog(tmp_path)

        catalog = {
            "schema_version": "1.0",
            "updated_at": "2026-01-01T00:00:00Z",
            "integrations": {
                "acme-coder": {
                    "id": "acme-coder",
                    "name": "Acme Coder",
                    "version": "2.0.0",
                    "description": "Community integration for Acme Coder",
                    "author": "acme-org",
                    "tags": ["cli"],
                },
            },
        }
        self._patch_urlopen(monkeypatch, catalog)

        results = cat.search()
        assert len(results) >= 1
        ids = [r["id"] for r in results]
        assert "acme-coder" in ids

    def test_search_by_tag(self, tmp_path, monkeypatch):
        (tmp_path / ".specify").mkdir()
        cat = IntegrationCatalog(tmp_path)

        catalog = {
            "schema_version": "1.0",
            "updated_at": "2026-01-01T00:00:00Z",
            "integrations": {
                "a": {"id": "a", "name": "A", "version": "1.0.0", "tags": ["cli"]},
                "b": {"id": "b", "name": "B", "version": "1.0.0", "tags": ["ide"]},
            },
        }
        self._patch_urlopen(monkeypatch, catalog)

        results = cat.search(tag="cli")
        assert all("cli" in r.get("tags", []) for r in results)

    def test_search_by_query(self, tmp_path, monkeypatch):
        (tmp_path / ".specify").mkdir()
        cat = IntegrationCatalog(tmp_path)

        catalog = {
            "schema_version": "1.0",
            "updated_at": "2026-01-01T00:00:00Z",
            "integrations": {
                "claude": {"id": "claude", "name": "Claude Code", "version": "1.0.0", "description": "Anthropic", "tags": []},
                "gemini": {"id": "gemini", "name": "Gemini CLI", "version": "1.0.0", "description": "Google", "tags": []},
            },
        }
        self._patch_urlopen(monkeypatch, catalog)

        results = cat.search(query="claude")
        assert len(results) == 1
        assert results[0]["id"] == "claude"

    def test_get_integration_info(self, tmp_path, monkeypatch):
        (tmp_path / ".specify").mkdir()
        cat = IntegrationCatalog(tmp_path)

        catalog = {
            "schema_version": "1.0",
            "updated_at": "2026-01-01T00:00:00Z",
            "integrations": {
                "claude": {"id": "claude", "name": "Claude Code", "version": "1.0.0"},
            },
        }
        self._patch_urlopen(monkeypatch, catalog)

        info = cat.get_integration_info("claude")
        assert info is not None
        assert info["name"] == "Claude Code"

        assert cat.get_integration_info("nonexistent") is None

    def test_invalid_catalog_format(self, tmp_path, monkeypatch):
        (tmp_path / ".specify").mkdir()
        cat = IntegrationCatalog(tmp_path)

        # Both default catalogs return the same malformed payload, so every
        # fetch fails and the aggregate error surfaces.
        self._patch_urlopen(monkeypatch, {"schema_version": "1.0"})  # missing "integrations"

        with pytest.raises(IntegrationCatalogError, match="Failed to fetch any integration catalog"):
            cat.search()

# (continuation of class TestCatalogFetch — final test)
    def test_clear_cache(self, tmp_path):
        # clear_cache must remove every catalog-*.json artifact it created.
        (tmp_path / ".specify").mkdir()
        cat = IntegrationCatalog(tmp_path)
        cat.cache_dir.mkdir(parents=True, exist_ok=True)
        (cat.cache_dir / "catalog-abc123.json").write_text("{}")
        cat.clear_cache()
        assert not list(cat.cache_dir.glob("catalog-*.json"))


# ---------------------------------------------------------------------------
# IntegrationDescriptor (integration.yml)
# ---------------------------------------------------------------------------

# Minimal descriptor that passes every IntegrationDescriptor validation rule;
# individual tests below copy and perturb it.
VALID_DESCRIPTOR = {
    "schema_version": "1.0",
    "integration": {
        "id": "my-agent",
        "name": "My Agent",
        "version": "1.0.0",
        "description": "Integration for My Agent",
        "author": "my-org",
    },
    "requires": {
        "speckit_version": ">=0.6.0",
    },
    "provides": {
        "commands": [
            {"name": "speckit.specify", "file": "templates/speckit.specify.md"},
        ],
        "scripts": ["update-context.sh"],
    },
}


class TestIntegrationDescriptor:
    def _write(self, tmp_path, data):
        # Serialize *data* to integration.yml and return its path.
        p = tmp_path / "integration.yml"
        p.write_text(yaml.dump(data))
        return p

    def test_valid_descriptor(self, tmp_path):
        p = self._write(tmp_path, VALID_DESCRIPTOR)
        desc = IntegrationDescriptor(p)
        assert desc.id == "my-agent"
        assert desc.name == "My Agent"
        assert desc.version == "1.0.0"
        assert desc.description == "Integration for My Agent"
        assert desc.requires_speckit_version == ">=0.6.0"
        assert len(desc.commands) == 1
        assert desc.scripts == ["update-context.sh"]

    def test_missing_schema_version(self, tmp_path):
        data = {**VALID_DESCRIPTOR}
        del data["schema_version"]
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="Missing required field: schema_version"):
            IntegrationDescriptor(p)

    def test_unsupported_schema_version(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "schema_version": "99.0"}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="Unsupported schema version"):
            IntegrationDescriptor(p)

    def test_missing_integration_id(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "integration": {"name": "X", "version": "1.0.0", "description": "Y"}}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="Missing integration.id"):
            IntegrationDescriptor(p)

    def test_invalid_id_format(self, tmp_path):
        # IDs must match ^[a-z0-9-]+$ — uppercase/underscore rejected.
        integ = {**VALID_DESCRIPTOR["integration"], "id": "BAD_ID"}
        data = {**VALID_DESCRIPTOR, "integration": integ}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="Invalid integration ID"):
            IntegrationDescriptor(p)

    def test_invalid_version(self, tmp_path):
        integ = {**VALID_DESCRIPTOR["integration"], "version": "not-semver"}
        data = {**VALID_DESCRIPTOR, "integration": integ}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="Invalid version"):
            IntegrationDescriptor(p)

    def test_missing_speckit_version(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "requires": {}}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="requires.speckit_version"):
            IntegrationDescriptor(p)

    def test_no_commands_or_scripts(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "provides": {}}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="at least one command or script"):
            IntegrationDescriptor(p)

    def test_command_missing_name(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "provides": {"commands": [{"file": "x.md"}]}}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="missing 'name' or 'file'"):
            IntegrationDescriptor(p)

    def test_commands_not_a_list(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "provides": {"commands": "not-a-list", "scripts": ["a.sh"]}}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="expected a list"):
            IntegrationDescriptor(p)

    def test_scripts_not_a_list(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "provides": {"commands": [{"name": "a", "file": "b"}], "scripts": "not-a-list"}}
        p = self._write(tmp_path, data)
        with pytest.raises(IntegrationDescriptorError, match="expected a list"):
            IntegrationDescriptor(p)

    def test_file_not_found(self, tmp_path):
        with pytest.raises(IntegrationDescriptorError, match="Descriptor not found"):
            IntegrationDescriptor(tmp_path / "nonexistent.yml")

    def test_invalid_yaml(self, tmp_path):
        p = tmp_path / "integration.yml"
        p.write_text(": : :")
        with pytest.raises(IntegrationDescriptorError, match="Invalid YAML"):
            IntegrationDescriptor(p)

    def test_get_hash(self, tmp_path):
        p = self._write(tmp_path, VALID_DESCRIPTOR)
        desc = IntegrationDescriptor(p)
        h = desc.get_hash()
        assert h.startswith("sha256:")

    def test_tools_accessor(self, tmp_path):
        data = {**VALID_DESCRIPTOR, "requires": {
            "speckit_version": ">=0.6.0",
            "tools": [{"name": "my-agent", "version": ">=1.0.0", "required": True}],
        }}
        p = self._write(tmp_path, data)
        desc = IntegrationDescriptor(p)
        assert len(desc.tools) == 1
        assert desc.tools[0]["name"] == "my-agent"


# ---------------------------------------------------------------------------
# CLI: integration list --catalog
# ---------------------------------------------------------------------------


class TestIntegrationListCatalog:
    """Test ``specify integration list --catalog``."""

    def _init_project(self, tmp_path):
        """Create a minimal spec-kit project."""
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = tmp_path / "proj"
        project.mkdir()
        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, [
                "init", "--here",
                "--integration", "copilot",
                "--script", "sh",
                "--no-git",
                "--ignore-agent-tools",
            ], catch_exceptions=False)
        finally:
            os.chdir(old)
        assert result.exit_code == 0, result.output
        return project

    def test_list_catalog_flag(self, tmp_path, monkeypatch):
        """--catalog should show catalog entries."""
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = self._init_project(tmp_path)

        catalog = {
            "schema_version": "1.0",
            "updated_at": "2026-01-01T00:00:00Z",
            "integrations": {
                "test-agent": {
                    "id": "test-agent",
                    "name": "Test Agent",
                    "version": "1.0.0",
                    "description": "A test agent",
                    "tags": ["cli"],
                },
            },
        }

        import urllib.request

        class FakeResponse:
            # Minimal context-manager stand-in for an HTTP response.
            def __init__(self, data):
                self._data = json.dumps(data).encode()
            def read(self):
                return self._data
            def __enter__(self):
                return self
            def __exit__(self, *a):
                pass

        monkeypatch.setattr(urllib.request, "urlopen", lambda url, timeout=10: FakeResponse(catalog))

        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "list", "--catalog"])
        finally:
            os.chdir(old)

        assert result.exit_code == 0
        assert "test-agent" in result.output
        assert "Test Agent" in result.output

    def test_list_without_catalog_still_works(self, tmp_path):
        """Default list (no --catalog) works as before."""
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = self._init_project(tmp_path)

        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "list"])
        finally:
            os.chdir(old)

        assert result.exit_code == 0
        assert "copilot" in result.output
        assert "installed" in result.output


# ---------------------------------------------------------------------------
# CLI: integration upgrade
# ---------------------------------------------------------------------------


class TestIntegrationUpgrade:
    """Test ``specify integration upgrade``."""

    def _init_project(self, tmp_path, integration="copilot"):
        # Same bootstrap as TestIntegrationListCatalog, with a selectable
        # integration key.
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = tmp_path / "proj"
        project.mkdir()
        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, [
                "init", "--here",
                "--integration", integration,
                "--script", "sh",
                "--no-git",
                "--ignore-agent-tools",
            ], catch_exceptions=False)
        finally:
            os.chdir(old)
        assert result.exit_code == 0, result.output
        return project

    def test_upgrade_requires_speckit_project(self, tmp_path):
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        old = os.getcwd()
        try:
            os.chdir(tmp_path)
            result = runner.invoke(app, ["integration", "upgrade"])
        finally:
            os.chdir(old)
        assert result.exit_code != 0
        assert "Not a spec-kit project" in result.output

    def test_upgrade_no_integration_installed(self, tmp_path):
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = tmp_path / "proj"
        project.mkdir()
        (project / ".specify").mkdir()
        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "upgrade"])
        finally:
            os.chdir(old)
        assert result.exit_code == 0
        assert "No integration is currently installed" in result.output

    def test_upgrade_succeeds(self, tmp_path):
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = self._init_project(tmp_path, "copilot")

        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "upgrade"], catch_exceptions=False)
        finally:
            os.chdir(old)
        assert result.exit_code == 0
        assert "upgraded successfully" in result.output

    def test_upgrade_blocks_on_modified_files(self, tmp_path):
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = self._init_project(tmp_path, "copilot")

        # Modify a tracked file so the manifest hash won't match
        manifest_path = project / ".specify" / "integrations" / "copilot.manifest.json"
        assert manifest_path.exists(), "Manifest should exist after init"
        manifest_data = json.loads(manifest_path.read_text())
        tracked_files = manifest_data.get("files", {})
        if tracked_files:
            first_rel = next(iter(tracked_files))
            target_file = project / first_rel
            if target_file.exists():
                target_file.write_text("MODIFIED CONTENT\n")

        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "upgrade"])
        finally:
            os.chdir(old)
        assert result.exit_code != 0
        assert "modified" in result.output.lower()

    def test_upgrade_force_overwrites_modified(self, tmp_path):
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = self._init_project(tmp_path, "copilot")

        # Modify a tracked file
        manifest_path = project / ".specify" / "integrations" / "copilot.manifest.json"
        manifest_data = json.loads(manifest_path.read_text())
        tracked_files = manifest_data.get("files", {})
        if tracked_files:
            first_rel = next(iter(tracked_files))
            target_file = project / first_rel
            if target_file.exists():
                target_file.write_text("MODIFIED CONTENT\n")

        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "upgrade", "--force"], catch_exceptions=False)
        finally:
            os.chdir(old)
        assert result.exit_code == 0
        assert "upgraded successfully" in result.output

    def test_upgrade_wrong_integration_key(self, tmp_path):
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = self._init_project(tmp_path, "copilot")

        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "upgrade", "claude"])
        finally:
            os.chdir(old)
        assert result.exit_code != 0
        assert "not the currently installed integration" in result.output

    def test_upgrade_no_manifest(self, tmp_path):
        """Upgrade with missing manifest suggests fresh install."""
        from typer.testing import CliRunner
        from specify_cli import app
        runner = CliRunner()
        project = self._init_project(tmp_path, "copilot")

        # Remove manifest
        manifest_path = project / ".specify" / "integrations" / "copilot.manifest.json"
        if manifest_path.exists():
            manifest_path.unlink()

        old = os.getcwd()
        try:
            os.chdir(project)
            result = runner.invoke(app, ["integration", "upgrade"])
        finally:
            os.chdir(old)
        assert result.exit_code == 0
        assert "Nothing to upgrade" in result.output