mirror of
https://github.com/github/spec-kit.git
synced 2026-04-02 10:43:08 +00:00
feat: Stage 1 — integration foundation (base classes, manifest, registry)
Add the integrations package with:
- IntegrationBase ABC and MarkdownIntegration base class
- IntegrationOption dataclass for per-integration CLI options
- IntegrationManifest with SHA-256 hash-tracked install/uninstall
- INTEGRATION_REGISTRY (empty, populated in later stages)
- 34 tests at 98% coverage

Purely additive — no existing code modified. Part of #1924
This commit is contained in:
34
src/specify_cli/integrations/__init__.py
Normal file
34
src/specify_cli/integrations/__init__.py
Normal file
@@ -0,0 +1,34 @@
|
||||
"""Integration registry for AI coding assistants.
|
||||
|
||||
Each integration is a self-contained subpackage that handles setup/teardown
|
||||
for a specific AI assistant (Copilot, Claude, Gemini, etc.).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from typing import TYPE_CHECKING
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from .base import IntegrationBase
|
||||
|
||||
# Maps integration key → IntegrationBase instance.
|
||||
# Populated by later stages as integrations are migrated.
|
||||
INTEGRATION_REGISTRY: dict[str, IntegrationBase] = {}
|
||||
|
||||
|
||||
def _register(integration: IntegrationBase) -> None:
|
||||
"""Register an integration instance in the global registry.
|
||||
|
||||
Raises ``ValueError`` for falsy keys and ``KeyError`` for duplicates.
|
||||
"""
|
||||
key = integration.key
|
||||
if not key:
|
||||
raise ValueError("Cannot register integration with an empty key.")
|
||||
if key in INTEGRATION_REGISTRY:
|
||||
raise KeyError(f"Integration with key {key!r} is already registered.")
|
||||
INTEGRATION_REGISTRY[key] = integration
|
||||
|
||||
|
||||
def get_integration(key: str) -> IntegrationBase | None:
|
||||
"""Return the integration for *key*, or ``None`` if not registered."""
|
||||
return INTEGRATION_REGISTRY.get(key)
|
||||
201
src/specify_cli/integrations/base.py
Normal file
201
src/specify_cli/integrations/base.py
Normal file
@@ -0,0 +1,201 @@
|
||||
"""Base classes for AI-assistant integrations.
|
||||
|
||||
Provides:
|
||||
- ``IntegrationOption`` — declares a CLI option an integration accepts.
|
||||
- ``IntegrationBase`` — abstract base every integration must implement.
|
||||
- ``MarkdownIntegration`` — concrete base for standard Markdown-format
|
||||
integrations (the common case — subclass, set three class attrs, done).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import shutil
|
||||
from abc import ABC
|
||||
from dataclasses import dataclass
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# IntegrationOption
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
@dataclass(frozen=True)
class IntegrationOption:
    """Declares an option that an integration accepts via ``--integration-options``.

    Frozen (immutable), so instances can safely be shared as class-level
    declarations returned from ``IntegrationBase.options()``.

    Attributes:
        name: The flag name (e.g. ``"--commands-dir"``).
        is_flag: ``True`` for boolean flags (``--skills``).
        required: ``True`` if the option must be supplied.
        default: Default value when not supplied (``None`` → no default).
        help: One-line description shown in ``specify integrate info``.
    """

    name: str
    is_flag: bool = False
    required: bool = False
    default: Any = None
    help: str = ""
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# IntegrationBase — abstract base class
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class IntegrationBase(ABC):
    """Abstract base class every integration must implement.

    Subclasses must set the following class-level attributes:

    * ``key`` — unique identifier, matches actual CLI tool name
    * ``config`` — dict compatible with ``AGENT_CONFIG`` entries
    * ``registrar_config`` — dict compatible with ``CommandRegistrar.AGENT_CONFIGS``

    And may optionally set:

    * ``context_file`` — path (relative to project root) of the agent
      context/instructions file (e.g. ``"CLAUDE.md"``)
    """

    # -- Must be set by every subclass ------------------------------------

    key: str = ""
    """Unique integration key — should match the actual CLI tool name."""

    config: dict[str, Any] | None = None
    """Metadata dict matching the ``AGENT_CONFIG`` shape."""

    registrar_config: dict[str, Any] | None = None
    """Registration dict matching ``CommandRegistrar.AGENT_CONFIGS`` shape."""

    # -- Optional ---------------------------------------------------------

    context_file: str | None = None
    """Relative path to the agent context file (e.g. ``CLAUDE.md``)."""

    # -- Public API -------------------------------------------------------

    @classmethod
    def options(cls) -> list[IntegrationOption]:
        """Return options this integration accepts. Default: none."""
        return []

    def templates_dir(self) -> Path:
        """Return the path to this integration's bundled templates.

        By convention, templates live in a ``templates/`` subdirectory next
        to the integration's ``__init__.py``.
        """
        # Local import: inspect is only needed here, keeps module import cheap.
        import inspect

        # type(self) (not IntegrationBase) so each subclass resolves to its
        # own module file, giving a per-integration templates directory.
        module_file = inspect.getfile(type(self))
        return Path(module_file).resolve().parent / "templates"

    def setup(
        self,
        project_root: Path,
        manifest: Any,
        parsed_options: dict[str, Any] | None = None,
        **opts: Any,
    ) -> list[Path]:
        """Install integration files into *project_root*.

        Returns the list of files created. The default implementation
        copies every file from ``templates_dir()`` into the commands
        directory derived from ``config``, recording each in *manifest*.

        Raises ``ValueError`` if the derived destination directory would
        fall outside *project_root*.

        NOTE(review): ``parsed_options`` and ``**opts`` are accepted but
        unused here — presumably hooks for subclass overrides; confirm in
        later stages.
        """
        created: list[Path] = []
        tpl_dir = self.templates_dir()
        # No bundled templates → nothing to install; not an error.
        if not tpl_dir.is_dir():
            return created

        # Without config (or its "folder" entry) we cannot derive a
        # destination directory — bail out quietly.
        if not self.config:
            return created
        folder = self.config.get("folder")
        if not folder:
            return created

        project_root_resolved = project_root.resolve()
        subdir = self.config.get("commands_subdir", "commands")
        dest = (project_root / folder / subdir).resolve()
        # Ensure destination stays within the project root
        try:
            dest.relative_to(project_root_resolved)
        except ValueError as exc:
            raise ValueError(
                f"Integration destination {dest} escapes "
                f"project root {project_root_resolved}"
            ) from exc

        dest.mkdir(parents=True, exist_ok=True)

        # sorted() gives a deterministic install order across filesystems.
        for src_file in sorted(tpl_dir.iterdir()):
            if src_file.is_file():
                dst_file = (dest / src_file.name).resolve()
                rel = dst_file.relative_to(project_root_resolved)
                shutil.copy2(src_file, dst_file)
                # Hash is recorded *after* the copy so the manifest reflects
                # what actually landed on disk.
                manifest.record_existing(rel)
                created.append(dst_file)

        return created

    def teardown(
        self,
        project_root: Path,
        manifest: Any,
        *,
        force: bool = False,
    ) -> tuple[list[Path], list[Path]]:
        """Uninstall integration files from *project_root*.

        Delegates to ``manifest.uninstall()`` which only removes files
        whose hash still matches the recorded value (unless *force*).

        Returns ``(removed, skipped)`` file lists.
        """
        return manifest.uninstall(project_root, force=force)

    # -- Convenience helpers for subclasses -------------------------------

    def install(
        self,
        project_root: Path,
        manifest: Any,
        parsed_options: dict[str, Any] | None = None,
        **opts: Any,
    ) -> list[Path]:
        """High-level install — calls ``setup()`` and returns created files."""
        return self.setup(
            project_root, manifest, parsed_options=parsed_options, **opts
        )

    def uninstall(
        self,
        project_root: Path,
        manifest: Any,
        *,
        force: bool = False,
    ) -> tuple[list[Path], list[Path]]:
        """High-level uninstall — calls ``teardown()``."""
        return self.teardown(project_root, manifest, force=force)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# MarkdownIntegration — covers ~20 standard agents
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class MarkdownIntegration(IntegrationBase):
    """Concrete base for standard Markdown-command integrations.

    A subclass only has to provide ``key``, ``config`` and
    ``registrar_config`` (plus an optional ``context_file``); everything
    else is inherited. The default ``IntegrationBase.setup()`` already
    does the right thing for this layout — it copies the bundled
    templates into the agent's commands directory.
    """

    # Deliberately empty: the inherited behaviour is the whole contract.
    # Future stages may layer markdown-specific path rewriting on top.
|
||||
225
src/specify_cli/integrations/manifest.py
Normal file
225
src/specify_cli/integrations/manifest.py
Normal file
@@ -0,0 +1,225 @@
|
||||
"""Hash-tracked installation manifest for integrations.
|
||||
|
||||
Each installed integration records the files it created together with
|
||||
their SHA-256 hashes. On uninstall only files whose hash still matches
|
||||
the recorded value are removed — modified files are left in place and
|
||||
reported to the caller.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
|
||||
def _sha256(path: Path) -> str:
|
||||
"""Return the hex SHA-256 digest of *path*."""
|
||||
h = hashlib.sha256()
|
||||
with open(path, "rb") as fh:
|
||||
for chunk in iter(lambda: fh.read(8192), b""):
|
||||
h.update(chunk)
|
||||
return h.hexdigest()
|
||||
|
||||
|
||||
def _validate_rel_path(rel: Path, root: Path) -> Path:
|
||||
"""Resolve *rel* against *root* and verify it stays within *root*.
|
||||
|
||||
Raises ``ValueError`` if *rel* is absolute, contains ``..`` segments
|
||||
that escape *root*, or otherwise resolves outside the project root.
|
||||
"""
|
||||
if rel.is_absolute():
|
||||
raise ValueError(
|
||||
f"Absolute paths are not allowed in manifests: {rel}"
|
||||
)
|
||||
resolved = (root / rel).resolve()
|
||||
root_resolved = root.resolve()
|
||||
try:
|
||||
resolved.relative_to(root_resolved)
|
||||
except ValueError:
|
||||
raise ValueError(
|
||||
f"Path {rel} resolves to {resolved} which is outside "
|
||||
f"the project root {root_resolved}"
|
||||
) from None
|
||||
return resolved
|
||||
|
||||
|
||||
class IntegrationManifest:
    """Tracks files installed by a single integration.

    Parameters:
        key: Integration identifier (e.g. ``"copilot"``).
        project_root: Absolute path to the project directory.
        version: CLI version string recorded in the manifest.
    """

    def __init__(self, key: str, project_root: Path, version: str = "") -> None:
        self.key = key
        # Resolve once up front so later containment checks compare
        # like with like (symlinks, relative components).
        self.project_root = project_root.resolve()
        self.version = version
        self._files: dict[str, str] = {}  # rel_path → sha256 hex
        self._installed_at: str = ""  # ISO timestamp; set on first save()

    # -- Manifest file location -------------------------------------------

    @property
    def manifest_path(self) -> Path:
        """Path to the on-disk manifest JSON."""
        return self.project_root / ".specify" / "integrations" / f"{self.key}.manifest.json"

    # -- Recording files --------------------------------------------------

    def record_file(self, rel_path: str | Path, content: bytes | str) -> Path:
        """Write *content* to *rel_path* (relative to project root) and record its hash.

        Creates parent directories as needed. Returns the absolute path
        of the written file. ``str`` content is encoded as UTF-8.

        Raises ``ValueError`` if *rel_path* resolves outside the project root.
        """
        rel = Path(rel_path)
        abs_path = _validate_rel_path(rel, self.project_root)
        abs_path.parent.mkdir(parents=True, exist_ok=True)

        if isinstance(content, str):
            content = content.encode("utf-8")
        abs_path.write_bytes(content)

        # Hash the in-memory bytes just written — no need to re-read the file.
        self._files[str(rel)] = hashlib.sha256(content).hexdigest()
        return abs_path

    def record_existing(self, rel_path: str | Path) -> None:
        """Record the hash of an already-existing file at *rel_path*.

        Raises ``ValueError`` if *rel_path* resolves outside the project root.
        """
        rel = Path(rel_path)
        abs_path = _validate_rel_path(rel, self.project_root)
        self._files[str(rel)] = _sha256(abs_path)

    # -- Querying ---------------------------------------------------------

    @property
    def files(self) -> dict[str, str]:
        """Return a copy of the ``{rel_path: sha256}`` mapping."""
        return dict(self._files)

    def check_modified(self) -> list[str]:
        """Return relative paths of tracked files whose content changed on disk."""
        modified: list[str] = []
        for rel, expected_hash in self._files.items():
            try:
                abs_path = _validate_rel_path(Path(rel), self.project_root)
            except ValueError:
                # A corrupted manifest entry (traversal path) is skipped,
                # not fatal — mirrors uninstall()'s behaviour.
                continue
            if not abs_path.exists():
                # Deletion is not "modification"; uninstall() handles it.
                continue
            if _sha256(abs_path) != expected_hash:
                modified.append(rel)
        return modified

    # -- Uninstall --------------------------------------------------------

    def uninstall(
        self,
        project_root: Path | None = None,
        *,
        force: bool = False,
    ) -> tuple[list[Path], list[Path]]:
        """Remove tracked files whose hash still matches.

        Parameters:
            project_root: Override for the project root.
            force: If ``True``, remove files even if modified.

        Returns:
            ``(removed, skipped)`` — absolute paths.
        """
        root = (project_root or self.project_root).resolve()
        removed: list[Path] = []
        skipped: list[Path] = []

        for rel, expected_hash in self._files.items():
            abs_path = (root / rel).resolve()
            # Skip paths that escape the project root
            try:
                abs_path.relative_to(root)
            except ValueError:
                continue
            if not abs_path.exists():
                continue
            if not force and _sha256(abs_path) != expected_hash:
                # User edited the file since install — leave it in place.
                skipped.append(abs_path)
                continue
            abs_path.unlink()
            removed.append(abs_path)
            # Clean up empty parent directories up to project root
            parent = abs_path.parent
            while parent != root:
                try:
                    parent.rmdir()  # only succeeds if empty
                except OSError:
                    break
                parent = parent.parent

        # Remove the manifest file itself
        manifest = root / ".specify" / "integrations" / f"{self.key}.manifest.json"
        if manifest.exists():
            manifest.unlink()
            parent = manifest.parent
            while parent != root:
                try:
                    parent.rmdir()
                except OSError:
                    break
                parent = parent.parent

        return removed, skipped

    # -- Persistence ------------------------------------------------------

    def save(self) -> Path:
        """Write the manifest to disk. Returns the manifest path."""
        # Keep the first install timestamp across repeated saves.
        self._installed_at = self._installed_at or datetime.now(timezone.utc).isoformat()
        data: dict[str, Any] = {
            "integration": self.key,
            "version": self.version,
            "installed_at": self._installed_at,
            "files": self._files,
        }
        path = self.manifest_path
        path.parent.mkdir(parents=True, exist_ok=True)
        path.write_text(json.dumps(data, indent=2) + "\n", encoding="utf-8")
        return path

    @classmethod
    def load(cls, key: str, project_root: Path) -> IntegrationManifest:
        """Load an existing manifest from disk.

        Raises ``FileNotFoundError`` if the manifest does not exist, and
        ``ValueError`` if the JSON has the wrong shape.
        """
        inst = cls(key, project_root)
        path = inst.manifest_path
        data = json.loads(path.read_text(encoding="utf-8"))

        if not isinstance(data, dict):
            raise ValueError(
                f"Integration manifest at {path} must be a JSON object, "
                f"got {type(data).__name__}"
            )

        files = data.get("files", {})
        if not isinstance(files, dict) or not all(
            isinstance(k, str) and isinstance(v, str) for k, v in files.items()
        ):
            raise ValueError(
                f"Integration manifest 'files' at {path} must be a "
                "mapping of string paths to string hashes"
            )

        inst.version = data.get("version", "")
        inst._installed_at = data.get("installed_at", "")
        inst._files = files
        return inst
|
||||
413
tests/test_integrations.py
Normal file
413
tests/test_integrations.py
Normal file
@@ -0,0 +1,413 @@
|
||||
"""Tests for the integrations foundation (Stage 1).
|
||||
|
||||
Covers:
|
||||
- IntegrationOption dataclass
|
||||
- IntegrationBase ABC and MarkdownIntegration base class
|
||||
- IntegrationManifest — record, hash, save, load, uninstall, modified detection
|
||||
- INTEGRATION_REGISTRY basics
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
|
||||
import pytest
|
||||
|
||||
from specify_cli.integrations import (
|
||||
INTEGRATION_REGISTRY,
|
||||
_register,
|
||||
get_integration,
|
||||
)
|
||||
from specify_cli.integrations.base import (
|
||||
IntegrationBase,
|
||||
IntegrationOption,
|
||||
MarkdownIntegration,
|
||||
)
|
||||
from specify_cli.integrations.manifest import IntegrationManifest, _sha256
|
||||
|
||||
|
||||
# ── helpers ──────────────────────────────────────────────────────────────────
|
||||
|
||||
|
||||
class _StubIntegration(MarkdownIntegration):
    """Minimal concrete integration for testing."""

    key = "stub"
    # Mirrors the AGENT_CONFIG entry shape described on IntegrationBase.
    config = {
        "name": "Stub Agent",
        "folder": ".stub/",
        "commands_subdir": "commands",
        "install_url": None,
        "requires_cli": False,
    }
    # Mirrors the CommandRegistrar.AGENT_CONFIGS entry shape.
    registrar_config = {
        "dir": ".stub/commands",
        "format": "markdown",
        "args": "$ARGUMENTS",
        "extension": ".md",
    }
    context_file = "STUB.md"
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# IntegrationOption
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
|
||||
class TestIntegrationOption:
    """IntegrationOption: defaults, flag/required variants, immutability."""

    def test_defaults(self):
        option = IntegrationOption(name="--flag")
        assert option.name == "--flag"
        assert option.help == ""
        assert option.default is None
        assert option.required is False
        assert option.is_flag is False

    def test_flag_option(self):
        option = IntegrationOption(name="--skills", is_flag=True, default=True, help="Enable skills")
        assert option.help == "Enable skills"
        assert option.default is True
        assert option.is_flag is True

    def test_required_option(self):
        option = IntegrationOption(name="--commands-dir", required=True, help="Dir path")
        assert option.required is True

    def test_frozen(self):
        option = IntegrationOption(name="--x")
        # FrozenInstanceError is a subclass of AttributeError.
        with pytest.raises(AttributeError):
            option.name = "--y"  # type: ignore[misc]
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# IntegrationBase / MarkdownIntegration
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
|
||||
class TestIntegrationBase:
    """Default behaviour of the IntegrationBase implementations."""

    def test_key_and_config(self):
        i = _StubIntegration()
        assert i.key == "stub"
        assert i.config["name"] == "Stub Agent"
        assert i.registrar_config["format"] == "markdown"
        assert i.context_file == "STUB.md"

    def test_options_default_empty(self):
        assert _StubIntegration.options() == []

    def test_templates_dir(self):
        i = _StubIntegration()
        td = i.templates_dir()
        # Should point to a templates/ dir next to this test module.
        # It won't exist, but the path should be well-formed.
        assert td.name == "templates"

    def test_setup_no_templates_returns_empty(self, tmp_path):
        """setup() gracefully returns empty list when templates dir is missing."""
        i = _StubIntegration()
        manifest = IntegrationManifest("stub", tmp_path)
        created = i.setup(tmp_path, manifest)
        assert created == []

    def test_setup_copies_templates(self, tmp_path, monkeypatch):
        """setup() copies template files and records them in the manifest."""
        # Create templates under tmp_path so we don't mutate the source tree
        tpl = tmp_path / "_templates"
        tpl.mkdir()
        (tpl / "speckit.plan.md").write_text("plan content", encoding="utf-8")
        (tpl / "speckit.specify.md").write_text("spec content", encoding="utf-8")

        i = _StubIntegration()
        # Patch on the class: setup() resolves templates_dir via self,
        # and monkeypatch restores the original after the test.
        monkeypatch.setattr(type(i), "templates_dir", lambda self: tpl)

        project = tmp_path / "project"
        project.mkdir()
        created = i.setup(project, IntegrationManifest("stub", project))
        assert len(created) == 2
        assert (project / ".stub" / "commands" / "speckit.plan.md").exists()
        assert (project / ".stub" / "commands" / "speckit.specify.md").exists()

    def test_install_delegates_to_setup(self, tmp_path):
        i = _StubIntegration()
        manifest = IntegrationManifest("stub", tmp_path)
        result = i.install(tmp_path, manifest)
        assert result == []  # no templates dir → empty

    def test_uninstall_delegates_to_teardown(self, tmp_path):
        i = _StubIntegration()
        manifest = IntegrationManifest("stub", tmp_path)
        removed, skipped = i.uninstall(tmp_path, manifest)
        assert removed == []
        assert skipped == []
|
||||
|
||||
|
||||
class TestMarkdownIntegration:
    """MarkdownIntegration sits correctly in the class hierarchy."""

    def test_is_subclass_of_base(self):
        assert issubclass(MarkdownIntegration, IntegrationBase)

    def test_stub_is_markdown(self):
        stub = _StubIntegration()
        assert isinstance(stub, MarkdownIntegration)
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# IntegrationManifest
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
|
||||
class TestManifestRecordFile:
    """record_file / record_existing write files and track their hashes."""

    def test_record_file_writes_and_hashes(self, tmp_path):
        manifest = IntegrationManifest("test", tmp_path)
        body = "hello world"
        written = manifest.record_file("a/b.txt", body)

        assert written == tmp_path / "a" / "b.txt"
        assert written.read_text(encoding="utf-8") == body
        assert manifest.files["a/b.txt"] == hashlib.sha256(body.encode()).hexdigest()

    def test_record_file_bytes(self, tmp_path):
        manifest = IntegrationManifest("test", tmp_path)
        payload = b"\x00\x01\x02"
        written = manifest.record_file("bin.dat", payload)
        assert written.read_bytes() == payload
        assert manifest.files["bin.dat"] == hashlib.sha256(payload).hexdigest()

    def test_record_existing(self, tmp_path):
        target = tmp_path / "existing.txt"
        target.write_text("content", encoding="utf-8")
        manifest = IntegrationManifest("test", tmp_path)
        manifest.record_existing("existing.txt")
        assert manifest.files["existing.txt"] == _sha256(target)
|
||||
|
||||
|
||||
class TestManifestPathTraversal:
    """Manifest APIs refuse paths that would escape the project root."""

    def test_record_file_rejects_parent_traversal(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        with pytest.raises(ValueError, match="outside"):
            m.record_file("../escape.txt", "bad")

    def test_record_file_rejects_absolute_path(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        with pytest.raises(ValueError, match="Absolute paths"):
            m.record_file("/tmp/escape.txt", "bad")

    def test_record_existing_rejects_parent_traversal(self, tmp_path):
        # Create a file outside the project root
        escape = tmp_path.parent / "escape.txt"
        escape.write_text("evil", encoding="utf-8")
        try:
            m = IntegrationManifest("test", tmp_path)
            with pytest.raises(ValueError, match="outside"):
                m.record_existing("../escape.txt")
        finally:
            # tmp_path.parent is shared across tests — always clean up.
            escape.unlink(missing_ok=True)

    def test_uninstall_skips_traversal_paths(self, tmp_path):
        """If a manifest is corrupted with traversal paths, uninstall ignores them."""
        m = IntegrationManifest("test", tmp_path)
        m.record_file("safe.txt", "good")
        # Manually inject a traversal path into the manifest
        m._files["../outside.txt"] = "fakehash"
        m.save()

        removed, skipped = m.uninstall()
        # Only the safe file should have been removed
        assert len(removed) == 1
        assert removed[0].name == "safe.txt"
|
||||
|
||||
|
||||
class TestManifestCheckModified:
    """check_modified() flags hash mismatches but ignores deletions."""

    def test_unmodified_file(self, tmp_path):
        manifest = IntegrationManifest("test", tmp_path)
        manifest.record_file("f.txt", "original")
        assert manifest.check_modified() == []

    def test_modified_file(self, tmp_path):
        manifest = IntegrationManifest("test", tmp_path)
        manifest.record_file("f.txt", "original")
        (tmp_path / "f.txt").write_text("changed", encoding="utf-8")
        assert manifest.check_modified() == ["f.txt"]

    def test_deleted_file_not_reported(self, tmp_path):
        manifest = IntegrationManifest("test", tmp_path)
        manifest.record_file("f.txt", "original")
        (tmp_path / "f.txt").unlink()
        assert manifest.check_modified() == []
|
||||
|
||||
|
||||
class TestManifestUninstall:
    """uninstall() removes matching files, skips edits, prunes empty dirs."""

    def test_removes_unmodified(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        m.record_file("d/f.txt", "content")
        m.save()

        removed, skipped = m.uninstall()
        assert len(removed) == 1
        assert not (tmp_path / "d" / "f.txt").exists()
        # Parent dir cleaned up because empty
        assert not (tmp_path / "d").exists()
        assert skipped == []

    def test_skips_modified(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        m.record_file("f.txt", "original")
        m.save()
        (tmp_path / "f.txt").write_text("modified", encoding="utf-8")

        removed, skipped = m.uninstall()
        assert removed == []
        assert len(skipped) == 1
        assert (tmp_path / "f.txt").exists()

    def test_force_removes_modified(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        m.record_file("f.txt", "original")
        m.save()
        (tmp_path / "f.txt").write_text("modified", encoding="utf-8")

        removed, skipped = m.uninstall(force=True)
        assert len(removed) == 1
        assert skipped == []
        assert not (tmp_path / "f.txt").exists()

    def test_already_deleted_file(self, tmp_path):
        # A tracked file that vanished is neither removed nor skipped.
        m = IntegrationManifest("test", tmp_path)
        m.record_file("f.txt", "content")
        m.save()
        (tmp_path / "f.txt").unlink()

        removed, skipped = m.uninstall()
        assert removed == []
        assert skipped == []

    def test_removes_manifest_file(self, tmp_path):
        m = IntegrationManifest("test", tmp_path, version="1.0")
        m.record_file("f.txt", "content")
        m.save()
        assert m.manifest_path.exists()

        m.uninstall()
        assert not m.manifest_path.exists()

    def test_cleans_empty_parent_dirs(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        m.record_file("a/b/c/f.txt", "content")
        m.save()

        m.uninstall()
        assert not (tmp_path / "a" / "b" / "c").exists()
        assert not (tmp_path / "a" / "b").exists()
        assert not (tmp_path / "a").exists()

    def test_preserves_nonempty_parent_dirs(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        m.record_file("a/b/tracked.txt", "content")
        # Create an untracked sibling
        (tmp_path / "a" / "b" / "other.txt").write_text("keep", encoding="utf-8")
        m.save()

        m.uninstall()
        assert not (tmp_path / "a" / "b" / "tracked.txt").exists()
        assert (tmp_path / "a" / "b" / "other.txt").exists()
        assert (tmp_path / "a" / "b").is_dir()
|
||||
|
||||
|
||||
class TestManifestPersistence:
    """save()/load() round-trip the manifest JSON on disk."""

    def test_save_and_load_roundtrip(self, tmp_path):
        m = IntegrationManifest("myagent", tmp_path, version="2.0.1")
        m.record_file("dir/file.md", "# Hello")
        m.save()

        loaded = IntegrationManifest.load("myagent", tmp_path)
        assert loaded.key == "myagent"
        assert loaded.version == "2.0.1"
        assert loaded.files == m.files
        assert loaded._installed_at == m._installed_at

    def test_manifest_path(self, tmp_path):
        m = IntegrationManifest("copilot", tmp_path)
        assert m.manifest_path == tmp_path / ".specify" / "integrations" / "copilot.manifest.json"

    def test_load_missing_raises(self, tmp_path):
        with pytest.raises(FileNotFoundError):
            IntegrationManifest.load("nonexistent", tmp_path)

    def test_save_creates_directories(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        m.record_file("f.txt", "content")
        path = m.save()
        assert path.exists()
        data = json.loads(path.read_text(encoding="utf-8"))
        assert data["integration"] == "test"
        assert "installed_at" in data
        assert "f.txt" in data["files"]

    def test_save_preserves_installed_at(self, tmp_path):
        m = IntegrationManifest("test", tmp_path)
        m.record_file("f.txt", "content")
        m.save()
        first_ts = m._installed_at

        # Save again — timestamp should not change
        m.save()
        assert m._installed_at == first_ts
|
||||
|
||||
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
# Registry
|
||||
# ═══════════════════════════════════════════════════════════════════════════
|
||||
|
||||
|
||||
class TestRegistry:
    """Global registry helpers: _register and get_integration."""

    def test_registry_starts_empty(self):
        # Other tests may have populated the registry already; the
        # invariant we can assert here is simply that it is a dict.
        assert isinstance(INTEGRATION_REGISTRY, dict)

    def test_register_and_get(self):
        stub = _StubIntegration()
        _register(stub)
        try:
            assert get_integration("stub") is stub
        finally:
            INTEGRATION_REGISTRY.pop("stub", None)

    def test_get_missing_returns_none(self):
        assert get_integration("nonexistent-xyz") is None

    def test_register_empty_key_raises(self):
        class EmptyKey(MarkdownIntegration):
            key = ""

        integration = EmptyKey()
        with pytest.raises(ValueError, match="empty key"):
            _register(integration)

    def test_register_duplicate_raises(self):
        _register(_StubIntegration())
        try:
            with pytest.raises(KeyError, match="already registered"):
                _register(_StubIntegration())
        finally:
            INTEGRATION_REGISTRY.pop("stub", None)
|
||||
|
||||
|
||||
class TestManifestLoadValidation:
    """load() rejects structurally invalid manifest JSON."""

    @staticmethod
    def _write_manifest(tmp_path, text):
        # Drop a raw manifest file exactly where load("bad", ...) looks.
        path = tmp_path / ".specify" / "integrations" / "bad.manifest.json"
        path.parent.mkdir(parents=True)
        path.write_text(text, encoding="utf-8")

    def test_load_non_dict_raises(self, tmp_path):
        self._write_manifest(tmp_path, '"just a string"')
        with pytest.raises(ValueError, match="JSON object"):
            IntegrationManifest.load("bad", tmp_path)

    def test_load_bad_files_type_raises(self, tmp_path):
        self._write_manifest(tmp_path, json.dumps({"files": ["not", "a", "dict"]}))
        with pytest.raises(ValueError, match="mapping"):
            IntegrationManifest.load("bad", tmp_path)

    def test_load_bad_files_values_raises(self, tmp_path):
        self._write_manifest(tmp_path, json.dumps({"files": {"a.txt": 123}}))
        with pytest.raises(ValueError, match="mapping"):
            IntegrationManifest.load("bad", tmp_path)
|
||||
Reference in New Issue
Block a user