mirror of
https://github.com/github/spec-kit.git
synced 2026-04-02 18:53:09 +00:00
Compare commits
18 Commits
v0.4.4
...
copilot/re
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
ea445f0680 | ||
|
|
846c7f2afc | ||
|
|
28de25bb63 | ||
|
|
663d679f3b | ||
|
|
b1832c9477 | ||
|
|
a858c1d6da | ||
|
|
d9ce7c1fc0 | ||
|
|
4f9d966beb | ||
|
|
b44ffc0101 | ||
|
|
8e14ab1935 | ||
|
|
0945df9ec8 | ||
|
|
ea60efe2fa | ||
|
|
97b9f0f00d | ||
|
|
4df6d963dc | ||
|
|
682ffbfc0d | ||
|
|
b606b38512 | ||
|
|
255371d367 | ||
|
|
3113b72d6f |
62
.github/workflows/release.yml
vendored
62
.github/workflows/release.yml
vendored
@@ -27,35 +27,63 @@ jobs:
|
||||
- name: Check if release already exists
|
||||
id: check_release
|
||||
run: |
|
||||
chmod +x .github/workflows/scripts/check-release-exists.sh
|
||||
.github/workflows/scripts/check-release-exists.sh ${{ steps.version.outputs.tag }}
|
||||
VERSION="${{ steps.version.outputs.tag }}"
|
||||
if gh release view "$VERSION" >/dev/null 2>&1; then
|
||||
echo "exists=true" >> $GITHUB_OUTPUT
|
||||
echo "Release $VERSION already exists, skipping..."
|
||||
else
|
||||
echo "exists=false" >> $GITHUB_OUTPUT
|
||||
echo "Release $VERSION does not exist, proceeding..."
|
||||
fi
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Create release package variants
|
||||
if: steps.check_release.outputs.exists == 'false'
|
||||
run: |
|
||||
chmod +x .github/workflows/scripts/create-release-packages.sh
|
||||
.github/workflows/scripts/create-release-packages.sh ${{ steps.version.outputs.tag }}
|
||||
|
||||
- name: Generate release notes
|
||||
if: steps.check_release.outputs.exists == 'false'
|
||||
id: release_notes
|
||||
run: |
|
||||
chmod +x .github/workflows/scripts/generate-release-notes.sh
|
||||
# Get the previous tag for changelog generation
|
||||
PREVIOUS_TAG=$(git describe --tags --abbrev=0 ${{ steps.version.outputs.tag }}^ 2>/dev/null || echo "")
|
||||
# Default to v0.0.0 if no previous tag is found (e.g., first release)
|
||||
VERSION="${{ steps.version.outputs.tag }}"
|
||||
VERSION_NO_V=${VERSION#v}
|
||||
|
||||
# Find previous tag
|
||||
PREVIOUS_TAG=$(git tag -l 'v*' --sort=-version:refname | grep -v "^${VERSION}$" | head -n 1)
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
PREVIOUS_TAG="v0.0.0"
|
||||
PREVIOUS_TAG=""
|
||||
fi
|
||||
.github/workflows/scripts/generate-release-notes.sh ${{ steps.version.outputs.tag }} "$PREVIOUS_TAG"
|
||||
|
||||
# Get commits since previous tag
|
||||
if [ -z "$PREVIOUS_TAG" ]; then
|
||||
COMMIT_COUNT=$(git rev-list --count HEAD)
|
||||
if [ "$COMMIT_COUNT" -gt 20 ]; then
|
||||
COMMITS=$(git log --oneline --pretty=format:"- %s" --no-merges HEAD~20..HEAD)
|
||||
else
|
||||
COMMITS=$(git log --oneline --pretty=format:"- %s" --no-merges)
|
||||
fi
|
||||
else
|
||||
COMMITS=$(git log --oneline --pretty=format:"- %s" --no-merges "$PREVIOUS_TAG"..HEAD)
|
||||
fi
|
||||
|
||||
cat > release_notes.md << NOTES_EOF
|
||||
## Install
|
||||
|
||||
\`\`\`bash
|
||||
uv tool install specify-cli --from git+https://github.com/github/spec-kit.git@${VERSION}
|
||||
specify init my-project
|
||||
\`\`\`
|
||||
|
||||
NOTES_EOF
|
||||
|
||||
echo "## What's Changed" >> release_notes.md
|
||||
echo "" >> release_notes.md
|
||||
echo "$COMMITS" >> release_notes.md
|
||||
|
||||
- name: Create GitHub Release
|
||||
if: steps.check_release.outputs.exists == 'false'
|
||||
run: |
|
||||
chmod +x .github/workflows/scripts/create-github-release.sh
|
||||
.github/workflows/scripts/create-github-release.sh ${{ steps.version.outputs.tag }}
|
||||
VERSION="${{ steps.version.outputs.tag }}"
|
||||
VERSION_NO_V=${VERSION#v}
|
||||
gh release create "$VERSION" \
|
||||
--title "Spec Kit - $VERSION_NO_V" \
|
||||
--notes-file release_notes.md
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
|
||||
@@ -1,21 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# check-release-exists.sh
|
||||
# Check if a GitHub release already exists for the given version
|
||||
# Usage: check-release-exists.sh <version>
|
||||
|
||||
if [[ $# -ne 1 ]]; then
|
||||
echo "Usage: $0 <version>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VERSION="$1"
|
||||
|
||||
if gh release view "$VERSION" >/dev/null 2>&1; then
|
||||
echo "exists=true" >> $GITHUB_OUTPUT
|
||||
echo "Release $VERSION already exists, skipping..."
|
||||
else
|
||||
echo "exists=false" >> $GITHUB_OUTPUT
|
||||
echo "Release $VERSION does not exist, proceeding..."
|
||||
fi
|
||||
@@ -1,72 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
set -euo pipefail
|
||||
|
||||
# create-github-release.sh
|
||||
# Create a GitHub release with all template zip files
|
||||
# Usage: create-github-release.sh <version>
|
||||
|
||||
if [[ $# -ne 1 ]]; then
|
||||
echo "Usage: $0 <version>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
VERSION="$1"
|
||||
|
||||
# Remove 'v' prefix from version for release title
|
||||
VERSION_NO_V=${VERSION#v}
|
||||
|
||||
gh release create "$VERSION" \
|
||||
.genreleases/spec-kit-template-copilot-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-copilot-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-claude-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-claude-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-gemini-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-gemini-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-cursor-agent-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-cursor-agent-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-opencode-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-opencode-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-qwen-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-qwen-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-windsurf-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-windsurf-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-junie-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-junie-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-codex-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-codex-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-kilocode-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-kilocode-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-auggie-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-auggie-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-roo-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-roo-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-codebuddy-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-codebuddy-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-qodercli-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-qodercli-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-amp-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-amp-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-shai-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-shai-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-tabnine-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-tabnine-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-kiro-cli-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-kiro-cli-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-agy-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-agy-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-bob-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-bob-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-vibe-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-vibe-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-kimi-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-kimi-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-trae-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-trae-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-pi-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-pi-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-iflow-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-iflow-ps-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-generic-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-generic-ps-"$VERSION".zip \
|
||||
--title "Spec Kit Templates - $VERSION_NO_V" \
|
||||
--notes-file release_notes.md
|
||||
@@ -1,560 +0,0 @@
|
||||
#!/usr/bin/env pwsh
|
||||
#requires -Version 7.0
|
||||
|
||||
<#
|
||||
.SYNOPSIS
|
||||
Build Spec Kit template release archives for each supported AI assistant and script type.
|
||||
|
||||
.DESCRIPTION
|
||||
create-release-packages.ps1 (workflow-local)
|
||||
Build Spec Kit template release archives for each supported AI assistant and script type.
|
||||
|
||||
.PARAMETER Version
|
||||
Version string with leading 'v' (e.g., v0.2.0)
|
||||
|
||||
.PARAMETER Agents
|
||||
Comma or space separated subset of agents to build (default: all)
|
||||
Valid agents: claude, gemini, copilot, cursor-agent, qwen, opencode, windsurf, junie, codex, kilocode, auggie, roo, codebuddy, amp, kiro-cli, bob, qodercli, shai, tabnine, agy, vibe, kimi, trae, pi, iflow, generic
|
||||
|
||||
.PARAMETER Scripts
|
||||
Comma or space separated subset of script types to build (default: both)
|
||||
Valid scripts: sh, ps
|
||||
|
||||
.EXAMPLE
|
||||
.\create-release-packages.ps1 -Version v0.2.0
|
||||
|
||||
.EXAMPLE
|
||||
.\create-release-packages.ps1 -Version v0.2.0 -Agents claude,copilot -Scripts sh
|
||||
|
||||
.EXAMPLE
|
||||
.\create-release-packages.ps1 -Version v0.2.0 -Agents claude -Scripts ps
|
||||
#>
|
||||
|
||||
param(
|
||||
[Parameter(Mandatory=$true, Position=0)]
|
||||
[string]$Version,
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[string]$Agents = "",
|
||||
|
||||
[Parameter(Mandatory=$false)]
|
||||
[string]$Scripts = ""
|
||||
)
|
||||
|
||||
$ErrorActionPreference = "Stop"
|
||||
|
||||
# Validate version format
|
||||
if ($Version -notmatch '^v\d+\.\d+\.\d+$') {
|
||||
Write-Error "Version must look like v0.0.0"
|
||||
exit 1
|
||||
}
|
||||
|
||||
Write-Host "Building release packages for $Version"
|
||||
|
||||
# Create and use .genreleases directory for all build artifacts
|
||||
$GenReleasesDir = ".genreleases"
|
||||
if (Test-Path $GenReleasesDir) {
|
||||
Remove-Item -Path $GenReleasesDir -Recurse -Force -ErrorAction SilentlyContinue
|
||||
}
|
||||
New-Item -ItemType Directory -Path $GenReleasesDir -Force | Out-Null
|
||||
|
||||
function Rewrite-Paths {
|
||||
param([string]$Content)
|
||||
|
||||
$Content = $Content -replace '(/?)\bmemory/', '.specify/memory/'
|
||||
$Content = $Content -replace '(/?)\bscripts/', '.specify/scripts/'
|
||||
$Content = $Content -replace '(/?)\btemplates/', '.specify/templates/'
|
||||
return $Content
|
||||
}
|
||||
|
||||
function Generate-Commands {
|
||||
param(
|
||||
[string]$Agent,
|
||||
[string]$Extension,
|
||||
[string]$ArgFormat,
|
||||
[string]$OutputDir,
|
||||
[string]$ScriptVariant
|
||||
)
|
||||
|
||||
New-Item -ItemType Directory -Path $OutputDir -Force | Out-Null
|
||||
|
||||
$templates = Get-ChildItem -Path "templates/commands/*.md" -File -ErrorAction SilentlyContinue
|
||||
|
||||
foreach ($template in $templates) {
|
||||
$name = [System.IO.Path]::GetFileNameWithoutExtension($template.Name)
|
||||
|
||||
# Read file content and normalize line endings
|
||||
$fileContent = (Get-Content -Path $template.FullName -Raw) -replace "`r`n", "`n"
|
||||
|
||||
# Extract description from YAML frontmatter
|
||||
$description = ""
|
||||
if ($fileContent -match '(?m)^description:\s*(.+)$') {
|
||||
$description = $matches[1]
|
||||
}
|
||||
|
||||
# Extract script command from YAML frontmatter
|
||||
$scriptCommand = ""
|
||||
if ($fileContent -match "(?m)^\s*${ScriptVariant}:\s*(.+)$") {
|
||||
$scriptCommand = $matches[1]
|
||||
}
|
||||
|
||||
if ([string]::IsNullOrEmpty($scriptCommand)) {
|
||||
Write-Warning "No script command found for $ScriptVariant in $($template.Name)"
|
||||
$scriptCommand = "(Missing script command for $ScriptVariant)"
|
||||
}
|
||||
|
||||
# Extract agent_script command from YAML frontmatter if present
|
||||
$agentScriptCommand = ""
|
||||
if ($fileContent -match "(?ms)agent_scripts:.*?^\s*${ScriptVariant}:\s*(.+?)$") {
|
||||
$agentScriptCommand = $matches[1].Trim()
|
||||
}
|
||||
|
||||
# Replace {SCRIPT} placeholder with the script command
|
||||
$body = $fileContent -replace '\{SCRIPT\}', $scriptCommand
|
||||
|
||||
# Replace {AGENT_SCRIPT} placeholder with the agent script command if found
|
||||
if (-not [string]::IsNullOrEmpty($agentScriptCommand)) {
|
||||
$body = $body -replace '\{AGENT_SCRIPT\}', $agentScriptCommand
|
||||
}
|
||||
|
||||
# Remove the scripts: and agent_scripts: sections from frontmatter
|
||||
$lines = $body -split "`n"
|
||||
$outputLines = @()
|
||||
$inFrontmatter = $false
|
||||
$skipScripts = $false
|
||||
$dashCount = 0
|
||||
|
||||
foreach ($line in $lines) {
|
||||
if ($line -match '^---$') {
|
||||
$outputLines += $line
|
||||
$dashCount++
|
||||
if ($dashCount -eq 1) {
|
||||
$inFrontmatter = $true
|
||||
} else {
|
||||
$inFrontmatter = $false
|
||||
}
|
||||
continue
|
||||
}
|
||||
|
||||
if ($inFrontmatter) {
|
||||
if ($line -match '^(scripts|agent_scripts):$') {
|
||||
$skipScripts = $true
|
||||
continue
|
||||
}
|
||||
if ($line -match '^[a-zA-Z].*:' -and $skipScripts) {
|
||||
$skipScripts = $false
|
||||
}
|
||||
if ($skipScripts -and $line -match '^\s+') {
|
||||
continue
|
||||
}
|
||||
}
|
||||
|
||||
$outputLines += $line
|
||||
}
|
||||
|
||||
$body = $outputLines -join "`n"
|
||||
|
||||
# Apply other substitutions
|
||||
$body = $body -replace '\{ARGS\}', $ArgFormat
|
||||
$body = $body -replace '__AGENT__', $Agent
|
||||
$body = Rewrite-Paths -Content $body
|
||||
|
||||
# Generate output file based on extension
|
||||
$outputFile = Join-Path $OutputDir "speckit.$name.$Extension"
|
||||
|
||||
switch ($Extension) {
|
||||
'toml' {
|
||||
$body = $body -replace '\\', '\\'
|
||||
$output = "description = `"$description`"`n`nprompt = `"`"`"`n$body`n`"`"`""
|
||||
Set-Content -Path $outputFile -Value $output -NoNewline
|
||||
}
|
||||
'md' {
|
||||
Set-Content -Path $outputFile -Value $body -NoNewline
|
||||
}
|
||||
'agent.md' {
|
||||
Set-Content -Path $outputFile -Value $body -NoNewline
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
function Generate-CopilotPrompts {
|
||||
param(
|
||||
[string]$AgentsDir,
|
||||
[string]$PromptsDir
|
||||
)
|
||||
|
||||
New-Item -ItemType Directory -Path $PromptsDir -Force | Out-Null
|
||||
|
||||
$agentFiles = Get-ChildItem -Path "$AgentsDir/speckit.*.agent.md" -File -ErrorAction SilentlyContinue
|
||||
|
||||
foreach ($agentFile in $agentFiles) {
|
||||
$basename = $agentFile.Name -replace '\.agent\.md$', ''
|
||||
$promptFile = Join-Path $PromptsDir "$basename.prompt.md"
|
||||
|
||||
$content = @"
|
||||
---
|
||||
agent: $basename
|
||||
---
|
||||
"@
|
||||
Set-Content -Path $promptFile -Value $content
|
||||
}
|
||||
}
|
||||
|
||||
# Create skills in <skills_dir>\<name>\SKILL.md format.
|
||||
# Skills use hyphenated names (e.g. speckit-plan).
|
||||
#
|
||||
# Technical debt note:
|
||||
# Keep SKILL.md frontmatter aligned with `install_ai_skills()` and extension
|
||||
# overrides (at minimum: name/description/compatibility/metadata.{author,source}).
|
||||
function New-Skills {
|
||||
param(
|
||||
[string]$SkillsDir,
|
||||
[string]$ScriptVariant,
|
||||
[string]$AgentName,
|
||||
[string]$Separator = '-'
|
||||
)
|
||||
|
||||
$templates = Get-ChildItem -Path "templates/commands/*.md" -File -ErrorAction SilentlyContinue
|
||||
|
||||
foreach ($template in $templates) {
|
||||
$name = [System.IO.Path]::GetFileNameWithoutExtension($template.Name)
|
||||
$skillName = "speckit${Separator}$name"
|
||||
$skillDir = Join-Path $SkillsDir $skillName
|
||||
New-Item -ItemType Directory -Force -Path $skillDir | Out-Null
|
||||
|
||||
$fileContent = (Get-Content -Path $template.FullName -Raw) -replace "`r`n", "`n"
|
||||
|
||||
# Extract description
|
||||
$description = "Spec Kit: $name workflow"
|
||||
if ($fileContent -match '(?m)^description:\s*(.+)$') {
|
||||
$description = $matches[1]
|
||||
}
|
||||
|
||||
# Extract script command
|
||||
$scriptCommand = "(Missing script command for $ScriptVariant)"
|
||||
if ($fileContent -match "(?m)^\s*${ScriptVariant}:\s*(.+)$") {
|
||||
$scriptCommand = $matches[1]
|
||||
}
|
||||
|
||||
# Extract agent_script command from frontmatter if present
|
||||
$agentScriptCommand = ""
|
||||
if ($fileContent -match "(?ms)agent_scripts:.*?^\s*${ScriptVariant}:\s*(.+?)$") {
|
||||
$agentScriptCommand = $matches[1].Trim()
|
||||
}
|
||||
|
||||
# Replace {SCRIPT}, strip scripts sections, rewrite paths
|
||||
$body = $fileContent -replace '\{SCRIPT\}', $scriptCommand
|
||||
if (-not [string]::IsNullOrEmpty($agentScriptCommand)) {
|
||||
$body = $body -replace '\{AGENT_SCRIPT\}', $agentScriptCommand
|
||||
}
|
||||
|
||||
$lines = $body -split "`n"
|
||||
$outputLines = @()
|
||||
$inFrontmatter = $false
|
||||
$skipScripts = $false
|
||||
$dashCount = 0
|
||||
|
||||
foreach ($line in $lines) {
|
||||
if ($line -match '^---$') {
|
||||
$outputLines += $line
|
||||
$dashCount++
|
||||
$inFrontmatter = ($dashCount -eq 1)
|
||||
continue
|
||||
}
|
||||
if ($inFrontmatter) {
|
||||
if ($line -match '^(scripts|agent_scripts):$') { $skipScripts = $true; continue }
|
||||
if ($line -match '^[a-zA-Z].*:' -and $skipScripts) { $skipScripts = $false }
|
||||
if ($skipScripts -and $line -match '^\s+') { continue }
|
||||
}
|
||||
$outputLines += $line
|
||||
}
|
||||
|
||||
$body = $outputLines -join "`n"
|
||||
$body = $body -replace '\{ARGS\}', '$ARGUMENTS'
|
||||
$body = $body -replace '__AGENT__', $AgentName
|
||||
$body = Rewrite-Paths -Content $body
|
||||
|
||||
# Strip existing frontmatter, keep only body
|
||||
$templateBody = ""
|
||||
$fmCount = 0
|
||||
$inBody = $false
|
||||
foreach ($line in ($body -split "`n")) {
|
||||
if ($line -match '^---$') {
|
||||
$fmCount++
|
||||
if ($fmCount -eq 2) { $inBody = $true }
|
||||
continue
|
||||
}
|
||||
if ($inBody) { $templateBody += "$line`n" }
|
||||
}
|
||||
|
||||
$skillContent = "---`nname: `"$skillName`"`ndescription: `"$description`"`ncompatibility: `"Requires spec-kit project structure with .specify/ directory`"`nmetadata:`n author: `"github-spec-kit`"`n source: `"templates/commands/$name.md`"`n---`n`n$templateBody"
|
||||
Set-Content -Path (Join-Path $skillDir "SKILL.md") -Value $skillContent -NoNewline
|
||||
}
|
||||
}
|
||||
|
||||
function Build-Variant {
|
||||
param(
|
||||
[string]$Agent,
|
||||
[string]$Script
|
||||
)
|
||||
|
||||
$baseDir = Join-Path $GenReleasesDir "sdd-${Agent}-package-${Script}"
|
||||
Write-Host "Building $Agent ($Script) package..."
|
||||
New-Item -ItemType Directory -Path $baseDir -Force | Out-Null
|
||||
|
||||
# Copy base structure but filter scripts by variant
|
||||
$specDir = Join-Path $baseDir ".specify"
|
||||
New-Item -ItemType Directory -Path $specDir -Force | Out-Null
|
||||
|
||||
# Copy memory directory
|
||||
if (Test-Path "memory") {
|
||||
Copy-Item -Path "memory" -Destination $specDir -Recurse -Force
|
||||
Write-Host "Copied memory -> .specify"
|
||||
}
|
||||
|
||||
# Only copy the relevant script variant directory
|
||||
if (Test-Path "scripts") {
|
||||
$scriptsDestDir = Join-Path $specDir "scripts"
|
||||
New-Item -ItemType Directory -Path $scriptsDestDir -Force | Out-Null
|
||||
|
||||
switch ($Script) {
|
||||
'sh' {
|
||||
if (Test-Path "scripts/bash") {
|
||||
Copy-Item -Path "scripts/bash" -Destination $scriptsDestDir -Recurse -Force
|
||||
Write-Host "Copied scripts/bash -> .specify/scripts"
|
||||
}
|
||||
}
|
||||
'ps' {
|
||||
if (Test-Path "scripts/powershell") {
|
||||
Copy-Item -Path "scripts/powershell" -Destination $scriptsDestDir -Recurse -Force
|
||||
Write-Host "Copied scripts/powershell -> .specify/scripts"
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
Get-ChildItem -Path "scripts" -File -ErrorAction SilentlyContinue | ForEach-Object {
|
||||
Copy-Item -Path $_.FullName -Destination $scriptsDestDir -Force
|
||||
}
|
||||
}
|
||||
|
||||
# Copy templates (excluding commands directory and vscode-settings.json)
|
||||
if (Test-Path "templates") {
|
||||
$templatesDestDir = Join-Path $specDir "templates"
|
||||
New-Item -ItemType Directory -Path $templatesDestDir -Force | Out-Null
|
||||
|
||||
Get-ChildItem -Path "templates" -Recurse -File | Where-Object {
|
||||
$_.FullName -notmatch 'templates[/\\]commands[/\\]' -and $_.Name -ne 'vscode-settings.json'
|
||||
} | ForEach-Object {
|
||||
$relativePath = $_.FullName.Substring((Resolve-Path "templates").Path.Length + 1)
|
||||
$destFile = Join-Path $templatesDestDir $relativePath
|
||||
$destFileDir = Split-Path $destFile -Parent
|
||||
New-Item -ItemType Directory -Path $destFileDir -Force | Out-Null
|
||||
Copy-Item -Path $_.FullName -Destination $destFile -Force
|
||||
}
|
||||
Write-Host "Copied templates -> .specify/templates"
|
||||
}
|
||||
|
||||
# Generate agent-specific command files
|
||||
switch ($Agent) {
|
||||
'claude' {
|
||||
$cmdDir = Join-Path $baseDir ".claude/commands"
|
||||
Generate-Commands -Agent 'claude' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'gemini' {
|
||||
$cmdDir = Join-Path $baseDir ".gemini/commands"
|
||||
Generate-Commands -Agent 'gemini' -Extension 'toml' -ArgFormat '{{args}}' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
if (Test-Path "agent_templates/gemini/GEMINI.md") {
|
||||
Copy-Item -Path "agent_templates/gemini/GEMINI.md" -Destination (Join-Path $baseDir "GEMINI.md")
|
||||
}
|
||||
}
|
||||
'copilot' {
|
||||
$agentsDir = Join-Path $baseDir ".github/agents"
|
||||
Generate-Commands -Agent 'copilot' -Extension 'agent.md' -ArgFormat '$ARGUMENTS' -OutputDir $agentsDir -ScriptVariant $Script
|
||||
|
||||
$promptsDir = Join-Path $baseDir ".github/prompts"
|
||||
Generate-CopilotPrompts -AgentsDir $agentsDir -PromptsDir $promptsDir
|
||||
|
||||
$vscodeDir = Join-Path $baseDir ".vscode"
|
||||
New-Item -ItemType Directory -Path $vscodeDir -Force | Out-Null
|
||||
if (Test-Path "templates/vscode-settings.json") {
|
||||
Copy-Item -Path "templates/vscode-settings.json" -Destination (Join-Path $vscodeDir "settings.json")
|
||||
}
|
||||
}
|
||||
'cursor-agent' {
|
||||
$cmdDir = Join-Path $baseDir ".cursor/commands"
|
||||
Generate-Commands -Agent 'cursor-agent' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'qwen' {
|
||||
$cmdDir = Join-Path $baseDir ".qwen/commands"
|
||||
Generate-Commands -Agent 'qwen' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
if (Test-Path "agent_templates/qwen/QWEN.md") {
|
||||
Copy-Item -Path "agent_templates/qwen/QWEN.md" -Destination (Join-Path $baseDir "QWEN.md")
|
||||
}
|
||||
}
|
||||
'opencode' {
|
||||
$cmdDir = Join-Path $baseDir ".opencode/command"
|
||||
Generate-Commands -Agent 'opencode' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'windsurf' {
|
||||
$cmdDir = Join-Path $baseDir ".windsurf/workflows"
|
||||
Generate-Commands -Agent 'windsurf' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'junie' {
|
||||
$cmdDir = Join-Path $baseDir ".junie/commands"
|
||||
Generate-Commands -Agent 'junie' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'codex' {
|
||||
$skillsDir = Join-Path $baseDir ".agents/skills"
|
||||
New-Item -ItemType Directory -Force -Path $skillsDir | Out-Null
|
||||
New-Skills -SkillsDir $skillsDir -ScriptVariant $Script -AgentName 'codex' -Separator '-'
|
||||
}
|
||||
'kilocode' {
|
||||
$cmdDir = Join-Path $baseDir ".kilocode/workflows"
|
||||
Generate-Commands -Agent 'kilocode' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'auggie' {
|
||||
$cmdDir = Join-Path $baseDir ".augment/commands"
|
||||
Generate-Commands -Agent 'auggie' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'roo' {
|
||||
$cmdDir = Join-Path $baseDir ".roo/commands"
|
||||
Generate-Commands -Agent 'roo' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'codebuddy' {
|
||||
$cmdDir = Join-Path $baseDir ".codebuddy/commands"
|
||||
Generate-Commands -Agent 'codebuddy' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'amp' {
|
||||
$cmdDir = Join-Path $baseDir ".agents/commands"
|
||||
Generate-Commands -Agent 'amp' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'kiro-cli' {
|
||||
$cmdDir = Join-Path $baseDir ".kiro/prompts"
|
||||
Generate-Commands -Agent 'kiro-cli' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'bob' {
|
||||
$cmdDir = Join-Path $baseDir ".bob/commands"
|
||||
Generate-Commands -Agent 'bob' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'qodercli' {
|
||||
$cmdDir = Join-Path $baseDir ".qoder/commands"
|
||||
Generate-Commands -Agent 'qodercli' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'shai' {
|
||||
$cmdDir = Join-Path $baseDir ".shai/commands"
|
||||
Generate-Commands -Agent 'shai' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'tabnine' {
|
||||
$cmdDir = Join-Path $baseDir ".tabnine/agent/commands"
|
||||
Generate-Commands -Agent 'tabnine' -Extension 'toml' -ArgFormat '{{args}}' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
$tabnineTemplate = Join-Path 'agent_templates' 'tabnine/TABNINE.md'
|
||||
if (Test-Path $tabnineTemplate) { Copy-Item $tabnineTemplate (Join-Path $baseDir 'TABNINE.md') }
|
||||
}
|
||||
'agy' {
|
||||
$cmdDir = Join-Path $baseDir ".agent/commands"
|
||||
Generate-Commands -Agent 'agy' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'vibe' {
|
||||
$cmdDir = Join-Path $baseDir ".vibe/prompts"
|
||||
Generate-Commands -Agent 'vibe' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'kimi' {
|
||||
$skillsDir = Join-Path $baseDir ".kimi/skills"
|
||||
New-Item -ItemType Directory -Force -Path $skillsDir | Out-Null
|
||||
New-Skills -SkillsDir $skillsDir -ScriptVariant $Script -AgentName 'kimi'
|
||||
}
|
||||
'trae' {
|
||||
$rulesDir = Join-Path $baseDir ".trae/rules"
|
||||
New-Item -ItemType Directory -Force -Path $rulesDir | Out-Null
|
||||
Generate-Commands -Agent 'trae' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $rulesDir -ScriptVariant $Script
|
||||
}
|
||||
'pi' {
|
||||
$cmdDir = Join-Path $baseDir ".pi/prompts"
|
||||
Generate-Commands -Agent 'pi' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'iflow' {
|
||||
$cmdDir = Join-Path $baseDir ".iflow/commands"
|
||||
Generate-Commands -Agent 'iflow' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
'generic' {
|
||||
$cmdDir = Join-Path $baseDir ".speckit/commands"
|
||||
Generate-Commands -Agent 'generic' -Extension 'md' -ArgFormat '$ARGUMENTS' -OutputDir $cmdDir -ScriptVariant $Script
|
||||
}
|
||||
default {
|
||||
throw "Unsupported agent '$Agent'."
|
||||
}
|
||||
}
|
||||
|
||||
# Create zip archive
|
||||
$zipFile = Join-Path $GenReleasesDir "spec-kit-template-${Agent}-${Script}-${Version}.zip"
|
||||
Compress-Archive -Path "$baseDir/*" -DestinationPath $zipFile -Force
|
||||
Write-Host "Created $zipFile"
|
||||
}
|
||||
|
||||
# Define all agents and scripts
|
||||
$AllAgents = @('claude', 'gemini', 'copilot', 'cursor-agent', 'qwen', 'opencode', 'windsurf', 'junie', 'codex', 'kilocode', 'auggie', 'roo', 'codebuddy', 'amp', 'kiro-cli', 'bob', 'qodercli', 'shai', 'tabnine', 'agy', 'vibe', 'kimi', 'trae', 'pi', 'iflow', 'generic')
|
||||
$AllScripts = @('sh', 'ps')
|
||||
|
||||
function Normalize-List {
|
||||
param([string]$Value)
|
||||
|
||||
if ([string]::IsNullOrEmpty($Value)) {
|
||||
return @()
|
||||
}
|
||||
|
||||
$items = $Value -split '[,\s]+' | Where-Object { $_ } | Select-Object -Unique
|
||||
return $items
|
||||
}
|
||||
|
||||
function Validate-Subset {
|
||||
param(
|
||||
[string]$Type,
|
||||
[string[]]$Allowed,
|
||||
[string[]]$Items
|
||||
)
|
||||
|
||||
$ok = $true
|
||||
foreach ($item in $Items) {
|
||||
if ($item -notin $Allowed) {
|
||||
Write-Error "Unknown $Type '$item' (allowed: $($Allowed -join ', '))"
|
||||
$ok = $false
|
||||
}
|
||||
}
|
||||
return $ok
|
||||
}
|
||||
|
||||
# Determine agent list
|
||||
if (-not [string]::IsNullOrEmpty($Agents)) {
|
||||
$AgentList = Normalize-List -Value $Agents
|
||||
if (-not (Validate-Subset -Type 'agent' -Allowed $AllAgents -Items $AgentList)) {
|
||||
exit 1
|
||||
}
|
||||
} else {
|
||||
$AgentList = $AllAgents
|
||||
}
|
||||
|
||||
# Determine script list
|
||||
if (-not [string]::IsNullOrEmpty($Scripts)) {
|
||||
$ScriptList = Normalize-List -Value $Scripts
|
||||
if (-not (Validate-Subset -Type 'script' -Allowed $AllScripts -Items $ScriptList)) {
|
||||
exit 1
|
||||
}
|
||||
} else {
|
||||
$ScriptList = $AllScripts
|
||||
}
|
||||
|
||||
Write-Host "Agents: $($AgentList -join ', ')"
|
||||
Write-Host "Scripts: $($ScriptList -join ', ')"
|
||||
|
||||
# Build all variants
|
||||
foreach ($agent in $AgentList) {
|
||||
foreach ($script in $ScriptList) {
|
||||
Build-Variant -Agent $agent -Script $script
|
||||
}
|
||||
}
|
||||
|
||||
Write-Host "`nArchives in ${GenReleasesDir}:"
|
||||
Get-ChildItem -Path $GenReleasesDir -Filter "spec-kit-template-*-${Version}.zip" | ForEach-Object {
|
||||
Write-Host " $($_.Name)"
|
||||
}
|
||||
388
.github/workflows/scripts/create-release-packages.sh
vendored
388
.github/workflows/scripts/create-release-packages.sh
vendored
@@ -1,388 +0,0 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# create-release-packages.sh (workflow-local)
# Build Spec Kit template release archives for each supported AI assistant and script type.
# Usage: .github/workflows/scripts/create-release-packages.sh <version>
# Version argument should include leading 'v'.
# Optionally set AGENTS and/or SCRIPTS env vars to limit what gets built.
# AGENTS : space or comma separated subset of: claude gemini copilot cursor-agent qwen opencode windsurf junie codex kilocode auggie roo codebuddy amp shai tabnine kiro-cli agy bob vibe qodercli kimi trae pi iflow generic (default: all)
# SCRIPTS : space or comma separated subset of: sh ps (default: both)
# Examples:
#   AGENTS=claude SCRIPTS=sh $0 v0.2.0
#   AGENTS="copilot,gemini" $0 v0.2.0
#   SCRIPTS=ps $0 v0.2.0

if [[ $# -ne 1 ]]; then
  echo "Usage: $0 <version-with-v-prefix>" >&2
  exit 1
fi
NEW_VERSION="$1"
# Strict vMAJOR.MINOR.PATCH — pre-release/build-metadata suffixes are rejected.
if [[ ! $NEW_VERSION =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "Version must look like v0.0.0" >&2
  exit 1
fi

echo "Building release packages for $NEW_VERSION"

# Create and use .genreleases directory for all build artifacts
# Override via GENRELEASES_DIR env var (e.g. for tests writing to a temp dir)
GENRELEASES_DIR="${GENRELEASES_DIR:-.genreleases}"

# Guard against unsafe GENRELEASES_DIR values before cleaning
# (the directory contents are deleted below, so reject /, ., .. and any
# value containing a '..' segment).
if [[ -z "$GENRELEASES_DIR" ]]; then
  echo "GENRELEASES_DIR must not be empty" >&2
  exit 1
fi
case "$GENRELEASES_DIR" in
  '/'|'.'|'..')
    echo "Refusing to use unsafe GENRELEASES_DIR value: $GENRELEASES_DIR" >&2
    exit 1
    ;;
esac
if [[ "$GENRELEASES_DIR" == *".."* ]]; then
  echo "Refusing to use GENRELEASES_DIR containing '..' path segments: $GENRELEASES_DIR" >&2
  exit 1
fi

mkdir -p "$GENRELEASES_DIR"
# Empty the directory; '|| true' tolerates the unmatched-glob failure when
# the directory is already empty (nullglob is not set, so rm would fail).
rm -rf "${GENRELEASES_DIR%/}/"* || true
|
||||
|
||||
# Rewrite repo-root resource paths (memory/, scripts/, templates/) to their
# packaged locations under .specify/.  Filter: reads stdin, writes stdout.
# NOTE(review): the optional leading '/' captured by (/?) is dropped from the
# replacement — confirm this is intentional for absolute-style references.
# The final rule collapses ".specify.specify/" produced if a path was
# already rewritten (idempotency guard against double application).
rewrite_paths() {
  sed -E \
    -e 's@(/?)memory/@.specify/memory/@g' \
    -e 's@(/?)scripts/@.specify/scripts/@g' \
    -e 's@(/?)templates/@.specify/templates/@g' \
    -e 's@\.specify\.specify/@.specify/@g'
}
|
||||
|
||||
# Render every template in templates/commands/*.md into an agent-specific
# command file named speckit.<name>.<ext> in $output_dir.
# Arguments:
#   $1 agent          - agent key substituted for __AGent__ placeholder (__AGENT__)
#   $2 ext            - output format: md | toml | agent.md
#   $3 arg_format     - agent-specific argument placeholder substituted for {ARGS}
#   $4 output_dir     - destination directory (created if missing)
#   $5 script_variant - sh | ps; selects which frontmatter script command is inlined
generate_commands() {
  local agent=$1 ext=$2 arg_format=$3 output_dir=$4 script_variant=$5
  mkdir -p "$output_dir"
  for template in templates/commands/*.md; do
    [[ -f "$template" ]] || continue
    local name description script_command agent_script_command body
    name=$(basename "$template" .md)

    # Normalize line endings
    # NOTE(review): file_content is not declared `local` — it leaks to the
    # caller's scope; confirm nothing depends on that.
    file_content=$(tr -d '\r' < "$template")

    # Extract description and script command from YAML frontmatter
    description=$(printf '%s\n' "$file_content" | awk '/^description:/ {sub(/^description:[[:space:]]*/, ""); print; exit}')
    script_command=$(printf '%s\n' "$file_content" | awk -v sv="$script_variant" '/^[[:space:]]*'"$script_variant"':[[:space:]]*/ {sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, ""); print; exit}')

    if [[ -z $script_command ]]; then
      echo "Warning: no script command found for $script_variant in $template" >&2
      script_command="(Missing script command for $script_variant)"
    fi

    # Extract agent_script command from YAML frontmatter if present
    # (only lines nested under a top-level `agent_scripts:` key count; the
    # block ends at the next non-indented key).
    agent_script_command=$(printf '%s\n' "$file_content" | awk '
      /^agent_scripts:$/ { in_agent_scripts=1; next }
      in_agent_scripts && /^[[:space:]]*'"$script_variant"':[[:space:]]*/ {
        sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, "")
        print
        exit
      }
      in_agent_scripts && /^[a-zA-Z]/ { in_agent_scripts=0 }
    ')

    # Replace {SCRIPT} placeholder with the script command
    body=$(printf '%s\n' "$file_content" | sed "s|{SCRIPT}|${script_command}|g")

    # Replace {AGENT_SCRIPT} placeholder with the agent script command if found
    if [[ -n $agent_script_command ]]; then
      body=$(printf '%s\n' "$body" | sed "s|{AGENT_SCRIPT}|${agent_script_command}|g")
    fi

    # Remove the scripts: and agent_scripts: sections from frontmatter while preserving YAML structure
    body=$(printf '%s\n' "$body" | awk '
      /^---$/ { print; if (++dash_count == 1) in_frontmatter=1; else in_frontmatter=0; next }
      in_frontmatter && /^scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^agent_scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^[a-zA-Z].*:/ && skip_scripts { skip_scripts=0 }
      in_frontmatter && skip_scripts && /^[[:space:]]/ { next }
      { print }
    ')

    # Apply other substitutions
    body=$(printf '%s\n' "$body" | sed "s/{ARGS}/$arg_format/g" | sed "s/__AGENT__/$agent/g" | rewrite_paths)

    case $ext in
      toml)
        # Escape backslashes so the body survives inside a TOML """ literal.
        body=$(printf '%s\n' "$body" | sed 's/\\/\\\\/g')
        { echo "description = \"$description\""; echo; echo "prompt = \"\"\""; echo "$body"; echo "\"\"\""; } > "$output_dir/speckit.$name.$ext" ;;
      md)
        echo "$body" > "$output_dir/speckit.$name.$ext" ;;
      agent.md)
        echo "$body" > "$output_dir/speckit.$name.$ext" ;;
    esac
  done
}
|
||||
|
||||
# Generate a GitHub Copilot .prompt.md shim for every .agent.md command file.
# Arguments:
#   $1 agents_dir  - directory containing speckit.*.agent.md files
#   $2 prompts_dir - directory receiving speckit.*.prompt.md files (created if absent)
# Each prompt file carries only frontmatter pointing at the matching agent.
generate_copilot_prompts() {
  local agents_dir=$1 prompts_dir=$2
  mkdir -p "$prompts_dir"

  local agent_file base prompt_file
  # Generate a .prompt.md file for each .agent.md file
  for agent_file in "$agents_dir"/speckit.*.agent.md; do
    [[ -f "$agent_file" ]] || continue   # no-match glob guard

    # Fix: split declaration from assignment so a basename failure is not
    # masked by `local` (and avoid shadowing the basename utility's name).
    base=$(basename "$agent_file" .agent.md)
    prompt_file="$prompts_dir/${base}.prompt.md"

    cat > "$prompt_file" <<EOF
---
agent: ${base}
---
EOF
  done
}
|
||||
|
||||
# Create skills in <skills_dir>/<name>/SKILL.md format.
# Skills use hyphenated names (e.g. speckit-plan).
#
# Technical debt note:
# Keep SKILL.md frontmatter aligned with `install_ai_skills()` and extension
# overrides (at minimum: name/description/compatibility/metadata.{author,source}).
#
# Arguments:
#   $1 skills_dir     - root directory; one sub-directory is created per skill
#   $2 script_variant - sh | ps; selects the frontmatter script command
#   $3 agent_name     - substituted for the __AGENT__ placeholder
#   $4 separator      - joins "speckit" and the command name (default "-")
create_skills() {
  local skills_dir="$1"
  local script_variant="$2"
  local agent_name="$3"
  local separator="${4:-"-"}"

  for template in templates/commands/*.md; do
    [[ -f "$template" ]] || continue
    local name
    name=$(basename "$template" .md)
    local skill_name="speckit${separator}${name}"
    local skill_dir="${skills_dir}/${skill_name}"
    mkdir -p "$skill_dir"

    local file_content
    file_content=$(tr -d '\r' < "$template")   # normalize CRLF line endings

    # Extract description from frontmatter
    local description
    description=$(printf '%s\n' "$file_content" | awk '/^description:/ {sub(/^description:[[:space:]]*/, ""); print; exit}')
    [[ -z "$description" ]] && description="Spec Kit: ${name} workflow"

    # Extract script command
    local script_command
    script_command=$(printf '%s\n' "$file_content" | awk -v sv="$script_variant" '/^[[:space:]]*'"$script_variant"':[[:space:]]*/ {sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, ""); print; exit}')
    [[ -z "$script_command" ]] && script_command="(Missing script command for $script_variant)"

    # Extract agent_script command from frontmatter if present
    # (only lines nested under an `agent_scripts:` key are considered).
    local agent_script_command
    agent_script_command=$(printf '%s\n' "$file_content" | awk '
      /^agent_scripts:$/ { in_agent_scripts=1; next }
      in_agent_scripts && /^[[:space:]]*'"$script_variant"':[[:space:]]*/ {
        sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, "")
        print
        exit
      }
      in_agent_scripts && /^[a-zA-Z]/ { in_agent_scripts=0 }
    ')

    # Build body: replace placeholders, strip scripts sections, rewrite paths
    local body
    body=$(printf '%s\n' "$file_content" | sed "s|{SCRIPT}|${script_command}|g")
    if [[ -n $agent_script_command ]]; then
      body=$(printf '%s\n' "$body" | sed "s|{AGENT_SCRIPT}|${agent_script_command}|g")
    fi
    body=$(printf '%s\n' "$body" | awk '
      /^---$/ { print; if (++dash_count == 1) in_frontmatter=1; else in_frontmatter=0; next }
      in_frontmatter && /^scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^agent_scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^[a-zA-Z].*:/ && skip_scripts { skip_scripts=0 }
      in_frontmatter && skip_scripts && /^[[:space:]]/ { next }
      { print }
    ')
    # Skills always use the $ARGUMENTS placeholder (escaped so sed emits it literally).
    body=$(printf '%s\n' "$body" | sed 's/{ARGS}/\$ARGUMENTS/g' | sed "s/__AGENT__/$agent_name/g" | rewrite_paths)

    # Strip existing frontmatter and prepend skills frontmatter.
    # (awk keeps everything after the second '---' delimiter line)
    local template_body
    template_body=$(printf '%s\n' "$body" | awk '/^---/{p++; if(p==2){found=1; next}} found')

    {
      printf -- '---\n'
      printf 'name: "%s"\n' "$skill_name"
      printf 'description: "%s"\n' "$description"
      printf 'compatibility: "%s"\n' "Requires spec-kit project structure with .specify/ directory"
      printf -- 'metadata:\n'
      printf ' author: "%s"\n' "github-spec-kit"
      printf ' source: "%s"\n' "templates/commands/${name}.md"
      printf -- '---\n\n'
      printf '%s\n' "$template_body"
    } > "$skill_dir/SKILL.md"
  done
}
|
||||
|
||||
# Assemble the on-disk package for one (agent, script) combination and zip it.
# Arguments: $1 = agent key, $2 = script variant (sh|ps).
# Produces $GENRELEASES_DIR/spec-kit-template-<agent>-<script>-<version>.zip
# from a staging directory containing the shared .specify/ payload plus the
# agent-specific command/skill layout.
build_variant() {
  local agent=$1 script=$2
  local base_dir="$GENRELEASES_DIR/sdd-${agent}-package-${script}"
  echo "Building $agent ($script) package..."
  mkdir -p "$base_dir"

  # Copy base structure but filter scripts by variant
  # NOTE(review): SPEC_DIR is not declared `local` — it leaks to caller scope.
  SPEC_DIR="$base_dir/.specify"
  mkdir -p "$SPEC_DIR"

  [[ -d memory ]] && { cp -r memory "$SPEC_DIR/"; echo "Copied memory -> .specify"; }

  # Only copy the relevant script variant directory
  if [[ -d scripts ]]; then
    mkdir -p "$SPEC_DIR/scripts"
    case $script in
      sh)
        [[ -d scripts/bash ]] && { cp -r scripts/bash "$SPEC_DIR/scripts/"; echo "Copied scripts/bash -> .specify/scripts"; }
        # Top-level (variant-independent) helper scripts ship with both variants.
        find scripts -maxdepth 1 -type f -exec cp {} "$SPEC_DIR/scripts/" \; 2>/dev/null || true
        ;;
      ps)
        [[ -d scripts/powershell ]] && { cp -r scripts/powershell "$SPEC_DIR/scripts/"; echo "Copied scripts/powershell -> .specify/scripts"; }
        find scripts -maxdepth 1 -type f -exec cp {} "$SPEC_DIR/scripts/" \; 2>/dev/null || true
        ;;
    esac
  fi

  # Ship all templates except command templates (rendered per-agent below)
  # and vscode-settings.json (only used by the copilot variant).
  [[ -d templates ]] && { mkdir -p "$SPEC_DIR/templates"; find templates -type f -not -path "templates/commands/*" -not -name "vscode-settings.json" | while IFS= read -r f; do d="$SPEC_DIR/$(dirname "$f")"; mkdir -p "$d"; cp "$f" "$d/"; done; echo "Copied templates -> .specify/templates"; }

  # Per-agent layout: output directory, file format and argument placeholder
  # all differ by agent (see the supported-agents table in AGENTS.md).
  case $agent in
    claude)
      mkdir -p "$base_dir/.claude/commands"
      generate_commands claude md "\$ARGUMENTS" "$base_dir/.claude/commands" "$script" ;;
    gemini)
      mkdir -p "$base_dir/.gemini/commands"
      generate_commands gemini toml "{{args}}" "$base_dir/.gemini/commands" "$script"
      [[ -f agent_templates/gemini/GEMINI.md ]] && cp agent_templates/gemini/GEMINI.md "$base_dir/GEMINI.md" ;;
    copilot)
      # Copilot additionally gets .prompt.md shims and a VS Code settings file.
      mkdir -p "$base_dir/.github/agents"
      generate_commands copilot agent.md "\$ARGUMENTS" "$base_dir/.github/agents" "$script"
      generate_copilot_prompts "$base_dir/.github/agents" "$base_dir/.github/prompts"
      mkdir -p "$base_dir/.vscode"
      [[ -f templates/vscode-settings.json ]] && cp templates/vscode-settings.json "$base_dir/.vscode/settings.json"
      ;;
    cursor-agent)
      mkdir -p "$base_dir/.cursor/commands"
      generate_commands cursor-agent md "\$ARGUMENTS" "$base_dir/.cursor/commands" "$script" ;;
    qwen)
      mkdir -p "$base_dir/.qwen/commands"
      generate_commands qwen md "\$ARGUMENTS" "$base_dir/.qwen/commands" "$script"
      [[ -f agent_templates/qwen/QWEN.md ]] && cp agent_templates/qwen/QWEN.md "$base_dir/QWEN.md" ;;
    opencode)
      # opencode uses a singular "command" directory by convention.
      mkdir -p "$base_dir/.opencode/command"
      generate_commands opencode md "\$ARGUMENTS" "$base_dir/.opencode/command" "$script" ;;
    windsurf)
      mkdir -p "$base_dir/.windsurf/workflows"
      generate_commands windsurf md "\$ARGUMENTS" "$base_dir/.windsurf/workflows" "$script" ;;
    junie)
      mkdir -p "$base_dir/.junie/commands"
      generate_commands junie md "\$ARGUMENTS" "$base_dir/.junie/commands" "$script" ;;
    codex)
      # codex consumes skill directories rather than flat command files.
      mkdir -p "$base_dir/.agents/skills"
      create_skills "$base_dir/.agents/skills" "$script" "codex" "-" ;;
    kilocode)
      mkdir -p "$base_dir/.kilocode/workflows"
      generate_commands kilocode md "\$ARGUMENTS" "$base_dir/.kilocode/workflows" "$script" ;;
    auggie)
      mkdir -p "$base_dir/.augment/commands"
      generate_commands auggie md "\$ARGUMENTS" "$base_dir/.augment/commands" "$script" ;;
    roo)
      mkdir -p "$base_dir/.roo/commands"
      generate_commands roo md "\$ARGUMENTS" "$base_dir/.roo/commands" "$script" ;;
    codebuddy)
      mkdir -p "$base_dir/.codebuddy/commands"
      generate_commands codebuddy md "\$ARGUMENTS" "$base_dir/.codebuddy/commands" "$script" ;;
    qodercli)
      mkdir -p "$base_dir/.qoder/commands"
      generate_commands qodercli md "\$ARGUMENTS" "$base_dir/.qoder/commands" "$script" ;;
    amp)
      mkdir -p "$base_dir/.agents/commands"
      generate_commands amp md "\$ARGUMENTS" "$base_dir/.agents/commands" "$script" ;;
    shai)
      mkdir -p "$base_dir/.shai/commands"
      generate_commands shai md "\$ARGUMENTS" "$base_dir/.shai/commands" "$script" ;;
    tabnine)
      mkdir -p "$base_dir/.tabnine/agent/commands"
      generate_commands tabnine toml "{{args}}" "$base_dir/.tabnine/agent/commands" "$script"
      [[ -f agent_templates/tabnine/TABNINE.md ]] && cp agent_templates/tabnine/TABNINE.md "$base_dir/TABNINE.md" ;;
    kiro-cli)
      mkdir -p "$base_dir/.kiro/prompts"
      generate_commands kiro-cli md "\$ARGUMENTS" "$base_dir/.kiro/prompts" "$script" ;;
    agy)
      mkdir -p "$base_dir/.agent/commands"
      generate_commands agy md "\$ARGUMENTS" "$base_dir/.agent/commands" "$script" ;;
    bob)
      mkdir -p "$base_dir/.bob/commands"
      generate_commands bob md "\$ARGUMENTS" "$base_dir/.bob/commands" "$script" ;;
    vibe)
      mkdir -p "$base_dir/.vibe/prompts"
      generate_commands vibe md "\$ARGUMENTS" "$base_dir/.vibe/prompts" "$script" ;;
    kimi)
      # kimi consumes skill directories (default "-" separator).
      mkdir -p "$base_dir/.kimi/skills"
      create_skills "$base_dir/.kimi/skills" "$script" "kimi" ;;
    trae)
      mkdir -p "$base_dir/.trae/rules"
      generate_commands trae md "\$ARGUMENTS" "$base_dir/.trae/rules" "$script" ;;
    pi)
      mkdir -p "$base_dir/.pi/prompts"
      generate_commands pi md "\$ARGUMENTS" "$base_dir/.pi/prompts" "$script" ;;
    iflow)
      mkdir -p "$base_dir/.iflow/commands"
      generate_commands iflow md "\$ARGUMENTS" "$base_dir/.iflow/commands" "$script" ;;
    generic)
      mkdir -p "$base_dir/.speckit/commands"
      generate_commands generic md "\$ARGUMENTS" "$base_dir/.speckit/commands" "$script" ;;
  esac
  # Zip from inside the staging dir so archive paths are relative to its root.
  ( cd "$base_dir" && zip -r "../spec-kit-template-${agent}-${script}-${NEW_VERSION}.zip" . )
  echo "Created $GENRELEASES_DIR/spec-kit-template-${agent}-${script}-${NEW_VERSION}.zip"
}
|
||||
|
||||
# Determine agent list
# Canonical sets of supported agent keys and script variants; the optional
# AGENTS / SCRIPTS environment variables are validated against these below.
ALL_AGENTS=(claude gemini copilot cursor-agent qwen opencode windsurf junie codex kilocode auggie roo codebuddy amp shai tabnine kiro-cli agy bob vibe qodercli kimi trae pi iflow generic)
ALL_SCRIPTS=(sh ps)
|
||||
|
||||
# Check that every remaining argument belongs to the allowed set.
#   $1    - label used in error messages ("agent" / "script")
#   $2    - space-separated string of allowed values
#   $3... - candidate values to check
# Emits one error per unknown candidate; returns 0 iff all were allowed.
validate_subset() {
  local label=$1; shift
  local allowed="$1"; shift
  local status=0
  local candidate entry match
  for candidate in "$@"; do
    match=0
    for entry in $allowed; do
      if [[ "$candidate" == "$entry" ]]; then
        match=1
        break
      fi
    done
    if [[ $match -eq 0 ]]; then
      echo "Error: unknown $label '$candidate' (allowed: $allowed)" >&2
      status=1
    fi
  done
  return $status
}
|
||||
|
||||
# Normalise a comma/newline/space separated list read from stdin into a single
# space-separated line with duplicates removed (first occurrence wins).
# Fix: tokens are passed to awk's printf as *arguments*, never as the format
# string, so values containing '%' are emitted literally (SC2059).
read_list() {
  tr ',\n' ' ' | awk '{
    for (i = 1; i <= NF; i++) {
      if (!seen[$i]++) {
        printf("%s%s", (out ? " " : ""), $i)
        out = 1
      }
    }
  } END { printf("\n") }'
}
|
||||
|
||||
# Resolve the requested agent subset: the AGENTS env var (comma/space
# separated) when set, otherwise every supported agent.
if [[ -n ${AGENTS:-} ]]; then
  read -ra AGENT_LIST <<< "$(printf '%s' "$AGENTS" | read_list)"
  validate_subset agent "${ALL_AGENTS[*]}" "${AGENT_LIST[@]}" || exit 1
else
  AGENT_LIST=("${ALL_AGENTS[@]}")
fi

# Same for script variants via the SCRIPTS env var.
if [[ -n ${SCRIPTS:-} ]]; then
  read -ra SCRIPT_LIST <<< "$(printf '%s' "$SCRIPTS" | read_list)"
  validate_subset script "${ALL_SCRIPTS[*]}" "${SCRIPT_LIST[@]}" || exit 1
else
  SCRIPT_LIST=("${ALL_SCRIPTS[@]}")
fi

echo "Agents: ${AGENT_LIST[*]}"
echo "Scripts: ${SCRIPT_LIST[*]}"

# Build every requested (agent, script) combination.
for agent in "${AGENT_LIST[@]}"; do
  for script in "${SCRIPT_LIST[@]}"; do
    build_variant "$agent" "$script"
  done
done

# Final listing for the workflow log; fails (under set -e) if nothing was built.
echo "Archives in $GENRELEASES_DIR:"
ls -1 "$GENRELEASES_DIR"/spec-kit-template-*-"${NEW_VERSION}".zip
|
||||
@@ -1,40 +0,0 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# generate-release-notes.sh
# Generate release notes (release_notes.md in the CWD) from git history.
# Usage: generate-release-notes.sh <new_version> <last_tag>
#   <new_version> - accepted for interface compatibility (not currently used)
#   <last_tag>    - previous release tag, or the sentinel "v0.0.0" for a
#                   first release

if [[ $# -ne 2 ]]; then
  echo "Usage: $0 <new_version> <last_tag>" >&2
  exit 1
fi

NEW_VERSION="$1"
LAST_TAG="$2"

# Collect the commit subjects that form the changelog.
if [[ "$LAST_TAG" == "v0.0.0" ]]; then
  # No previous release: fall back to the last 10 commits (or all of them
  # when the repository has fewer than 10).
  COMMIT_COUNT=$(git rev-list --count HEAD)
  if [[ "$COMMIT_COUNT" -gt 10 ]]; then
    COMMITS=$(git log --oneline --pretty=format:"- %s" HEAD~10..HEAD)
  else
    # HEAD~N is invalid when N equals the total commit count (the root
    # commit has no parent), so fall back to the full log in that case.
    COMMITS=$(git log --oneline --pretty=format:"- %s" "HEAD~${COMMIT_COUNT}..HEAD" 2>/dev/null || git log --oneline --pretty=format:"- %s")
  fi
else
  # Normal case: everything since the previous release tag.
  # Fix: quote $LAST_TAG to avoid word-splitting/globbing of the revision range.
  COMMITS=$(git log --oneline --pretty=format:"- %s" "$LAST_TAG..HEAD")
fi

# Create release notes
cat > release_notes.md << EOF
This is the latest set of releases that you can use with your agent of choice. We recommend using the Specify CLI to scaffold your projects, however you can download these independently and manage them yourself.

## Changelog

$COMMITS

EOF

echo "Generated release notes:"
cat release_notes.md
|
||||
24
.github/workflows/scripts/get-next-version.sh
vendored
24
.github/workflows/scripts/get-next-version.sh
vendored
@@ -1,24 +0,0 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# get-next-version.sh
# Calculate the next version based on the latest git tag and output GitHub
# Actions step variables (latest_tag, new_version) via $GITHUB_OUTPUT.
# Usage: get-next-version.sh

# Get the latest tag, or use v0.0.0 if no tags exist
LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.0.0")
# Fix: quote $GITHUB_OUTPUT (path may contain spaces on some runners).
echo "latest_tag=$LATEST_TAG" >> "$GITHUB_OUTPUT"

# Extract version number and increment.
# Fix: strip only the leading 'v' — the previous `sed 's/v//'` removed the
# first 'v' found anywhere in the tag string.
VERSION="${LATEST_TAG#v}"
IFS='.' read -ra VERSION_PARTS <<< "$VERSION"
MAJOR=${VERSION_PARTS[0]:-0}
MINOR=${VERSION_PARTS[1]:-0}
PATCH=${VERSION_PARTS[2]:-0}

# Increment patch version
PATCH=$((PATCH + 1))
NEW_VERSION="v$MAJOR.$MINOR.$PATCH"

echo "new_version=$NEW_VERSION" >> "$GITHUB_OUTPUT"
echo "New version will be: $NEW_VERSION"
|
||||
161
.github/workflows/scripts/simulate-release.sh
vendored
161
.github/workflows/scripts/simulate-release.sh
vendored
@@ -1,161 +0,0 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# simulate-release.sh
# Simulate the release process locally without pushing to GitHub
# Usage: simulate-release.sh [version]
# If version is omitted, auto-increments patch version
#
# Local side effects (all reversible; see "Next steps" output at the end):
# edits pyproject.toml and CHANGELOG.md, creates an annotated local tag, and
# backs both files up to a fresh temp directory.

# Colors for output
GREEN='\033[0;32m'
BLUE='\033[0;34m'
YELLOW='\033[1;33m'
RED='\033[0;31m'
NC='\033[0m' # No Color

echo -e "${BLUE}🧪 Simulating Release Process Locally${NC}"
echo "======================================"
echo ""

# Step 1: Determine version
if [[ -n "${1:-}" ]]; then
  VERSION="${1#v}"   # accept both "1.2.3" and "v1.2.3" forms
  TAG="v$VERSION"
  echo -e "${GREEN}📝 Using manual version: $VERSION${NC}"
else
  LATEST_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "v0.0.0")
  echo -e "${BLUE}Latest tag: $LATEST_TAG${NC}"

  # NOTE(review): sed removes the first 'v' anywhere, not just a prefix —
  # confirm tags can never contain 'v' elsewhere.
  VERSION=$(echo $LATEST_TAG | sed 's/v//')
  IFS='.' read -ra VERSION_PARTS <<< "$VERSION"
  MAJOR=${VERSION_PARTS[0]:-0}
  MINOR=${VERSION_PARTS[1]:-0}
  PATCH=${VERSION_PARTS[2]:-0}

  PATCH=$((PATCH + 1))
  VERSION="$MAJOR.$MINOR.$PATCH"
  TAG="v$VERSION"
  echo -e "${GREEN}📝 Auto-incremented to: $VERSION${NC}"
fi

echo ""

# Step 2: Check if tag exists
if git rev-parse "$TAG" >/dev/null 2>&1; then
  echo -e "${RED}❌ Error: Tag $TAG already exists!${NC}"
  echo "   Please use a different version or delete the tag first."
  exit 1
fi
echo -e "${GREEN}✓ Tag $TAG is available${NC}"

# Step 3: Backup current state
echo ""
echo -e "${YELLOW}💾 Creating backup of current state...${NC}"
BACKUP_DIR=$(mktemp -d)
cp pyproject.toml "$BACKUP_DIR/pyproject.toml.bak"
cp CHANGELOG.md "$BACKUP_DIR/CHANGELOG.md.bak"
echo -e "${GREEN}✓ Backup created at: $BACKUP_DIR${NC}"

# Step 4: Update pyproject.toml
echo ""
echo -e "${YELLOW}📝 Updating pyproject.toml...${NC}"
# -i.tmp keeps the in-place edit portable between GNU and BSD sed.
sed -i.tmp "s/version = \".*\"/version = \"$VERSION\"/" pyproject.toml
rm -f pyproject.toml.tmp
echo -e "${GREEN}✓ Updated pyproject.toml to version $VERSION${NC}"

# Step 5: Update CHANGELOG.md
echo ""
echo -e "${YELLOW}📝 Updating CHANGELOG.md...${NC}"
DATE=$(date +%Y-%m-%d)

# Get the previous tag to compare commits
PREVIOUS_TAG=$(git describe --tags --abbrev=0 2>/dev/null || echo "")

if [[ -n "$PREVIOUS_TAG" ]]; then
  echo "   Generating changelog from commits since $PREVIOUS_TAG"
  # Get commits since last tag, format as bullet points
  COMMITS=$(git log --oneline "$PREVIOUS_TAG"..HEAD --no-merges --pretty=format:"- %s" 2>/dev/null || echo "- Initial release")
else
  echo "   No previous tag found - this is the first release"
  COMMITS="- Initial release"
fi

# Create temp file with new entry
# (the new section is spliced in after the first 8 header lines of CHANGELOG.md)
{
  head -n 8 CHANGELOG.md
  echo ""
  echo "## [$VERSION] - $DATE"
  echo ""
  echo "### Changed"
  echo ""
  echo "$COMMITS"
  echo ""
  tail -n +9 CHANGELOG.md
} > CHANGELOG.md.tmp
mv CHANGELOG.md.tmp CHANGELOG.md
echo -e "${GREEN}✓ Updated CHANGELOG.md with commits since $PREVIOUS_TAG${NC}"

# Step 6: Show what would be committed
echo ""
echo -e "${YELLOW}📋 Changes that would be committed:${NC}"
git diff pyproject.toml CHANGELOG.md

# Step 7: Create temporary tag (no push)
echo ""
echo -e "${YELLOW}🏷️  Creating temporary local tag...${NC}"
# Best-effort: '|| true' tolerates tag-creation failure under set -e.
git tag -a "$TAG" -m "Simulated release $TAG" 2>/dev/null || true
echo -e "${GREEN}✓ Tag $TAG created locally${NC}"

# Step 8: Simulate release artifact creation
echo ""
echo -e "${YELLOW}📦 Simulating release package creation...${NC}"
echo "   (High-level simulation only; packaging script is not executed)"
echo ""

# Check if script exists and is executable
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
if [[ -x "$SCRIPT_DIR/create-release-packages.sh" ]]; then
  echo -e "${BLUE}In a real release, the following command would be run to create packages:${NC}"
  echo "   $SCRIPT_DIR/create-release-packages.sh \"$TAG\""
  echo ""
  echo "This simulation does not enumerate individual package files to avoid"
  echo "drifting from the actual behavior of create-release-packages.sh."
else
  echo -e "${RED}⚠️  create-release-packages.sh not found or not executable${NC}"
fi

# Step 9: Simulate release notes generation
echo ""
echo -e "${YELLOW}📄 Simulating release notes generation...${NC}"
echo ""
# Parent-of-tag lookup so the just-created tag itself is excluded.
PREVIOUS_TAG=$(git describe --tags --abbrev=0 $TAG^ 2>/dev/null || echo "")
if [[ -n "$PREVIOUS_TAG" ]]; then
  echo -e "${BLUE}Changes since $PREVIOUS_TAG:${NC}"
  git log --oneline "$PREVIOUS_TAG".."$TAG" | head -n 10
  echo ""
else
  echo -e "${BLUE}No previous tag found - this would be the first release${NC}"
fi

# Step 10: Summary
echo ""
echo -e "${GREEN}🎉 Simulation Complete!${NC}"
echo "======================================"
echo ""
echo -e "${BLUE}Summary:${NC}"
echo "   Version: $VERSION"
echo "   Tag: $TAG"
echo "   Backup: $BACKUP_DIR"
echo ""
echo -e "${YELLOW}⚠️  SIMULATION ONLY - NO CHANGES PUSHED${NC}"
echo ""
echo -e "${BLUE}Next steps:${NC}"
echo "   1. Review the changes above"
echo "   2. To keep changes: git add pyproject.toml CHANGELOG.md && git commit"
echo "   3. To discard changes: git checkout pyproject.toml CHANGELOG.md && git tag -d $TAG"
echo "   4. To restore from backup: cp $BACKUP_DIR/* ."
echo ""
echo -e "${BLUE}To run the actual release:${NC}"
echo "   Go to: https://github.com/github/spec-kit/actions/workflows/release-trigger.yml"
echo "   Click 'Run workflow' and enter version: $VERSION"
echo ""
|
||||
23
.github/workflows/scripts/update-version.sh
vendored
23
.github/workflows/scripts/update-version.sh
vendored
@@ -1,23 +0,0 @@
|
||||
#!/usr/bin/env bash
set -euo pipefail

# update-version.sh
# Rewrites the `version = "..."` line in pyproject.toml so that release
# artifacts carry the tagged version (the change is for release artifacts
# only and is not committed back).
# Usage: update-version.sh <version>

if [[ $# -ne 1 ]]; then
  echo "Usage: $0 <version>" >&2
  exit 1
fi

VERSION="$1"

# PEP 440 style versions carry no 'v' prefix; strip it from the tag form.
PY_VERSION=${VERSION#v}

if [[ -f pyproject.toml ]]; then
  sed -i "s/version = \".*\"/version = \"$PY_VERSION\"/" pyproject.toml
  echo "Updated pyproject.toml version to $PY_VERSION (for release artifacts only)"
else
  echo "Warning: pyproject.toml not found, skipping version update"
fi
fi
|
||||
546
AGENTS.md
546
AGENTS.md
@@ -10,276 +10,278 @@ The toolkit supports multiple AI coding assistants, allowing teams to use their
|
||||
|
||||
---
|
||||
|
||||
## Adding New Agent Support
|
||||
## Integration Architecture
|
||||
|
||||
This section explains how to add support for new AI agents/assistants to the Specify CLI. Use this guide as a reference when integrating new AI tools into the Spec-Driven Development workflow.
|
||||
Each AI agent is a self-contained **integration subpackage** under `src/specify_cli/integrations/<key>/`. The package exposes a single class that declares all metadata, inherits setup/teardown logic from a base class, and registers itself in the global `INTEGRATION_REGISTRY` at import time.
|
||||
|
||||
### Overview
|
||||
```
|
||||
src/specify_cli/integrations/
|
||||
├── __init__.py # INTEGRATION_REGISTRY + _register_builtins()
|
||||
├── base.py # IntegrationBase, MarkdownIntegration, TomlIntegration, SkillsIntegration
|
||||
├── manifest.py # IntegrationManifest (file tracking)
|
||||
├── claude/ # Example: SkillsIntegration subclass
|
||||
│ ├── __init__.py # ClaudeIntegration class
|
||||
│ └── scripts/ # Thin wrapper scripts
|
||||
│ ├── update-context.sh
|
||||
│ └── update-context.ps1
|
||||
├── gemini/ # Example: TomlIntegration subclass
|
||||
│ ├── __init__.py
|
||||
│ └── scripts/
|
||||
├── windsurf/ # Example: MarkdownIntegration subclass
|
||||
│ ├── __init__.py
|
||||
│ └── scripts/
|
||||
├── copilot/ # Example: IntegrationBase subclass (custom setup)
|
||||
│ ├── __init__.py
|
||||
│ └── scripts/
|
||||
└── ... # One subpackage per supported agent
|
||||
```
|
||||
|
||||
Specify supports multiple AI agents by generating agent-specific command files and directory structures when initializing projects. Each agent has its own conventions for:
|
||||
The registry is the **single source of truth**. Supported agents, their directories, formats, and capabilities are all derived from the integration classes — no separate tables or config dicts to maintain.
|
||||
|
||||
- **Command file formats** (Markdown, TOML, etc.)
|
||||
- **Directory structures** (`.claude/commands/`, `.windsurf/workflows/`, etc.)
|
||||
- **Command invocation patterns** (slash commands, CLI tools, etc.)
|
||||
- **Argument passing conventions** (`$ARGUMENTS`, `{{args}}`, etc.)
|
||||
---
|
||||
|
||||
### Current Supported Agents
|
||||
## Adding a New Integration
|
||||
|
||||
| Agent | Directory | Format | CLI Tool | Description |
|
||||
| -------------------------- | ---------------------- | -------- | --------------- | --------------------------- |
|
||||
| **Claude Code** | `.claude/commands/` | Markdown | `claude` | Anthropic's Claude Code CLI |
|
||||
| **Gemini CLI** | `.gemini/commands/` | TOML | `gemini` | Google's Gemini CLI |
|
||||
| **GitHub Copilot** | `.github/agents/` | Markdown | N/A (IDE-based) | GitHub Copilot in VS Code |
|
||||
| **Cursor** | `.cursor/commands/` | Markdown | N/A (IDE-based) | Cursor IDE (`--ai cursor-agent`) |
|
||||
| **Qwen Code** | `.qwen/commands/` | Markdown | `qwen` | Alibaba's Qwen Code CLI |
|
||||
| **opencode** | `.opencode/command/` | Markdown | `opencode` | opencode CLI |
|
||||
| **Codex CLI** | `.agents/skills/` | Markdown | `codex` | Codex CLI (`--ai codex --ai-skills`) |
|
||||
| **Windsurf** | `.windsurf/workflows/` | Markdown | N/A (IDE-based) | Windsurf IDE workflows |
|
||||
| **Junie** | `.junie/commands/` | Markdown | `junie` | Junie by JetBrains |
|
||||
| **Kilo Code** | `.kilocode/workflows/` | Markdown | N/A (IDE-based) | Kilo Code IDE |
|
||||
| **Auggie CLI** | `.augment/commands/` | Markdown | `auggie` | Auggie CLI |
|
||||
| **Roo Code** | `.roo/commands/` | Markdown | N/A (IDE-based) | Roo Code IDE |
|
||||
| **CodeBuddy CLI** | `.codebuddy/commands/` | Markdown | `codebuddy` | CodeBuddy CLI |
|
||||
| **Qoder CLI** | `.qoder/commands/` | Markdown | `qodercli` | Qoder CLI |
|
||||
| **Kiro CLI** | `.kiro/prompts/` | Markdown | `kiro-cli` | Kiro CLI |
|
||||
| **Amp** | `.agents/commands/` | Markdown | `amp` | Amp CLI |
|
||||
| **SHAI** | `.shai/commands/` | Markdown | `shai` | SHAI CLI |
|
||||
| **Tabnine CLI** | `.tabnine/agent/commands/` | TOML | `tabnine` | Tabnine CLI |
|
||||
| **Kimi Code** | `.kimi/skills/` | Markdown | `kimi` | Kimi Code CLI (Moonshot AI) |
|
||||
| **Pi Coding Agent** | `.pi/prompts/` | Markdown | `pi` | Pi terminal coding agent |
|
||||
| **iFlow CLI** | `.iflow/commands/` | Markdown | `iflow` | iFlow CLI (iflow-ai) |
|
||||
| **IBM Bob** | `.bob/commands/` | Markdown | N/A (IDE-based) | IBM Bob IDE |
|
||||
| **Trae** | `.trae/rules/` | Markdown | N/A (IDE-based) | Trae IDE |
|
||||
| **Antigravity** | `.agent/commands/` | Markdown | N/A (IDE-based) | Antigravity IDE (`--ai agy --ai-skills`) |
|
||||
| **Mistral Vibe** | `.vibe/prompts/` | Markdown | `vibe` | Mistral Vibe CLI |
|
||||
| **Generic** | User-specified via `--ai-commands-dir` | Markdown | N/A | Bring your own agent |
|
||||
### 1. Choose a base class
|
||||
|
||||
### Step-by-Step Integration Guide
|
||||
| Your agent needs… | Subclass |
|
||||
|---|---|
|
||||
| Standard markdown commands (`.md`) | `MarkdownIntegration` |
|
||||
| TOML-format commands (`.toml`) | `TomlIntegration` |
|
||||
| Skill directories (`speckit-<name>/SKILL.md`) | `SkillsIntegration` |
|
||||
| Fully custom output (companion files, settings merge, etc.) | `IntegrationBase` directly |
|
||||
|
||||
Follow these steps to add a new agent (using a hypothetical new agent as an example):
|
||||
Most agents only need `MarkdownIntegration` — a minimal subclass with zero method overrides.
|
||||
|
||||
#### 1. Add to AGENT_CONFIG
|
||||
### 2. Create the subpackage
|
||||
|
||||
**IMPORTANT**: Use the actual CLI tool name as the key, not a shortened version.
|
||||
Create `src/specify_cli/integrations/<key>/__init__.py`. The `key` **must match the actual CLI tool name** (the executable users install and run). Use a Python-safe directory name if the key contains hyphens (e.g., `kiro_cli/` for key `"kiro-cli"`).
|
||||
|
||||
Add the new agent to the `AGENT_CONFIG` dictionary in `src/specify_cli/__init__.py`. This is the **single source of truth** for all agent metadata:
|
||||
**Minimal example — Markdown agent (Windsurf):**
|
||||
|
||||
```python
|
||||
AGENT_CONFIG = {
|
||||
# ... existing agents ...
|
||||
"new-agent-cli": { # Use the ACTUAL CLI tool name (what users type in terminal)
|
||||
"name": "New Agent Display Name",
|
||||
"folder": ".newagent/", # Directory for agent files
|
||||
"commands_subdir": "commands", # Subdirectory name for command files (default: "commands")
|
||||
"install_url": "https://example.com/install", # URL for installation docs (or None if IDE-based)
|
||||
"requires_cli": True, # True if CLI tool required, False for IDE-based agents
|
||||
},
|
||||
}
|
||||
"""Windsurf IDE integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class WindsurfIntegration(MarkdownIntegration):
|
||||
key = "windsurf"
|
||||
config = {
|
||||
"name": "Windsurf",
|
||||
"folder": ".windsurf/",
|
||||
"commands_subdir": "workflows",
|
||||
"install_url": None,
|
||||
"requires_cli": False,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".windsurf/workflows",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = ".windsurf/rules/specify-rules.md"
|
||||
```
|
||||
|
||||
**Key Design Principle**: The dictionary key should match the actual executable name that users install. For example:
|
||||
|
||||
- ✅ Use `"cursor-agent"` because the CLI tool is literally called `cursor-agent`
|
||||
- ❌ Don't use `"cursor"` as a shortcut if the tool is `cursor-agent`
|
||||
|
||||
This eliminates the need for special-case mappings throughout the codebase.
|
||||
|
||||
**Field Explanations**:
|
||||
|
||||
- `name`: Human-readable display name shown to users
|
||||
- `folder`: Directory where agent-specific files are stored (relative to project root)
|
||||
- `commands_subdir`: Subdirectory name within the agent folder where command/prompt files are stored (default: `"commands"`)
|
||||
- Most agents use `"commands"` (e.g., `.claude/commands/`)
|
||||
- Some agents use alternative names: `"agents"` (copilot), `"workflows"` (windsurf, kilocode), `"prompts"` (codex, kiro-cli, pi), `"command"` (opencode - singular)
|
||||
- This field enables `--ai-skills` to locate command templates correctly for skill generation
|
||||
- `install_url`: Installation documentation URL (set to `None` for IDE-based agents)
|
||||
- `requires_cli`: Whether the agent requires a CLI tool check during initialization
|
||||
|
||||
#### 2. Update CLI Help Text
|
||||
|
||||
Update the `--ai` parameter help text in the `init()` command to include the new agent:
|
||||
**TOML agent (Gemini):**
|
||||
|
||||
```python
|
||||
ai_assistant: str = typer.Option(None, "--ai", help="AI assistant to use: claude, gemini, copilot, cursor-agent, qwen, opencode, codex, windsurf, kilocode, auggie, codebuddy, new-agent-cli, or kiro-cli"),
|
||||
"""Gemini CLI integration."""
|
||||
|
||||
from ..base import TomlIntegration
|
||||
|
||||
|
||||
class GeminiIntegration(TomlIntegration):
|
||||
key = "gemini"
|
||||
config = {
|
||||
"name": "Gemini CLI",
|
||||
"folder": ".gemini/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://github.com/google-gemini/gemini-cli",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".gemini/commands",
|
||||
"format": "toml",
|
||||
"args": "{{args}}",
|
||||
"extension": ".toml",
|
||||
}
|
||||
context_file = "GEMINI.md"
|
||||
```
|
||||
|
||||
Also update any function docstrings, examples, and error messages that list available agents.
|
||||
**Skills agent (Codex):**
|
||||
|
||||
#### 3. Update README Documentation
|
||||
```python
|
||||
"""Codex CLI integration — skills-based agent."""
|
||||
|
||||
Update the **Supported AI Agents** section in `README.md` to include the new agent:
|
||||
from __future__ import annotations
|
||||
|
||||
- Add the new agent to the table with appropriate support level (Full/Partial)
|
||||
- Include the agent's official website link
|
||||
- Add any relevant notes about the agent's implementation
|
||||
- Ensure the table formatting remains aligned and consistent
|
||||
from ..base import IntegrationOption, SkillsIntegration
|
||||
|
||||
#### 4. Update Release Package Script
|
||||
|
||||
Modify `.github/workflows/scripts/create-release-packages.sh`:
|
||||
class CodexIntegration(SkillsIntegration):
|
||||
key = "codex"
|
||||
config = {
|
||||
"name": "Codex CLI",
|
||||
"folder": ".agents/",
|
||||
"commands_subdir": "skills",
|
||||
"install_url": "https://github.com/openai/codex",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".agents/skills",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": "/SKILL.md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
|
||||
##### Add to ALL_AGENTS array
|
||||
@classmethod
|
||||
def options(cls) -> list[IntegrationOption]:
|
||||
return [
|
||||
IntegrationOption(
|
||||
"--skills",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Install as agent skills (default for Codex)",
|
||||
),
|
||||
]
|
||||
```
|
||||
|
||||
#### Required fields
|
||||
|
||||
| Field | Location | Purpose |
|
||||
|---|---|---|
|
||||
| `key` | Class attribute | Unique identifier; must match the CLI executable name |
|
||||
| `config` | Class attribute (dict) | Agent metadata: `name`, `folder`, `commands_subdir`, `install_url`, `requires_cli` |
|
||||
| `registrar_config` | Class attribute (dict) | Command output config: `dir`, `format`, `args` placeholder, file `extension` |
|
||||
| `context_file` | Class attribute (str or None) | Path to agent context/instructions file (e.g., `"CLAUDE.md"`, `".github/copilot-instructions.md"`) |
|
||||
|
||||
**Key design rule:** `key` must be the actual executable name (e.g., `"cursor-agent"` not `"cursor"`). This ensures `shutil.which(key)` works for CLI-tool checks without special-case mappings.
|
||||
|
||||
### 3. Register it
|
||||
|
||||
In `src/specify_cli/integrations/__init__.py`, add one import and one `_register()` call inside `_register_builtins()`. Both lists are alphabetical:
|
||||
|
||||
```python
|
||||
def _register_builtins() -> None:
|
||||
# -- Imports (alphabetical) -------------------------------------------
|
||||
from .claude import ClaudeIntegration
|
||||
# ...
|
||||
from .newagent import NewAgentIntegration # ← add import
|
||||
# ...
|
||||
|
||||
# -- Registration (alphabetical) --------------------------------------
|
||||
_register(ClaudeIntegration())
|
||||
# ...
|
||||
_register(NewAgentIntegration()) # ← add registration
|
||||
# ...
|
||||
```
|
||||
|
||||
### 4. Add scripts
|
||||
|
||||
Create two thin wrapper scripts in `src/specify_cli/integrations/<key>/scripts/` that delegate to the shared context-update scripts. Each is ~25 lines of boilerplate.
|
||||
|
||||
**`update-context.sh`:**
|
||||
|
||||
```bash
|
||||
ALL_AGENTS=(claude gemini copilot cursor-agent qwen opencode windsurf kiro-cli)
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — <Agent Name> integration: create/update <context_file>
|
||||
set -euo pipefail
|
||||
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" <key>
|
||||
```
|
||||
|
||||
##### Add case statement for directory structure
|
||||
|
||||
```bash
|
||||
case $agent in
|
||||
# ... existing cases ...
|
||||
windsurf)
|
||||
mkdir -p "$base_dir/.windsurf/workflows"
|
||||
generate_commands windsurf md "\$ARGUMENTS" "$base_dir/.windsurf/workflows" "$script" ;;
|
||||
esac
|
||||
```
|
||||
|
||||
#### 4. Update GitHub Release Script
|
||||
|
||||
Modify `.github/workflows/scripts/create-github-release.sh` to include the new agent's packages:
|
||||
|
||||
```bash
|
||||
gh release create "$VERSION" \
|
||||
# ... existing packages ...
|
||||
.genreleases/spec-kit-template-windsurf-sh-"$VERSION".zip \
|
||||
.genreleases/spec-kit-template-windsurf-ps-"$VERSION".zip \
|
||||
# Add new agent packages here
|
||||
```
|
||||
|
||||
#### 5. Update Agent Context Scripts
|
||||
|
||||
##### Bash script (`scripts/bash/update-agent-context.sh`)
|
||||
|
||||
Add file variable:
|
||||
|
||||
```bash
|
||||
WINDSURF_FILE="$REPO_ROOT/.windsurf/rules/specify-rules.md"
|
||||
```
|
||||
|
||||
Add to case statement:
|
||||
|
||||
```bash
|
||||
case "$AGENT_TYPE" in
|
||||
# ... existing cases ...
|
||||
windsurf) update_agent_file "$WINDSURF_FILE" "Windsurf" ;;
|
||||
"")
|
||||
# ... existing checks ...
|
||||
[ -f "$WINDSURF_FILE" ] && update_agent_file "$WINDSURF_FILE" "Windsurf";
|
||||
# Update default creation condition
|
||||
;;
|
||||
esac
|
||||
```
|
||||
|
||||
##### PowerShell script (`scripts/powershell/update-agent-context.ps1`)
|
||||
|
||||
Add file variable:
|
||||
**`update-context.ps1`:**
|
||||
|
||||
```powershell
|
||||
$windsurfFile = Join-Path $repoRoot '.windsurf/rules/specify-rules.md'
|
||||
```
|
||||
# update-context.ps1 — <Agent Name> integration: create/update <context_file>
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
Add to switch statement:
|
||||
|
||||
```powershell
|
||||
switch ($AgentType) {
|
||||
# ... existing cases ...
|
||||
'windsurf' { Update-AgentFile $windsurfFile 'Windsurf' }
|
||||
'' {
|
||||
foreach ($pair in @(
|
||||
# ... existing pairs ...
|
||||
@{file=$windsurfFile; name='Windsurf'}
|
||||
)) {
|
||||
if (Test-Path $pair.file) { Update-AgentFile $pair.file $pair.name }
|
||||
}
|
||||
# Update default creation condition
|
||||
}
|
||||
}
|
||||
```
|
||||
|
||||
#### 6. Update CLI Tool Checks (Optional)
|
||||
|
||||
For agents that require CLI tools, add checks in the `check()` command and agent validation:
|
||||
|
||||
```python
|
||||
# In check() command
|
||||
tracker.add("windsurf", "Windsurf IDE (optional)")
|
||||
windsurf_ok = check_tool_for_tracker("windsurf", "https://windsurf.com/", tracker)
|
||||
|
||||
# In init validation (only if CLI tool required)
|
||||
elif selected_ai == "windsurf":
|
||||
if not check_tool("windsurf", "Install from: https://windsurf.com/"):
|
||||
console.print("[red]Error:[/red] Windsurf CLI is required for Windsurf projects")
|
||||
agent_tool_missing = True
|
||||
```
|
||||
|
||||
**Note**: CLI tool checks are now handled automatically based on the `requires_cli` field in AGENT_CONFIG. No additional code changes needed in the `check()` or `init()` commands - they automatically loop through AGENT_CONFIG and check tools as needed.
|
||||
|
||||
## Important Design Decisions
|
||||
|
||||
### Using Actual CLI Tool Names as Keys
|
||||
|
||||
**CRITICAL**: When adding a new agent to AGENT_CONFIG, always use the **actual executable name** as the dictionary key, not a shortened or convenient version.
|
||||
|
||||
**Why this matters:**
|
||||
|
||||
- The `check_tool()` function uses `shutil.which(tool)` to find executables in the system PATH
|
||||
- If the key doesn't match the actual CLI tool name, you'll need special-case mappings throughout the codebase
|
||||
- This creates unnecessary complexity and maintenance burden
|
||||
|
||||
**Example - The Cursor Lesson:**
|
||||
|
||||
❌ **Wrong approach** (requires special-case mapping):
|
||||
|
||||
```python
|
||||
AGENT_CONFIG = {
|
||||
"cursor": { # Shorthand that doesn't match the actual tool
|
||||
"name": "Cursor",
|
||||
# ...
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
# Then you need special cases everywhere:
|
||||
cli_tool = agent_key
|
||||
if agent_key == "cursor":
|
||||
cli_tool = "cursor-agent" # Map to the real tool name
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType <key>
|
||||
```
|
||||
|
||||
✅ **Correct approach** (no mapping needed):
|
||||
Replace `<key>` with your integration key and `<Agent Name>` / `<context_file>` with the appropriate values.
|
||||
|
||||
```python
|
||||
AGENT_CONFIG = {
|
||||
"cursor-agent": { # Matches the actual executable name
|
||||
"name": "Cursor",
|
||||
# ...
|
||||
}
|
||||
}
|
||||
You must also add the agent to the shared context-update scripts so the shared dispatcher recognises the new key:
|
||||
|
||||
# No special cases needed - just use agent_key directly!
|
||||
- **`scripts/bash/update-agent-context.sh`** — add a file-path variable and a case in `update_specific_agent()`.
|
||||
- **`scripts/powershell/update-agent-context.ps1`** — add a file-path variable, a switch case in `Update-SpecificAgent`, and an entry in `Update-AllExistingAgents`.
|
||||
|
||||
### 5. Test it
|
||||
|
||||
```bash
|
||||
# Install into a test project
|
||||
specify init my-project --integration <key>
|
||||
|
||||
# Verify files were created
|
||||
ls -R my-project/<commands_dir>/
|
||||
|
||||
# Uninstall cleanly
|
||||
cd my-project && specify integration uninstall <key>
|
||||
```
|
||||
|
||||
**Benefits of this approach:**
|
||||
Each integration also has a dedicated test file at `tests/integrations/test_integration_<key>.py`. Run it with:
|
||||
|
||||
- Eliminates special-case logic scattered throughout the codebase
|
||||
- Makes the code more maintainable and easier to understand
|
||||
- Reduces the chance of bugs when adding new agents
|
||||
- Tool checking "just works" without additional mappings
|
||||
```bash
|
||||
pytest tests/integrations/test_integration_<key>.py -v
|
||||
```
|
||||
|
||||
#### 7. Update Devcontainer files (Optional)
|
||||
### 6. Optional overrides
|
||||
|
||||
The base classes handle most work automatically. Override only when the agent deviates from standard patterns:
|
||||
|
||||
| Override | When to use | Example |
|
||||
|---|---|---|
|
||||
| `command_filename(template_name)` | Custom file naming or extension | Copilot → `speckit.{name}.agent.md` |
|
||||
| `options()` | Integration-specific CLI flags via `--integration-options` | Codex → `--skills` flag |
|
||||
| `setup()` | Custom install logic (companion files, settings merge) | Copilot → `.agent.md` + `.prompt.md` + `.vscode/settings.json` |
|
||||
| `teardown()` | Custom uninstall logic | Rarely needed; base handles manifest-tracked files |
|
||||
|
||||
**Example — Copilot (fully custom `setup`):**
|
||||
|
||||
Copilot extends `IntegrationBase` directly because it creates `.agent.md` commands, companion `.prompt.md` files, and merges `.vscode/settings.json`. See `src/specify_cli/integrations/copilot/__init__.py` for the full implementation.
|
||||
|
||||
### 7. Update Devcontainer files (Optional)
|
||||
|
||||
For agents that have VS Code extensions or require CLI installation, update the devcontainer configuration files:
|
||||
|
||||
##### VS Code Extension-based Agents
|
||||
#### VS Code Extension-based Agents
|
||||
|
||||
For agents available as VS Code extensions, add them to `.devcontainer/devcontainer.json`:
|
||||
|
||||
```json
|
||||
```jsonc
|
||||
{
|
||||
"customizations": {
|
||||
"vscode": {
|
||||
"extensions": [
|
||||
// ... existing extensions ...
|
||||
// [New Agent Name]
|
||||
"[New Agent Extension ID]"
|
||||
]
|
||||
}
|
||||
@@ -287,7 +289,7 @@ For agents available as VS Code extensions, add them to `.devcontainer/devcontai
|
||||
}
|
||||
```
|
||||
|
||||
##### CLI-based Agents
|
||||
#### CLI-based Agents
|
||||
|
||||
For agents that require CLI tools, add installation commands to `.devcontainer/post-create.sh`:
|
||||
|
||||
@@ -297,62 +299,16 @@ For agents that require CLI tools, add installation commands to `.devcontainer/p
|
||||
# Existing installations...
|
||||
|
||||
echo -e "\n🤖 Installing [New Agent Name] CLI..."
|
||||
# run_command "npm install -g [agent-cli-package]@latest" # Example for node-based CLI
|
||||
# or other installation instructions (must be non-interactive and compatible with Linux Debian "Trixie" or later)...
|
||||
# run_command "npm install -g [agent-cli-package]@latest"
|
||||
echo "✅ Done"
|
||||
|
||||
```
|
||||
|
||||
**Quick Tips:**
|
||||
|
||||
- **Extension-based agents**: Add to the `extensions` array in `devcontainer.json`
|
||||
- **CLI-based agents**: Add installation scripts to `post-create.sh`
|
||||
- **Hybrid agents**: May require both extension and CLI installation
|
||||
- **Test thoroughly**: Ensure installations work in the devcontainer environment
|
||||
|
||||
## Agent Categories
|
||||
|
||||
### CLI-Based Agents
|
||||
|
||||
Require a command-line tool to be installed:
|
||||
|
||||
- **Claude Code**: `claude` CLI
|
||||
- **Gemini CLI**: `gemini` CLI
|
||||
- **Qwen Code**: `qwen` CLI
|
||||
- **opencode**: `opencode` CLI
|
||||
- **Codex CLI**: `codex` CLI (requires `--ai-skills`)
|
||||
- **Junie**: `junie` CLI
|
||||
- **Auggie CLI**: `auggie` CLI
|
||||
- **CodeBuddy CLI**: `codebuddy` CLI
|
||||
- **Qoder CLI**: `qodercli` CLI
|
||||
- **Kiro CLI**: `kiro-cli` CLI
|
||||
- **Amp**: `amp` CLI
|
||||
- **SHAI**: `shai` CLI
|
||||
- **Tabnine CLI**: `tabnine` CLI
|
||||
- **Kimi Code**: `kimi` CLI
|
||||
- **Mistral Vibe**: `vibe` CLI
|
||||
- **Pi Coding Agent**: `pi` CLI
|
||||
- **iFlow CLI**: `iflow` CLI
|
||||
|
||||
### IDE-Based Agents
|
||||
|
||||
Work within integrated development environments:
|
||||
|
||||
- **GitHub Copilot**: Built into VS Code/compatible editors
|
||||
- **Cursor**: Built into Cursor IDE (`--ai cursor-agent`)
|
||||
- **Windsurf**: Built into Windsurf IDE
|
||||
- **Kilo Code**: Built into Kilo Code IDE
|
||||
- **Roo Code**: Built into Roo Code IDE
|
||||
- **IBM Bob**: Built into IBM Bob IDE
|
||||
- **Trae**: Built into Trae IDE
|
||||
- **Antigravity**: Built into Antigravity IDE (`--ai agy --ai-skills`)
|
||||
---
|
||||
|
||||
## Command File Formats
|
||||
|
||||
### Markdown Format
|
||||
|
||||
Used by: Claude, Cursor, GitHub Copilot, opencode, Windsurf, Junie, Kiro CLI, Amp, SHAI, IBM Bob, Kimi Code, Qwen, Pi, Codex, Auggie, CodeBuddy, Qoder, Roo Code, Kilo Code, Trae, Antigravity, Mistral Vibe, iFlow
|
||||
|
||||
**Standard format:**
|
||||
|
||||
```markdown
|
||||
@@ -376,8 +332,6 @@ Command content with {SCRIPT} and $ARGUMENTS placeholders.
|
||||
|
||||
### TOML Format
|
||||
|
||||
Used by: Gemini, Tabnine
|
||||
|
||||
```toml
|
||||
description = "Command description"
|
||||
|
||||
@@ -386,33 +340,6 @@ Command content with {SCRIPT} and {{args}} placeholders.
|
||||
"""
|
||||
```
|
||||
|
||||
## Directory Conventions
|
||||
|
||||
- **CLI agents**: Usually `.<agent-name>/commands/`
|
||||
- **Singular command exception**:
|
||||
- opencode: `.opencode/command/` (singular `command`, not `commands`)
|
||||
- **Nested path exception**:
|
||||
- Tabnine: `.tabnine/agent/commands/` (extra `agent/` segment)
|
||||
- **Shared `.agents/` folder**:
|
||||
- Amp: `.agents/commands/` (shared folder, not `.amp/`)
|
||||
- Codex: `.agents/skills/` (shared folder; requires `--ai-skills`; invoked as `$speckit-<command>`)
|
||||
- **Skills-based exceptions**:
|
||||
- Kimi Code: `.kimi/skills/` (skills, invoked as `/skill:speckit-<command>`)
|
||||
- **Prompt-based exceptions**:
|
||||
- Kiro CLI: `.kiro/prompts/`
|
||||
- Pi: `.pi/prompts/`
|
||||
- Mistral Vibe: `.vibe/prompts/`
|
||||
- **Rules-based exceptions**:
|
||||
- Trae: `.trae/rules/`
|
||||
- **IDE agents**: Follow IDE-specific patterns:
|
||||
- Copilot: `.github/agents/`
|
||||
- Cursor: `.cursor/commands/`
|
||||
- Windsurf: `.windsurf/workflows/`
|
||||
- Kilo Code: `.kilocode/workflows/`
|
||||
- Roo Code: `.roo/commands/`
|
||||
- IBM Bob: `.bob/commands/`
|
||||
- Antigravity: `.agent/skills/` (`--ai-skills` required; `.agent/commands/` is deprecated)
|
||||
|
||||
## Argument Patterns
|
||||
|
||||
Different agents use different argument placeholders:
|
||||
@@ -422,33 +349,14 @@ Different agents use different argument placeholders:
|
||||
- **Script placeholders**: `{SCRIPT}` (replaced with actual script path)
|
||||
- **Agent placeholders**: `__AGENT__` (replaced with agent name)
|
||||
|
||||
## Testing New Agent Integration
|
||||
|
||||
1. **Build test**: Run package creation script locally
|
||||
2. **CLI test**: Test `specify init --ai <agent>` command
|
||||
3. **File generation**: Verify correct directory structure and files
|
||||
4. **Command validation**: Ensure generated commands work with the agent
|
||||
5. **Context update**: Test agent context update scripts
|
||||
|
||||
## Common Pitfalls
|
||||
|
||||
1. **Using shorthand keys instead of actual CLI tool names**: Always use the actual executable name as the AGENT_CONFIG key (e.g., `"cursor-agent"` not `"cursor"`). This prevents the need for special-case mappings throughout the codebase.
|
||||
2. **Forgetting update scripts**: Both bash and PowerShell scripts must be updated when adding new agents.
|
||||
3. **Incorrect `requires_cli` value**: Set to `True` only for agents that actually have CLI tools to check; set to `False` for IDE-based agents.
|
||||
4. **Wrong argument format**: Use correct placeholder format for each agent type (`$ARGUMENTS` for Markdown, `{{args}}` for TOML).
|
||||
5. **Directory naming**: Follow agent-specific conventions exactly (check existing agents for patterns).
|
||||
6. **Help text inconsistency**: Update all user-facing text consistently (help strings, docstrings, README, error messages).
|
||||
|
||||
## Future Considerations
|
||||
|
||||
When adding new agents:
|
||||
|
||||
- Consider the agent's native command/workflow patterns
|
||||
- Ensure compatibility with the Spec-Driven Development process
|
||||
- Document any special requirements or limitations
|
||||
- Update this guide with lessons learned
|
||||
- Verify the actual CLI tool name before adding to AGENT_CONFIG
|
||||
1. **Using shorthand keys instead of actual CLI tool names**: The integration `key` must match the executable name (e.g., `"cursor-agent"` not `"cursor"`). `shutil.which(key)` is used for CLI tool checks — mismatches require special-case mappings.
|
||||
2. **Forgetting update scripts**: Both bash and PowerShell thin wrappers and the shared context-update scripts must be updated.
|
||||
3. **Incorrect `requires_cli` value**: Set to `True` only for agents that have a CLI tool; set to `False` for IDE-based agents.
|
||||
4. **Wrong argument format**: Use `$ARGUMENTS` for Markdown agents, `{{args}}` for TOML agents.
|
||||
5. **Skipping registration**: The import and `_register()` call in `_register_builtins()` must both be added.
|
||||
|
||||
---
|
||||
|
||||
*This documentation should be updated whenever new agents are added to maintain accuracy and completeness.*
|
||||
*This documentation should be updated whenever new integrations are added to maintain accuracy and completeness.*
|
||||
|
||||
44
CHANGELOG.md
44
CHANGELOG.md
@@ -2,6 +2,50 @@
|
||||
|
||||
<!-- insert new changelog below this comment -->
|
||||
|
||||
## [0.4.5] - 2026-04-02
|
||||
|
||||
### Changed
|
||||
|
||||
- Stage 6: Complete migration — remove legacy scaffold path (#1924) (#2063)
|
||||
- Install Claude Code as native skills and align preset/integration flows (#2051)
|
||||
- Add repoindex 0402 (#2062)
|
||||
- Stage 5: Skills, Generic & Option-Driven Integrations (#1924) (#2052)
|
||||
- feat(scripts): add --dry-run flag to create-new-feature (#1998)
|
||||
- fix: support feature branch numbers with 4+ digits (#2040)
|
||||
- Add community content disclaimers (#2058)
|
||||
- docs: add community extensions website link to README and extensions docs (#2014)
|
||||
- docs: remove dead Cognitive Squad and Understanding extension links and from extensions/catalog.community.json (#2057)
|
||||
- Add fix-findings extension to community catalog (#2039)
|
||||
- Stage 4: TOML integrations — gemini and tabnine migrated to plugin architecture (#2050)
|
||||
- feat: add 5 lifecycle extensions to community catalog (#2049)
|
||||
- Stage 3: Standard markdown integrations — 19 agents migrated to plugin architecture (#2038)
|
||||
- chore: release 0.4.4, begin 0.4.5.dev0 development (#2048)
|
||||
|
||||
## [0.4.4] - 2026-04-01
|
||||
|
||||
### Changed
|
||||
|
||||
- Stage 2: Copilot integration — proof of concept with shared template primitives (#2035)
|
||||
- docs: sync AGENTS.md with AGENT_CONFIG for missing agents (#2025)
|
||||
- docs: ensure manual tests use local specify (#2020)
|
||||
- Stage 1: Integration foundation — base classes, manifest system, and registry (#1925)
|
||||
- fix: harden GitHub Actions workflows (#2021)
|
||||
- chore: use PEP 440 .dev0 versions on main after releases (#2032)
|
||||
- feat: add superpowers bridge extension to community catalog (#2023)
|
||||
- feat: add product-forge extension to community catalog (#2012)
|
||||
- feat(scripts): add --allow-existing-branch flag to create-new-feature (#1999)
|
||||
- fix(scripts): add correct path for copilot-instructions.md (#1997)
|
||||
- Update README.md (#1995)
|
||||
- fix: prevent extension command shadowing (#1994)
|
||||
- Fix Claude Code CLI detection for npm-local installs (#1978)
|
||||
- fix(scripts): honor PowerShell agent and script filters (#1969)
|
||||
- feat: add MAQA extension suite (7 extensions) to community catalog (#1981)
|
||||
- feat: add spec-kit-onboard extension to community catalog (#1991)
|
||||
- Add plan-review-gate to community catalog (#1993)
|
||||
- chore(deps): bump actions/deploy-pages from 4 to 5 (#1990)
|
||||
- chore(deps): bump DavidAnson/markdownlint-cli2-action from 19 to 23 (#1989)
|
||||
- chore: bump version to 0.4.3 (#1986)
|
||||
|
||||
## [0.4.3] - 2026-03-26
|
||||
|
||||
### Changed
|
||||
|
||||
35
README.md
35
README.md
@@ -160,6 +160,11 @@ Want to see Spec Kit in action? Watch our [video overview](https://www.youtube.c
|
||||
|
||||
## 🧩 Community Extensions
|
||||
|
||||
> [!NOTE]
|
||||
> Community extensions are independently created and maintained by their respective authors. GitHub and the Spec Kit maintainers may review pull requests that add entries to the community catalog for formatting, catalog structure, or policy compliance, but they do **not review, audit, endorse, or support the extension code itself**. The Community Extensions website is also a third-party resource. Review extension source code before installation and use at your own discretion.
|
||||
|
||||
🔍 **Browse and search community extensions on the [Community Extensions website](https://speckit-community.github.io/extensions/).**
|
||||
|
||||
The following community-contributed extensions are available in [`catalog.community.json`](extensions/catalog.community.json):
|
||||
|
||||
**Categories:**
|
||||
@@ -182,10 +187,10 @@ The following community-contributed extensions are available in [`catalog.commun
|
||||
| Azure DevOps Integration | Sync user stories and tasks to Azure DevOps work items using OAuth authentication | `integration` | Read+Write | [spec-kit-azure-devops](https://github.com/pragya247/spec-kit-azure-devops) |
|
||||
| Checkpoint Extension | Commit the changes made during the middle of the implementation, so you don't end up with just one very large commit at the end | `code` | Read+Write | [spec-kit-checkpoint](https://github.com/aaronrsun/spec-kit-checkpoint) |
|
||||
| Cleanup Extension | Post-implementation quality gate that reviews changes, fixes small issues (scout rule), creates tasks for medium issues, and generates analysis for large issues | `code` | Read+Write | [spec-kit-cleanup](https://github.com/dsrednicki/spec-kit-cleanup) |
|
||||
| Cognitive Squad | Multi-agent cognitive system with Triadic Model: understanding, internalization, application — with quality gates, backpropagation verification, and self-healing | `docs` | Read+Write | [cognitive-squad](https://github.com/Testimonial/cognitive-squad) |
|
||||
| Conduct Extension | Orchestrates spec-kit phases via sub-agent delegation to reduce context pollution. | `process` | Read+Write | [spec-kit-conduct-ext](https://github.com/twbrandon7/spec-kit-conduct-ext) |
|
||||
| DocGuard — CDD Enforcement | Canonical-Driven Development enforcement. Validates, scores, and traces project documentation with automated checks, AI-driven workflows, and spec-kit hooks. Zero NPM runtime dependencies. | `docs` | Read+Write | [spec-kit-docguard](https://github.com/raccioly/docguard) |
|
||||
| Extensify | Create and validate extensions and extension catalogs | `process` | Read+Write | [extensify](https://github.com/mnriem/spec-kit-extensions/tree/main/extensify) |
|
||||
| Fix Findings | Automated analyze-fix-reanalyze loop that resolves spec findings until clean | `code` | Read+Write | [spec-kit-fix-findings](https://github.com/Quratulain-bilal/spec-kit-fix-findings) |
|
||||
| Fleet Orchestrator | Orchestrate a full feature lifecycle with human-in-the-loop gates across all SpecKit phases | `process` | Read+Write | [spec-kit-fleet](https://github.com/sharathsatish/spec-kit-fleet) |
|
||||
| Iterate | Iterate on spec documents with a two-phase define-and-apply workflow — refine specs mid-implementation and go straight back to building | `docs` | Read+Write | [spec-kit-iterate](https://github.com/imviancagrace/spec-kit-iterate) |
|
||||
| Jira Integration | Create Jira Epics, Stories, and Issues from spec-kit specifications and task breakdowns with configurable hierarchy and custom field support | `integration` | Read+Write | [spec-kit-jira](https://github.com/mbachorik/spec-kit-jira) |
|
||||
@@ -203,14 +208,19 @@ The following community-contributed extensions are available in [`catalog.commun
|
||||
| Product Forge | Full product lifecycle: research → product spec → SpecKit → implement → verify → test | `process` | Read+Write | [speckit-product-forge](https://github.com/VaiYav/speckit-product-forge) |
|
||||
| Project Health Check | Diagnose a Spec Kit project and report health issues across structure, agents, features, scripts, extensions, and git | `visibility` | Read-only | [spec-kit-doctor](https://github.com/KhawarHabibKhan/spec-kit-doctor) |
|
||||
| Project Status | Show current SDD workflow progress — active feature, artifact status, task completion, workflow phase, and extensions summary | `visibility` | Read-only | [spec-kit-status](https://github.com/KhawarHabibKhan/spec-kit-status) |
|
||||
| QA Testing Extension | Systematic QA testing with browser-driven or CLI-based validation of acceptance criteria from spec | `code` | Read-only | [spec-kit-qa](https://github.com/arunt14/spec-kit-qa) |
|
||||
| Ralph Loop | Autonomous implementation loop using AI agent CLI | `code` | Read+Write | [spec-kit-ralph](https://github.com/Rubiss/spec-kit-ralph) |
|
||||
| Reconcile Extension | Reconcile implementation drift by surgically updating feature artifacts. | `docs` | Read+Write | [spec-kit-reconcile](https://github.com/stn1slv/spec-kit-reconcile) |
|
||||
| Repository Index | Generate index for existing repo for overview, architecture and module level. | `docs` | Read-only | [spec-kit-repoindex](https://github.com/liuyiyu/spec-kit-repoindex) |
|
||||
| Retro Extension | Sprint retrospective analysis with metrics, spec accuracy assessment, and improvement suggestions | `process` | Read+Write | [spec-kit-retro](https://github.com/arunt14/spec-kit-retro) |
|
||||
| Retrospective Extension | Post-implementation retrospective with spec adherence scoring, drift analysis, and human-gated spec updates | `docs` | Read+Write | [spec-kit-retrospective](https://github.com/emi-dm/spec-kit-retrospective) |
|
||||
| Review Extension | Post-implementation comprehensive code review with specialized agents for code quality, comments, tests, error handling, type design, and simplification | `code` | Read-only | [spec-kit-review](https://github.com/ismaelJimenez/spec-kit-review) |
|
||||
| SDD Utilities | Resume interrupted workflows, validate project health, and verify spec-to-task traceability | `process` | Read+Write | [speckit-utils](https://github.com/mvanhorn/speckit-utils) |
|
||||
| Staff Review Extension | Staff-engineer-level code review that validates implementation against spec, checks security, performance, and test coverage | `code` | Read-only | [spec-kit-staff-review](https://github.com/arunt14/spec-kit-staff-review) |
|
||||
| Superpowers Bridge | Orchestrates obra/superpowers skills within the spec-kit SDD workflow across the full lifecycle (clarification, TDD, review, verification, critique, debugging, branch completion) | `process` | Read+Write | [superpowers-bridge](https://github.com/RbBtSn0w/spec-kit-extensions/tree/main/superpowers-bridge) |
|
||||
| Ship Release Extension | Automates release pipeline: pre-flight checks, branch sync, changelog generation, CI verification, and PR creation | `process` | Read+Write | [spec-kit-ship](https://github.com/arunt14/spec-kit-ship) |
|
||||
| Spec Critique Extension | Dual-lens critical review of spec and plan from product strategy and engineering risk perspectives | `docs` | Read-only | [spec-kit-critique](https://github.com/arunt14/spec-kit-critique) |
|
||||
| Spec Sync | Detect and resolve drift between specs and implementation. AI-assisted resolution with human approval | `docs` | Read+Write | [spec-kit-sync](https://github.com/bgervin/spec-kit-sync) |
|
||||
| Understanding | Automated requirements quality analysis — 31 deterministic metrics against IEEE/ISO standards with experimental energy-based ambiguity detection | `docs` | Read-only | [understanding](https://github.com/Testimonial/understanding) |
|
||||
| V-Model Extension Pack | Enforces V-Model paired generation of development specs and test specs with full traceability | `docs` | Read+Write | [spec-kit-v-model](https://github.com/leocamello/spec-kit-v-model) |
|
||||
| Verify Extension | Post-implementation quality gate that validates implemented code against specification artifacts | `code` | Read-only | [spec-kit-verify](https://github.com/ismaelJimenez/spec-kit-verify) |
|
||||
| Verify Tasks Extension | Detect phantom completions: tasks marked [X] in tasks.md with no real implementation | `code` | Read-only | [spec-kit-verify-tasks](https://github.com/datastone-inc/spec-kit-verify-tasks) |
|
||||
@@ -219,6 +229,9 @@ To submit your own extension, see the [Extension Publishing Guide](extensions/EX
|
||||
|
||||
## 🎨 Community Presets
|
||||
|
||||
> [!NOTE]
|
||||
> Community presets are independently created and maintained by their respective authors. GitHub and the Spec Kit maintainers may review pull requests that add entries to the community catalog for formatting, catalog structure, or policy compliance, but they do **not review, audit, endorse, or support the preset code itself**. Review preset source code before installation and use at your own discretion.
|
||||
|
||||
The following community-contributed presets customize how Spec Kit behaves — overriding templates, commands, and terminology without changing any tooling. Presets are available in [`catalog.community.json`](presets/catalog.community.json):
|
||||
|
||||
| Preset | Purpose | Provides | Requires | URL |
|
||||
@@ -230,6 +243,9 @@ To build and publish your own preset, see the [Presets Publishing Guide](presets
|
||||
|
||||
## 🚶 Community Walkthroughs
|
||||
|
||||
> [!NOTE]
|
||||
> Community walkthroughs are independently created and maintained by their respective authors. They are **not reviewed, nor endorsed, nor supported by GitHub**. Review their content before following along and use at your own discretion.
|
||||
|
||||
See Spec-Driven Development in action across different scenarios with these community-contributed walkthroughs:
|
||||
|
||||
- **[Greenfield .NET CLI tool](https://github.com/mnriem/spec-kit-dotnet-cli-demo)** — Builds a Timezone Utility as a .NET single-binary CLI tool from a blank directory, covering the full spec-kit workflow: constitution, specify, plan, tasks, and multi-pass implement using GitHub Copilot agents.
|
||||
@@ -248,6 +264,9 @@ See Spec-Driven Development in action across different scenarios with these comm
|
||||
|
||||
## 🛠️ Community Friends
|
||||
|
||||
> [!NOTE]
|
||||
> Community projects listed here are independently created and maintained by their respective authors. They are **not reviewed, nor endorsed, nor supported by GitHub**. Review their source code before installation and use at your own discretion.
|
||||
|
||||
Community projects that extend, visualize, or build on Spec Kit:
|
||||
|
||||
- **[cc-sdd](https://github.com/rhuss/cc-sdd)** - A Claude Code plugin that adds composable traits on top of Spec Kit with [Superpowers](https://github.com/obra/superpowers)-based quality gates, spec/code review, git worktree isolation, and parallel implementation via agent teams.
|
||||
@@ -262,7 +281,7 @@ Community projects that extend, visualize, or build on Spec Kit:
|
||||
| [Kiro CLI](https://kiro.dev/docs/cli/) | ✅ | Use `--ai kiro-cli` (alias: `--ai kiro`) |
|
||||
| [Amp](https://ampcode.com/) | ✅ | |
|
||||
| [Auggie CLI](https://docs.augmentcode.com/cli/overview) | ✅ | |
|
||||
| [Claude Code](https://www.anthropic.com/claude-code) | ✅ | |
|
||||
| [Claude Code](https://www.anthropic.com/claude-code) | ✅ | Installs skills in `.claude/skills`; invoke spec-kit as `/speckit-constitution`, `/speckit-plan`, etc. |
|
||||
| [CodeBuddy CLI](https://www.codebuddy.ai/cli) | ✅ | |
|
||||
| [Codex CLI](https://github.com/openai/codex) | ✅ | Requires `--ai-skills`. Codex recommends [skills](https://developers.openai.com/codex/skills) and treats [custom prompts](https://developers.openai.com/codex/custom-prompts) as deprecated. Spec-kit installs Codex skills into `.agents/skills` and invokes them as `$speckit-<command>`. |
|
||||
| [Cursor](https://cursor.sh/) | ✅ | |
|
||||
@@ -382,8 +401,8 @@ specify init my-project --ai claude --debug
|
||||
# Use GitHub token for API requests (helpful for corporate environments)
|
||||
specify init my-project --ai claude --github-token ghp_your_token_here
|
||||
|
||||
# Install agent skills with the project
|
||||
specify init my-project --ai claude --ai-skills
|
||||
# Claude Code installs skills with the project by default
|
||||
specify init my-project --ai claude
|
||||
|
||||
# Initialize in current directory with agent skills
|
||||
specify init --here --ai gemini --ai-skills
|
||||
@@ -397,7 +416,11 @@ specify check
|
||||
|
||||
### Available Slash Commands
|
||||
|
||||
After running `specify init`, your AI coding agent will have access to these slash commands for structured development.
|
||||
After running `specify init`, your AI coding agent will have access to these structured development commands.
|
||||
|
||||
Most agents expose the traditional dotted slash commands shown below, like `/speckit.plan`.
|
||||
|
||||
Claude Code installs spec-kit as skills and invokes them as `/speckit-constitution`, `/speckit-specify`, `/speckit-plan`, `/speckit-tasks`, and `/speckit-implement`.
|
||||
|
||||
For Codex CLI, `--ai-skills` installs spec-kit as agent skills instead of slash-command prompt files. In Codex skills mode, invoke spec-kit as `$speckit-constitution`, `$speckit-specify`, `$speckit-plan`, `$speckit-tasks`, and `$speckit-implement`.
|
||||
|
||||
|
||||
@@ -24,6 +24,9 @@ specify extension search # Now uses your organization's catalog instead of the
|
||||
|
||||
### Community Reference Catalog (`catalog.community.json`)
|
||||
|
||||
> [!NOTE]
|
||||
> Community extensions are independently created and maintained by their respective authors. GitHub and the Spec Kit maintainers may review pull requests that add entries to the community catalog for formatting, catalog structure, or policy compliance, but they do **not review, audit, endorse, or support the extension code itself**. Review extension source code before installation and use at your own discretion.
|
||||
|
||||
- **Purpose**: Browse available community-contributed extensions
|
||||
- **Status**: Active - contains extensions submitted by the community
|
||||
- **Location**: `extensions/catalog.community.json`
|
||||
@@ -68,6 +71,11 @@ specify extension add <extension-name> --from https://github.com/org/spec-kit-ex
|
||||
|
||||
## Available Community Extensions
|
||||
|
||||
> [!NOTE]
|
||||
> Community extensions are independently created and maintained by their respective authors. GitHub and the Spec Kit maintainers may review pull requests that add entries to the community catalog for formatting, catalog structure, or policy compliance, but they do **not review, audit, endorse, or support the extension code itself**. The Community Extensions website is also a third-party resource. Review extension source code before installation and use at your own discretion.
|
||||
|
||||
🔍 **Browse and search community extensions on the [Community Extensions website](https://speckit-community.github.io/extensions/).**
|
||||
|
||||
See the [Community Extensions](../README.md#-community-extensions) section in the main README for the full list of available community-contributed extensions.
|
||||
|
||||
For the raw catalog data, see [`catalog.community.json`](catalog.community.json).
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"schema_version": "1.0",
|
||||
"updated_at": "2026-03-30T00:00:00Z",
|
||||
"updated_at": "2026-04-01T00:00:00Z",
|
||||
"catalog_url": "https://raw.githubusercontent.com/github/spec-kit/main/extensions/catalog.community.json",
|
||||
"extensions": {
|
||||
"aide": {
|
||||
@@ -167,50 +167,6 @@
|
||||
"created_at": "2026-02-22T00:00:00Z",
|
||||
"updated_at": "2026-02-22T00:00:00Z"
|
||||
},
|
||||
"cognitive-squad": {
|
||||
"name": "Cognitive Squad",
|
||||
"id": "cognitive-squad",
|
||||
"description": "Multi-agent cognitive system with Triadic Model: understanding, internalization, application — with quality gates, backpropagation verification, and self-healing",
|
||||
"author": "Testimonial",
|
||||
"version": "0.1.0",
|
||||
"download_url": "https://github.com/Testimonial/cognitive-squad/archive/refs/tags/v0.1.0.zip",
|
||||
"repository": "https://github.com/Testimonial/cognitive-squad",
|
||||
"homepage": "https://github.com/Testimonial/cognitive-squad",
|
||||
"documentation": "https://github.com/Testimonial/cognitive-squad/blob/main/README.md",
|
||||
"changelog": "https://github.com/Testimonial/cognitive-squad/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.3.0",
|
||||
"tools": [
|
||||
{
|
||||
"name": "understanding",
|
||||
"version": ">=3.4.0",
|
||||
"required": false
|
||||
},
|
||||
{
|
||||
"name": "spec-kit-reverse-eng",
|
||||
"version": ">=1.0.0",
|
||||
"required": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"provides": {
|
||||
"commands": 10,
|
||||
"hooks": 1
|
||||
},
|
||||
"tags": [
|
||||
"ai-agents",
|
||||
"cognitive",
|
||||
"full-lifecycle",
|
||||
"verification",
|
||||
"multi-agent"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-03-16T00:00:00Z",
|
||||
"updated_at": "2026-03-18T00:00:00Z"
|
||||
},
|
||||
"conduct": {
|
||||
"name": "Conduct Extension",
|
||||
"id": "conduct",
|
||||
@@ -241,6 +197,36 @@
|
||||
"created_at": "2026-03-19T12:08:20Z",
|
||||
"updated_at": "2026-03-19T12:08:20Z"
|
||||
},
|
||||
"critique": {
|
||||
"name": "Spec Critique Extension",
|
||||
"id": "critique",
|
||||
"description": "Dual-lens critical review of spec and plan from product strategy and engineering risk perspectives.",
|
||||
"author": "arunt14",
|
||||
"version": "1.0.0",
|
||||
"download_url": "https://github.com/arunt14/spec-kit-critique/archive/refs/tags/v1.0.0.zip",
|
||||
"repository": "https://github.com/arunt14/spec-kit-critique",
|
||||
"homepage": "https://github.com/arunt14/spec-kit-critique",
|
||||
"documentation": "https://github.com/arunt14/spec-kit-critique/blob/main/README.md",
|
||||
"changelog": "https://github.com/arunt14/spec-kit-critique/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0"
|
||||
},
|
||||
"provides": {
|
||||
"commands": 1,
|
||||
"hooks": 1
|
||||
},
|
||||
"tags": [
|
||||
"docs",
|
||||
"review",
|
||||
"planning"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-04-01T00:00:00Z",
|
||||
"updated_at": "2026-04-01T00:00:00Z"
|
||||
},
|
||||
"docguard": {
|
||||
"name": "DocGuard — CDD Enforcement",
|
||||
"id": "docguard",
|
||||
@@ -345,6 +331,38 @@
|
||||
"created_at": "2026-03-18T00:00:00Z",
|
||||
"updated_at": "2026-03-18T00:00:00Z"
|
||||
},
|
||||
"fix-findings": {
|
||||
"name": "Fix Findings",
|
||||
"id": "fix-findings",
|
||||
"description": "Automated analyze-fix-reanalyze loop that resolves spec findings until clean.",
|
||||
"author": "Quratulain-bilal",
|
||||
"version": "1.0.0",
|
||||
"download_url": "https://github.com/Quratulain-bilal/spec-kit-fix-findings/archive/refs/tags/v1.0.0.zip",
|
||||
"repository": "https://github.com/Quratulain-bilal/spec-kit-fix-findings",
|
||||
"homepage": "https://github.com/Quratulain-bilal/spec-kit-fix-findings",
|
||||
"documentation": "https://github.com/Quratulain-bilal/spec-kit-fix-findings/blob/main/README.md",
|
||||
"changelog": "https://github.com/Quratulain-bilal/spec-kit-fix-findings/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0"
|
||||
},
|
||||
"provides": {
|
||||
"commands": 1,
|
||||
"hooks": 1
|
||||
},
|
||||
"tags": [
|
||||
"code",
|
||||
"analysis",
|
||||
"quality",
|
||||
"automation",
|
||||
"findings"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-04-01T00:00:00Z",
|
||||
"updated_at": "2026-04-01T00:00:00Z"
|
||||
},
|
||||
"fleet": {
|
||||
"name": "Fleet Orchestrator",
|
||||
"id": "fleet",
|
||||
@@ -792,7 +810,7 @@
|
||||
"product-forge": {
|
||||
"name": "Product Forge",
|
||||
"id": "product-forge",
|
||||
"description": "Full product lifecycle: research \u2192 product spec \u2192 SpecKit \u2192 implement \u2192 verify \u2192 test",
|
||||
"description": "Full product lifecycle: research → product spec → SpecKit → implement → verify → test",
|
||||
"author": "VaiYav",
|
||||
"version": "1.1.1",
|
||||
"download_url": "https://github.com/VaiYav/speckit-product-forge/archive/refs/tags/v1.1.1.zip",
|
||||
@@ -821,6 +839,36 @@
|
||||
"created_at": "2026-03-28T00:00:00Z",
|
||||
"updated_at": "2026-03-28T00:00:00Z"
|
||||
},
|
||||
"qa": {
|
||||
"name": "QA Testing Extension",
|
||||
"id": "qa",
|
||||
"description": "Systematic QA testing with browser-driven or CLI-based validation of acceptance criteria from spec.",
|
||||
"author": "arunt14",
|
||||
"version": "1.0.0",
|
||||
"download_url": "https://github.com/arunt14/spec-kit-qa/archive/refs/tags/v1.0.0.zip",
|
||||
"repository": "https://github.com/arunt14/spec-kit-qa",
|
||||
"homepage": "https://github.com/arunt14/spec-kit-qa",
|
||||
"documentation": "https://github.com/arunt14/spec-kit-qa/blob/main/README.md",
|
||||
"changelog": "https://github.com/arunt14/spec-kit-qa/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0"
|
||||
},
|
||||
"provides": {
|
||||
"commands": 1,
|
||||
"hooks": 1
|
||||
},
|
||||
"tags": [
|
||||
"code",
|
||||
"testing",
|
||||
"qa"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-04-01T00:00:00Z",
|
||||
"updated_at": "2026-04-01T00:00:00Z"
|
||||
},
|
||||
"ralph": {
|
||||
"name": "Ralph Loop",
|
||||
"id": "ralph",
|
||||
@@ -893,6 +941,73 @@
|
||||
"created_at": "2026-03-14T00:00:00Z",
|
||||
"updated_at": "2026-03-14T00:00:00Z"
|
||||
},
|
||||
"repoindex":{
|
||||
"name": "Repository Index",
|
||||
"id": "repoindex",
|
||||
"description": "Generate index of your repo for overview, architecuture and module",
|
||||
"author": "Yiyu Liu",
|
||||
"version": "1.0.0",
|
||||
"download_url": "https://github.com/liuyiyu/spec-kit-repoindex/archive/refs/tags/v1.0.0.zip",
|
||||
"repository": "https://github.com/liuyiyu/spec-kit-repoindex",
|
||||
"homepage": "https://github.com/liuyiyu/spec-kit-repoindex",
|
||||
"documentation": "https://github.com/liuyiyu/spec-kit-repoindex/tree/main/docs",
|
||||
"changelog": "https://github.com/liuyiyu/spec-kit-repoindex/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0",
|
||||
"tools": [
|
||||
{
|
||||
"name": "no need",
|
||||
"version": ">=1.0.0",
|
||||
"required": false
|
||||
}
|
||||
]
|
||||
},
|
||||
"provides": {
|
||||
"commands": 3,
|
||||
"hooks": 0
|
||||
},
|
||||
"tags": [
|
||||
"utility",
|
||||
"brownfield",
|
||||
"analysis"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-03-23T13:30:00Z",
|
||||
"updated_at": "2026-03-23T13:30:00Z"
|
||||
},
|
||||
"retro": {
|
||||
"name": "Retro Extension",
|
||||
"id": "retro",
|
||||
"description": "Sprint retrospective analysis with metrics, spec accuracy assessment, and improvement suggestions.",
|
||||
"author": "arunt14",
|
||||
"version": "1.0.0",
|
||||
"download_url": "https://github.com/arunt14/spec-kit-retro/archive/refs/tags/v1.0.0.zip",
|
||||
"repository": "https://github.com/arunt14/spec-kit-retro",
|
||||
"homepage": "https://github.com/arunt14/spec-kit-retro",
|
||||
"documentation": "https://github.com/arunt14/spec-kit-retro/blob/main/README.md",
|
||||
"changelog": "https://github.com/arunt14/spec-kit-retro/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0"
|
||||
},
|
||||
"provides": {
|
||||
"commands": 1,
|
||||
"hooks": 0
|
||||
},
|
||||
"tags": [
|
||||
"process",
|
||||
"retrospective",
|
||||
"metrics"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-04-01T00:00:00Z",
|
||||
"updated_at": "2026-04-01T00:00:00Z"
|
||||
},
|
||||
"retrospective": {
|
||||
"name": "Retrospective Extension",
|
||||
"id": "retrospective",
|
||||
@@ -959,6 +1074,36 @@
|
||||
"created_at": "2026-03-06T00:00:00Z",
|
||||
"updated_at": "2026-03-06T00:00:00Z"
|
||||
},
|
||||
"ship": {
|
||||
"name": "Ship Release Extension",
|
||||
"id": "ship",
|
||||
"description": "Automates release pipeline: pre-flight checks, branch sync, changelog generation, CI verification, and PR creation.",
|
||||
"author": "arunt14",
|
||||
"version": "1.0.0",
|
||||
"download_url": "https://github.com/arunt14/spec-kit-ship/archive/refs/tags/v1.0.0.zip",
|
||||
"repository": "https://github.com/arunt14/spec-kit-ship",
|
||||
"homepage": "https://github.com/arunt14/spec-kit-ship",
|
||||
"documentation": "https://github.com/arunt14/spec-kit-ship/blob/main/README.md",
|
||||
"changelog": "https://github.com/arunt14/spec-kit-ship/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0"
|
||||
},
|
||||
"provides": {
|
||||
"commands": 1,
|
||||
"hooks": 1
|
||||
},
|
||||
"tags": [
|
||||
"process",
|
||||
"release",
|
||||
"automation"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-04-01T00:00:00Z",
|
||||
"updated_at": "2026-04-01T00:00:00Z"
|
||||
},
|
||||
"speckit-utils": {
|
||||
"name": "SDD Utilities",
|
||||
"id": "speckit-utils",
|
||||
@@ -991,6 +1136,36 @@
|
||||
"created_at": "2026-03-18T00:00:00Z",
|
||||
"updated_at": "2026-03-18T00:00:00Z"
|
||||
},
|
||||
"staff-review": {
|
||||
"name": "Staff Review Extension",
|
||||
"id": "staff-review",
|
||||
"description": "Staff-engineer-level code review that validates implementation against spec, checks security, performance, and test coverage.",
|
||||
"author": "arunt14",
|
||||
"version": "1.0.0",
|
||||
"download_url": "https://github.com/arunt14/spec-kit-staff-review/archive/refs/tags/v1.0.0.zip",
|
||||
"repository": "https://github.com/arunt14/spec-kit-staff-review",
|
||||
"homepage": "https://github.com/arunt14/spec-kit-staff-review",
|
||||
"documentation": "https://github.com/arunt14/spec-kit-staff-review/blob/main/README.md",
|
||||
"changelog": "https://github.com/arunt14/spec-kit-staff-review/blob/main/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0"
|
||||
},
|
||||
"provides": {
|
||||
"commands": 1,
|
||||
"hooks": 1
|
||||
},
|
||||
"tags": [
|
||||
"code",
|
||||
"review",
|
||||
"quality"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-04-01T00:00:00Z",
|
||||
"updated_at": "2026-04-01T00:00:00Z"
|
||||
},
|
||||
"status": {
|
||||
"name": "Project Status",
|
||||
"id": "status",
|
||||
@@ -1098,47 +1273,6 @@
|
||||
"created_at": "2026-03-02T00:00:00Z",
|
||||
"updated_at": "2026-03-02T00:00:00Z"
|
||||
},
|
||||
"understanding": {
|
||||
"name": "Understanding",
|
||||
"id": "understanding",
|
||||
"description": "Automated requirements quality analysis — validates specs against IEEE/ISO standards using 31 deterministic metrics. Catches ambiguity, missing testability, and structural issues before they reach implementation. Includes experimental energy-based ambiguity detection using local LM token perplexity.",
|
||||
"author": "Ladislav Bihari",
|
||||
"version": "3.4.0",
|
||||
"download_url": "https://github.com/Testimonial/understanding/archive/refs/tags/v3.4.0.zip",
|
||||
"repository": "https://github.com/Testimonial/understanding",
|
||||
"homepage": "https://github.com/Testimonial/understanding",
|
||||
"documentation": "https://github.com/Testimonial/understanding/blob/main/extension/README.md",
|
||||
"changelog": "https://github.com/Testimonial/understanding/blob/main/extension/CHANGELOG.md",
|
||||
"license": "MIT",
|
||||
"requires": {
|
||||
"speckit_version": ">=0.1.0",
|
||||
"tools": [
|
||||
{
|
||||
"name": "understanding",
|
||||
"version": ">=3.4.0",
|
||||
"required": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"provides": {
|
||||
"commands": 3,
|
||||
"hooks": 1
|
||||
},
|
||||
"tags": [
|
||||
"quality",
|
||||
"metrics",
|
||||
"requirements",
|
||||
"validation",
|
||||
"readability",
|
||||
"IEEE-830",
|
||||
"ISO-29148"
|
||||
],
|
||||
"verified": false,
|
||||
"downloads": 0,
|
||||
"stars": 0,
|
||||
"created_at": "2026-03-07T00:00:00Z",
|
||||
"updated_at": "2026-03-07T00:00:00Z"
|
||||
},
|
||||
"v-model": {
|
||||
"name": "V-Model Extension Pack",
|
||||
"id": "v-model",
|
||||
@@ -1234,5 +1368,6 @@
|
||||
"created_at": "2026-03-16T00:00:00Z",
|
||||
"updated_at": "2026-03-16T00:00:00Z"
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
|
||||
@@ -67,6 +67,9 @@ Presets **override**, they don't merge. If two presets both provide `spec-templa
|
||||
|
||||
Presets are discovered through catalogs. By default, Spec Kit uses the official and community catalogs:
|
||||
|
||||
> [!NOTE]
|
||||
> Community presets are independently created and maintained by their respective authors. GitHub and the Spec Kit maintainers may review pull requests that add entries to the community catalog for formatting, catalog structure, or policy compliance, but they do **not review, audit, endorse, or support the preset code itself**. Review preset source code before installation and use at your own discretion.
|
||||
|
||||
```bash
|
||||
# List active catalogs
|
||||
specify preset catalog list
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
[project]
|
||||
name = "specify-cli"
|
||||
version = "0.4.4.dev0"
|
||||
version = "0.4.6.dev0"
|
||||
description = "Specify CLI, part of GitHub Spec Kit. A tool to bootstrap your projects for Spec-Driven Development (SDD)."
|
||||
requires-python = ">=3.11"
|
||||
dependencies = [
|
||||
@@ -41,8 +41,6 @@ packages = ["src/specify_cli"]
|
||||
"templates/commands" = "specify_cli/core_pack/commands"
|
||||
"scripts/bash" = "specify_cli/core_pack/scripts/bash"
|
||||
"scripts/powershell" = "specify_cli/core_pack/scripts/powershell"
|
||||
".github/workflows/scripts/create-release-packages.sh" = "specify_cli/core_pack/release_scripts/create-release-packages.sh"
|
||||
".github/workflows/scripts/create-release-packages.ps1" = "specify_cli/core_pack/release_scripts/create-release-packages.ps1"
|
||||
|
||||
[project.optional-dependencies]
|
||||
test = [
|
||||
|
||||
@@ -78,7 +78,7 @@ get_current_branch() {
|
||||
latest_timestamp="$ts"
|
||||
latest_feature=$dirname
|
||||
fi
|
||||
elif [[ "$dirname" =~ ^([0-9]{3})- ]]; then
|
||||
elif [[ "$dirname" =~ ^([0-9]{3,})- ]]; then
|
||||
local number=${BASH_REMATCH[1]}
|
||||
number=$((10#$number))
|
||||
if [[ "$number" -gt "$highest" ]]; then
|
||||
@@ -124,9 +124,15 @@ check_feature_branch() {
|
||||
return 0
|
||||
fi
|
||||
|
||||
if [[ ! "$branch" =~ ^[0-9]{3}- ]] && [[ ! "$branch" =~ ^[0-9]{8}-[0-9]{6}- ]]; then
|
||||
# Accept sequential prefix (3+ digits) but exclude malformed timestamps
|
||||
# Malformed: 7-or-8 digit date + 6-digit time with no trailing slug (e.g. "2026031-143022" or "20260319-143022")
|
||||
local is_sequential=false
|
||||
if [[ "$branch" =~ ^[0-9]{3,}- ]] && [[ ! "$branch" =~ ^[0-9]{7}-[0-9]{6}- ]] && [[ ! "$branch" =~ ^[0-9]{7,8}-[0-9]{6}$ ]]; then
|
||||
is_sequential=true
|
||||
fi
|
||||
if [[ "$is_sequential" != "true" ]] && [[ ! "$branch" =~ ^[0-9]{8}-[0-9]{6}- ]]; then
|
||||
echo "ERROR: Not on a feature branch. Current branch: $branch" >&2
|
||||
echo "Feature branches should be named like: 001-feature-name or 20260319-143022-feature-name" >&2
|
||||
echo "Feature branches should be named like: 001-feature-name, 1234-feature-name, or 20260319-143022-feature-name" >&2
|
||||
return 1
|
||||
fi
|
||||
|
||||
@@ -146,7 +152,7 @@ find_feature_dir_by_prefix() {
|
||||
local prefix=""
|
||||
if [[ "$branch_name" =~ ^([0-9]{8}-[0-9]{6})- ]]; then
|
||||
prefix="${BASH_REMATCH[1]}"
|
||||
elif [[ "$branch_name" =~ ^([0-9]{3})- ]]; then
|
||||
elif [[ "$branch_name" =~ ^([0-9]{3,})- ]]; then
|
||||
prefix="${BASH_REMATCH[1]}"
|
||||
else
|
||||
# If branch doesn't have a recognized prefix, fall back to exact match
|
||||
|
||||
@@ -3,6 +3,7 @@
|
||||
set -e
|
||||
|
||||
JSON_MODE=false
|
||||
DRY_RUN=false
|
||||
ALLOW_EXISTING=false
|
||||
SHORT_NAME=""
|
||||
BRANCH_NUMBER=""
|
||||
@@ -15,6 +16,9 @@ while [ $i -le $# ]; do
|
||||
--json)
|
||||
JSON_MODE=true
|
||||
;;
|
||||
--dry-run)
|
||||
DRY_RUN=true
|
||||
;;
|
||||
--allow-existing-branch)
|
||||
ALLOW_EXISTING=true
|
||||
;;
|
||||
@@ -49,10 +53,11 @@ while [ $i -le $# ]; do
|
||||
USE_TIMESTAMP=true
|
||||
;;
|
||||
--help|-h)
|
||||
echo "Usage: $0 [--json] [--allow-existing-branch] [--short-name <name>] [--number N] [--timestamp] <feature_description>"
|
||||
echo "Usage: $0 [--json] [--dry-run] [--allow-existing-branch] [--short-name <name>] [--number N] [--timestamp] <feature_description>"
|
||||
echo ""
|
||||
echo "Options:"
|
||||
echo " --json Output in JSON format"
|
||||
echo " --dry-run Compute branch name and paths without creating branches, directories, or files"
|
||||
echo " --allow-existing-branch Switch to branch if it already exists instead of failing"
|
||||
echo " --short-name <name> Provide a custom short name (2-4 words) for the branch"
|
||||
echo " --number N Specify branch number manually (overrides auto-detection)"
|
||||
@@ -74,7 +79,7 @@ done
|
||||
|
||||
FEATURE_DESCRIPTION="${ARGS[*]}"
|
||||
if [ -z "$FEATURE_DESCRIPTION" ]; then
|
||||
echo "Usage: $0 [--json] [--allow-existing-branch] [--short-name <name>] [--number N] [--timestamp] <feature_description>" >&2
|
||||
echo "Usage: $0 [--json] [--dry-run] [--allow-existing-branch] [--short-name <name>] [--number N] [--timestamp] <feature_description>" >&2
|
||||
exit 1
|
||||
fi
|
||||
|
||||
@@ -110,39 +115,59 @@ get_highest_from_specs() {
|
||||
|
||||
# Function to get highest number from git branches
|
||||
get_highest_from_branches() {
|
||||
git branch -a 2>/dev/null | sed 's/^[* ]*//; s|^remotes/[^/]*/||' | _extract_highest_number
|
||||
}
|
||||
|
||||
# Extract the highest sequential feature number from a list of ref names (one per line).
|
||||
# Shared by get_highest_from_branches and get_highest_from_remote_refs.
|
||||
_extract_highest_number() {
|
||||
local highest=0
|
||||
|
||||
# Get all branches (local and remote)
|
||||
branches=$(git branch -a 2>/dev/null || echo "")
|
||||
|
||||
if [ -n "$branches" ]; then
|
||||
while IFS= read -r branch; do
|
||||
# Clean branch name: remove leading markers and remote prefixes
|
||||
clean_branch=$(echo "$branch" | sed 's/^[* ]*//; s|^remotes/[^/]*/||')
|
||||
|
||||
# Extract sequential feature number (>=3 digits), skip timestamp branches.
|
||||
if echo "$clean_branch" | grep -Eq '^[0-9]{3,}-' && ! echo "$clean_branch" | grep -Eq '^[0-9]{8}-[0-9]{6}-'; then
|
||||
number=$(echo "$clean_branch" | grep -Eo '^[0-9]+' || echo "0")
|
||||
number=$((10#$number))
|
||||
if [ "$number" -gt "$highest" ]; then
|
||||
highest=$number
|
||||
fi
|
||||
while IFS= read -r name; do
|
||||
[ -z "$name" ] && continue
|
||||
if echo "$name" | grep -Eq '^[0-9]{3,}-' && ! echo "$name" | grep -Eq '^[0-9]{8}-[0-9]{6}-'; then
|
||||
number=$(echo "$name" | grep -Eo '^[0-9]+' || echo "0")
|
||||
number=$((10#$number))
|
||||
if [ "$number" -gt "$highest" ]; then
|
||||
highest=$number
|
||||
fi
|
||||
done <<< "$branches"
|
||||
fi
|
||||
|
||||
fi
|
||||
done
|
||||
echo "$highest"
|
||||
}
|
||||
|
||||
# Function to check existing branches (local and remote) and return next available number
|
||||
# Function to get highest number from remote branches without fetching (side-effect-free)
|
||||
get_highest_from_remote_refs() {
|
||||
local highest=0
|
||||
|
||||
for remote in $(git remote 2>/dev/null); do
|
||||
local remote_highest
|
||||
remote_highest=$(GIT_TERMINAL_PROMPT=0 git ls-remote --heads "$remote" 2>/dev/null | sed 's|.*refs/heads/||' | _extract_highest_number)
|
||||
if [ "$remote_highest" -gt "$highest" ]; then
|
||||
highest=$remote_highest
|
||||
fi
|
||||
done
|
||||
|
||||
echo "$highest"
|
||||
}
|
||||
|
||||
# Function to check existing branches (local and remote) and return next available number.
|
||||
# When skip_fetch is true, queries remotes via ls-remote (read-only) instead of fetching.
|
||||
check_existing_branches() {
|
||||
local specs_dir="$1"
|
||||
local skip_fetch="${2:-false}"
|
||||
|
||||
# Fetch all remotes to get latest branch info (suppress errors if no remotes)
|
||||
git fetch --all --prune >/dev/null 2>&1 || true
|
||||
|
||||
# Get highest number from ALL branches (not just matching short name)
|
||||
local highest_branch=$(get_highest_from_branches)
|
||||
if [ "$skip_fetch" = true ]; then
|
||||
# Side-effect-free: query remotes via ls-remote
|
||||
local highest_remote=$(get_highest_from_remote_refs)
|
||||
local highest_branch=$(get_highest_from_branches)
|
||||
if [ "$highest_remote" -gt "$highest_branch" ]; then
|
||||
highest_branch=$highest_remote
|
||||
fi
|
||||
else
|
||||
# Fetch all remotes to get latest branch info (suppress errors if no remotes)
|
||||
git fetch --all --prune >/dev/null 2>&1 || true
|
||||
local highest_branch=$(get_highest_from_branches)
|
||||
fi
|
||||
|
||||
# Get highest number from ALL specs (not just matching short name)
|
||||
local highest_spec=$(get_highest_from_specs "$specs_dir")
|
||||
@@ -179,7 +204,9 @@ fi
|
||||
cd "$REPO_ROOT"
|
||||
|
||||
SPECS_DIR="$REPO_ROOT/specs"
|
||||
mkdir -p "$SPECS_DIR"
|
||||
if [ "$DRY_RUN" != true ]; then
|
||||
mkdir -p "$SPECS_DIR"
|
||||
fi
|
||||
|
||||
# Function to generate branch name with stop word filtering and length filtering
|
||||
generate_branch_name() {
|
||||
@@ -251,7 +278,14 @@ if [ "$USE_TIMESTAMP" = true ]; then
|
||||
else
|
||||
# Determine branch number
|
||||
if [ -z "$BRANCH_NUMBER" ]; then
|
||||
if [ "$HAS_GIT" = true ]; then
|
||||
if [ "$DRY_RUN" = true ] && [ "$HAS_GIT" = true ]; then
|
||||
# Dry-run: query remotes via ls-remote (side-effect-free, no fetch)
|
||||
BRANCH_NUMBER=$(check_existing_branches "$SPECS_DIR" true)
|
||||
elif [ "$DRY_RUN" = true ]; then
|
||||
# Dry-run without git: local spec dirs only
|
||||
HIGHEST=$(get_highest_from_specs "$SPECS_DIR")
|
||||
BRANCH_NUMBER=$((HIGHEST + 1))
|
||||
elif [ "$HAS_GIT" = true ]; then
|
||||
# Check existing branches on remotes
|
||||
BRANCH_NUMBER=$(check_existing_branches "$SPECS_DIR")
|
||||
else
|
||||
@@ -288,62 +322,79 @@ if [ ${#BRANCH_NAME} -gt $MAX_BRANCH_LENGTH ]; then
|
||||
>&2 echo "[specify] Truncated to: $BRANCH_NAME (${#BRANCH_NAME} bytes)"
|
||||
fi
|
||||
|
||||
if [ "$HAS_GIT" = true ]; then
|
||||
if ! git checkout -b "$BRANCH_NAME" 2>/dev/null; then
|
||||
# Check if branch already exists
|
||||
if git branch --list "$BRANCH_NAME" | grep -q .; then
|
||||
if [ "$ALLOW_EXISTING" = true ]; then
|
||||
# Switch to the existing branch instead of failing
|
||||
if ! git checkout "$BRANCH_NAME" 2>/dev/null; then
|
||||
>&2 echo "Error: Failed to switch to existing branch '$BRANCH_NAME'. Please resolve any local changes or conflicts and try again."
|
||||
FEATURE_DIR="$SPECS_DIR/$BRANCH_NAME"
|
||||
SPEC_FILE="$FEATURE_DIR/spec.md"
|
||||
|
||||
if [ "$DRY_RUN" != true ]; then
|
||||
if [ "$HAS_GIT" = true ]; then
|
||||
if ! git checkout -b "$BRANCH_NAME" 2>/dev/null; then
|
||||
# Check if branch already exists
|
||||
if git branch --list "$BRANCH_NAME" | grep -q .; then
|
||||
if [ "$ALLOW_EXISTING" = true ]; then
|
||||
# Switch to the existing branch instead of failing
|
||||
if ! git checkout "$BRANCH_NAME" 2>/dev/null; then
|
||||
>&2 echo "Error: Failed to switch to existing branch '$BRANCH_NAME'. Please resolve any local changes or conflicts and try again."
|
||||
exit 1
|
||||
fi
|
||||
elif [ "$USE_TIMESTAMP" = true ]; then
|
||||
>&2 echo "Error: Branch '$BRANCH_NAME' already exists. Rerun to get a new timestamp or use a different --short-name."
|
||||
exit 1
|
||||
else
|
||||
>&2 echo "Error: Branch '$BRANCH_NAME' already exists. Please use a different feature name or specify a different number with --number."
|
||||
exit 1
|
||||
fi
|
||||
elif [ "$USE_TIMESTAMP" = true ]; then
|
||||
>&2 echo "Error: Branch '$BRANCH_NAME' already exists. Rerun to get a new timestamp or use a different --short-name."
|
||||
exit 1
|
||||
else
|
||||
>&2 echo "Error: Branch '$BRANCH_NAME' already exists. Please use a different feature name or specify a different number with --number."
|
||||
>&2 echo "Error: Failed to create git branch '$BRANCH_NAME'. Please check your git configuration and try again."
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
else
|
||||
>&2 echo "[specify] Warning: Git repository not detected; skipped branch creation for $BRANCH_NAME"
|
||||
fi
|
||||
|
||||
mkdir -p "$FEATURE_DIR"
|
||||
|
||||
if [ ! -f "$SPEC_FILE" ]; then
|
||||
TEMPLATE=$(resolve_template "spec-template" "$REPO_ROOT") || true
|
||||
if [ -n "$TEMPLATE" ] && [ -f "$TEMPLATE" ]; then
|
||||
cp "$TEMPLATE" "$SPEC_FILE"
|
||||
else
|
||||
>&2 echo "Error: Failed to create git branch '$BRANCH_NAME'. Please check your git configuration and try again."
|
||||
exit 1
|
||||
echo "Warning: Spec template not found; created empty spec file" >&2
|
||||
touch "$SPEC_FILE"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
>&2 echo "[specify] Warning: Git repository not detected; skipped branch creation for $BRANCH_NAME"
|
||||
|
||||
# Inform the user how to persist the feature variable in their own shell
|
||||
printf '# To persist: export SPECIFY_FEATURE=%q\n' "$BRANCH_NAME" >&2
|
||||
fi
|
||||
|
||||
FEATURE_DIR="$SPECS_DIR/$BRANCH_NAME"
|
||||
mkdir -p "$FEATURE_DIR"
|
||||
|
||||
SPEC_FILE="$FEATURE_DIR/spec.md"
|
||||
if [ ! -f "$SPEC_FILE" ]; then
|
||||
TEMPLATE=$(resolve_template "spec-template" "$REPO_ROOT") || true
|
||||
if [ -n "$TEMPLATE" ] && [ -f "$TEMPLATE" ]; then
|
||||
cp "$TEMPLATE" "$SPEC_FILE"
|
||||
else
|
||||
echo "Warning: Spec template not found; created empty spec file" >&2
|
||||
touch "$SPEC_FILE"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Inform the user how to persist the feature variable in their own shell
|
||||
printf '# To persist: export SPECIFY_FEATURE=%q\n' "$BRANCH_NAME" >&2
|
||||
|
||||
if $JSON_MODE; then
|
||||
if command -v jq >/dev/null 2>&1; then
|
||||
jq -cn \
|
||||
--arg branch_name "$BRANCH_NAME" \
|
||||
--arg spec_file "$SPEC_FILE" \
|
||||
--arg feature_num "$FEATURE_NUM" \
|
||||
'{BRANCH_NAME:$branch_name,SPEC_FILE:$spec_file,FEATURE_NUM:$feature_num}'
|
||||
if [ "$DRY_RUN" = true ]; then
|
||||
jq -cn \
|
||||
--arg branch_name "$BRANCH_NAME" \
|
||||
--arg spec_file "$SPEC_FILE" \
|
||||
--arg feature_num "$FEATURE_NUM" \
|
||||
'{BRANCH_NAME:$branch_name,SPEC_FILE:$spec_file,FEATURE_NUM:$feature_num,DRY_RUN:true}'
|
||||
else
|
||||
jq -cn \
|
||||
--arg branch_name "$BRANCH_NAME" \
|
||||
--arg spec_file "$SPEC_FILE" \
|
||||
--arg feature_num "$FEATURE_NUM" \
|
||||
'{BRANCH_NAME:$branch_name,SPEC_FILE:$spec_file,FEATURE_NUM:$feature_num}'
|
||||
fi
|
||||
else
|
||||
printf '{"BRANCH_NAME":"%s","SPEC_FILE":"%s","FEATURE_NUM":"%s"}\n' "$(json_escape "$BRANCH_NAME")" "$(json_escape "$SPEC_FILE")" "$(json_escape "$FEATURE_NUM")"
|
||||
if [ "$DRY_RUN" = true ]; then
|
||||
printf '{"BRANCH_NAME":"%s","SPEC_FILE":"%s","FEATURE_NUM":"%s","DRY_RUN":true}\n' "$(json_escape "$BRANCH_NAME")" "$(json_escape "$SPEC_FILE")" "$(json_escape "$FEATURE_NUM")"
|
||||
else
|
||||
printf '{"BRANCH_NAME":"%s","SPEC_FILE":"%s","FEATURE_NUM":"%s"}\n' "$(json_escape "$BRANCH_NAME")" "$(json_escape "$SPEC_FILE")" "$(json_escape "$FEATURE_NUM")"
|
||||
fi
|
||||
fi
|
||||
else
|
||||
echo "BRANCH_NAME: $BRANCH_NAME"
|
||||
echo "SPEC_FILE: $SPEC_FILE"
|
||||
echo "FEATURE_NUM: $FEATURE_NUM"
|
||||
printf '# To persist in your shell: export SPECIFY_FEATURE=%q\n' "$BRANCH_NAME"
|
||||
if [ "$DRY_RUN" != true ]; then
|
||||
printf '# To persist in your shell: export SPECIFY_FEATURE=%q\n' "$BRANCH_NAME"
|
||||
fi
|
||||
fi
|
||||
|
||||
@@ -83,8 +83,8 @@ function Get-CurrentBranch {
|
||||
$latestTimestamp = $ts
|
||||
$latestFeature = $_.Name
|
||||
}
|
||||
} elseif ($_.Name -match '^(\d{3})-') {
|
||||
$num = [int]$matches[1]
|
||||
} elseif ($_.Name -match '^(\d{3,})-') {
|
||||
$num = [long]$matches[1]
|
||||
if ($num -gt $highest) {
|
||||
$highest = $num
|
||||
# Only update if no timestamp branch found yet
|
||||
@@ -139,9 +139,13 @@ function Test-FeatureBranch {
|
||||
return $true
|
||||
}
|
||||
|
||||
if ($Branch -notmatch '^[0-9]{3}-' -and $Branch -notmatch '^\d{8}-\d{6}-') {
|
||||
# Accept sequential prefix (3+ digits) but exclude malformed timestamps
|
||||
# Malformed: 7-or-8 digit date + 6-digit time with no trailing slug (e.g. "2026031-143022" or "20260319-143022")
|
||||
$hasMalformedTimestamp = ($Branch -match '^[0-9]{7}-[0-9]{6}-') -or ($Branch -match '^(?:\d{7}|\d{8})-\d{6}$')
|
||||
$isSequential = ($Branch -match '^[0-9]{3,}-') -and (-not $hasMalformedTimestamp)
|
||||
if (-not $isSequential -and $Branch -notmatch '^\d{8}-\d{6}-') {
|
||||
Write-Output "ERROR: Not on a feature branch. Current branch: $Branch"
|
||||
Write-Output "Feature branches should be named like: 001-feature-name or 20260319-143022-feature-name"
|
||||
Write-Output "Feature branches should be named like: 001-feature-name, 1234-feature-name, or 20260319-143022-feature-name"
|
||||
return $false
|
||||
}
|
||||
return $true
|
||||
|
||||
@@ -4,6 +4,7 @@
|
||||
param(
|
||||
[switch]$Json,
|
||||
[switch]$AllowExistingBranch,
|
||||
[switch]$DryRun,
|
||||
[string]$ShortName,
|
||||
[Parameter()]
|
||||
[long]$Number = 0,
|
||||
@@ -16,10 +17,11 @@ $ErrorActionPreference = 'Stop'
|
||||
|
||||
# Show help if requested
|
||||
if ($Help) {
|
||||
Write-Host "Usage: ./create-new-feature.ps1 [-Json] [-AllowExistingBranch] [-ShortName <name>] [-Number N] [-Timestamp] <feature description>"
|
||||
Write-Host "Usage: ./create-new-feature.ps1 [-Json] [-DryRun] [-AllowExistingBranch] [-ShortName <name>] [-Number N] [-Timestamp] <feature description>"
|
||||
Write-Host ""
|
||||
Write-Host "Options:"
|
||||
Write-Host " -Json Output in JSON format"
|
||||
Write-Host " -DryRun Compute branch name and paths without creating branches, directories, or files"
|
||||
Write-Host " -AllowExistingBranch Switch to branch if it already exists instead of failing"
|
||||
Write-Host " -ShortName <name> Provide a custom short name (2-4 words) for the branch"
|
||||
Write-Host " -Number N Specify branch number manually (overrides auto-detection)"
|
||||
@@ -35,7 +37,7 @@ if ($Help) {
|
||||
|
||||
# Check if feature description provided
|
||||
if (-not $FeatureDescription -or $FeatureDescription.Count -eq 0) {
|
||||
Write-Error "Usage: ./create-new-feature.ps1 [-Json] [-AllowExistingBranch] [-ShortName <name>] [-Number N] [-Timestamp] <feature description>"
|
||||
Write-Error "Usage: ./create-new-feature.ps1 [-Json] [-DryRun] [-AllowExistingBranch] [-ShortName <name>] [-Number N] [-Timestamp] <feature description>"
|
||||
exit 1
|
||||
}
|
||||
|
||||
@@ -49,7 +51,7 @@ if ([string]::IsNullOrWhiteSpace($featureDesc)) {
|
||||
|
||||
function Get-HighestNumberFromSpecs {
|
||||
param([string]$SpecsDir)
|
||||
|
||||
|
||||
[long]$highest = 0
|
||||
if (Test-Path $SpecsDir) {
|
||||
Get-ChildItem -Path $SpecsDir -Directory | ForEach-Object {
|
||||
@@ -65,48 +67,87 @@ function Get-HighestNumberFromSpecs {
|
||||
return $highest
|
||||
}
|
||||
|
||||
function Get-HighestNumberFromBranches {
|
||||
param()
|
||||
|
||||
# Extract the highest sequential feature number from a list of branch/ref names.
|
||||
# Shared by Get-HighestNumberFromBranches and Get-HighestNumberFromRemoteRefs.
|
||||
function Get-HighestNumberFromNames {
|
||||
param([string[]]$Names)
|
||||
|
||||
[long]$highest = 0
|
||||
try {
|
||||
$branches = git branch -a 2>$null
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
foreach ($branch in $branches) {
|
||||
# Clean branch name: remove leading markers and remote prefixes
|
||||
$cleanBranch = $branch.Trim() -replace '^\*?\s+', '' -replace '^remotes/[^/]+/', ''
|
||||
|
||||
# Extract sequential feature number (>=3 digits), skip timestamp branches.
|
||||
if ($cleanBranch -match '^(\d{3,})-' -and $cleanBranch -notmatch '^\d{8}-\d{6}-') {
|
||||
[long]$num = 0
|
||||
if ([long]::TryParse($matches[1], [ref]$num) -and $num -gt $highest) {
|
||||
$highest = $num
|
||||
}
|
||||
}
|
||||
foreach ($name in $Names) {
|
||||
if ($name -match '^(\d{3,})-' -and $name -notmatch '^\d{8}-\d{6}-') {
|
||||
[long]$num = 0
|
||||
if ([long]::TryParse($matches[1], [ref]$num) -and $num -gt $highest) {
|
||||
$highest = $num
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
# If git command fails, return 0
|
||||
Write-Verbose "Could not check Git branches: $_"
|
||||
}
|
||||
return $highest
|
||||
}
|
||||
|
||||
function Get-HighestNumberFromBranches {
|
||||
param()
|
||||
|
||||
try {
|
||||
$branches = git branch -a 2>$null
|
||||
if ($LASTEXITCODE -eq 0 -and $branches) {
|
||||
$cleanNames = $branches | ForEach-Object {
|
||||
$_.Trim() -replace '^\*?\s+', '' -replace '^remotes/[^/]+/', ''
|
||||
}
|
||||
return Get-HighestNumberFromNames -Names $cleanNames
|
||||
}
|
||||
} catch {
|
||||
Write-Verbose "Could not check Git branches: $_"
|
||||
}
|
||||
return 0
|
||||
}
|
||||
|
||||
function Get-HighestNumberFromRemoteRefs {
|
||||
[long]$highest = 0
|
||||
try {
|
||||
$remotes = git remote 2>$null
|
||||
if ($remotes) {
|
||||
foreach ($remote in $remotes) {
|
||||
$env:GIT_TERMINAL_PROMPT = '0'
|
||||
$refs = git ls-remote --heads $remote 2>$null
|
||||
$env:GIT_TERMINAL_PROMPT = $null
|
||||
if ($LASTEXITCODE -eq 0 -and $refs) {
|
||||
$refNames = $refs | ForEach-Object {
|
||||
if ($_ -match 'refs/heads/(.+)$') { $matches[1] }
|
||||
} | Where-Object { $_ }
|
||||
$remoteHighest = Get-HighestNumberFromNames -Names $refNames
|
||||
if ($remoteHighest -gt $highest) { $highest = $remoteHighest }
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
Write-Verbose "Could not query remote refs: $_"
|
||||
}
|
||||
return $highest
|
||||
}
|
||||
|
||||
# Return next available branch number. When SkipFetch is true, queries remotes
|
||||
# via ls-remote (read-only) instead of fetching.
|
||||
function Get-NextBranchNumber {
|
||||
param(
|
||||
[string]$SpecsDir
|
||||
[string]$SpecsDir,
|
||||
[switch]$SkipFetch
|
||||
)
|
||||
|
||||
# Fetch all remotes to get latest branch info (suppress errors if no remotes)
|
||||
try {
|
||||
git fetch --all --prune 2>$null | Out-Null
|
||||
} catch {
|
||||
# Ignore fetch errors
|
||||
if ($SkipFetch) {
|
||||
# Side-effect-free: query remotes via ls-remote
|
||||
$highestBranch = Get-HighestNumberFromBranches
|
||||
$highestRemote = Get-HighestNumberFromRemoteRefs
|
||||
$highestBranch = [Math]::Max($highestBranch, $highestRemote)
|
||||
} else {
|
||||
# Fetch all remotes to get latest branch info (suppress errors if no remotes)
|
||||
try {
|
||||
git fetch --all --prune 2>$null | Out-Null
|
||||
} catch {
|
||||
# Ignore fetch errors
|
||||
}
|
||||
$highestBranch = Get-HighestNumberFromBranches
|
||||
}
|
||||
|
||||
# Get highest number from ALL branches (not just matching short name)
|
||||
$highestBranch = Get-HighestNumberFromBranches
|
||||
|
||||
# Get highest number from ALL specs (not just matching short name)
|
||||
$highestSpec = Get-HighestNumberFromSpecs -SpecsDir $SpecsDir
|
||||
|
||||
@@ -119,7 +160,7 @@ function Get-NextBranchNumber {
|
||||
|
||||
function ConvertTo-CleanBranchName {
|
||||
param([string]$Name)
|
||||
|
||||
|
||||
return $Name.ToLower() -replace '[^a-z0-9]', '-' -replace '-{2,}', '-' -replace '^-', '' -replace '-$', ''
|
||||
}
|
||||
# Load common functions (includes Get-RepoRoot, Test-HasGit, Resolve-Template)
|
||||
@@ -134,12 +175,14 @@ $hasGit = Test-HasGit
|
||||
Set-Location $repoRoot
|
||||
|
||||
$specsDir = Join-Path $repoRoot 'specs'
|
||||
New-Item -ItemType Directory -Path $specsDir -Force | Out-Null
|
||||
if (-not $DryRun) {
|
||||
New-Item -ItemType Directory -Path $specsDir -Force | Out-Null
|
||||
}
|
||||
|
||||
# Function to generate branch name with stop word filtering and length filtering
|
||||
function Get-BranchName {
|
||||
param([string]$Description)
|
||||
|
||||
|
||||
# Common stop words to filter out
|
||||
$stopWords = @(
|
||||
'i', 'a', 'an', 'the', 'to', 'for', 'of', 'in', 'on', 'at', 'by', 'with', 'from',
|
||||
@@ -148,17 +191,17 @@ function Get-BranchName {
|
||||
'this', 'that', 'these', 'those', 'my', 'your', 'our', 'their',
|
||||
'want', 'need', 'add', 'get', 'set'
|
||||
)
|
||||
|
||||
|
||||
# Convert to lowercase and extract words (alphanumeric only)
|
||||
$cleanName = $Description.ToLower() -replace '[^a-z0-9\s]', ' '
|
||||
$words = $cleanName -split '\s+' | Where-Object { $_ }
|
||||
|
||||
|
||||
# Filter words: remove stop words and words shorter than 3 chars (unless they're uppercase acronyms in original)
|
||||
$meaningfulWords = @()
|
||||
foreach ($word in $words) {
|
||||
# Skip stop words
|
||||
if ($stopWords -contains $word) { continue }
|
||||
|
||||
|
||||
# Keep words that are length >= 3 OR appear as uppercase in original (likely acronyms)
|
||||
if ($word.Length -ge 3) {
|
||||
$meaningfulWords += $word
|
||||
@@ -167,7 +210,7 @@ function Get-BranchName {
|
||||
$meaningfulWords += $word
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
# If we have meaningful words, use first 3-4 of them
|
||||
if ($meaningfulWords.Count -gt 0) {
|
||||
$maxWords = if ($meaningfulWords.Count -eq 4) { 4 } else { 3 }
|
||||
@@ -203,7 +246,13 @@ if ($Timestamp) {
|
||||
} else {
|
||||
# Determine branch number
|
||||
if ($Number -eq 0) {
|
||||
if ($hasGit) {
|
||||
if ($DryRun -and $hasGit) {
|
||||
# Dry-run: query remotes via ls-remote (side-effect-free, no fetch)
|
||||
$Number = Get-NextBranchNumber -SpecsDir $specsDir -SkipFetch
|
||||
} elseif ($DryRun) {
|
||||
# Dry-run without git: local spec dirs only
|
||||
$Number = (Get-HighestNumberFromSpecs -SpecsDir $specsDir) + 1
|
||||
} elseif ($hasGit) {
|
||||
# Check existing branches on remotes
|
||||
$Number = Get-NextBranchNumber -SpecsDir $specsDir
|
||||
} else {
|
||||
@@ -224,86 +273,94 @@ if ($branchName.Length -gt $maxBranchLength) {
|
||||
# Account for prefix length: timestamp (15) + hyphen (1) = 16, or sequential (3) + hyphen (1) = 4
|
||||
$prefixLength = $featureNum.Length + 1
|
||||
$maxSuffixLength = $maxBranchLength - $prefixLength
|
||||
|
||||
|
||||
# Truncate suffix
|
||||
$truncatedSuffix = $branchSuffix.Substring(0, [Math]::Min($branchSuffix.Length, $maxSuffixLength))
|
||||
# Remove trailing hyphen if truncation created one
|
||||
$truncatedSuffix = $truncatedSuffix -replace '-$', ''
|
||||
|
||||
|
||||
$originalBranchName = $branchName
|
||||
$branchName = "$featureNum-$truncatedSuffix"
|
||||
|
||||
|
||||
Write-Warning "[specify] Branch name exceeded GitHub's 244-byte limit"
|
||||
Write-Warning "[specify] Original: $originalBranchName ($($originalBranchName.Length) bytes)"
|
||||
Write-Warning "[specify] Truncated to: $branchName ($($branchName.Length) bytes)"
|
||||
}
|
||||
|
||||
if ($hasGit) {
|
||||
$branchCreated = $false
|
||||
try {
|
||||
git checkout -q -b $branchName 2>$null | Out-Null
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
$branchCreated = $true
|
||||
}
|
||||
} catch {
|
||||
# Exception during git command
|
||||
}
|
||||
$featureDir = Join-Path $specsDir $branchName
|
||||
$specFile = Join-Path $featureDir 'spec.md'
|
||||
|
||||
if (-not $branchCreated) {
|
||||
# Check if branch already exists
|
||||
$existingBranch = git branch --list $branchName 2>$null
|
||||
if ($existingBranch) {
|
||||
if ($AllowExistingBranch) {
|
||||
# Switch to the existing branch instead of failing
|
||||
git checkout -q $branchName 2>$null | Out-Null
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "Error: Branch '$branchName' exists but could not be checked out. Resolve any uncommitted changes or conflicts and try again."
|
||||
if (-not $DryRun) {
|
||||
if ($hasGit) {
|
||||
$branchCreated = $false
|
||||
try {
|
||||
git checkout -q -b $branchName 2>$null | Out-Null
|
||||
if ($LASTEXITCODE -eq 0) {
|
||||
$branchCreated = $true
|
||||
}
|
||||
} catch {
|
||||
# Exception during git command
|
||||
}
|
||||
|
||||
if (-not $branchCreated) {
|
||||
# Check if branch already exists
|
||||
$existingBranch = git branch --list $branchName 2>$null
|
||||
if ($existingBranch) {
|
||||
if ($AllowExistingBranch) {
|
||||
# Switch to the existing branch instead of failing
|
||||
git checkout -q $branchName 2>$null | Out-Null
|
||||
if ($LASTEXITCODE -ne 0) {
|
||||
Write-Error "Error: Branch '$branchName' exists but could not be checked out. Resolve any uncommitted changes or conflicts and try again."
|
||||
exit 1
|
||||
}
|
||||
} elseif ($Timestamp) {
|
||||
Write-Error "Error: Branch '$branchName' already exists. Rerun to get a new timestamp or use a different -ShortName."
|
||||
exit 1
|
||||
} else {
|
||||
Write-Error "Error: Branch '$branchName' already exists. Please use a different feature name or specify a different number with -Number."
|
||||
exit 1
|
||||
}
|
||||
} elseif ($Timestamp) {
|
||||
Write-Error "Error: Branch '$branchName' already exists. Rerun to get a new timestamp or use a different -ShortName."
|
||||
exit 1
|
||||
} else {
|
||||
Write-Error "Error: Branch '$branchName' already exists. Please use a different feature name or specify a different number with -Number."
|
||||
Write-Error "Error: Failed to create git branch '$branchName'. Please check your git configuration and try again."
|
||||
exit 1
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Write-Warning "[specify] Warning: Git repository not detected; skipped branch creation for $branchName"
|
||||
}
|
||||
|
||||
New-Item -ItemType Directory -Path $featureDir -Force | Out-Null
|
||||
|
||||
if (-not (Test-Path -PathType Leaf $specFile)) {
|
||||
$template = Resolve-Template -TemplateName 'spec-template' -RepoRoot $repoRoot
|
||||
if ($template -and (Test-Path $template)) {
|
||||
Copy-Item $template $specFile -Force
|
||||
} else {
|
||||
Write-Error "Error: Failed to create git branch '$branchName'. Please check your git configuration and try again."
|
||||
exit 1
|
||||
New-Item -ItemType File -Path $specFile -Force | Out-Null
|
||||
}
|
||||
}
|
||||
} else {
|
||||
Write-Warning "[specify] Warning: Git repository not detected; skipped branch creation for $branchName"
|
||||
|
||||
# Set the SPECIFY_FEATURE environment variable for the current session
|
||||
$env:SPECIFY_FEATURE = $branchName
|
||||
}
|
||||
|
||||
$featureDir = Join-Path $specsDir $branchName
|
||||
New-Item -ItemType Directory -Path $featureDir -Force | Out-Null
|
||||
|
||||
$specFile = Join-Path $featureDir 'spec.md'
|
||||
if (-not (Test-Path -PathType Leaf $specFile)) {
|
||||
$template = Resolve-Template -TemplateName 'spec-template' -RepoRoot $repoRoot
|
||||
if ($template -and (Test-Path $template)) {
|
||||
Copy-Item $template $specFile -Force
|
||||
} else {
|
||||
New-Item -ItemType File -Path $specFile | Out-Null
|
||||
}
|
||||
}
|
||||
|
||||
# Set the SPECIFY_FEATURE environment variable for the current session
|
||||
$env:SPECIFY_FEATURE = $branchName
|
||||
|
||||
if ($Json) {
|
||||
$obj = [PSCustomObject]@{
|
||||
$obj = [PSCustomObject]@{
|
||||
BRANCH_NAME = $branchName
|
||||
SPEC_FILE = $specFile
|
||||
FEATURE_NUM = $featureNum
|
||||
HAS_GIT = $hasGit
|
||||
}
|
||||
if ($DryRun) {
|
||||
$obj | Add-Member -NotePropertyName 'DRY_RUN' -NotePropertyValue $true
|
||||
}
|
||||
$obj | ConvertTo-Json -Compress
|
||||
} else {
|
||||
Write-Output "BRANCH_NAME: $branchName"
|
||||
Write-Output "SPEC_FILE: $specFile"
|
||||
Write-Output "FEATURE_NUM: $featureNum"
|
||||
Write-Output "HAS_GIT: $hasGit"
|
||||
Write-Output "SPECIFY_FEATURE environment variable set to: $branchName"
|
||||
if (-not $DryRun) {
|
||||
Write-Output "SPECIFY_FEATURE environment variable set to: $branchName"
|
||||
}
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -15,6 +15,18 @@ from copy import deepcopy
|
||||
import yaml
|
||||
|
||||
|
||||
def _build_agent_configs() -> dict[str, Any]:
|
||||
"""Derive CommandRegistrar.AGENT_CONFIGS from INTEGRATION_REGISTRY."""
|
||||
from specify_cli.integrations import INTEGRATION_REGISTRY
|
||||
configs: dict[str, dict[str, Any]] = {}
|
||||
for key, integration in INTEGRATION_REGISTRY.items():
|
||||
if key == "generic":
|
||||
continue
|
||||
if integration.registrar_config:
|
||||
configs[key] = dict(integration.registrar_config)
|
||||
return configs
|
||||
|
||||
|
||||
class CommandRegistrar:
|
||||
"""Handles registration of commands with AI agents.
|
||||
|
||||
@@ -23,147 +35,26 @@ class CommandRegistrar:
|
||||
and companion files (e.g. Copilot .prompt.md).
|
||||
"""
|
||||
|
||||
# Agent configurations with directory, format, and argument placeholder
|
||||
AGENT_CONFIGS = {
|
||||
"claude": {
|
||||
"dir": ".claude/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"gemini": {
|
||||
"dir": ".gemini/commands",
|
||||
"format": "toml",
|
||||
"args": "{{args}}",
|
||||
"extension": ".toml"
|
||||
},
|
||||
"copilot": {
|
||||
"dir": ".github/agents",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".agent.md"
|
||||
},
|
||||
"cursor": {
|
||||
"dir": ".cursor/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"qwen": {
|
||||
"dir": ".qwen/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"opencode": {
|
||||
"dir": ".opencode/command",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"codex": {
|
||||
"dir": ".agents/skills",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": "/SKILL.md",
|
||||
},
|
||||
"windsurf": {
|
||||
"dir": ".windsurf/workflows",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"junie": {
|
||||
"dir": ".junie/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"kilocode": {
|
||||
"dir": ".kilocode/workflows",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"auggie": {
|
||||
"dir": ".augment/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"roo": {
|
||||
"dir": ".roo/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"codebuddy": {
|
||||
"dir": ".codebuddy/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"qodercli": {
|
||||
"dir": ".qoder/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"kiro-cli": {
|
||||
"dir": ".kiro/prompts",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"pi": {
|
||||
"dir": ".pi/prompts",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"amp": {
|
||||
"dir": ".agents/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"shai": {
|
||||
"dir": ".shai/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"tabnine": {
|
||||
"dir": ".tabnine/agent/commands",
|
||||
"format": "toml",
|
||||
"args": "{{args}}",
|
||||
"extension": ".toml"
|
||||
},
|
||||
"bob": {
|
||||
"dir": ".bob/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"kimi": {
|
||||
"dir": ".kimi/skills",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": "/SKILL.md",
|
||||
},
|
||||
"trae": {
|
||||
"dir": ".trae/rules",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
},
|
||||
"iflow": {
|
||||
"dir": ".iflow/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md"
|
||||
}
|
||||
}
|
||||
# Derived from INTEGRATION_REGISTRY — single source of truth.
|
||||
# Populated lazily via _ensure_configs() on first use.
|
||||
AGENT_CONFIGS: dict[str, dict[str, Any]] = {}
|
||||
_configs_loaded: bool = False
|
||||
|
||||
def __init__(self) -> None:
|
||||
self._ensure_configs()
|
||||
|
||||
def __init_subclass__(cls, **kwargs: Any) -> None:
|
||||
super().__init_subclass__(**kwargs)
|
||||
cls._ensure_configs()
|
||||
|
||||
@classmethod
|
||||
def _ensure_configs(cls) -> None:
|
||||
if not cls._configs_loaded:
|
||||
try:
|
||||
cls.AGENT_CONFIGS = _build_agent_configs()
|
||||
cls._configs_loaded = True
|
||||
except ImportError:
|
||||
pass # Circular import during module init; retry on next access
|
||||
|
||||
@staticmethod
|
||||
def parse_frontmatter(content: str) -> tuple[dict, str]:
|
||||
@@ -235,11 +126,11 @@ class CommandRegistrar:
|
||||
|
||||
for key, script_path in scripts.items():
|
||||
if isinstance(script_path, str):
|
||||
scripts[key] = self._rewrite_project_relative_paths(script_path)
|
||||
scripts[key] = self.rewrite_project_relative_paths(script_path)
|
||||
return frontmatter
|
||||
|
||||
@staticmethod
|
||||
def _rewrite_project_relative_paths(text: str) -> str:
|
||||
def rewrite_project_relative_paths(text: str) -> str:
|
||||
"""Rewrite repo-relative paths to their generated project locations."""
|
||||
if not isinstance(text, str) or not text:
|
||||
return text
|
||||
@@ -358,16 +249,35 @@ class CommandRegistrar:
|
||||
body = self.resolve_skill_placeholders(agent_name, frontmatter, body, project_root)
|
||||
|
||||
description = frontmatter.get("description", f"Spec-kit workflow command: {skill_name}")
|
||||
skill_frontmatter = self.build_skill_frontmatter(
|
||||
agent_name,
|
||||
skill_name,
|
||||
description,
|
||||
f"{source_id}:{source_file}",
|
||||
)
|
||||
return self.render_frontmatter(skill_frontmatter) + "\n" + body
|
||||
|
||||
@staticmethod
|
||||
def build_skill_frontmatter(
|
||||
agent_name: str,
|
||||
skill_name: str,
|
||||
description: str,
|
||||
source: str,
|
||||
) -> dict:
|
||||
"""Build consistent SKILL.md frontmatter across all skill generators."""
|
||||
skill_frontmatter = {
|
||||
"name": skill_name,
|
||||
"description": description,
|
||||
"compatibility": "Requires spec-kit project structure with .specify/ directory",
|
||||
"metadata": {
|
||||
"author": "github-spec-kit",
|
||||
"source": f"{source_id}:{source_file}",
|
||||
"source": source,
|
||||
},
|
||||
}
|
||||
return self.render_frontmatter(skill_frontmatter) + "\n" + body
|
||||
if agent_name == "claude":
|
||||
# Claude skills should only run when explicitly invoked.
|
||||
skill_frontmatter["disable-model-invocation"] = True
|
||||
return skill_frontmatter
|
||||
|
||||
@staticmethod
|
||||
def resolve_skill_placeholders(agent_name: str, frontmatter: dict, body: str, project_root: Path) -> str:
|
||||
@@ -422,7 +332,7 @@ class CommandRegistrar:
|
||||
body = body.replace("{AGENT_SCRIPT}", agent_script_command)
|
||||
|
||||
body = body.replace("{ARGS}", "$ARGUMENTS").replace("__AGENT__", agent_name)
|
||||
return CommandRegistrar._rewrite_project_relative_paths(body)
|
||||
return CommandRegistrar.rewrite_project_relative_paths(body)
|
||||
|
||||
def _convert_argument_placeholder(self, content: str, from_placeholder: str, to_placeholder: str) -> str:
|
||||
"""Convert argument placeholder format.
|
||||
@@ -475,6 +385,7 @@ class CommandRegistrar:
|
||||
Raises:
|
||||
ValueError: If agent is not supported
|
||||
"""
|
||||
self._ensure_configs()
|
||||
if agent_name not in self.AGENT_CONFIGS:
|
||||
raise ValueError(f"Unsupported agent: {agent_name}")
|
||||
|
||||
@@ -574,6 +485,7 @@ class CommandRegistrar:
|
||||
"""
|
||||
results = {}
|
||||
|
||||
self._ensure_configs()
|
||||
for agent_name, agent_config in self.AGENT_CONFIGS.items():
|
||||
agent_dir = project_root / agent_config["dir"]
|
||||
|
||||
@@ -601,6 +513,7 @@ class CommandRegistrar:
|
||||
registered_commands: Dict mapping agent names to command name lists
|
||||
project_root: Path to project root
|
||||
"""
|
||||
self._ensure_configs()
|
||||
for agent_name, cmd_names in registered_commands.items():
|
||||
if agent_name not in self.AGENT_CONFIGS:
|
||||
continue
|
||||
@@ -618,3 +531,13 @@ class CommandRegistrar:
|
||||
prompt_file = project_root / ".github" / "prompts" / f"{cmd_name}.prompt.md"
|
||||
if prompt_file.exists():
|
||||
prompt_file.unlink()
|
||||
|
||||
|
||||
# Populate AGENT_CONFIGS after class definition.
|
||||
# Catches ImportError from circular imports during module loading;
|
||||
# _configs_loaded stays False so the next explicit access retries.
|
||||
try:
|
||||
CommandRegistrar._ensure_configs()
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
|
||||
@@ -801,15 +801,12 @@ class ExtensionManager:
|
||||
original_desc = frontmatter.get("description", "")
|
||||
description = original_desc or f"Extension command: {cmd_name}"
|
||||
|
||||
frontmatter_data = {
|
||||
"name": skill_name,
|
||||
"description": description,
|
||||
"compatibility": "Requires spec-kit project structure with .specify/ directory",
|
||||
"metadata": {
|
||||
"author": "github-spec-kit",
|
||||
"source": f"extension:{manifest.id}",
|
||||
},
|
||||
}
|
||||
frontmatter_data = registrar.build_skill_frontmatter(
|
||||
selected_ai,
|
||||
skill_name,
|
||||
description,
|
||||
f"extension:{manifest.id}",
|
||||
)
|
||||
frontmatter_text = yaml.safe_dump(frontmatter_data, sort_keys=False).strip()
|
||||
|
||||
# Derive a human-friendly title from the command name
|
||||
@@ -2138,11 +2135,14 @@ class HookExecutor:
|
||||
init_options = self._load_init_options()
|
||||
selected_ai = init_options.get("ai")
|
||||
codex_skill_mode = selected_ai == "codex" and bool(init_options.get("ai_skills"))
|
||||
claude_skill_mode = selected_ai == "claude" and bool(init_options.get("ai_skills"))
|
||||
kimi_skill_mode = selected_ai == "kimi"
|
||||
|
||||
skill_name = self._skill_name_from_command(command_id)
|
||||
if codex_skill_mode and skill_name:
|
||||
return f"${skill_name}"
|
||||
if claude_skill_mode and skill_name:
|
||||
return f"/{skill_name}"
|
||||
if kimi_skill_mode and skill_name:
|
||||
return f"/skill:{skill_name}"
|
||||
|
||||
|
||||
@@ -37,10 +37,69 @@ def get_integration(key: str) -> IntegrationBase | None:
|
||||
# -- Register built-in integrations --------------------------------------
|
||||
|
||||
def _register_builtins() -> None:
|
||||
"""Register all built-in integrations."""
|
||||
from .copilot import CopilotIntegration
|
||||
"""Register all built-in integrations.
|
||||
|
||||
Package directories use Python-safe identifiers (e.g. ``kiro_cli``,
|
||||
``cursor_agent``). The user-facing integration key stored in
|
||||
``IntegrationBase.key`` stays hyphenated (``"kiro-cli"``,
|
||||
``"cursor-agent"``) to match the actual CLI tool / binary name that
|
||||
users install and invoke.
|
||||
"""
|
||||
# -- Imports (alphabetical) -------------------------------------------
|
||||
from .agy import AgyIntegration
|
||||
from .amp import AmpIntegration
|
||||
from .auggie import AuggieIntegration
|
||||
from .bob import BobIntegration
|
||||
from .claude import ClaudeIntegration
|
||||
from .codex import CodexIntegration
|
||||
from .codebuddy import CodebuddyIntegration
|
||||
from .copilot import CopilotIntegration
|
||||
from .cursor_agent import CursorAgentIntegration
|
||||
from .gemini import GeminiIntegration
|
||||
from .generic import GenericIntegration
|
||||
from .iflow import IflowIntegration
|
||||
from .junie import JunieIntegration
|
||||
from .kilocode import KilocodeIntegration
|
||||
from .kimi import KimiIntegration
|
||||
from .kiro_cli import KiroCliIntegration
|
||||
from .opencode import OpencodeIntegration
|
||||
from .pi import PiIntegration
|
||||
from .qodercli import QodercliIntegration
|
||||
from .qwen import QwenIntegration
|
||||
from .roo import RooIntegration
|
||||
from .shai import ShaiIntegration
|
||||
from .tabnine import TabnineIntegration
|
||||
from .trae import TraeIntegration
|
||||
from .vibe import VibeIntegration
|
||||
from .windsurf import WindsurfIntegration
|
||||
|
||||
# -- Registration (alphabetical) --------------------------------------
|
||||
_register(AgyIntegration())
|
||||
_register(AmpIntegration())
|
||||
_register(AuggieIntegration())
|
||||
_register(BobIntegration())
|
||||
_register(ClaudeIntegration())
|
||||
_register(CodexIntegration())
|
||||
_register(CodebuddyIntegration())
|
||||
_register(CopilotIntegration())
|
||||
_register(CursorAgentIntegration())
|
||||
_register(GeminiIntegration())
|
||||
_register(GenericIntegration())
|
||||
_register(IflowIntegration())
|
||||
_register(JunieIntegration())
|
||||
_register(KilocodeIntegration())
|
||||
_register(KimiIntegration())
|
||||
_register(KiroCliIntegration())
|
||||
_register(OpencodeIntegration())
|
||||
_register(PiIntegration())
|
||||
_register(QodercliIntegration())
|
||||
_register(QwenIntegration())
|
||||
_register(RooIntegration())
|
||||
_register(ShaiIntegration())
|
||||
_register(TabnineIntegration())
|
||||
_register(TraeIntegration())
|
||||
_register(VibeIntegration())
|
||||
_register(WindsurfIntegration())
|
||||
|
||||
|
||||
_register_builtins()
|
||||
|
||||
41
src/specify_cli/integrations/agy/__init__.py
Normal file
41
src/specify_cli/integrations/agy/__init__.py
Normal file
@@ -0,0 +1,41 @@
|
||||
"""Antigravity (agy) integration — skills-based agent.
|
||||
|
||||
Antigravity uses ``.agent/skills/speckit-<name>/SKILL.md`` layout.
|
||||
Explicit command support was deprecated in version 1.20.5;
|
||||
``--skills`` defaults to ``True``.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from ..base import IntegrationOption, SkillsIntegration
|
||||
|
||||
|
||||
class AgyIntegration(SkillsIntegration):
|
||||
"""Integration for Antigravity IDE."""
|
||||
|
||||
key = "agy"
|
||||
config = {
|
||||
"name": "Antigravity",
|
||||
"folder": ".agent/",
|
||||
"commands_subdir": "skills",
|
||||
"install_url": None,
|
||||
"requires_cli": False,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".agent/skills",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": "/SKILL.md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
|
||||
@classmethod
|
||||
def options(cls) -> list[IntegrationOption]:
|
||||
return [
|
||||
IntegrationOption(
|
||||
"--skills",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Install as agent skills (default for Antigravity since v1.20.5)",
|
||||
),
|
||||
]
|
||||
17
src/specify_cli/integrations/agy/scripts/update-context.ps1
Normal file
17
src/specify_cli/integrations/agy/scripts/update-context.ps1
Normal file
@@ -0,0 +1,17 @@
|
||||
# update-context.ps1 — Antigravity (agy) integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType agy
|
||||
24
src/specify_cli/integrations/agy/scripts/update-context.sh
Executable file
24
src/specify_cli/integrations/agy/scripts/update-context.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Antigravity (agy) integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" agy
|
||||
21
src/specify_cli/integrations/amp/__init__.py
Normal file
21
src/specify_cli/integrations/amp/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Amp CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class AmpIntegration(MarkdownIntegration):
|
||||
key = "amp"
|
||||
config = {
|
||||
"name": "Amp",
|
||||
"folder": ".agents/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://ampcode.com/manual#install",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".agents/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
23
src/specify_cli/integrations/amp/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/amp/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Amp integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType amp
|
||||
28
src/specify_cli/integrations/amp/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/amp/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Amp integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" amp
|
||||
21
src/specify_cli/integrations/auggie/__init__.py
Normal file
21
src/specify_cli/integrations/auggie/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Auggie CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class AuggieIntegration(MarkdownIntegration):
|
||||
key = "auggie"
|
||||
config = {
|
||||
"name": "Auggie CLI",
|
||||
"folder": ".augment/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://docs.augmentcode.com/cli/setup-auggie/install-auggie-cli",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".augment/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = ".augment/rules/specify-rules.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Auggie CLI integration: create/update .augment/rules/specify-rules.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType auggie
|
||||
28
src/specify_cli/integrations/auggie/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/auggie/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Auggie CLI integration: create/update .augment/rules/specify-rules.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" auggie
|
||||
@@ -5,6 +5,10 @@ Provides:
|
||||
- ``IntegrationBase`` — abstract base every integration must implement.
|
||||
- ``MarkdownIntegration`` — concrete base for standard Markdown-format
|
||||
integrations (the common case — subclass, set three class attrs, done).
|
||||
- ``TomlIntegration`` — concrete base for TOML-format integrations
|
||||
(Gemini, Tabnine — subclass, set three class attrs, done).
|
||||
- ``SkillsIntegration`` — concrete base for integrations that install
|
||||
commands as agent skills (``speckit-<name>/SKILL.md`` layout).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
@@ -198,14 +202,65 @@ class IntegrationBase(ABC):
|
||||
) -> Path:
|
||||
"""Write *content* to *dest*, hash it, and record in *manifest*.
|
||||
|
||||
Creates parent directories as needed. Returns *dest*.
|
||||
Creates parent directories as needed. Writes bytes directly to
|
||||
avoid platform newline translation (CRLF on Windows). Any
|
||||
``\r\n`` sequences in *content* are normalised to ``\n`` before
|
||||
writing. Returns *dest*.
|
||||
"""
|
||||
dest.parent.mkdir(parents=True, exist_ok=True)
|
||||
dest.write_text(content, encoding="utf-8")
|
||||
normalized = content.replace("\r\n", "\n")
|
||||
dest.write_bytes(normalized.encode("utf-8"))
|
||||
rel = dest.resolve().relative_to(project_root.resolve())
|
||||
manifest.record_existing(rel)
|
||||
return dest
|
||||
|
||||
def integration_scripts_dir(self) -> Path | None:
|
||||
"""Return path to this integration's bundled ``scripts/`` directory.
|
||||
|
||||
Looks for a ``scripts/`` sibling of the module that defines the
|
||||
concrete subclass (not ``IntegrationBase`` itself).
|
||||
Returns ``None`` if the directory doesn't exist.
|
||||
"""
|
||||
import inspect
|
||||
|
||||
cls_file = inspect.getfile(type(self))
|
||||
scripts = Path(cls_file).resolve().parent / "scripts"
|
||||
return scripts if scripts.is_dir() else None
|
||||
|
||||
def install_scripts(
|
||||
self,
|
||||
project_root: Path,
|
||||
manifest: IntegrationManifest,
|
||||
) -> list[Path]:
|
||||
"""Copy integration-specific scripts into the project.
|
||||
|
||||
Copies files from this integration's ``scripts/`` directory to
|
||||
``.specify/integrations/<key>/scripts/`` in the project. Shell
|
||||
scripts are made executable. All copied files are recorded in
|
||||
*manifest*.
|
||||
|
||||
Returns the list of files created.
|
||||
"""
|
||||
scripts_src = self.integration_scripts_dir()
|
||||
if not scripts_src:
|
||||
return []
|
||||
|
||||
created: list[Path] = []
|
||||
scripts_dest = project_root / ".specify" / "integrations" / self.key / "scripts"
|
||||
scripts_dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
for src_script in sorted(scripts_src.iterdir()):
|
||||
if not src_script.is_file():
|
||||
continue
|
||||
dst_script = scripts_dest / src_script.name
|
||||
shutil.copy2(src_script, dst_script)
|
||||
if dst_script.suffix == ".sh":
|
||||
dst_script.chmod(dst_script.stat().st_mode | 0o111)
|
||||
self.record_file_in_manifest(dst_script, project_root, manifest)
|
||||
created.append(dst_script)
|
||||
|
||||
return created
|
||||
|
||||
@staticmethod
|
||||
def process_template(
|
||||
content: str,
|
||||
@@ -299,13 +354,11 @@ class IntegrationBase(ABC):
|
||||
# 6. Replace __AGENT__
|
||||
content = content.replace("__AGENT__", agent_name)
|
||||
|
||||
# 7. Rewrite paths (matches release script's rewrite_paths())
|
||||
content = re.sub(r"(/?)memory/", r".specify/memory/", content)
|
||||
content = re.sub(r"(/?)scripts/", r".specify/scripts/", content)
|
||||
content = re.sub(r"(/?)templates/", r".specify/templates/", content)
|
||||
# Fix double-prefix (same as release script's .specify.specify/ fix)
|
||||
content = content.replace(".specify.specify/", ".specify/")
|
||||
content = content.replace(".specify/.specify/", ".specify/")
|
||||
# 7. Rewrite paths — delegate to the shared implementation in
|
||||
# CommandRegistrar so extension-local paths are preserved and
|
||||
# boundary rules stay consistent across the codebase.
|
||||
from specify_cli.agents import CommandRegistrar
|
||||
content = CommandRegistrar.rewrite_project_relative_paths(content)
|
||||
|
||||
return content
|
||||
|
||||
@@ -405,11 +458,336 @@ class MarkdownIntegration(IntegrationBase):
|
||||
Subclasses only need to set ``key``, ``config``, ``registrar_config``
|
||||
(and optionally ``context_file``). Everything else is inherited.
|
||||
|
||||
The default ``setup()`` from ``IntegrationBase`` copies templates
|
||||
into the agent's commands directory — which is correct for the
|
||||
standard Markdown case.
|
||||
``setup()`` processes command templates (replacing ``{SCRIPT}``,
|
||||
``{ARGS}``, ``__AGENT__``, rewriting paths) and installs
|
||||
integration-specific scripts (``update-context.sh`` / ``.ps1``).
|
||||
"""
|
||||
|
||||
# MarkdownIntegration inherits IntegrationBase.setup() as-is.
|
||||
# Future stages may add markdown-specific path rewriting here.
|
||||
pass
|
||||
def setup(
|
||||
self,
|
||||
project_root: Path,
|
||||
manifest: IntegrationManifest,
|
||||
parsed_options: dict[str, Any] | None = None,
|
||||
**opts: Any,
|
||||
) -> list[Path]:
|
||||
templates = self.list_command_templates()
|
||||
if not templates:
|
||||
return []
|
||||
|
||||
project_root_resolved = project_root.resolve()
|
||||
if manifest.project_root != project_root_resolved:
|
||||
raise ValueError(
|
||||
f"manifest.project_root ({manifest.project_root}) does not match "
|
||||
f"project_root ({project_root_resolved})"
|
||||
)
|
||||
|
||||
dest = self.commands_dest(project_root).resolve()
|
||||
try:
|
||||
dest.relative_to(project_root_resolved)
|
||||
except ValueError as exc:
|
||||
raise ValueError(
|
||||
f"Integration destination {dest} escapes "
|
||||
f"project root {project_root_resolved}"
|
||||
) from exc
|
||||
dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
script_type = opts.get("script_type", "sh")
|
||||
arg_placeholder = self.registrar_config.get("args", "$ARGUMENTS") if self.registrar_config else "$ARGUMENTS"
|
||||
created: list[Path] = []
|
||||
|
||||
for src_file in templates:
|
||||
raw = src_file.read_text(encoding="utf-8")
|
||||
processed = self.process_template(raw, self.key, script_type, arg_placeholder)
|
||||
dst_name = self.command_filename(src_file.stem)
|
||||
dst_file = self.write_file_and_record(
|
||||
processed, dest / dst_name, project_root, manifest
|
||||
)
|
||||
created.append(dst_file)
|
||||
|
||||
created.extend(self.install_scripts(project_root, manifest))
|
||||
return created
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# TomlIntegration — TOML-format agents (Gemini, Tabnine)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
class TomlIntegration(IntegrationBase):
|
||||
"""Concrete base for integrations that use TOML command format.
|
||||
|
||||
Mirrors ``MarkdownIntegration`` closely: subclasses only need to set
|
||||
``key``, ``config``, ``registrar_config`` (and optionally
|
||||
``context_file``). Everything else is inherited.
|
||||
|
||||
``setup()`` processes command templates through the same placeholder
|
||||
pipeline as ``MarkdownIntegration``, then converts the result to
|
||||
TOML format (``description`` key + ``prompt`` multiline string).
|
||||
"""
|
||||
|
||||
def command_filename(self, template_name: str) -> str:
|
||||
"""TOML commands use ``.toml`` extension."""
|
||||
return f"speckit.{template_name}.toml"
|
||||
|
||||
@staticmethod
|
||||
def _extract_description(content: str) -> str:
|
||||
"""Extract the ``description`` value from YAML frontmatter.
|
||||
|
||||
Scans lines between the first pair of ``---`` delimiters for a
|
||||
top-level ``description:`` key. Returns the value (with
|
||||
surrounding quotes stripped) or an empty string if not found.
|
||||
"""
|
||||
in_frontmatter = False
|
||||
for line in content.splitlines():
|
||||
stripped = line.rstrip("\n\r")
|
||||
if stripped == "---":
|
||||
if not in_frontmatter:
|
||||
in_frontmatter = True
|
||||
continue
|
||||
break # second ---
|
||||
if in_frontmatter and stripped.startswith("description:"):
|
||||
_, _, value = stripped.partition(":")
|
||||
return value.strip().strip('"').strip("'")
|
||||
return ""
|
||||
|
||||
@staticmethod
|
||||
def _render_toml(description: str, body: str) -> str:
|
||||
"""Render a TOML command file from description and body.
|
||||
|
||||
Uses multiline basic strings (``\"\"\"``) with backslashes
|
||||
escaped, matching the output of the release script. Falls back
|
||||
to multiline literal strings (``'''``) if the body contains
|
||||
``\"\"\"``, then to an escaped basic string as a last resort.
|
||||
|
||||
The body is rstrip'd so the closing delimiter appears on the line
|
||||
immediately after the last content line — matching the release
|
||||
script's ``echo "$body"; echo '\"\"\"'`` pattern.
|
||||
"""
|
||||
toml_lines: list[str] = []
|
||||
|
||||
if description:
|
||||
desc = description.replace('"', '\\"')
|
||||
toml_lines.append(f'description = "{desc}"')
|
||||
toml_lines.append("")
|
||||
|
||||
body = body.rstrip("\n")
|
||||
|
||||
# Escape backslashes for basic multiline strings.
|
||||
escaped = body.replace("\\", "\\\\")
|
||||
|
||||
if '"""' not in escaped:
|
||||
toml_lines.append('prompt = """')
|
||||
toml_lines.append(escaped)
|
||||
toml_lines.append('"""')
|
||||
elif "'''" not in body:
|
||||
toml_lines.append("prompt = '''")
|
||||
toml_lines.append(body)
|
||||
toml_lines.append("'''")
|
||||
else:
|
||||
escaped_body = (
|
||||
body.replace("\\", "\\\\")
|
||||
.replace('"', '\\"')
|
||||
.replace("\n", "\\n")
|
||||
.replace("\r", "\\r")
|
||||
.replace("\t", "\\t")
|
||||
)
|
||||
toml_lines.append(f'prompt = "{escaped_body}"')
|
||||
|
||||
return "\n".join(toml_lines) + "\n"
|
||||
|
||||
def setup(
|
||||
self,
|
||||
project_root: Path,
|
||||
manifest: IntegrationManifest,
|
||||
parsed_options: dict[str, Any] | None = None,
|
||||
**opts: Any,
|
||||
) -> list[Path]:
|
||||
templates = self.list_command_templates()
|
||||
if not templates:
|
||||
return []
|
||||
|
||||
project_root_resolved = project_root.resolve()
|
||||
if manifest.project_root != project_root_resolved:
|
||||
raise ValueError(
|
||||
f"manifest.project_root ({manifest.project_root}) does not match "
|
||||
f"project_root ({project_root_resolved})"
|
||||
)
|
||||
|
||||
dest = self.commands_dest(project_root).resolve()
|
||||
try:
|
||||
dest.relative_to(project_root_resolved)
|
||||
except ValueError as exc:
|
||||
raise ValueError(
|
||||
f"Integration destination {dest} escapes "
|
||||
f"project root {project_root_resolved}"
|
||||
) from exc
|
||||
dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
script_type = opts.get("script_type", "sh")
|
||||
arg_placeholder = self.registrar_config.get("args", "{{args}}") if self.registrar_config else "{{args}}"
|
||||
created: list[Path] = []
|
||||
|
||||
for src_file in templates:
|
||||
raw = src_file.read_text(encoding="utf-8")
|
||||
description = self._extract_description(raw)
|
||||
processed = self.process_template(raw, self.key, script_type, arg_placeholder)
|
||||
toml_content = self._render_toml(description, processed)
|
||||
dst_name = self.command_filename(src_file.stem)
|
||||
dst_file = self.write_file_and_record(
|
||||
toml_content, dest / dst_name, project_root, manifest
|
||||
)
|
||||
created.append(dst_file)
|
||||
|
||||
created.extend(self.install_scripts(project_root, manifest))
|
||||
return created
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# SkillsIntegration — skills-format agents (Codex, Kimi, Agy)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class SkillsIntegration(IntegrationBase):
|
||||
"""Concrete base for integrations that install commands as agent skills.
|
||||
|
||||
Skills use the ``speckit-<name>/SKILL.md`` directory layout following
|
||||
the `agentskills.io <https://agentskills.io/specification>`_ spec.
|
||||
|
||||
Subclasses set ``key``, ``config``, ``registrar_config`` (and
|
||||
optionally ``context_file``) like any integration. They may also
|
||||
override ``options()`` to declare additional CLI flags (e.g.
|
||||
``--skills``, ``--migrate-legacy``).
|
||||
|
||||
``setup()`` processes each shared command template into a
|
||||
``speckit-<name>/SKILL.md`` file with skills-oriented frontmatter.
|
||||
"""
|
||||
|
||||
def skills_dest(self, project_root: Path) -> Path:
|
||||
"""Return the absolute path to the skills output directory.
|
||||
|
||||
Derived from ``config["folder"]`` and the configured
|
||||
``commands_subdir`` (defaults to ``"skills"``).
|
||||
|
||||
Raises ``ValueError`` when ``config`` or ``folder`` is missing.
|
||||
"""
|
||||
if not self.config:
|
||||
raise ValueError(
|
||||
f"{type(self).__name__}.config is not set."
|
||||
)
|
||||
folder = self.config.get("folder")
|
||||
if not folder:
|
||||
raise ValueError(
|
||||
f"{type(self).__name__}.config is missing required 'folder' entry."
|
||||
)
|
||||
subdir = self.config.get("commands_subdir", "skills")
|
||||
return project_root / folder / subdir
|
||||
|
||||
def setup(
|
||||
self,
|
||||
project_root: Path,
|
||||
manifest: IntegrationManifest,
|
||||
parsed_options: dict[str, Any] | None = None,
|
||||
**opts: Any,
|
||||
) -> list[Path]:
|
||||
"""Install command templates as agent skills.
|
||||
|
||||
Creates ``speckit-<name>/SKILL.md`` for each shared command
|
||||
template. Each SKILL.md has normalised frontmatter containing
|
||||
``name``, ``description``, ``compatibility``, and ``metadata``.
|
||||
"""
|
||||
import yaml
|
||||
|
||||
templates = self.list_command_templates()
|
||||
if not templates:
|
||||
return []
|
||||
|
||||
project_root_resolved = project_root.resolve()
|
||||
if manifest.project_root != project_root_resolved:
|
||||
raise ValueError(
|
||||
f"manifest.project_root ({manifest.project_root}) does not match "
|
||||
f"project_root ({project_root_resolved})"
|
||||
)
|
||||
|
||||
skills_dir = self.skills_dest(project_root).resolve()
|
||||
try:
|
||||
skills_dir.relative_to(project_root_resolved)
|
||||
except ValueError as exc:
|
||||
raise ValueError(
|
||||
f"Skills destination {skills_dir} escapes "
|
||||
f"project root {project_root_resolved}"
|
||||
) from exc
|
||||
|
||||
script_type = opts.get("script_type", "sh")
|
||||
arg_placeholder = (
|
||||
self.registrar_config.get("args", "$ARGUMENTS")
|
||||
if self.registrar_config
|
||||
else "$ARGUMENTS"
|
||||
)
|
||||
created: list[Path] = []
|
||||
|
||||
for src_file in templates:
|
||||
raw = src_file.read_text(encoding="utf-8")
|
||||
|
||||
# Derive the skill name from the template stem
|
||||
command_name = src_file.stem # e.g. "plan"
|
||||
skill_name = f"speckit-{command_name.replace('.', '-')}"
|
||||
|
||||
# Parse frontmatter for description
|
||||
frontmatter: dict[str, Any] = {}
|
||||
if raw.startswith("---"):
|
||||
parts = raw.split("---", 2)
|
||||
if len(parts) >= 3:
|
||||
try:
|
||||
fm = yaml.safe_load(parts[1])
|
||||
if isinstance(fm, dict):
|
||||
frontmatter = fm
|
||||
except yaml.YAMLError:
|
||||
pass
|
||||
|
||||
# Process body through the standard template pipeline
|
||||
processed_body = self.process_template(
|
||||
raw, self.key, script_type, arg_placeholder
|
||||
)
|
||||
# Strip the processed frontmatter — we rebuild it for skills.
|
||||
# Preserve leading whitespace in the body to match release ZIP
|
||||
# output byte-for-byte (the template body starts with \n after
|
||||
# the closing ---).
|
||||
if processed_body.startswith("---"):
|
||||
parts = processed_body.split("---", 2)
|
||||
if len(parts) >= 3:
|
||||
processed_body = parts[2]
|
||||
|
||||
# Select description — use the original template description
|
||||
# to stay byte-for-byte identical with release ZIP output.
|
||||
description = frontmatter.get("description", "")
|
||||
if not description:
|
||||
description = f"Spec Kit: {command_name} workflow"
|
||||
|
||||
# Build SKILL.md with manually formatted frontmatter to match
|
||||
# the release packaging script output exactly (double-quoted
|
||||
# values, no yaml.safe_dump quoting differences).
|
||||
def _quote(v: str) -> str:
|
||||
escaped = v.replace("\\", "\\\\").replace('"', '\\"')
|
||||
return f'"{escaped}"'
|
||||
|
||||
skill_content = (
|
||||
f"---\n"
|
||||
f"name: {_quote(skill_name)}\n"
|
||||
f"description: {_quote(description)}\n"
|
||||
f"compatibility: {_quote('Requires spec-kit project structure with .specify/ directory')}\n"
|
||||
f"metadata:\n"
|
||||
f" author: {_quote('github-spec-kit')}\n"
|
||||
f" source: {_quote('templates/commands/' + src_file.name)}\n"
|
||||
f"---\n"
|
||||
f"{processed_body}"
|
||||
)
|
||||
|
||||
# Write speckit-<name>/SKILL.md
|
||||
skill_dir = skills_dir / skill_name
|
||||
skill_file = skill_dir / "SKILL.md"
|
||||
dst = self.write_file_and_record(
|
||||
skill_content, skill_file, project_root, manifest
|
||||
)
|
||||
created.append(dst)
|
||||
|
||||
created.extend(self.install_scripts(project_root, manifest))
|
||||
return created
|
||||
|
||||
21
src/specify_cli/integrations/bob/__init__.py
Normal file
21
src/specify_cli/integrations/bob/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""IBM Bob integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class BobIntegration(MarkdownIntegration):
|
||||
key = "bob"
|
||||
config = {
|
||||
"name": "IBM Bob",
|
||||
"folder": ".bob/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": None,
|
||||
"requires_cli": False,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".bob/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
23
src/specify_cli/integrations/bob/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/bob/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — IBM Bob integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType bob
|
||||
28
src/specify_cli/integrations/bob/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/bob/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — IBM Bob integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" bob
|
||||
109
src/specify_cli/integrations/claude/__init__.py
Normal file
109
src/specify_cli/integrations/claude/__init__.py
Normal file
@@ -0,0 +1,109 @@
|
||||
"""Claude Code integration."""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import yaml
|
||||
|
||||
from ..base import SkillsIntegration
|
||||
from ..manifest import IntegrationManifest
|
||||
|
||||
|
||||
class ClaudeIntegration(SkillsIntegration):
|
||||
"""Integration for Claude Code skills."""
|
||||
|
||||
key = "claude"
|
||||
config = {
|
||||
"name": "Claude Code",
|
||||
"folder": ".claude/",
|
||||
"commands_subdir": "skills",
|
||||
"install_url": "https://docs.anthropic.com/en/docs/claude-code/setup",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".claude/skills",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": "/SKILL.md",
|
||||
}
|
||||
context_file = "CLAUDE.md"
|
||||
|
||||
def command_filename(self, template_name: str) -> str:
|
||||
"""Claude skills live at .claude/skills/<name>/SKILL.md."""
|
||||
skill_name = f"speckit-{template_name.replace('.', '-')}"
|
||||
return f"{skill_name}/SKILL.md"
|
||||
|
||||
def _render_skill(self, template_name: str, frontmatter: dict[str, Any], body: str) -> str:
|
||||
"""Render a processed command template as a Claude skill."""
|
||||
skill_name = f"speckit-{template_name.replace('.', '-')}"
|
||||
description = frontmatter.get(
|
||||
"description",
|
||||
f"Spec-kit workflow command: {template_name}",
|
||||
)
|
||||
skill_frontmatter = self._build_skill_fm(
|
||||
skill_name, description, f"templates/commands/{template_name}.md"
|
||||
)
|
||||
frontmatter_text = yaml.safe_dump(skill_frontmatter, sort_keys=False).strip()
|
||||
return f"---\n{frontmatter_text}\n---\n\n{body.strip()}\n"
|
||||
|
||||
def _build_skill_fm(self, name: str, description: str, source: str) -> dict:
|
||||
from specify_cli.agents import CommandRegistrar
|
||||
return CommandRegistrar.build_skill_frontmatter(
|
||||
self.key, name, description, source
|
||||
)
|
||||
|
||||
def setup(
|
||||
self,
|
||||
project_root: Path,
|
||||
manifest: IntegrationManifest,
|
||||
parsed_options: dict[str, Any] | None = None,
|
||||
**opts: Any,
|
||||
) -> list[Path]:
|
||||
"""Install Claude skills into .claude/skills."""
|
||||
templates = self.list_command_templates()
|
||||
if not templates:
|
||||
return []
|
||||
|
||||
project_root_resolved = project_root.resolve()
|
||||
if manifest.project_root != project_root_resolved:
|
||||
raise ValueError(
|
||||
f"manifest.project_root ({manifest.project_root}) does not match "
|
||||
f"project_root ({project_root_resolved})"
|
||||
)
|
||||
|
||||
dest = self.skills_dest(project_root).resolve()
|
||||
try:
|
||||
dest.relative_to(project_root_resolved)
|
||||
except ValueError as exc:
|
||||
raise ValueError(
|
||||
f"Integration destination {dest} escapes "
|
||||
f"project root {project_root_resolved}"
|
||||
) from exc
|
||||
dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
script_type = opts.get("script_type", "sh")
|
||||
arg_placeholder = self.registrar_config.get("args", "$ARGUMENTS")
|
||||
from specify_cli.agents import CommandRegistrar
|
||||
registrar = CommandRegistrar()
|
||||
created: list[Path] = []
|
||||
|
||||
for src_file in templates:
|
||||
raw = src_file.read_text(encoding="utf-8")
|
||||
processed = self.process_template(raw, self.key, script_type, arg_placeholder)
|
||||
frontmatter, body = registrar.parse_frontmatter(processed)
|
||||
if not isinstance(frontmatter, dict):
|
||||
frontmatter = {}
|
||||
|
||||
rendered = self._render_skill(src_file.stem, frontmatter, body)
|
||||
dst_file = self.write_file_and_record(
|
||||
rendered,
|
||||
dest / self.command_filename(src_file.stem),
|
||||
project_root,
|
||||
manifest,
|
||||
)
|
||||
created.append(dst_file)
|
||||
|
||||
created.extend(self.install_scripts(project_root, manifest))
|
||||
return created
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Claude Code integration: create/update CLAUDE.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType claude
|
||||
28
src/specify_cli/integrations/claude/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/claude/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Claude Code integration: create/update CLAUDE.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" claude
|
||||
21
src/specify_cli/integrations/codebuddy/__init__.py
Normal file
21
src/specify_cli/integrations/codebuddy/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""CodeBuddy CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class CodebuddyIntegration(MarkdownIntegration):
|
||||
key = "codebuddy"
|
||||
config = {
|
||||
"name": "CodeBuddy",
|
||||
"folder": ".codebuddy/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://www.codebuddy.ai/cli",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".codebuddy/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "CODEBUDDY.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — CodeBuddy integration: create/update CODEBUDDY.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType codebuddy
|
||||
28
src/specify_cli/integrations/codebuddy/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/codebuddy/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — CodeBuddy integration: create/update CODEBUDDY.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" codebuddy
|
||||
40
src/specify_cli/integrations/codex/__init__.py
Normal file
40
src/specify_cli/integrations/codex/__init__.py
Normal file
@@ -0,0 +1,40 @@
|
||||
"""Codex CLI integration — skills-based agent.
|
||||
|
||||
Codex uses the ``.agents/skills/speckit-<name>/SKILL.md`` layout.
|
||||
Commands are deprecated; ``--skills`` defaults to ``True``.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from ..base import IntegrationOption, SkillsIntegration
|
||||
|
||||
|
||||
class CodexIntegration(SkillsIntegration):
|
||||
"""Integration for OpenAI Codex CLI."""
|
||||
|
||||
key = "codex"
|
||||
config = {
|
||||
"name": "Codex CLI",
|
||||
"folder": ".agents/",
|
||||
"commands_subdir": "skills",
|
||||
"install_url": "https://github.com/openai/codex",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".agents/skills",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": "/SKILL.md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
|
||||
@classmethod
|
||||
def options(cls) -> list[IntegrationOption]:
|
||||
return [
|
||||
IntegrationOption(
|
||||
"--skills",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Install as agent skills (default for Codex)",
|
||||
),
|
||||
]
|
||||
@@ -0,0 +1,17 @@
|
||||
# update-context.ps1 — Codex CLI integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType codex
|
||||
24
src/specify_cli/integrations/codex/scripts/update-context.sh
Executable file
24
src/specify_cli/integrations/codex/scripts/update-context.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Codex CLI integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" codex
|
||||
@@ -118,19 +118,7 @@ class CopilotIntegration(IntegrationBase):
|
||||
created.append(dst_settings)
|
||||
|
||||
# 4. Install integration-specific update-context scripts
|
||||
scripts_src = Path(__file__).resolve().parent / "scripts"
|
||||
if scripts_src.is_dir():
|
||||
scripts_dest = project_root / ".specify" / "integrations" / "copilot" / "scripts"
|
||||
scripts_dest.mkdir(parents=True, exist_ok=True)
|
||||
for src_script in sorted(scripts_src.iterdir()):
|
||||
if src_script.is_file():
|
||||
dst_script = scripts_dest / src_script.name
|
||||
shutil.copy2(src_script, dst_script)
|
||||
# Make shell scripts executable
|
||||
if dst_script.suffix == ".sh":
|
||||
dst_script.chmod(dst_script.stat().st_mode | 0o111)
|
||||
self.record_file_in_manifest(dst_script, project_root, manifest)
|
||||
created.append(dst_script)
|
||||
created.extend(self.install_scripts(project_root, manifest))
|
||||
|
||||
return created
|
||||
|
||||
|
||||
@@ -14,8 +14,18 @@
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
$repoRoot = git rev-parse --show-toplevel 2>$null
|
||||
if (-not $repoRoot) { $repoRoot = $PWD.Path }
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
# Invoke shared update-agent-context script as a separate process.
|
||||
# Dot-sourcing is unsafe until that script guards its Main call.
|
||||
|
||||
@@ -15,7 +15,22 @@
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
REPO_ROOT="${REPO_ROOT:-$(git rev-parse --show-toplevel 2>/dev/null || pwd)}"
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
# Invoke shared update-agent-context script as a separate process.
|
||||
# Sourcing is unsafe until that script guards its main logic.
|
||||
|
||||
21
src/specify_cli/integrations/cursor_agent/__init__.py
Normal file
21
src/specify_cli/integrations/cursor_agent/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Cursor IDE integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class CursorAgentIntegration(MarkdownIntegration):
|
||||
key = "cursor-agent"
|
||||
config = {
|
||||
"name": "Cursor",
|
||||
"folder": ".cursor/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": None,
|
||||
"requires_cli": False,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".cursor/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = ".cursor/rules/specify-rules.mdc"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Cursor integration: create/update .cursor/rules/specify-rules.mdc
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType cursor-agent
|
||||
28
src/specify_cli/integrations/cursor_agent/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/cursor_agent/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Cursor integration: create/update .cursor/rules/specify-rules.mdc
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" cursor-agent
|
||||
21
src/specify_cli/integrations/gemini/__init__.py
Normal file
21
src/specify_cli/integrations/gemini/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Gemini CLI integration."""
|
||||
|
||||
from ..base import TomlIntegration
|
||||
|
||||
|
||||
class GeminiIntegration(TomlIntegration):
|
||||
key = "gemini"
|
||||
config = {
|
||||
"name": "Gemini CLI",
|
||||
"folder": ".gemini/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://github.com/google-gemini/gemini-cli",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".gemini/commands",
|
||||
"format": "toml",
|
||||
"args": "{{args}}",
|
||||
"extension": ".toml",
|
||||
}
|
||||
context_file = "GEMINI.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Gemini CLI integration: create/update GEMINI.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType gemini
|
||||
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Gemini CLI integration: create/update GEMINI.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" gemini
|
||||
133
src/specify_cli/integrations/generic/__init__.py
Normal file
133
src/specify_cli/integrations/generic/__init__.py
Normal file
@@ -0,0 +1,133 @@
|
||||
"""Generic integration — bring your own agent.
|
||||
|
||||
Requires ``--commands-dir`` to specify the output directory for command
|
||||
files. No longer special-cased in the core CLI — just another
|
||||
integration with its own required option.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from ..base import IntegrationOption, MarkdownIntegration
|
||||
from ..manifest import IntegrationManifest
|
||||
|
||||
|
||||
class GenericIntegration(MarkdownIntegration):
|
||||
"""Integration for user-specified (generic) agents."""
|
||||
|
||||
key = "generic"
|
||||
config = {
|
||||
"name": "Generic (bring your own agent)",
|
||||
"folder": None, # Set dynamically from --commands-dir
|
||||
"commands_subdir": "commands",
|
||||
"install_url": None,
|
||||
"requires_cli": False,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": "", # Set dynamically from --commands-dir
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = None
|
||||
|
||||
@classmethod
|
||||
def options(cls) -> list[IntegrationOption]:
|
||||
return [
|
||||
IntegrationOption(
|
||||
"--commands-dir",
|
||||
required=True,
|
||||
help="Directory for command files (e.g. .myagent/commands/)",
|
||||
),
|
||||
]
|
||||
|
||||
@staticmethod
|
||||
def _resolve_commands_dir(
|
||||
parsed_options: dict[str, Any] | None,
|
||||
opts: dict[str, Any],
|
||||
) -> str:
|
||||
"""Extract ``--commands-dir`` from parsed options or raw_options.
|
||||
|
||||
Returns the directory string or raises ``ValueError``.
|
||||
"""
|
||||
parsed_options = parsed_options or {}
|
||||
|
||||
commands_dir = parsed_options.get("commands_dir")
|
||||
if commands_dir:
|
||||
return commands_dir
|
||||
|
||||
# Fall back to raw_options (--integration-options="--commands-dir ...")
|
||||
raw = opts.get("raw_options")
|
||||
if raw:
|
||||
import shlex
|
||||
tokens = shlex.split(raw)
|
||||
for i, token in enumerate(tokens):
|
||||
if token == "--commands-dir" and i + 1 < len(tokens):
|
||||
return tokens[i + 1]
|
||||
if token.startswith("--commands-dir="):
|
||||
return token.split("=", 1)[1]
|
||||
|
||||
raise ValueError(
|
||||
"--commands-dir is required for the generic integration"
|
||||
)
|
||||
|
||||
def commands_dest(self, project_root: Path) -> Path:
|
||||
"""Not supported for GenericIntegration — use setup() directly.
|
||||
|
||||
GenericIntegration is stateless; the output directory comes from
|
||||
``parsed_options`` or ``raw_options`` at call time, not from
|
||||
instance state.
|
||||
"""
|
||||
raise ValueError(
|
||||
"GenericIntegration.commands_dest() cannot be called directly; "
|
||||
"the output directory is resolved from parsed_options in setup()"
|
||||
)
|
||||
|
||||
def setup(
|
||||
self,
|
||||
project_root: Path,
|
||||
manifest: IntegrationManifest,
|
||||
parsed_options: dict[str, Any] | None = None,
|
||||
**opts: Any,
|
||||
) -> list[Path]:
|
||||
"""Install commands to the user-provided commands directory."""
|
||||
commands_dir = self._resolve_commands_dir(parsed_options, opts)
|
||||
|
||||
templates = self.list_command_templates()
|
||||
if not templates:
|
||||
return []
|
||||
|
||||
project_root_resolved = project_root.resolve()
|
||||
if manifest.project_root != project_root_resolved:
|
||||
raise ValueError(
|
||||
f"manifest.project_root ({manifest.project_root}) does not match "
|
||||
f"project_root ({project_root_resolved})"
|
||||
)
|
||||
|
||||
dest = (project_root / commands_dir).resolve()
|
||||
try:
|
||||
dest.relative_to(project_root_resolved)
|
||||
except ValueError as exc:
|
||||
raise ValueError(
|
||||
f"Integration destination {dest} escapes "
|
||||
f"project root {project_root_resolved}"
|
||||
) from exc
|
||||
dest.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
script_type = opts.get("script_type", "sh")
|
||||
arg_placeholder = "$ARGUMENTS"
|
||||
created: list[Path] = []
|
||||
|
||||
for src_file in templates:
|
||||
raw = src_file.read_text(encoding="utf-8")
|
||||
processed = self.process_template(raw, self.key, script_type, arg_placeholder)
|
||||
dst_name = self.command_filename(src_file.stem)
|
||||
dst_file = self.write_file_and_record(
|
||||
processed, dest / dst_name, project_root, manifest
|
||||
)
|
||||
created.append(dst_file)
|
||||
|
||||
created.extend(self.install_scripts(project_root, manifest))
|
||||
return created
|
||||
@@ -0,0 +1,17 @@
|
||||
# update-context.ps1 — Generic integration: create/update context file
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType generic
|
||||
24
src/specify_cli/integrations/generic/scripts/update-context.sh
Executable file
24
src/specify_cli/integrations/generic/scripts/update-context.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Generic integration: create/update context file
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" generic
|
||||
21
src/specify_cli/integrations/iflow/__init__.py
Normal file
21
src/specify_cli/integrations/iflow/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""iFlow CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class IflowIntegration(MarkdownIntegration):
|
||||
key = "iflow"
|
||||
config = {
|
||||
"name": "iFlow CLI",
|
||||
"folder": ".iflow/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://docs.iflow.cn/en/cli/quickstart",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".iflow/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "IFLOW.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — iFlow CLI integration: create/update IFLOW.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType iflow
|
||||
28
src/specify_cli/integrations/iflow/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/iflow/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — iFlow CLI integration: create/update IFLOW.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" iflow
|
||||
21
src/specify_cli/integrations/junie/__init__.py
Normal file
21
src/specify_cli/integrations/junie/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Junie integration (JetBrains)."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class JunieIntegration(MarkdownIntegration):
|
||||
key = "junie"
|
||||
config = {
|
||||
"name": "Junie",
|
||||
"folder": ".junie/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://junie.jetbrains.com/",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".junie/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = ".junie/AGENTS.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Junie integration: create/update .junie/AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType junie
|
||||
28
src/specify_cli/integrations/junie/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/junie/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Junie integration: create/update .junie/AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" junie
|
||||
21
src/specify_cli/integrations/kilocode/__init__.py
Normal file
21
src/specify_cli/integrations/kilocode/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Kilo Code integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class KilocodeIntegration(MarkdownIntegration):
|
||||
key = "kilocode"
|
||||
config = {
|
||||
"name": "Kilo Code",
|
||||
"folder": ".kilocode/",
|
||||
"commands_subdir": "workflows",
|
||||
"install_url": None,
|
||||
"requires_cli": False,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".kilocode/workflows",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = ".kilocode/rules/specify-rules.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Kilo Code integration: create/update .kilocode/rules/specify-rules.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType kilocode
|
||||
28
src/specify_cli/integrations/kilocode/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/kilocode/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Kilo Code integration: create/update .kilocode/rules/specify-rules.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" kilocode
|
||||
124
src/specify_cli/integrations/kimi/__init__.py
Normal file
124
src/specify_cli/integrations/kimi/__init__.py
Normal file
@@ -0,0 +1,124 @@
|
||||
"""Kimi Code integration — skills-based agent (Moonshot AI).
|
||||
|
||||
Kimi uses the ``.kimi/skills/speckit-<name>/SKILL.md`` layout with
|
||||
``/skill:speckit-<name>`` invocation syntax.
|
||||
|
||||
Includes legacy migration logic for projects initialised before Kimi
|
||||
moved from dotted skill directories (``speckit.xxx``) to hyphenated
|
||||
(``speckit-xxx``).
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import shutil
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
from ..base import IntegrationOption, SkillsIntegration
|
||||
from ..manifest import IntegrationManifest
|
||||
|
||||
|
||||
class KimiIntegration(SkillsIntegration):
|
||||
"""Integration for Kimi Code CLI (Moonshot AI)."""
|
||||
|
||||
key = "kimi"
|
||||
config = {
|
||||
"name": "Kimi Code",
|
||||
"folder": ".kimi/",
|
||||
"commands_subdir": "skills",
|
||||
"install_url": "https://code.kimi.com/",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".kimi/skills",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": "/SKILL.md",
|
||||
}
|
||||
context_file = "KIMI.md"
|
||||
|
||||
@classmethod
|
||||
def options(cls) -> list[IntegrationOption]:
|
||||
return [
|
||||
IntegrationOption(
|
||||
"--skills",
|
||||
is_flag=True,
|
||||
default=True,
|
||||
help="Install as agent skills (default for Kimi)",
|
||||
),
|
||||
IntegrationOption(
|
||||
"--migrate-legacy",
|
||||
is_flag=True,
|
||||
default=False,
|
||||
help="Migrate legacy dotted skill dirs (speckit.xxx → speckit-xxx)",
|
||||
),
|
||||
]
|
||||
|
||||
def setup(
|
||||
self,
|
||||
project_root: Path,
|
||||
manifest: IntegrationManifest,
|
||||
parsed_options: dict[str, Any] | None = None,
|
||||
**opts: Any,
|
||||
) -> list[Path]:
|
||||
"""Install skills with optional legacy dotted-name migration."""
|
||||
parsed_options = parsed_options or {}
|
||||
|
||||
# Run base setup first so hyphenated targets (speckit-*) exist,
|
||||
# then migrate/clean legacy dotted dirs without risking user content loss.
|
||||
created = super().setup(
|
||||
project_root, manifest, parsed_options=parsed_options, **opts
|
||||
)
|
||||
|
||||
if parsed_options.get("migrate_legacy", False):
|
||||
skills_dir = self.skills_dest(project_root)
|
||||
if skills_dir.is_dir():
|
||||
_migrate_legacy_kimi_dotted_skills(skills_dir)
|
||||
|
||||
return created
|
||||
|
||||
|
||||
def _migrate_legacy_kimi_dotted_skills(skills_dir: Path) -> tuple[int, int]:
|
||||
"""Migrate legacy Kimi dotted skill dirs (speckit.xxx) to hyphenated format.
|
||||
|
||||
Returns ``(migrated_count, removed_count)``.
|
||||
"""
|
||||
if not skills_dir.is_dir():
|
||||
return (0, 0)
|
||||
|
||||
migrated_count = 0
|
||||
removed_count = 0
|
||||
|
||||
for legacy_dir in sorted(skills_dir.glob("speckit.*")):
|
||||
if not legacy_dir.is_dir():
|
||||
continue
|
||||
if not (legacy_dir / "SKILL.md").exists():
|
||||
continue
|
||||
|
||||
suffix = legacy_dir.name[len("speckit."):]
|
||||
if not suffix:
|
||||
continue
|
||||
|
||||
target_dir = skills_dir / f"speckit-{suffix.replace('.', '-')}"
|
||||
|
||||
if not target_dir.exists():
|
||||
shutil.move(str(legacy_dir), str(target_dir))
|
||||
migrated_count += 1
|
||||
continue
|
||||
|
||||
# Target exists — only remove legacy if SKILL.md is identical
|
||||
target_skill = target_dir / "SKILL.md"
|
||||
legacy_skill = legacy_dir / "SKILL.md"
|
||||
if target_skill.is_file():
|
||||
try:
|
||||
if target_skill.read_bytes() == legacy_skill.read_bytes():
|
||||
has_extra = any(
|
||||
child.name != "SKILL.md" for child in legacy_dir.iterdir()
|
||||
)
|
||||
if not has_extra:
|
||||
shutil.rmtree(legacy_dir)
|
||||
removed_count += 1
|
||||
except OSError:
|
||||
pass
|
||||
|
||||
return (migrated_count, removed_count)
|
||||
17
src/specify_cli/integrations/kimi/scripts/update-context.ps1
Normal file
17
src/specify_cli/integrations/kimi/scripts/update-context.ps1
Normal file
@@ -0,0 +1,17 @@
|
||||
# update-context.ps1 — Kimi Code integration: create/update KIMI.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType kimi
|
||||
24
src/specify_cli/integrations/kimi/scripts/update-context.sh
Executable file
24
src/specify_cli/integrations/kimi/scripts/update-context.sh
Executable file
@@ -0,0 +1,24 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Kimi Code integration: create/update KIMI.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" kimi
|
||||
21
src/specify_cli/integrations/kiro_cli/__init__.py
Normal file
21
src/specify_cli/integrations/kiro_cli/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Kiro CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class KiroCliIntegration(MarkdownIntegration):
|
||||
key = "kiro-cli"
|
||||
config = {
|
||||
"name": "Kiro CLI",
|
||||
"folder": ".kiro/",
|
||||
"commands_subdir": "prompts",
|
||||
"install_url": "https://kiro.dev/docs/cli/",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".kiro/prompts",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Kiro CLI integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType kiro-cli
|
||||
28
src/specify_cli/integrations/kiro_cli/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/kiro_cli/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Kiro CLI integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" kiro-cli
|
||||
21
src/specify_cli/integrations/opencode/__init__.py
Normal file
21
src/specify_cli/integrations/opencode/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""opencode integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class OpencodeIntegration(MarkdownIntegration):
|
||||
key = "opencode"
|
||||
config = {
|
||||
"name": "opencode",
|
||||
"folder": ".opencode/",
|
||||
"commands_subdir": "command",
|
||||
"install_url": "https://opencode.ai",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".opencode/command",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — opencode integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType opencode
|
||||
28
src/specify_cli/integrations/opencode/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/opencode/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — opencode integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" opencode
|
||||
21
src/specify_cli/integrations/pi/__init__.py
Normal file
21
src/specify_cli/integrations/pi/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Pi Coding Agent integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class PiIntegration(MarkdownIntegration):
|
||||
key = "pi"
|
||||
config = {
|
||||
"name": "Pi Coding Agent",
|
||||
"folder": ".pi/",
|
||||
"commands_subdir": "prompts",
|
||||
"install_url": "https://www.npmjs.com/package/@mariozechner/pi-coding-agent",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".pi/prompts",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "AGENTS.md"
|
||||
23
src/specify_cli/integrations/pi/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/pi/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Pi Coding Agent integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType pi
|
||||
28
src/specify_cli/integrations/pi/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/pi/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Pi Coding Agent integration: create/update AGENTS.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" pi
|
||||
21
src/specify_cli/integrations/qodercli/__init__.py
Normal file
21
src/specify_cli/integrations/qodercli/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Qoder CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class QodercliIntegration(MarkdownIntegration):
|
||||
key = "qodercli"
|
||||
config = {
|
||||
"name": "Qoder CLI",
|
||||
"folder": ".qoder/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://qoder.com/cli",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".qoder/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "QODER.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Qoder CLI integration: create/update QODER.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType qodercli
|
||||
28
src/specify_cli/integrations/qodercli/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/qodercli/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Qoder CLI integration: create/update QODER.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" qodercli
|
||||
21
src/specify_cli/integrations/qwen/__init__.py
Normal file
21
src/specify_cli/integrations/qwen/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Qwen Code integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class QwenIntegration(MarkdownIntegration):
|
||||
key = "qwen"
|
||||
config = {
|
||||
"name": "Qwen Code",
|
||||
"folder": ".qwen/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": "https://github.com/QwenLM/qwen-code",
|
||||
"requires_cli": True,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".qwen/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = "QWEN.md"
|
||||
23
src/specify_cli/integrations/qwen/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/qwen/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Qwen Code integration: create/update QWEN.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType qwen
|
||||
28
src/specify_cli/integrations/qwen/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/qwen/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
|
||||
# update-context.sh — Qwen Code integration: create/update QWEN.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
_script_dir="$(cd "$(dirname "$0")" && pwd)"
|
||||
_root="$_script_dir"
|
||||
while [ "$_root" != "/" ] && [ ! -d "$_root/.specify" ]; do _root="$(dirname "$_root")"; done
|
||||
if [ -z "${REPO_ROOT:-}" ]; then
|
||||
if [ -d "$_root/.specify" ]; then
|
||||
REPO_ROOT="$_root"
|
||||
else
|
||||
git_root="$(git rev-parse --show-toplevel 2>/dev/null || true)"
|
||||
if [ -n "$git_root" ] && [ -d "$git_root/.specify" ]; then
|
||||
REPO_ROOT="$git_root"
|
||||
else
|
||||
REPO_ROOT="$_root"
|
||||
fi
|
||||
fi
|
||||
fi
|
||||
|
||||
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" qwen
|
||||
21
src/specify_cli/integrations/roo/__init__.py
Normal file
21
src/specify_cli/integrations/roo/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Roo Code integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class RooIntegration(MarkdownIntegration):
|
||||
key = "roo"
|
||||
config = {
|
||||
"name": "Roo Code",
|
||||
"folder": ".roo/",
|
||||
"commands_subdir": "commands",
|
||||
"install_url": None,
|
||||
"requires_cli": False,
|
||||
}
|
||||
registrar_config = {
|
||||
"dir": ".roo/commands",
|
||||
"format": "markdown",
|
||||
"args": "$ARGUMENTS",
|
||||
"extension": ".md",
|
||||
}
|
||||
context_file = ".roo/rules/specify-rules.md"
|
||||
23
src/specify_cli/integrations/roo/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/roo/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Roo Code integration: create/update .roo/rules/specify-rules.md
|
||||
#
|
||||
# Thin wrapper that delegates to the shared update-agent-context script.
|
||||
# Activated in Stage 7 when the shared script uses integration.json dispatch.
|
||||
#
|
||||
# Until then, this delegates to the shared script as a subprocess.
|
||||
|
||||
$ErrorActionPreference = 'Stop'
|
||||
|
||||
# Derive repo root from script location (walks up to find .specify/)
|
||||
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
|
||||
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
|
||||
# If git did not return a repo root, or the git root does not contain .specify,
|
||||
# fall back to walking up from the script directory to find the initialized project root.
|
||||
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = $scriptDir
|
||||
$fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
|
||||
while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
|
||||
$repoRoot = Split-Path -Parent $repoRoot
|
||||
}
|
||||
}
|
||||
|
||||
& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType roo
|
||||
28
src/specify_cli/integrations/roo/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/roo/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
# update-context.sh — Roo Code integration: create/update .roo/rules/specify-rules.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

set -euo pipefail

# Honor a pre-set REPO_ROOT; otherwise derive it from the script location
# (walk up until a directory containing .specify/ is found).
if [ -z "${REPO_ROOT:-}" ]; then
  script_dir="$(cd "$(dirname "$0")" && pwd)"
  candidate="$script_dir"
  while [ "$candidate" != "/" ] && [ ! -d "$candidate/.specify" ]; do
    candidate="$(dirname "$candidate")"
  done

  if [ -d "$candidate/.specify" ]; then
    REPO_ROOT="$candidate"
  else
    # Walk-up failed: try the git toplevel, but only if it is an initialized project.
    git_top="$(git rev-parse --show-toplevel 2>/dev/null || true)"
    if [ -n "$git_top" ] && [ -d "$git_top/.specify" ]; then
      REPO_ROOT="$git_top"
    else
      REPO_ROOT="$candidate"
    fi
  fi
fi

# Replace this process with the shared script so its exit status propagates.
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" roo
|
||||
21
src/specify_cli/integrations/shai/__init__.py
Normal file
21
src/specify_cli/integrations/shai/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""SHAI CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class ShaiIntegration(MarkdownIntegration):
    """SHAI CLI integration: markdown commands under ``.shai/``."""

    # Registry key used to select this integration.
    key = "shai"

    # Display / install metadata. SHAI is a standalone CLI tool.
    config = dict(
        name="SHAI",
        folder=".shai/",
        commands_subdir="commands",
        install_url="https://github.com/ovh/shai",
        requires_cli=True,
    )

    # How generated command files are written for this agent.
    registrar_config = dict(
        dir=".shai/commands",
        format="markdown",
        args="$ARGUMENTS",
        extension=".md",
    )

    # Agent context file maintained by the update-context scripts.
    context_file = "SHAI.md"
|
||||
23
src/specify_cli/integrations/shai/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/shai/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — SHAI integration: create/update SHAI.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

$ErrorActionPreference = 'Stop'

# Derive repo root from script location (walks up to find .specify/)
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
# git may fail (not in a repo) or write to stderr under EAP=Stop; swallow and fall back.
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
# If git did not return a repo root, or the git root does not contain .specify,
# fall back to walking up from the script directory to find the initialized project root.
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
    $repoRoot = $scriptDir
    $fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
    while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
        $repoRoot = Split-Path -Parent $repoRoot
    }
}

& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType shai
# Propagate the shared script's exit status to our caller; the bash twin gets
# this for free via `exec`. ($LASTEXITCODE is $null when unset → exits 0.)
exit $LASTEXITCODE
|
||||
28
src/specify_cli/integrations/shai/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/shai/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
# update-context.sh — SHAI integration: create/update SHAI.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

set -euo pipefail

# Honor a pre-set REPO_ROOT; otherwise derive it from the script location
# (walk up until a directory containing .specify/ is found).
if [ -z "${REPO_ROOT:-}" ]; then
  script_dir="$(cd "$(dirname "$0")" && pwd)"
  candidate="$script_dir"
  while [ "$candidate" != "/" ] && [ ! -d "$candidate/.specify" ]; do
    candidate="$(dirname "$candidate")"
  done

  if [ -d "$candidate/.specify" ]; then
    REPO_ROOT="$candidate"
  else
    # Walk-up failed: try the git toplevel, but only if it is an initialized project.
    git_top="$(git rev-parse --show-toplevel 2>/dev/null || true)"
    if [ -n "$git_top" ] && [ -d "$git_top/.specify" ]; then
      REPO_ROOT="$git_top"
    else
      REPO_ROOT="$candidate"
    fi
  fi
fi

# Replace this process with the shared script so its exit status propagates.
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" shai
|
||||
21
src/specify_cli/integrations/tabnine/__init__.py
Normal file
21
src/specify_cli/integrations/tabnine/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Tabnine CLI integration."""
|
||||
|
||||
from ..base import TomlIntegration
|
||||
|
||||
|
||||
class TabnineIntegration(TomlIntegration):
    """Tabnine CLI integration: TOML command files under ``.tabnine/agent/``."""

    # Registry key used to select this integration.
    key = "tabnine"

    # Display / install metadata. Tabnine ships a standalone CLI.
    config = dict(
        name="Tabnine CLI",
        folder=".tabnine/agent/",
        commands_subdir="commands",
        install_url="https://docs.tabnine.com/main/getting-started/tabnine-cli",
        requires_cli=True,
    )

    # How generated command files are written: TOML with {{args}} placeholders.
    registrar_config = dict(
        dir=".tabnine/agent/commands",
        format="toml",
        args="{{args}}",
        extension=".toml",
    )

    # Agent context file maintained by the update-context scripts.
    context_file = "TABNINE.md"
|
||||
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Tabnine CLI integration: create/update TABNINE.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

$ErrorActionPreference = 'Stop'

# Derive repo root from script location (walks up to find .specify/)
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
# git may fail (not in a repo) or write to stderr under EAP=Stop; swallow and fall back.
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
# If git did not return a repo root, or the git root does not contain .specify,
# fall back to walking up from the script directory to find the initialized project root.
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
    $repoRoot = $scriptDir
    $fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
    while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
        $repoRoot = Split-Path -Parent $repoRoot
    }
}

& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType tabnine
# Propagate the shared script's exit status to our caller; the bash twin gets
# this for free via `exec`. ($LASTEXITCODE is $null when unset → exits 0.)
exit $LASTEXITCODE
|
||||
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
# update-context.sh — Tabnine CLI integration: create/update TABNINE.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

set -euo pipefail

# Honor a pre-set REPO_ROOT; otherwise derive it from the script location
# (walk up until a directory containing .specify/ is found).
if [ -z "${REPO_ROOT:-}" ]; then
  script_dir="$(cd "$(dirname "$0")" && pwd)"
  candidate="$script_dir"
  while [ "$candidate" != "/" ] && [ ! -d "$candidate/.specify" ]; do
    candidate="$(dirname "$candidate")"
  done

  if [ -d "$candidate/.specify" ]; then
    REPO_ROOT="$candidate"
  else
    # Walk-up failed: try the git toplevel, but only if it is an initialized project.
    git_top="$(git rev-parse --show-toplevel 2>/dev/null || true)"
    if [ -n "$git_top" ] && [ -d "$git_top/.specify" ]; then
      REPO_ROOT="$git_top"
    else
      REPO_ROOT="$candidate"
    fi
  fi
fi

# Replace this process with the shared script so its exit status propagates.
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" tabnine
|
||||
21
src/specify_cli/integrations/trae/__init__.py
Normal file
21
src/specify_cli/integrations/trae/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Trae IDE integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class TraeIntegration(MarkdownIntegration):
    """Trae IDE integration: markdown rule files under ``.trae/``."""

    # Registry key used to select this integration.
    key = "trae"

    # Display / install metadata. Trae is IDE-embedded: no CLI, no install URL.
    config = dict(
        name="Trae",
        folder=".trae/",
        commands_subdir="rules",
        install_url=None,
        requires_cli=False,
    )

    # How generated command files are written for this agent.
    registrar_config = dict(
        dir=".trae/rules",
        format="markdown",
        args="$ARGUMENTS",
        extension=".md",
    )

    # Agent context file maintained by the update-context scripts.
    context_file = ".trae/rules/AGENTS.md"
|
||||
23
src/specify_cli/integrations/trae/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/trae/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Trae integration: create/update .trae/rules/AGENTS.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

$ErrorActionPreference = 'Stop'

# Derive repo root from script location (walks up to find .specify/)
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
# git may fail (not in a repo) or write to stderr under EAP=Stop; swallow and fall back.
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
# If git did not return a repo root, or the git root does not contain .specify,
# fall back to walking up from the script directory to find the initialized project root.
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
    $repoRoot = $scriptDir
    $fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
    while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
        $repoRoot = Split-Path -Parent $repoRoot
    }
}

& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType trae
# Propagate the shared script's exit status to our caller; the bash twin gets
# this for free via `exec`. ($LASTEXITCODE is $null when unset → exits 0.)
exit $LASTEXITCODE
|
||||
28
src/specify_cli/integrations/trae/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/trae/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
# update-context.sh — Trae integration: create/update .trae/rules/AGENTS.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

set -euo pipefail

# Honor a pre-set REPO_ROOT; otherwise derive it from the script location
# (walk up until a directory containing .specify/ is found).
if [ -z "${REPO_ROOT:-}" ]; then
  script_dir="$(cd "$(dirname "$0")" && pwd)"
  candidate="$script_dir"
  while [ "$candidate" != "/" ] && [ ! -d "$candidate/.specify" ]; do
    candidate="$(dirname "$candidate")"
  done

  if [ -d "$candidate/.specify" ]; then
    REPO_ROOT="$candidate"
  else
    # Walk-up failed: try the git toplevel, but only if it is an initialized project.
    git_top="$(git rev-parse --show-toplevel 2>/dev/null || true)"
    if [ -n "$git_top" ] && [ -d "$git_top/.specify" ]; then
      REPO_ROOT="$git_top"
    else
      REPO_ROOT="$candidate"
    fi
  fi
fi

# Replace this process with the shared script so its exit status propagates.
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" trae
|
||||
21
src/specify_cli/integrations/vibe/__init__.py
Normal file
21
src/specify_cli/integrations/vibe/__init__.py
Normal file
@@ -0,0 +1,21 @@
|
||||
"""Mistral Vibe CLI integration."""
|
||||
|
||||
from ..base import MarkdownIntegration
|
||||
|
||||
|
||||
class VibeIntegration(MarkdownIntegration):
    """Mistral Vibe CLI integration: markdown prompts under ``.vibe/``."""

    # Registry key used to select this integration.
    key = "vibe"

    # Display / install metadata. Vibe is a standalone CLI tool.
    config = dict(
        name="Mistral Vibe",
        folder=".vibe/",
        commands_subdir="prompts",
        install_url="https://github.com/mistralai/mistral-vibe",
        requires_cli=True,
    )

    # How generated command files are written for this agent.
    registrar_config = dict(
        dir=".vibe/prompts",
        format="markdown",
        args="$ARGUMENTS",
        extension=".md",
    )

    # Agent context file maintained by the update-context scripts.
    context_file = ".vibe/agents/specify-agents.md"
|
||||
23
src/specify_cli/integrations/vibe/scripts/update-context.ps1
Normal file
23
src/specify_cli/integrations/vibe/scripts/update-context.ps1
Normal file
@@ -0,0 +1,23 @@
|
||||
# update-context.ps1 — Mistral Vibe integration: create/update .vibe/agents/specify-agents.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

$ErrorActionPreference = 'Stop'

# Derive repo root from script location (walks up to find .specify/)
$scriptDir = Split-Path -Parent $MyInvocation.MyCommand.Definition
# git may fail (not in a repo) or write to stderr under EAP=Stop; swallow and fall back.
$repoRoot = try { git rev-parse --show-toplevel 2>$null } catch { $null }
# If git did not return a repo root, or the git root does not contain .specify,
# fall back to walking up from the script directory to find the initialized project root.
if (-not $repoRoot -or -not (Test-Path (Join-Path $repoRoot '.specify'))) {
    $repoRoot = $scriptDir
    $fsRoot = [System.IO.Path]::GetPathRoot($repoRoot)
    while ($repoRoot -and $repoRoot -ne $fsRoot -and -not (Test-Path (Join-Path $repoRoot '.specify'))) {
        $repoRoot = Split-Path -Parent $repoRoot
    }
}

& "$repoRoot/.specify/scripts/powershell/update-agent-context.ps1" -AgentType vibe
# Propagate the shared script's exit status to our caller; the bash twin gets
# this for free via `exec`. ($LASTEXITCODE is $null when unset → exits 0.)
exit $LASTEXITCODE
|
||||
28
src/specify_cli/integrations/vibe/scripts/update-context.sh
Executable file
28
src/specify_cli/integrations/vibe/scripts/update-context.sh
Executable file
@@ -0,0 +1,28 @@
|
||||
#!/usr/bin/env bash
# update-context.sh — Mistral Vibe integration: create/update .vibe/agents/specify-agents.md
#
# Thin wrapper that delegates to the shared update-agent-context script.
# Activated in Stage 7 when the shared script uses integration.json dispatch.
#
# Until then, this delegates to the shared script as a subprocess.

set -euo pipefail

# Honor a pre-set REPO_ROOT; otherwise derive it from the script location
# (walk up until a directory containing .specify/ is found).
if [ -z "${REPO_ROOT:-}" ]; then
  script_dir="$(cd "$(dirname "$0")" && pwd)"
  candidate="$script_dir"
  while [ "$candidate" != "/" ] && [ ! -d "$candidate/.specify" ]; do
    candidate="$(dirname "$candidate")"
  done

  if [ -d "$candidate/.specify" ]; then
    REPO_ROOT="$candidate"
  else
    # Walk-up failed: try the git toplevel, but only if it is an initialized project.
    git_top="$(git rev-parse --show-toplevel 2>/dev/null || true)"
    if [ -n "$git_top" ] && [ -d "$git_top/.specify" ]; then
      REPO_ROOT="$git_top"
    else
      REPO_ROOT="$candidate"
    fi
  fi
fi

# Replace this process with the shared script so its exit status propagates.
exec "$REPO_ROOT/.specify/scripts/bash/update-agent-context.sh" vibe
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user