mirror of
https://github.com/github/spec-kit.git
synced 2026-03-21 04:43:08 +00:00
* feat(cli): embed core pack in wheel + offline-first init (#1711, #1752) Bundle templates, commands, and scripts inside the specify-cli wheel so that `specify init` works without any network access by default. Changes: - pyproject.toml: add hatchling force-include for core_pack assets; bump version to 0.2.1 - __init__.py: add _locate_core_pack(), _generate_agent_commands() (Python port of generate_commands() shell function), and scaffold_from_core_pack(); modify init() to scaffold from bundled assets by default; add --from-github flag to opt back in to the GitHub download path - release.yml: build wheel during CI release job - create-github-release.sh: attach .whl as a release asset - docs/installation.md: add Enterprise/Air-Gapped Installation section - README.md: add Option 3 enterprise install with accurate offline story Closes #1711 Addresses #1752 * fix(tests): update kiro alias test for offline-first scaffold path * feat(cli): invoke bundled release script at runtime for offline scaffold - Embed release scripts (bash + PowerShell) in wheel via pyproject.toml - Replace Python _generate_agent_commands() with subprocess invocation of the canonical create-release-packages.sh, guaranteeing byte-for-byte parity between 'specify init --offline' and GitHub release ZIPs - Fix macOS bash 3.2 compat in release script: replace cp --parents, local -n (nameref), and mapfile with POSIX-safe alternatives - Fix _TOML_AGENTS: remove qwen (uses markdown per release script) - Rename --from-github to --offline (opt-in to bundled assets) - Add _locate_release_script() for cross-platform script discovery - Update tests: remove bash 4+/GNU coreutils requirements, handle Kimi directory-per-skill layout, 576 tests passing - Update CHANGELOG and docs/installation.md * Potential fix for pull request finding Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com> * fix(offline): error out if --offline fails instead of falling back to network - 
_locate_core_pack() docstring now accurately describes that it only finds wheel-bundled core_pack/; source-checkout fallback lives in callers - init() --offline + no bundled assets now exits with a clear error (previously printed a warning and silently fell back to GitHub download) - init() scaffold failure under --offline now exits with an error instead of retrying via download_and_extract_template Addresses reviewer comment: https://github.com/github/spec-kit/pull/1803 * fix(offline): address PR review comments - fix(shell): harden validate_subset against glob injection in case patterns - fix(shell): make GENRELEASES_DIR overridable via env var for test isolation - fix(cli): probe pwsh then powershell on Windows instead of hardcoding pwsh - fix(cli): remove unreachable fallback branch when --offline fails - fix(cli): improve --offline error message with common failure causes - fix(release): move wheel build step after create-release-packages.sh - fix(docs): add --offline to installation.md air-gapped example - fix(tests): remove unused genreleases_dir param from _run_release_script - fix(tests): rewrite parity test to run one agent at a time with isolated temp dirs, preventing cross-agent interference from rm -rf * fix(offline): address second round of review comments - fix(shell): replace case-pattern membership with explicit loop + == check for unambiguous glob-safety in validate_subset() - fix(cli): require pwsh (PowerShell 7) only; drop powershell (PS5) fallback since the bundled script uses #requires -Version 7.0 - fix(cli): add bash and zip preflight checks in scaffold_from_core_pack() with clear error messages if either is missing - fix(build): list individual template files in pyproject.toml force-include to avoid duplicating templates/commands/ in the wheel * fix(offline): address third round of review comments - Add 120s timeout to subprocess.run in scaffold_from_core_pack to prevent indefinite hangs during offline scaffolding - Add 
test_pyproject_force_include_covers_all_templates to catch missing template files in wheel bundling - Tighten kiro alias test to assert specific scaffold path (download vs offline) * fix(offline): address Copilot review round 4 - fix(offline): use handle_vscode_settings() merge for --here --offline to prevent data loss on existing .vscode/settings.json - fix(release): glob wheel filename in create-github-release.sh instead of hardcoding version, preventing upload failures on version mismatch - docs(release): add comment noting pyproject.toml version is synced by release-trigger.yml before the tag is pushed * fix(offline): address review round 5 + offline bundle ZIP - fix(offline): pwsh-only, no powershell.exe fallback; clarify error message - fix(offline): tighten _has_bundled to check scripts dir for source checkouts - feat(release): build specify-bundle-v*.zip with all deps at release time - feat(release): attach offline bundle ZIP to GitHub release assets - docs: simplify air-gapped install to single ZIP download from releases - docs: add Windows PowerShell 7+ (pwsh) requirement note * fix(tests): session-scoped scaffold cache + timeout + dead code removal - Add timeout=300 and returncode check to _run_release_script() to fail fast with clear output on script hangs or failures - Remove unused import specify_cli, _SOURCE_TEMPLATES, bundled_project fixture - Add session-scoped scaffolded_sh/scaffolded_ps fixtures that scaffold once per agent and reuse the output directory across all invariant tests - Reduces test_core_pack_scaffold runtime from ~175s to ~51s (3.4x faster) - Parity tests still scaffold independently for isolation * fix(offline): remove wheel from release, update air-gapped docs to use pip download * fix(tests): handle codex skills layout and iflow agent in scaffold tests Codex now uses create_skills() with hyphenated separator (speckit-plan/SKILL.md) instead of generate_commands(). 
Update _SKILL_AGENTS, _expected_ext, and _list_command_files to handle both codex ('-') and kimi ('.') skill agents. Also picks up iflow as a new testable agent automatically via AGENT_CONFIG. * fix(offline): require wheel core_pack for --offline, remove source-checkout fallback --offline now strictly requires _locate_core_pack() to find the wheel's bundled core_pack/ directory. Source-checkout fallbacks are no longer accepted at the init() level — if core_pack/ is missing, the CLI errors out with a clear message pointing to the installation docs. scaffold_from_core_pack() retains its internal source-checkout fallbacks so parity tests can call it directly from a source checkout. * fix(offline): remove stale [Unreleased] CHANGELOG section, scope httpx.Client to download path - Remove entire [Unreleased] section — CHANGELOG is auto-generated at release - Move httpx.Client into use_github branch with context manager so --offline path doesn't allocate an unused network client * fix(offline): remove dead --from-github flag, fix typer.Exit handling, add page templates validation - Remove unused --from-github CLI option and docstring example - Add (typer.Exit, SystemExit) re-raise before broad except Exception to prevent duplicate error panel on offline scaffold failure - Validate page templates directory exists in scaffold_from_core_pack() to fail fast on incomplete wheel installs - Fix ruff lint: remove unused shutil import, remove f-prefix on strings without placeholders in test_core_pack_scaffold.py * docs(offline): add v0.6.0 deprecation notice with rationale - Help text: note bundled assets become default in v0.6.0 - Docstring: explain why GitHub download is being retired (no network dependency, no proxy/firewall issues, guaranteed version match) - Runtime nudge: when bundled assets are available but user takes the GitHub download path, suggest --offline with rationale - docs/installation.md: add deprecation notice with full rationale * fix(offline): allow --offline 
in source checkouts, fix CHANGELOG truncation - Simplify use_github logic: use_github = not offline (let scaffold_from_core_pack handle fallback to source-checkout paths) - Remove hard-fail when core_pack/ is absent — scaffold_from_core_pack already falls back to repo-root templates/scripts/commands - Fix truncated 'skill…' → 'skills' in CHANGELOG.md * fix(offline): sandbox GENRELEASES_DIR and clean up on failure - Pin GENRELEASES_DIR to temp dir in scaffold_from_core_pack() so a user-exported value cannot redirect output or cause rm -rf outside the sandbox - Clean up partial project directory on --offline scaffold failure (same behavior as the GitHub-download failure path) * fix(tests): use shutil.which for bash discovery, add ps parity tests - _find_bash() now tries shutil.which('bash') first so non-standard install locations (Nix, custom CI images) are found - Parametrize parity test over both 'sh' and 'ps' script types to ensure PowerShell variant stays byte-for-byte identical to release script output (353 scaffold tests, 810 total) * fix(tests): parse pyproject.toml with tomllib, remove unused fixture - Use tomllib to parse force-include keys from the actual TOML table instead of raw substring search (avoids false positives) - Remove unused source_template_stems fixture from test_scaffold_command_dir_location * fix: guard GENRELEASES_DIR against unsafe values, update docstring - Add safety check in create-release-packages.sh: reject empty, '/', '.', '..' values for GENRELEASES_DIR before rm -rf - Strip trailing slash to avoid path surprises - Update scaffold_from_core_pack() docstring to accurately describe all failure modes (not just 'assets not found') * fix: harden GENRELEASES_DIR guard, cache parity tests, safe iterdir - Reject '..' 
path segments in GENRELEASES_DIR to prevent traversal - Session-cache both scaffold and release-script results in parity tests — runtime drops from ~74s to ~45s (40% faster) - Guard cmd_dir.iterdir() in assertion message against missing dirs * fix(tests): exclude YAML frontmatter source metadata from path rewrite check The codex and kimi SKILL.md files have 'source: templates/commands/...' in their YAML frontmatter — this is provenance metadata, not a runtime path that needs rewriting. Strip frontmatter before checking for bare scripts/ and templates/ paths. * fix(offline): surface scaffold failure detail in error output When --offline scaffold fails, look up the tracker's 'scaffold' step detail and print it alongside the generic error message so users see the specific root cause (e.g. missing zip/pwsh, script stderr). --------- Co-authored-by: Copilot Autofix powered by AI <175728472+Copilot@users.noreply.github.com>
390 lines
16 KiB
Bash
Executable File
390 lines
16 KiB
Bash
Executable File
#!/usr/bin/env bash
set -euo pipefail

# create-release-packages.sh (workflow-local)
# Build Spec Kit template release archives for each supported AI assistant and script type.
# Usage: .github/workflows/scripts/create-release-packages.sh <version>
# Version argument should include leading 'v'.
# Optionally set AGENTS and/or SCRIPTS env vars to limit what gets built.
#   AGENTS  : space or comma separated subset of: claude gemini copilot cursor-agent qwen opencode windsurf junie codex kilocode auggie roo codebuddy amp shai tabnine kiro-cli agy bob vibe qodercli kimi trae pi iflow generic (default: all)
#   SCRIPTS : space or comma separated subset of: sh ps (default: both)
# Examples:
#   AGENTS=claude SCRIPTS=sh $0 v0.2.0
#   AGENTS="copilot,gemini" $0 v0.2.0
#   SCRIPTS=ps $0 v0.2.0

# Exactly one positional argument is accepted: the release version.
if (( $# != 1 )); then
  echo "Usage: $0 <version-with-v-prefix>" >&2
  exit 1
fi

NEW_VERSION="$1"

# The 'v' prefix is mandatory; zip names below embed it verbatim.
if [[ ! $NEW_VERSION =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then
  echo "Version must look like v0.0.0" >&2
  exit 1
fi

echo "Building release packages for $NEW_VERSION"
# All build artifacts land in .genreleases/ by default.
# Override via GENRELEASES_DIR env var (e.g. for tests writing to a temp dir).
GENRELEASES_DIR="${GENRELEASES_DIR:-.genreleases}"

# Guard against unsafe GENRELEASES_DIR values before cleaning.
# NOTE(review): with ':-' above an empty exported value is already replaced by
# the default, so this branch is unreachable defense-in-depth only.
if [[ -z "$GENRELEASES_DIR" ]]; then
  echo "GENRELEASES_DIR must not be empty" >&2
  exit 1
fi

# Refuse the filesystem root and bare relative dot names outright.
if [[ "$GENRELEASES_DIR" == '/' || "$GENRELEASES_DIR" == '.' || "$GENRELEASES_DIR" == '..' ]]; then
  echo "Refusing to use unsafe GENRELEASES_DIR value: $GENRELEASES_DIR" >&2
  exit 1
fi

# Reject any '..' anywhere in the path to block traversal before the rm -rf
# below. This also rejects legitimate names containing '..' — accepted tradeoff.
if [[ "$GENRELEASES_DIR" == *".."* ]]; then
  echo "Refusing to use GENRELEASES_DIR containing '..' path segments: $GENRELEASES_DIR" >&2
  exit 1
fi

# Recreate a clean output directory (the glob may match nothing; ignore that).
mkdir -p "$GENRELEASES_DIR"
rm -rf "${GENRELEASES_DIR%/}/"* || true
# Rewrite repo-relative resource paths (memory/, scripts/, templates/) on stdin
# so they point into the packaged .specify/ tree. Each rule swallows an
# optional leading '/', so an already-prefixed ".specify/scripts/" briefly
# becomes ".specify.specify/scripts/" — the final rule collapses that back.
rewrite_paths() {
  local -a substitutions=(
    -e 's@(/?)memory/@.specify/memory/@g'
    -e 's@(/?)scripts/@.specify/scripts/@g'
    -e 's@(/?)templates/@.specify/templates/@g'
    -e 's@\.specify\.specify/@.specify/@g'
  )
  sed -E "${substitutions[@]}"
}
# Render templates/commands/*.md into per-agent command files.
# Arguments:
#   $1 - agent          : substituted for __AGENT__ in template bodies
#   $2 - ext            : output flavor: toml | md | agent.md
#   $3 - arg_format     : substituted for {ARGS} (e.g. "$ARGUMENTS", "{{args}}")
#   $4 - output_dir     : destination directory (created if missing)
#   $5 - script_variant : 'sh' or 'ps'; selects the frontmatter script command
# Reads templates/commands/*.md relative to the current working directory and
# writes one speckit.<name>.<ext> file per template.
generate_commands() {
  local agent=$1 ext=$2 arg_format=$3 output_dir=$4 script_variant=$5
  local template
  mkdir -p "$output_dir"
  for template in templates/commands/*.md; do
    [[ -f "$template" ]] || continue   # skip the literal glob when dir is empty
    # file_content was previously a leaked global; declared local for hygiene
    # (sibling create_skills already declares its copy local).
    local name description script_command agent_script_command body file_content
    name=$(basename "$template" .md)

    # Normalize line endings
    file_content=$(tr -d '\r' < "$template")

    # Extract description and script command from YAML frontmatter.
    # NOTE: $script_variant is spliced into the awk program text; callers only
    # pass validated 'sh'/'ps', so this is not an injection risk in practice.
    description=$(printf '%s\n' "$file_content" | awk '/^description:/ {sub(/^description:[[:space:]]*/, ""); print; exit}')
    script_command=$(printf '%s\n' "$file_content" | awk -v sv="$script_variant" '/^[[:space:]]*'"$script_variant"':[[:space:]]*/ {sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, ""); print; exit}')

    if [[ -z $script_command ]]; then
      echo "Warning: no script command found for $script_variant in $template" >&2
      script_command="(Missing script command for $script_variant)"
    fi

    # Extract agent_script command from YAML frontmatter if present
    agent_script_command=$(printf '%s\n' "$file_content" | awk '
      /^agent_scripts:$/ { in_agent_scripts=1; next }
      in_agent_scripts && /^[[:space:]]*'"$script_variant"':[[:space:]]*/ {
        sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, "")
        print
        exit
      }
      in_agent_scripts && /^[a-zA-Z]/ { in_agent_scripts=0 }
    ')

    # Replace {SCRIPT} placeholder with the script command
    body=$(printf '%s\n' "$file_content" | sed "s|{SCRIPT}|${script_command}|g")

    # Replace {AGENT_SCRIPT} placeholder with the agent script command if found
    if [[ -n $agent_script_command ]]; then
      body=$(printf '%s\n' "$body" | sed "s|{AGENT_SCRIPT}|${agent_script_command}|g")
    fi

    # Remove the scripts: and agent_scripts: sections from frontmatter while
    # preserving YAML structure (small awk state machine: dash_count tracks the
    # '---' fences, skip_scripts tracks the section being dropped).
    body=$(printf '%s\n' "$body" | awk '
      /^---$/ { print; if (++dash_count == 1) in_frontmatter=1; else in_frontmatter=0; next }
      in_frontmatter && /^scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^agent_scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^[a-zA-Z].*:/ && skip_scripts { skip_scripts=0 }
      in_frontmatter && skip_scripts && /^[[:space:]]/ { next }
      { print }
    ')

    # Apply other substitutions
    body=$(printf '%s\n' "$body" | sed "s/{ARGS}/$arg_format/g" | sed "s/__AGENT__/$agent/g" | rewrite_paths)

    case $ext in
      toml)
        # NOTE(review): only backslashes are escaped for the TOML multi-line
        # string; a literal '"""' in a template body or a '"' in $description
        # would still break the output — current templates avoid both.
        body=$(printf '%s\n' "$body" | sed 's/\\/\\\\/g')
        { echo "description = \"$description\""; echo; echo "prompt = \"\"\""; echo "$body"; echo "\"\"\""; } > "$output_dir/speckit.$name.$ext" ;;
      md|agent.md)
        # Both markdown flavors share the exact same body; arms were previously
        # duplicated. printf avoids echo's flag/backslash edge cases.
        printf '%s\n' "$body" > "$output_dir/speckit.$name.$ext" ;;
    esac
  done
}
# For every speckit.*.agent.md in $1, write a matching .prompt.md stub into $2
# whose frontmatter points at the agent file's base name.
# Arguments:
#   $1 - agents_dir  : directory containing speckit.*.agent.md files
#   $2 - prompts_dir : destination directory (created if missing)
generate_copilot_prompts() {
  local agents_dir=$1 prompts_dir=$2
  mkdir -p "$prompts_dir"

  # Generate a .prompt.md file for each .agent.md file
  local agent_file base prompt_file
  for agent_file in "$agents_dir"/speckit.*.agent.md; do
    [[ -f "$agent_file" ]] || continue   # unmatched glob leaves the pattern itself

    # Declaration split from assignment so a basename failure is not masked
    # by 'local' always succeeding (ShellCheck SC2155).
    base=$(basename "$agent_file" .agent.md)
    prompt_file="$prompts_dir/${base}.prompt.md"

    cat > "$prompt_file" <<EOF
---
agent: ${base}
---
EOF
  done
}
# Create skills in <skills_dir>/<name>/SKILL.md format.
# Most agents use hyphenated names (e.g. speckit-plan); Kimi is the
# current dotted-name exception (e.g. speckit.plan).
#
# Technical debt note:
# Keep SKILL.md frontmatter aligned with `install_ai_skills()` and extension
# overrides (at minimum: name/description/compatibility/metadata.{author,source}).
#
# Arguments:
#   $1 - skills_dir      : destination root; one subdirectory per skill
#   $2 - script_variant  : 'sh' or 'ps'; selects the frontmatter script command
#   $3 - agent_name      : substituted for __AGENT__ in template bodies
#   $4 - separator       : joins 'speckit' and the command name (default '-')
# Reads templates/commands/*.md relative to the current working directory.
create_skills() {
  local skills_dir="$1"
  local script_variant="$2"
  local agent_name="$3"
  local separator="${4:-"-"}"

  for template in templates/commands/*.md; do
    [[ -f "$template" ]] || continue   # skip the literal glob when dir is empty
    local name
    name=$(basename "$template" .md)
    local skill_name="speckit${separator}${name}"
    local skill_dir="${skills_dir}/${skill_name}"
    mkdir -p "$skill_dir"

    # Normalize CRLF line endings before any frontmatter parsing.
    local file_content
    file_content=$(tr -d '\r' < "$template")

    # Extract description from frontmatter
    local description
    description=$(printf '%s\n' "$file_content" | awk '/^description:/ {sub(/^description:[[:space:]]*/, ""); print; exit}')
    [[ -z "$description" ]] && description="Spec Kit: ${name} workflow"

    # Extract script command
    # NOTE: $script_variant is spliced into the awk program text; callers only
    # pass validated 'sh'/'ps', so this is not an injection risk in practice.
    local script_command
    script_command=$(printf '%s\n' "$file_content" | awk -v sv="$script_variant" '/^[[:space:]]*'"$script_variant"':[[:space:]]*/ {sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, ""); print; exit}')
    [[ -z "$script_command" ]] && script_command="(Missing script command for $script_variant)"

    # Extract agent_script command from frontmatter if present
    local agent_script_command
    agent_script_command=$(printf '%s\n' "$file_content" | awk '
      /^agent_scripts:$/ { in_agent_scripts=1; next }
      in_agent_scripts && /^[[:space:]]*'"$script_variant"':[[:space:]]*/ {
        sub(/^[[:space:]]*'"$script_variant"':[[:space:]]*/, "")
        print
        exit
      }
      in_agent_scripts && /^[a-zA-Z]/ { in_agent_scripts=0 }
    ')

    # Build body: replace placeholders, strip scripts sections, rewrite paths
    local body
    body=$(printf '%s\n' "$file_content" | sed "s|{SCRIPT}|${script_command}|g")
    if [[ -n $agent_script_command ]]; then
      body=$(printf '%s\n' "$body" | sed "s|{AGENT_SCRIPT}|${agent_script_command}|g")
    fi
    # Drop scripts:/agent_scripts: frontmatter sections (awk state machine:
    # dash_count tracks the '---' fences, skip_scripts the section being dropped).
    body=$(printf '%s\n' "$body" | awk '
      /^---$/ { print; if (++dash_count == 1) in_frontmatter=1; else in_frontmatter=0; next }
      in_frontmatter && /^scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^agent_scripts:$/ { skip_scripts=1; next }
      in_frontmatter && /^[a-zA-Z].*:/ && skip_scripts { skip_scripts=0 }
      in_frontmatter && skip_scripts && /^[[:space:]]/ { next }
      { print }
    ')
    body=$(printf '%s\n' "$body" | sed 's/{ARGS}/\$ARGUMENTS/g' | sed "s/__AGENT__/$agent_name/g" | rewrite_paths)

    # Strip existing frontmatter and prepend skills frontmatter.
    local template_body
    template_body=$(printf '%s\n' "$body" | awk '/^---/{p++; if(p==2){found=1; next}} found')

    # NOTE(review): $description is emitted between double quotes unescaped; a
    # literal '"' in a template description would produce invalid YAML — the
    # current templates avoid it. Confirm before relaxing template rules.
    {
      printf -- '---\n'
      printf 'name: "%s"\n' "$skill_name"
      printf 'description: "%s"\n' "$description"
      printf 'compatibility: "%s"\n' "Requires spec-kit project structure with .specify/ directory"
      printf -- 'metadata:\n'
      printf ' author: "%s"\n' "github-spec-kit"
      printf ' source: "%s"\n' "templates/commands/${name}.md"
      printf -- '---\n\n'
      printf '%s\n' "$template_body"
    } > "$skill_dir/SKILL.md"
  done
}
# Assemble one agent+script-variant package directory under $GENRELEASES_DIR
# and zip it as spec-kit-template-<agent>-<script>-<version>.zip.
# Arguments:
#   $1 - agent  : one of ALL_AGENTS
#   $2 - script : 'sh' or 'ps'
# Globals read: GENRELEASES_DIR, NEW_VERSION. Reads memory/, scripts/,
# templates/, agent_templates/ relative to the current working directory.
build_variant() {
  local agent=$1 script=$2
  local base_dir="$GENRELEASES_DIR/sdd-${agent}-package-${script}"
  echo "Building $agent ($script) package..."
  mkdir -p "$base_dir"

  # Copy base structure but filter scripts by variant.
  # SPEC_DIR was previously a leaked global; declared local for hygiene.
  local SPEC_DIR="$base_dir/.specify"
  mkdir -p "$SPEC_DIR"

  [[ -d memory ]] && { cp -r memory "$SPEC_DIR/"; echo "Copied memory -> .specify"; }

  # Only copy the relevant script variant directory
  if [[ -d scripts ]]; then
    mkdir -p "$SPEC_DIR/scripts"
    case $script in
      sh)
        [[ -d scripts/bash ]] && { cp -r scripts/bash "$SPEC_DIR/scripts/"; echo "Copied scripts/bash -> .specify/scripts"; }
        ;;
      ps)
        [[ -d scripts/powershell ]] && { cp -r scripts/powershell "$SPEC_DIR/scripts/"; echo "Copied scripts/powershell -> .specify/scripts"; }
        ;;
    esac
    # Top-level helper files are shared by both variants (hoisted out of the
    # case arms, which previously duplicated this exact line).
    find scripts -maxdepth 1 -type f -exec cp {} "$SPEC_DIR/scripts/" \; 2>/dev/null || true
  fi

  # Copy templates, excluding command templates (rendered per-agent below) and
  # the VS Code settings file (placed separately for copilot).
  [[ -d templates ]] && { mkdir -p "$SPEC_DIR/templates"; find templates -type f -not -path "templates/commands/*" -not -name "vscode-settings.json" | while IFS= read -r f; do d="$SPEC_DIR/$(dirname "$f")"; mkdir -p "$d"; cp "$f" "$d/"; done; echo "Copied templates -> .specify/templates"; }

  # Per-agent command/skill layout. Keep this dispatch in sync with ALL_AGENTS.
  case $agent in
    claude)
      mkdir -p "$base_dir/.claude/commands"
      generate_commands claude md "\$ARGUMENTS" "$base_dir/.claude/commands" "$script" ;;
    gemini)
      mkdir -p "$base_dir/.gemini/commands"
      generate_commands gemini toml "{{args}}" "$base_dir/.gemini/commands" "$script"
      [[ -f agent_templates/gemini/GEMINI.md ]] && cp agent_templates/gemini/GEMINI.md "$base_dir/GEMINI.md" ;;
    copilot)
      mkdir -p "$base_dir/.github/agents"
      generate_commands copilot agent.md "\$ARGUMENTS" "$base_dir/.github/agents" "$script"
      generate_copilot_prompts "$base_dir/.github/agents" "$base_dir/.github/prompts"
      mkdir -p "$base_dir/.vscode"
      [[ -f templates/vscode-settings.json ]] && cp templates/vscode-settings.json "$base_dir/.vscode/settings.json"
      ;;
    cursor-agent)
      mkdir -p "$base_dir/.cursor/commands"
      generate_commands cursor-agent md "\$ARGUMENTS" "$base_dir/.cursor/commands" "$script" ;;
    qwen)
      mkdir -p "$base_dir/.qwen/commands"
      generate_commands qwen md "\$ARGUMENTS" "$base_dir/.qwen/commands" "$script"
      [[ -f agent_templates/qwen/QWEN.md ]] && cp agent_templates/qwen/QWEN.md "$base_dir/QWEN.md" ;;
    opencode)
      mkdir -p "$base_dir/.opencode/command"
      generate_commands opencode md "\$ARGUMENTS" "$base_dir/.opencode/command" "$script" ;;
    windsurf)
      mkdir -p "$base_dir/.windsurf/workflows"
      generate_commands windsurf md "\$ARGUMENTS" "$base_dir/.windsurf/workflows" "$script" ;;
    junie)
      mkdir -p "$base_dir/.junie/commands"
      generate_commands junie md "\$ARGUMENTS" "$base_dir/.junie/commands" "$script" ;;
    codex)
      mkdir -p "$base_dir/.agents/skills"
      create_skills "$base_dir/.agents/skills" "$script" "codex" "-" ;;
    kilocode)
      mkdir -p "$base_dir/.kilocode/workflows"
      generate_commands kilocode md "\$ARGUMENTS" "$base_dir/.kilocode/workflows" "$script" ;;
    auggie)
      mkdir -p "$base_dir/.augment/commands"
      generate_commands auggie md "\$ARGUMENTS" "$base_dir/.augment/commands" "$script" ;;
    roo)
      mkdir -p "$base_dir/.roo/commands"
      generate_commands roo md "\$ARGUMENTS" "$base_dir/.roo/commands" "$script" ;;
    codebuddy)
      mkdir -p "$base_dir/.codebuddy/commands"
      generate_commands codebuddy md "\$ARGUMENTS" "$base_dir/.codebuddy/commands" "$script" ;;
    qodercli)
      mkdir -p "$base_dir/.qoder/commands"
      generate_commands qodercli md "\$ARGUMENTS" "$base_dir/.qoder/commands" "$script" ;;
    amp)
      mkdir -p "$base_dir/.agents/commands"
      generate_commands amp md "\$ARGUMENTS" "$base_dir/.agents/commands" "$script" ;;
    shai)
      mkdir -p "$base_dir/.shai/commands"
      generate_commands shai md "\$ARGUMENTS" "$base_dir/.shai/commands" "$script" ;;
    tabnine)
      mkdir -p "$base_dir/.tabnine/agent/commands"
      generate_commands tabnine toml "{{args}}" "$base_dir/.tabnine/agent/commands" "$script"
      [[ -f agent_templates/tabnine/TABNINE.md ]] && cp agent_templates/tabnine/TABNINE.md "$base_dir/TABNINE.md" ;;
    kiro-cli)
      mkdir -p "$base_dir/.kiro/prompts"
      generate_commands kiro-cli md "\$ARGUMENTS" "$base_dir/.kiro/prompts" "$script" ;;
    agy)
      mkdir -p "$base_dir/.agent/commands"
      generate_commands agy md "\$ARGUMENTS" "$base_dir/.agent/commands" "$script" ;;
    bob)
      mkdir -p "$base_dir/.bob/commands"
      generate_commands bob md "\$ARGUMENTS" "$base_dir/.bob/commands" "$script" ;;
    vibe)
      mkdir -p "$base_dir/.vibe/prompts"
      generate_commands vibe md "\$ARGUMENTS" "$base_dir/.vibe/prompts" "$script" ;;
    kimi)
      mkdir -p "$base_dir/.kimi/skills"
      create_skills "$base_dir/.kimi/skills" "$script" "kimi" "." ;;
    trae)
      mkdir -p "$base_dir/.trae/rules"
      generate_commands trae md "\$ARGUMENTS" "$base_dir/.trae/rules" "$script" ;;
    pi)
      mkdir -p "$base_dir/.pi/prompts"
      generate_commands pi md "\$ARGUMENTS" "$base_dir/.pi/prompts" "$script" ;;
    iflow)
      mkdir -p "$base_dir/.iflow/commands"
      generate_commands iflow md "\$ARGUMENTS" "$base_dir/.iflow/commands" "$script" ;;
    generic)
      mkdir -p "$base_dir/.speckit/commands"
      generate_commands generic md "\$ARGUMENTS" "$base_dir/.speckit/commands" "$script" ;;
  esac

  # Zip from inside the package dir so archive paths are package-relative.
  ( cd "$base_dir" && zip -r "../spec-kit-template-${agent}-${script}-${NEW_VERSION}.zip" . )
  echo "Created $GENRELEASES_DIR/spec-kit-template-${agent}-${script}-${NEW_VERSION}.zip"
}
# Determine agent list.
# Full set of supported agents/script variants; keep in sync with the AGENTS
# comment in the usage header and with build_variant's case dispatch.
ALL_AGENTS=(claude gemini copilot cursor-agent qwen opencode windsurf junie codex kilocode auggie roo codebuddy amp shai tabnine kiro-cli agy bob vibe qodercli kimi trae pi iflow generic)
ALL_SCRIPTS=(sh ps)
# Constants from here on; nothing below reassigns them.
readonly ALL_AGENTS ALL_SCRIPTS
# Verify that every remaining argument appears in the allowed-values string.
# Membership is checked with an explicit loop and literal '==' comparison, so
# candidate values can never act as glob patterns.
# Arguments:
#   $1   - label used in error messages (e.g. 'agent', 'script')
#   $2   - space separated whitelist of allowed values
#   $3.. - candidate values to check
# Returns 0 when all candidates are allowed, 1 otherwise (reports every
# offender on stderr before returning).
validate_subset() {
  local label=$1; shift
  local allowed_str="$1"; shift
  local status=0
  local candidate allowed matched
  for candidate in "$@"; do
    matched=0
    for allowed in $allowed_str; do
      if [[ "$candidate" == "$allowed" ]]; then
        matched=1
        break
      fi
    done
    if [[ $matched -eq 0 ]]; then
      echo "Error: unknown $label '$candidate' (allowed: $allowed_str)" >&2
      status=1
    fi
  done
  return $status
}
# Normalise a comma/space/newline separated list read from stdin: emit one
# space-separated line with duplicates removed (first occurrence wins).
# NOTE(review): awk uses the field text as the printf format string; fine for
# agent/script names, which never contain '%' — confirm before reusing elsewhere.
read_list() {
  tr ',\n' ' ' \
    | awk '{for(i=1;i<=NF;i++){if(!seen[$i]++){printf((out?" ":"") $i);out=1}}}END{printf("\n")}'
}
# Resolve the agent subset: start from the full list, then narrow to $AGENTS
# when the caller provided one (validated against the known set).
AGENT_LIST=("${ALL_AGENTS[@]}")
if [[ -n ${AGENTS:-} ]]; then
  read -ra AGENT_LIST <<< "$(printf '%s' "$AGENTS" | read_list)"
  validate_subset agent "${ALL_AGENTS[*]}" "${AGENT_LIST[@]}" || exit 1
fi

# Same for the script-variant subset from $SCRIPTS.
SCRIPT_LIST=("${ALL_SCRIPTS[@]}")
if [[ -n ${SCRIPTS:-} ]]; then
  read -ra SCRIPT_LIST <<< "$(printf '%s' "$SCRIPTS" | read_list)"
  validate_subset script "${ALL_SCRIPTS[*]}" "${SCRIPT_LIST[@]}" || exit 1
fi
# Announce the build matrix, then build every agent x script combination.
printf 'Agents: %s\n' "${AGENT_LIST[*]}"
printf 'Scripts: %s\n' "${SCRIPT_LIST[*]}"

for current_agent in "${AGENT_LIST[@]}"; do
  for current_script in "${SCRIPT_LIST[@]}"; do
    build_variant "$current_agent" "$current_script"
  done
done

# Final summary: list the produced archives (fails loudly if none were made).
printf 'Archives in %s:\n' "$GENRELEASES_DIR"
ls -1 "$GENRELEASES_DIR"/spec-kit-template-*-"${NEW_VERSION}".zip