mirror of
https://github.com/github/spec-kit.git
synced 2026-03-20 04:13:08 +00:00
feat(cli): polite deep merge for settings.json and support JSONC (#1874)
* feat(cli): polite deep merge for settings.json with json5 and safe atomic write * fix(cli): prevent temp fd leak and align merge-policy docs
This commit is contained in:
@@ -7,6 +7,7 @@
|
||||
# "platformdirs",
|
||||
# "readchar",
|
||||
# "httpx",
|
||||
# "json5",
|
||||
# ]
|
||||
# ///
|
||||
"""
|
||||
@@ -32,6 +33,8 @@ import tempfile
|
||||
import shutil
|
||||
import shlex
|
||||
import json
|
||||
import json5
|
||||
import stat
|
||||
import yaml
|
||||
from pathlib import Path
|
||||
from typing import Any, Optional, Tuple
|
||||
@@ -654,37 +657,82 @@ def init_git_repo(project_path: Path, quiet: bool = False) -> Tuple[bool, Option
|
||||
os.chdir(original_cwd)
|
||||
|
||||
def handle_vscode_settings(sub_item, dest_file, rel_path, verbose=False, tracker=None) -> None:
    """Handle merging or copying of .vscode/settings.json files.

    Reads the template settings (JSONC allowed via json5) and politely merges
    them into an existing destination file, or copies the template when no
    destination exists yet.

    Note: when merge produces changes, rewritten output is normalized JSON and
    existing JSONC comments/trailing commas are not preserved.

    Args:
        sub_item: Path of the template settings.json to merge in.
        dest_file: Destination .vscode/settings.json path.
        rel_path: Relative path used purely for log messages.
        verbose: Emit progress messages when True.
        tracker: Optional step tracker; when present, direct console output is suppressed.
    """
    def log(message, color="green"):
        # Print only in verbose mode and only when no tracker owns the output.
        if verbose and not tracker:
            console.print(f"[{color}]{message}[/] {rel_path}")

    def atomic_write_json(target_file: Path, payload: dict[str, Any]) -> None:
        """Atomically write JSON while preserving existing mode bits when possible."""
        temp_path: Optional[Path] = None
        try:
            # Write to a sibling temp file first so os.replace() is atomic
            # (same filesystem as the target).
            with tempfile.NamedTemporaryFile(
                mode='w',
                encoding='utf-8',
                dir=target_file.parent,
                prefix=f"{target_file.name}.",
                suffix=".tmp",
                delete=False,
            ) as f:
                temp_path = Path(f.name)
                json.dump(payload, f, indent=4)
                f.write('\n')

            if target_file.exists():
                try:
                    existing_stat = target_file.stat()
                    os.chmod(temp_path, stat.S_IMODE(existing_stat.st_mode))
                    if hasattr(os, "chown"):
                        try:
                            os.chown(temp_path, existing_stat.st_uid, existing_stat.st_gid)
                        except PermissionError:
                            # Best-effort owner/group preservation without requiring elevated privileges.
                            pass
                except OSError:
                    # Best-effort metadata preservation; data safety is prioritized.
                    pass

            os.replace(temp_path, target_file)
        except Exception:
            # Clean up the orphaned temp file, but never let the cleanup
            # itself mask the original exception.
            if temp_path and temp_path.exists():
                try:
                    temp_path.unlink()
                except OSError:
                    pass
            raise

    try:
        with open(sub_item, 'r', encoding='utf-8') as f:
            # json5 natively supports comments and trailing commas (JSONC)
            new_settings = json5.load(f)

        if dest_file.exists():
            merged = merge_json_files(dest_file, new_settings, verbose=verbose and not tracker)
            if merged is not None:
                atomic_write_json(dest_file, merged)
                log("Merged:", "green")
                log("Note: comments/trailing commas are normalized when rewritten", "yellow")
            else:
                # merge_json_files returned None: either nothing changed or the
                # existing file must be preserved untouched.
                log("Skipped merge (preserved existing settings)", "yellow")
        else:
            shutil.copy2(sub_item, dest_file)
            log("Copied (no existing settings.json):", "blue")

    except Exception as e:
        log(f"Warning: Could not merge settings: {e}", "yellow")
        # Fall back to a plain copy only when it cannot clobber user settings.
        if not dest_file.exists():
            shutil.copy2(sub_item, dest_file)
def merge_json_files(existing_path: Path, new_content: Any, verbose: bool = False) -> Optional[dict[str, Any]]:
    """Merge new JSON content into existing JSON file.

    Performs a polite deep merge where:
    - New keys are added
    - Existing keys are preserved (not overwritten) unless both values are dictionaries
    - Nested dictionaries are merged recursively only when both sides are dictionaries
    - Lists and other values are preserved from base if they exist

    Args:
        existing_path: Path to existing JSON file
        new_content: Parsed template content to merge in (must be a dict to merge)
        verbose: Whether to print merge details

    Returns:
        Merged JSON content as dict, or None if the existing file should be left untouched.
    """
    # Load existing content first to have a safe fallback
    existing_content = None
    exists = existing_path.exists()

    if exists:
        try:
            with open(existing_path, 'r', encoding='utf-8') as f:
                # Handle comments (JSONC) natively with json5
                # Note: json5 handles BOM automatically
                existing_content = json5.load(f)
        except FileNotFoundError:
            # Handle race condition where file is deleted after exists() check
            exists = False
        except Exception as e:
            if verbose:
                console.print(f"[yellow]Warning: Could not read or parse existing JSON in {existing_path.name} ({e}).[/yellow]")
            # Skip merge to preserve existing file if unparseable or inaccessible (e.g. PermissionError)
            return None

    # Validate template content
    if not isinstance(new_content, dict):
        if verbose:
            console.print(f"[yellow]Warning: Template content for {existing_path.name} is not a dictionary. Preserving existing settings.[/yellow]")
        return None

    if not exists:
        return new_content

    # If existing content parsed but is not a dict, skip merge to avoid data loss
    if not isinstance(existing_content, dict):
        if verbose:
            console.print(f"[yellow]Warning: Existing JSON in {existing_path.name} is not an object. Skipping merge to avoid data loss.[/yellow]")
        return None

    def deep_merge_polite(base: dict[str, Any], update: dict[str, Any]) -> dict[str, Any]:
        """Recursively merge update dict into base dict, preserving base values."""
        result = base.copy()
        for key, value in update.items():
            if key not in result:
                # Add new key
                result[key] = value
            elif isinstance(result[key], dict) and isinstance(value, dict):
                # Recursively merge nested dictionaries
                result[key] = deep_merge_polite(result[key], value)
            else:
                # Key already exists and values are not both dicts; preserve existing value.
                # This ensures user settings aren't overwritten by template defaults.
                pass
        return result

    merged = deep_merge_polite(existing_content, new_content)

    # Detect if anything actually changed. If not, return None so the caller
    # can skip rewriting the file (preserving user's comments/formatting).
    if merged == existing_content:
        return None

    if verbose:
        console.print(f"[cyan]Merged JSON file:[/cyan] {existing_path.name}")

    return merged
Reference in New Issue
Block a user