Mirror of https://github.com/leonvanzyl/autocoder.git, synced 2026-01-30 06:12:06 +00:00
feat: add concurrent agents with dependency system and delightful UI
Major feature implementation for parallel agent execution with dependency-aware scheduling and an engaging multi-agent UI experience.

Backend Changes:
- Add parallel_orchestrator.py for concurrent feature processing
- Add api/dependency_resolver.py with cycle detection (Kahn's algorithm + DFS)
- Add atomic feature_claim_next() with retry limit and exponential backoff
- Fix circular dependency check arguments in 4 locations
- Add AgentTracker class for parsing agent output and emitting updates
- Add browser isolation with --isolated flag for Playwright MCP
- Extend WebSocket protocol with agent_update messages and log attribution
- Add WSAgentUpdateMessage schema with agent states and mascot names
- Fix WSProgressMessage to include in_progress field

New UI Components:
- AgentMissionControl: Dashboard showing active agents with collapsible activity
- AgentCard: Individual agent status with avatar and thought bubble
- AgentAvatar: SVG mascots (Spark, Fizz, Octo, Hoot, Buzz) with animations
- ActivityFeed: Recent activity stream with stable keys (no flickering)
- CelebrationOverlay: Confetti animation with click/Escape dismiss
- DependencyGraph: Interactive node graph visualization with dagre layout
- DependencyBadge: Visual indicator for feature dependencies
- ViewToggle: Switch between Kanban and Graph views
- KeyboardShortcutsHelp: Help overlay accessible via ? key

UI/UX Improvements:
- Celebration queue system to handle rapid success messages
- Accessibility attributes on AgentAvatar (role, aria-label, aria-live)
- Collapsible Recent Activity section with persisted preference
- Agent count display in header
- Keyboard shortcut G to toggle Kanban/Graph view
- Real-time thought bubbles and state animations

Bug Fixes:
- Fix circular dependency validation (swapped source/target arguments)
- Add MAX_CLAIM_RETRIES=10 to prevent stack overflow under contention
- Fix THOUGHT_PATTERNS to match actual [Tool: name] format
- Fix ActivityFeed key prop to prevent re-renders on new items
- Add featureId/agentIndex to log messages for proper attribution

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
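The cycle detection mentioned in the commit message lives in api/dependency_resolver.py, which is not part of the hunks shown below. As a rough sketch of the Kahn's-algorithm half of such a check (the function name and the feature-dict shape follow the router code in this diff; the rest is illustrative, not the actual implementation):

from collections import deque

def would_create_circular_dependency(features, source_id, target_id):
    # Build a dependency graph with the proposed edge source_id -> target_id added.
    # Each feature is assumed to be a dict with "id" and "dependencies" keys,
    # matching the f.to_dict() shape used by the router code below.
    graph = {f["id"]: set(f.get("dependencies") or []) for f in features}
    graph.setdefault(source_id, set()).add(target_id)

    # Kahn's algorithm: count how many nodes can be topologically ordered.
    indegree = {node: 0 for node in graph}
    for node in graph:
        for dep in graph[node]:
            if dep in indegree:  # ignore references to unknown ids
                indegree[dep] += 1

    queue = deque(node for node, deg in indegree.items() if deg == 0)
    visited = 0
    while queue:
        node = queue.popleft()
        visited += 1
        for dep in graph[node]:
            if dep in indegree:
                indegree[dep] -= 1
                if indegree[dep] == 0:
                    queue.append(dep)

    # If some nodes could not be ordered, the proposed edge closes a cycle
    # (or the graph already contained one).
    return visited < len(graph)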
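Similarly, feature_claim_next() is not in the hunks below; MAX_CLAIM_RETRIES=10 is the only concrete detail the commit message gives. A minimal sketch of a bounded claim loop with exponential backoff and jitter (the helper name, exception type, and delay values are assumptions):

import asyncio
import random

MAX_CLAIM_RETRIES = 10  # from the commit message; bounds retries under contention

class ClaimConflict(Exception):
    """Raised by the (hypothetical) claim callable when another agent won the row."""

async def claim_with_backoff(try_claim, max_retries=MAX_CLAIM_RETRIES):
    """Retry an async claim callable with capped exponential backoff plus jitter."""
    for attempt in range(max_retries):
        try:
            return await try_claim()
        except ClaimConflict:
            # 0.1s, 0.2s, 0.4s, ... capped at 5s, scaled by random jitter.
            delay = min(0.1 * (2 ** attempt), 5.0) * (0.5 + random.random())
            await asyncio.sleep(delay)
    return None  # contention persisted; caller treats None as "nothing claimed"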
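The agent_update WebSocket message and the in_progress field on WSProgressMessage could look roughly like the Pydantic sketch below; apart from the message type, the mascot names, and in_progress, the field names are guesses rather than the actual schema:

from typing import Literal, Optional
from pydantic import BaseModel

class WSAgentUpdateMessage(BaseModel):
    type: Literal["agent_update"] = "agent_update"
    agent_index: int                  # which concurrent agent this update belongs to
    mascot: Literal["Spark", "Fizz", "Octo", "Hoot", "Buzz"]
    state: str                        # e.g. "working", "testing", "celebrating"
    feature_id: Optional[int] = None  # feature the agent is currently working on
    thought: Optional[str] = None     # short text rendered in the thought bubble

class WSProgressMessage(BaseModel):
    type: Literal["progress"] = "progress"
    passing: int
    in_progress: int                  # field this commit adds
    total: int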
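The THOUGHT_PATTERNS fix refers to parsing tool-use lines of the form "[Tool: name]" from agent output; a tiny illustrative pattern (the real constant may be broader):

import re

# Matches lines such as "[Tool: playwright_navigate]" and captures the tool name.
TOOL_PATTERN = re.compile(r"\[Tool:\s*(?P<name>[^\]]+)\]")

match = TOOL_PATTERN.search("[Tool: playwright_navigate] opening http://localhost:3000")
print(match.group("name") if match else None)  # -> playwright_navigate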
@@ -85,6 +85,8 @@ async def get_agent_status(project_name: str):
        started_at=manager.started_at,
        yolo_mode=manager.yolo_mode,
        model=manager.model,
        parallel_mode=manager.parallel_mode,
        max_concurrency=manager.max_concurrency,
    )

@@ -100,8 +102,15 @@ async def start_agent(
    default_yolo, default_model = _get_settings_defaults()
    yolo_mode = request.yolo_mode if request.yolo_mode is not None else default_yolo
    model = request.model if request.model else default_model
    parallel_mode = request.parallel_mode or False
    max_concurrency = request.max_concurrency

    success, message = await manager.start(yolo_mode=yolo_mode, model=model)
    success, message = await manager.start(
        yolo_mode=yolo_mode,
        model=model,
        parallel_mode=parallel_mode,
        max_concurrency=max_concurrency,
    )

    return AgentActionResponse(
        success=success,

@@ -12,6 +12,9 @@ from pathlib import Path
from fastapi import APIRouter, HTTPException

from ..schemas import (
    DependencyGraphNode,
    DependencyGraphResponse,
    DependencyUpdate,
    FeatureBulkCreate,
    FeatureBulkCreateResponse,
    FeatureCreate,
@@ -72,11 +75,27 @@ def get_db_session(project_dir: Path):
        session.close()


def feature_to_response(f) -> FeatureResponse:
def feature_to_response(f, passing_ids: set[int] | None = None) -> FeatureResponse:
    """Convert a Feature model to a FeatureResponse.

    Handles legacy NULL values in boolean fields by treating them as False.
    Computes blocked status if passing_ids is provided.

    Args:
        f: Feature model instance
        passing_ids: Optional set of feature IDs that are passing (for computing blocked status)

    Returns:
        FeatureResponse with computed blocked status
    """
    deps = f.dependencies or []
    if passing_ids is None:
        blocking = []
        blocked = False
    else:
        blocking = [d for d in deps if d not in passing_ids]
        blocked = len(blocking) > 0

    return FeatureResponse(
        id=f.id,
        priority=f.priority,
@@ -84,9 +103,12 @@ def feature_to_response(f) -> FeatureResponse:
        name=f.name,
        description=f.description,
        steps=f.steps if isinstance(f.steps, list) else [],
        dependencies=deps,
        # Handle legacy NULL values gracefully - treat as False
        passes=f.passes if f.passes is not None else False,
        in_progress=f.in_progress if f.in_progress is not None else False,
        blocked=blocked,
        blocking_dependencies=blocking,
    )

@@ -119,12 +141,15 @@ async def list_features(project_name: str):
    with get_db_session(project_dir) as session:
        all_features = session.query(Feature).order_by(Feature.priority).all()

        # Compute passing IDs for blocked status calculation
        passing_ids = {f.id for f in all_features if f.passes}

        pending = []
        in_progress = []
        done = []

        for f in all_features:
            feature_response = feature_to_response(f)
            feature_response = feature_to_response(f, passing_ids)
            if f.passes:
                done.append(feature_response)
            elif f.in_progress:
@@ -174,6 +199,7 @@ async def create_feature(project_name: str, feature: FeatureCreate):
            name=feature.name,
            description=feature.description,
            steps=feature.steps,
            dependencies=feature.dependencies if feature.dependencies else None,
            passes=False,
            in_progress=False,
        )
@@ -190,6 +216,167 @@ async def create_feature(project_name: str, feature: FeatureCreate):
        raise HTTPException(status_code=500, detail="Failed to create feature")


# ============================================================================
# Static path endpoints - MUST be declared before /{feature_id} routes
# ============================================================================


@router.post("/bulk", response_model=FeatureBulkCreateResponse)
async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
    """
    Create multiple features at once.

    Features are assigned sequential priorities starting from:
    - starting_priority if specified (must be >= 1)
    - max(existing priorities) + 1 if not specified

    This is useful for:
    - Expanding a project with new features via AI
    - Importing features from external sources
    - Batch operations

    Returns:
        {"created": N, "features": [...]}
    """
    project_name = validate_project_name(project_name)
    project_dir = _get_project_path(project_name)

    if not project_dir:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")

    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    if not bulk.features:
        return FeatureBulkCreateResponse(created=0, features=[])

    # Validate starting_priority if provided
    if bulk.starting_priority is not None and bulk.starting_priority < 1:
        raise HTTPException(status_code=400, detail="starting_priority must be >= 1")

    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            # Determine starting priority with row-level lock to prevent race conditions
            if bulk.starting_priority is not None:
                current_priority = bulk.starting_priority
            else:
                # Lock the max priority row to prevent concurrent inserts from getting same priority
                max_priority_feature = (
                    session.query(Feature)
                    .order_by(Feature.priority.desc())
                    .with_for_update()
                    .first()
                )
                current_priority = (max_priority_feature.priority + 1) if max_priority_feature else 1

            created_ids = []

            for feature_data in bulk.features:
                db_feature = Feature(
                    priority=current_priority,
                    category=feature_data.category,
                    name=feature_data.name,
                    description=feature_data.description,
                    steps=feature_data.steps,
                    dependencies=feature_data.dependencies if feature_data.dependencies else None,
                    passes=False,
                    in_progress=False,
                )
                session.add(db_feature)
                session.flush()  # Flush to get the ID immediately
                created_ids.append(db_feature.id)
                current_priority += 1

            session.commit()

            # Query created features by their IDs (avoids relying on priority range)
            created_features = []
            for db_feature in session.query(Feature).filter(
                Feature.id.in_(created_ids)
            ).order_by(Feature.priority).all():
                created_features.append(feature_to_response(db_feature))

            return FeatureBulkCreateResponse(
                created=len(created_features),
                features=created_features
            )
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to bulk create features")
        raise HTTPException(status_code=500, detail="Failed to bulk create features")

@router.get("/graph", response_model=DependencyGraphResponse)
|
||||
async def get_dependency_graph(project_name: str):
|
||||
"""Return dependency graph data for visualization.
|
||||
|
||||
Returns nodes (features) and edges (dependencies) suitable for
|
||||
rendering with React Flow or similar graph libraries.
|
||||
"""
|
||||
project_name = validate_project_name(project_name)
|
||||
project_dir = _get_project_path(project_name)
|
||||
|
||||
if not project_dir:
|
||||
raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")
|
||||
|
||||
if not project_dir.exists():
|
||||
raise HTTPException(status_code=404, detail="Project directory not found")
|
||||
|
||||
db_file = project_dir / "features.db"
|
||||
if not db_file.exists():
|
||||
return DependencyGraphResponse(nodes=[], edges=[])
|
||||
|
||||
_, Feature = _get_db_classes()
|
||||
|
||||
try:
|
||||
with get_db_session(project_dir) as session:
|
||||
all_features = session.query(Feature).all()
|
||||
passing_ids = {f.id for f in all_features if f.passes}
|
||||
|
||||
nodes = []
|
||||
edges = []
|
||||
|
||||
for f in all_features:
|
||||
deps = f.dependencies or []
|
||||
blocking = [d for d in deps if d not in passing_ids]
|
||||
|
||||
if f.passes:
|
||||
status = "done"
|
||||
elif blocking:
|
||||
status = "blocked"
|
||||
elif f.in_progress:
|
||||
status = "in_progress"
|
||||
else:
|
||||
status = "pending"
|
||||
|
||||
nodes.append(DependencyGraphNode(
|
||||
id=f.id,
|
||||
name=f.name,
|
||||
category=f.category,
|
||||
status=status,
|
||||
priority=f.priority,
|
||||
dependencies=deps
|
||||
))
|
||||
|
||||
for dep_id in deps:
|
||||
edges.append({"source": dep_id, "target": f.id})
|
||||
|
||||
return DependencyGraphResponse(nodes=nodes, edges=edges)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception:
|
||||
logger.exception("Failed to get dependency graph")
|
||||
raise HTTPException(status_code=500, detail="Failed to get dependency graph")
|
||||
|
||||
|
||||
# ============================================================================
|
||||
# Parameterized path endpoints - /{feature_id} routes
|
||||
# ============================================================================
|
||||
|
||||
|
||||
@router.get("/{feature_id}", response_model=FeatureResponse)
|
||||
async def get_feature(project_name: str, feature_id: int):
|
||||
"""Get details of a specific feature."""
|
||||
@@ -268,11 +455,17 @@ async def update_feature(project_name: str, feature_id: int, update: FeatureUpda
                feature.steps = update.steps
            if update.priority is not None:
                feature.priority = update.priority
            if update.dependencies is not None:
                feature.dependencies = update.dependencies if update.dependencies else None

            session.commit()
            session.refresh(feature)

            return feature_to_response(feature)
            # Compute passing IDs for response
            all_features = session.query(Feature).all()
            passing_ids = {f.id for f in all_features if f.passes}

            return feature_to_response(feature, passing_ids)
    except HTTPException:
        raise
    except Exception:
@@ -282,7 +475,12 @@ async def update_feature(project_name: str, feature_id: int, update: FeatureUpda

@router.delete("/{feature_id}")
async def delete_feature(project_name: str, feature_id: int):
    """Delete a feature."""
    """Delete a feature and clean up references in other features' dependencies.

    When a feature is deleted, any other features that depend on it will have
    that dependency removed from their dependencies list. This prevents orphaned
    dependencies that would permanently block features.
    """
    project_name = validate_project_name(project_name)
    project_dir = _get_project_path(project_name)

@@ -301,10 +499,24 @@ async def delete_feature(project_name: str, feature_id: int):
            if not feature:
                raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")

            # Clean up dependency references in other features
            # This prevents orphaned dependencies that would block features forever
            affected_features = []
            for f in session.query(Feature).all():
                if f.dependencies and feature_id in f.dependencies:
                    # Remove the deleted feature from this feature's dependencies
                    deps = [d for d in f.dependencies if d != feature_id]
                    f.dependencies = deps if deps else None
                    affected_features.append(f.id)

            session.delete(feature)
            session.commit()

            return {"success": True, "message": f"Feature {feature_id} deleted"}
            message = f"Feature {feature_id} deleted"
            if affected_features:
                message += f". Removed from dependencies of features: {affected_features}"

            return {"success": True, "message": message, "affected_features": affected_features}
    except HTTPException:
        raise
    except Exception:
@@ -352,22 +564,123 @@ async def skip_feature(project_name: str, feature_id: int):
        raise HTTPException(status_code=500, detail="Failed to skip feature")


@router.post("/bulk", response_model=FeatureBulkCreateResponse)
async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
# ============================================================================
# Dependency Management Endpoints
# ============================================================================


def _get_dependency_resolver():
    """Lazy import of dependency resolver."""
    import sys
    root = Path(__file__).parent.parent.parent
    if str(root) not in sys.path:
        sys.path.insert(0, str(root))
    from api.dependency_resolver import would_create_circular_dependency, MAX_DEPENDENCIES_PER_FEATURE
    return would_create_circular_dependency, MAX_DEPENDENCIES_PER_FEATURE


@router.post("/{feature_id}/dependencies/{dep_id}")
async def add_dependency(project_name: str, feature_id: int, dep_id: int):
    """Add a dependency relationship between features.

    The dep_id feature must be completed before feature_id can be started.
    Validates: self-reference, existence, circular dependencies, max limit.
    """
    Create multiple features at once.
    project_name = validate_project_name(project_name)

    Features are assigned sequential priorities starting from:
    - starting_priority if specified (must be >= 1)
    - max(existing priorities) + 1 if not specified
    # Security: Self-reference check
    if feature_id == dep_id:
        raise HTTPException(status_code=400, detail="A feature cannot depend on itself")

    This is useful for:
    - Expanding a project with new features via AI
    - Importing features from external sources
    - Batch operations
    project_dir = _get_project_path(project_name)

    Returns:
        {"created": N, "features": [...]}
    if not project_dir:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")

    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    would_create_circular_dependency, MAX_DEPENDENCIES_PER_FEATURE = _get_dependency_resolver()
    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            feature = session.query(Feature).filter(Feature.id == feature_id).first()
            dependency = session.query(Feature).filter(Feature.id == dep_id).first()

            if not feature:
                raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")
            if not dependency:
                raise HTTPException(status_code=404, detail=f"Dependency {dep_id} not found")

            current_deps = feature.dependencies or []

            # Security: Limit check
            if len(current_deps) >= MAX_DEPENDENCIES_PER_FEATURE:
                raise HTTPException(status_code=400, detail=f"Maximum {MAX_DEPENDENCIES_PER_FEATURE} dependencies allowed")

            if dep_id in current_deps:
                raise HTTPException(status_code=400, detail="Dependency already exists")

            # Security: Circular dependency check
            # source_id = feature_id (gaining dep), target_id = dep_id (being depended upon)
            all_features = [f.to_dict() for f in session.query(Feature).all()]
            if would_create_circular_dependency(all_features, feature_id, dep_id):
                raise HTTPException(status_code=400, detail="Would create circular dependency")

            current_deps.append(dep_id)
            feature.dependencies = sorted(current_deps)
            session.commit()

            return {"success": True, "feature_id": feature_id, "dependencies": feature.dependencies}
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to add dependency")
        raise HTTPException(status_code=500, detail="Failed to add dependency")

@router.delete("/{feature_id}/dependencies/{dep_id}")
|
||||
async def remove_dependency(project_name: str, feature_id: int, dep_id: int):
|
||||
"""Remove a dependency from a feature."""
|
||||
project_name = validate_project_name(project_name)
|
||||
project_dir = _get_project_path(project_name)
|
||||
|
||||
if not project_dir:
|
||||
raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")
|
||||
|
||||
if not project_dir.exists():
|
||||
raise HTTPException(status_code=404, detail="Project directory not found")
|
||||
|
||||
_, Feature = _get_db_classes()
|
||||
|
||||
try:
|
||||
with get_db_session(project_dir) as session:
|
||||
feature = session.query(Feature).filter(Feature.id == feature_id).first()
|
||||
if not feature:
|
||||
raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")
|
||||
|
||||
current_deps = feature.dependencies or []
|
||||
if dep_id not in current_deps:
|
||||
raise HTTPException(status_code=400, detail="Dependency does not exist")
|
||||
|
||||
current_deps.remove(dep_id)
|
||||
feature.dependencies = current_deps if current_deps else None
|
||||
session.commit()
|
||||
|
||||
return {"success": True, "feature_id": feature_id, "dependencies": feature.dependencies or []}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception:
|
||||
logger.exception("Failed to remove dependency")
|
||||
raise HTTPException(status_code=500, detail="Failed to remove dependency")
|
||||
|
||||
|
||||
@router.put("/{feature_id}/dependencies")
|
||||
async def set_dependencies(project_name: str, feature_id: int, update: DependencyUpdate):
|
||||
"""Set all dependencies for a feature at once, replacing any existing.
|
||||
|
||||
Validates: self-reference, existence of all dependencies, circular dependencies, max limit.
|
||||
"""
|
||||
project_name = validate_project_name(project_name)
|
||||
project_dir = _get_project_path(project_name)
|
||||
@@ -378,62 +691,56 @@ async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    if not bulk.features:
        return FeatureBulkCreateResponse(created=0, features=[])
    dependency_ids = update.dependency_ids

    # Validate starting_priority if provided
    if bulk.starting_priority is not None and bulk.starting_priority < 1:
        raise HTTPException(status_code=400, detail="starting_priority must be >= 1")
    # Security: Self-reference check
    if feature_id in dependency_ids:
        raise HTTPException(status_code=400, detail="A feature cannot depend on itself")

    # Check for duplicates
    if len(dependency_ids) != len(set(dependency_ids)):
        raise HTTPException(status_code=400, detail="Duplicate dependencies not allowed")

    would_create_circular_dependency, _ = _get_dependency_resolver()
    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            # Determine starting priority with row-level lock to prevent race conditions
            if bulk.starting_priority is not None:
                current_priority = bulk.starting_priority
            else:
                # Lock the max priority row to prevent concurrent inserts from getting same priority
                max_priority_feature = (
                    session.query(Feature)
                    .order_by(Feature.priority.desc())
                    .with_for_update()
                    .first()
                )
                current_priority = (max_priority_feature.priority + 1) if max_priority_feature else 1
            feature = session.query(Feature).filter(Feature.id == feature_id).first()
            if not feature:
                raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")

            created_ids = []
            # Validate all dependencies exist
            all_feature_ids = {f.id for f in session.query(Feature).all()}
            missing = [d for d in dependency_ids if d not in all_feature_ids]
            if missing:
                raise HTTPException(status_code=400, detail=f"Dependencies not found: {missing}")

            for feature_data in bulk.features:
                db_feature = Feature(
                    priority=current_priority,
                    category=feature_data.category,
                    name=feature_data.name,
                    description=feature_data.description,
                    steps=feature_data.steps,
                    passes=False,
                    in_progress=False,
                )
                session.add(db_feature)
                session.flush()  # Flush to get the ID immediately
                created_ids.append(db_feature.id)
                current_priority += 1
            # Check for circular dependencies
            all_features = [f.to_dict() for f in session.query(Feature).all()]
            # Temporarily update the feature's dependencies for cycle check
            test_features = []
            for f in all_features:
                if f["id"] == feature_id:
                    test_features.append({**f, "dependencies": dependency_ids})
                else:
                    test_features.append(f)

            for dep_id in dependency_ids:
                # source_id = feature_id (gaining dep), target_id = dep_id (being depended upon)
                if would_create_circular_dependency(test_features, feature_id, dep_id):
                    raise HTTPException(
                        status_code=400,
                        detail=f"Cannot add dependency {dep_id}: would create circular dependency"
                    )

            # Set dependencies
            feature.dependencies = sorted(dependency_ids) if dependency_ids else None
            session.commit()

            # Query created features by their IDs (avoids relying on priority range)
            created_features = []
            for db_feature in session.query(Feature).filter(
                Feature.id.in_(created_ids)
            ).order_by(Feature.priority).all():
                created_features.append(feature_to_response(db_feature))

            return FeatureBulkCreateResponse(
                created=len(created_features),
                features=created_features
            )
            return {"success": True, "feature_id": feature_id, "dependencies": feature.dependencies or []}
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to bulk create features")
        raise HTTPException(status_code=500, detail="Failed to bulk create features")
        logger.exception("Failed to set dependencies")
        raise HTTPException(status_code=500, detail="Failed to set dependencies")
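Taken together, the hunks above add bulk creation, a dependency graph view, and per-feature dependency management to the features router. A hedged client-side sketch of the new endpoints follows; the host and the path prefix in front of the router are assumptions, only the route segments shown in the diff ("/bulk", "/graph", "/{feature_id}/dependencies/{dep_id}", "/{feature_id}/dependencies") are real:

import requests

# Assumed base URL; the actual prefix for this router is not visible in the diff.
BASE = "http://localhost:8000/api/projects/demo/features"

# Bulk-create two features; the server assigns sequential priorities.
resp = requests.post(f"{BASE}/bulk", json={
    "features": [
        {"category": "auth", "name": "Login", "description": "Email login", "steps": []},
        {"category": "auth", "name": "Logout", "description": "Session logout", "steps": []},
    ]
})
created = resp.json()["features"]

# Make the second feature depend on the first (dep must pass before the dependent starts).
first_id, second_id = created[0]["id"], created[1]["id"]
requests.post(f"{BASE}/{second_id}/dependencies/{first_id}")

# Replace all dependencies of a feature at once.
requests.put(f"{BASE}/{second_id}/dependencies", json={"dependency_ids": [first_id]})

# Fetch the graph (nodes plus edges) for visualization.
graph = requests.get(f"{BASE}/graph").json()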