Mirror of https://github.com/leonvanzyl/autocoder.git (synced 2026-01-30 06:12:06 +00:00)
feat: add concurrent agents with dependency system and delightful UI
Major feature implementation for parallel agent execution with dependency-aware scheduling and an engaging multi-agent UI experience.

Backend Changes:
- Add parallel_orchestrator.py for concurrent feature processing
- Add api/dependency_resolver.py with cycle detection (Kahn's algorithm + DFS)
- Add atomic feature_claim_next() with retry limit and exponential backoff
- Fix circular dependency check arguments in 4 locations
- Add AgentTracker class for parsing agent output and emitting updates
- Add browser isolation with --isolated flag for Playwright MCP
- Extend WebSocket protocol with agent_update messages and log attribution
- Add WSAgentUpdateMessage schema with agent states and mascot names
- Fix WSProgressMessage to include in_progress field

New UI Components:
- AgentMissionControl: Dashboard showing active agents with collapsible activity
- AgentCard: Individual agent status with avatar and thought bubble
- AgentAvatar: SVG mascots (Spark, Fizz, Octo, Hoot, Buzz) with animations
- ActivityFeed: Recent activity stream with stable keys (no flickering)
- CelebrationOverlay: Confetti animation with click/Escape dismiss
- DependencyGraph: Interactive node graph visualization with dagre layout
- DependencyBadge: Visual indicator for feature dependencies
- ViewToggle: Switch between Kanban and Graph views
- KeyboardShortcutsHelp: Help overlay accessible via ? key

UI/UX Improvements:
- Celebration queue system to handle rapid success messages
- Accessibility attributes on AgentAvatar (role, aria-label, aria-live)
- Collapsible Recent Activity section with persisted preference
- Agent count display in header
- Keyboard shortcut G to toggle Kanban/Graph view
- Real-time thought bubbles and state animations

Bug Fixes:
- Fix circular dependency validation (swapped source/target arguments)
- Add MAX_CLAIM_RETRIES=10 to prevent stack overflow under contention
- Fix THOUGHT_PATTERNS to match actual [Tool: name] format
- Fix ActivityFeed key prop to prevent re-renders on new items
- Add featureId/agentIndex to log messages for proper attribution

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
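Editor's note: api/dependency_resolver.py itself is not in the hunks shown below, so as a rough, non-authoritative sketch of the Kahn-style cycle detection the message describes (the function name and dict shape are assumptions, modeled on the `to_dict()` records the routes pass in):

```python
from collections import deque

def has_cycle(features: list[dict]) -> bool:
    """Kahn's algorithm: if a topological sort cannot consume every
    node, the leftover nodes form at least one dependency cycle."""
    ids = {f["id"] for f in features}
    # indegree = number of (known) dependencies each feature still has
    indegree = {fid: 0 for fid in ids}
    dependents: dict[int, list[int]] = {fid: [] for fid in ids}
    for f in features:
        for dep in f.get("dependencies") or []:
            if dep in ids:
                indegree[f["id"]] += 1
                dependents[dep].append(f["id"])
    queue = deque(fid for fid, deg in indegree.items() if deg == 0)
    visited = 0
    while queue:
        fid = queue.popleft()
        visited += 1
        for child in dependents[fid]:
            indegree[child] -= 1
            if indegree[child] == 0:
                queue.append(child)
    return visited != len(ids)  # unvisited nodes sit on a cycle
```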
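Likewise, `feature_claim_next()` with MAX_CLAIM_RETRIES and exponential backoff is only described, not shown; one plausible shape for that bounded, iterative retry (the `try_claim` coroutine and the delay constants are hypothetical stand-ins):

```python
import asyncio
import random

MAX_CLAIM_RETRIES = 10  # per the commit message: bounded to avoid blowing the stack

async def claim_next_feature(try_claim) -> int | None:
    """Retry an atomic claim under contention with jittered exponential backoff.

    `try_claim` is a hypothetical coroutine returning a claimed feature ID,
    or None when another agent won the race.
    """
    delay = 0.05
    for _attempt in range(MAX_CLAIM_RETRIES):
        feature_id = await try_claim()
        if feature_id is not None:
            return feature_id
        # Back off (with jitter) before contending again
        await asyncio.sleep(delay * (1 + random.random()))
        delay *= 2
    return None  # give up after MAX_CLAIM_RETRIES attempts
```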
@@ -85,6 +85,8 @@ async def get_agent_status(project_name: str):
        started_at=manager.started_at,
        yolo_mode=manager.yolo_mode,
        model=manager.model,
        parallel_mode=manager.parallel_mode,
        max_concurrency=manager.max_concurrency,
    )


@@ -100,8 +102,15 @@ async def start_agent(
    default_yolo, default_model = _get_settings_defaults()
    yolo_mode = request.yolo_mode if request.yolo_mode is not None else default_yolo
    model = request.model if request.model else default_model
    parallel_mode = request.parallel_mode or False
    max_concurrency = request.max_concurrency

    success, message = await manager.start(yolo_mode=yolo_mode, model=model)
    success, message = await manager.start(
        yolo_mode=yolo_mode,
        model=model,
        parallel_mode=parallel_mode,
        max_concurrency=max_concurrency,
    )

    return AgentActionResponse(
        success=success,

@@ -12,6 +12,9 @@ from pathlib import Path
from fastapi import APIRouter, HTTPException

from ..schemas import (
    DependencyGraphNode,
    DependencyGraphResponse,
    DependencyUpdate,
    FeatureBulkCreate,
    FeatureBulkCreateResponse,
    FeatureCreate,

@@ -72,11 +75,27 @@ def get_db_session(project_dir: Path):
        session.close()


def feature_to_response(f) -> FeatureResponse:
def feature_to_response(f, passing_ids: set[int] | None = None) -> FeatureResponse:
    """Convert a Feature model to a FeatureResponse.

    Handles legacy NULL values in boolean fields by treating them as False.
    Computes blocked status if passing_ids is provided.

    Args:
        f: Feature model instance
        passing_ids: Optional set of feature IDs that are passing (for computing blocked status)

    Returns:
        FeatureResponse with computed blocked status
    """
    deps = f.dependencies or []
    if passing_ids is None:
        blocking = []
        blocked = False
    else:
        blocking = [d for d in deps if d not in passing_ids]
        blocked = len(blocking) > 0

    return FeatureResponse(
        id=f.id,
        priority=f.priority,

@@ -84,9 +103,12 @@ def feature_to_response(f) -> FeatureResponse:
        name=f.name,
        description=f.description,
        steps=f.steps if isinstance(f.steps, list) else [],
        dependencies=deps,
        # Handle legacy NULL values gracefully - treat as False
        passes=f.passes if f.passes is not None else False,
        in_progress=f.in_progress if f.in_progress is not None else False,
        blocked=blocked,
        blocking_dependencies=blocking,
    )


@@ -119,12 +141,15 @@ async def list_features(project_name: str):
    with get_db_session(project_dir) as session:
        all_features = session.query(Feature).order_by(Feature.priority).all()

        # Compute passing IDs for blocked status calculation
        passing_ids = {f.id for f in all_features if f.passes}

        pending = []
        in_progress = []
        done = []

        for f in all_features:
            feature_response = feature_to_response(f)
            feature_response = feature_to_response(f, passing_ids)
            if f.passes:
                done.append(feature_response)
            elif f.in_progress:

@@ -174,6 +199,7 @@ async def create_feature(project_name: str, feature: FeatureCreate):
            name=feature.name,
            description=feature.description,
            steps=feature.steps,
            dependencies=feature.dependencies if feature.dependencies else None,
            passes=False,
            in_progress=False,
        )

@@ -190,6 +216,167 @@ async def create_feature(project_name: str, feature: FeatureCreate):
        raise HTTPException(status_code=500, detail="Failed to create feature")


# ============================================================================
# Static path endpoints - MUST be declared before /{feature_id} routes
# ============================================================================


@router.post("/bulk", response_model=FeatureBulkCreateResponse)
async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
    """
    Create multiple features at once.

    Features are assigned sequential priorities starting from:
    - starting_priority if specified (must be >= 1)
    - max(existing priorities) + 1 if not specified

    This is useful for:
    - Expanding a project with new features via AI
    - Importing features from external sources
    - Batch operations

    Returns:
        {"created": N, "features": [...]}
    """
    project_name = validate_project_name(project_name)
    project_dir = _get_project_path(project_name)

    if not project_dir:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")

    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    if not bulk.features:
        return FeatureBulkCreateResponse(created=0, features=[])

    # Validate starting_priority if provided
    if bulk.starting_priority is not None and bulk.starting_priority < 1:
        raise HTTPException(status_code=400, detail="starting_priority must be >= 1")

    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            # Determine starting priority with row-level lock to prevent race conditions
            if bulk.starting_priority is not None:
                current_priority = bulk.starting_priority
            else:
                # Lock the max priority row to prevent concurrent inserts from getting same priority
                max_priority_feature = (
                    session.query(Feature)
                    .order_by(Feature.priority.desc())
                    .with_for_update()
                    .first()
                )
                current_priority = (max_priority_feature.priority + 1) if max_priority_feature else 1

            created_ids = []

            for feature_data in bulk.features:
                db_feature = Feature(
                    priority=current_priority,
                    category=feature_data.category,
                    name=feature_data.name,
                    description=feature_data.description,
                    steps=feature_data.steps,
                    dependencies=feature_data.dependencies if feature_data.dependencies else None,
                    passes=False,
                    in_progress=False,
                )
                session.add(db_feature)
                session.flush()  # Flush to get the ID immediately
                created_ids.append(db_feature.id)
                current_priority += 1

            session.commit()

            # Query created features by their IDs (avoids relying on priority range)
            created_features = []
            for db_feature in session.query(Feature).filter(
                Feature.id.in_(created_ids)
            ).order_by(Feature.priority).all():
                created_features.append(feature_to_response(db_feature))

            return FeatureBulkCreateResponse(
                created=len(created_features),
                features=created_features
            )
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to bulk create features")
        raise HTTPException(status_code=500, detail="Failed to bulk create features")
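Editor's note: for context, a call to this endpoint might look like the sketch below; the base URL and payload values are invented, and only the `/bulk` path, the FeatureCreate fields, and the `{"created": N, "features": [...]}` response shape come from the code above:

```python
import httpx

# Hypothetical base URL; the router prefix is not shown in this diff.
resp = httpx.post(
    "http://localhost:8000/api/projects/demo/features/bulk",
    json={
        "features": [
            {"category": "auth", "name": "Login", "description": "Login form", "steps": ["render form"]},
            {"category": "auth", "name": "Logout", "description": "Logout button", "steps": ["clear session"]},
        ],
        # Omitting starting_priority appends after max(existing priorities).
    },
)
print(resp.json()["created"])  # -> 2 on success
```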


@router.get("/graph", response_model=DependencyGraphResponse)
async def get_dependency_graph(project_name: str):
    """Return dependency graph data for visualization.

    Returns nodes (features) and edges (dependencies) suitable for
    rendering with React Flow or similar graph libraries.
    """
    project_name = validate_project_name(project_name)
    project_dir = _get_project_path(project_name)

    if not project_dir:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")

    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    db_file = project_dir / "features.db"
    if not db_file.exists():
        return DependencyGraphResponse(nodes=[], edges=[])

    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            all_features = session.query(Feature).all()
            passing_ids = {f.id for f in all_features if f.passes}

            nodes = []
            edges = []

            for f in all_features:
                deps = f.dependencies or []
                blocking = [d for d in deps if d not in passing_ids]

                if f.passes:
                    status = "done"
                elif blocking:
                    status = "blocked"
                elif f.in_progress:
                    status = "in_progress"
                else:
                    status = "pending"

                nodes.append(DependencyGraphNode(
                    id=f.id,
                    name=f.name,
                    category=f.category,
                    status=status,
                    priority=f.priority,
                    dependencies=deps
                ))

                for dep_id in deps:
                    edges.append({"source": dep_id, "target": f.id})

            return DependencyGraphResponse(nodes=nodes, edges=edges)
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to get dependency graph")
        raise HTTPException(status_code=500, detail="Failed to get dependency graph")
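Editor's note: an illustrative (invented) payload for a three-feature project, matching the node and edge construction above:

```python
# Illustrative response only; field names follow DependencyGraphNode/Edge.
example_response = {
    "nodes": [
        {"id": 1, "name": "Login", "category": "auth", "status": "done", "priority": 1, "dependencies": []},
        {"id": 2, "name": "Logout", "category": "auth", "status": "pending", "priority": 2, "dependencies": [1]},
        {"id": 3, "name": "Profile", "category": "user", "status": "blocked", "priority": 3, "dependencies": [2]},
    ],
    # Edges point from the dependency (source) to the dependent feature (target).
    "edges": [{"source": 1, "target": 2}, {"source": 2, "target": 3}],
}
```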


# ============================================================================
# Parameterized path endpoints - /{feature_id} routes
# ============================================================================


@router.get("/{feature_id}", response_model=FeatureResponse)
async def get_feature(project_name: str, feature_id: int):
    """Get details of a specific feature."""

@@ -268,11 +455,17 @@ async def update_feature(project_name: str, feature_id: int, update: FeatureUpda
            feature.steps = update.steps
            if update.priority is not None:
                feature.priority = update.priority
            if update.dependencies is not None:
                feature.dependencies = update.dependencies if update.dependencies else None

            session.commit()
            session.refresh(feature)

            return feature_to_response(feature)
            # Compute passing IDs for response
            all_features = session.query(Feature).all()
            passing_ids = {f.id for f in all_features if f.passes}

            return feature_to_response(feature, passing_ids)
    except HTTPException:
        raise
    except Exception:

@@ -282,7 +475,12 @@ async def update_feature(project_name: str, feature_id: int, update: FeatureUpda

@router.delete("/{feature_id}")
async def delete_feature(project_name: str, feature_id: int):
    """Delete a feature."""
    """Delete a feature and clean up references in other features' dependencies.

    When a feature is deleted, any other features that depend on it will have
    that dependency removed from their dependencies list. This prevents orphaned
    dependencies that would permanently block features.
    """
    project_name = validate_project_name(project_name)
    project_dir = _get_project_path(project_name)

@@ -301,10 +499,24 @@ async def delete_feature(project_name: str, feature_id: int):
            if not feature:
                raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")

            # Clean up dependency references in other features
            # This prevents orphaned dependencies that would block features forever
            affected_features = []
            for f in session.query(Feature).all():
                if f.dependencies and feature_id in f.dependencies:
                    # Remove the deleted feature from this feature's dependencies
                    deps = [d for d in f.dependencies if d != feature_id]
                    f.dependencies = deps if deps else None
                    affected_features.append(f.id)

            session.delete(feature)
            session.commit()

            return {"success": True, "message": f"Feature {feature_id} deleted"}
            message = f"Feature {feature_id} deleted"
            if affected_features:
                message += f". Removed from dependencies of features: {affected_features}"

            return {"success": True, "message": message, "affected_features": affected_features}
    except HTTPException:
        raise
    except Exception:

@@ -352,22 +564,123 @@ async def skip_feature(project_name: str, feature_id: int):
        raise HTTPException(status_code=500, detail="Failed to skip feature")


@router.post("/bulk", response_model=FeatureBulkCreateResponse)
async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
# ============================================================================
# Dependency Management Endpoints
# ============================================================================


def _get_dependency_resolver():
    """Lazy import of dependency resolver."""
    import sys
    root = Path(__file__).parent.parent.parent
    if str(root) not in sys.path:
        sys.path.insert(0, str(root))
    from api.dependency_resolver import would_create_circular_dependency, MAX_DEPENDENCIES_PER_FEATURE
    return would_create_circular_dependency, MAX_DEPENDENCIES_PER_FEATURE


@router.post("/{feature_id}/dependencies/{dep_id}")
async def add_dependency(project_name: str, feature_id: int, dep_id: int):
    """Add a dependency relationship between features.

    The dep_id feature must be completed before feature_id can be started.
    Validates: self-reference, existence, circular dependencies, max limit.
    """
    Create multiple features at once.
    project_name = validate_project_name(project_name)

    Features are assigned sequential priorities starting from:
    - starting_priority if specified (must be >= 1)
    - max(existing priorities) + 1 if not specified
    # Security: Self-reference check
    if feature_id == dep_id:
        raise HTTPException(status_code=400, detail="A feature cannot depend on itself")

    This is useful for:
    - Expanding a project with new features via AI
    - Importing features from external sources
    - Batch operations
    project_dir = _get_project_path(project_name)

    Returns:
        {"created": N, "features": [...]}
    if not project_dir:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")

    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    would_create_circular_dependency, MAX_DEPENDENCIES_PER_FEATURE = _get_dependency_resolver()
    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            feature = session.query(Feature).filter(Feature.id == feature_id).first()
            dependency = session.query(Feature).filter(Feature.id == dep_id).first()

            if not feature:
                raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")
            if not dependency:
                raise HTTPException(status_code=404, detail=f"Dependency {dep_id} not found")

            current_deps = feature.dependencies or []

            # Security: Limit check
            if len(current_deps) >= MAX_DEPENDENCIES_PER_FEATURE:
                raise HTTPException(status_code=400, detail=f"Maximum {MAX_DEPENDENCIES_PER_FEATURE} dependencies allowed")

            if dep_id in current_deps:
                raise HTTPException(status_code=400, detail="Dependency already exists")

            # Security: Circular dependency check
            # source_id = feature_id (gaining dep), target_id = dep_id (being depended upon)
            all_features = [f.to_dict() for f in session.query(Feature).all()]
            if would_create_circular_dependency(all_features, feature_id, dep_id):
                raise HTTPException(status_code=400, detail="Would create circular dependency")

            current_deps.append(dep_id)
            feature.dependencies = sorted(current_deps)
            session.commit()

            return {"success": True, "feature_id": feature_id, "dependencies": feature.dependencies}
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to add dependency")
        raise HTTPException(status_code=500, detail="Failed to add dependency")


@router.delete("/{feature_id}/dependencies/{dep_id}")
async def remove_dependency(project_name: str, feature_id: int, dep_id: int):
    """Remove a dependency from a feature."""
    project_name = validate_project_name(project_name)
    project_dir = _get_project_path(project_name)

    if not project_dir:
        raise HTTPException(status_code=404, detail=f"Project '{project_name}' not found in registry")

    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            feature = session.query(Feature).filter(Feature.id == feature_id).first()
            if not feature:
                raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")

            current_deps = feature.dependencies or []
            if dep_id not in current_deps:
                raise HTTPException(status_code=400, detail="Dependency does not exist")

            current_deps.remove(dep_id)
            feature.dependencies = current_deps if current_deps else None
            session.commit()

            return {"success": True, "feature_id": feature_id, "dependencies": feature.dependencies or []}
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to remove dependency")
        raise HTTPException(status_code=500, detail="Failed to remove dependency")


@router.put("/{feature_id}/dependencies")
async def set_dependencies(project_name: str, feature_id: int, update: DependencyUpdate):
    """Set all dependencies for a feature at once, replacing any existing.

    Validates: self-reference, existence of all dependencies, circular dependencies, max limit.
    """
    project_name = validate_project_name(project_name)
    project_dir = _get_project_path(project_name)

@@ -378,62 +691,56 @@ async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
    if not project_dir.exists():
        raise HTTPException(status_code=404, detail="Project directory not found")

    if not bulk.features:
        return FeatureBulkCreateResponse(created=0, features=[])
    dependency_ids = update.dependency_ids

    # Validate starting_priority if provided
    if bulk.starting_priority is not None and bulk.starting_priority < 1:
        raise HTTPException(status_code=400, detail="starting_priority must be >= 1")
    # Security: Self-reference check
    if feature_id in dependency_ids:
        raise HTTPException(status_code=400, detail="A feature cannot depend on itself")

    # Check for duplicates
    if len(dependency_ids) != len(set(dependency_ids)):
        raise HTTPException(status_code=400, detail="Duplicate dependencies not allowed")

    would_create_circular_dependency, _ = _get_dependency_resolver()
    _, Feature = _get_db_classes()

    try:
        with get_db_session(project_dir) as session:
            # Determine starting priority with row-level lock to prevent race conditions
            if bulk.starting_priority is not None:
                current_priority = bulk.starting_priority
            else:
                # Lock the max priority row to prevent concurrent inserts from getting same priority
                max_priority_feature = (
                    session.query(Feature)
                    .order_by(Feature.priority.desc())
                    .with_for_update()
                    .first()
                )
                current_priority = (max_priority_feature.priority + 1) if max_priority_feature else 1
            feature = session.query(Feature).filter(Feature.id == feature_id).first()
            if not feature:
                raise HTTPException(status_code=404, detail=f"Feature {feature_id} not found")

            created_ids = []
            # Validate all dependencies exist
            all_feature_ids = {f.id for f in session.query(Feature).all()}
            missing = [d for d in dependency_ids if d not in all_feature_ids]
            if missing:
                raise HTTPException(status_code=400, detail=f"Dependencies not found: {missing}")

            for feature_data in bulk.features:
                db_feature = Feature(
                    priority=current_priority,
                    category=feature_data.category,
                    name=feature_data.name,
                    description=feature_data.description,
                    steps=feature_data.steps,
                    passes=False,
                    in_progress=False,
                )
                session.add(db_feature)
                session.flush()  # Flush to get the ID immediately
                created_ids.append(db_feature.id)
                current_priority += 1
            # Check for circular dependencies
            all_features = [f.to_dict() for f in session.query(Feature).all()]
            # Temporarily update the feature's dependencies for cycle check
            test_features = []
            for f in all_features:
                if f["id"] == feature_id:
                    test_features.append({**f, "dependencies": dependency_ids})
                else:
                    test_features.append(f)

            for dep_id in dependency_ids:
                # source_id = feature_id (gaining dep), target_id = dep_id (being depended upon)
                if would_create_circular_dependency(test_features, feature_id, dep_id):
                    raise HTTPException(
                        status_code=400,
                        detail=f"Cannot add dependency {dep_id}: would create circular dependency"
                    )

            # Set dependencies
            feature.dependencies = sorted(dependency_ids) if dependency_ids else None
            session.commit()

            # Query created features by their IDs (avoids relying on priority range)
            created_features = []
            for db_feature in session.query(Feature).filter(
                Feature.id.in_(created_ids)
            ).order_by(Feature.priority).all():
                created_features.append(feature_to_response(db_feature))

            return FeatureBulkCreateResponse(
                created=len(created_features),
                features=created_features
            )
            return {"success": True, "feature_id": feature_id, "dependencies": feature.dependencies or []}
    except HTTPException:
        raise
    except Exception:
        logger.exception("Failed to bulk create features")
        raise HTTPException(status_code=500, detail="Failed to bulk create features")
        logger.exception("Failed to set dependencies")
        raise HTTPException(status_code=500, detail="Failed to set dependencies")

@@ -80,6 +80,7 @@ class FeatureBase(BaseModel):
    name: str
    description: str
    steps: list[str]
    dependencies: list[int] = Field(default_factory=list)  # Optional dependencies


class FeatureCreate(FeatureBase):

@@ -94,6 +95,7 @@ class FeatureUpdate(BaseModel):
    description: str | None = None
    steps: list[str] | None = None
    priority: int | None = None
    dependencies: list[int] | None = None  # Optional - can update dependencies


class FeatureResponse(FeatureBase):

@@ -102,6 +104,8 @@ class FeatureResponse(FeatureBase):
    priority: int
    passes: bool
    in_progress: bool
    blocked: bool = False  # Computed: has unmet dependencies
    blocking_dependencies: list[int] = Field(default_factory=list)  # Computed

    class Config:
        from_attributes = True

@@ -126,6 +130,37 @@ class FeatureBulkCreateResponse(BaseModel):
    features: list[FeatureResponse]


# ============================================================================
# Dependency Graph Schemas
# ============================================================================


class DependencyGraphNode(BaseModel):
    """Minimal node for graph visualization (no description exposed for security)."""
    id: int
    name: str
    category: str
    status: Literal["pending", "in_progress", "done", "blocked"]
    priority: int
    dependencies: list[int]


class DependencyGraphEdge(BaseModel):
    """Edge in the dependency graph."""
    source: int
    target: int


class DependencyGraphResponse(BaseModel):
    """Response for dependency graph visualization."""
    nodes: list[DependencyGraphNode]
    edges: list[DependencyGraphEdge]


class DependencyUpdate(BaseModel):
    """Request schema for updating a feature's dependencies."""
    dependency_ids: list[int] = Field(..., max_length=20)  # Security: limit


# ============================================================================
# Agent Schemas
# ============================================================================

@@ -134,6 +169,8 @@ class AgentStartRequest(BaseModel):
    """Request schema for starting the agent."""
    yolo_mode: bool | None = None  # None means use global settings
    model: str | None = None  # None means use global settings
    parallel_mode: bool | None = None  # Enable parallel execution
    max_concurrency: int | None = None  # Max concurrent agents (1-5)

    @field_validator('model')
    @classmethod

@@ -143,6 +180,14 @@ class AgentStartRequest(BaseModel):
            raise ValueError(f"Invalid model. Must be one of: {VALID_MODELS}")
        return v

    @field_validator('max_concurrency')
    @classmethod
    def validate_concurrency(cls, v: int | None) -> int | None:
        """Validate max_concurrency is between 1 and 5."""
        if v is not None and (v < 1 or v > 5):
            raise ValueError("max_concurrency must be between 1 and 5")
        return v


class AgentStatus(BaseModel):
    """Current agent status."""

@@ -151,6 +196,8 @@ class AgentStatus(BaseModel):
    started_at: datetime | None = None
    yolo_mode: bool = False
    model: str | None = None  # Model being used by running agent
    parallel_mode: bool = False
    max_concurrency: int | None = None


class AgentActionResponse(BaseModel):

@@ -180,6 +227,7 @@ class WSProgressMessage(BaseModel):
    """WebSocket message for progress updates."""
    type: Literal["progress"] = "progress"
    passing: int
    in_progress: int
    total: int
    percentage: float

@@ -196,6 +244,8 @@ class WSLogMessage(BaseModel):
    type: Literal["log"] = "log"
    line: str
    timestamp: datetime
    featureId: int | None = None
    agentIndex: int | None = None


class WSAgentStatusMessage(BaseModel):

@@ -204,6 +254,25 @@ class WSAgentStatusMessage(BaseModel):
    status: str


# Agent state for multi-agent tracking
AgentState = Literal["idle", "thinking", "working", "testing", "success", "error", "struggling"]

# Agent mascot names assigned by index
AGENT_MASCOTS = ["Spark", "Fizz", "Octo", "Hoot", "Buzz"]


class WSAgentUpdateMessage(BaseModel):
    """WebSocket message for multi-agent status updates."""
    type: Literal["agent_update"] = "agent_update"
    agentIndex: int
    agentName: str  # One of AGENT_MASCOTS
    featureId: int
    featureName: str
    state: AgentState
    thought: str | None = None
    timestamp: datetime
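Editor's note: serialized over the WebSocket, a message conforming to this schema would look roughly like the following (all values invented for illustration; field names match the schema above):

```python
# Illustrative WSAgentUpdateMessage payload.
example_update = {
    "type": "agent_update",
    "agentIndex": 0,
    "agentName": "Spark",          # AGENT_MASCOTS[0]
    "featureId": 7,
    "featureName": "Login form",
    "state": "testing",            # one of the AgentState literals
    "thought": "Running tests for the login flow",
    "timestamp": "2026-01-30T06:12:06+00:00",
}
```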


# ============================================================================
# Spec Chat Schemas
# ============================================================================

@@ -80,6 +80,8 @@ class AgentProcessManager:
        self._output_task: asyncio.Task | None = None
        self.yolo_mode: bool = False  # YOLO mode for rapid prototyping
        self.model: str | None = None  # Model being used
        self.parallel_mode: bool = False  # Parallel execution mode
        self.max_concurrency: int | None = None  # Max concurrent agents

        # Support multiple callbacks (for multiple WebSocket clients)
        self._output_callbacks: Set[Callable[[str], Awaitable[None]]] = set()

@@ -241,13 +243,21 @@ class AgentProcessManager:
        self.status = "stopped"
        self._remove_lock()

    async def start(self, yolo_mode: bool = False, model: str | None = None) -> tuple[bool, str]:
    async def start(
        self,
        yolo_mode: bool = False,
        model: str | None = None,
        parallel_mode: bool = False,
        max_concurrency: int | None = None,
    ) -> tuple[bool, str]:
        """
        Start the agent as a subprocess.

        Args:
            yolo_mode: If True, run in YOLO mode (no browser testing)
            model: Model to use (e.g., claude-opus-4-5-20251101)
            parallel_mode: If True, run multiple features in parallel
            max_concurrency: Max concurrent agents (default 3 if parallel enabled)

        Returns:
            Tuple of (success, message)

@@ -261,6 +271,8 @@ class AgentProcessManager:
        # Store for status queries
        self.yolo_mode = yolo_mode
        self.model = model
        self.parallel_mode = parallel_mode
        self.max_concurrency = max_concurrency

        # Build command - pass absolute path to project directory
        cmd = [

@@ -278,6 +290,11 @@ class AgentProcessManager:
        if yolo_mode:
            cmd.append("--yolo")

        # Add --parallel flag if parallel mode is enabled
        if parallel_mode:
            cmd.append("--parallel")
            cmd.append(str(max_concurrency or 3))  # Default to 3 concurrent agents

        try:
            # Start subprocess with piped stdout/stderr
            # Use project_dir as cwd so Claude SDK sandbox allows access to project files

@@ -340,6 +357,8 @@ class AgentProcessManager:
            self.started_at = None
            self.yolo_mode = False  # Reset YOLO mode
            self.model = None  # Reset model
            self.parallel_mode = False  # Reset parallel mode
            self.max_concurrency = None  # Reset concurrency

            return True, "Agent stopped"
        except Exception as e:

@@ -422,6 +441,8 @@ class AgentProcessManager:
            "started_at": self.started_at.isoformat() if self.started_at else None,
            "yolo_mode": self.yolo_mode,
            "model": self.model,
            "parallel_mode": self.parallel_mode,
            "max_concurrency": self.max_concurrency,
        }


@@ -15,6 +15,7 @@ from typing import Set

from fastapi import WebSocket, WebSocketDisconnect

from .schemas import AGENT_MASCOTS
from .services.dev_server_manager import get_devserver_manager
from .services.process_manager import get_manager

@@ -23,6 +24,177 @@ _count_passing_tests = None

logger = logging.getLogger(__name__)

# Pattern to extract feature ID from parallel orchestrator output
FEATURE_ID_PATTERN = re.compile(r'\[Feature #(\d+)\]\s*(.*)')

# Patterns for detecting agent activity and thoughts
THOUGHT_PATTERNS = [
    # Claude's tool usage patterns (actual format: [Tool: name])
    (re.compile(r'\[Tool:\s*Read\]', re.I), 'thinking'),
    (re.compile(r'\[Tool:\s*(?:Write|Edit|NotebookEdit)\]', re.I), 'working'),
    (re.compile(r'\[Tool:\s*Bash\]', re.I), 'testing'),
    (re.compile(r'\[Tool:\s*(?:Glob|Grep)\]', re.I), 'thinking'),
    (re.compile(r'\[Tool:\s*(\w+)\]', re.I), 'working'),  # Fallback for other tools
    # Claude's internal thoughts
    (re.compile(r'(?:Reading|Analyzing|Checking|Looking at|Examining)\s+(.+)', re.I), 'thinking'),
    (re.compile(r'(?:Creating|Writing|Adding|Implementing|Building)\s+(.+)', re.I), 'working'),
    (re.compile(r'(?:Testing|Verifying|Running tests|Validating)\s+(.+)', re.I), 'testing'),
    (re.compile(r'(?:Error|Failed|Cannot|Unable to|Exception)\s+(.+)', re.I), 'struggling'),
    # Test results
    (re.compile(r'(?:PASS|passed|success)', re.I), 'success'),
    (re.compile(r'(?:FAIL|failed|error)', re.I), 'struggling'),
]
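Editor's note: as a quick illustration of how this first-match-wins scan behaves, here is a standalone demo over a trimmed copy of two of the patterns above (not the module itself):

```python
import re

# Trimmed, standalone copy of two patterns for demonstration only.
THOUGHT_PATTERNS_DEMO = [
    (re.compile(r'\[Tool:\s*Bash\]', re.I), 'testing'),
    (re.compile(r'\[Tool:\s*(\w+)\]', re.I), 'working'),
]

def classify(content: str) -> tuple[str, str | None]:
    for pattern, detected_state in THOUGHT_PATTERNS_DEMO:
        m = pattern.search(content)
        if m:
            # A capture group becomes the "thought"; otherwise a content snippet.
            return detected_state, m.group(1) if m.lastindex else content[:100]
    return 'working', None  # default state when nothing matches

print(classify("[Tool: Bash] running pytest"))  # -> ('testing', '[Tool: Bash] running pytest')
print(classify("[Tool: WebFetch] docs page"))   # -> ('working', 'WebFetch') via the fallback
```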


class AgentTracker:
    """Tracks active agents and their states for multi-agent mode."""

    def __init__(self):
        # feature_id -> {name, state, last_thought, agent_index}
        self.active_agents: dict[int, dict] = {}
        self._next_agent_index = 0
        self._lock = asyncio.Lock()

    async def process_line(self, line: str) -> dict | None:
        """
        Process an output line and return an agent_update message if relevant.

        Returns None if no update should be emitted.
        """
        # Check for feature-specific output
        match = FEATURE_ID_PATTERN.match(line)
        if not match:
            # Also check for orchestrator status messages
            if line.startswith("Started agent for feature #"):
                try:
                    feature_id = int(re.search(r'#(\d+)', line).group(1))
                    return await self._handle_agent_start(feature_id, line)
                except (AttributeError, ValueError):
                    pass
            elif line.startswith("Feature #") and ("completed" in line or "failed" in line):
                try:
                    feature_id = int(re.search(r'#(\d+)', line).group(1))
                    is_success = "completed" in line
                    return await self._handle_agent_complete(feature_id, is_success)
                except (AttributeError, ValueError):
                    pass
            return None

        feature_id = int(match.group(1))
        content = match.group(2)

        async with self._lock:
            # Ensure agent is tracked
            if feature_id not in self.active_agents:
                agent_index = self._next_agent_index
                self._next_agent_index += 1
                self.active_agents[feature_id] = {
                    'name': AGENT_MASCOTS[agent_index % len(AGENT_MASCOTS)],
                    'agent_index': agent_index,
                    'state': 'thinking',
                    'feature_name': f'Feature #{feature_id}',
                    'last_thought': None,
                }

            agent = self.active_agents[feature_id]

            # Detect state and thought from content
            state = 'working'
            thought = None

            for pattern, detected_state in THOUGHT_PATTERNS:
                m = pattern.search(content)
                if m:
                    state = detected_state
                    thought = m.group(1) if m.lastindex else content[:100]
                    break

            # Only emit update if state changed or we have a new thought
            if state != agent['state'] or thought != agent['last_thought']:
                agent['state'] = state
                if thought:
                    agent['last_thought'] = thought

                return {
                    'type': 'agent_update',
                    'agentIndex': agent['agent_index'],
                    'agentName': agent['name'],
                    'featureId': feature_id,
                    'featureName': agent['feature_name'],
                    'state': state,
                    'thought': thought,
                    'timestamp': datetime.now().isoformat(),
                }

        return None

    def get_agent_info(self, feature_id: int) -> tuple[int | None, str | None]:
        """Get agent index and name for a feature ID.

        Returns:
            Tuple of (agentIndex, agentName) or (None, None) if not tracked.
        """
        agent = self.active_agents.get(feature_id)
        if agent:
            return agent['agent_index'], agent['name']
        return None, None

    async def _handle_agent_start(self, feature_id: int, line: str) -> dict | None:
        """Handle agent start message from orchestrator."""
        async with self._lock:
            agent_index = self._next_agent_index
            self._next_agent_index += 1

            # Try to extract feature name from line
            feature_name = f'Feature #{feature_id}'
            name_match = re.search(r'#\d+:\s*(.+)$', line)
            if name_match:
                feature_name = name_match.group(1)

            self.active_agents[feature_id] = {
                'name': AGENT_MASCOTS[agent_index % len(AGENT_MASCOTS)],
                'agent_index': agent_index,
                'state': 'thinking',
                'feature_name': feature_name,
                'last_thought': 'Starting work...',
            }

            return {
                'type': 'agent_update',
                'agentIndex': agent_index,
                'agentName': AGENT_MASCOTS[agent_index % len(AGENT_MASCOTS)],
                'featureId': feature_id,
                'featureName': feature_name,
                'state': 'thinking',
                'thought': 'Starting work...',
                'timestamp': datetime.now().isoformat(),
            }

    async def _handle_agent_complete(self, feature_id: int, is_success: bool) -> dict | None:
        """Handle agent completion message from orchestrator."""
        async with self._lock:
            if feature_id not in self.active_agents:
                return None

            agent = self.active_agents[feature_id]
            state = 'success' if is_success else 'error'

            result = {
                'type': 'agent_update',
                'agentIndex': agent['agent_index'],
                'agentName': agent['name'],
                'featureId': feature_id,
                'featureName': agent['feature_name'],
                'state': state,
                'thought': 'Completed successfully!' if is_success else 'Failed to complete',
                'timestamp': datetime.now().isoformat(),
            }

            # Remove from active agents
            del self.active_agents[feature_id]

            return result
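Editor's note: a minimal way to exercise the tracker, assuming the class above is importable and feeding it invented orchestrator-style lines that match the patterns it expects:

```python
import asyncio

async def demo() -> None:
    tracker = AgentTracker()
    # Orchestrator start line -> 'thinking' update with the feature name.
    print(await tracker.process_line("Started agent for feature #3: Login form"))
    # Per-feature output line -> 'testing' update via the Bash tool pattern.
    print(await tracker.process_line("[Feature #3] [Tool: Bash] running tests"))
    # Completion line -> 'success' update, and the agent is untracked.
    print(await tracker.process_line("Feature #3 completed"))

asyncio.run(demo())
```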


def _get_project_path(project_name: str) -> Path:
    """Get project path from registry."""

@@ -171,14 +343,38 @@ async def project_websocket(websocket: WebSocket, project_name: str):
    # Get agent manager and register callbacks
    agent_manager = get_manager(project_name, project_dir, ROOT_DIR)

    # Create agent tracker for multi-agent mode
    agent_tracker = AgentTracker()

    async def on_output(line: str):
        """Handle agent output - broadcast to this WebSocket."""
        try:
            await websocket.send_json({
            # Extract feature ID from line if present
            feature_id = None
            agent_index = None
            match = FEATURE_ID_PATTERN.match(line)
            if match:
                feature_id = int(match.group(1))
                agent_index, _ = agent_tracker.get_agent_info(feature_id)

            # Send the raw log line with optional feature/agent attribution
            log_msg = {
                "type": "log",
                "line": line,
                "timestamp": datetime.now().isoformat(),
            })
            }
            if feature_id is not None:
                log_msg["featureId"] = feature_id
            if agent_index is not None:
                log_msg["agentIndex"] = agent_index

            await websocket.send_json(log_msg)

            # Check if this line indicates agent activity (parallel mode)
            # and emit agent_update messages if so
            agent_update = await agent_tracker.process_line(line)
            if agent_update:
                await websocket.send_json(agent_update)
        except Exception:
            pass  # Connection may be closed