fix: address code review feedback from coderabbitai

- Add language specifier to fenced code block in expand-project.md
- Remove detailed exception strings from WebSocket responses (security)
- Make WebSocket "start" message idempotent to avoid session reset
- Fix race condition in bulk feature creation with row-level lock
- Add validation for starting_priority (must be >= 1)
- Fix _query_claude to handle multiple feature blocks and deduplicate
- Add FileReader error handling in ExpandProjectChat
- Fix disconnect() to clear pending reconnect timeout
- Enable sandbox mode and validate CLI path in expand_chat_session
- Clean up temporary settings file on session close

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
Author: Dan Gentry
Date:   2026-01-09 17:16:06 -05:00
parent 5f06dcf464
commit 75f2bf2a10
6 changed files with 90 additions and 43 deletions

View File

@@ -144,7 +144,7 @@ Once the user approves, create features directly.
 **Then output the features in this exact JSON format (the system will parse this):**
-```
+```json
 <features_to_create>
 [
   {

View File

@@ -161,12 +161,22 @@ async def expand_project_websocket(websocket: WebSocket, project_name: str):
             continue

         elif msg_type == "start":
-            # Create and start a new expansion session
-            session = await create_expand_session(project_name, project_dir)
+            # Check if session already exists (idempotent start)
+            existing_session = get_expand_session(project_name)
+            if existing_session:
+                session = existing_session
+                await websocket.send_json({
+                    "type": "text",
+                    "content": "Resuming existing expansion session. What would you like to add?"
+                })
+                await websocket.send_json({"type": "response_done"})
+            else:
+                # Create and start a new expansion session
+                session = await create_expand_session(project_name, project_dir)

             # Stream the initial greeting
             async for chunk in session.start():
                 await websocket.send_json(chunk)

         elif msg_type == "message":
             # User sent a message
@@ -192,7 +202,7 @@ async def expand_project_websocket(websocket: WebSocket, project_name: str):
                     logger.warning(f"Invalid attachment data: {e}")
                     await websocket.send_json({
                         "type": "error",
-                        "content": f"Invalid attachment: {str(e)}"
+                        "content": "Invalid attachment format"
                     })
                     continue
@@ -236,7 +246,7 @@ async def expand_project_websocket(websocket: WebSocket, project_name: str):
         try:
             await websocket.send_json({
                 "type": "error",
-                "content": f"Server error: {str(e)}"
+                "content": "Internal server error"
             })
         except Exception:
             pass
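
Both message changes in this file follow the same pattern: keep the exception detail in the server log and send the client only a generic string. A minimal sketch of that pattern, assuming a FastAPI `WebSocket` and a module-level `logger` as in the route (the helper name `safe_error_reply` is illustrative, not part of the diff):

```python
import logging

from fastapi import WebSocket

logger = logging.getLogger(__name__)


async def safe_error_reply(websocket: WebSocket, exc: Exception) -> None:
    # The full traceback stays server-side only.
    logger.exception("Unhandled error in expand-project websocket", exc_info=exc)
    try:
        # The client learns only that something went wrong, never the exception text.
        await websocket.send_json({"type": "error", "content": "Internal server error"})
    except Exception:
        # The socket may already be closed; there is nothing more to report.
        pass
```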

View File

@@ -305,7 +305,7 @@ async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
     Create multiple features at once.

     Features are assigned sequential priorities starting from:
-    - starting_priority if specified
+    - starting_priority if specified (must be >= 1)
     - max(existing priorities) + 1 if not specified

     This is useful for:
@@ -328,18 +328,28 @@ async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
     if not bulk.features:
         return FeatureBulkCreateResponse(created=0, features=[])

+    # Validate starting_priority if provided
+    if bulk.starting_priority is not None and bulk.starting_priority < 1:
+        raise HTTPException(status_code=400, detail="starting_priority must be >= 1")
+
     _, Feature = _get_db_classes()

     try:
         with get_db_session(project_dir) as session:
-            # Determine starting priority
+            # Determine starting priority with row-level lock to prevent race conditions
             if bulk.starting_priority is not None:
                 current_priority = bulk.starting_priority
             else:
-                max_priority_feature = session.query(Feature).order_by(Feature.priority.desc()).first()
+                # Lock the max priority row to prevent concurrent inserts from getting same priority
+                max_priority_feature = (
+                    session.query(Feature)
+                    .order_by(Feature.priority.desc())
+                    .with_for_update()
+                    .first()
+                )
                 current_priority = (max_priority_feature.priority + 1) if max_priority_feature else 1

-            created_features = []
+            created_ids = []
             for feature_data in bulk.features:
                 db_feature = Feature(
@@ -351,20 +361,16 @@ async def create_features_bulk(project_name: str, bulk: FeatureBulkCreate):
                     passes=False,
                 )
                 session.add(db_feature)
+                session.flush()  # Flush to get the ID immediately
+                created_ids.append(db_feature.id)
                 current_priority += 1

             session.commit()

-            # Refresh to get IDs and return responses
-            for db_feature in session.query(Feature).order_by(Feature.priority.desc()).limit(len(bulk.features)).all():
-                created_features.insert(0, feature_to_response(db_feature))
-
-            # Re-query to get the actual created features in order
+            # Query created features by their IDs (avoids relying on priority range)
             created_features = []
-            start_priority = current_priority - len(bulk.features)
             for db_feature in session.query(Feature).filter(
-                Feature.priority >= start_priority,
-                Feature.priority < current_priority
+                Feature.id.in_(created_ids)
             ).order_by(Feature.priority).all():
                 created_features.append(feature_to_response(db_feature))
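
Isolated from the route, a minimal sketch of the locking step, assuming the SQLAlchemy `session` and `Feature` model used above (the helper name `next_priority` is illustrative; whether `SELECT ... FOR UPDATE` actually blocks depends on the backing database and dialect):

```python
def next_priority(session, Feature) -> int:
    # Lock the current highest-priority row so two concurrent bulk-create
    # requests cannot both read the same maximum and assign duplicate priorities.
    max_row = (
        session.query(Feature)
        .order_by(Feature.priority.desc())
        .with_for_update()
        .first()
    )
    return (max_row.priority + 1) if max_row else 1
```

The explicit `starting_priority >= 1` check added in the same hunk keeps callers from pushing zero or negative priorities past this logic.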

View File

@@ -67,6 +67,7 @@ class ExpandChatSession:
         self._client_entered: bool = False
         self.features_created: int = 0
         self.created_feature_ids: list[int] = []
+        self._settings_file: Optional[Path] = None

     async def close(self) -> None:
         """Clean up resources and close the Claude client."""
@@ -79,6 +80,13 @@
             self._client_entered = False
             self.client = None

+        # Clean up temporary settings file
+        if self._settings_file and self._settings_file.exists():
+            try:
+                self._settings_file.unlink()
+            except Exception as e:
+                logger.warning(f"Error removing settings file: {e}")
+
     async def start(self) -> AsyncGenerator[dict, None]:
         """
         Initialize session and get initial greeting from Claude.
@@ -111,7 +119,7 @@
         # Create security settings file
         security_settings = {
-            "sandbox": {"enabled": False},
+            "sandbox": {"enabled": True},
             "permissions": {
                 "defaultMode": "acceptEdits",
                 "allow": [
@@ -121,6 +129,7 @@
             },
         }
         settings_file = self.project_dir / ".claude_settings.json"
+        self._settings_file = settings_file
         with open(settings_file, "w") as f:
             json.dump(security_settings, f, indent=2)
@@ -128,8 +137,14 @@
         project_path = str(self.project_dir.resolve())
         system_prompt = skill_content.replace("$ARGUMENTS", project_path)

-        # Create Claude SDK client
+        # Find and validate Claude CLI
         system_cli = shutil.which("claude")
+        if not system_cli:
+            yield {
+                "type": "error",
+                "content": "Claude CLI not found. Please install Claude Code."
+            }
+            return
+
         try:
             self.client = ClaudeSDKClient(
                 options=ClaudeAgentOptions(
@@ -268,20 +283,35 @@
                         "timestamp": datetime.now().isoformat()
                     })

-            # Check for feature creation block in full response
-            features_match = re.search(
+            # Check for feature creation blocks in full response (handle multiple blocks)
+            features_matches = re.findall(
                 r'<features_to_create>\s*(\[[\s\S]*?\])\s*</features_to_create>',
                 full_response
             )
-            if features_match:
-                try:
-                    features_json = features_match.group(1)
-                    features_data = json.loads(features_json)
-                    if features_data and isinstance(features_data, list):
-                        # Create features via REST API
-                        created = await self._create_features_bulk(features_data)
+            if features_matches:
+                # Collect all features from all blocks, deduplicating by name
+                all_features: list[dict] = []
+                seen_names: set[str] = set()
+                for features_json in features_matches:
+                    try:
+                        features_data = json.loads(features_json)
+                        if features_data and isinstance(features_data, list):
+                            for feature in features_data:
+                                name = feature.get("name", "")
+                                if name and name not in seen_names:
+                                    seen_names.add(name)
+                                    all_features.append(feature)
+                    except json.JSONDecodeError as e:
+                        logger.error(f"Failed to parse features JSON block: {e}")
+                        # Continue processing other blocks
+
+                if all_features:
+                    try:
+                        # Create all deduplicated features
+                        created = await self._create_features_bulk(all_features)
                         if created:
                             self.features_created += len(created)
@@ -294,18 +324,12 @@
                             }
                             logger.info(f"Created {len(created)} features for {self.project_name}")

-                except json.JSONDecodeError as e:
-                    logger.error(f"Failed to parse features JSON: {e}")
+                    except Exception as e:
+                        logger.exception("Failed to create features")
                         yield {
                             "type": "error",
-                            "content": f"Failed to parse feature definitions: {str(e)}"
+                            "content": "Failed to create features"
                         }
-                except Exception as e:
-                    logger.exception("Failed to create features")
-                    yield {
-                        "type": "error",
-                        "content": f"Failed to create features: {str(e)}"
-                    }

     async def _create_features_bulk(self, features: list[dict]) -> list[dict]:
         """

View File

@@ -121,6 +121,9 @@ export function ExpandProjectChat({
        setPendingAttachments((prev) => [...prev, attachment])
      }
+     reader.onerror = () => {
+       setError(`Failed to read file: ${file.name}`)
+     }
      reader.readAsDataURL(file)
    })
  }, [])

View File

@@ -302,6 +302,10 @@ export function useExpandChat({
      clearInterval(pingIntervalRef.current)
      pingIntervalRef.current = null
    }
+   if (reconnectTimeoutRef.current) {
+     clearTimeout(reconnectTimeoutRef.current)
+     reconnectTimeoutRef.current = null
+   }
    if (wsRef.current) {
      wsRef.current.close()
      wsRef.current = null