Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-01-31 06:42:03 +00:00)

Compare commits (13 commits)
| Author | SHA1 | Date |
|---|---|---|
|  | 6c669fbe6a |  |
|  | 8a6a83bf52 |  |
|  | 84b582ffa7 |  |
|  | bd5176165d |  |
|  | 49f32c4d59 |  |
|  | 0af5bc86f4 |  |
|  | bc5a36c5f4 |  |
|  | 2934d73db2 |  |
|  | a4968f7235 |  |
|  | b8e0c18c53 |  |
|  | d0b3e0d9bb |  |
|  | 2a0719e00c |  |
|  | af394183e6 |  |
Dockerfile (71 changed lines)
```diff
@@ -8,10 +8,12 @@
 # =============================================================================
 # BASE STAGE - Common setup for all builds (DRY: defined once, used by all)
 # =============================================================================
-FROM node:22-alpine AS base
+FROM node:22-slim AS base
 
 # Install build dependencies for native modules (node-pty)
-RUN apk add --no-cache python3 make g++
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    python3 make g++ \
+    && rm -rf /var/lib/apt/lists/*
 
 WORKDIR /app
 
@@ -51,32 +53,59 @@ RUN npm run build:packages && npm run build --workspace=apps/server
 # =============================================================================
 # SERVER PRODUCTION STAGE
 # =============================================================================
-FROM node:22-alpine AS server
+FROM node:22-slim AS server
 
-# Install git, curl, bash (for terminal), su-exec (for user switching), and GitHub CLI (pinned version, multi-arch)
-RUN apk add --no-cache git curl bash su-exec && \
-    GH_VERSION="2.63.2" && \
-    ARCH=$(uname -m) && \
-    case "$ARCH" in \
+# Install git, curl, bash (for terminal), gosu (for user switching), and GitHub CLI (pinned version, multi-arch)
+RUN apt-get update && apt-get install -y --no-install-recommends \
+    git curl bash gosu ca-certificates \
+    && GH_VERSION="2.63.2" \
+    && ARCH=$(uname -m) \
+    && case "$ARCH" in \
         x86_64) GH_ARCH="amd64" ;; \
         aarch64|arm64) GH_ARCH="arm64" ;; \
         *) echo "Unsupported architecture: $ARCH" && exit 1 ;; \
-    esac && \
-    curl -L "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${GH_ARCH}.tar.gz" -o gh.tar.gz && \
-    tar -xzf gh.tar.gz && \
-    mv gh_${GH_VERSION}_linux_${GH_ARCH}/bin/gh /usr/local/bin/gh && \
-    rm -rf gh.tar.gz gh_${GH_VERSION}_linux_${GH_ARCH}
+    esac \
+    && curl -L "https://github.com/cli/cli/releases/download/v${GH_VERSION}/gh_${GH_VERSION}_linux_${GH_ARCH}.tar.gz" -o gh.tar.gz \
+    && tar -xzf gh.tar.gz \
+    && mv gh_${GH_VERSION}_linux_${GH_ARCH}/bin/gh /usr/local/bin/gh \
+    && rm -rf gh.tar.gz gh_${GH_VERSION}_linux_${GH_ARCH} \
+    && rm -rf /var/lib/apt/lists/*
 
-# Install Claude CLI globally
+# Install Claude CLI globally (available to all users via npm global bin)
 RUN npm install -g @anthropic-ai/claude-code
 
-WORKDIR /app
-
-# Create non-root user with home directory
-RUN addgroup -g 1001 -S automaker && \
-    adduser -S automaker -u 1001 -h /home/automaker && \
-    mkdir -p /home/automaker && \
-    chown automaker:automaker /home/automaker
+# Create non-root user with home directory BEFORE installing Cursor CLI
+RUN groupadd -g 1001 automaker && \
+    useradd -u 1001 -g automaker -m -d /home/automaker -s /bin/bash automaker && \
+    mkdir -p /home/automaker/.local/bin && \
+    mkdir -p /home/automaker/.cursor && \
+    chown -R automaker:automaker /home/automaker && \
+    chmod 700 /home/automaker/.cursor
+
+# Install Cursor CLI as the automaker user
+# Set HOME explicitly and install to /home/automaker/.local/bin/
+USER automaker
+ENV HOME=/home/automaker
+RUN curl https://cursor.com/install -fsS | bash && \
+    echo "=== Checking Cursor CLI installation ===" && \
+    ls -la /home/automaker/.local/bin/ && \
+    echo "=== PATH is: $PATH ===" && \
+    (which cursor-agent && cursor-agent --version) || echo "cursor-agent installed (may need auth setup)"
+USER root
+
+# Add PATH to profile so it's available in all interactive shells (for login shells)
+RUN mkdir -p /etc/profile.d && \
+    echo 'export PATH="/home/automaker/.local/bin:$PATH"' > /etc/profile.d/cursor-cli.sh && \
+    chmod +x /etc/profile.d/cursor-cli.sh
+
+# Add to automaker's .bashrc for bash interactive shells
+RUN echo 'export PATH="/home/automaker/.local/bin:$PATH"' >> /home/automaker/.bashrc && \
+    chown automaker:automaker /home/automaker/.bashrc
+
+# Also add to root's .bashrc since docker exec defaults to root
+RUN echo 'export PATH="/home/automaker/.local/bin:$PATH"' >> /root/.bashrc
+
+WORKDIR /app
 
 # Copy root package.json (needed for workspace resolution)
 COPY --from=server-builder /app/package*.json ./
@@ -111,6 +140,8 @@ RUN chmod +x /usr/local/bin/docker-entrypoint.sh
 ENV PORT=3008
 ENV DATA_DIR=/data
 ENV HOME=/home/automaker
+# Add user's local bin to PATH for cursor-agent
+ENV PATH="/home/automaker/.local/bin:${PATH}"
 
 # Expose port
 EXPOSE 3008
```
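The switch from Alpine's `su-exec` to Debian's `gosu` and the `/home/automaker/.local/bin` PATH wiring can be sanity-checked after a build. A minimal sketch, assuming the server stage is built and tagged locally (the tag is a placeholder, not something the diff defines):

```bash
# Sketch only: the image tag is a placeholder, not part of the diff above.
docker build --target server -t automaker-server .

# The entrypoint (see the docker-entrypoint.sh diff below) execs `gosu automaker "$@"`,
# so commands should run as uid 1001 rather than root:
docker run --rm automaker-server id

# Login shells source /etc/profile.d/cursor-cli.sh, so cursor-agent resolves from ~/.local/bin:
docker run --rm automaker-server bash -lc 'command -v cursor-agent'
```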
```diff
@@ -47,7 +47,7 @@ export function createCheckoutBranchHandler() {
     }
 
     // Get current branch for reference
-    const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: currentBranchOutput } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: worktreePath,
     });
     const currentBranch = currentBranchOutput.trim();
@@ -59,7 +59,7 @@ export function createCommitHandler() {
     const commitHash = hashOutput.trim().substring(0, 8);
 
     // Get branch name
-    const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: branchOutput } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: worktreePath,
     });
     const branchName = branchOutput.trim();
@@ -43,7 +43,7 @@ export function createCreatePRHandler() {
     const effectiveProjectPath = projectPath || worktreePath;
 
     // Get current branch name
-    const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: branchOutput } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: worktreePath,
       env: execEnv,
     });
@@ -38,7 +38,7 @@ export function createDeleteHandler() {
     // Get branch name before removing worktree
     let branchName: string | null = null;
     try {
-      const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+      const { stdout } = await execAsync('git symbolic-ref --short HEAD', {
        cwd: worktreePath,
       });
       branchName = stdout.trim();
@@ -31,7 +31,7 @@ export function createInfoHandler() {
     const worktreePath = path.join(projectPath, '.worktrees', featureId);
     try {
       await secureFs.access(worktreePath);
-      const { stdout } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+      const { stdout } = await execAsync('git symbolic-ref --short HEAD', {
        cwd: worktreePath,
       });
       res.json({
@@ -34,7 +34,7 @@ export function createListBranchesHandler() {
     }
 
     // Get current branch
-    const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: currentBranchOutput } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: worktreePath,
     });
     const currentBranch = currentBranchOutput.trim();
@@ -35,7 +35,7 @@ export function createMergeHandler() {
     const worktreePath = path.join(projectPath, '.worktrees', featureId);
 
     // Get current branch
-    const { stdout: currentBranch } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: currentBranch } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: projectPath,
     });
 
@@ -28,7 +28,7 @@ export function createPullHandler() {
     }
 
     // Get current branch name
-    const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: branchOutput } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: worktreePath,
     });
     const branchName = branchOutput.trim();
@@ -29,7 +29,7 @@ export function createPushHandler() {
     }
 
     // Get branch name
-    const { stdout: branchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: branchOutput } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: worktreePath,
     });
     const branchName = branchOutput.trim();
@@ -87,7 +87,7 @@ export function createSwitchBranchHandler() {
     }
 
     // Get current branch
-    const { stdout: currentBranchOutput } = await execAsync('git rev-parse --abbrev-ref HEAD', {
+    const { stdout: currentBranchOutput } = await execAsync('git symbolic-ref --short HEAD', {
       cwd: worktreePath,
     });
     const previousBranch = currentBranchOutput.trim();
```
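Every worktree handler above swaps `git rev-parse --abbrev-ref HEAD` for `git symbolic-ref --short HEAD`. The practical difference shows up on a detached HEAD; a minimal sketch in a throwaway repo:

```bash
# Throwaway repo to compare the two commands (illustrative only)
git init /tmp/branch-demo && cd /tmp/branch-demo
git commit --allow-empty -m "init"

git rev-parse --abbrev-ref HEAD   # prints the branch name, e.g. "main"
git symbolic-ref --short HEAD     # same output while on a branch

git checkout --detach HEAD
git rev-parse --abbrev-ref HEAD   # prints the literal string "HEAD" and exits 0
git symbolic-ref --short HEAD     # errors ("ref HEAD is not a symbolic ref") with a non-zero exit
```

So the handlers now get a hard, catchable failure instead of silently treating the string "HEAD" as a branch name.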
```diff
@@ -42,6 +42,9 @@ export function useSpecRegeneration({
       }
 
       if (event.type === 'spec_regeneration_complete') {
+        // Only show toast if we're in active creation flow (not regular regeneration)
+        const isCreationFlow = creatingSpecProjectPath !== null;
+
         setSpecCreatingForProject(null);
         setShowSetupDialog(false);
         setProjectOverview('');
@@ -49,9 +52,12 @@ export function useSpecRegeneration({
         // Clear onboarding state if we came from onboarding
         setNewProjectName('');
         setNewProjectPath('');
-        toast.success('App specification created', {
-          description: 'Your project is now set up and ready to go!',
-        });
+        if (isCreationFlow) {
+          toast.success('App specification created', {
+            description: 'Your project is now set up and ready to go!',
+          });
+        }
       } else if (event.type === 'spec_regeneration_error') {
         setSpecCreatingForProject(null);
         toast.error('Failed to create specification', {
```
```diff
@@ -32,6 +32,53 @@ export function useCliStatus() {
 
   const [isCheckingClaudeCli, setIsCheckingClaudeCli] = useState(false);
 
+  // Refresh Claude auth status from the server
+  const refreshAuthStatus = useCallback(async () => {
+    const api = getElectronAPI();
+    if (!api?.setup?.getClaudeStatus) return;
+
+    try {
+      const result = await api.setup.getClaudeStatus();
+      if (result.success && result.auth) {
+        // Cast to extended type that includes server-added fields
+        const auth = result.auth as typeof result.auth & {
+          oauthTokenValid?: boolean;
+          apiKeyValid?: boolean;
+        };
+        // Map server method names to client method types
+        // Server returns: oauth_token_env, oauth_token, api_key_env, api_key, credentials_file, cli_authenticated, none
+        const validMethods = [
+          'oauth_token_env',
+          'oauth_token',
+          'api_key',
+          'api_key_env',
+          'credentials_file',
+          'cli_authenticated',
+          'none',
+        ] as const;
+        type AuthMethod = (typeof validMethods)[number];
+        const method: AuthMethod = validMethods.includes(auth.method as AuthMethod)
+          ? (auth.method as AuthMethod)
+          : auth.authenticated
+            ? 'api_key'
+            : 'none'; // Default authenticated to api_key, not none
+        const authStatus = {
+          authenticated: auth.authenticated,
+          method,
+          hasCredentialsFile: auth.hasCredentialsFile ?? false,
+          oauthTokenValid:
+            auth.oauthTokenValid || auth.hasStoredOAuthToken || auth.hasEnvOAuthToken,
+          apiKeyValid: auth.apiKeyValid || auth.hasStoredApiKey || auth.hasEnvApiKey,
+          hasEnvOAuthToken: auth.hasEnvOAuthToken,
+          hasEnvApiKey: auth.hasEnvApiKey,
+        };
+        setClaudeAuthStatus(authStatus);
+      }
+    } catch (error) {
+      logger.error('Failed to refresh Claude auth status:', error);
+    }
+  }, [setClaudeAuthStatus]);
+
   // Check CLI status on mount
   useEffect(() => {
     const checkCliStatus = async () => {
@@ -48,54 +95,13 @@ export function useCliStatus() {
       }
 
       // Check Claude auth status (re-fetch on mount to ensure persistence)
-      if (api?.setup?.getClaudeStatus) {
-        try {
-          const result = await api.setup.getClaudeStatus();
-          if (result.success && result.auth) {
-            // Cast to extended type that includes server-added fields
-            const auth = result.auth as typeof result.auth & {
-              oauthTokenValid?: boolean;
-              apiKeyValid?: boolean;
-            };
-            // Map server method names to client method types
-            // Server returns: oauth_token_env, oauth_token, api_key_env, api_key, credentials_file, cli_authenticated, none
-            const validMethods = [
-              'oauth_token_env',
-              'oauth_token',
-              'api_key',
-              'api_key_env',
-              'credentials_file',
-              'cli_authenticated',
-              'none',
-            ] as const;
-            type AuthMethod = (typeof validMethods)[number];
-            const method: AuthMethod = validMethods.includes(auth.method as AuthMethod)
-              ? (auth.method as AuthMethod)
-              : auth.authenticated
-                ? 'api_key'
-                : 'none'; // Default authenticated to api_key, not none
-            const authStatus = {
-              authenticated: auth.authenticated,
-              method,
-              hasCredentialsFile: auth.hasCredentialsFile ?? false,
-              oauthTokenValid:
-                auth.oauthTokenValid || auth.hasStoredOAuthToken || auth.hasEnvOAuthToken,
-              apiKeyValid: auth.apiKeyValid || auth.hasStoredApiKey || auth.hasEnvApiKey,
-              hasEnvOAuthToken: auth.hasEnvOAuthToken,
-              hasEnvApiKey: auth.hasEnvApiKey,
-            };
-            setClaudeAuthStatus(authStatus);
-          }
-        } catch (error) {
-          logger.error('Failed to check Claude auth status:', error);
-        }
-      }
+      await refreshAuthStatus();
     };
 
     checkCliStatus();
-  }, [setClaudeAuthStatus]);
+  }, [refreshAuthStatus]);
 
-  // Refresh Claude CLI status
+  // Refresh Claude CLI status and auth status
   const handleRefreshClaudeCli = useCallback(async () => {
     setIsCheckingClaudeCli(true);
     try {
@@ -104,12 +110,14 @@ export function useCliStatus() {
         const status = await api.checkClaudeCli();
         setClaudeCliStatus(status);
       }
+      // Also refresh auth status
+      await refreshAuthStatus();
     } catch (error) {
       logger.error('Failed to refresh Claude CLI status:', error);
     } finally {
       setIsCheckingClaudeCli(false);
     }
-  }, []);
+  }, [refreshAuthStatus]);
 
   return {
     claudeCliStatus,
```
```diff
@@ -8,6 +8,9 @@ interface UseCliStatusOptions {
   setAuthStatus: (status: any) => void;
 }
 
+// Create logger once outside the hook to prevent infinite re-renders
+const logger = createLogger('CliStatus');
+
 export function useCliStatus({
   cliType,
   statusApi,
@@ -15,7 +18,6 @@ export function useCliStatus({
   setAuthStatus,
 }: UseCliStatusOptions) {
   const [isChecking, setIsChecking] = useState(false);
-  const logger = createLogger('CliStatus');
 
   const checkStatus = useCallback(async () => {
     logger.info(`Starting status check for ${cliType}...`);
@@ -66,7 +68,7 @@ export function useCliStatus({
     } finally {
       setIsChecking(false);
     }
-  }, [cliType, statusApi, setCliStatus, setAuthStatus, logger]);
+  }, [cliType, statusApi, setCliStatus, setAuthStatus]);
 
   return { isChecking, checkStatus };
 }
```
```diff
@@ -11,7 +11,7 @@ interface ThemeStepProps {
 }
 
 export function ThemeStep({ onNext, onBack }: ThemeStepProps) {
-  const { theme, setTheme, setPreviewTheme } = useAppStore();
+  const { theme, setTheme, setPreviewTheme, currentProject, setProjectTheme } = useAppStore();
   const [activeTab, setActiveTab] = useState<'dark' | 'light'>('dark');
 
   const handleThemeHover = (themeValue: string) => {
@@ -24,6 +24,11 @@ export function ThemeStep({ onNext, onBack }: ThemeStepProps) {
 
   const handleThemeClick = (themeValue: string) => {
     setTheme(themeValue as typeof theme);
+    // Also update the current project's theme if one exists
+    // This ensures the selected theme is visible since getEffectiveTheme() prioritizes project theme
+    if (currentProject) {
+      setProjectTheme(currentProject.id, themeValue as typeof theme);
+    }
     setPreviewTheme(null);
   };
 
```
dev.mjs (50 changed lines)
```diff
@@ -11,13 +11,13 @@
 
 import path from 'path';
 import { fileURLToPath } from 'url';
-import { createRequire } from 'module';
 
 import {
   createRestrictedFs,
   log,
   runNpm,
   runNpmAndWait,
+  runNpx,
   printHeader,
   printModeMenu,
   resolvePortConfiguration,
@@ -26,11 +26,9 @@ import {
   startServerAndWait,
   ensureDependencies,
   prompt,
+  launchDockerContainers,
 } from './scripts/launcher-utils.mjs';
 
-const require = createRequire(import.meta.url);
-const crossSpawn = require('cross-spawn');
-
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
 
@@ -52,10 +50,11 @@ async function installPlaywrightBrowsers() {
   log('Checking Playwright browsers...', 'yellow');
   try {
     const exitCode = await new Promise((resolve) => {
-      const playwright = crossSpawn('npx', ['playwright', 'install', 'chromium'], {
-        stdio: 'inherit',
-        cwd: path.join(__dirname, 'apps', 'ui'),
-      });
+      const playwright = runNpx(
+        ['playwright', 'install', 'chromium'],
+        { stdio: 'inherit' },
+        path.join(__dirname, 'apps', 'ui')
+      );
       playwright.on('close', (code) => resolve(code));
       playwright.on('error', () => resolve(1));
     });
@@ -171,40 +170,7 @@ async function main() {
       break;
     } else if (choice === '3') {
       console.log('');
-      log('Launching Docker Container (Isolated Mode)...', 'blue');
-      log('Starting Docker containers...', 'yellow');
-      log('Note: Containers will only rebuild if images are missing.', 'yellow');
-      log('To force a rebuild, run: docker compose up --build', 'yellow');
-      console.log('');
-
-      // Check if ANTHROPIC_API_KEY is set
-      if (!process.env.ANTHROPIC_API_KEY) {
-        log('Warning: ANTHROPIC_API_KEY environment variable is not set.', 'yellow');
-        log('The server will require an API key to function.', 'yellow');
-        log('Set it with: export ANTHROPIC_API_KEY=your-key', 'yellow');
-        console.log('');
-      }
-
-      // Start containers with docker-compose (without --build to preserve volumes)
-      // Images will only be built if they don't exist
-      processes.docker = crossSpawn('docker', ['compose', 'up'], {
-        stdio: 'inherit',
-        cwd: __dirname,
-        env: {
-          ...process.env,
-        },
-      });
-
-      log('Docker containers starting...', 'blue');
-      log('UI will be available at: http://localhost:3007', 'green');
-      log('API will be available at: http://localhost:3008', 'green');
-      console.log('');
-      log('Press Ctrl+C to stop the containers.', 'yellow');
-
-      await new Promise((resolve) => {
-        processes.docker.on('close', resolve);
-      });
-
+      await launchDockerContainers({ baseDir: __dirname, processes });
       break;
     } else {
       log('Invalid choice. Please enter 1, 2, or 3.', 'red');
```
```diff
@@ -4,8 +4,26 @@ services:
       # Mount your workspace directory to /projects inside the container
       # Example: mount your local /workspace to /projects inside the container
       - /Users/webdevcody/Workspace/automaker-workspace:/projects:rw
+
+      # ===== CLI Authentication (Optional) =====
+      # Mount host CLI credentials to avoid re-authenticating in container
+
+      # Claude CLI - mount your ~/.claude directory (Linux/Windows)
+      # This shares your 'claude login' OAuth session with the container
+      # - ~/.claude:/home/automaker/.claude
+
+      # Cursor CLI - mount your ~/.cursor directory (Linux/Windows)
+      # This shares your 'cursor-agent login' OAuth session with the container
+      # - ~/.cursor:/home/automaker/.cursor
+
     environment:
       # Set root directory for all projects and file operations
       # Users can only create/open projects within this directory
       - ALLOWED_ROOT_DIRECTORY=/projects
       - NODE_ENV=development
+
+      # ===== macOS Users =====
+      # On macOS, OAuth tokens are stored in SQLite databases, not plain files.
+      # Extract your Cursor token with: ./scripts/get-cursor-token.sh
+      # Then set it here or in your .env file:
+      # - CURSOR_API_KEY=${CURSOR_API_KEY:-}
@@ -36,6 +36,17 @@ services:
       # Required
       - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY}
+
+      # Optional - Claude CLI OAuth credentials (for macOS users)
+      # Extract with: ./scripts/get-claude-token.sh
+      # This writes the OAuth tokens to ~/.claude/.credentials.json in the container
+      - CLAUDE_OAUTH_CREDENTIALS=${CLAUDE_OAUTH_CREDENTIALS:-}
+
+      # Optional - Cursor CLI OAuth token (extract from host with the command shown below)
+      # macOS: ./scripts/get-cursor-token.sh (extracts from Keychain)
+      # Linux: jq -r '.accessToken' ~/.config/cursor/auth.json
+      # Note: cursor-agent stores its OAuth tokens separately from Cursor IDE
+      - CURSOR_AUTH_TOKEN=${CURSOR_AUTH_TOKEN:-}
 
       # Optional - authentication, one will generate if left blank
       - AUTOMAKER_API_KEY=${AUTOMAKER_API_KEY:-}
 
@@ -63,6 +74,10 @@ services:
       # This allows 'claude login' authentication to persist between restarts
       - automaker-claude-config:/home/automaker/.claude
+
+      # Persist Cursor CLI configuration and authentication across container restarts
+      # This allows 'cursor-agent login' authentication to persist between restarts
+      - automaker-cursor-config:/home/automaker/.cursor
 
     # NO host directory mounts - container cannot access your laptop files
     # If you need to work on a project, create it INSIDE the container
     # or use a separate docker-compose override file
@@ -81,3 +96,8 @@ volumes:
     name: automaker-claude-config
     # Named volume for Claude CLI OAuth session keys and configuration
     # Persists authentication across container restarts
+
+  automaker-cursor-config:
+    name: automaker-cursor-config
+    # Named volume for Cursor CLI configuration and authentication
+    # Persists cursor-agent login authentication across container restarts
```
```diff
@@ -1,19 +1,45 @@
 #!/bin/sh
 set -e
 
-# Fix permissions on Claude CLI config directory if it exists
-# This handles the case where a volume is mounted and owned by root
-if [ -d "/home/automaker/.claude" ]; then
-  chown -R automaker:automaker /home/automaker/.claude
-  chmod -R 755 /home/automaker/.claude
-fi
-
-# Ensure the directory exists with correct permissions if volume is empty
+# Ensure Claude CLI config directory exists with correct permissions
 if [ ! -d "/home/automaker/.claude" ]; then
   mkdir -p /home/automaker/.claude
-  chown automaker:automaker /home/automaker/.claude
-  chmod 755 /home/automaker/.claude
 fi
 
+# If CLAUDE_OAUTH_CREDENTIALS is set, write it to the credentials file
+# This allows passing OAuth tokens from host (especially macOS where they're in Keychain)
+if [ -n "$CLAUDE_OAUTH_CREDENTIALS" ]; then
+  echo "$CLAUDE_OAUTH_CREDENTIALS" > /home/automaker/.claude/.credentials.json
+  chmod 600 /home/automaker/.claude/.credentials.json
+fi
+
+# Fix permissions on Claude CLI config directory
+chown -R automaker:automaker /home/automaker/.claude
+chmod 700 /home/automaker/.claude
+
+# Ensure Cursor CLI config directory exists with correct permissions
+# This handles both: mounted volumes (owned by root) and empty directories
+if [ ! -d "/home/automaker/.cursor" ]; then
+  mkdir -p /home/automaker/.cursor
+fi
+chown -R automaker:automaker /home/automaker/.cursor
+chmod -R 700 /home/automaker/.cursor
+
+# If CURSOR_AUTH_TOKEN is set, write it to the cursor auth file
+# On Linux, cursor-agent uses ~/.config/cursor/auth.json for file-based credential storage
+# The env var CURSOR_AUTH_TOKEN is also checked directly by cursor-agent
+if [ -n "$CURSOR_AUTH_TOKEN" ]; then
+  CURSOR_CONFIG_DIR="/home/automaker/.config/cursor"
+  mkdir -p "$CURSOR_CONFIG_DIR"
+  # Write auth.json with the access token
+  cat > "$CURSOR_CONFIG_DIR/auth.json" << EOF
+{
+  "accessToken": "$CURSOR_AUTH_TOKEN"
+}
+EOF
+  chmod 600 "$CURSOR_CONFIG_DIR/auth.json"
+  chown -R automaker:automaker /home/automaker/.config
+fi
+
 # Switch to automaker user and execute the command
-exec su-exec automaker "$@"
+exec gosu automaker "$@"
```
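Once the entrypoint has run, the credential files it writes can be checked from the host. A rough sketch (the service name is a placeholder; use the one from your compose file):

```bash
# Placeholder service name - adjust to the service defined in your docker-compose file.
docker compose exec automaker ls -l /home/automaker/.claude/.credentials.json  # written when CLAUDE_OAUTH_CREDENTIALS is set
docker compose exec automaker cat /home/automaker/.config/cursor/auth.json     # written when CURSOR_AUTH_TOKEN is set
```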
```diff
@@ -57,10 +57,63 @@ docker-compose -f docker-compose.yml -f docker-compose.project.yml up -d
 
 **Tip**: Use `:ro` (read-only) when possible for extra safety.
 
+## CLI Authentication (macOS)
+
+On macOS, OAuth tokens are stored in Keychain (Claude) and SQLite (Cursor). Use these scripts to extract and pass them to the container:
+
+### Claude CLI
+
+```bash
+# Extract and add to .env
+echo "CLAUDE_OAUTH_CREDENTIALS=$(./scripts/get-claude-token.sh)" >> .env
+```
+
+### Cursor CLI
+
+```bash
+# Extract and add to .env (extracts from macOS Keychain)
+echo "CURSOR_AUTH_TOKEN=$(./scripts/get-cursor-token.sh)" >> .env
+```
+
+**Note**: The cursor-agent CLI stores its OAuth tokens separately from the Cursor IDE:
+
+- **macOS**: Tokens are stored in Keychain (service: `cursor-access-token`)
+- **Linux**: Tokens are stored in `~/.config/cursor/auth.json` (not `~/.cursor`)
+
+### Apply to container
+
+```bash
+# Restart with new credentials
+docker-compose down && docker-compose up -d
+```
+
+**Note**: Tokens expire periodically. If you get authentication errors, re-run the extraction scripts.
+
+## CLI Authentication (Linux/Windows)
+
+On Linux/Windows, cursor-agent stores credentials in files, so you can either:
+
+**Option 1: Extract tokens to environment variables (recommended)**
+
+```bash
+# Linux: Extract tokens to .env
+echo "CURSOR_AUTH_TOKEN=$(jq -r '.accessToken' ~/.config/cursor/auth.json)" >> .env
+```
+
+**Option 2: Bind mount credential directories directly**
+
+```yaml
+# In docker-compose.override.yml
+volumes:
+  - ~/.claude:/home/automaker/.claude
+  - ~/.config/cursor:/home/automaker/.config/cursor
+```
+
 ## Troubleshooting
 
 | Problem               | Solution                                                                                      |
-| --------------------- | --------------------------------------------------------------------------------------------- |
+| --------------------- | ---------------------------------------------------------------------------------------------------------------------------------------------------- |
 | Container won't start | Check `.env` has `ANTHROPIC_API_KEY` set. Run `docker-compose logs` for errors.              |
 | Can't access web UI   | Verify container is running with `docker ps \| grep automaker`                               |
 | Need a fresh start    | Run `docker-compose down && docker volume rm automaker-data && docker-compose up -d --build` |
+| Cursor auth fails     | Re-extract token with `./scripts/get-cursor-token.sh` - tokens expire periodically. Make sure you've run `cursor-agent login` on your host first.     |
```
```diff
@@ -800,8 +800,14 @@ export async function getClaudeAuthIndicators(): Promise<ClaudeAuthIndicators> {
         const content = await systemPathReadFile(credPath);
         const credentials = JSON.parse(content);
         result.hasCredentialsFile = true;
+        // Support multiple credential formats:
+        // 1. Claude Code CLI format: { claudeAiOauth: { accessToken, refreshToken } }
+        // 2. Legacy format: { oauth_token } or { access_token }
+        // 3. API key format: { api_key }
+        const hasClaudeOauth = !!credentials.claudeAiOauth?.accessToken;
+        const hasLegacyOauth = !!(credentials.oauth_token || credentials.access_token);
         result.credentials = {
-          hasOAuthToken: !!(credentials.oauth_token || credentials.access_token),
+          hasOAuthToken: hasClaudeOauth || hasLegacyOauth,
           hasApiKey: !!credentials.api_key,
         };
         break;
```
scripts/get-claude-token.sh (new executable file, 34 lines)
```diff
@@ -0,0 +1,34 @@
+#!/bin/bash
+# Extract Claude OAuth token from macOS Keychain for use in Docker container
+# Usage: ./scripts/get-claude-token.sh
+#    or: export CLAUDE_OAUTH_TOKEN=$(./scripts/get-claude-token.sh)
+
+set -e
+
+# Only works on macOS (uses security command for Keychain access)
+if [[ "$OSTYPE" != "darwin"* ]]; then
+  echo "Error: This script only works on macOS." >&2
+  echo "On Linux, mount ~/.claude directory directly instead." >&2
+  exit 1
+fi
+
+# Check if security command exists
+if ! command -v security &> /dev/null; then
+  echo "Error: 'security' command not found." >&2
+  exit 1
+fi
+
+# Get the current username
+USERNAME=$(whoami)
+
+# Extract credentials from Keychain
+CREDS=$(security find-generic-password -s "Claude Code-credentials" -a "$USERNAME" -w 2>/dev/null)
+
+if [ -z "$CREDS" ]; then
+  echo "Error: No Claude credentials found in Keychain." >&2
+  echo "Make sure you've logged in with 'claude login' first." >&2
+  exit 1
+fi
+
+# Output the full credentials JSON (contains accessToken and refreshToken)
+echo "$CREDS"
```
scripts/get-cursor-token.sh (new executable file, 69 lines)
```diff
@@ -0,0 +1,69 @@
+#!/bin/bash
+# Extract Cursor CLI OAuth token from host machine for use in Docker container
+#
+# IMPORTANT: This extracts the cursor-agent CLI OAuth token, NOT the Cursor IDE token.
+# cursor-agent stores tokens in macOS Keychain (not SQLite like the IDE).
+#
+# Usage: ./scripts/get-cursor-token.sh
+#    or: export CURSOR_AUTH_TOKEN=$(./scripts/get-cursor-token.sh)
+#
+# For Docker: echo "CURSOR_AUTH_TOKEN=$(./scripts/get-cursor-token.sh)" >> .env
+
+set -e
+
+# Determine platform and extract token accordingly
+if [[ "$OSTYPE" == "darwin"* ]]; then
+  # macOS: cursor-agent stores OAuth tokens in Keychain
+  # Service: cursor-access-token, Account: cursor-user
+
+  if ! command -v security &> /dev/null; then
+    echo "Error: 'security' command not found." >&2
+    exit 1
+  fi
+
+  # Extract access token from Keychain
+  TOKEN=$(security find-generic-password -a "cursor-user" -s "cursor-access-token" -w 2>/dev/null)
+
+  if [ -z "$TOKEN" ]; then
+    echo "Error: No Cursor CLI token found in Keychain." >&2
+    echo "Make sure you've logged in with 'cursor-agent login' first." >&2
+    exit 1
+  fi
+
+elif [[ "$OSTYPE" == "linux-gnu"* ]]; then
+  # Linux: cursor-agent stores OAuth tokens in a JSON file
+  # Default location: ~/.config/cursor/auth.json
+  # Or: $XDG_CONFIG_HOME/cursor/auth.json
+
+  if [ -n "$XDG_CONFIG_HOME" ]; then
+    AUTH_FILE="$XDG_CONFIG_HOME/cursor/auth.json"
+  else
+    AUTH_FILE="$HOME/.config/cursor/auth.json"
+  fi
+
+  if [ ! -f "$AUTH_FILE" ]; then
+    echo "Error: Cursor auth file not found at: $AUTH_FILE" >&2
+    echo "Make sure you've logged in with 'cursor-agent login' first." >&2
+    exit 1
+  fi
+
+  # Check if jq is available
+  if ! command -v jq &> /dev/null; then
+    echo "Error: jq is required but not installed." >&2
+    echo "Install it with: apt install jq" >&2
+    exit 1
+  fi
+
+  TOKEN=$(jq -r '.accessToken // empty' "$AUTH_FILE" 2>/dev/null)
+
+  if [ -z "$TOKEN" ]; then
+    echo "Error: No access token found in $AUTH_FILE" >&2
+    exit 1
+  fi
+else
+  echo "Error: Unsupported platform: $OSTYPE" >&2
+  exit 1
+fi
+
+# Output the token
+echo "$TOKEN"
```
```diff
@@ -13,7 +13,7 @@
  */
 
 import { execSync } from 'child_process';
-import fsNative from 'fs';
+import fsNative, { statSync } from 'fs';
 import http from 'http';
 import path from 'path';
 import readline from 'readline';
@@ -662,3 +662,142 @@ export async function ensureDependencies(fs, baseDir) {
     });
   }
 }
+
+// =============================================================================
+// Docker Utilities
+// =============================================================================
+
+/**
+ * Sanitize a project name to be safe for use in shell commands and Docker image names.
+ * Converts to lowercase and removes any characters that aren't alphanumeric.
+ * @param {string} name - Project name to sanitize
+ * @returns {string} - Sanitized project name
+ */
+export function sanitizeProjectName(name) {
+  return name.toLowerCase().replace(/[^a-z0-9]/g, '');
+}
+
+/**
+ * Check if Docker images need to be rebuilt based on Dockerfile or package.json changes
+ * @param {string} baseDir - Base directory containing Dockerfile and package.json
+ * @returns {boolean} - Whether images need to be rebuilt
+ */
+export function shouldRebuildDockerImages(baseDir) {
+  try {
+    const dockerfilePath = path.join(baseDir, 'Dockerfile');
+    const packageJsonPath = path.join(baseDir, 'package.json');
+
+    // Get modification times of source files
+    const dockerfileMtime = statSync(dockerfilePath).mtimeMs;
+    const packageJsonMtime = statSync(packageJsonPath).mtimeMs;
+    const latestSourceMtime = Math.max(dockerfileMtime, packageJsonMtime);
+
+    // Get project name from docker-compose config, falling back to directory name
+    let projectName;
+    try {
+      const composeConfig = execSync('docker compose config --format json', {
+        encoding: 'utf-8',
+        cwd: baseDir,
+      });
+      const config = JSON.parse(composeConfig);
+      projectName = config.name;
+    } catch (error) {
+      // Fallback handled below
+    }
+
+    // Sanitize project name (whether from config or fallback)
+    // This prevents command injection and ensures valid Docker image names
+    const sanitizedProjectName = sanitizeProjectName(projectName || path.basename(baseDir));
+    const serverImageName = `${sanitizedProjectName}_server`;
+    const uiImageName = `${sanitizedProjectName}_ui`;
+
+    // Check if images exist and get their creation times
+    let needsRebuild = false;
+
+    try {
+      // Check server image
+      const serverImageInfo = execSync(
+        `docker image inspect ${serverImageName} --format "{{.Created}}" 2>/dev/null || echo ""`,
+        { encoding: 'utf-8', cwd: baseDir }
+      ).trim();
+
+      // Check UI image
+      const uiImageInfo = execSync(
+        `docker image inspect ${uiImageName} --format "{{.Created}}" 2>/dev/null || echo ""`,
+        { encoding: 'utf-8', cwd: baseDir }
+      ).trim();
+
+      // If either image doesn't exist, we need to rebuild
+      if (!serverImageInfo || !uiImageInfo) {
+        return true;
+      }
+
+      // Parse image creation times (ISO 8601 format)
+      const serverCreated = new Date(serverImageInfo).getTime();
+      const uiCreated = new Date(uiImageInfo).getTime();
+      const oldestImageTime = Math.min(serverCreated, uiCreated);
+
+      // If source files are newer than images, rebuild
+      needsRebuild = latestSourceMtime > oldestImageTime;
+    } catch (error) {
+      // If images don't exist or inspect fails, rebuild
+      needsRebuild = true;
+    }
+
+    return needsRebuild;
+  } catch (error) {
+    // If we can't check, err on the side of rebuilding
+    log('Could not check Docker image status, will rebuild to be safe', 'yellow');
+    return true;
+  }
+}
+
+/**
+ * Launch Docker containers with docker-compose
+ * @param {object} options - Configuration options
+ * @param {string} options.baseDir - Base directory containing docker-compose.yml
+ * @param {object} options.processes - Processes object to track docker process
+ * @returns {Promise<void>}
+ */
+export async function launchDockerContainers({ baseDir, processes }) {
+  log('Launching Docker Container (Isolated Mode)...', 'blue');
+
+  // Check if Dockerfile or package.json changed and rebuild if needed
+  const needsRebuild = shouldRebuildDockerImages(baseDir);
+  const buildFlag = needsRebuild ? ['--build'] : [];
+
+  if (needsRebuild) {
+    log('Dockerfile or package.json changed - rebuilding images...', 'yellow');
+  } else {
+    log('Starting Docker containers...', 'yellow');
+  }
+  console.log('');
+
+  // Check if ANTHROPIC_API_KEY is set
+  if (!process.env.ANTHROPIC_API_KEY) {
+    log('Warning: ANTHROPIC_API_KEY environment variable is not set.', 'yellow');
+    log('The server will require an API key to function.', 'yellow');
+    log('Set it with: export ANTHROPIC_API_KEY=your-key', 'yellow');
+    console.log('');
+  }
+
+  // Start containers with docker-compose
+  // Will rebuild if Dockerfile or package.json changed
+  processes.docker = crossSpawn('docker', ['compose', 'up', ...buildFlag], {
+    stdio: 'inherit',
+    cwd: baseDir,
+    env: {
+      ...process.env,
+    },
+  });
+
+  log('Docker containers starting...', 'blue');
+  log('UI will be available at: http://localhost:3007', 'green');
+  log('API will be available at: http://localhost:3008', 'green');
+  console.log('');
+  log('Press Ctrl+C to stop the containers.', 'yellow');
+
+  await new Promise((resolve) => {
+    processes.docker.on('close', resolve);
+  });
+}
```
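The rebuild check added above shells out to Docker for its inputs; running the same commands by hand shows the values `shouldRebuildDockerImages()` compares against the Dockerfile and package.json modification times (illustrative only, `<project>` stands for the sanitized compose project name):

```bash
# Illustrative only - <project> is whatever `docker compose config` reports as the project name.
docker compose config --format json | jq -r '.name'
docker image inspect <project>_server --format '{{.Created}}'
docker image inspect <project>_ui --format '{{.Created}}'
```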
start.mjs (41 changed lines)
```diff
@@ -18,11 +18,9 @@
 import path from 'path';
 import { fileURLToPath } from 'url';
 
-import { createRequire } from 'module';
 import {
   createRestrictedFs,
   log,
-  runNpm,
   runNpmAndWait,
   runNpx,
   printHeader,
@@ -35,11 +33,9 @@ import {
   prompt,
   killProcessTree,
   sleep,
+  launchDockerContainers,
 } from './scripts/launcher-utils.mjs';
 
-const require = createRequire(import.meta.url);
-const crossSpawn = require('cross-spawn');
-
 const __filename = fileURLToPath(import.meta.url);
 const __dirname = path.dirname(__filename);
 
@@ -230,40 +226,7 @@ async function main() {
       break;
     } else if (choice === '3') {
      console.log('');
-      log('Launching Docker Container (Isolated Mode)...', 'blue');
-      log('Starting Docker containers...', 'yellow');
-      log('Note: Containers will only rebuild if images are missing.', 'yellow');
-      log('To force a rebuild, run: docker compose up --build', 'yellow');
-      console.log('');
-
-      // Check if ANTHROPIC_API_KEY is set
-      if (!process.env.ANTHROPIC_API_KEY) {
-        log('Warning: ANTHROPIC_API_KEY environment variable is not set.', 'yellow');
-        log('The server will require an API key to function.', 'yellow');
-        log('Set it with: export ANTHROPIC_API_KEY=your-key', 'yellow');
-        console.log('');
-      }
-
-      // Start containers with docker-compose (without --build to preserve volumes)
-      // Images will only be built if they don't exist
-      processes.docker = crossSpawn('docker', ['compose', 'up'], {
-        stdio: 'inherit',
-        cwd: __dirname,
-        env: {
-          ...process.env,
-        },
-      });
-
-      log('Docker containers starting...', 'blue');
-      log('UI will be available at: http://localhost:3007', 'green');
-      log('API will be available at: http://localhost:3008', 'green');
-      console.log('');
-      log('Press Ctrl+C to stop the containers.', 'yellow');
-
-      await new Promise((resolve) => {
-        processes.docker.on('close', resolve);
-      });
-
+      await launchDockerContainers({ baseDir: __dirname, processes });
       break;
     } else {
       log('Invalid choice. Please enter 1, 2, or 3.', 'red');
```