Mirror of https://github.com/AutoMaker-Org/automaker.git
Synced 2026-02-04 21:23:07 +00:00

Compare commits: feature/bu ... fix/docker (3 commits)

| Author | SHA1 | Date |
|---|---|---|
| | 3ccea7a67b | |
| | b37a287c9c | |
| | 45f6f17eb0 | |

Dockerfile (10 lines changed)
@@ -118,6 +118,7 @@ RUN curl -fsSL https://opencode.ai/install | bash && \
    echo "=== Checking OpenCode CLI installation ===" && \
    ls -la /home/automaker/.local/bin/ && \
    (which opencode && opencode --version) || echo "opencode installed (may need auth setup)"

USER root

# Add PATH to profile so it's available in all interactive shells (for login shells)
@@ -147,6 +148,15 @@ COPY --from=server-builder /app/apps/server/package*.json ./apps/server/
# Copy node_modules (includes symlinks to libs)
COPY --from=server-builder /app/node_modules ./node_modules

+# Install Playwright Chromium browser for AI agent verification tests
+# This adds ~300MB to the image but enables automated testing mode out of the box
+# Using the locally installed playwright ensures we use the pinned version from package-lock.json
+USER automaker
+RUN ./node_modules/.bin/playwright install chromium && \
+    echo "=== Playwright Chromium installed ===" && \
+    ls -la /home/automaker/.cache/ms-playwright/ || echo "Playwright browsers installed"
+USER root
+
# Create data and projects directories
RUN mkdir -p /data /projects && chown automaker:automaker /data /projects
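The Dockerfile hunk above bakes Chromium for Playwright into the image so agent verification runs work without a first-use download. Purely as an illustration of the kind of check such a run performs (the spec filename, port, and selector below are assumptions, not part of this compare), a verification spec could look like:

```typescript
// verify-feature.spec.ts — hypothetical example, not part of this compare
import { test, expect } from '@playwright/test';

test('implemented feature renders', async ({ page }) => {
  // Assumes the app under test is served on localhost:3000 inside the container.
  await page.goto('http://localhost:3000');
  // Runs against the Chromium build installed under /home/automaker/.cache/ms-playwright.
  await expect(page.locator('body')).toBeVisible();
});
```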
README.md (36 lines changed)

@@ -338,6 +338,42 @@ services:

The Docker image supports both AMD64 and ARM64 architectures. The GitHub CLI and Claude CLI are automatically downloaded for the correct architecture during build.

+##### Playwright for Automated Testing
+
+The Docker image includes **Playwright Chromium pre-installed** for AI agent verification tests. When agents implement features in automated testing mode, they use Playwright to verify the implementation works correctly.
+
+**No additional setup required** - Playwright verification works out of the box.
+
+#### Optional: Persist browsers for manual updates
+
+By default, Playwright Chromium is pre-installed in the Docker image. If you need to manually update browsers or want to persist browser installations across container restarts (not image rebuilds), you can mount a volume.
+
+**Important:** When you first add this volume mount to an existing setup, the empty volume will override the pre-installed browsers. You must re-install them:
+
+```bash
+# After adding the volume mount for the first time
+docker exec --user automaker -w /app automaker-server npx playwright install chromium
+```
+
+Add this to your `docker-compose.override.yml`:
+
+```yaml
+services:
+  server:
+    volumes:
+      - playwright-cache:/home/automaker/.cache/ms-playwright
+
+volumes:
+  playwright-cache:
+    name: automaker-playwright-cache
+```
+
+**Updating browsers manually:**
+
+```bash
+docker exec --user automaker -w /app automaker-server npx playwright install chromium
+```
+
### Testing

#### End-to-End Tests (Playwright)
@@ -121,89 +121,21 @@ const BOX_CONTENT_WIDTH = 67;
// The Claude Agent SDK can use either ANTHROPIC_API_KEY or Claude Code CLI authentication
(async () => {
  const hasAnthropicKey = !!process.env.ANTHROPIC_API_KEY;
-  const hasEnvOAuthToken = !!process.env.CLAUDE_CODE_OAUTH_TOKEN;

-  logger.debug('[CREDENTIAL_CHECK] Starting credential detection...');
-  logger.debug('[CREDENTIAL_CHECK] Environment variables:', {
-    hasAnthropicKey,
-    hasEnvOAuthToken,
-  });

  if (hasAnthropicKey) {
    logger.info('✓ ANTHROPIC_API_KEY detected');
    return;
  }

-  if (hasEnvOAuthToken) {
-    logger.info('✓ CLAUDE_CODE_OAUTH_TOKEN detected');
-    return;
-  }

  // Check for Claude Code CLI authentication
-  // Store indicators outside the try block so we can use them in the warning message
-  let cliAuthIndicators: Awaited<ReturnType<typeof getClaudeAuthIndicators>> | null = null;

  try {
-    cliAuthIndicators = await getClaudeAuthIndicators();
-    const indicators = cliAuthIndicators;
+    const indicators = await getClaudeAuthIndicators();

-    // Log detailed credential detection results
-    logger.debug('[CREDENTIAL_CHECK] Claude CLI auth indicators:', {
-      hasCredentialsFile: indicators.hasCredentialsFile,
-      hasSettingsFile: indicators.hasSettingsFile,
-      hasStatsCacheWithActivity: indicators.hasStatsCacheWithActivity,
-      hasProjectsSessions: indicators.hasProjectsSessions,
-      credentials: indicators.credentials,
-    });

-    logger.debug('[CREDENTIAL_CHECK] File check details:', {
-      settingsFile: {
-        path: indicators.checks.settingsFile.path,
-        exists: indicators.checks.settingsFile.exists,
-        readable: indicators.checks.settingsFile.readable,
-        error: indicators.checks.settingsFile.error,
-      },
-      statsCache: {
-        path: indicators.checks.statsCache.path,
-        exists: indicators.checks.statsCache.exists,
-        readable: indicators.checks.statsCache.readable,
-        hasDailyActivity: indicators.checks.statsCache.hasDailyActivity,
-        error: indicators.checks.statsCache.error,
-      },
-      projectsDir: {
-        path: indicators.checks.projectsDir.path,
-        exists: indicators.checks.projectsDir.exists,
-        readable: indicators.checks.projectsDir.readable,
-        entryCount: indicators.checks.projectsDir.entryCount,
-        error: indicators.checks.projectsDir.error,
-      },
-      credentialFiles: indicators.checks.credentialFiles.map((cf) => ({
-        path: cf.path,
-        exists: cf.exists,
-        readable: cf.readable,
-        error: cf.error,
-      })),
-    });

    const hasCliAuth =
      indicators.hasStatsCacheWithActivity ||
      (indicators.hasSettingsFile && indicators.hasProjectsSessions) ||
      (indicators.hasCredentialsFile &&
        (indicators.credentials?.hasOAuthToken || indicators.credentials?.hasApiKey));

-    logger.debug('[CREDENTIAL_CHECK] Auth determination:', {
-      hasCliAuth,
-      reason: hasCliAuth
-        ? indicators.hasStatsCacheWithActivity
-          ? 'stats cache with activity'
-          : indicators.hasSettingsFile && indicators.hasProjectsSessions
-            ? 'settings file + project sessions'
-            : indicators.credentials?.hasOAuthToken
-              ? 'credentials file with OAuth token'
-              : 'credentials file with API key'
-        : 'no valid credentials found',
-    });

    if (hasCliAuth) {
      logger.info('✓ Claude Code CLI authentication detected');
      return;
@@ -213,7 +145,7 @@ const BOX_CONTENT_WIDTH = 67;
    logger.warn('Error checking for Claude Code CLI authentication:', error);
  }

-  // No authentication found - show warning with paths that were checked
+  // No authentication found - show warning
  const wHeader = '⚠️ WARNING: No Claude authentication configured'.padEnd(BOX_CONTENT_WIDTH);
  const w1 = 'The Claude Agent SDK requires authentication to function.'.padEnd(BOX_CONTENT_WIDTH);
  const w2 = 'Options:'.padEnd(BOX_CONTENT_WIDTH);
@@ -226,33 +158,6 @@ const BOX_CONTENT_WIDTH = 67;
    BOX_CONTENT_WIDTH
  );

-  // Build paths checked summary from the indicators (if available)
-  let pathsCheckedInfo = '';
-  if (cliAuthIndicators) {
-    const pathsChecked: string[] = [];
-
-    // Collect paths that were checked
-    if (cliAuthIndicators.checks.settingsFile.path) {
-      pathsChecked.push(`Settings: ${cliAuthIndicators.checks.settingsFile.path}`);
-    }
-    if (cliAuthIndicators.checks.statsCache.path) {
-      pathsChecked.push(`Stats cache: ${cliAuthIndicators.checks.statsCache.path}`);
-    }
-    if (cliAuthIndicators.checks.projectsDir.path) {
-      pathsChecked.push(`Projects dir: ${cliAuthIndicators.checks.projectsDir.path}`);
-    }
-    for (const credFile of cliAuthIndicators.checks.credentialFiles) {
-      pathsChecked.push(`Credentials: ${credFile.path}`);
-    }
-
-    if (pathsChecked.length > 0) {
-      pathsCheckedInfo = `
-║ ║
-║ ${'Paths checked:'.padEnd(BOX_CONTENT_WIDTH)}║
-${pathsChecked.map((p) => `║ ${p.substring(0, BOX_CONTENT_WIDTH - 2).padEnd(BOX_CONTENT_WIDTH - 2)} ║`).join('\n')}`;
-    }
-  }

  logger.warn(`
╔═════════════════════════════════════════════════════════════════════╗
║ ${wHeader}║
@@ -264,7 +169,7 @@ ${pathsChecked.map((p) => `║ ${p.substring(0, BOX_CONTENT_WIDTH - 2).padEnd
║ ${w3}║
║ ${w4}║
║ ${w5}║
-║ ${w6}║${pathsCheckedInfo}
+║ ${w6}║
║ ║
╚═════════════════════════════════════════════════════════════════════╝
`);
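The hunks above strip the verbose [CREDENTIAL_CHECK] diagnostics, leaving three signals from getClaudeAuthIndicators() plus the ANTHROPIC_API_KEY shortcut. A minimal sketch of the resulting decision, using only names that appear in this compare (the wrapper function itself is illustrative):

```typescript
import { getClaudeAuthIndicators } from '@automaker/platform';

// Illustrative summary of the simplified startup check; not part of this diff.
async function describeClaudeAuth(): Promise<'api_key' | 'cli' | 'none'> {
  if (process.env.ANTHROPIC_API_KEY) return 'api_key';
  const indicators = await getClaudeAuthIndicators();
  const hasCliAuth =
    indicators.hasStatsCacheWithActivity ||
    (indicators.hasSettingsFile && indicators.hasProjectsSessions) ||
    (indicators.hasCredentialsFile &&
      (indicators.credentials?.hasOAuthToken || indicators.credentials?.hasApiKey));
  return hasCliAuth ? 'cli' : 'none';
}
```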
@@ -320,28 +320,9 @@ export function createVerifyClaudeAuthHandler() {
      authMethod,
    });

-    // Determine specific auth type for success messages
-    let authType: 'oauth' | 'api_key' | 'cli' | undefined;
-    if (authenticated) {
-      if (authMethod === 'api_key') {
-        authType = 'api_key';
-      } else if (authMethod === 'cli') {
-        // Check if CLI auth is via OAuth (Claude Code subscription) or generic CLI
-        // OAuth tokens are stored in the credentials file by the Claude CLI
-        const { getClaudeAuthIndicators } = await import('@automaker/platform');
-        const indicators = await getClaudeAuthIndicators();
-        if (indicators.credentials?.hasOAuthToken) {
-          authType = 'oauth';
-        } else {
-          authType = 'cli';
-        }
-      }
-    }

    res.json({
      success: true,
      authenticated,
-      authType,
      error: errorMessage || undefined,
    });
  } catch (error) {
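Because authType is removed here and from the SetupAPI and HttpApiClient typings later in this compare, callers of the verify endpoint now receive only three fields. A hedged sketch of a direct caller (the base URL and plain fetch are assumptions; the app itself goes through HttpApiClient.post):

```typescript
interface VerifyClaudeAuthResponse {
  success: boolean;
  authenticated: boolean;
  error?: string; // authType is no longer part of the payload
}

// Illustrative only; mirrors the route shown in the HttpApiClient hunk below.
async function verifyClaudeAuth(authMethod?: 'cli' | 'api_key'): Promise<VerifyClaudeAuthResponse> {
  const res = await fetch('http://localhost:3000/api/setup/verify-claude-auth', {
    method: 'POST',
    headers: { 'Content-Type': 'application/json' },
    body: JSON.stringify({ authMethod }),
  });
  return (await res.json()) as VerifyClaudeAuthResponse;
}
```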
@@ -69,29 +69,6 @@ export function SandboxRiskDialog({ open, onConfirm, onDeny }: SandboxRiskDialog
            For safer operation, consider running Automaker in Docker. See the README for
            instructions.
          </p>

-          <div className="bg-muted/50 border border-border rounded-lg p-4 space-y-2">
-            <p className="text-sm font-medium text-foreground">
-              Already running in Docker? Try these troubleshooting steps:
-            </p>
-            <ul className="text-sm text-muted-foreground list-disc list-inside space-y-1">
-              <li>
-                Ensure <code className="bg-muted px-1 rounded">IS_CONTAINERIZED=true</code> is
-                set in your docker-compose environment
-              </li>
-              <li>
-                Verify the server container has the environment variable:{' '}
-                <code className="bg-muted px-1 rounded">
-                  docker exec automaker-server printenv IS_CONTAINERIZED
-                </code>
-              </li>
-              <li>Rebuild and restart containers if you recently changed the configuration</li>
-              <li>
-                Check the server logs for startup messages:{' '}
-                <code className="bg-muted px-1 rounded">docker-compose logs server</code>
-              </li>
-            </ul>
-          </div>
        </div>
      </DialogDescription>
    </DialogHeader>
@@ -1275,10 +1275,8 @@ export function BoardView() {
          maxConcurrency={maxConcurrency}
          runningAgentsCount={runningAutoTasks.length}
          onConcurrencyChange={(newMaxConcurrency) => {
-            if (currentProject) {
-              // If selectedWorktree is undefined or it's the main worktree, branchName will be null.
-              // Otherwise, use the branch name.
-              const branchName = selectedWorktree?.isMain === false ? selectedWorktree.branch : null;
+            if (currentProject && selectedWorktree) {
+              const branchName = selectedWorktree.isMain ? null : selectedWorktree.branch;
              setMaxConcurrencyForWorktree(currentProject.id, branchName, newMaxConcurrency);

              // Persist to server settings so capacity checks use the correct value
@@ -59,7 +59,6 @@ export function ClaudeSetupStep({ onNext, onBack, onSkip }: ClaudeSetupStepProps
  // CLI Verification state
  const [cliVerificationStatus, setCliVerificationStatus] = useState<VerificationStatus>('idle');
  const [cliVerificationError, setCliVerificationError] = useState<string | null>(null);
-  const [cliAuthType, setCliAuthType] = useState<'oauth' | 'cli' | null>(null);

  // API Key Verification state
  const [apiKeyVerificationStatus, setApiKeyVerificationStatus] =
@@ -120,7 +119,6 @@ export function ClaudeSetupStep({ onNext, onBack, onSkip }: ClaudeSetupStepProps
  const verifyCliAuth = useCallback(async () => {
    setCliVerificationStatus('verifying');
    setCliVerificationError(null);
-    setCliAuthType(null);

    try {
      const api = getElectronAPI();
@@ -140,21 +138,12 @@ export function ClaudeSetupStep({ onNext, onBack, onSkip }: ClaudeSetupStepProps

      if (result.authenticated && !hasLimitReachedError) {
        setCliVerificationStatus('verified');
-        // Store the auth type for displaying specific success message
-        const authType = result.authType === 'oauth' ? 'oauth' : 'cli';
-        setCliAuthType(authType);
        setClaudeAuthStatus({
          authenticated: true,
-          method: authType === 'oauth' ? 'oauth_token' : 'cli_authenticated',
+          method: 'cli_authenticated',
          hasCredentialsFile: claudeAuthStatus?.hasCredentialsFile || false,
-          oauthTokenValid: authType === 'oauth',
        });
-        // Show specific success message based on auth type
-        if (authType === 'oauth') {
-          toast.success('Claude Code subscription detected and verified!');
-        } else {
-          toast.success('Claude CLI authentication verified!');
-        }
+        toast.success('Claude CLI authentication verified!');
      } else {
        setCliVerificationStatus('error');
        setCliVerificationError(
@@ -447,15 +436,9 @@ export function ClaudeSetupStep({ onNext, onBack, onSkip }: ClaudeSetupStepProps
                <div className="flex items-center gap-3 p-4 rounded-lg bg-green-500/10 border border-green-500/20">
                  <CheckCircle2 className="w-5 h-5 text-green-500" />
                  <div>
-                    <p className="font-medium text-foreground">
-                      {cliAuthType === 'oauth'
-                        ? 'Claude Code subscription verified!'
-                        : 'CLI Authentication verified!'}
-                    </p>
+                    <p className="font-medium text-foreground">CLI Authentication verified!</p>
                    <p className="text-sm text-muted-foreground">
-                      {cliAuthType === 'oauth'
-                        ? 'Your Claude Code subscription is active and ready to use.'
-                        : 'Your Claude CLI is working correctly.'}
+                      Your Claude CLI is working correctly.
                    </p>
                  </div>
                </div>
@@ -1442,7 +1442,6 @@ interface SetupAPI {
  verifyClaudeAuth: (authMethod?: 'cli' | 'api_key') => Promise<{
    success: boolean;
    authenticated: boolean;
-    authType?: 'oauth' | 'api_key' | 'cli';
    error?: string;
  }>;
  getGhStatus?: () => Promise<{
@@ -1350,7 +1350,6 @@ export class HttpApiClient implements ElectronAPI {
  ): Promise<{
    success: boolean;
    authenticated: boolean;
-    authType?: 'oauth' | 'api_key' | 'cli';
    error?: string;
  }> => this.post('/api/setup/verify-claude-auth', { authMethod, apiKey }),

@@ -21,9 +21,13 @@ services:
      # - ~/.local/share/opencode:/home/automaker/.local/share/opencode
      # - ~/.config/opencode:/home/automaker/.config/opencode

-      # Playwright browser cache - persists installed browsers across container restarts
-      # Run 'npx playwright install --with-deps chromium' once, and it will persist
+      # ===== Playwright Browser Cache (Optional) =====
+      # Playwright Chromium is PRE-INSTALLED in the Docker image for automated testing.
+      # Uncomment below to persist browser cache across container rebuilds (saves ~300MB download):
      # - playwright-cache:/home/automaker/.cache/ms-playwright
+      #
+      # To update Playwright browsers manually:
+      #   docker exec --user automaker -w /app automaker-server npx playwright install chromium
    environment:
      # Set root directory for all projects and file operations
      # Users can only create/open projects within this directory
@@ -37,6 +41,7 @@ services:
      # - CURSOR_API_KEY=${CURSOR_API_KEY:-}

volumes:
-  # Playwright cache volume (persists Chromium installs)
+  # Playwright cache volume - optional, persists browser updates across container rebuilds
+  # Uncomment if you mounted the playwright-cache volume above
  # playwright-cache:
  #   name: automaker-playwright-cache
@@ -134,8 +134,6 @@ export {
  findClaudeCliPath,
  getClaudeAuthIndicators,
  type ClaudeAuthIndicators,
-  type FileCheckResult,
-  type DirectoryCheckResult,
  findCodexCliPath,
  getCodexAuthIndicators,
  type CodexAuthIndicators,
@@ -976,27 +976,6 @@ export async function findGitBashPath(): Promise<string | null> {
  return findFirstExistingPath(getGitBashPaths());
}

-/**
- * Details about a file check performed during auth detection
- */
-export interface FileCheckResult {
-  path: string;
-  exists: boolean;
-  readable: boolean;
-  error?: string;
-}
-
-/**
- * Details about a directory check performed during auth detection
- */
-export interface DirectoryCheckResult {
-  path: string;
-  exists: boolean;
-  readable: boolean;
-  entryCount: number;
-  error?: string;
-}
-
/**
 * Get Claude authentication status by checking various indicators
 */
@@ -1009,165 +988,67 @@ export interface ClaudeAuthIndicators {
    hasOAuthToken: boolean;
    hasApiKey: boolean;
  } | null;
-  /** Detailed information about what was checked */
-  checks: {
-    settingsFile: FileCheckResult;
-    statsCache: FileCheckResult & { hasDailyActivity?: boolean };
-    projectsDir: DirectoryCheckResult;
-    credentialFiles: FileCheckResult[];
-  };
}

export async function getClaudeAuthIndicators(): Promise<ClaudeAuthIndicators> {
-  const settingsPath = getClaudeSettingsPath();
-  const statsCachePath = getClaudeStatsCachePath();
-  const projectsDir = getClaudeProjectsDir();
-  const credentialPaths = getClaudeCredentialPaths();
-
-  // Initialize checks with paths
-  const settingsFileCheck: FileCheckResult = {
-    path: settingsPath,
-    exists: false,
-    readable: false,
-  };
-
-  const statsCacheCheck: FileCheckResult & { hasDailyActivity?: boolean } = {
-    path: statsCachePath,
-    exists: false,
-    readable: false,
-  };
-
-  const projectsDirCheck: DirectoryCheckResult = {
-    path: projectsDir,
-    exists: false,
-    readable: false,
-    entryCount: 0,
-  };
-
-  const credentialFileChecks: FileCheckResult[] = credentialPaths.map((p) => ({
-    path: p,
-    exists: false,
-    readable: false,
-  }));

  const result: ClaudeAuthIndicators = {
    hasCredentialsFile: false,
    hasSettingsFile: false,
    hasStatsCacheWithActivity: false,
    hasProjectsSessions: false,
    credentials: null,
-    checks: {
-      settingsFile: settingsFileCheck,
-      statsCache: statsCacheCheck,
-      projectsDir: projectsDirCheck,
-      credentialFiles: credentialFileChecks,
-    },
  };

  // Check settings file
-  // First check existence, then try to read to confirm it's actually readable
  try {
-    if (await systemPathAccess(settingsPath)) {
-      settingsFileCheck.exists = true;
-      // Try to actually read the file to confirm read permissions
-      try {
-        await systemPathReadFile(settingsPath);
-        settingsFileCheck.readable = true;
-        result.hasSettingsFile = true;
-      } catch (readErr) {
-        // File exists but cannot be read (permission denied, etc.)
-        settingsFileCheck.readable = false;
-        settingsFileCheck.error = `Cannot read: ${readErr instanceof Error ? readErr.message : String(readErr)}`;
-      }
+    if (await systemPathAccess(getClaudeSettingsPath())) {
+      result.hasSettingsFile = true;
    }
-  } catch (err) {
-    settingsFileCheck.error = err instanceof Error ? err.message : String(err);
+  } catch {
+    // Ignore errors
  }

  // Check stats cache for recent activity
  try {
-    const statsContent = await systemPathReadFile(statsCachePath);
-    statsCacheCheck.exists = true;
-    statsCacheCheck.readable = true;
-    try {
-      const stats = JSON.parse(statsContent);
-      if (stats.dailyActivity && stats.dailyActivity.length > 0) {
-        statsCacheCheck.hasDailyActivity = true;
-        result.hasStatsCacheWithActivity = true;
-      } else {
-        statsCacheCheck.hasDailyActivity = false;
-      }
-    } catch (parseErr) {
-      statsCacheCheck.error = `JSON parse error: ${parseErr instanceof Error ? parseErr.message : String(parseErr)}`;
-    }
-  } catch (err) {
-    if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
-      statsCacheCheck.exists = false;
-    } else {
-      statsCacheCheck.error = err instanceof Error ? err.message : String(err);
-    }
+    const statsContent = await systemPathReadFile(getClaudeStatsCachePath());
+    const stats = JSON.parse(statsContent);
+    if (stats.dailyActivity && stats.dailyActivity.length > 0) {
+      result.hasStatsCacheWithActivity = true;
    }
+  } catch {
+    // Ignore errors
  }

  // Check for sessions in projects directory
  try {
-    const sessions = await systemPathReaddir(projectsDir);
-    projectsDirCheck.exists = true;
-    projectsDirCheck.readable = true;
-    projectsDirCheck.entryCount = sessions.length;
+    const sessions = await systemPathReaddir(getClaudeProjectsDir());
    if (sessions.length > 0) {
      result.hasProjectsSessions = true;
    }
-  } catch (err) {
-    if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
-      projectsDirCheck.exists = false;
-    } else {
-      projectsDirCheck.error = err instanceof Error ? err.message : String(err);
-    }
+  } catch {
+    // Ignore errors
  }

  // Check credentials files
-  // We iterate through all credential paths and only stop when we find a file
-  // that contains actual credentials (OAuth tokens or API keys). An empty or
-  // token-less file should not prevent checking subsequent credential paths.
-  for (let i = 0; i < credentialPaths.length; i++) {
-    const credPath = credentialPaths[i];
-    const credCheck = credentialFileChecks[i];
+  const credentialPaths = getClaudeCredentialPaths();
+  for (const credPath of credentialPaths) {
    try {
      const content = await systemPathReadFile(credPath);
-      credCheck.exists = true;
-      credCheck.readable = true;
-      try {
-        const credentials = JSON.parse(content);
-        // Support multiple credential formats:
-        // 1. Claude Code CLI format: { claudeAiOauth: { accessToken, refreshToken } }
-        // 2. Legacy format: { oauth_token } or { access_token }
-        // 3. API key format: { api_key }
-        const hasClaudeOauth = !!credentials.claudeAiOauth?.accessToken;
-        const hasLegacyOauth = !!(credentials.oauth_token || credentials.access_token);
-        const hasOAuthToken = hasClaudeOauth || hasLegacyOauth;
-        const hasApiKey = !!credentials.api_key;
-
-        // Only consider this a valid credentials file if it actually contains tokens
-        // An empty JSON file ({}) or file without tokens should not stop us from
-        // checking subsequent credential paths
-        if (hasOAuthToken || hasApiKey) {
-          result.hasCredentialsFile = true;
-          result.credentials = {
-            hasOAuthToken,
-            hasApiKey,
-          };
-          break; // Found valid credentials, stop searching
-        }
-        // File exists and is valid JSON but contains no tokens - continue checking other paths
-      } catch (parseErr) {
-        credCheck.error = `JSON parse error: ${parseErr instanceof Error ? parseErr.message : String(parseErr)}`;
-      }
-    } catch (err) {
-      if ((err as NodeJS.ErrnoException).code === 'ENOENT') {
-        credCheck.exists = false;
-      } else {
-        credCheck.error = err instanceof Error ? err.message : String(err);
-      }
+      const credentials = JSON.parse(content);
+      result.hasCredentialsFile = true;
+      // Support multiple credential formats:
+      // 1. Claude Code CLI format: { claudeAiOauth: { accessToken, refreshToken } }
+      // 2. Legacy format: { oauth_token } or { access_token }
+      // 3. API key format: { api_key }
+      const hasClaudeOauth = !!credentials.claudeAiOauth?.accessToken;
+      const hasLegacyOauth = !!(credentials.oauth_token || credentials.access_token);
+      result.credentials = {
+        hasOAuthToken: hasClaudeOauth || hasLegacyOauth,
+        hasApiKey: !!credentials.api_key,
+      };
+      break;
+    } catch {
+      // Continue to next path
    }
  }

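With the checks block and the FileCheckResult/DirectoryCheckResult types gone, the indicators object reduces to the flags and credential summary shown above; the deleted test file that follows exercised the removed per-path detail. For reference, the remaining shape as reconstructed from this hunk:

```typescript
// Shape after this change, per the hunk above (comments are descriptive only).
interface ClaudeAuthIndicators {
  hasCredentialsFile: boolean;        // a credentials file parsed successfully
  hasSettingsFile: boolean;           // the Claude settings file is accessible
  hasStatsCacheWithActivity: boolean; // stats cache exists with dailyActivity entries
  hasProjectsSessions: boolean;       // projects directory contains at least one entry
  credentials: {
    hasOAuthToken: boolean;
    hasApiKey: boolean;
  } | null;
}
```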
@@ -1,761 +0,0 @@
|
|||||||
/**
|
|
||||||
* Unit tests for OAuth credential detection scenarios
|
|
||||||
*
|
|
||||||
* Tests the various Claude credential detection formats including:
|
|
||||||
* - Claude Code CLI OAuth format (claudeAiOauth)
|
|
||||||
* - Legacy OAuth token format (oauth_token, access_token)
|
|
||||||
* - API key format (api_key)
|
|
||||||
* - Invalid/malformed credential files
|
|
||||||
*
|
|
||||||
* These tests use real temp directories to avoid complex fs mocking issues.
|
|
||||||
*/
|
|
||||||
|
|
||||||
import { describe, it, expect, beforeEach, afterEach, vi } from 'vitest';
|
|
||||||
import fs from 'fs/promises';
|
|
||||||
import path from 'path';
|
|
||||||
import os from 'os';
|
|
||||||
|
|
||||||
describe('OAuth Credential Detection', () => {
|
|
||||||
let tempDir: string;
|
|
||||||
let originalHomedir: () => string;
|
|
||||||
let mockClaudeDir: string;
|
|
||||||
let mockCodexDir: string;
|
|
||||||
let mockOpenCodeDir: string;
|
|
||||||
|
|
||||||
beforeEach(async () => {
|
|
||||||
// Reset modules to get fresh state
|
|
||||||
vi.resetModules();
|
|
||||||
|
|
||||||
// Create a temporary directory
|
|
||||||
tempDir = await fs.mkdtemp(path.join(os.tmpdir(), 'oauth-detection-test-'));
|
|
||||||
|
|
||||||
// Create mock home directory structure
|
|
||||||
mockClaudeDir = path.join(tempDir, '.claude');
|
|
||||||
mockCodexDir = path.join(tempDir, '.codex');
|
|
||||||
mockOpenCodeDir = path.join(tempDir, '.local', 'share', 'opencode');
|
|
||||||
|
|
||||||
await fs.mkdir(mockClaudeDir, { recursive: true });
|
|
||||||
await fs.mkdir(mockCodexDir, { recursive: true });
|
|
||||||
await fs.mkdir(mockOpenCodeDir, { recursive: true });
|
|
||||||
|
|
||||||
// Mock os.homedir to return our temp directory
|
|
||||||
originalHomedir = os.homedir;
|
|
||||||
vi.spyOn(os, 'homedir').mockReturnValue(tempDir);
|
|
||||||
});
|
|
||||||
|
|
||||||
afterEach(async () => {
|
|
||||||
vi.restoreAllMocks();
|
|
||||||
// Clean up temp directory
|
|
||||||
try {
|
|
||||||
await fs.rm(tempDir, { recursive: true, force: true });
|
|
||||||
} catch {
|
|
||||||
// Ignore cleanup errors
|
|
||||||
}
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('getClaudeAuthIndicators', () => {
|
|
||||||
it('should detect Claude Code CLI OAuth format (claudeAiOauth)', async () => {
|
|
||||||
const credentialsContent = JSON.stringify({
|
|
||||||
claudeAiOauth: {
|
|
||||||
accessToken: 'oauth-access-token-12345',
|
|
||||||
refreshToken: 'oauth-refresh-token-67890',
|
|
||||||
expiresAt: Date.now() + 3600000,
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), credentialsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials).not.toBeNull();
|
|
||||||
expect(indicators.credentials?.hasOAuthToken).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect legacy OAuth token format (oauth_token)', async () => {
|
|
||||||
const credentialsContent = JSON.stringify({
|
|
||||||
oauth_token: 'legacy-oauth-token-abcdef',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), credentialsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasOAuthToken).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect legacy access_token format', async () => {
|
|
||||||
const credentialsContent = JSON.stringify({
|
|
||||||
access_token: 'legacy-access-token-xyz',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), credentialsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasOAuthToken).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect API key format', async () => {
|
|
||||||
const credentialsContent = JSON.stringify({
|
|
||||||
api_key: 'sk-ant-api03-xxxxxxxxxxxx',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), credentialsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasOAuthToken).toBe(false);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect both OAuth and API key when present', async () => {
|
|
||||||
const credentialsContent = JSON.stringify({
|
|
||||||
claudeAiOauth: {
|
|
||||||
accessToken: 'oauth-token',
|
|
||||||
refreshToken: 'refresh-token',
|
|
||||||
},
|
|
||||||
api_key: 'sk-ant-api03-xxxxxxxxxxxx',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), credentialsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasOAuthToken).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle missing credentials file gracefully', async () => {
|
|
||||||
// No credentials file created
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(false);
|
|
||||||
expect(indicators.credentials).toBeNull();
|
|
||||||
expect(indicators.checks.credentialFiles).toBeDefined();
|
|
||||||
expect(indicators.checks.credentialFiles.length).toBeGreaterThan(0);
|
|
||||||
expect(indicators.checks.credentialFiles[0].exists).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle malformed JSON in credentials file', async () => {
|
|
||||||
const malformedContent = '{ invalid json }';
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), malformedContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// File exists but parsing fails
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(false);
|
|
||||||
expect(indicators.credentials).toBeNull();
|
|
||||||
expect(indicators.checks.credentialFiles[0].exists).toBe(true);
|
|
||||||
expect(indicators.checks.credentialFiles[0].error).toContain('JSON parse error');
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle empty credentials file', async () => {
|
|
||||||
const emptyContent = JSON.stringify({});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), emptyContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// Empty credentials file ({}) should NOT be treated as having credentials
|
|
||||||
// because it contains no actual tokens. This allows the system to continue
|
|
||||||
// checking subsequent credential paths that might have valid tokens.
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(false);
|
|
||||||
expect(indicators.credentials).toBeNull();
|
|
||||||
// But the file should still show as existing and readable in the checks
|
|
||||||
expect(indicators.checks.credentialFiles[0].exists).toBe(true);
|
|
||||||
expect(indicators.checks.credentialFiles[0].readable).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle credentials file with null values', async () => {
|
|
||||||
const nullContent = JSON.stringify({
|
|
||||||
claudeAiOauth: null,
|
|
||||||
api_key: null,
|
|
||||||
oauth_token: null,
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), nullContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// File with all null values should NOT be treated as having credentials
|
|
||||||
// because null values are not valid tokens
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(false);
|
|
||||||
expect(indicators.credentials).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle credentials with empty string values', async () => {
|
|
||||||
const emptyStrings = JSON.stringify({
|
|
||||||
claudeAiOauth: {
|
|
||||||
accessToken: '',
|
|
||||||
refreshToken: '',
|
|
||||||
},
|
|
||||||
api_key: '',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), emptyStrings);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// Empty strings should NOT be treated as having credentials
|
|
||||||
// This allows checking subsequent credential paths for valid tokens
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(false);
|
|
||||||
expect(indicators.credentials).toBeNull();
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect settings file presence', async () => {
|
|
||||||
await fs.writeFile(
|
|
||||||
path.join(mockClaudeDir, 'settings.json'),
|
|
||||||
JSON.stringify({ theme: 'dark' })
|
|
||||||
);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasSettingsFile).toBe(true);
|
|
||||||
expect(indicators.checks.settingsFile.exists).toBe(true);
|
|
||||||
expect(indicators.checks.settingsFile.readable).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect stats cache with activity', async () => {
|
|
||||||
const statsContent = JSON.stringify({
|
|
||||||
dailyActivity: [
|
|
||||||
{ date: '2025-01-15', messagesCount: 10 },
|
|
||||||
{ date: '2025-01-16', messagesCount: 5 },
|
|
||||||
],
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, 'stats-cache.json'), statsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasStatsCacheWithActivity).toBe(true);
|
|
||||||
expect(indicators.checks.statsCache.exists).toBe(true);
|
|
||||||
expect(indicators.checks.statsCache.hasDailyActivity).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect stats cache without activity', async () => {
|
|
||||||
const statsContent = JSON.stringify({
|
|
||||||
dailyActivity: [],
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, 'stats-cache.json'), statsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasStatsCacheWithActivity).toBe(false);
|
|
||||||
expect(indicators.checks.statsCache.exists).toBe(true);
|
|
||||||
expect(indicators.checks.statsCache.hasDailyActivity).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect project sessions', async () => {
|
|
||||||
const projectsDir = path.join(mockClaudeDir, 'projects');
|
|
||||||
await fs.mkdir(projectsDir, { recursive: true });
|
|
||||||
await fs.mkdir(path.join(projectsDir, 'session-1'));
|
|
||||||
await fs.mkdir(path.join(projectsDir, 'session-2'));
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasProjectsSessions).toBe(true);
|
|
||||||
expect(indicators.checks.projectsDir.exists).toBe(true);
|
|
||||||
expect(indicators.checks.projectsDir.entryCount).toBe(2);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should return comprehensive check details', async () => {
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// Verify all check detail objects are present
|
|
||||||
expect(indicators.checks).toBeDefined();
|
|
||||||
expect(indicators.checks.settingsFile).toBeDefined();
|
|
||||||
expect(indicators.checks.settingsFile.path).toContain('settings.json');
|
|
||||||
expect(indicators.checks.statsCache).toBeDefined();
|
|
||||||
expect(indicators.checks.statsCache.path).toContain('stats-cache.json');
|
|
||||||
expect(indicators.checks.projectsDir).toBeDefined();
|
|
||||||
expect(indicators.checks.projectsDir.path).toContain('projects');
|
|
||||||
expect(indicators.checks.credentialFiles).toBeDefined();
|
|
||||||
expect(Array.isArray(indicators.checks.credentialFiles)).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should try both .credentials.json and credentials.json paths', async () => {
|
|
||||||
// Write to credentials.json (without leading dot)
|
|
||||||
const credentialsContent = JSON.stringify({
|
|
||||||
api_key: 'sk-test-key',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, 'credentials.json'), credentialsContent);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// Should find credentials in the second path
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should prefer first credentials file if both exist', async () => {
|
|
||||||
// Write OAuth to .credentials.json (first path checked)
|
|
||||||
await fs.writeFile(
|
|
||||||
path.join(mockClaudeDir, '.credentials.json'),
|
|
||||||
JSON.stringify({
|
|
||||||
claudeAiOauth: {
|
|
||||||
accessToken: 'oauth-token',
|
|
||||||
refreshToken: 'refresh-token',
|
|
||||||
},
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
// Write API key to credentials.json (second path)
|
|
||||||
await fs.writeFile(
|
|
||||||
path.join(mockClaudeDir, 'credentials.json'),
|
|
||||||
JSON.stringify({
|
|
||||||
api_key: 'sk-test-key',
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// Should use first file (.credentials.json) which has OAuth
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasOAuthToken).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should check second credentials file if first file has no tokens', async () => {
|
|
||||||
// Write empty/token-less content to .credentials.json (first path checked)
|
|
||||||
// This tests the bug fix: previously, an empty JSON file would stop the search
|
|
||||||
await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), JSON.stringify({}));
|
|
||||||
|
|
||||||
// Write actual credentials to credentials.json (second path)
|
|
||||||
await fs.writeFile(
|
|
||||||
path.join(mockClaudeDir, 'credentials.json'),
|
|
||||||
JSON.stringify({
|
|
||||||
api_key: 'sk-test-key-from-second-file',
|
|
||||||
})
|
|
||||||
);
|
|
||||||
|
|
||||||
const { getClaudeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getClaudeAuthIndicators();
|
|
||||||
|
|
||||||
// Should find credentials in second file since first file has no tokens
|
|
||||||
expect(indicators.hasCredentialsFile).toBe(true);
|
|
||||||
expect(indicators.credentials?.hasApiKey).toBe(true);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('getCodexAuthIndicators', () => {
|
|
||||||
it('should detect OAuth token in Codex auth file', async () => {
|
|
||||||
const authContent = JSON.stringify({
|
|
||||||
access_token: 'codex-oauth-token-12345',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockCodexDir, 'auth.json'), authContent);
|
|
||||||
|
|
||||||
const { getCodexAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getCodexAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasAuthFile).toBe(true);
|
|
||||||
expect(indicators.hasOAuthToken).toBe(true);
|
|
||||||
expect(indicators.hasApiKey).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect API key in Codex auth file', async () => {
|
|
||||||
const authContent = JSON.stringify({
|
|
||||||
OPENAI_API_KEY: 'sk-xxxxxxxxxxxxxxxx',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockCodexDir, 'auth.json'), authContent);
|
|
||||||
|
|
||||||
const { getCodexAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getCodexAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasAuthFile).toBe(true);
|
|
||||||
expect(indicators.hasOAuthToken).toBe(false);
|
|
||||||
expect(indicators.hasApiKey).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect nested tokens in Codex auth file', async () => {
|
|
||||||
const authContent = JSON.stringify({
|
|
||||||
tokens: {
|
|
||||||
oauth_token: 'nested-oauth-token',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockCodexDir, 'auth.json'), authContent);
|
|
||||||
|
|
||||||
const { getCodexAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getCodexAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasAuthFile).toBe(true);
|
|
||||||
expect(indicators.hasOAuthToken).toBe(true);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should handle missing Codex auth file', async () => {
|
|
||||||
// No auth file created
|
|
||||||
const { getCodexAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getCodexAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasAuthFile).toBe(false);
|
|
||||||
expect(indicators.hasOAuthToken).toBe(false);
|
|
||||||
expect(indicators.hasApiKey).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
it('should detect api_key field in Codex auth', async () => {
|
|
||||||
const authContent = JSON.stringify({
|
|
||||||
api_key: 'sk-api-key-value',
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockCodexDir, 'auth.json'), authContent);
|
|
||||||
|
|
||||||
const { getCodexAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getCodexAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasAuthFile).toBe(true);
|
|
||||||
expect(indicators.hasApiKey).toBe(true);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
describe('getOpenCodeAuthIndicators', () => {
|
|
||||||
it('should detect provider-specific OAuth credentials', async () => {
|
|
||||||
const authContent = JSON.stringify({
|
|
||||||
anthropic: {
|
|
||||||
type: 'oauth',
|
|
||||||
access: 'oauth-access-token',
|
|
||||||
refresh: 'oauth-refresh-token',
|
|
||||||
},
|
|
||||||
});
|
|
||||||
|
|
||||||
await fs.writeFile(path.join(mockOpenCodeDir, 'auth.json'), authContent);
|
|
||||||
|
|
||||||
const { getOpenCodeAuthIndicators } = await import('../src/system-paths');
|
|
||||||
const indicators = await getOpenCodeAuthIndicators();
|
|
||||||
|
|
||||||
expect(indicators.hasAuthFile).toBe(true);
|
|
||||||
expect(indicators.hasOAuthToken).toBe(true);
|
|
||||||
expect(indicators.hasApiKey).toBe(false);
|
|
||||||
});
|
|
||||||
|
|
||||||
    it('should detect GitHub Copilot refresh token as OAuth', async () => {
      const authContent = JSON.stringify({
        'github-copilot': {
          type: 'oauth',
          access: '', // Empty access token
          refresh: 'gh-refresh-token', // But has refresh token
        },
      });

      await fs.writeFile(path.join(mockOpenCodeDir, 'auth.json'), authContent);

      const { getOpenCodeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getOpenCodeAuthIndicators();

      expect(indicators.hasAuthFile).toBe(true);
      expect(indicators.hasOAuthToken).toBe(true);
    });

    it('should detect provider-specific API key credentials', async () => {
      const authContent = JSON.stringify({
        openai: {
          type: 'api_key',
          key: 'sk-xxxxxxxxxxxx',
        },
      });

      await fs.writeFile(path.join(mockOpenCodeDir, 'auth.json'), authContent);

      const { getOpenCodeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getOpenCodeAuthIndicators();

      expect(indicators.hasAuthFile).toBe(true);
      expect(indicators.hasOAuthToken).toBe(false);
      expect(indicators.hasApiKey).toBe(true);
    });

    it('should detect multiple providers', async () => {
      const authContent = JSON.stringify({
        anthropic: {
          type: 'oauth',
          access: 'anthropic-token',
          refresh: 'refresh-token',
        },
        openai: {
          type: 'api_key',
          key: 'sk-xxxxxxxxxxxx',
        },
      });

      await fs.writeFile(path.join(mockOpenCodeDir, 'auth.json'), authContent);

      const { getOpenCodeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getOpenCodeAuthIndicators();

      expect(indicators.hasAuthFile).toBe(true);
      expect(indicators.hasOAuthToken).toBe(true);
      expect(indicators.hasApiKey).toBe(true);
    });

    it('should handle missing OpenCode auth file', async () => {
      // No auth file created
      const { getOpenCodeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getOpenCodeAuthIndicators();

      expect(indicators.hasAuthFile).toBe(false);
      expect(indicators.hasOAuthToken).toBe(false);
      expect(indicators.hasApiKey).toBe(false);
    });

    it('should handle legacy top-level OAuth keys', async () => {
      const authContent = JSON.stringify({
        access_token: 'legacy-access-token',
      });

      await fs.writeFile(path.join(mockOpenCodeDir, 'auth.json'), authContent);

      const { getOpenCodeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getOpenCodeAuthIndicators();

      expect(indicators.hasAuthFile).toBe(true);
      expect(indicators.hasOAuthToken).toBe(true);
    });

    it('should detect copilot provider OAuth', async () => {
      const authContent = JSON.stringify({
        copilot: {
          type: 'oauth',
          access: 'copilot-access-token',
          refresh: 'copilot-refresh-token',
        },
      });

      await fs.writeFile(path.join(mockOpenCodeDir, 'auth.json'), authContent);

      const { getOpenCodeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getOpenCodeAuthIndicators();

      expect(indicators.hasAuthFile).toBe(true);
      expect(indicators.hasOAuthToken).toBe(true);
    });
  });
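Taken together, these cases pin down the auth.json shape the OpenCode detector has to understand: a map of provider names to entries with a `type` of `oauth` or `api_key`, plus a legacy top-level `access_token` form, where an OAuth entry counts even when only the refresh token is present. The following is only a minimal sketch of a scanner consistent with those expectations; it is not the implementation in `src/system-paths`, and any field beyond the ones exercised above (`type`, `access`, `refresh`, `key`, `access_token`) is an assumption.

```typescript
import fs from 'fs/promises';

// Hypothetical return shape mirroring the indicators asserted in the tests above.
interface OpenCodeAuthIndicators {
  hasAuthFile: boolean;
  hasOAuthToken: boolean;
  hasApiKey: boolean;
}

// Sketch only: scan an OpenCode-style auth.json for OAuth and API-key material.
async function scanOpenCodeAuth(authPath: string): Promise<OpenCodeAuthIndicators> {
  let raw: string;
  try {
    raw = await fs.readFile(authPath, 'utf8');
  } catch {
    // No auth file at all.
    return { hasAuthFile: false, hasOAuthToken: false, hasApiKey: false };
  }

  const data = JSON.parse(raw) as Record<string, unknown>;
  let hasOAuthToken = Boolean(data.access_token); // legacy top-level key
  let hasApiKey = false;

  for (const entry of Object.values(data)) {
    if (typeof entry !== 'object' || entry === null) continue;
    const provider = entry as { type?: string; access?: string; refresh?: string; key?: string };
    // An OAuth entry counts if it has either an access or a refresh token.
    if (provider.type === 'oauth' && (provider.access || provider.refresh)) hasOAuthToken = true;
    if (provider.type === 'api_key' && provider.key) hasApiKey = true;
  }

  return { hasAuthFile: true, hasOAuthToken, hasApiKey };
}
```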
  describe('Credential path helpers', () => {
    it('should return correct Claude credential paths', async () => {
      const { getClaudeCredentialPaths, getClaudeConfigDir } = await import('../src/system-paths');

      const configDir = getClaudeConfigDir();
      expect(configDir).toContain('.claude');

      const credPaths = getClaudeCredentialPaths();
      expect(credPaths.length).toBeGreaterThan(0);
      expect(credPaths.some((p) => p.includes('.credentials.json'))).toBe(true);
      expect(credPaths.some((p) => p.includes('credentials.json'))).toBe(true);
    });

    it('should return correct Codex auth path', async () => {
      const { getCodexAuthPath, getCodexConfigDir } = await import('../src/system-paths');

      const configDir = getCodexConfigDir();
      expect(configDir).toContain('.codex');

      const authPath = getCodexAuthPath();
      expect(authPath).toContain('.codex');
      expect(authPath).toContain('auth.json');
    });

    it('should return correct OpenCode auth path', async () => {
      const { getOpenCodeAuthPath, getOpenCodeConfigDir } = await import('../src/system-paths');

      const configDir = getOpenCodeConfigDir();
      expect(configDir).toContain('opencode');

      const authPath = getOpenCodeAuthPath();
      expect(authPath).toContain('opencode');
      expect(authPath).toContain('auth.json');
    });
  });
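The path-helper tests only assert substrings, so the exact directory layout is intentionally left open. Purely as an illustration of how a caller might use these helpers (the probing logic below is an assumption, not code from the project), the multi-candidate Claude paths lend themselves to a "first file that exists wins" check:

```typescript
import fs from 'fs/promises';
import { getClaudeCredentialPaths } from '../src/system-paths';

// Illustrative only: return the first Claude credential candidate that exists on disk, or null.
async function findExistingClaudeCredentials(): Promise<string | null> {
  for (const candidate of getClaudeCredentialPaths()) {
    try {
      await fs.access(candidate);
      return candidate;
    } catch {
      // Candidate not present; try the next one.
    }
  }
  return null;
}
```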
  describe('Edge cases for credential detection', () => {
    it('should handle credentials file with unexpected structure', async () => {
      const unexpectedContent = JSON.stringify({
        someUnexpectedKey: 'value',
        nested: {
          deeply: {
            unexpected: true,
          },
        },
      });

      await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), unexpectedContent);

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      // File with unexpected structure but no valid tokens should NOT be treated as having credentials
      expect(indicators.hasCredentialsFile).toBe(false);
      expect(indicators.credentials).toBeNull();
    });

    it('should handle array instead of object in credentials', async () => {
      const arrayContent = JSON.stringify(['token1', 'token2']);

      await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), arrayContent);

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      // Array is valid JSON but wrong structure - no valid tokens, so not treated as credentials file
      expect(indicators.hasCredentialsFile).toBe(false);
      expect(indicators.credentials).toBeNull();
    });

    it('should handle numeric values in credential fields', async () => {
      const numericContent = JSON.stringify({
        api_key: 12345,
        oauth_token: 67890,
      });

      await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), numericContent);

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      // Note: Current implementation uses JavaScript truthiness which accepts numbers
      // This documents the actual behavior - ideally would validate string type
      expect(indicators.hasCredentialsFile).toBe(true);
      // The implementation checks truthiness, not strict string type
      expect(indicators.credentials?.hasOAuthToken).toBe(true);
      expect(indicators.credentials?.hasApiKey).toBe(true);
    });

    it('should handle boolean values in credential fields', async () => {
      const booleanContent = JSON.stringify({
        api_key: true,
        oauth_token: false,
      });

      await fs.writeFile(path.join(mockClaudeDir, '.credentials.json'), booleanContent);

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      // Note: Current implementation uses JavaScript truthiness
      // api_key: true is truthy, oauth_token: false is falsy
      expect(indicators.hasCredentialsFile).toBe(true);
      expect(indicators.credentials?.hasOAuthToken).toBe(false); // false is falsy
      expect(indicators.credentials?.hasApiKey).toBe(true); // true is truthy
    });

    it('should handle malformed stats-cache.json gracefully', async () => {
      await fs.writeFile(path.join(mockClaudeDir, 'stats-cache.json'), '{ invalid json }');

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      expect(indicators.hasStatsCacheWithActivity).toBe(false);
      expect(indicators.checks.statsCache.exists).toBe(true);
      expect(indicators.checks.statsCache.error).toBeDefined();
    });

    it('should handle empty projects directory', async () => {
      const projectsDir = path.join(mockClaudeDir, 'projects');
      await fs.mkdir(projectsDir, { recursive: true });

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      expect(indicators.hasProjectsSessions).toBe(false);
      expect(indicators.checks.projectsDir.exists).toBe(true);
      expect(indicators.checks.projectsDir.entryCount).toBe(0);
    });
  });
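These edge cases spell out the current contract for the Claude credentials file: only recognised token fields make a file count as credentials, arrays and unrelated objects are rejected, and the value check is plain JavaScript truthiness rather than strict string validation. A rough sketch consistent with that documented behaviour follows; it is not the real implementation, and any field name beyond `claudeAiOauth`, `oauth_token`, and `api_key` (which the tests themselves use) would be an assumption.

```typescript
// Sketch of the truthiness-based classification the tests above document; not the project's code.
interface ClaudeCredentialFlags {
  hasOAuthToken: boolean;
  hasApiKey: boolean;
}

function classifyClaudeCredentials(parsed: unknown): ClaudeCredentialFlags | null {
  if (typeof parsed !== 'object' || parsed === null || Array.isArray(parsed)) {
    return null; // arrays and non-objects are rejected outright
  }
  const data = parsed as {
    claudeAiOauth?: { accessToken?: unknown; refreshToken?: unknown };
    oauth_token?: unknown;
    api_key?: unknown;
  };

  // Truthiness, not typeof === 'string': numbers like 12345 pass, false / '' / 0 do not.
  const hasOAuthToken = Boolean(data.claudeAiOauth?.accessToken || data.oauth_token);
  const hasApiKey = Boolean(data.api_key);

  // A file with neither recognised field is treated as "no credentials" even though it parses.
  return hasOAuthToken || hasApiKey ? { hasOAuthToken, hasApiKey } : null;
}
```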
  describe('Combined authentication scenarios', () => {
    it('should detect CLI authenticated state with settings + sessions', async () => {
      // Create settings file
      await fs.writeFile(
        path.join(mockClaudeDir, 'settings.json'),
        JSON.stringify({ theme: 'dark' })
      );

      // Create projects directory with sessions
      const projectsDir = path.join(mockClaudeDir, 'projects');
      await fs.mkdir(projectsDir, { recursive: true });
      await fs.mkdir(path.join(projectsDir, 'session-1'));

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      expect(indicators.hasSettingsFile).toBe(true);
      expect(indicators.hasProjectsSessions).toBe(true);
    });

    it('should detect recent activity indicating working auth', async () => {
      // Create stats cache with recent activity
      await fs.writeFile(
        path.join(mockClaudeDir, 'stats-cache.json'),
        JSON.stringify({
          dailyActivity: [{ date: new Date().toISOString().split('T')[0], messagesCount: 10 }],
        })
      );

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      expect(indicators.hasStatsCacheWithActivity).toBe(true);
    });

    it('should handle complete auth setup', async () => {
      // Create all auth indicators
      await fs.writeFile(
        path.join(mockClaudeDir, '.credentials.json'),
        JSON.stringify({
          claudeAiOauth: {
            accessToken: 'token',
            refreshToken: 'refresh',
          },
        })
      );
      await fs.writeFile(
        path.join(mockClaudeDir, 'settings.json'),
        JSON.stringify({ theme: 'dark' })
      );
      await fs.writeFile(
        path.join(mockClaudeDir, 'stats-cache.json'),
        JSON.stringify({ dailyActivity: [{ date: '2025-01-15', messagesCount: 5 }] })
      );
      const projectsDir = path.join(mockClaudeDir, 'projects');
      await fs.mkdir(projectsDir, { recursive: true });
      await fs.mkdir(path.join(projectsDir, 'session-1'));

      const { getClaudeAuthIndicators } = await import('../src/system-paths');
      const indicators = await getClaudeAuthIndicators();

      expect(indicators.hasCredentialsFile).toBe(true);
      expect(indicators.hasSettingsFile).toBe(true);
      expect(indicators.hasStatsCacheWithActivity).toBe(true);
      expect(indicators.hasProjectsSessions).toBe(true);
      expect(indicators.credentials?.hasOAuthToken).toBe(true);
    });
  });
});
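The combined scenarios show that authentication state is inferred from several weak signals rather than any single file. How a caller weighs those signals is up to the consumer; one hypothetical reading of the indicators (not the project's actual policy) might look like this:

```typescript
import { getClaudeAuthIndicators } from '../src/system-paths';

// Hypothetical interpretation of the indicators exercised above; the real decision logic may differ.
async function looksAuthenticated(): Promise<boolean> {
  const indicators = await getClaudeAuthIndicators();
  return (
    indicators.credentials?.hasOAuthToken === true || // explicit OAuth tokens on disk
    indicators.hasStatsCacheWithActivity ||           // recent usage implies working auth
    (indicators.hasSettingsFile && indicators.hasProjectsSessions) // CLI configured and used
  );
}
```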