Mirror of https://github.com/AutoMaker-Org/automaker.git, synced 2026-02-02 20:43:36 +00:00

Merge main into massive-terminal-upgrade

Resolves merge conflicts:

- apps/server/src/routes/terminal/common.ts: keep the randomBytes import; use @automaker/utils for createLogger
- apps/ui/eslint.config.mjs: use main's explicit globals list, with the XMLHttpRequest and MediaQueryListEvent additions
- apps/ui/src/components/views/terminal-view.tsx: keep our terminal improvements (killAllSessions, beforeunload handling, better error handling)
- apps/ui/src/config/terminal-themes.ts: keep our search highlight colors for all themes
- apps/ui/src/store/app-store.ts: keep our terminal settings persistence improvements (merge function)

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
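The app-store.ts resolution above keeps a custom merge function for terminal settings persistence. As a rough illustration of that pattern only — a minimal sketch assuming Zustand's persist middleware, where the store shape, names, and defaults are hypothetical, not the actual app-store.ts:

import { create } from 'zustand';
import { persist } from 'zustand/middleware';

// Hypothetical store shape for illustration only.
interface TerminalSettings {
  fontSize: number;
  theme: string;
}

interface AppState {
  terminalSettings: TerminalSettings;
  setTerminalSettings: (patch: Partial<TerminalSettings>) => void;
}

const defaultTerminalSettings: TerminalSettings = { fontSize: 14, theme: 'dark' };

export const useAppStore = create<AppState>()(
  persist(
    (set) => ({
      terminalSettings: defaultTerminalSettings,
      setTerminalSettings: (patch) =>
        set((state) => ({
          terminalSettings: { ...state.terminalSettings, ...patch },
        })),
    }),
    {
      name: 'app-store',
      // Merge persisted terminal settings over the defaults rather than
      // replacing them wholesale, so settings keys added in newer versions
      // keep their defaults when an older persisted blob is rehydrated.
      merge: (persisted, current) => {
        const p = (persisted ?? {}) as Partial<AppState>;
        return {
          ...current,
          ...p,
          terminalSettings: {
            ...current.terminalSettings,
            ...(p.terminalSettings ?? {}),
          },
        } as AppState;
      },
    }
  )
);

The diffs below are reconstructed from the commit's side-by-side view.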
@@ -1,7 +1,7 @@
-import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
-import path from "path";
-import fs from "fs/promises";
-import os from "os";
+import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
+import path from 'path';
+import fs from 'fs/promises';
+import os from 'os';
 import {
   getAutomakerDir,
   getFeaturesDir,
@@ -13,97 +13,89 @@ import {
   getAppSpecPath,
   getBranchTrackingPath,
   ensureAutomakerDir,
-} from "@/lib/automaker-paths.js";
+  getGlobalSettingsPath,
+  getCredentialsPath,
+  getProjectSettingsPath,
+  ensureDataDir,
+} from '@automaker/platform';

-describe("automaker-paths.ts", () => {
-  const projectPath = path.join("/test", "project");
+describe('automaker-paths.ts', () => {
+  const projectPath = path.join('/test', 'project');

-  describe("getAutomakerDir", () => {
-    it("should return path to .automaker directory", () => {
-      expect(getAutomakerDir(projectPath)).toBe(
-        path.join(projectPath, ".automaker")
-      );
-    });
+  describe('getAutomakerDir', () => {
+    it('should return path to .automaker directory', () => {
+      expect(getAutomakerDir(projectPath)).toBe(path.join(projectPath, '.automaker'));
+    });

-    it("should handle paths with trailing slashes", () => {
-      const pathWithSlash = path.join("/test", "project") + path.sep;
-      expect(getAutomakerDir(pathWithSlash)).toBe(
-        path.join(pathWithSlash, ".automaker")
-      );
-    });
-  });
+    it('should handle paths with trailing slashes', () => {
+      const pathWithSlash = path.join('/test', 'project') + path.sep;
+      expect(getAutomakerDir(pathWithSlash)).toBe(path.join(pathWithSlash, '.automaker'));
+    });
+  });

-  describe("getFeaturesDir", () => {
-    it("should return path to features directory", () => {
-      expect(getFeaturesDir(projectPath)).toBe(
-        path.join(projectPath, ".automaker", "features")
-      );
-    });
-  });
+  describe('getFeaturesDir', () => {
+    it('should return path to features directory', () => {
+      expect(getFeaturesDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'features'));
+    });
+  });

-  describe("getFeatureDir", () => {
-    it("should return path to specific feature directory", () => {
-      expect(getFeatureDir(projectPath, "feature-123")).toBe(
-        path.join(projectPath, ".automaker", "features", "feature-123")
-      );
-    });
+  describe('getFeatureDir', () => {
+    it('should return path to specific feature directory', () => {
+      expect(getFeatureDir(projectPath, 'feature-123')).toBe(
+        path.join(projectPath, '.automaker', 'features', 'feature-123')
+      );
+    });

-    it("should handle feature IDs with special characters", () => {
-      expect(getFeatureDir(projectPath, "my-feature_v2")).toBe(
-        path.join(projectPath, ".automaker", "features", "my-feature_v2")
-      );
-    });
-  });
+    it('should handle feature IDs with special characters', () => {
+      expect(getFeatureDir(projectPath, 'my-feature_v2')).toBe(
+        path.join(projectPath, '.automaker', 'features', 'my-feature_v2')
+      );
+    });
+  });

-  describe("getFeatureImagesDir", () => {
-    it("should return path to feature images directory", () => {
-      expect(getFeatureImagesDir(projectPath, "feature-123")).toBe(
-        path.join(projectPath, ".automaker", "features", "feature-123", "images")
-      );
-    });
-  });
+  describe('getFeatureImagesDir', () => {
+    it('should return path to feature images directory', () => {
+      expect(getFeatureImagesDir(projectPath, 'feature-123')).toBe(
+        path.join(projectPath, '.automaker', 'features', 'feature-123', 'images')
+      );
+    });
+  });

-  describe("getBoardDir", () => {
-    it("should return path to board directory", () => {
-      expect(getBoardDir(projectPath)).toBe(
-        path.join(projectPath, ".automaker", "board")
-      );
-    });
-  });
+  describe('getBoardDir', () => {
+    it('should return path to board directory', () => {
+      expect(getBoardDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'board'));
+    });
+  });

-  describe("getImagesDir", () => {
-    it("should return path to images directory", () => {
-      expect(getImagesDir(projectPath)).toBe(
-        path.join(projectPath, ".automaker", "images")
-      );
-    });
-  });
+  describe('getImagesDir', () => {
+    it('should return path to images directory', () => {
+      expect(getImagesDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'images'));
+    });
+  });

-  describe("getWorktreesDir", () => {
-    it("should return path to worktrees directory", () => {
-      expect(getWorktreesDir(projectPath)).toBe(
-        path.join(projectPath, ".automaker", "worktrees")
-      );
-    });
-  });
+  describe('getWorktreesDir', () => {
+    it('should return path to worktrees directory', () => {
+      expect(getWorktreesDir(projectPath)).toBe(path.join(projectPath, '.automaker', 'worktrees'));
+    });
+  });

-  describe("getAppSpecPath", () => {
-    it("should return path to app_spec.txt file", () => {
+  describe('getAppSpecPath', () => {
+    it('should return path to app_spec.txt file', () => {
       expect(getAppSpecPath(projectPath)).toBe(
-        path.join(projectPath, ".automaker", "app_spec.txt")
+        path.join(projectPath, '.automaker', 'app_spec.txt')
       );
     });
   });

-  describe("getBranchTrackingPath", () => {
-    it("should return path to active-branches.json file", () => {
+  describe('getBranchTrackingPath', () => {
+    it('should return path to active-branches.json file', () => {
       expect(getBranchTrackingPath(projectPath)).toBe(
-        path.join(projectPath, ".automaker", "active-branches.json")
+        path.join(projectPath, '.automaker', 'active-branches.json')
       );
     });
   });

-  describe("ensureAutomakerDir", () => {
+  describe('ensureAutomakerDir', () => {
     let testDir: string;

     beforeEach(async () => {
@@ -119,16 +111,16 @@ describe("automaker-paths.ts", () => {
       }
     });

-    it("should create automaker directory and return path", async () => {
+    it('should create automaker directory and return path', async () => {
       const result = await ensureAutomakerDir(testDir);

-      expect(result).toBe(path.join(testDir, ".automaker"));
+      expect(result).toBe(path.join(testDir, '.automaker'));
       const stats = await fs.stat(result);
       expect(stats.isDirectory()).toBe(true);
     });

-    it("should succeed if directory already exists", async () => {
-      const automakerDir = path.join(testDir, ".automaker");
+    it('should succeed if directory already exists', async () => {
+      const automakerDir = path.join(testDir, '.automaker');
       await fs.mkdir(automakerDir, { recursive: true });

       const result = await ensureAutomakerDir(testDir);
@@ -136,4 +128,87 @@ describe("automaker-paths.ts", () => {
       expect(result).toBe(automakerDir);
     });
   });
+
+  describe('getGlobalSettingsPath', () => {
+    it('should return path to settings.json in data directory', () => {
+      const dataDir = '/test/data';
+      const result = getGlobalSettingsPath(dataDir);
+      expect(result).toBe(path.join(dataDir, 'settings.json'));
+    });
+
+    it('should handle paths with trailing slashes', () => {
+      const dataDir = '/test/data' + path.sep;
+      const result = getGlobalSettingsPath(dataDir);
+      expect(result).toBe(path.join(dataDir, 'settings.json'));
+    });
+  });
+
+  describe('getCredentialsPath', () => {
+    it('should return path to credentials.json in data directory', () => {
+      const dataDir = '/test/data';
+      const result = getCredentialsPath(dataDir);
+      expect(result).toBe(path.join(dataDir, 'credentials.json'));
+    });
+
+    it('should handle paths with trailing slashes', () => {
+      const dataDir = '/test/data' + path.sep;
+      const result = getCredentialsPath(dataDir);
+      expect(result).toBe(path.join(dataDir, 'credentials.json'));
+    });
+  });
+
+  describe('getProjectSettingsPath', () => {
+    it('should return path to settings.json in project .automaker directory', () => {
+      const projectPath = '/test/project';
+      const result = getProjectSettingsPath(projectPath);
+      expect(result).toBe(path.join(projectPath, '.automaker', 'settings.json'));
+    });
+
+    it('should handle paths with trailing slashes', () => {
+      const projectPath = '/test/project' + path.sep;
+      const result = getProjectSettingsPath(projectPath);
+      expect(result).toBe(path.join(projectPath, '.automaker', 'settings.json'));
+    });
+  });
+
+  describe('ensureDataDir', () => {
+    let testDir: string;
+
+    beforeEach(async () => {
+      testDir = path.join(os.tmpdir(), `data-dir-test-${Date.now()}`);
+    });
+
+    afterEach(async () => {
+      try {
+        await fs.rm(testDir, { recursive: true, force: true });
+      } catch {
+        // Ignore cleanup errors
+      }
+    });
+
+    it('should create data directory and return path', async () => {
+      const result = await ensureDataDir(testDir);
+
+      expect(result).toBe(testDir);
+      const stats = await fs.stat(testDir);
+      expect(stats.isDirectory()).toBe(true);
+    });
+
+    it('should succeed if directory already exists', async () => {
+      await fs.mkdir(testDir, { recursive: true });
+
+      const result = await ensureDataDir(testDir);
+
+      expect(result).toBe(testDir);
+    });
+
+    it('should create nested directories', async () => {
+      const nestedDir = path.join(testDir, 'nested', 'deep');
+      const result = await ensureDataDir(nestedDir);
+
+      expect(result).toBe(nestedDir);
+      const stats = await fs.stat(nestedDir);
+      expect(stats.isDirectory()).toBe(true);
+    });
+  });
 });
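The behavior those new ensureDataDir tests pin down fits in a few lines. A minimal sketch consistent with the tests above (illustrative; not necessarily the actual @automaker/platform implementation):

import fs from 'fs/promises';

// Create the directory (including nested parents) if missing, succeed if it
// already exists, and return the path unchanged.
async function ensureDataDir(dataDir: string): Promise<string> {
  await fs.mkdir(dataDir, { recursive: true }); // no-op when it already exists
  return dataDir;
}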
@@ -1,146 +1,146 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
 import {
   extractTextFromContent,
   normalizeContentBlocks,
   formatHistoryAsText,
   convertHistoryToMessages,
-} from "@/lib/conversation-utils.js";
-import { conversationHistoryFixture } from "../../fixtures/messages.js";
+} from '@automaker/utils';
+import { conversationHistoryFixture } from '../../fixtures/messages.js';

-describe("conversation-utils.ts", () => {
-  describe("extractTextFromContent", () => {
-    it("should return string content as-is", () => {
-      const result = extractTextFromContent("Hello world");
-      expect(result).toBe("Hello world");
+describe('conversation-utils.ts', () => {
+  describe('extractTextFromContent', () => {
+    it('should return string content as-is', () => {
+      const result = extractTextFromContent('Hello world');
+      expect(result).toBe('Hello world');
     });

-    it("should extract text from single text block", () => {
-      const content = [{ type: "text", text: "Hello" }];
+    it('should extract text from single text block', () => {
+      const content = [{ type: 'text', text: 'Hello' }];
       const result = extractTextFromContent(content);
-      expect(result).toBe("Hello");
+      expect(result).toBe('Hello');
     });

-    it("should extract and join multiple text blocks with newlines", () => {
+    it('should extract and join multiple text blocks with newlines', () => {
       const content = [
-        { type: "text", text: "First block" },
-        { type: "text", text: "Second block" },
-        { type: "text", text: "Third block" },
+        { type: 'text', text: 'First block' },
+        { type: 'text', text: 'Second block' },
+        { type: 'text', text: 'Third block' },
       ];
       const result = extractTextFromContent(content);
-      expect(result).toBe("First block\nSecond block\nThird block");
+      expect(result).toBe('First block\nSecond block\nThird block');
     });

-    it("should ignore non-text blocks", () => {
+    it('should ignore non-text blocks', () => {
       const content = [
-        { type: "text", text: "Text content" },
-        { type: "image", source: { type: "base64", data: "abc" } },
-        { type: "text", text: "More text" },
-        { type: "tool_use", name: "bash", input: {} },
+        { type: 'text', text: 'Text content' },
+        { type: 'image', source: { type: 'base64', data: 'abc' } },
+        { type: 'text', text: 'More text' },
+        { type: 'tool_use', name: 'bash', input: {} },
       ];
       const result = extractTextFromContent(content);
-      expect(result).toBe("Text content\nMore text");
+      expect(result).toBe('Text content\nMore text');
     });

-    it("should handle blocks without text property", () => {
+    it('should handle blocks without text property', () => {
       const content = [
-        { type: "text", text: "Valid" },
-        { type: "text" } as any,
-        { type: "text", text: "Also valid" },
+        { type: 'text', text: 'Valid' },
+        { type: 'text' } as any,
+        { type: 'text', text: 'Also valid' },
       ];
       const result = extractTextFromContent(content);
-      expect(result).toBe("Valid\n\nAlso valid");
+      expect(result).toBe('Valid\n\nAlso valid');
     });

-    it("should handle empty array", () => {
+    it('should handle empty array', () => {
       const result = extractTextFromContent([]);
-      expect(result).toBe("");
+      expect(result).toBe('');
     });

-    it("should handle array with only non-text blocks", () => {
+    it('should handle array with only non-text blocks', () => {
       const content = [
-        { type: "image", source: {} },
-        { type: "tool_use", name: "test" },
+        { type: 'image', source: {} },
+        { type: 'tool_use', name: 'test' },
       ];
       const result = extractTextFromContent(content);
-      expect(result).toBe("");
+      expect(result).toBe('');
     });
   });

-  describe("normalizeContentBlocks", () => {
-    it("should convert string to content block array", () => {
-      const result = normalizeContentBlocks("Hello");
-      expect(result).toEqual([{ type: "text", text: "Hello" }]);
+  describe('normalizeContentBlocks', () => {
+    it('should convert string to content block array', () => {
+      const result = normalizeContentBlocks('Hello');
+      expect(result).toEqual([{ type: 'text', text: 'Hello' }]);
     });

-    it("should return array content as-is", () => {
+    it('should return array content as-is', () => {
       const content = [
-        { type: "text", text: "Hello" },
-        { type: "image", source: {} },
+        { type: 'text', text: 'Hello' },
+        { type: 'image', source: {} },
       ];
       const result = normalizeContentBlocks(content);
       expect(result).toBe(content);
       expect(result).toHaveLength(2);
     });

-    it("should handle empty string", () => {
-      const result = normalizeContentBlocks("");
-      expect(result).toEqual([{ type: "text", text: "" }]);
+    it('should handle empty string', () => {
+      const result = normalizeContentBlocks('');
+      expect(result).toEqual([{ type: 'text', text: '' }]);
     });
   });

-  describe("formatHistoryAsText", () => {
-    it("should return empty string for empty history", () => {
+  describe('formatHistoryAsText', () => {
+    it('should return empty string for empty history', () => {
       const result = formatHistoryAsText([]);
-      expect(result).toBe("");
+      expect(result).toBe('');
     });

-    it("should format single user message", () => {
-      const history = [{ role: "user" as const, content: "Hello" }];
+    it('should format single user message', () => {
+      const history = [{ role: 'user' as const, content: 'Hello' }];
       const result = formatHistoryAsText(history);

-      expect(result).toContain("Previous conversation:");
-      expect(result).toContain("User: Hello");
-      expect(result).toContain("---");
+      expect(result).toContain('Previous conversation:');
+      expect(result).toContain('User: Hello');
+      expect(result).toContain('---');
     });

-    it("should format single assistant message", () => {
-      const history = [{ role: "assistant" as const, content: "Hi there" }];
+    it('should format single assistant message', () => {
+      const history = [{ role: 'assistant' as const, content: 'Hi there' }];
       const result = formatHistoryAsText(history);

-      expect(result).toContain("Assistant: Hi there");
+      expect(result).toContain('Assistant: Hi there');
     });

-    it("should format multiple messages with correct roles", () => {
+    it('should format multiple messages with correct roles', () => {
       const history = conversationHistoryFixture.slice(0, 2);
       const result = formatHistoryAsText(history);

-      expect(result).toContain("User: Hello, can you help me?");
-      expect(result).toContain("Assistant: Of course! How can I assist you today?");
-      expect(result).toContain("---");
+      expect(result).toContain('User: Hello, can you help me?');
+      expect(result).toContain('Assistant: Of course! How can I assist you today?');
+      expect(result).toContain('---');
     });

-    it("should handle messages with array content (multipart)", () => {
+    it('should handle messages with array content (multipart)', () => {
       const history = [conversationHistoryFixture[2]]; // Has text + image
       const result = formatHistoryAsText(history);

-      expect(result).toContain("What is in this image?");
-      expect(result).not.toContain("base64"); // Should not include image data
+      expect(result).toContain('What is in this image?');
+      expect(result).not.toContain('base64'); // Should not include image data
     });

-    it("should format all messages from fixture", () => {
+    it('should format all messages from fixture', () => {
       const result = formatHistoryAsText(conversationHistoryFixture);

-      expect(result).toContain("Previous conversation:");
-      expect(result).toContain("User: Hello, can you help me?");
-      expect(result).toContain("Assistant: Of course!");
-      expect(result).toContain("User: What is in this image?");
-      expect(result).toContain("---");
+      expect(result).toContain('Previous conversation:');
+      expect(result).toContain('User: Hello, can you help me?');
+      expect(result).toContain('Assistant: Of course!');
+      expect(result).toContain('User: What is in this image?');
+      expect(result).toContain('---');
     });

-    it("should separate messages with double newlines", () => {
+    it('should separate messages with double newlines', () => {
       const history = [
-        { role: "user" as const, content: "First" },
-        { role: "assistant" as const, content: "Second" },
+        { role: 'user' as const, content: 'First' },
+        { role: 'assistant' as const, content: 'Second' },
       ];
       const result = formatHistoryAsText(history);

@@ -148,73 +148,71 @@ describe("conversation-utils.ts", () => {
     });
   });

-  describe("convertHistoryToMessages", () => {
-    it("should convert empty history", () => {
+  describe('convertHistoryToMessages', () => {
+    it('should convert empty history', () => {
       const result = convertHistoryToMessages([]);
       expect(result).toEqual([]);
     });

-    it("should convert single message to SDK format", () => {
-      const history = [{ role: "user" as const, content: "Hello" }];
+    it('should convert single message to SDK format', () => {
+      const history = [{ role: 'user' as const, content: 'Hello' }];
       const result = convertHistoryToMessages(history);

       expect(result).toHaveLength(1);
       expect(result[0]).toMatchObject({
-        type: "user",
-        session_id: "",
+        type: 'user',
+        session_id: '',
         message: {
-          role: "user",
-          content: [{ type: "text", text: "Hello" }],
+          role: 'user',
+          content: [{ type: 'text', text: 'Hello' }],
         },
         parent_tool_use_id: null,
       });
     });

-    it("should normalize string content to array", () => {
-      const history = [{ role: "assistant" as const, content: "Response" }];
+    it('should normalize string content to array', () => {
+      const history = [{ role: 'assistant' as const, content: 'Response' }];
       const result = convertHistoryToMessages(history);

-      expect(result[0].message.content).toEqual([
-        { type: "text", text: "Response" },
-      ]);
+      expect(result[0].message.content).toEqual([{ type: 'text', text: 'Response' }]);
     });

-    it("should preserve array content", () => {
+    it('should preserve array content', () => {
       const history = [
         {
-          role: "user" as const,
+          role: 'user' as const,
           content: [
-            { type: "text", text: "Hello" },
-            { type: "image", source: {} },
+            { type: 'text', text: 'Hello' },
+            { type: 'image', source: {} },
           ],
         },
       ];
       const result = convertHistoryToMessages(history);

       expect(result[0].message.content).toHaveLength(2);
-      expect(result[0].message.content[0]).toEqual({ type: "text", text: "Hello" });
+      expect(result[0].message.content[0]).toEqual({ type: 'text', text: 'Hello' });
     });

-    it("should convert multiple messages", () => {
+    it('should convert multiple messages', () => {
       const history = conversationHistoryFixture.slice(0, 2);
       const result = convertHistoryToMessages(history);

       expect(result).toHaveLength(2);
-      expect(result[0].type).toBe("user");
-      expect(result[1].type).toBe("assistant");
+      expect(result[0].type).toBe('user');
+      expect(result[1].type).toBe('assistant');
     });

-    it("should set correct fields for SDK format", () => {
-      const history = [{ role: "user" as const, content: "Test" }];
+    it('should set correct fields for SDK format', () => {
+      const history = [{ role: 'user' as const, content: 'Test' }];
       const result = convertHistoryToMessages(history);

-      expect(result[0].session_id).toBe("");
+      expect(result[0].session_id).toBe('');
       expect(result[0].parent_tool_use_id).toBeNull();
-      expect(result[0].type).toBe("user");
-      expect(result[0].message.role).toBe("user");
+      expect(result[0].type).toBe('user');
+      expect(result[0].message.role).toBe('user');
     });

-    it("should handle all messages from fixture", () => {
+    it('should handle all messages from fixture', () => {
       const result = convertHistoryToMessages(conversationHistoryFixture);

       expect(result).toHaveLength(3);
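For orientation, the extractTextFromContent behavior exercised above (join text blocks with newlines; a text block missing its `text` property contributes an empty string, hence 'Valid\n\nAlso valid') can be sketched as follows. This is illustrative only, not the actual @automaker/utils implementation:

type ContentBlock = { type: string; text?: string; [key: string]: unknown };

function extractTextFromContent(content: string | ContentBlock[]): string {
  if (typeof content === 'string') return content; // string content passes through as-is
  return content
    .filter((block) => block.type === 'text') // non-text blocks are ignored
    .map((block) => block.text ?? '') // missing text contributes an empty line
    .join('\n');
}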
@@ -1,11 +1,11 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
 import {
   resolveDependencies,
   areDependenciesSatisfied,
   getBlockingDependencies,
   type DependencyResolutionResult,
-} from "@/lib/dependency-resolver.js";
-import type { Feature } from "@/services/feature-loader.js";
+} from '@automaker/dependency-resolver';
+import type { Feature } from '@automaker/types';

 // Helper to create test features
 function createFeature(
@@ -20,17 +20,17 @@ function createFeature(
 ): Feature {
   return {
     id,
-    category: options.category || "test",
+    category: options.category || 'test',
     description: options.description || `Feature ${id}`,
-    status: options.status || "backlog",
+    status: options.status || 'backlog',
     priority: options.priority,
     dependencies: options.dependencies,
   };
 }

-describe("dependency-resolver.ts", () => {
-  describe("resolveDependencies", () => {
-    it("should handle empty feature list", () => {
+describe('dependency-resolver.ts', () => {
+  describe('resolveDependencies', () => {
+    it('should handle empty feature list', () => {
       const result = resolveDependencies([]);

       expect(result.orderedFeatures).toEqual([]);
@@ -39,103 +39,103 @@ describe("dependency-resolver.ts", () => {
       expect(result.blockedFeatures.size).toBe(0);
     });

-    it("should handle features with no dependencies", () => {
+    it('should handle features with no dependencies', () => {
       const features = [
-        createFeature("f1", { priority: 1 }),
-        createFeature("f2", { priority: 2 }),
-        createFeature("f3", { priority: 3 }),
+        createFeature('f1', { priority: 1 }),
+        createFeature('f2', { priority: 2 }),
+        createFeature('f3', { priority: 3 }),
       ];

       const result = resolveDependencies(features);

       expect(result.orderedFeatures).toHaveLength(3);
-      expect(result.orderedFeatures[0].id).toBe("f1"); // Highest priority first
-      expect(result.orderedFeatures[1].id).toBe("f2");
-      expect(result.orderedFeatures[2].id).toBe("f3");
+      expect(result.orderedFeatures[0].id).toBe('f1'); // Highest priority first
+      expect(result.orderedFeatures[1].id).toBe('f2');
+      expect(result.orderedFeatures[2].id).toBe('f3');
       expect(result.circularDependencies).toEqual([]);
       expect(result.missingDependencies.size).toBe(0);
       expect(result.blockedFeatures.size).toBe(0);
     });

-    it("should order features by dependencies (simple chain)", () => {
+    it('should order features by dependencies (simple chain)', () => {
       const features = [
-        createFeature("f3", { dependencies: ["f2"] }),
-        createFeature("f1"),
-        createFeature("f2", { dependencies: ["f1"] }),
+        createFeature('f3', { dependencies: ['f2'] }),
+        createFeature('f1'),
+        createFeature('f2', { dependencies: ['f1'] }),
       ];

       const result = resolveDependencies(features);

       expect(result.orderedFeatures).toHaveLength(3);
-      expect(result.orderedFeatures[0].id).toBe("f1");
-      expect(result.orderedFeatures[1].id).toBe("f2");
-      expect(result.orderedFeatures[2].id).toBe("f3");
+      expect(result.orderedFeatures[0].id).toBe('f1');
+      expect(result.orderedFeatures[1].id).toBe('f2');
+      expect(result.orderedFeatures[2].id).toBe('f3');
       expect(result.circularDependencies).toEqual([]);
     });

-    it("should respect priority within same dependency level", () => {
+    it('should respect priority within same dependency level', () => {
       const features = [
-        createFeature("f1", { priority: 3, dependencies: ["base"] }),
-        createFeature("f2", { priority: 1, dependencies: ["base"] }),
-        createFeature("f3", { priority: 2, dependencies: ["base"] }),
-        createFeature("base"),
+        createFeature('f1', { priority: 3, dependencies: ['base'] }),
+        createFeature('f2', { priority: 1, dependencies: ['base'] }),
+        createFeature('f3', { priority: 2, dependencies: ['base'] }),
+        createFeature('base'),
       ];

       const result = resolveDependencies(features);

-      expect(result.orderedFeatures[0].id).toBe("base");
-      expect(result.orderedFeatures[1].id).toBe("f2"); // Priority 1
-      expect(result.orderedFeatures[2].id).toBe("f3"); // Priority 2
-      expect(result.orderedFeatures[3].id).toBe("f1"); // Priority 3
+      expect(result.orderedFeatures[0].id).toBe('base');
+      expect(result.orderedFeatures[1].id).toBe('f2'); // Priority 1
+      expect(result.orderedFeatures[2].id).toBe('f3'); // Priority 2
+      expect(result.orderedFeatures[3].id).toBe('f1'); // Priority 3
     });

-    it("should use default priority of 2 when not specified", () => {
+    it('should use default priority of 2 when not specified', () => {
       const features = [
-        createFeature("f1", { priority: 1 }),
-        createFeature("f2"), // No priority = default 2
-        createFeature("f3", { priority: 3 }),
+        createFeature('f1', { priority: 1 }),
+        createFeature('f2'), // No priority = default 2
+        createFeature('f3', { priority: 3 }),
       ];

       const result = resolveDependencies(features);

-      expect(result.orderedFeatures[0].id).toBe("f1");
-      expect(result.orderedFeatures[1].id).toBe("f2");
-      expect(result.orderedFeatures[2].id).toBe("f3");
+      expect(result.orderedFeatures[0].id).toBe('f1');
+      expect(result.orderedFeatures[1].id).toBe('f2');
+      expect(result.orderedFeatures[2].id).toBe('f3');
     });

-    it("should detect missing dependencies", () => {
+    it('should detect missing dependencies', () => {
       const features = [
-        createFeature("f1", { dependencies: ["missing1", "missing2"] }),
-        createFeature("f2", { dependencies: ["f1", "missing3"] }),
+        createFeature('f1', { dependencies: ['missing1', 'missing2'] }),
+        createFeature('f2', { dependencies: ['f1', 'missing3'] }),
       ];

       const result = resolveDependencies(features);

       expect(result.missingDependencies.size).toBe(2);
-      expect(result.missingDependencies.get("f1")).toEqual(["missing1", "missing2"]);
-      expect(result.missingDependencies.get("f2")).toEqual(["missing3"]);
+      expect(result.missingDependencies.get('f1')).toEqual(['missing1', 'missing2']);
+      expect(result.missingDependencies.get('f2')).toEqual(['missing3']);
       expect(result.orderedFeatures).toHaveLength(2);
     });

-    it("should detect blocked features (incomplete dependencies)", () => {
+    it('should detect blocked features (incomplete dependencies)', () => {
       const features = [
-        createFeature("f1", { status: "in_progress" }),
-        createFeature("f2", { status: "backlog", dependencies: ["f1"] }),
-        createFeature("f3", { status: "completed" }),
-        createFeature("f4", { status: "backlog", dependencies: ["f3"] }),
+        createFeature('f1', { status: 'in_progress' }),
+        createFeature('f2', { status: 'backlog', dependencies: ['f1'] }),
+        createFeature('f3', { status: 'completed' }),
+        createFeature('f4', { status: 'backlog', dependencies: ['f3'] }),
       ];

       const result = resolveDependencies(features);

       expect(result.blockedFeatures.size).toBe(1);
-      expect(result.blockedFeatures.get("f2")).toEqual(["f1"]);
-      expect(result.blockedFeatures.has("f4")).toBe(false); // f3 is completed
+      expect(result.blockedFeatures.get('f2')).toEqual(['f1']);
+      expect(result.blockedFeatures.has('f4')).toBe(false); // f3 is completed
     });

-    it("should not block features whose dependencies are verified", () => {
+    it('should not block features whose dependencies are verified', () => {
       const features = [
-        createFeature("f1", { status: "verified" }),
-        createFeature("f2", { status: "backlog", dependencies: ["f1"] }),
+        createFeature('f1', { status: 'verified' }),
+        createFeature('f2', { status: 'backlog', dependencies: ['f1'] }),
       ];

       const result = resolveDependencies(features);
@@ -143,25 +143,25 @@ describe("dependency-resolver.ts", () => {
       expect(result.blockedFeatures.size).toBe(0);
     });

-    it("should detect circular dependencies (simple cycle)", () => {
+    it('should detect circular dependencies (simple cycle)', () => {
       const features = [
-        createFeature("f1", { dependencies: ["f2"] }),
-        createFeature("f2", { dependencies: ["f1"] }),
+        createFeature('f1', { dependencies: ['f2'] }),
+        createFeature('f2', { dependencies: ['f1'] }),
       ];

       const result = resolveDependencies(features);

       expect(result.circularDependencies).toHaveLength(1);
-      expect(result.circularDependencies[0]).toContain("f1");
-      expect(result.circularDependencies[0]).toContain("f2");
+      expect(result.circularDependencies[0]).toContain('f1');
+      expect(result.circularDependencies[0]).toContain('f2');
       expect(result.orderedFeatures).toHaveLength(2); // Features still included
     });

-    it("should detect circular dependencies (multi-node cycle)", () => {
+    it('should detect circular dependencies (multi-node cycle)', () => {
       const features = [
-        createFeature("f1", { dependencies: ["f3"] }),
-        createFeature("f2", { dependencies: ["f1"] }),
-        createFeature("f3", { dependencies: ["f2"] }),
+        createFeature('f1', { dependencies: ['f3'] }),
+        createFeature('f2', { dependencies: ['f1'] }),
+        createFeature('f3', { dependencies: ['f2'] }),
       ];

       const result = resolveDependencies(features);
@@ -170,47 +170,47 @@ describe("dependency-resolver.ts", () => {
       expect(result.orderedFeatures).toHaveLength(3);
     });

-    it("should handle mixed valid and circular dependencies", () => {
+    it('should handle mixed valid and circular dependencies', () => {
       const features = [
-        createFeature("base"),
-        createFeature("f1", { dependencies: ["base", "f2"] }),
-        createFeature("f2", { dependencies: ["f1"] }), // Circular with f1
-        createFeature("f3", { dependencies: ["base"] }),
+        createFeature('base'),
+        createFeature('f1', { dependencies: ['base', 'f2'] }),
+        createFeature('f2', { dependencies: ['f1'] }), // Circular with f1
+        createFeature('f3', { dependencies: ['base'] }),
       ];

       const result = resolveDependencies(features);

       expect(result.circularDependencies.length).toBeGreaterThan(0);
-      expect(result.orderedFeatures[0].id).toBe("base");
+      expect(result.orderedFeatures[0].id).toBe('base');
       expect(result.orderedFeatures).toHaveLength(4);
     });

-    it("should handle complex dependency graph", () => {
+    it('should handle complex dependency graph', () => {
       const features = [
-        createFeature("ui", { dependencies: ["api", "auth"], priority: 1 }),
-        createFeature("api", { dependencies: ["db"], priority: 2 }),
-        createFeature("auth", { dependencies: ["db"], priority: 1 }),
-        createFeature("db", { priority: 1 }),
-        createFeature("tests", { dependencies: ["ui"], priority: 3 }),
+        createFeature('ui', { dependencies: ['api', 'auth'], priority: 1 }),
+        createFeature('api', { dependencies: ['db'], priority: 2 }),
+        createFeature('auth', { dependencies: ['db'], priority: 1 }),
+        createFeature('db', { priority: 1 }),
+        createFeature('tests', { dependencies: ['ui'], priority: 3 }),
       ];

       const result = resolveDependencies(features);

-      const order = result.orderedFeatures.map(f => f.id);
+      const order = result.orderedFeatures.map((f) => f.id);

-      expect(order[0]).toBe("db");
-      expect(order.indexOf("db")).toBeLessThan(order.indexOf("api"));
-      expect(order.indexOf("db")).toBeLessThan(order.indexOf("auth"));
-      expect(order.indexOf("api")).toBeLessThan(order.indexOf("ui"));
-      expect(order.indexOf("auth")).toBeLessThan(order.indexOf("ui"));
-      expect(order.indexOf("ui")).toBeLessThan(order.indexOf("tests"));
+      expect(order[0]).toBe('db');
+      expect(order.indexOf('db')).toBeLessThan(order.indexOf('api'));
+      expect(order.indexOf('db')).toBeLessThan(order.indexOf('auth'));
+      expect(order.indexOf('api')).toBeLessThan(order.indexOf('ui'));
+      expect(order.indexOf('auth')).toBeLessThan(order.indexOf('ui'));
+      expect(order.indexOf('ui')).toBeLessThan(order.indexOf('tests'));
       expect(result.circularDependencies).toEqual([]);
     });

-    it("should handle features with empty dependencies array", () => {
+    it('should handle features with empty dependencies array', () => {
       const features = [
-        createFeature("f1", { dependencies: [] }),
-        createFeature("f2", { dependencies: [] }),
+        createFeature('f1', { dependencies: [] }),
+        createFeature('f2', { dependencies: [] }),
       ];

       const result = resolveDependencies(features);
@@ -220,22 +220,20 @@ describe("dependency-resolver.ts", () => {
       expect(result.blockedFeatures.size).toBe(0);
     });

-    it("should track multiple blocking dependencies", () => {
+    it('should track multiple blocking dependencies', () => {
       const features = [
-        createFeature("f1", { status: "in_progress" }),
-        createFeature("f2", { status: "backlog" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'in_progress' }),
+        createFeature('f2', { status: 'backlog' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       const result = resolveDependencies(features);

-      expect(result.blockedFeatures.get("f3")).toEqual(["f1", "f2"]);
+      expect(result.blockedFeatures.get('f3')).toEqual(['f1', 'f2']);
     });

-    it("should handle self-referencing dependency", () => {
-      const features = [
-        createFeature("f1", { dependencies: ["f1"] }),
-      ];
+    it('should handle self-referencing dependency', () => {
+      const features = [createFeature('f1', { dependencies: ['f1'] })];

       const result = resolveDependencies(features);

@@ -244,195 +242,191 @@ describe("dependency-resolver.ts", () => {
     });
   });

-  describe("areDependenciesSatisfied", () => {
-    it("should return true for feature with no dependencies", () => {
-      const feature = createFeature("f1");
+  describe('areDependenciesSatisfied', () => {
+    it('should return true for feature with no dependencies', () => {
+      const feature = createFeature('f1');
       const allFeatures = [feature];

       expect(areDependenciesSatisfied(feature, allFeatures)).toBe(true);
     });

-    it("should return true for feature with empty dependencies array", () => {
-      const feature = createFeature("f1", { dependencies: [] });
+    it('should return true for feature with empty dependencies array', () => {
+      const feature = createFeature('f1', { dependencies: [] });
       const allFeatures = [feature];

       expect(areDependenciesSatisfied(feature, allFeatures)).toBe(true);
     });

-    it("should return true when all dependencies are completed", () => {
+    it('should return true when all dependencies are completed', () => {
       const allFeatures = [
-        createFeature("f1", { status: "completed" }),
-        createFeature("f2", { status: "completed" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'completed' }),
+        createFeature('f2', { status: 'completed' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(true);
     });

-    it("should return true when all dependencies are verified", () => {
+    it('should return true when all dependencies are verified', () => {
       const allFeatures = [
-        createFeature("f1", { status: "verified" }),
-        createFeature("f2", { status: "verified" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'verified' }),
+        createFeature('f2', { status: 'verified' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(true);
     });

-    it("should return true when dependencies are mix of completed and verified", () => {
+    it('should return true when dependencies are mix of completed and verified', () => {
       const allFeatures = [
-        createFeature("f1", { status: "completed" }),
-        createFeature("f2", { status: "verified" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'completed' }),
+        createFeature('f2', { status: 'verified' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(true);
     });

-    it("should return false when any dependency is in_progress", () => {
+    it('should return false when any dependency is in_progress', () => {
       const allFeatures = [
-        createFeature("f1", { status: "completed" }),
-        createFeature("f2", { status: "in_progress" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'completed' }),
+        createFeature('f2', { status: 'in_progress' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(false);
     });

-    it("should return false when any dependency is in backlog", () => {
+    it('should return false when any dependency is in backlog', () => {
       const allFeatures = [
-        createFeature("f1", { status: "completed" }),
-        createFeature("f2", { status: "backlog" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'completed' }),
+        createFeature('f2', { status: 'backlog' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       expect(areDependenciesSatisfied(allFeatures[2], allFeatures)).toBe(false);
     });

-    it("should return false when dependency is missing", () => {
-      const allFeatures = [
-        createFeature("f1", { status: "backlog", dependencies: ["missing"] }),
-      ];
+    it('should return false when dependency is missing', () => {
+      const allFeatures = [createFeature('f1', { status: 'backlog', dependencies: ['missing'] })];

       expect(areDependenciesSatisfied(allFeatures[0], allFeatures)).toBe(false);
     });

-    it("should return false when multiple dependencies are incomplete", () => {
+    it('should return false when multiple dependencies are incomplete', () => {
       const allFeatures = [
-        createFeature("f1", { status: "backlog" }),
-        createFeature("f2", { status: "in_progress" }),
-        createFeature("f3", { status: "waiting_approval" }),
-        createFeature("f4", { status: "backlog", dependencies: ["f1", "f2", "f3"] }),
+        createFeature('f1', { status: 'backlog' }),
+        createFeature('f2', { status: 'in_progress' }),
+        createFeature('f3', { status: 'waiting_approval' }),
+        createFeature('f4', { status: 'backlog', dependencies: ['f1', 'f2', 'f3'] }),
       ];

       expect(areDependenciesSatisfied(allFeatures[3], allFeatures)).toBe(false);
     });
   });

-  describe("getBlockingDependencies", () => {
-    it("should return empty array for feature with no dependencies", () => {
-      const feature = createFeature("f1");
+  describe('getBlockingDependencies', () => {
+    it('should return empty array for feature with no dependencies', () => {
+      const feature = createFeature('f1');
       const allFeatures = [feature];

       expect(getBlockingDependencies(feature, allFeatures)).toEqual([]);
     });

-    it("should return empty array for feature with empty dependencies array", () => {
-      const feature = createFeature("f1", { dependencies: [] });
+    it('should return empty array for feature with empty dependencies array', () => {
+      const feature = createFeature('f1', { dependencies: [] });
       const allFeatures = [feature];

       expect(getBlockingDependencies(feature, allFeatures)).toEqual([]);
     });

-    it("should return empty array when all dependencies are completed", () => {
+    it('should return empty array when all dependencies are completed', () => {
       const allFeatures = [
-        createFeature("f1", { status: "completed" }),
-        createFeature("f2", { status: "completed" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'completed' }),
+        createFeature('f2', { status: 'completed' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual([]);
     });

-    it("should return empty array when all dependencies are verified", () => {
+    it('should return empty array when all dependencies are verified', () => {
       const allFeatures = [
-        createFeature("f1", { status: "verified" }),
-        createFeature("f2", { status: "verified" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'verified' }),
+        createFeature('f2', { status: 'verified' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

       expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual([]);
     });

-    it("should return blocking dependencies in backlog status", () => {
+    it('should return blocking dependencies in backlog status', () => {
       const allFeatures = [
-        createFeature("f1", { status: "backlog" }),
-        createFeature("f2", { status: "completed" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'backlog' }),
+        createFeature('f2', { status: 'completed' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

-      expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(["f1"]);
+      expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(['f1']);
     });

-    it("should return blocking dependencies in in_progress status", () => {
+    it('should return blocking dependencies in in_progress status', () => {
       const allFeatures = [
-        createFeature("f1", { status: "in_progress" }),
-        createFeature("f2", { status: "verified" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'in_progress' }),
+        createFeature('f2', { status: 'verified' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

-      expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(["f1"]);
+      expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(['f1']);
     });

-    it("should return blocking dependencies in waiting_approval status", () => {
+    it('should return blocking dependencies in waiting_approval status', () => {
       const allFeatures = [
-        createFeature("f1", { status: "waiting_approval" }),
-        createFeature("f2", { status: "completed" }),
-        createFeature("f3", { status: "backlog", dependencies: ["f1", "f2"] }),
+        createFeature('f1', { status: 'waiting_approval' }),
+        createFeature('f2', { status: 'completed' }),
+        createFeature('f3', { status: 'backlog', dependencies: ['f1', 'f2'] }),
       ];

-      expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(["f1"]);
+      expect(getBlockingDependencies(allFeatures[2], allFeatures)).toEqual(['f1']);
     });

-    it("should return all blocking dependencies", () => {
+    it('should return all blocking dependencies', () => {
       const allFeatures = [
-        createFeature("f1", { status: "backlog" }),
-        createFeature("f2", { status: "in_progress" }),
-        createFeature("f3", { status: "waiting_approval" }),
-        createFeature("f4", { status: "completed" }),
-        createFeature("f5", { status: "backlog", dependencies: ["f1", "f2", "f3", "f4"] }),
+        createFeature('f1', { status: 'backlog' }),
+        createFeature('f2', { status: 'in_progress' }),
+        createFeature('f3', { status: 'waiting_approval' }),
+        createFeature('f4', { status: 'completed' }),
+        createFeature('f5', { status: 'backlog', dependencies: ['f1', 'f2', 'f3', 'f4'] }),
       ];

       const blocking = getBlockingDependencies(allFeatures[4], allFeatures);
       expect(blocking).toHaveLength(3);
-      expect(blocking).toContain("f1");
-      expect(blocking).toContain("f2");
-      expect(blocking).toContain("f3");
-      expect(blocking).not.toContain("f4");
+      expect(blocking).toContain('f1');
+      expect(blocking).toContain('f2');
+      expect(blocking).toContain('f3');
+      expect(blocking).not.toContain('f4');
     });

-    it("should handle missing dependencies", () => {
-      const allFeatures = [
-        createFeature("f1", { status: "backlog", dependencies: ["missing"] }),
-      ];
+    it('should handle missing dependencies', () => {
+      const allFeatures = [createFeature('f1', { status: 'backlog', dependencies: ['missing'] })];

       // Missing dependencies won't be in the blocking list since they don't exist
       expect(getBlockingDependencies(allFeatures[0], allFeatures)).toEqual([]);
     });

-    it("should handle mix of completed, verified, and incomplete dependencies", () => {
+    it('should handle mix of completed, verified, and incomplete dependencies', () => {
       const allFeatures = [
-        createFeature("f1", { status: "completed" }),
-        createFeature("f2", { status: "verified" }),
-        createFeature("f3", { status: "in_progress" }),
-        createFeature("f4", { status: "backlog" }),
-        createFeature("f5", { status: "backlog", dependencies: ["f1", "f2", "f3", "f4"] }),
+        createFeature('f1', { status: 'completed' }),
+        createFeature('f2', { status: 'verified' }),
+        createFeature('f3', { status: 'in_progress' }),
+        createFeature('f4', { status: 'backlog' }),
+        createFeature('f5', { status: 'backlog', dependencies: ['f1', 'f2', 'f3', 'f4'] }),
       ];

       const blocking = getBlockingDependencies(allFeatures[4], allFeatures);
       expect(blocking).toHaveLength(2);
-      expect(blocking).toContain("f3");
-      expect(blocking).toContain("f4");
+      expect(blocking).toContain('f3');
+      expect(blocking).toContain('f4');
     });
   });
 });
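The ordering behavior these dependency-resolver tests pin down (dependencies first, priority as the tie-break, cycles reported but features still included) is the classic Kahn's-algorithm shape. A minimal sketch of the general technique under those assumptions — illustrative only, not the @automaker/dependency-resolver implementation:

// Kahn's algorithm with a priority tie-break (lower number = higher priority).
interface FeatureLike {
  id: string;
  priority?: number; // the tests above assume a default of 2
  dependencies?: string[];
}

function orderFeatures(features: FeatureLike[]): FeatureLike[] {
  const byId = new Map(features.map((f) => [f.id, f] as [string, FeatureLike]));
  const indegree = new Map<string, number>();
  const dependents = new Map<string, string[]>();

  for (const f of features) indegree.set(f.id, 0);
  for (const f of features) {
    for (const dep of f.dependencies ?? []) {
      if (!byId.has(dep)) continue; // missing deps are reported separately
      indegree.set(f.id, (indegree.get(f.id) ?? 0) + 1);
      dependents.set(dep, [...(dependents.get(dep) ?? []), f.id]);
    }
  }

  // Ready set: features whose dependencies are all emitted; always pick the
  // highest-priority (lowest number) candidate next.
  const ready = features.filter((f) => (indegree.get(f.id) ?? 0) === 0);
  const ordered: FeatureLike[] = [];

  while (ready.length > 0) {
    ready.sort((a, b) => (a.priority ?? 2) - (b.priority ?? 2));
    const next = ready.shift()!;
    ordered.push(next);
    for (const id of dependents.get(next.id) ?? []) {
      const d = (indegree.get(id) ?? 0) - 1;
      indegree.set(id, d);
      if (d === 0) ready.push(byId.get(id)!);
    }
  }

  // Anything not emitted sits on a cycle; append it so no feature is lost,
  // matching the tests' expectation that orderedFeatures keeps every feature.
  const emitted = new Set(ordered.map((f) => f.id));
  for (const f of features) if (!emitted.has(f.id)) ordered.push(f);
  return ordered;
}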
@@ -1,146 +1,211 @@
-import { describe, it, expect } from "vitest";
+import { describe, it, expect } from 'vitest';
 import {
   isAbortError,
   isAuthenticationError,
+  isCancellationError,
   classifyError,
   getUserFriendlyErrorMessage,
   type ErrorType,
-} from "@/lib/error-handler.js";
+} from '@automaker/utils';

-describe("error-handler.ts", () => {
-  describe("isAbortError", () => {
-    it("should detect AbortError by error name", () => {
-      const error = new Error("Operation cancelled");
-      error.name = "AbortError";
+describe('error-handler.ts', () => {
+  describe('isAbortError', () => {
+    it('should detect AbortError by error name', () => {
+      const error = new Error('Operation cancelled');
+      error.name = 'AbortError';
       expect(isAbortError(error)).toBe(true);
     });

-    it("should detect abort error by message content", () => {
-      const error = new Error("Request was aborted");
+    it('should detect abort error by message content', () => {
+      const error = new Error('Request was aborted');
       expect(isAbortError(error)).toBe(true);
     });

-    it("should return false for non-abort errors", () => {
-      const error = new Error("Something else went wrong");
+    it('should return false for non-abort errors', () => {
+      const error = new Error('Something else went wrong');
       expect(isAbortError(error)).toBe(false);
     });

-    it("should return false for non-Error objects", () => {
-      expect(isAbortError("not an error")).toBe(false);
+    it('should return false for non-Error objects', () => {
+      expect(isAbortError('not an error')).toBe(false);
       expect(isAbortError(null)).toBe(false);
       expect(isAbortError(undefined)).toBe(false);
     });
   });

-  describe("isAuthenticationError", () => {
-    it("should detect 'Authentication failed' message", () => {
-      expect(isAuthenticationError("Authentication failed")).toBe(true);
+  describe('isCancellationError', () => {
+    it("should detect 'cancelled' message", () => {
+      expect(isCancellationError('Operation was cancelled')).toBe(true);
     });

-    it("should detect 'Invalid API key' message", () => {
-      expect(isAuthenticationError("Invalid API key provided")).toBe(true);
+    it("should detect 'canceled' message", () => {
+      expect(isCancellationError('Request was canceled')).toBe(true);
     });

-    it("should detect 'authentication_failed' message", () => {
-      expect(isAuthenticationError("authentication_failed")).toBe(true);
+    it("should detect 'stopped' message", () => {
+      expect(isCancellationError('Process was stopped')).toBe(true);
     });

-    it("should detect 'Fix external API key' message", () => {
-      expect(isAuthenticationError("Fix external API key configuration")).toBe(true);
+    it("should detect 'aborted' message", () => {
+      expect(isCancellationError('Task was aborted')).toBe(true);
     });

-    it("should return false for non-authentication errors", () => {
-      expect(isAuthenticationError("Network connection error")).toBe(false);
-      expect(isAuthenticationError("File not found")).toBe(false);
+    it('should be case insensitive', () => {
+      expect(isCancellationError('CANCELLED')).toBe(true);
+      expect(isCancellationError('Canceled')).toBe(true);
     });

-    it("should be case sensitive", () => {
-      expect(isAuthenticationError("authentication Failed")).toBe(false);
+    it('should return false for non-cancellation errors', () => {
+      expect(isCancellationError('File not found')).toBe(false);
+      expect(isCancellationError('Network error')).toBe(false);
     });
   });

-  describe("classifyError", () => {
-    it("should classify authentication errors", () => {
-      const error = new Error("Authentication failed");
+  describe('isAuthenticationError', () => {
+    it("should detect 'Authentication failed' message", () => {
+      expect(isAuthenticationError('Authentication failed')).toBe(true);
+    });
+
+    it("should detect 'Invalid API key' message", () => {
+      expect(isAuthenticationError('Invalid API key provided')).toBe(true);
+    });
+
+    it("should detect 'authentication_failed' message", () => {
+      expect(isAuthenticationError('authentication_failed')).toBe(true);
+    });
+
+    it("should detect 'Fix external API key' message", () => {
+      expect(isAuthenticationError('Fix external API key configuration')).toBe(true);
+    });
+
+    it('should return false for non-authentication errors', () => {
+      expect(isAuthenticationError('Network connection error')).toBe(false);
+      expect(isAuthenticationError('File not found')).toBe(false);
+    });
+
+    it('should be case sensitive', () => {
+      expect(isAuthenticationError('authentication Failed')).toBe(false);
+    });
+  });
+
+  describe('classifyError', () => {
+    it('should classify authentication errors', () => {
+      const error = new Error('Authentication failed');
       const result = classifyError(error);

-      expect(result.type).toBe("authentication");
+      expect(result.type).toBe('authentication');
       expect(result.isAuth).toBe(true);
       expect(result.isAbort).toBe(false);
-      expect(result.message).toBe("Authentication failed");
+      expect(result.message).toBe('Authentication failed');
       expect(result.originalError).toBe(error);
     });

-    it("should classify abort errors", () => {
-      const error = new Error("Operation aborted");
-      error.name = "AbortError";
+    it('should classify abort errors', () => {
+      const error = new Error('Operation aborted');
+      error.name = 'AbortError';
       const result = classifyError(error);

-      expect(result.type).toBe("abort");
+      expect(result.type).toBe('abort');
       expect(result.isAbort).toBe(true);
       expect(result.isAuth).toBe(false);
-      expect(result.message).toBe("Operation aborted");
+      expect(result.message).toBe('Operation aborted');
     });

-    it("should prioritize auth over abort if both match", () => {
-      const error = new Error("Authentication failed and aborted");
+    it('should prioritize auth over abort if both match', () => {
+      const error = new Error('Authentication failed and aborted');
       const result = classifyError(error);

-      expect(result.type).toBe("authentication");
+      expect(result.type).toBe('authentication');
       expect(result.isAuth).toBe(true);
       expect(result.isAbort).toBe(true); // Still detected as abort too
     });

-    it("should classify generic Error as execution error", () => {
-      const error = new Error("Something went wrong");
+    it('should classify cancellation errors', () => {
+      const error = new Error('Operation was cancelled');
       const result = classifyError(error);

-      expect(result.type).toBe("execution");
+      expect(result.type).toBe('cancellation');
+      expect(result.isCancellation).toBe(true);
+      expect(result.isAbort).toBe(false);
+      expect(result.isAuth).toBe(false);
+    });
+
+    it('should prioritize abort over cancellation if both match', () => {
+      const error = new Error('Operation aborted');
+      error.name = 'AbortError';
+      const result = classifyError(error);
+
+      expect(result.type).toBe('abort');
+      expect(result.isAbort).toBe(true);
+      expect(result.isCancellation).toBe(true); // Still detected as cancellation too
+    });
+
+    it("should classify cancellation errors with 'canceled' spelling", () => {
+      const error = new Error('Request was canceled');
+      const result = classifyError(error);
+
+      expect(result.type).toBe('cancellation');
+      expect(result.isCancellation).toBe(true);
|
||||
});
|
||||
|
||||
it("should classify cancellation errors with 'stopped' message", () => {
|
||||
const error = new Error('Process was stopped');
|
||||
const result = classifyError(error);
|
||||
|
||||
expect(result.type).toBe('cancellation');
|
||||
expect(result.isCancellation).toBe(true);
|
||||
});
|
||||
|
||||
it('should classify generic Error as execution error', () => {
|
||||
const error = new Error('Something went wrong');
|
||||
const result = classifyError(error);
|
||||
|
||||
expect(result.type).toBe('execution');
|
||||
expect(result.isAuth).toBe(false);
|
||||
expect(result.isAbort).toBe(false);
|
||||
});
|
||||
|
||||
it("should classify non-Error objects as unknown", () => {
|
||||
const error = "string error";
|
||||
it('should classify non-Error objects as unknown', () => {
|
||||
const error = 'string error';
|
||||
const result = classifyError(error);
|
||||
|
||||
expect(result.type).toBe("unknown");
|
||||
expect(result.message).toBe("string error");
|
||||
expect(result.type).toBe('unknown');
|
||||
expect(result.message).toBe('string error');
|
||||
});
|
||||
|
||||
it("should handle null and undefined", () => {
|
||||
it('should handle null and undefined', () => {
|
||||
const nullResult = classifyError(null);
|
||||
expect(nullResult.type).toBe("unknown");
|
||||
expect(nullResult.message).toBe("Unknown error");
|
||||
expect(nullResult.type).toBe('unknown');
|
||||
expect(nullResult.message).toBe('Unknown error');
|
||||
|
||||
const undefinedResult = classifyError(undefined);
|
||||
expect(undefinedResult.type).toBe("unknown");
|
||||
expect(undefinedResult.message).toBe("Unknown error");
|
||||
expect(undefinedResult.type).toBe('unknown');
|
||||
expect(undefinedResult.message).toBe('Unknown error');
|
||||
});
|
||||
});
|
||||
|
||||
describe("getUserFriendlyErrorMessage", () => {
|
||||
it("should return friendly message for abort errors", () => {
|
||||
const error = new Error("abort");
|
||||
describe('getUserFriendlyErrorMessage', () => {
|
||||
it('should return friendly message for abort errors', () => {
|
||||
const error = new Error('abort');
|
||||
const result = getUserFriendlyErrorMessage(error);
|
||||
expect(result).toBe("Operation was cancelled");
|
||||
expect(result).toBe('Operation was cancelled');
|
||||
});
|
||||
|
||||
it("should return friendly message for authentication errors", () => {
|
||||
const error = new Error("Authentication failed");
|
||||
it('should return friendly message for authentication errors', () => {
|
||||
const error = new Error('Authentication failed');
|
||||
const result = getUserFriendlyErrorMessage(error);
|
||||
expect(result).toBe("Authentication failed. Please check your API key.");
|
||||
expect(result).toBe('Authentication failed. Please check your API key.');
|
||||
});
|
||||
|
||||
it("should return original message for other errors", () => {
|
||||
const error = new Error("File not found");
|
||||
it('should return original message for other errors', () => {
|
||||
const error = new Error('File not found');
|
||||
const result = getUserFriendlyErrorMessage(error);
|
||||
expect(result).toBe("File not found");
|
||||
expect(result).toBe('File not found');
|
||||
});
|
||||
|
||||
it("should handle non-Error objects", () => {
|
||||
const result = getUserFriendlyErrorMessage("Custom error");
|
||||
expect(result).toBe("Custom error");
|
||||
it('should handle non-Error objects', () => {
|
||||
const result = getUserFriendlyErrorMessage('Custom error');
|
||||
expect(result).toBe('Custom error');
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,10 +1,10 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import { mkdirSafe, existsSafe } from "@/lib/fs-utils.js";
import fs from "fs/promises";
import path from "path";
import os from "os";
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { mkdirSafe, existsSafe } from '@automaker/utils';
import fs from 'fs/promises';
import path from 'path';
import os from 'os';

describe("fs-utils.ts", () => {
describe('fs-utils.ts', () => {
let testDir: string;

beforeEach(async () => {
@@ -22,43 +22,41 @@ describe("fs-utils.ts", () => {
}
});

describe("mkdirSafe", () => {
it("should create a new directory", async () => {
const newDir = path.join(testDir, "new-directory");
describe('mkdirSafe', () => {
it('should create a new directory', async () => {
const newDir = path.join(testDir, 'new-directory');
await mkdirSafe(newDir);

const stats = await fs.stat(newDir);
expect(stats.isDirectory()).toBe(true);
});

it("should succeed if directory already exists", async () => {
const existingDir = path.join(testDir, "existing");
it('should succeed if directory already exists', async () => {
const existingDir = path.join(testDir, 'existing');
await fs.mkdir(existingDir);

// Should not throw
await expect(mkdirSafe(existingDir)).resolves.toBeUndefined();
});

it("should create nested directories", async () => {
const nestedDir = path.join(testDir, "a", "b", "c");
it('should create nested directories', async () => {
const nestedDir = path.join(testDir, 'a', 'b', 'c');
await mkdirSafe(nestedDir);

const stats = await fs.stat(nestedDir);
expect(stats.isDirectory()).toBe(true);
});

it("should throw if path exists as a file", async () => {
const filePath = path.join(testDir, "file.txt");
await fs.writeFile(filePath, "content");
it('should throw if path exists as a file', async () => {
const filePath = path.join(testDir, 'file.txt');
await fs.writeFile(filePath, 'content');

await expect(mkdirSafe(filePath)).rejects.toThrow(
"Path exists and is not a directory"
);
await expect(mkdirSafe(filePath)).rejects.toThrow('Path exists and is not a directory');
});

it("should succeed if path is a symlink to a directory", async () => {
const realDir = path.join(testDir, "real-dir");
const symlinkPath = path.join(testDir, "link-to-dir");
it('should succeed if path is a symlink to a directory', async () => {
const realDir = path.join(testDir, 'real-dir');
const symlinkPath = path.join(testDir, 'link-to-dir');
await fs.mkdir(realDir);
await fs.symlink(realDir, symlinkPath);

@@ -66,12 +64,12 @@ describe("fs-utils.ts", () => {
await expect(mkdirSafe(symlinkPath)).resolves.toBeUndefined();
});

it("should handle ELOOP error gracefully when checking path", async () => {
it('should handle ELOOP error gracefully when checking path', async () => {
// Mock lstat to throw ELOOP error
const originalLstat = fs.lstat;
const mkdirSafePath = path.join(testDir, "eloop-path");

vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });
const mkdirSafePath = path.join(testDir, 'eloop-path');

vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ELOOP' });

// Should not throw, should return gracefully
await expect(mkdirSafe(mkdirSafePath)).resolves.toBeUndefined();
@@ -79,13 +77,13 @@ describe("fs-utils.ts", () => {
vi.restoreAllMocks();
});

it("should handle EEXIST error gracefully when creating directory", async () => {
const newDir = path.join(testDir, "race-condition-dir");

it('should handle EEXIST error gracefully when creating directory', async () => {
const newDir = path.join(testDir, 'race-condition-dir');

// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw EEXIST (race condition)
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "EEXIST" });
vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ENOENT' });
vi.spyOn(fs, 'mkdir').mockRejectedValueOnce({ code: 'EEXIST' });

// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();
@@ -93,13 +91,13 @@ describe("fs-utils.ts", () => {
vi.restoreAllMocks();
});

it("should handle ELOOP error gracefully when creating directory", async () => {
const newDir = path.join(testDir, "eloop-create-dir");

it('should handle ELOOP error gracefully when creating directory', async () => {
const newDir = path.join(testDir, 'eloop-create-dir');

// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw ELOOP
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "ELOOP" });
vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ENOENT' });
vi.spyOn(fs, 'mkdir').mockRejectedValueOnce({ code: 'ELOOP' });

// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();
@@ -108,34 +106,34 @@ describe("fs-utils.ts", () => {
});
});

describe("existsSafe", () => {
it("should return true for existing file", async () => {
const filePath = path.join(testDir, "test-file.txt");
await fs.writeFile(filePath, "content");
describe('existsSafe', () => {
it('should return true for existing file', async () => {
const filePath = path.join(testDir, 'test-file.txt');
await fs.writeFile(filePath, 'content');

const exists = await existsSafe(filePath);
expect(exists).toBe(true);
});

it("should return true for existing directory", async () => {
const dirPath = path.join(testDir, "test-dir");
it('should return true for existing directory', async () => {
const dirPath = path.join(testDir, 'test-dir');
await fs.mkdir(dirPath);

const exists = await existsSafe(dirPath);
expect(exists).toBe(true);
});

it("should return false for non-existent path", async () => {
const nonExistent = path.join(testDir, "does-not-exist");
it('should return false for non-existent path', async () => {
const nonExistent = path.join(testDir, 'does-not-exist');

const exists = await existsSafe(nonExistent);
expect(exists).toBe(false);
});

it("should return true for symlink", async () => {
const realFile = path.join(testDir, "real-file.txt");
const symlinkPath = path.join(testDir, "link-to-file");
await fs.writeFile(realFile, "content");
it('should return true for symlink', async () => {
const realFile = path.join(testDir, 'real-file.txt');
const symlinkPath = path.join(testDir, 'link-to-file');
await fs.writeFile(realFile, 'content');
await fs.symlink(realFile, symlinkPath);

const exists = await existsSafe(symlinkPath);
@@ -143,29 +141,29 @@ describe("fs-utils.ts", () => {
});

it("should return true for broken symlink (symlink exists even if target doesn't)", async () => {
const symlinkPath = path.join(testDir, "broken-link");
const nonExistent = path.join(testDir, "non-existent-target");
const symlinkPath = path.join(testDir, 'broken-link');
const nonExistent = path.join(testDir, 'non-existent-target');
await fs.symlink(nonExistent, symlinkPath);

const exists = await existsSafe(symlinkPath);
expect(exists).toBe(true);
});

it("should return true for ELOOP error (symlink loop)", async () => {
it('should return true for ELOOP error (symlink loop)', async () => {
// Mock lstat to throw ELOOP error
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });
vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'ELOOP' });

const exists = await existsSafe("/some/path/with/loop");
const exists = await existsSafe('/some/path/with/loop');
expect(exists).toBe(true);

vi.restoreAllMocks();
});

it("should throw for other errors", async () => {
it('should throw for other errors', async () => {
// Mock lstat to throw a non-ENOENT, non-ELOOP error
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "EACCES" });
vi.spyOn(fs, 'lstat').mockRejectedValueOnce({ code: 'EACCES' });

await expect(existsSafe("/some/path")).rejects.toMatchObject({ code: "EACCES" });
await expect(existsSafe('/some/path')).rejects.toMatchObject({ code: 'EACCES' });

vi.restoreAllMocks();
});

@@ -1,174 +1,164 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { describe, it, expect, vi, beforeEach } from 'vitest';
import {
getMimeTypeForImage,
readImageAsBase64,
convertImagesToContentBlocks,
formatImagePathsForPrompt,
} from "@/lib/image-handler.js";
import { pngBase64Fixture } from "../../fixtures/images.js";
import * as fs from "fs/promises";
} from '@automaker/utils';
import { pngBase64Fixture } from '../../fixtures/images.js';
import * as fs from 'fs/promises';

vi.mock("fs/promises");
vi.mock('fs/promises');

describe("image-handler.ts", () => {
describe('image-handler.ts', () => {
beforeEach(() => {
vi.clearAllMocks();
});

describe("getMimeTypeForImage", () => {
it("should return correct MIME type for .jpg", () => {
expect(getMimeTypeForImage("test.jpg")).toBe("image/jpeg");
expect(getMimeTypeForImage("/path/to/test.jpg")).toBe("image/jpeg");
describe('getMimeTypeForImage', () => {
it('should return correct MIME type for .jpg', () => {
expect(getMimeTypeForImage('test.jpg')).toBe('image/jpeg');
expect(getMimeTypeForImage('/path/to/test.jpg')).toBe('image/jpeg');
});

it("should return correct MIME type for .jpeg", () => {
expect(getMimeTypeForImage("test.jpeg")).toBe("image/jpeg");
it('should return correct MIME type for .jpeg', () => {
expect(getMimeTypeForImage('test.jpeg')).toBe('image/jpeg');
});

it("should return correct MIME type for .png", () => {
expect(getMimeTypeForImage("test.png")).toBe("image/png");
it('should return correct MIME type for .png', () => {
expect(getMimeTypeForImage('test.png')).toBe('image/png');
});

it("should return correct MIME type for .gif", () => {
expect(getMimeTypeForImage("test.gif")).toBe("image/gif");
it('should return correct MIME type for .gif', () => {
expect(getMimeTypeForImage('test.gif')).toBe('image/gif');
});

it("should return correct MIME type for .webp", () => {
expect(getMimeTypeForImage("test.webp")).toBe("image/webp");
it('should return correct MIME type for .webp', () => {
expect(getMimeTypeForImage('test.webp')).toBe('image/webp');
});

it("should be case-insensitive", () => {
expect(getMimeTypeForImage("test.PNG")).toBe("image/png");
expect(getMimeTypeForImage("test.JPG")).toBe("image/jpeg");
expect(getMimeTypeForImage("test.GIF")).toBe("image/gif");
expect(getMimeTypeForImage("test.WEBP")).toBe("image/webp");
it('should be case-insensitive', () => {
expect(getMimeTypeForImage('test.PNG')).toBe('image/png');
expect(getMimeTypeForImage('test.JPG')).toBe('image/jpeg');
expect(getMimeTypeForImage('test.GIF')).toBe('image/gif');
expect(getMimeTypeForImage('test.WEBP')).toBe('image/webp');
});

it("should default to image/png for unknown extensions", () => {
expect(getMimeTypeForImage("test.unknown")).toBe("image/png");
expect(getMimeTypeForImage("test.txt")).toBe("image/png");
expect(getMimeTypeForImage("test")).toBe("image/png");
it('should default to image/png for unknown extensions', () => {
expect(getMimeTypeForImage('test.unknown')).toBe('image/png');
expect(getMimeTypeForImage('test.txt')).toBe('image/png');
expect(getMimeTypeForImage('test')).toBe('image/png');
});

it("should handle paths with multiple dots", () => {
expect(getMimeTypeForImage("my.image.file.jpg")).toBe("image/jpeg");
it('should handle paths with multiple dots', () => {
expect(getMimeTypeForImage('my.image.file.jpg')).toBe('image/jpeg');
});
});

describe("readImageAsBase64", () => {
it("should read image and return base64 data", async () => {
const mockBuffer = Buffer.from(pngBase64Fixture, "base64");
describe('readImageAsBase64', () => {
it('should read image and return base64 data', async () => {
const mockBuffer = Buffer.from(pngBase64Fixture, 'base64');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

const result = await readImageAsBase64("/path/to/test.png");
const result = await readImageAsBase64('/path/to/test.png');

expect(result).toMatchObject({
base64: pngBase64Fixture,
mimeType: "image/png",
filename: "test.png",
originalPath: "/path/to/test.png",
mimeType: 'image/png',
filename: 'test.png',
originalPath: '/path/to/test.png',
});
expect(fs.readFile).toHaveBeenCalledWith("/path/to/test.png");
expect(fs.readFile).toHaveBeenCalledWith('/path/to/test.png');
});

it("should handle different image formats", async () => {
const mockBuffer = Buffer.from("jpeg-data");
it('should handle different image formats', async () => {
const mockBuffer = Buffer.from('jpeg-data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

const result = await readImageAsBase64("/path/to/photo.jpg");
const result = await readImageAsBase64('/path/to/photo.jpg');

expect(result.mimeType).toBe("image/jpeg");
expect(result.filename).toBe("photo.jpg");
expect(result.base64).toBe(mockBuffer.toString("base64"));
expect(result.mimeType).toBe('image/jpeg');
expect(result.filename).toBe('photo.jpg');
expect(result.base64).toBe(mockBuffer.toString('base64'));
});

it("should extract filename from path", async () => {
const mockBuffer = Buffer.from("data");
it('should extract filename from path', async () => {
const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

const result = await readImageAsBase64("/deep/nested/path/image.webp");
const result = await readImageAsBase64('/deep/nested/path/image.webp');

expect(result.filename).toBe("image.webp");
expect(result.filename).toBe('image.webp');
});

it("should throw error if file cannot be read", async () => {
vi.mocked(fs.readFile).mockRejectedValue(new Error("File not found"));
it('should throw error if file cannot be read', async () => {
vi.mocked(fs.readFile).mockRejectedValue(new Error('File not found'));

await expect(readImageAsBase64("/nonexistent.png")).rejects.toThrow(
"File not found"
);
await expect(readImageAsBase64('/nonexistent.png')).rejects.toThrow('File not found');
});
});

describe("convertImagesToContentBlocks", () => {
it("should convert single image to content block", async () => {
const mockBuffer = Buffer.from(pngBase64Fixture, "base64");
describe('convertImagesToContentBlocks', () => {
it('should convert single image to content block', async () => {
const mockBuffer = Buffer.from(pngBase64Fixture, 'base64');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

const result = await convertImagesToContentBlocks(["/path/test.png"]);
const result = await convertImagesToContentBlocks(['/path/test.png']);

expect(result).toHaveLength(1);
expect(result[0]).toMatchObject({
type: "image",
type: 'image',
source: {
type: "base64",
media_type: "image/png",
type: 'base64',
media_type: 'image/png',
data: pngBase64Fixture,
},
});
});

it("should convert multiple images to content blocks", async () => {
const mockBuffer = Buffer.from("test-data");
it('should convert multiple images to content blocks', async () => {
const mockBuffer = Buffer.from('test-data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

const result = await convertImagesToContentBlocks([
"/a.png",
"/b.jpg",
"/c.webp",
]);
const result = await convertImagesToContentBlocks(['/a.png', '/b.jpg', '/c.webp']);

expect(result).toHaveLength(3);
expect(result[0].source.media_type).toBe("image/png");
expect(result[1].source.media_type).toBe("image/jpeg");
expect(result[2].source.media_type).toBe("image/webp");
expect(result[0].source.media_type).toBe('image/png');
expect(result[1].source.media_type).toBe('image/jpeg');
expect(result[2].source.media_type).toBe('image/webp');
});

it("should resolve relative paths with workDir", async () => {
const mockBuffer = Buffer.from("data");
it('should resolve relative paths with workDir', async () => {
const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

await convertImagesToContentBlocks(["relative.png"], "/work/dir");
await convertImagesToContentBlocks(['relative.png'], '/work/dir');

// Use path-agnostic check since Windows uses backslashes
const calls = vi.mocked(fs.readFile).mock.calls;
expect(calls[0][0]).toMatch(/relative\.png$/);
expect(calls[0][0]).toContain("work");
expect(calls[0][0]).toContain("dir");
expect(calls[0][0]).toContain('work');
expect(calls[0][0]).toContain('dir');
});

it("should handle absolute paths without workDir", async () => {
const mockBuffer = Buffer.from("data");
it('should handle absolute paths without workDir', async () => {
const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

await convertImagesToContentBlocks(["/absolute/path.png"]);
await convertImagesToContentBlocks(['/absolute/path.png']);

expect(fs.readFile).toHaveBeenCalledWith("/absolute/path.png");
expect(fs.readFile).toHaveBeenCalledWith('/absolute/path.png');
});

it("should continue processing on individual image errors", async () => {
it('should continue processing on individual image errors', async () => {
vi.mocked(fs.readFile)
.mockResolvedValueOnce(Buffer.from("ok1"))
.mockRejectedValueOnce(new Error("Failed"))
.mockResolvedValueOnce(Buffer.from("ok2"));
.mockResolvedValueOnce(Buffer.from('ok1'))
.mockRejectedValueOnce(new Error('Failed'))
.mockResolvedValueOnce(Buffer.from('ok2'));

const consoleSpy = vi.spyOn(console, "error").mockImplementation(() => {});
const consoleSpy = vi.spyOn(console, 'error').mockImplementation(() => {});

const result = await convertImagesToContentBlocks([
"/a.png",
"/b.png",
"/c.png",
]);
const result = await convertImagesToContentBlocks(['/a.png', '/b.png', '/c.png']);

expect(result).toHaveLength(2); // Only successful images
expect(consoleSpy).toHaveBeenCalled();
@@ -176,56 +166,52 @@ describe("image-handler.ts", () => {
consoleSpy.mockRestore();
});

it("should return empty array for empty input", async () => {
it('should return empty array for empty input', async () => {
const result = await convertImagesToContentBlocks([]);
expect(result).toEqual([]);
});

it("should handle undefined workDir", async () => {
const mockBuffer = Buffer.from("data");
it('should handle undefined workDir', async () => {
const mockBuffer = Buffer.from('data');
vi.mocked(fs.readFile).mockResolvedValue(mockBuffer);

const result = await convertImagesToContentBlocks(["/test.png"], undefined);
const result = await convertImagesToContentBlocks(['/test.png'], undefined);

expect(result).toHaveLength(1);
expect(fs.readFile).toHaveBeenCalledWith("/test.png");
expect(fs.readFile).toHaveBeenCalledWith('/test.png');
});
});

describe("formatImagePathsForPrompt", () => {
it("should format single image path as bulleted list", () => {
const result = formatImagePathsForPrompt(["/path/image.png"]);
describe('formatImagePathsForPrompt', () => {
it('should format single image path as bulleted list', () => {
const result = formatImagePathsForPrompt(['/path/image.png']);

expect(result).toContain("\n\nAttached images:");
expect(result).toContain("- /path/image.png");
expect(result).toContain('\n\nAttached images:');
expect(result).toContain('- /path/image.png');
});

it("should format multiple image paths as bulleted list", () => {
const result = formatImagePathsForPrompt([
"/path/a.png",
"/path/b.jpg",
"/path/c.webp",
]);
it('should format multiple image paths as bulleted list', () => {
const result = formatImagePathsForPrompt(['/path/a.png', '/path/b.jpg', '/path/c.webp']);

expect(result).toContain("Attached images:");
expect(result).toContain("- /path/a.png");
expect(result).toContain("- /path/b.jpg");
expect(result).toContain("- /path/c.webp");
expect(result).toContain('Attached images:');
expect(result).toContain('- /path/a.png');
expect(result).toContain('- /path/b.jpg');
expect(result).toContain('- /path/c.webp');
});

it("should return empty string for empty array", () => {
it('should return empty string for empty array', () => {
const result = formatImagePathsForPrompt([]);
expect(result).toBe("");
expect(result).toBe('');
});

it("should start with double newline", () => {
const result = formatImagePathsForPrompt(["/test.png"]);
expect(result.startsWith("\n\n")).toBe(true);
it('should start with double newline', () => {
const result = formatImagePathsForPrompt(['/test.png']);
expect(result.startsWith('\n\n')).toBe(true);
});

it("should handle paths with special characters", () => {
const result = formatImagePathsForPrompt(["/path/with spaces/image.png"]);
expect(result).toContain("- /path/with spaces/image.png");
it('should handle paths with special characters', () => {
const result = formatImagePathsForPrompt(['/path/with spaces/image.png']);
expect(result).toContain('- /path/with spaces/image.png');
});
});
});

@@ -1,12 +1,7 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import {
LogLevel,
createLogger,
getLogLevel,
setLogLevel,
} from "@/lib/logger.js";
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import { LogLevel, createLogger, getLogLevel, setLogLevel } from '@automaker/utils';

describe("logger.ts", () => {
describe('logger.ts', () => {
let consoleSpy: {
log: ReturnType<typeof vi.spyOn>;
warn: ReturnType<typeof vi.spyOn>;
@@ -17,9 +12,9 @@ describe("logger.ts", () => {
beforeEach(() => {
originalLogLevel = getLogLevel();
consoleSpy = {
log: vi.spyOn(console, "log").mockImplementation(() => {}),
warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
error: vi.spyOn(console, "error").mockImplementation(() => {}),
log: vi.spyOn(console, 'log').mockImplementation(() => {}),
warn: vi.spyOn(console, 'warn').mockImplementation(() => {}),
error: vi.spyOn(console, 'error').mockImplementation(() => {}),
};
});

@@ -30,8 +25,8 @@ describe("logger.ts", () => {
consoleSpy.error.mockRestore();
});

describe("LogLevel enum", () => {
it("should have correct numeric values", () => {
describe('LogLevel enum', () => {
it('should have correct numeric values', () => {
expect(LogLevel.ERROR).toBe(0);
expect(LogLevel.WARN).toBe(1);
expect(LogLevel.INFO).toBe(2);
@@ -39,8 +34,8 @@ describe("logger.ts", () => {
});
});

describe("setLogLevel and getLogLevel", () => {
it("should set and get log level", () => {
describe('setLogLevel and getLogLevel', () => {
it('should set and get log level', () => {
setLogLevel(LogLevel.DEBUG);
expect(getLogLevel()).toBe(LogLevel.DEBUG);

@@ -49,71 +44,66 @@ describe("logger.ts", () => {
});
});

describe("createLogger", () => {
it("should create a logger with context prefix", () => {
describe('createLogger', () => {
it('should create a logger with context prefix', () => {
setLogLevel(LogLevel.INFO);
const logger = createLogger("TestContext");
const logger = createLogger('TestContext');

logger.info("test message");
logger.info('test message');

expect(consoleSpy.log).toHaveBeenCalledWith("[TestContext]", "test message");
expect(consoleSpy.log).toHaveBeenCalledWith('[TestContext]', 'test message');
});

it("should log error at all log levels", () => {
const logger = createLogger("Test");
it('should log error at all log levels', () => {
const logger = createLogger('Test');

setLogLevel(LogLevel.ERROR);
logger.error("error message");
expect(consoleSpy.error).toHaveBeenCalledWith("[Test]", "error message");
logger.error('error message');
expect(consoleSpy.error).toHaveBeenCalledWith('[Test]', 'error message');
});

it("should log warn when level is WARN or higher", () => {
const logger = createLogger("Test");
it('should log warn when level is WARN or higher', () => {
const logger = createLogger('Test');

setLogLevel(LogLevel.ERROR);
logger.warn("warn message 1");
logger.warn('warn message 1');
expect(consoleSpy.warn).not.toHaveBeenCalled();

setLogLevel(LogLevel.WARN);
logger.warn("warn message 2");
expect(consoleSpy.warn).toHaveBeenCalledWith("[Test]", "warn message 2");
logger.warn('warn message 2');
expect(consoleSpy.warn).toHaveBeenCalledWith('[Test]', 'warn message 2');
});

it("should log info when level is INFO or higher", () => {
const logger = createLogger("Test");
it('should log info when level is INFO or higher', () => {
const logger = createLogger('Test');

setLogLevel(LogLevel.WARN);
logger.info("info message 1");
logger.info('info message 1');
expect(consoleSpy.log).not.toHaveBeenCalled();

setLogLevel(LogLevel.INFO);
logger.info("info message 2");
expect(consoleSpy.log).toHaveBeenCalledWith("[Test]", "info message 2");
logger.info('info message 2');
expect(consoleSpy.log).toHaveBeenCalledWith('[Test]', 'info message 2');
});

it("should log debug only when level is DEBUG", () => {
const logger = createLogger("Test");
it('should log debug only when level is DEBUG', () => {
const logger = createLogger('Test');

setLogLevel(LogLevel.INFO);
logger.debug("debug message 1");
logger.debug('debug message 1');
expect(consoleSpy.log).not.toHaveBeenCalled();

setLogLevel(LogLevel.DEBUG);
logger.debug("debug message 2");
expect(consoleSpy.log).toHaveBeenCalledWith("[Test]", "[DEBUG]", "debug message 2");
logger.debug('debug message 2');
expect(consoleSpy.log).toHaveBeenCalledWith('[Test]', '[DEBUG]', 'debug message 2');
});

it("should pass multiple arguments to log functions", () => {
it('should pass multiple arguments to log functions', () => {
setLogLevel(LogLevel.DEBUG);
const logger = createLogger("Multi");
const logger = createLogger('Multi');

logger.info("message", { data: "value" }, 123);
expect(consoleSpy.log).toHaveBeenCalledWith(
"[Multi]",
"message",
{ data: "value" },
123
);
logger.info('message', { data: 'value' }, 123);
expect(consoleSpy.log).toHaveBeenCalledWith('[Multi]', 'message', { data: 'value' }, 123);
});
});
});

@@ -1,18 +1,18 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import {
resolveModelString,
getEffectiveModel,
CLAUDE_MODEL_MAP,
DEFAULT_MODELS,
} from "@/lib/model-resolver.js";
} from '@automaker/model-resolver';

describe("model-resolver.ts", () => {
describe('model-resolver.ts', () => {
let consoleSpy: any;

beforeEach(() => {
consoleSpy = {
log: vi.spyOn(console, "log").mockImplementation(() => {}),
warn: vi.spyOn(console, "warn").mockImplementation(() => {}),
log: vi.spyOn(console, 'log').mockImplementation(() => {}),
warn: vi.spyOn(console, 'warn').mockImplementation(() => {}),
};
});

@@ -21,27 +21,27 @@ describe("model-resolver.ts", () => {
consoleSpy.warn.mockRestore();
});

describe("resolveModelString", () => {
describe('resolveModelString', () => {
it("should resolve 'haiku' alias to full model string", () => {
const result = resolveModelString("haiku");
expect(result).toBe("claude-haiku-4-5");
const result = resolveModelString('haiku');
expect(result).toBe('claude-haiku-4-5');
});

it("should resolve 'sonnet' alias to full model string", () => {
const result = resolveModelString("sonnet");
expect(result).toBe("claude-sonnet-4-20250514");
const result = resolveModelString('sonnet');
expect(result).toBe('claude-sonnet-4-20250514');
});

it("should resolve 'opus' alias to full model string", () => {
const result = resolveModelString("opus");
expect(result).toBe("claude-opus-4-5-20251101");
const result = resolveModelString('opus');
expect(result).toBe('claude-opus-4-5-20251101');
expect(consoleSpy.log).toHaveBeenCalledWith(
expect.stringContaining('Resolved model alias: "opus"')
);
});

it("should treat unknown models as falling back to default", () => {
const models = ["o1", "o1-mini", "o3", "gpt-5.2", "unknown-model"];
it('should treat unknown models as falling back to default', () => {
const models = ['o1', 'o1-mini', 'o3', 'gpt-5.2', 'unknown-model'];
models.forEach((model) => {
const result = resolveModelString(model);
// Should fall back to default since these aren't supported
@@ -49,95 +49,91 @@ describe("model-resolver.ts", () => {
});
});

it("should pass through full Claude model strings", () => {
const models = [
"claude-opus-4-5-20251101",
"claude-sonnet-4-20250514",
"claude-haiku-4-5",
];
it('should pass through full Claude model strings', () => {
const models = ['claude-opus-4-5-20251101', 'claude-sonnet-4-20250514', 'claude-haiku-4-5'];
models.forEach((model) => {
const result = resolveModelString(model);
expect(result).toBe(model);
});
expect(consoleSpy.log).toHaveBeenCalledWith(
expect.stringContaining("Using full Claude model string")
expect.stringContaining('Using full Claude model string')
);
});

it("should return default model when modelKey is undefined", () => {
it('should return default model when modelKey is undefined', () => {
const result = resolveModelString(undefined);
expect(result).toBe(DEFAULT_MODELS.claude);
});

it("should return custom default model when provided", () => {
const customDefault = "custom-model";
it('should return custom default model when provided', () => {
const customDefault = 'custom-model';
const result = resolveModelString(undefined, customDefault);
expect(result).toBe(customDefault);
});

it("should return default for unknown model key", () => {
const result = resolveModelString("unknown-model");
it('should return default for unknown model key', () => {
const result = resolveModelString('unknown-model');
expect(result).toBe(DEFAULT_MODELS.claude);
expect(consoleSpy.warn).toHaveBeenCalledWith(
expect.stringContaining('Unknown model key "unknown-model"')
);
});

it("should handle empty string", () => {
const result = resolveModelString("");
it('should handle empty string', () => {
const result = resolveModelString('');
expect(result).toBe(DEFAULT_MODELS.claude);
});
});

describe("getEffectiveModel", () => {
it("should prioritize explicit model over session and default", () => {
const result = getEffectiveModel("opus", "haiku", "gpt-5.2");
expect(result).toBe("claude-opus-4-5-20251101");
describe('getEffectiveModel', () => {
it('should prioritize explicit model over session and default', () => {
const result = getEffectiveModel('opus', 'haiku', 'gpt-5.2');
expect(result).toBe('claude-opus-4-5-20251101');
});

it("should use session model when explicit is not provided", () => {
const result = getEffectiveModel(undefined, "sonnet", "gpt-5.2");
expect(result).toBe("claude-sonnet-4-20250514");
it('should use session model when explicit is not provided', () => {
const result = getEffectiveModel(undefined, 'sonnet', 'gpt-5.2');
expect(result).toBe('claude-sonnet-4-20250514');
});

it("should use default when neither explicit nor session is provided", () => {
const customDefault = "claude-haiku-4-5";
it('should use default when neither explicit nor session is provided', () => {
const customDefault = 'claude-haiku-4-5';
const result = getEffectiveModel(undefined, undefined, customDefault);
expect(result).toBe(customDefault);
});

it("should use Claude default when no arguments provided", () => {
it('should use Claude default when no arguments provided', () => {
const result = getEffectiveModel();
expect(result).toBe(DEFAULT_MODELS.claude);
});

it("should handle explicit empty strings as undefined", () => {
const result = getEffectiveModel("", "haiku");
expect(result).toBe("claude-haiku-4-5");
it('should handle explicit empty strings as undefined', () => {
const result = getEffectiveModel('', 'haiku');
expect(result).toBe('claude-haiku-4-5');
});
});

describe("CLAUDE_MODEL_MAP", () => {
it("should have haiku, sonnet, opus mappings", () => {
expect(CLAUDE_MODEL_MAP).toHaveProperty("haiku");
expect(CLAUDE_MODEL_MAP).toHaveProperty("sonnet");
expect(CLAUDE_MODEL_MAP).toHaveProperty("opus");
describe('CLAUDE_MODEL_MAP', () => {
it('should have haiku, sonnet, opus mappings', () => {
expect(CLAUDE_MODEL_MAP).toHaveProperty('haiku');
expect(CLAUDE_MODEL_MAP).toHaveProperty('sonnet');
expect(CLAUDE_MODEL_MAP).toHaveProperty('opus');
});

it("should have valid Claude model strings", () => {
expect(CLAUDE_MODEL_MAP.haiku).toContain("haiku");
expect(CLAUDE_MODEL_MAP.sonnet).toContain("sonnet");
expect(CLAUDE_MODEL_MAP.opus).toContain("opus");
it('should have valid Claude model strings', () => {
expect(CLAUDE_MODEL_MAP.haiku).toContain('haiku');
expect(CLAUDE_MODEL_MAP.sonnet).toContain('sonnet');
expect(CLAUDE_MODEL_MAP.opus).toContain('opus');
});
});

describe("DEFAULT_MODELS", () => {
it("should have claude default", () => {
expect(DEFAULT_MODELS).toHaveProperty("claude");
describe('DEFAULT_MODELS', () => {
it('should have claude default', () => {
expect(DEFAULT_MODELS).toHaveProperty('claude');
});

it("should have valid default model", () => {
expect(DEFAULT_MODELS.claude).toContain("claude");
it('should have valid default model', () => {
expect(DEFAULT_MODELS.claude).toContain('claude');
});
});
});

@@ -1,197 +1,120 @@
import { describe, it, expect, vi, beforeEach } from "vitest";
import { buildPromptWithImages } from "@/lib/prompt-builder.js";
import * as imageHandler from "@/lib/image-handler.js";
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';
import * as utils from '@automaker/utils';
import * as fs from 'fs/promises';

vi.mock("@/lib/image-handler.js");
// Mock fs module for the image-handler's readFile calls
vi.mock('fs/promises');

describe("prompt-builder.ts", () => {
describe('prompt-builder.ts', () => {
beforeEach(() => {
vi.clearAllMocks();
// Setup default mock for fs.readFile to return a valid image buffer
vi.mocked(fs.readFile).mockResolvedValue(Buffer.from('fake-image-data'));
});

describe("buildPromptWithImages", () => {
it("should return plain text when no images provided", async () => {
const result = await buildPromptWithImages("Hello world");
afterEach(() => {
vi.restoreAllMocks();
});

describe('buildPromptWithImages', () => {
it('should return plain text when no images provided', async () => {
const result = await utils.buildPromptWithImages('Hello world');

expect(result).toEqual({
content: "Hello world",
content: 'Hello world',
hasImages: false,
});
});

it("should return plain text when imagePaths is empty array", async () => {
const result = await buildPromptWithImages("Hello world", []);
it('should return plain text when imagePaths is empty array', async () => {
const result = await utils.buildPromptWithImages('Hello world', []);

expect(result).toEqual({
content: "Hello world",
content: 'Hello world',
hasImages: false,
});
});

it("should build content blocks with single image", async () => {
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "base64data" },
},
]);

const result = await buildPromptWithImages("Describe this image", [
"/test.png",
]);
it('should build content blocks with single image', async () => {
const result = await utils.buildPromptWithImages('Describe this image', ['/test.png']);

expect(result.hasImages).toBe(true);
expect(Array.isArray(result.content)).toBe(true);
const content = result.content as Array<any>;
const content = result.content as Array<{ type: string; text?: string }>;
expect(content).toHaveLength(2);
expect(content[0]).toEqual({ type: "text", text: "Describe this image" });
expect(content[1].type).toBe("image");
expect(content[0]).toEqual({ type: 'text', text: 'Describe this image' });
expect(content[1].type).toBe('image');
});

it("should build content blocks with multiple images", async () => {
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "data1" },
},
{
type: "image",
source: { type: "base64", media_type: "image/jpeg", data: "data2" },
},
]);

const result = await buildPromptWithImages("Analyze these", [
"/a.png",
"/b.jpg",
]);
it('should build content blocks with multiple images', async () => {
const result = await utils.buildPromptWithImages('Analyze these', ['/a.png', '/b.jpg']);

expect(result.hasImages).toBe(true);
const content = result.content as Array<any>;
const content = result.content as Array<{ type: string }>;
expect(content).toHaveLength(3); // 1 text + 2 images
expect(content[0].type).toBe("text");
expect(content[1].type).toBe("image");
expect(content[2].type).toBe("image");
expect(content[0].type).toBe('text');
expect(content[1].type).toBe('image');
expect(content[2].type).toBe('image');
});

it("should include image paths in text when requested", async () => {
vi.mocked(imageHandler.formatImagePathsForPrompt).mockReturnValue(
"\n\nAttached images:\n- /test.png"
);
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "data" },
},
]);

const result = await buildPromptWithImages(
"Base prompt",
["/test.png"],
it('should include image paths in text when requested', async () => {
const result = await utils.buildPromptWithImages(
'Base prompt',
['/test.png'],
undefined,
true
);

expect(imageHandler.formatImagePathsForPrompt).toHaveBeenCalledWith([
"/test.png",
]);
const content = result.content as Array<any>;
expect(content[0].text).toContain("Base prompt");
expect(content[0].text).toContain("Attached images:");
const content = result.content as Array<{ type: string; text?: string }>;
expect(content[0].text).toContain('Base prompt');
expect(content[0].text).toContain('/test.png');
});

it("should not include image paths by default", async () => {
vi.mocked(imageHandler.formatImagePathsForPrompt).mockReturnValue(
"\n\nAttached images:\n- /test.png"
);
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "data" },
},
]);
it('should not include image paths by default', async () => {
const result = await utils.buildPromptWithImages('Base prompt', ['/test.png']);

const result = await buildPromptWithImages("Base prompt", ["/test.png"]);

expect(imageHandler.formatImagePathsForPrompt).not.toHaveBeenCalled();
const content = result.content as Array<any>;
expect(content[0].text).toBe("Base prompt");
const content = result.content as Array<{ type: string; text?: string }>;
expect(content[0].text).toBe('Base prompt');
expect(content[0].text).not.toContain('Attached');
});

it("should pass workDir to convertImagesToContentBlocks", async () => {
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "data" },
},
]);

await buildPromptWithImages("Test", ["/test.png"], "/work/dir");

expect(imageHandler.convertImagesToContentBlocks).toHaveBeenCalledWith(
["/test.png"],
"/work/dir"
);
});

it("should handle empty text content", async () => {
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "data" },
},
]);

const result = await buildPromptWithImages("", ["/test.png"]);
it('should handle empty text content', async () => {
const result = await utils.buildPromptWithImages('', ['/test.png']);

expect(result.hasImages).toBe(true);
// When text is empty/whitespace, should only have image blocks
const content = result.content as Array<any>;
expect(content.every((block) => block.type === "image")).toBe(true);
const content = result.content as Array<{ type: string }>;
expect(content.every((block) => block.type === 'image')).toBe(true);
});

it("should trim text content before checking if empty", async () => {
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "data" },
},
]);
it('should trim text content before checking if empty', async () => {
const result = await utils.buildPromptWithImages(' ', ['/test.png']);

const result = await buildPromptWithImages(" ", ["/test.png"]);

const content = result.content as Array<any>;
const content = result.content as Array<{ type: string }>;
// Whitespace-only text should be excluded
expect(content.every((block) => block.type === "image")).toBe(true);
expect(content.every((block) => block.type === 'image')).toBe(true);
});

it("should return text when only one block and it's text", async () => {
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([]);
// Make readFile reject to simulate image load failure
vi.mocked(fs.readFile).mockRejectedValue(new Error('File not found'));

const result = await buildPromptWithImages("Just text", ["/missing.png"]);
const result = await utils.buildPromptWithImages('Just text', ['/missing.png']);

// If no images are successfully loaded, should return just the text
expect(result.content).toBe("Just text");
expect(result.content).toBe('Just text');
expect(result.hasImages).toBe(true); // Still true because images were requested
});

it("should handle workDir with relative paths", async () => {
vi.mocked(imageHandler.convertImagesToContentBlocks).mockResolvedValue([
{
type: "image",
source: { type: "base64", media_type: "image/png", data: "data" },
},
]);
it('should pass workDir for path resolution', async () => {
// The function should use workDir to resolve relative paths
const result = await utils.buildPromptWithImages('Test', ['relative.png'], '/work/dir');

await buildPromptWithImages(
"Test",
["relative.png"],
"/absolute/work/dir"
);

expect(imageHandler.convertImagesToContentBlocks).toHaveBeenCalledWith(
["relative.png"],
"/absolute/work/dir"
);
// Verify it tried to read the file (with resolved path including workDir)
expect(fs.readFile).toHaveBeenCalled();
// The path should be resolved using workDir
const readCall = vi.mocked(fs.readFile).mock.calls[0][0];
expect(readCall).toContain('relative.png');
});
});
});

@@ -1,6 +1,6 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import { describe, it, expect, vi, beforeEach, afterEach } from 'vitest';

describe("sdk-options.ts", () => {
describe('sdk-options.ts', () => {
  let originalEnv: NodeJS.ProcessEnv;

  beforeEach(() => {
@@ -12,34 +12,34 @@ describe("sdk-options.ts", () => {
    process.env = originalEnv;
  });

  describe("TOOL_PRESETS", () => {
    it("should export readOnly tools", async () => {
      const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
      expect(TOOL_PRESETS.readOnly).toEqual(["Read", "Glob", "Grep"]);
  describe('TOOL_PRESETS', () => {
    it('should export readOnly tools', async () => {
      const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
      expect(TOOL_PRESETS.readOnly).toEqual(['Read', 'Glob', 'Grep']);
    });

    it("should export specGeneration tools", async () => {
      const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
      expect(TOOL_PRESETS.specGeneration).toEqual(["Read", "Glob", "Grep"]);
    it('should export specGeneration tools', async () => {
      const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
      expect(TOOL_PRESETS.specGeneration).toEqual(['Read', 'Glob', 'Grep']);
    });

    it("should export fullAccess tools", async () => {
      const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
      expect(TOOL_PRESETS.fullAccess).toContain("Read");
      expect(TOOL_PRESETS.fullAccess).toContain("Write");
      expect(TOOL_PRESETS.fullAccess).toContain("Edit");
      expect(TOOL_PRESETS.fullAccess).toContain("Bash");
    it('should export fullAccess tools', async () => {
      const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
      expect(TOOL_PRESETS.fullAccess).toContain('Read');
      expect(TOOL_PRESETS.fullAccess).toContain('Write');
      expect(TOOL_PRESETS.fullAccess).toContain('Edit');
      expect(TOOL_PRESETS.fullAccess).toContain('Bash');
    });

    it("should export chat tools matching fullAccess", async () => {
      const { TOOL_PRESETS } = await import("@/lib/sdk-options.js");
    it('should export chat tools matching fullAccess', async () => {
      const { TOOL_PRESETS } = await import('@/lib/sdk-options.js');
      expect(TOOL_PRESETS.chat).toEqual(TOOL_PRESETS.fullAccess);
    });
  });

  describe("MAX_TURNS", () => {
    it("should export turn presets", async () => {
      const { MAX_TURNS } = await import("@/lib/sdk-options.js");
  describe('MAX_TURNS', () => {
    it('should export turn presets', async () => {
      const { MAX_TURNS } = await import('@/lib/sdk-options.js');
      expect(MAX_TURNS.quick).toBe(50);
      expect(MAX_TURNS.standard).toBe(100);
      expect(MAX_TURNS.extended).toBe(250);
@@ -47,71 +47,67 @@ describe("sdk-options.ts", () => {
    });
  });

  describe("getModelForUseCase", () => {
    it("should return explicit model when provided", async () => {
      const { getModelForUseCase } = await import("@/lib/sdk-options.js");
      const result = getModelForUseCase("spec", "claude-sonnet-4-20250514");
      expect(result).toBe("claude-sonnet-4-20250514");
  describe('getModelForUseCase', () => {
    it('should return explicit model when provided', async () => {
      const { getModelForUseCase } = await import('@/lib/sdk-options.js');
      const result = getModelForUseCase('spec', 'claude-sonnet-4-20250514');
      expect(result).toBe('claude-sonnet-4-20250514');
    });

    it("should use environment variable for spec model", async () => {
      process.env.AUTOMAKER_MODEL_SPEC = "claude-sonnet-4-20250514";
      const { getModelForUseCase } = await import("@/lib/sdk-options.js");
      const result = getModelForUseCase("spec");
      expect(result).toBe("claude-sonnet-4-20250514");
    it('should use environment variable for spec model', async () => {
      process.env.AUTOMAKER_MODEL_SPEC = 'claude-sonnet-4-20250514';
      const { getModelForUseCase } = await import('@/lib/sdk-options.js');
      const result = getModelForUseCase('spec');
      expect(result).toBe('claude-sonnet-4-20250514');
    });

    it("should use default model for spec when no override", async () => {
    it('should use default model for spec when no override', async () => {
      delete process.env.AUTOMAKER_MODEL_SPEC;
      delete process.env.AUTOMAKER_MODEL_DEFAULT;
      const { getModelForUseCase } = await import("@/lib/sdk-options.js");
      const result = getModelForUseCase("spec");
      expect(result).toContain("claude");
      const { getModelForUseCase } = await import('@/lib/sdk-options.js');
      const result = getModelForUseCase('spec');
      expect(result).toContain('claude');
    });

    it("should fall back to AUTOMAKER_MODEL_DEFAULT", async () => {
    it('should fall back to AUTOMAKER_MODEL_DEFAULT', async () => {
      delete process.env.AUTOMAKER_MODEL_SPEC;
      process.env.AUTOMAKER_MODEL_DEFAULT = "claude-sonnet-4-20250514";
      const { getModelForUseCase } = await import("@/lib/sdk-options.js");
      const result = getModelForUseCase("spec");
      expect(result).toBe("claude-sonnet-4-20250514");
      process.env.AUTOMAKER_MODEL_DEFAULT = 'claude-sonnet-4-20250514';
      const { getModelForUseCase } = await import('@/lib/sdk-options.js');
      const result = getModelForUseCase('spec');
      expect(result).toBe('claude-sonnet-4-20250514');
    });
  });

  describe("createSpecGenerationOptions", () => {
    it("should create options with spec generation settings", async () => {
  describe('createSpecGenerationOptions', () => {
    it('should create options with spec generation settings', async () => {
      const { createSpecGenerationOptions, TOOL_PRESETS, MAX_TURNS } =
        await import("@/lib/sdk-options.js");
        await import('@/lib/sdk-options.js');

      const options = createSpecGenerationOptions({ cwd: "/test/path" });
      const options = createSpecGenerationOptions({ cwd: '/test/path' });

      expect(options.cwd).toBe("/test/path");
      expect(options.cwd).toBe('/test/path');
      expect(options.maxTurns).toBe(MAX_TURNS.maximum);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.specGeneration]);
      expect(options.permissionMode).toBe("default");
      expect(options.permissionMode).toBe('default');
    });

    it("should include system prompt when provided", async () => {
      const { createSpecGenerationOptions } = await import(
        "@/lib/sdk-options.js"
      );
    it('should include system prompt when provided', async () => {
      const { createSpecGenerationOptions } = await import('@/lib/sdk-options.js');

      const options = createSpecGenerationOptions({
        cwd: "/test/path",
        systemPrompt: "Custom prompt",
        cwd: '/test/path',
        systemPrompt: 'Custom prompt',
      });

      expect(options.systemPrompt).toBe("Custom prompt");
      expect(options.systemPrompt).toBe('Custom prompt');
    });

    it("should include abort controller when provided", async () => {
      const { createSpecGenerationOptions } = await import(
        "@/lib/sdk-options.js"
      );
    it('should include abort controller when provided', async () => {
      const { createSpecGenerationOptions } = await import('@/lib/sdk-options.js');

      const abortController = new AbortController();
      const options = createSpecGenerationOptions({
        cwd: "/test/path",
        cwd: '/test/path',
        abortController,
      });

@@ -119,42 +115,73 @@ describe("sdk-options.ts", () => {
    });
  });

  describe("createFeatureGenerationOptions", () => {
    it("should create options with feature generation settings", async () => {
  describe('createFeatureGenerationOptions', () => {
    it('should create options with feature generation settings', async () => {
      const { createFeatureGenerationOptions, TOOL_PRESETS, MAX_TURNS } =
        await import("@/lib/sdk-options.js");
        await import('@/lib/sdk-options.js');

      const options = createFeatureGenerationOptions({ cwd: "/test/path" });
      const options = createFeatureGenerationOptions({ cwd: '/test/path' });

      expect(options.cwd).toBe("/test/path");
      expect(options.cwd).toBe('/test/path');
      expect(options.maxTurns).toBe(MAX_TURNS.quick);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
    });
  });

  describe("createSuggestionsOptions", () => {
    it("should create options with suggestions settings", async () => {
      const { createSuggestionsOptions, TOOL_PRESETS, MAX_TURNS } = await import(
        "@/lib/sdk-options.js"
      );
  describe('createSuggestionsOptions', () => {
    it('should create options with suggestions settings', async () => {
      const { createSuggestionsOptions, TOOL_PRESETS, MAX_TURNS } =
        await import('@/lib/sdk-options.js');

      const options = createSuggestionsOptions({ cwd: "/test/path" });
      const options = createSuggestionsOptions({ cwd: '/test/path' });

      expect(options.cwd).toBe("/test/path");
      expect(options.cwd).toBe('/test/path');
      expect(options.maxTurns).toBe(MAX_TURNS.extended);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
    });

    it('should include systemPrompt when provided', async () => {
      const { createSuggestionsOptions } = await import('@/lib/sdk-options.js');

      const options = createSuggestionsOptions({
        cwd: '/test/path',
        systemPrompt: 'Custom prompt',
      });

      expect(options.systemPrompt).toBe('Custom prompt');
    });

    it('should include abortController when provided', async () => {
      const { createSuggestionsOptions } = await import('@/lib/sdk-options.js');

      const abortController = new AbortController();
      const options = createSuggestionsOptions({
        cwd: '/test/path',
        abortController,
      });

      expect(options.abortController).toBe(abortController);
    });

    it('should include outputFormat when provided', async () => {
      const { createSuggestionsOptions } = await import('@/lib/sdk-options.js');

      const options = createSuggestionsOptions({
        cwd: '/test/path',
        outputFormat: { type: 'json' },
      });

      expect(options.outputFormat).toEqual({ type: 'json' });
    });
  });

  describe("createChatOptions", () => {
    it("should create options with chat settings", async () => {
      const { createChatOptions, TOOL_PRESETS, MAX_TURNS } = await import(
        "@/lib/sdk-options.js"
      );
  describe('createChatOptions', () => {
    it('should create options with chat settings', async () => {
      const { createChatOptions, TOOL_PRESETS, MAX_TURNS } = await import('@/lib/sdk-options.js');

      const options = createChatOptions({ cwd: "/test/path" });
      const options = createChatOptions({ cwd: '/test/path' });

      expect(options.cwd).toBe("/test/path");
      expect(options.cwd).toBe('/test/path');
      expect(options.maxTurns).toBe(MAX_TURNS.standard);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.chat]);
      expect(options.sandbox).toEqual({
@@ -163,41 +190,38 @@ describe("sdk-options.ts", () => {
    });
  });

    it("should prefer explicit model over session model", async () => {
      const { createChatOptions, getModelForUseCase } = await import(
        "@/lib/sdk-options.js"
      );
    it('should prefer explicit model over session model', async () => {
      const { createChatOptions, getModelForUseCase } = await import('@/lib/sdk-options.js');

      const options = createChatOptions({
        cwd: "/test/path",
        model: "claude-opus-4-20250514",
        sessionModel: "claude-haiku-3-5-20241022",
        cwd: '/test/path',
        model: 'claude-opus-4-20250514',
        sessionModel: 'claude-haiku-3-5-20241022',
      });

      expect(options.model).toBe("claude-opus-4-20250514");
      expect(options.model).toBe('claude-opus-4-20250514');
    });

    it("should use session model when explicit model not provided", async () => {
      const { createChatOptions } = await import("@/lib/sdk-options.js");
    it('should use session model when explicit model not provided', async () => {
      const { createChatOptions } = await import('@/lib/sdk-options.js');

      const options = createChatOptions({
        cwd: "/test/path",
        sessionModel: "claude-sonnet-4-20250514",
        cwd: '/test/path',
        sessionModel: 'claude-sonnet-4-20250514',
      });

      expect(options.model).toBe("claude-sonnet-4-20250514");
      expect(options.model).toBe('claude-sonnet-4-20250514');
    });
  });

  describe("createAutoModeOptions", () => {
    it("should create options with auto mode settings", async () => {
      const { createAutoModeOptions, TOOL_PRESETS, MAX_TURNS } = await import(
        "@/lib/sdk-options.js"
      );
  describe('createAutoModeOptions', () => {
    it('should create options with auto mode settings', async () => {
      const { createAutoModeOptions, TOOL_PRESETS, MAX_TURNS } =
        await import('@/lib/sdk-options.js');

      const options = createAutoModeOptions({ cwd: "/test/path" });
      const options = createAutoModeOptions({ cwd: '/test/path' });

      expect(options.cwd).toBe("/test/path");
      expect(options.cwd).toBe('/test/path');
      expect(options.maxTurns).toBe(MAX_TURNS.maximum);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.fullAccess]);
      expect(options.sandbox).toEqual({
@@ -205,34 +229,92 @@ describe("sdk-options.ts", () => {
      autoAllowBashIfSandboxed: true,
    });
  });

    it('should include systemPrompt when provided', async () => {
      const { createAutoModeOptions } = await import('@/lib/sdk-options.js');

      const options = createAutoModeOptions({
        cwd: '/test/path',
        systemPrompt: 'Custom prompt',
      });

      expect(options.systemPrompt).toBe('Custom prompt');
    });

    it('should include abortController when provided', async () => {
      const { createAutoModeOptions } = await import('@/lib/sdk-options.js');

      const abortController = new AbortController();
      const options = createAutoModeOptions({
        cwd: '/test/path',
        abortController,
      });

      expect(options.abortController).toBe(abortController);
    });
  });

  describe("createCustomOptions", () => {
    it("should create options with custom settings", async () => {
      const { createCustomOptions } = await import("@/lib/sdk-options.js");
  describe('createCustomOptions', () => {
    it('should create options with custom settings', async () => {
      const { createCustomOptions } = await import('@/lib/sdk-options.js');

      const options = createCustomOptions({
        cwd: "/test/path",
        cwd: '/test/path',
        maxTurns: 10,
        allowedTools: ["Read", "Write"],
        allowedTools: ['Read', 'Write'],
        sandbox: { enabled: true },
      });

      expect(options.cwd).toBe("/test/path");
      expect(options.cwd).toBe('/test/path');
      expect(options.maxTurns).toBe(10);
      expect(options.allowedTools).toEqual(["Read", "Write"]);
      expect(options.allowedTools).toEqual(['Read', 'Write']);
      expect(options.sandbox).toEqual({ enabled: true });
    });

    it("should use defaults when optional params not provided", async () => {
      const { createCustomOptions, TOOL_PRESETS, MAX_TURNS } = await import(
        "@/lib/sdk-options.js"
      );
    it('should use defaults when optional params not provided', async () => {
      const { createCustomOptions, TOOL_PRESETS, MAX_TURNS } = await import('@/lib/sdk-options.js');

      const options = createCustomOptions({ cwd: "/test/path" });
      const options = createCustomOptions({ cwd: '/test/path' });

      expect(options.maxTurns).toBe(MAX_TURNS.maximum);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
    });

    it('should include sandbox when provided', async () => {
      const { createCustomOptions } = await import('@/lib/sdk-options.js');

      const options = createCustomOptions({
        cwd: '/test/path',
        sandbox: { enabled: true, autoAllowBashIfSandboxed: false },
      });

      expect(options.sandbox).toEqual({
        enabled: true,
        autoAllowBashIfSandboxed: false,
      });
    });

    it('should include systemPrompt when provided', async () => {
      const { createCustomOptions } = await import('@/lib/sdk-options.js');

      const options = createCustomOptions({
        cwd: '/test/path',
        systemPrompt: 'Custom prompt',
      });

      expect(options.systemPrompt).toBe('Custom prompt');
    });

    it('should include abortController when provided', async () => {
      const { createCustomOptions } = await import('@/lib/sdk-options.js');

      const abortController = new AbortController();
      const options = createCustomOptions({
        cwd: '/test/path',
        abortController,
      });

      expect(options.abortController).toBe(abortController);
    });
  });
});

@@ -1,207 +1,186 @@
import { describe, it, expect, beforeEach, vi } from "vitest";
import path from "path";
import { describe, it, expect, beforeEach, vi } from 'vitest';
import path from 'path';

/**
 * Note: security.ts maintains module-level state (allowed paths Set).
 * We need to reset modules and reimport for each test to get fresh state.
 */
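//
// A minimal sketch of that reset-and-reimport pattern (illustrative only; the
// real tests below combine it with per-test environment setup):
//
//   beforeEach(() => {
//     vi.resetModules(); // drop the cached module and its module-level Set
//   });
//
//   it('sees fresh state', async () => {
//     process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
//     const { initAllowedPaths } = await import('@automaker/platform'); // fresh copy
//     initAllowedPaths();
//   });
//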
describe("security.ts", () => {
|
||||
describe('security.ts', () => {
|
||||
beforeEach(() => {
|
||||
vi.resetModules();
|
||||
});
|
||||
|
||||
describe("initAllowedPaths", () => {
|
||||
it("should parse comma-separated directories from environment", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/path1,/path2,/path3";
|
||||
process.env.DATA_DIR = "";
|
||||
describe('initAllowedPaths', () => {
|
||||
it('should load ALLOWED_ROOT_DIRECTORY if set', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/path1"));
|
||||
expect(allowed).toContain(path.resolve("/path2"));
|
||||
expect(allowed).toContain(path.resolve("/path3"));
|
||||
expect(allowed).toContain(path.resolve('/projects'));
|
||||
});
|
||||
|
||||
it("should trim whitespace from paths", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = " /path1 , /path2 , /path3 ";
|
||||
process.env.DATA_DIR = "";
|
||||
it('should include DATA_DIR if set', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
process.env.DATA_DIR = '/data/dir';
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/path1"));
|
||||
expect(allowed).toContain(path.resolve("/path2"));
|
||||
expect(allowed).toContain(path.resolve('/data/dir'));
|
||||
});
|
||||
|
||||
it("should always include DATA_DIR if set", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
process.env.DATA_DIR = "/data/dir";
|
||||
it('should include both ALLOWED_ROOT_DIRECTORY and DATA_DIR if both set', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
process.env.DATA_DIR = '/data';
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/data/dir"));
|
||||
expect(allowed).toContain(path.resolve('/projects'));
|
||||
expect(allowed).toContain(path.resolve('/data'));
|
||||
expect(allowed).toHaveLength(2);
|
||||
});
|
||||
|
||||
it("should handle empty ALLOWED_PROJECT_DIRS", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
process.env.DATA_DIR = "/data";
|
||||
it('should return empty array when no paths configured', async () => {
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
delete process.env.DATA_DIR;
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toHaveLength(1);
|
||||
expect(allowed[0]).toBe(path.resolve("/data"));
|
||||
});
|
||||
|
||||
it("should skip empty entries in comma list", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/path1,,/path2, ,/path3";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toHaveLength(3);
|
||||
expect(allowed).toHaveLength(0);
|
||||
});
|
||||
});
|
||||
|
||||
describe("addAllowedPath", () => {
|
||||
it("should add path to allowed list", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
process.env.DATA_DIR = "";
|
||||
describe('isPathAllowed', () => {
|
||||
it('should allow paths within ALLOWED_ROOT_DIRECTORY', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/allowed/project';
|
||||
process.env.DATA_DIR = '';
|
||||
|
||||
const { initAllowedPaths, addAllowedPath, getAllowedPaths } =
|
||||
await import("@/lib/security.js");
|
||||
const { initAllowedPaths, isPathAllowed } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
addAllowedPath("/new/path");
|
||||
// Paths within allowed directory should be allowed
|
||||
expect(isPathAllowed('/allowed/project/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/allowed/project/subdir/file.txt')).toBe(true);
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
expect(allowed).toContain(path.resolve("/new/path"));
|
||||
// Paths outside allowed directory should be denied
|
||||
expect(isPathAllowed('/not/allowed/file.txt')).toBe(false);
|
||||
expect(isPathAllowed('/tmp/file.txt')).toBe(false);
|
||||
expect(isPathAllowed('/etc/passwd')).toBe(false);
|
||||
});
|
||||
|
||||
it("should resolve relative paths before adding", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "";
|
||||
process.env.DATA_DIR = "";
|
||||
it('should allow all paths when no restrictions are configured', async () => {
|
||||
delete process.env.DATA_DIR;
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
|
||||
const { initAllowedPaths, addAllowedPath, getAllowedPaths } =
|
||||
await import("@/lib/security.js");
|
||||
const { initAllowedPaths, isPathAllowed } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
addAllowedPath("./relative/path");
|
||||
// All paths should be allowed when no restrictions are configured
|
||||
expect(isPathAllowed('/allowed/project/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/not/allowed/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/tmp/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/etc/passwd')).toBe(true);
|
||||
expect(isPathAllowed('/any/path')).toBe(true);
|
||||
});
|
||||
|
||||
const allowed = getAllowedPaths();
|
||||
it('should allow all paths when DATA_DIR is set but ALLOWED_ROOT_DIRECTORY is not', async () => {
|
||||
process.env.DATA_DIR = '/data';
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
// DATA_DIR should be allowed
|
||||
expect(isPathAllowed('/data/settings.json')).toBe(true);
|
||||
// But all other paths should also be allowed when ALLOWED_ROOT_DIRECTORY is not set
|
||||
expect(isPathAllowed('/allowed/project/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/not/allowed/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/tmp/file.txt')).toBe(true);
|
||||
expect(isPathAllowed('/etc/passwd')).toBe(true);
|
||||
expect(isPathAllowed('/any/path')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe('validatePath', () => {
|
||||
it('should return resolved path for allowed paths', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
|
||||
process.env.DATA_DIR = '';
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath('/allowed/file.txt');
|
||||
expect(result).toBe(path.resolve('/allowed/file.txt'));
|
||||
});
|
||||
|
||||
it('should throw error for paths outside allowed directories', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/allowed';
|
||||
process.env.DATA_DIR = '';
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
// Disallowed paths should throw PathNotAllowedError
|
||||
expect(() => validatePath('/disallowed/file.txt')).toThrow();
|
||||
});
|
||||
|
||||
it('should not throw error for any path when no restrictions are configured', async () => {
|
||||
delete process.env.DATA_DIR;
|
||||
delete process.env.ALLOWED_ROOT_DIRECTORY;
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
// All paths are allowed when no restrictions configured
|
||||
expect(() => validatePath('/disallowed/file.txt')).not.toThrow();
|
||||
expect(validatePath('/disallowed/file.txt')).toBe(path.resolve('/disallowed/file.txt'));
|
||||
});
|
||||
|
||||
it('should resolve relative paths within allowed directory', async () => {
|
||||
const cwd = process.cwd();
|
||||
expect(allowed).toContain(path.resolve(cwd, "./relative/path"));
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = cwd;
|
||||
process.env.DATA_DIR = '';
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath('./file.txt');
|
||||
expect(result).toBe(path.resolve(cwd, './file.txt'));
|
||||
});
|
||||
});
|
||||
|
||||
describe("isPathAllowed", () => {
|
||||
it("should allow all paths (permissions disabled)", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/allowed/project";
|
||||
process.env.DATA_DIR = "";
|
||||
describe('getAllowedPaths', () => {
|
||||
it('should return array of allowed paths', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/projects';
|
||||
process.env.DATA_DIR = '/data';
|
||||
|
||||
const { initAllowedPaths, isPathAllowed } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
// All paths are now allowed regardless of configuration
|
||||
expect(isPathAllowed("/allowed/project/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/not/allowed/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/tmp/file.txt")).toBe(true);
|
||||
expect(isPathAllowed("/etc/passwd")).toBe(true);
|
||||
expect(isPathAllowed("/any/path")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("validatePath", () => {
|
||||
it("should return resolved path for any path (permissions disabled)", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/allowed";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath("/allowed/file.txt");
|
||||
expect(result).toBe(path.resolve("/allowed/file.txt"));
|
||||
});
|
||||
|
||||
it("should not throw error for any path (permissions disabled)", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/allowed";
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
// All paths are now allowed, no errors thrown
|
||||
expect(() => validatePath("/disallowed/file.txt")).not.toThrow();
|
||||
expect(validatePath("/disallowed/file.txt")).toBe(
|
||||
path.resolve("/disallowed/file.txt")
|
||||
);
|
||||
});
|
||||
|
||||
it("should resolve relative paths", async () => {
|
||||
const cwd = process.cwd();
|
||||
process.env.ALLOWED_PROJECT_DIRS = cwd;
|
||||
process.env.DATA_DIR = "";
|
||||
|
||||
const { initAllowedPaths, validatePath } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
initAllowedPaths();
|
||||
|
||||
const result = validatePath("./file.txt");
|
||||
expect(result).toBe(path.resolve(cwd, "./file.txt"));
|
||||
});
|
||||
});
|
||||
|
||||
describe("getAllowedPaths", () => {
|
||||
it("should return array of allowed paths", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/path1,/path2";
|
||||
process.env.DATA_DIR = "/data";
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const result = getAllowedPaths();
|
||||
expect(Array.isArray(result)).toBe(true);
|
||||
expect(result.length).toBeGreaterThan(0);
|
||||
expect(result.length).toBe(2);
|
||||
expect(result).toContain(path.resolve('/projects'));
|
||||
expect(result).toContain(path.resolve('/data'));
|
||||
});
|
||||
|
||||
it("should return resolved paths", async () => {
|
||||
process.env.ALLOWED_PROJECT_DIRS = "/test";
|
||||
process.env.DATA_DIR = "";
|
||||
it('should return resolved paths', async () => {
|
||||
process.env.ALLOWED_ROOT_DIRECTORY = '/test';
|
||||
process.env.DATA_DIR = '';
|
||||
|
||||
const { initAllowedPaths, getAllowedPaths } = await import(
|
||||
"@/lib/security.js"
|
||||
);
|
||||
const { initAllowedPaths, getAllowedPaths } = await import('@automaker/platform');
|
||||
initAllowedPaths();
|
||||
|
||||
const result = getAllowedPaths();
|
||||
expect(result[0]).toBe(path.resolve("/test"));
|
||||
expect(result[0]).toBe(path.resolve('/test'));
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
@@ -1,482 +0,0 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import {
  spawnJSONLProcess,
  spawnProcess,
  type SubprocessOptions,
} from "@/lib/subprocess-manager.js";
import * as cp from "child_process";
import { EventEmitter } from "events";
import { Readable } from "stream";
import { collectAsyncGenerator } from "../../utils/helpers.js";

vi.mock("child_process");

describe("subprocess-manager.ts", () => {
  let consoleSpy: any;

  beforeEach(() => {
    vi.clearAllMocks();
    consoleSpy = {
      log: vi.spyOn(console, "log").mockImplementation(() => {}),
      error: vi.spyOn(console, "error").mockImplementation(() => {}),
    };
  });

  afterEach(() => {
    consoleSpy.log.mockRestore();
    consoleSpy.error.mockRestore();
  });

  /**
   * Helper to create a mock ChildProcess with stdout/stderr streams
   */
  function createMockProcess(config: {
    stdoutLines?: string[];
    stderrLines?: string[];
    exitCode?: number;
    error?: Error;
    delayMs?: number;
  }) {
    const mockProcess = new EventEmitter() as any;

    // Create readable streams for stdout and stderr
    const stdout = new Readable({ read() {} });
    const stderr = new Readable({ read() {} });

    mockProcess.stdout = stdout;
    mockProcess.stderr = stderr;
    mockProcess.kill = vi.fn();

    // Use process.nextTick to ensure readline interface is set up first
    process.nextTick(() => {
      // Emit stderr lines immediately
      if (config.stderrLines) {
        for (const line of config.stderrLines) {
          stderr.emit("data", Buffer.from(line));
        }
      }

      // Emit stdout lines with small delays to ensure readline processes them
      const emitLines = async () => {
        if (config.stdoutLines) {
          for (const line of config.stdoutLines) {
            stdout.push(line + "\n");
            // Small delay to allow readline to process
            await new Promise((resolve) => setImmediate(resolve));
          }
        }

        // Small delay before ending stream
        await new Promise((resolve) => setImmediate(resolve));
        stdout.push(null); // End stdout

        // Small delay before exit
        await new Promise((resolve) =>
          setTimeout(resolve, config.delayMs ?? 10)
        );

        // Emit exit or error
        if (config.error) {
          mockProcess.emit("error", config.error);
        } else {
          mockProcess.emit("exit", config.exitCode ?? 0);
        }
      };

      emitLines();
    });

    return mockProcess;
  }

  describe("spawnJSONLProcess", () => {
    const baseOptions: SubprocessOptions = {
      command: "test-command",
      args: ["arg1", "arg2"],
      cwd: "/test/dir",
    };

    it("should yield parsed JSONL objects line by line", async () => {
      const mockProcess = createMockProcess({
        stdoutLines: [
          '{"type":"start","id":1}',
          '{"type":"progress","value":50}',
          '{"type":"complete","result":"success"}',
        ],
        exitCode: 0,
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess(baseOptions);
      const results = await collectAsyncGenerator(generator);

      expect(results).toHaveLength(3);
      expect(results[0]).toEqual({ type: "start", id: 1 });
      expect(results[1]).toEqual({ type: "progress", value: 50 });
      expect(results[2]).toEqual({ type: "complete", result: "success" });
    });

    it("should skip empty lines", async () => {
      const mockProcess = createMockProcess({
        stdoutLines: [
          '{"type":"first"}',
          "",
          "   ",
          '{"type":"second"}',
        ],
        exitCode: 0,
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess(baseOptions);
      const results = await collectAsyncGenerator(generator);

      expect(results).toHaveLength(2);
      expect(results[0]).toEqual({ type: "first" });
      expect(results[1]).toEqual({ type: "second" });
    });

    it("should yield error for malformed JSON and continue processing", async () => {
      const mockProcess = createMockProcess({
        stdoutLines: [
          '{"type":"valid"}',
          '{invalid json}',
          '{"type":"also_valid"}',
        ],
        exitCode: 0,
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess(baseOptions);
      const results = await collectAsyncGenerator(generator);

      expect(results).toHaveLength(3);
      expect(results[0]).toEqual({ type: "valid" });
      expect(results[1]).toMatchObject({
        type: "error",
        error: expect.stringContaining("Failed to parse output"),
      });
      expect(results[2]).toEqual({ type: "also_valid" });
    });

    it("should collect stderr output", async () => {
      const mockProcess = createMockProcess({
        stdoutLines: ['{"type":"test"}'],
        stderrLines: ["Warning: something happened", "Error: critical issue"],
        exitCode: 0,
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess(baseOptions);
      await collectAsyncGenerator(generator);

      expect(consoleSpy.error).toHaveBeenCalledWith(
        expect.stringContaining("Warning: something happened")
      );
      expect(consoleSpy.error).toHaveBeenCalledWith(
        expect.stringContaining("Error: critical issue")
      );
    });

    it("should yield error on non-zero exit code", async () => {
      const mockProcess = createMockProcess({
        stdoutLines: ['{"type":"started"}'],
        stderrLines: ["Process failed with error"],
        exitCode: 1,
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess(baseOptions);
      const results = await collectAsyncGenerator(generator);

      expect(results).toHaveLength(2);
      expect(results[0]).toEqual({ type: "started" });
      expect(results[1]).toMatchObject({
        type: "error",
        error: expect.stringContaining("Process failed with error"),
      });
    });

    it("should yield error with exit code when stderr is empty", async () => {
      const mockProcess = createMockProcess({
        stdoutLines: ['{"type":"test"}'],
        exitCode: 127,
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess(baseOptions);
      const results = await collectAsyncGenerator(generator);

      expect(results).toHaveLength(2);
      expect(results[1]).toMatchObject({
        type: "error",
        error: "Process exited with code 127",
      });
    });

    it("should handle process spawn errors", async () => {
      const mockProcess = createMockProcess({
        error: new Error("Command not found"),
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess(baseOptions);
      const results = await collectAsyncGenerator(generator);

      // When process.on('error') fires, exitCode is null
      // The generator should handle this gracefully
      expect(results).toEqual([]);
    });

    it("should kill process on AbortController signal", async () => {
      const abortController = new AbortController();
      const mockProcess = createMockProcess({
        stdoutLines: ['{"type":"start"}'],
        exitCode: 0,
        delayMs: 100, // Delay to allow abort
      });

      vi.mocked(cp.spawn).mockReturnValue(mockProcess);

      const generator = spawnJSONLProcess({
        ...baseOptions,
        abortController,
      });

      // Start consuming the generator
      const promise = collectAsyncGenerator(generator);

      // Abort after a short delay
      setTimeout(() => abortController.abort(), 20);

      await promise;

      expect(mockProcess.kill).toHaveBeenCalledWith("SIGTERM");
      expect(consoleSpy.log).toHaveBeenCalledWith(
        expect.stringContaining("Abort signal received")
      );
    });

    // Note: Timeout behavior tests are omitted from unit tests as they involve
    // complex timing interactions that are difficult to mock reliably.
    // These scenarios are better covered by integration tests with real subprocesses.

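    // A rough sketch of what such an integration test might look like, assuming a
    // POSIX `sleep` binary on PATH (illustrative only; not part of this suite):
    //
    //   it("aborts a real long-running process", async () => {
    //     const abortController = new AbortController();
    //     const pending = spawnProcess({
    //       command: "sleep",
    //       args: ["30"],
    //       cwd: process.cwd(),
    //       abortController,
    //     });
    //     setTimeout(() => abortController.abort(), 100);
    //     await expect(pending).rejects.toThrow("Process aborted");
    //   });
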
it("should spawn process with correct arguments", async () => {
|
||||
const mockProcess = createMockProcess({ exitCode: 0 });
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const options: SubprocessOptions = {
|
||||
command: "my-command",
|
||||
args: ["--flag", "value"],
|
||||
cwd: "/work/dir",
|
||||
env: { CUSTOM_VAR: "test" },
|
||||
};
|
||||
|
||||
const generator = spawnJSONLProcess(options);
|
||||
await collectAsyncGenerator(generator);
|
||||
|
||||
expect(cp.spawn).toHaveBeenCalledWith("my-command", ["--flag", "value"], {
|
||||
cwd: "/work/dir",
|
||||
env: expect.objectContaining({ CUSTOM_VAR: "test" }),
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
});
|
||||
|
||||
it("should merge env with process.env", async () => {
|
||||
const mockProcess = createMockProcess({ exitCode: 0 });
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const options: SubprocessOptions = {
|
||||
command: "test",
|
||||
args: [],
|
||||
cwd: "/test",
|
||||
env: { CUSTOM: "value" },
|
||||
};
|
||||
|
||||
const generator = spawnJSONLProcess(options);
|
||||
await collectAsyncGenerator(generator);
|
||||
|
||||
expect(cp.spawn).toHaveBeenCalledWith(
|
||||
"test",
|
||||
[],
|
||||
expect.objectContaining({
|
||||
env: expect.objectContaining({
|
||||
CUSTOM: "value",
|
||||
// Should also include existing process.env
|
||||
NODE_ENV: process.env.NODE_ENV,
|
||||
}),
|
||||
})
|
||||
);
|
||||
});
|
||||
|
||||
it("should handle complex JSON objects", async () => {
|
||||
const complexObject = {
|
||||
type: "complex",
|
||||
nested: { deep: { value: [1, 2, 3] } },
|
||||
array: [{ id: 1 }, { id: 2 }],
|
||||
string: "with \"quotes\" and \\backslashes",
|
||||
};
|
||||
|
||||
const mockProcess = createMockProcess({
|
||||
stdoutLines: [JSON.stringify(complexObject)],
|
||||
exitCode: 0,
|
||||
});
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
const generator = spawnJSONLProcess(baseOptions);
|
||||
const results = await collectAsyncGenerator(generator);
|
||||
|
||||
expect(results).toHaveLength(1);
|
||||
expect(results[0]).toEqual(complexObject);
|
||||
});
|
||||
});
|
||||
|
||||
describe("spawnProcess", () => {
|
||||
const baseOptions: SubprocessOptions = {
|
||||
command: "test-command",
|
||||
args: ["arg1"],
|
||||
cwd: "/test",
|
||||
};
|
||||
|
||||
it("should collect stdout and stderr", async () => {
|
||||
const mockProcess = new EventEmitter() as any;
|
||||
const stdout = new Readable({ read() {} });
|
||||
const stderr = new Readable({ read() {} });
|
||||
|
||||
mockProcess.stdout = stdout;
|
||||
mockProcess.stderr = stderr;
|
||||
mockProcess.kill = vi.fn();
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
stdout.push("line 1\n");
|
||||
stdout.push("line 2\n");
|
||||
stdout.push(null);
|
||||
|
||||
stderr.push("error 1\n");
|
||||
stderr.push("error 2\n");
|
||||
stderr.push(null);
|
||||
|
||||
mockProcess.emit("exit", 0);
|
||||
}, 10);
|
||||
|
||||
const result = await spawnProcess(baseOptions);
|
||||
|
||||
expect(result.stdout).toBe("line 1\nline 2\n");
|
||||
expect(result.stderr).toBe("error 1\nerror 2\n");
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
|
||||
it("should return correct exit code", async () => {
|
||||
const mockProcess = new EventEmitter() as any;
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn();
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.stdout.push(null);
|
||||
mockProcess.stderr.push(null);
|
||||
mockProcess.emit("exit", 42);
|
||||
}, 10);
|
||||
|
||||
const result = await spawnProcess(baseOptions);
|
||||
|
||||
expect(result.exitCode).toBe(42);
|
||||
});
|
||||
|
||||
it("should handle process errors", async () => {
|
||||
const mockProcess = new EventEmitter() as any;
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn();
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.emit("error", new Error("Spawn failed"));
|
||||
}, 10);
|
||||
|
||||
await expect(spawnProcess(baseOptions)).rejects.toThrow("Spawn failed");
|
||||
});
|
||||
|
||||
it("should handle AbortController signal", async () => {
|
||||
const abortController = new AbortController();
|
||||
const mockProcess = new EventEmitter() as any;
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn();
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => abortController.abort(), 20);
|
||||
|
||||
await expect(
|
||||
spawnProcess({ ...baseOptions, abortController })
|
||||
).rejects.toThrow("Process aborted");
|
||||
|
||||
expect(mockProcess.kill).toHaveBeenCalledWith("SIGTERM");
|
||||
});
|
||||
|
||||
it("should spawn with correct options", async () => {
|
||||
const mockProcess = new EventEmitter() as any;
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn();
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.stdout.push(null);
|
||||
mockProcess.stderr.push(null);
|
||||
mockProcess.emit("exit", 0);
|
||||
}, 10);
|
||||
|
||||
const options: SubprocessOptions = {
|
||||
command: "my-cmd",
|
||||
args: ["--verbose"],
|
||||
cwd: "/my/dir",
|
||||
env: { MY_VAR: "value" },
|
||||
};
|
||||
|
||||
await spawnProcess(options);
|
||||
|
||||
expect(cp.spawn).toHaveBeenCalledWith("my-cmd", ["--verbose"], {
|
||||
cwd: "/my/dir",
|
||||
env: expect.objectContaining({ MY_VAR: "value" }),
|
||||
stdio: ["ignore", "pipe", "pipe"],
|
||||
});
|
||||
});
|
||||
|
||||
it("should handle empty stdout and stderr", async () => {
|
||||
const mockProcess = new EventEmitter() as any;
|
||||
mockProcess.stdout = new Readable({ read() {} });
|
||||
mockProcess.stderr = new Readable({ read() {} });
|
||||
mockProcess.kill = vi.fn();
|
||||
|
||||
vi.mocked(cp.spawn).mockReturnValue(mockProcess);
|
||||
|
||||
setTimeout(() => {
|
||||
mockProcess.stdout.push(null);
|
||||
mockProcess.stderr.push(null);
|
||||
mockProcess.emit("exit", 0);
|
||||
}, 10);
|
||||
|
||||
const result = await spawnProcess(baseOptions);
|
||||
|
||||
expect(result.stdout).toBe("");
|
||||
expect(result.stderr).toBe("");
|
||||
expect(result.exitCode).toBe(0);
|
||||
});
|
||||
});
|
||||
});
|
||||
@@ -1,4 +1,4 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import { describe, it, expect, beforeEach, afterEach } from 'vitest';
import {
  readWorktreeMetadata,
  writeWorktreeMetadata,
@@ -8,12 +8,12 @@ import {
  deleteWorktreeMetadata,
  type WorktreeMetadata,
  type WorktreePRInfo,
} from "@/lib/worktree-metadata.js";
import fs from "fs/promises";
import path from "path";
import os from "os";
} from '@/lib/worktree-metadata.js';
import fs from 'fs/promises';
import path from 'path';
import os from 'os';

describe("worktree-metadata.ts", () => {
describe('worktree-metadata.ts', () => {
  let testProjectPath: string;

  beforeEach(async () => {
@@ -29,10 +29,10 @@ describe("worktree-metadata.ts", () => {
    }
  });

  describe("sanitizeBranchName", () => {
  describe('sanitizeBranchName', () => {
    // Test through readWorktreeMetadata and writeWorktreeMetadata
    it("should sanitize branch names with invalid characters", async () => {
      const branch = "feature/test-branch";
    it('should sanitize branch names with invalid characters', async () => {
      const branch = 'feature/test-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -43,8 +43,8 @@ describe("worktree-metadata.ts", () => {
      expect(result).toEqual(metadata);
    });

    it("should sanitize branch names with Windows invalid characters", async () => {
      const branch = "feature:test*branch?";
    it('should sanitize branch names with Windows invalid characters', async () => {
      const branch = 'feature:test*branch?';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -55,8 +55,8 @@ describe("worktree-metadata.ts", () => {
      expect(result).toEqual(metadata);
    });

    it("should sanitize Windows reserved names", async () => {
      const branch = "CON";
    it('should sanitize Windows reserved names', async () => {
      const branch = 'CON';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -66,16 +66,42 @@ describe("worktree-metadata.ts", () => {
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it('should handle empty branch name', async () => {
      const branch = '';
      const metadata: WorktreeMetadata = {
        branch: 'branch',
        createdAt: new Date().toISOString(),
      };

      // Empty branch name should be sanitized to "_branch"
      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it('should handle branch name that becomes empty after sanitization', async () => {
      // Test branch that would become empty after removing invalid chars
      const branch = '///';
      const metadata: WorktreeMetadata = {
        branch: 'branch',
        createdAt: new Date().toISOString(),
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });
  });
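  // sanitizeBranchName itself is not exported, so the tests above pin down its
  // behavior only indirectly through the read/write round trip. A hypothetical
  // sketch of the rules they imply (assumed, not the actual implementation in
  // worktree-metadata.ts):
  //
  //   function sanitizeBranchNameSketch(branch: string): string {
  //     let safe = branch.replace(/[/\\:*?"<>|]/g, '_'); // path separators and Windows-invalid chars
  //     if (/^(CON|PRN|AUX|NUL|COM\d|LPT\d)$/i.test(safe)) safe = `_${safe}`; // dodge reserved device names
  //     return safe || '_branch'; // empty input falls back to "_branch"
  //   }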

  describe("readWorktreeMetadata", () => {
  describe('readWorktreeMetadata', () => {
    it("should return null when metadata file doesn't exist", async () => {
      const result = await readWorktreeMetadata(testProjectPath, "nonexistent-branch");
      const result = await readWorktreeMetadata(testProjectPath, 'nonexistent-branch');
      expect(result).toBeNull();
    });

    it("should read existing metadata", async () => {
      const branch = "test-branch";
    it('should read existing metadata', async () => {
      const branch = 'test-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -86,16 +112,16 @@ describe("worktree-metadata.ts", () => {
      expect(result).toEqual(metadata);
    });

    it("should read metadata with PR info", async () => {
      const branch = "pr-branch";
    it('should read metadata with PR info', async () => {
      const branch = 'pr-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
        pr: {
          number: 123,
          url: "https://github.com/owner/repo/pull/123",
          title: "Test PR",
          state: "open",
          url: 'https://github.com/owner/repo/pull/123',
          title: 'Test PR',
          state: 'open',
          createdAt: new Date().toISOString(),
        },
      };
@@ -106,9 +132,9 @@ describe("worktree-metadata.ts", () => {
    });
  });

  describe("writeWorktreeMetadata", () => {
  describe('writeWorktreeMetadata', () => {
    it("should create metadata directory if it doesn't exist", async () => {
      const branch = "new-branch";
      const branch = 'new-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -119,8 +145,8 @@ describe("worktree-metadata.ts", () => {
      expect(result).toEqual(metadata);
    });

    it("should overwrite existing metadata", async () => {
      const branch = "existing-branch";
    it('should overwrite existing metadata', async () => {
      const branch = 'existing-branch';
      const metadata1: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -130,9 +156,9 @@ describe("worktree-metadata.ts", () => {
        createdAt: new Date().toISOString(),
        pr: {
          number: 456,
          url: "https://github.com/owner/repo/pull/456",
          title: "Updated PR",
          state: "closed",
          url: 'https://github.com/owner/repo/pull/456',
          title: 'Updated PR',
          state: 'closed',
          createdAt: new Date().toISOString(),
        },
      };
@@ -144,14 +170,14 @@ describe("worktree-metadata.ts", () => {
    });
  });

  describe("updateWorktreePRInfo", () => {
  describe('updateWorktreePRInfo', () => {
    it("should create new metadata if it doesn't exist", async () => {
      const branch = "new-pr-branch";
      const branch = 'new-pr-branch';
      const prInfo: WorktreePRInfo = {
        number: 789,
        url: "https://github.com/owner/repo/pull/789",
        title: "New PR",
        state: "open",
        url: 'https://github.com/owner/repo/pull/789',
        title: 'New PR',
        state: 'open',
        createdAt: new Date().toISOString(),
      };

@@ -162,8 +188,8 @@ describe("worktree-metadata.ts", () => {
      expect(result?.pr).toEqual(prInfo);
    });

    it("should update existing metadata with PR info", async () => {
      const branch = "existing-pr-branch";
    it('should update existing metadata with PR info', async () => {
      const branch = 'existing-pr-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -173,9 +199,9 @@ describe("worktree-metadata.ts", () => {

      const prInfo: WorktreePRInfo = {
        number: 999,
        url: "https://github.com/owner/repo/pull/999",
        title: "Updated PR",
        state: "merged",
        url: 'https://github.com/owner/repo/pull/999',
        title: 'Updated PR',
        state: 'merged',
        createdAt: new Date().toISOString(),
      };

@@ -184,8 +210,8 @@ describe("worktree-metadata.ts", () => {
      expect(result?.pr).toEqual(prInfo);
    });

    it("should preserve existing metadata when updating PR info", async () => {
      const branch = "preserve-branch";
    it('should preserve existing metadata when updating PR info', async () => {
      const branch = 'preserve-branch';
      const originalCreatedAt = new Date().toISOString();
      const metadata: WorktreeMetadata = {
        branch,
@@ -196,9 +222,9 @@ describe("worktree-metadata.ts", () => {

      const prInfo: WorktreePRInfo = {
        number: 111,
        url: "https://github.com/owner/repo/pull/111",
        title: "PR",
        state: "open",
        url: 'https://github.com/owner/repo/pull/111',
        title: 'PR',
        state: 'open',
        createdAt: new Date().toISOString(),
      };

@@ -209,14 +235,14 @@ describe("worktree-metadata.ts", () => {
    });
  });

  describe("getWorktreePRInfo", () => {
  describe('getWorktreePRInfo', () => {
    it("should return null when metadata doesn't exist", async () => {
      const result = await getWorktreePRInfo(testProjectPath, "nonexistent");
      const result = await getWorktreePRInfo(testProjectPath, 'nonexistent');
      expect(result).toBeNull();
    });

    it("should return null when metadata exists but has no PR info", async () => {
      const branch = "no-pr-branch";
    it('should return null when metadata exists but has no PR info', async () => {
      const branch = 'no-pr-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -227,13 +253,13 @@ describe("worktree-metadata.ts", () => {
      expect(result).toBeNull();
    });

    it("should return PR info when it exists", async () => {
      const branch = "has-pr-branch";
    it('should return PR info when it exists', async () => {
      const branch = 'has-pr-branch';
      const prInfo: WorktreePRInfo = {
        number: 222,
        url: "https://github.com/owner/repo/pull/222",
        title: "Has PR",
        state: "open",
        url: 'https://github.com/owner/repo/pull/222',
        title: 'Has PR',
        state: 'open',
        createdAt: new Date().toISOString(),
      };

@@ -243,23 +269,23 @@ describe("worktree-metadata.ts", () => {
    });
  });

  describe("readAllWorktreeMetadata", () => {
  describe('readAllWorktreeMetadata', () => {
    it("should return empty map when worktrees directory doesn't exist", async () => {
      const result = await readAllWorktreeMetadata(testProjectPath);
      expect(result.size).toBe(0);
    });

    it("should return empty map when worktrees directory is empty", async () => {
      const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
    it('should return empty map when worktrees directory is empty', async () => {
      const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
      await fs.mkdir(worktreesDir, { recursive: true });

      const result = await readAllWorktreeMetadata(testProjectPath);
      expect(result.size).toBe(0);
    });

    it("should read all worktree metadata", async () => {
      const branch1 = "branch-1";
      const branch2 = "branch-2";
    it('should read all worktree metadata', async () => {
      const branch1 = 'branch-1';
      const branch2 = 'branch-2';
      const metadata1: WorktreeMetadata = {
        branch: branch1,
        createdAt: new Date().toISOString(),
@@ -269,9 +295,9 @@ describe("worktree-metadata.ts", () => {
        createdAt: new Date().toISOString(),
        pr: {
          number: 333,
          url: "https://github.com/owner/repo/pull/333",
          title: "PR 3",
          state: "open",
          url: 'https://github.com/owner/repo/pull/333',
          title: 'PR 3',
          state: 'open',
          createdAt: new Date().toISOString(),
        },
      };
@@ -285,12 +311,12 @@ describe("worktree-metadata.ts", () => {
      expect(result.get(branch2)).toEqual(metadata2);
    });

    it("should skip directories without worktree.json", async () => {
      const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
      const emptyDir = path.join(worktreesDir, "empty-dir");
    it('should skip directories without worktree.json', async () => {
      const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
      const emptyDir = path.join(worktreesDir, 'empty-dir');
      await fs.mkdir(emptyDir, { recursive: true });

      const branch = "valid-branch";
      const branch = 'valid-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -302,13 +328,13 @@ describe("worktree-metadata.ts", () => {
      expect(result.get(branch)).toEqual(metadata);
    });

    it("should skip files in worktrees directory", async () => {
      const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
    it('should skip files in worktrees directory', async () => {
      const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
      await fs.mkdir(worktreesDir, { recursive: true });
      const filePath = path.join(worktreesDir, "not-a-dir.txt");
      await fs.writeFile(filePath, "content");
      const filePath = path.join(worktreesDir, 'not-a-dir.txt');
      await fs.writeFile(filePath, 'content');

      const branch = "valid-branch";
      const branch = 'valid-branch';
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
@@ -320,14 +346,14 @@ describe("worktree-metadata.ts", () => {
      expect(result.get(branch)).toEqual(metadata);
    });

    it("should skip directories with malformed JSON", async () => {
      const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
|
||||
const badDir = path.join(worktreesDir, "bad-dir");
|
||||
it('should skip directories with malformed JSON', async () => {
|
||||
const worktreesDir = path.join(testProjectPath, '.automaker', 'worktrees');
|
||||
const badDir = path.join(worktreesDir, 'bad-dir');
|
||||
await fs.mkdir(badDir, { recursive: true });
|
||||
const badJsonPath = path.join(badDir, "worktree.json");
|
||||
await fs.writeFile(badJsonPath, "not valid json");
|
||||
const badJsonPath = path.join(badDir, 'worktree.json');
|
||||
await fs.writeFile(badJsonPath, 'not valid json');
|
||||
|
||||
const branch = "valid-branch";
|
||||
const branch = 'valid-branch';
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -340,9 +366,9 @@ describe("worktree-metadata.ts", () => {
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteWorktreeMetadata", () => {
|
||||
it("should delete worktree metadata directory", async () => {
|
||||
const branch = "to-delete";
|
||||
describe('deleteWorktreeMetadata', () => {
|
||||
it('should delete worktree metadata directory', async () => {
|
||||
const branch = 'to-delete';
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
@@ -359,10 +385,7 @@ describe("worktree-metadata.ts", () => {
|
||||
|
||||
it("should handle deletion when metadata doesn't exist", async () => {
|
||||
// Should not throw
|
||||
await expect(
|
||||
deleteWorktreeMetadata(testProjectPath, "nonexistent")
|
||||
).resolves.toBeUndefined();
|
||||
await expect(deleteWorktreeMetadata(testProjectPath, 'nonexistent')).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
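Note: the readAllWorktreeMetadata tests in this diff pin down a contract — a missing or empty worktrees directory yields an empty map, plain files are ignored, and directories with a missing or malformed worktree.json are skipped. The following is a minimal hypothetical sketch of a function satisfying those assertions; the actual implementation in @automaker/platform may differ, and the interface shapes, the Sketch suffix, and keying the map by metadata.branch are assumptions inferred from the fixtures, not confirmed by the source.

import { Dirent } from 'fs';
import fs from 'fs/promises';
import path from 'path';

// Shapes inferred from the test fixtures above (assumed, not the published types).
interface WorktreePRInfo {
  number: number;
  url: string;
  title: string;
  state: string;
  createdAt: string;
}

interface WorktreeMetadata {
  branch: string;
  createdAt: string;
  pr?: WorktreePRInfo;
}

async function readAllWorktreeMetadataSketch(
  projectPath: string
): Promise<Map<string, WorktreeMetadata>> {
  const result = new Map<string, WorktreeMetadata>();
  const worktreesDir = path.join(projectPath, '.automaker', 'worktrees');

  let entries: Dirent[];
  try {
    entries = await fs.readdir(worktreesDir, { withFileTypes: true });
  } catch {
    // Worktrees directory doesn't exist -> empty map.
    return result;
  }

  for (const entry of entries) {
    if (!entry.isDirectory()) continue; // skip plain files like not-a-dir.txt
    const metadataPath = path.join(worktreesDir, entry.name, 'worktree.json');
    try {
      const raw = await fs.readFile(metadataPath, 'utf-8');
      const metadata = JSON.parse(raw) as WorktreeMetadata;
      result.set(metadata.branch, metadata);
    } catch {
      // Skip directories with no worktree.json or with malformed JSON.
    }
  }

  return result;
}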