mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-02 08:33:36 +00:00)
Merge origin/main into feature/shared-packages
Resolved conflicts:
- list.ts: Keep @automaker/git-utils import, add worktree-metadata import
- feature-loader.ts: Use Feature type from @automaker/types
- automaker-paths.test.ts: Import from @automaker/platform
- kanban-card.tsx: Accept deletion (split into components/)
- subprocess.test.ts: Keep libs/platform location

Added missing exports to @automaker/platform:
- getGlobalSettingsPath, getCredentialsPath, getProjectSettingsPath, ensureDataDir

Added title and titleGenerating fields to @automaker/types Feature interface.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
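For context on what the tests below exercise: the new @automaker/platform exports resolve settings and credentials files under the data directory (or a project's .automaker directory) and create the data directory on demand. A minimal sketch of what these helpers could look like, inferred only from the test expectations in this diff (the actual implementations in @automaker/platform may differ), plus the assumed shape of the new Feature fields:

import path from "path";
import fs from "fs/promises";

// settings.json lives directly in the data directory
export function getGlobalSettingsPath(dataDir: string): string {
  return path.join(dataDir, "settings.json");
}

// credentials.json also lives in the data directory
export function getCredentialsPath(dataDir: string): string {
  return path.join(dataDir, "credentials.json");
}

// per-project settings live under <project>/.automaker/settings.json
export function getProjectSettingsPath(projectPath: string): string {
  return path.join(projectPath, ".automaker", "settings.json");
}

// creates the data directory (including parents) if needed and returns it
export async function ensureDataDir(dataDir: string): Promise<string> {
  await fs.mkdir(dataDir, { recursive: true });
  return dataDir;
}

// Assumed addition to the @automaker/types Feature interface:
// title?: string;            // generated card title
// titleGenerating?: boolean; // true while a title is being generated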
@@ -13,6 +13,10 @@ import {
getAppSpecPath,
getBranchTrackingPath,
ensureAutomakerDir,
getGlobalSettingsPath,
getCredentialsPath,
getProjectSettingsPath,
ensureDataDir,
} from "@automaker/platform";

describe("automaker-paths.ts", () => {
@@ -136,4 +140,91 @@ describe("automaker-paths.ts", () => {
expect(result).toBe(automakerDir);
});
});

describe("getGlobalSettingsPath", () => {
it("should return path to settings.json in data directory", () => {
const dataDir = "/test/data";
const result = getGlobalSettingsPath(dataDir);
expect(result).toBe(path.join(dataDir, "settings.json"));
});

it("should handle paths with trailing slashes", () => {
const dataDir = "/test/data" + path.sep;
const result = getGlobalSettingsPath(dataDir);
expect(result).toBe(path.join(dataDir, "settings.json"));
});
});

describe("getCredentialsPath", () => {
it("should return path to credentials.json in data directory", () => {
const dataDir = "/test/data";
const result = getCredentialsPath(dataDir);
expect(result).toBe(path.join(dataDir, "credentials.json"));
});

it("should handle paths with trailing slashes", () => {
const dataDir = "/test/data" + path.sep;
const result = getCredentialsPath(dataDir);
expect(result).toBe(path.join(dataDir, "credentials.json"));
});
});

describe("getProjectSettingsPath", () => {
it("should return path to settings.json in project .automaker directory", () => {
const projectPath = "/test/project";
const result = getProjectSettingsPath(projectPath);
expect(result).toBe(
path.join(projectPath, ".automaker", "settings.json")
);
});

it("should handle paths with trailing slashes", () => {
const projectPath = "/test/project" + path.sep;
const result = getProjectSettingsPath(projectPath);
expect(result).toBe(
path.join(projectPath, ".automaker", "settings.json")
);
});
});

describe("ensureDataDir", () => {
let testDir: string;

beforeEach(async () => {
testDir = path.join(os.tmpdir(), `data-dir-test-${Date.now()}`);
});

afterEach(async () => {
try {
await fs.rm(testDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
});

it("should create data directory and return path", async () => {
const result = await ensureDataDir(testDir);

expect(result).toBe(testDir);
const stats = await fs.stat(testDir);
expect(stats.isDirectory()).toBe(true);
});

it("should succeed if directory already exists", async () => {
await fs.mkdir(testDir, { recursive: true });

const result = await ensureDataDir(testDir);

expect(result).toBe(testDir);
});

it("should create nested directories", async () => {
const nestedDir = path.join(testDir, "nested", "deep");
const result = await ensureDataDir(nestedDir);

expect(result).toBe(nestedDir);
const stats = await fs.stat(nestedDir);
expect(stats.isDirectory()).toBe(true);
});
});
});

@@ -2,6 +2,7 @@ import { describe, it, expect } from "vitest";
import {
isAbortError,
isAuthenticationError,
isCancellationError,
classifyError,
getUserFriendlyErrorMessage,
type ErrorType,
@@ -32,6 +33,34 @@ describe("error-handler.ts", () => {
});
});

describe("isCancellationError", () => {
it("should detect 'cancelled' message", () => {
expect(isCancellationError("Operation was cancelled")).toBe(true);
});

it("should detect 'canceled' message", () => {
expect(isCancellationError("Request was canceled")).toBe(true);
});

it("should detect 'stopped' message", () => {
expect(isCancellationError("Process was stopped")).toBe(true);
});

it("should detect 'aborted' message", () => {
expect(isCancellationError("Task was aborted")).toBe(true);
});

it("should be case insensitive", () => {
expect(isCancellationError("CANCELLED")).toBe(true);
expect(isCancellationError("Canceled")).toBe(true);
});

it("should return false for non-cancellation errors", () => {
expect(isCancellationError("File not found")).toBe(false);
expect(isCancellationError("Network error")).toBe(false);
});
});

describe("isAuthenticationError", () => {
it("should detect 'Authentication failed' message", () => {
expect(isAuthenticationError("Authentication failed")).toBe(true);
@@ -91,6 +120,42 @@ describe("error-handler.ts", () => {
expect(result.isAbort).toBe(true); // Still detected as abort too
});

it("should classify cancellation errors", () => {
const error = new Error("Operation was cancelled");
const result = classifyError(error);

expect(result.type).toBe("cancellation");
expect(result.isCancellation).toBe(true);
expect(result.isAbort).toBe(false);
expect(result.isAuth).toBe(false);
});

it("should prioritize abort over cancellation if both match", () => {
const error = new Error("Operation aborted");
error.name = "AbortError";
const result = classifyError(error);

expect(result.type).toBe("abort");
expect(result.isAbort).toBe(true);
expect(result.isCancellation).toBe(true); // Still detected as cancellation too
});

it("should classify cancellation errors with 'canceled' spelling", () => {
const error = new Error("Request was canceled");
const result = classifyError(error);

expect(result.type).toBe("cancellation");
expect(result.isCancellation).toBe(true);
});

it("should classify cancellation errors with 'stopped' message", () => {
const error = new Error("Process was stopped");
const result = classifyError(error);

expect(result.type).toBe("cancellation");
expect(result.isCancellation).toBe(true);
});

it("should classify generic Error as execution error", () => {
const error = new Error("Something went wrong");
const result = classifyError(error);

@@ -65,6 +65,47 @@ describe("fs-utils.ts", () => {
// Should not throw
await expect(mkdirSafe(symlinkPath)).resolves.toBeUndefined();
});

it("should handle ELOOP error gracefully when checking path", async () => {
// Mock lstat to throw ELOOP error
const originalLstat = fs.lstat;
const mkdirSafePath = path.join(testDir, "eloop-path");

vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });

// Should not throw, should return gracefully
await expect(mkdirSafe(mkdirSafePath)).resolves.toBeUndefined();

vi.restoreAllMocks();
});

it("should handle EEXIST error gracefully when creating directory", async () => {
const newDir = path.join(testDir, "race-condition-dir");

// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw EEXIST (race condition)
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "EEXIST" });

// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();

vi.restoreAllMocks();
});

it("should handle ELOOP error gracefully when creating directory", async () => {
const newDir = path.join(testDir, "eloop-create-dir");

// Mock lstat to return ENOENT (path doesn't exist)
// Then mock mkdir to throw ELOOP
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "ELOOP" });

// Should not throw, should return gracefully
await expect(mkdirSafe(newDir)).resolves.toBeUndefined();

vi.restoreAllMocks();
});
});

describe("existsSafe", () => {
@@ -109,5 +150,24 @@ describe("fs-utils.ts", () => {
const exists = await existsSafe(symlinkPath);
expect(exists).toBe(true);
});

it("should return true for ELOOP error (symlink loop)", async () => {
// Mock lstat to throw ELOOP error
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });

const exists = await existsSafe("/some/path/with/loop");
expect(exists).toBe(true);

vi.restoreAllMocks();
});

it("should throw for other errors", async () => {
// Mock lstat to throw a non-ENOENT, non-ELOOP error
vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "EACCES" });

await expect(existsSafe("/some/path")).rejects.toMatchObject({ code: "EACCES" });

vi.restoreAllMocks();
});
});
});

@@ -144,6 +144,40 @@ describe("sdk-options.ts", () => {
expect(options.maxTurns).toBe(MAX_TURNS.extended);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
});

it("should include systemPrompt when provided", async () => {
const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");

const options = createSuggestionsOptions({
cwd: "/test/path",
systemPrompt: "Custom prompt",
});

expect(options.systemPrompt).toBe("Custom prompt");
});

it("should include abortController when provided", async () => {
const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");

const abortController = new AbortController();
const options = createSuggestionsOptions({
cwd: "/test/path",
abortController,
});

expect(options.abortController).toBe(abortController);
});

it("should include outputFormat when provided", async () => {
const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");

const options = createSuggestionsOptions({
cwd: "/test/path",
outputFormat: { type: "json" },
});

expect(options.outputFormat).toEqual({ type: "json" });
});
});

describe("createChatOptions", () => {
@@ -205,6 +239,29 @@ describe("sdk-options.ts", () => {
autoAllowBashIfSandboxed: true,
});
});

it("should include systemPrompt when provided", async () => {
const { createAutoModeOptions } = await import("@/lib/sdk-options.js");

const options = createAutoModeOptions({
cwd: "/test/path",
systemPrompt: "Custom prompt",
});

expect(options.systemPrompt).toBe("Custom prompt");
});

it("should include abortController when provided", async () => {
const { createAutoModeOptions } = await import("@/lib/sdk-options.js");

const abortController = new AbortController();
const options = createAutoModeOptions({
cwd: "/test/path",
abortController,
});

expect(options.abortController).toBe(abortController);
});
});

describe("createCustomOptions", () => {
@@ -234,5 +291,42 @@ describe("sdk-options.ts", () => {
expect(options.maxTurns).toBe(MAX_TURNS.maximum);
expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
});

it("should include sandbox when provided", async () => {
const { createCustomOptions } = await import("@/lib/sdk-options.js");

const options = createCustomOptions({
cwd: "/test/path",
sandbox: { enabled: true, autoAllowBashIfSandboxed: false },
});

expect(options.sandbox).toEqual({
enabled: true,
autoAllowBashIfSandboxed: false,
});
});

it("should include systemPrompt when provided", async () => {
const { createCustomOptions } = await import("@/lib/sdk-options.js");

const options = createCustomOptions({
cwd: "/test/path",
systemPrompt: "Custom prompt",
});

expect(options.systemPrompt).toBe("Custom prompt");
});

it("should include abortController when provided", async () => {
const { createCustomOptions } = await import("@/lib/sdk-options.js");

const abortController = new AbortController();
const options = createCustomOptions({
cwd: "/test/path",
abortController,
});

expect(options.abortController).toBe(abortController);
});
});
});

@@ -53,9 +53,24 @@ describe("security.ts", () => {
expect(allowed).toContain(path.resolve("/data/dir"));
});

it("should include WORKSPACE_DIR if set", async () => {
process.env.ALLOWED_PROJECT_DIRS = "";
process.env.DATA_DIR = "";
process.env.WORKSPACE_DIR = "/workspace/dir";

const { initAllowedPaths, getAllowedPaths } = await import(
"@/lib/security.js"
);
initAllowedPaths();

const allowed = getAllowedPaths();
expect(allowed).toContain(path.resolve("/workspace/dir"));
});

it("should handle empty ALLOWED_PROJECT_DIRS", async () => {
process.env.ALLOWED_PROJECT_DIRS = "";
process.env.DATA_DIR = "/data";
delete process.env.WORKSPACE_DIR;

const { initAllowedPaths, getAllowedPaths } = await import(
"@automaker/platform"
@@ -70,6 +85,7 @@ describe("security.ts", () => {
it("should skip empty entries in comma list", async () => {
process.env.ALLOWED_PROJECT_DIRS = "/path1,,/path2, ,/path3";
process.env.DATA_DIR = "";
delete process.env.WORKSPACE_DIR;

const { initAllowedPaths, getAllowedPaths } = await import(
"@automaker/platform"

apps/server/tests/unit/lib/worktree-metadata.test.ts (new file, 394 lines)
@@ -0,0 +1,394 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
readWorktreeMetadata,
writeWorktreeMetadata,
updateWorktreePRInfo,
getWorktreePRInfo,
readAllWorktreeMetadata,
deleteWorktreeMetadata,
type WorktreeMetadata,
type WorktreePRInfo,
} from "@/lib/worktree-metadata.js";
import fs from "fs/promises";
import path from "path";
import os from "os";

describe("worktree-metadata.ts", () => {
let testProjectPath: string;

beforeEach(async () => {
testProjectPath = path.join(os.tmpdir(), `worktree-metadata-test-${Date.now()}`);
await fs.mkdir(testProjectPath, { recursive: true });
});

afterEach(async () => {
try {
await fs.rm(testProjectPath, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
});

describe("sanitizeBranchName", () => {
// Test through readWorktreeMetadata and writeWorktreeMetadata
it("should sanitize branch names with invalid characters", async () => {
const branch = "feature/test-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});

it("should sanitize branch names with Windows invalid characters", async () => {
const branch = "feature:test*branch?";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});

it("should sanitize Windows reserved names", async () => {
const branch = "CON";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});

it("should handle empty branch name", async () => {
const branch = "";
const metadata: WorktreeMetadata = {
branch: "branch",
createdAt: new Date().toISOString(),
};

// Empty branch name should be sanitized to "_branch"
await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});

it("should handle branch name that becomes empty after sanitization", async () => {
// Test branch that would become empty after removing invalid chars
const branch = "///";
const metadata: WorktreeMetadata = {
branch: "branch",
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
});

describe("readWorktreeMetadata", () => {
it("should return null when metadata file doesn't exist", async () => {
const result = await readWorktreeMetadata(testProjectPath, "nonexistent-branch");
expect(result).toBeNull();
});

it("should read existing metadata", async () => {
const branch = "test-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});

it("should read metadata with PR info", async () => {
const branch = "pr-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
pr: {
number: 123,
url: "https://github.com/owner/repo/pull/123",
title: "Test PR",
state: "open",
createdAt: new Date().toISOString(),
},
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});
});

describe("writeWorktreeMetadata", () => {
it("should create metadata directory if it doesn't exist", async () => {
const branch = "new-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata);
});

it("should overwrite existing metadata", async () => {
const branch = "existing-branch";
const metadata1: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
const metadata2: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
pr: {
number: 456,
url: "https://github.com/owner/repo/pull/456",
title: "Updated PR",
state: "closed",
createdAt: new Date().toISOString(),
},
};

await writeWorktreeMetadata(testProjectPath, branch, metadata1);
await writeWorktreeMetadata(testProjectPath, branch, metadata2);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toEqual(metadata2);
});
});

describe("updateWorktreePRInfo", () => {
it("should create new metadata if it doesn't exist", async () => {
const branch = "new-pr-branch";
const prInfo: WorktreePRInfo = {
number: 789,
url: "https://github.com/owner/repo/pull/789",
title: "New PR",
state: "open",
createdAt: new Date().toISOString(),
};

await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).not.toBeNull();
expect(result?.branch).toBe(branch);
expect(result?.pr).toEqual(prInfo);
});

it("should update existing metadata with PR info", async () => {
const branch = "existing-pr-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);

const prInfo: WorktreePRInfo = {
number: 999,
url: "https://github.com/owner/repo/pull/999",
title: "Updated PR",
state: "merged",
createdAt: new Date().toISOString(),
};

await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result?.pr).toEqual(prInfo);
});

it("should preserve existing metadata when updating PR info", async () => {
const branch = "preserve-branch";
const originalCreatedAt = new Date().toISOString();
const metadata: WorktreeMetadata = {
branch,
createdAt: originalCreatedAt,
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);

const prInfo: WorktreePRInfo = {
number: 111,
url: "https://github.com/owner/repo/pull/111",
title: "PR",
state: "open",
createdAt: new Date().toISOString(),
};

await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await readWorktreeMetadata(testProjectPath, branch);
expect(result?.createdAt).toBe(originalCreatedAt);
expect(result?.pr).toEqual(prInfo);
});
});

describe("getWorktreePRInfo", () => {
it("should return null when metadata doesn't exist", async () => {
const result = await getWorktreePRInfo(testProjectPath, "nonexistent");
expect(result).toBeNull();
});

it("should return null when metadata exists but has no PR info", async () => {
const branch = "no-pr-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
const result = await getWorktreePRInfo(testProjectPath, branch);
expect(result).toBeNull();
});

it("should return PR info when it exists", async () => {
const branch = "has-pr-branch";
const prInfo: WorktreePRInfo = {
number: 222,
url: "https://github.com/owner/repo/pull/222",
title: "Has PR",
state: "open",
createdAt: new Date().toISOString(),
};

await updateWorktreePRInfo(testProjectPath, branch, prInfo);
const result = await getWorktreePRInfo(testProjectPath, branch);
expect(result).toEqual(prInfo);
});
});

describe("readAllWorktreeMetadata", () => {
it("should return empty map when worktrees directory doesn't exist", async () => {
const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(0);
});

it("should return empty map when worktrees directory is empty", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
await fs.mkdir(worktreesDir, { recursive: true });

const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(0);
});

it("should read all worktree metadata", async () => {
const branch1 = "branch-1";
const branch2 = "branch-2";
const metadata1: WorktreeMetadata = {
branch: branch1,
createdAt: new Date().toISOString(),
};
const metadata2: WorktreeMetadata = {
branch: branch2,
createdAt: new Date().toISOString(),
pr: {
number: 333,
url: "https://github.com/owner/repo/pull/333",
title: "PR 3",
state: "open",
createdAt: new Date().toISOString(),
},
};

await writeWorktreeMetadata(testProjectPath, branch1, metadata1);
await writeWorktreeMetadata(testProjectPath, branch2, metadata2);

const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(2);
expect(result.get(branch1)).toEqual(metadata1);
expect(result.get(branch2)).toEqual(metadata2);
});

it("should skip directories without worktree.json", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
const emptyDir = path.join(worktreesDir, "empty-dir");
await fs.mkdir(emptyDir, { recursive: true });

const branch = "valid-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);

const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(1);
expect(result.get(branch)).toEqual(metadata);
});

it("should skip files in worktrees directory", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
await fs.mkdir(worktreesDir, { recursive: true });
const filePath = path.join(worktreesDir, "not-a-dir.txt");
await fs.writeFile(filePath, "content");

const branch = "valid-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);

const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(1);
expect(result.get(branch)).toEqual(metadata);
});

it("should skip directories with malformed JSON", async () => {
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
const badDir = path.join(worktreesDir, "bad-dir");
await fs.mkdir(badDir, { recursive: true });
const badJsonPath = path.join(badDir, "worktree.json");
await fs.writeFile(badJsonPath, "not valid json");

const branch = "valid-branch";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};
await writeWorktreeMetadata(testProjectPath, branch, metadata);

const result = await readAllWorktreeMetadata(testProjectPath);
expect(result.size).toBe(1);
expect(result.get(branch)).toEqual(metadata);
});
});

describe("deleteWorktreeMetadata", () => {
it("should delete worktree metadata directory", async () => {
const branch = "to-delete";
const metadata: WorktreeMetadata = {
branch,
createdAt: new Date().toISOString(),
};

await writeWorktreeMetadata(testProjectPath, branch, metadata);
let result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).not.toBeNull();

await deleteWorktreeMetadata(testProjectPath, branch);
result = await readWorktreeMetadata(testProjectPath, branch);
expect(result).toBeNull();
});

it("should handle deletion when metadata doesn't exist", async () => {
// Should not throw
await expect(
deleteWorktreeMetadata(testProjectPath, "nonexistent")
).resolves.toBeUndefined();
});
});
});

@@ -234,6 +234,30 @@ describe("claude-provider.ts", () => {
}),
});
});

it("should handle errors during execution and rethrow", async () => {
const consoleErrorSpy = vi.spyOn(console, "error").mockImplementation(() => {});
const testError = new Error("SDK execution failed");

vi.mocked(sdk.query).mockReturnValue(
(async function* () {
throw testError;
})()
);

const generator = provider.executeQuery({
prompt: "Test",
cwd: "/test",
});

await expect(collectAsyncGenerator(generator)).rejects.toThrow("SDK execution failed");
expect(consoleErrorSpy).toHaveBeenCalledWith(
"[ClaudeProvider] executeQuery() error during execution:",
testError
);

consoleErrorSpy.mockRestore();
});
});

describe("detectInstallation", () => {

apps/server/tests/unit/services/settings-service.test.ts (new file, 643 lines)
@@ -0,0 +1,643 @@
import { describe, it, expect, vi, beforeEach, afterEach } from "vitest";
import fs from "fs/promises";
import path from "path";
import os from "os";
import { SettingsService } from "@/services/settings-service.js";
import {
DEFAULT_GLOBAL_SETTINGS,
DEFAULT_CREDENTIALS,
DEFAULT_PROJECT_SETTINGS,
SETTINGS_VERSION,
CREDENTIALS_VERSION,
PROJECT_SETTINGS_VERSION,
type GlobalSettings,
type Credentials,
type ProjectSettings,
} from "@/types/settings.js";

describe("settings-service.ts", () => {
let testDataDir: string;
let testProjectDir: string;
let settingsService: SettingsService;

beforeEach(async () => {
testDataDir = path.join(os.tmpdir(), `settings-test-${Date.now()}`);
testProjectDir = path.join(os.tmpdir(), `project-test-${Date.now()}`);
await fs.mkdir(testDataDir, { recursive: true });
await fs.mkdir(testProjectDir, { recursive: true });
settingsService = new SettingsService(testDataDir);
});

afterEach(async () => {
try {
await fs.rm(testDataDir, { recursive: true, force: true });
await fs.rm(testProjectDir, { recursive: true, force: true });
} catch {
// Ignore cleanup errors
}
});

describe("getGlobalSettings", () => {
it("should return default settings when file does not exist", async () => {
const settings = await settingsService.getGlobalSettings();
expect(settings).toEqual(DEFAULT_GLOBAL_SETTINGS);
});

it("should read and return existing settings", async () => {
const customSettings: GlobalSettings = {
...DEFAULT_GLOBAL_SETTINGS,
theme: "light",
sidebarOpen: false,
maxConcurrency: 5,
};
const settingsPath = path.join(testDataDir, "settings.json");
await fs.writeFile(settingsPath, JSON.stringify(customSettings, null, 2));

const settings = await settingsService.getGlobalSettings();
expect(settings.theme).toBe("light");
expect(settings.sidebarOpen).toBe(false);
expect(settings.maxConcurrency).toBe(5);
});

it("should merge with defaults for missing properties", async () => {
const partialSettings = {
version: SETTINGS_VERSION,
theme: "dark",
};
const settingsPath = path.join(testDataDir, "settings.json");
await fs.writeFile(settingsPath, JSON.stringify(partialSettings, null, 2));

const settings = await settingsService.getGlobalSettings();
expect(settings.theme).toBe("dark");
expect(settings.sidebarOpen).toBe(DEFAULT_GLOBAL_SETTINGS.sidebarOpen);
expect(settings.maxConcurrency).toBe(DEFAULT_GLOBAL_SETTINGS.maxConcurrency);
});

it("should merge keyboard shortcuts deeply", async () => {
const customSettings: GlobalSettings = {
...DEFAULT_GLOBAL_SETTINGS,
keyboardShortcuts: {
...DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts,
board: "B",
},
};
const settingsPath = path.join(testDataDir, "settings.json");
await fs.writeFile(settingsPath, JSON.stringify(customSettings, null, 2));

const settings = await settingsService.getGlobalSettings();
expect(settings.keyboardShortcuts.board).toBe("B");
expect(settings.keyboardShortcuts.agent).toBe(
DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts.agent
);
});
});

describe("updateGlobalSettings", () => {
it("should create settings file with updates", async () => {
const updates: Partial<GlobalSettings> = {
theme: "light",
sidebarOpen: false,
};

const updated = await settingsService.updateGlobalSettings(updates);

expect(updated.theme).toBe("light");
expect(updated.sidebarOpen).toBe(false);
expect(updated.version).toBe(SETTINGS_VERSION);

const settingsPath = path.join(testDataDir, "settings.json");
const fileContent = await fs.readFile(settingsPath, "utf-8");
const saved = JSON.parse(fileContent);
expect(saved.theme).toBe("light");
expect(saved.sidebarOpen).toBe(false);
});

it("should merge updates with existing settings", async () => {
const initial: GlobalSettings = {
...DEFAULT_GLOBAL_SETTINGS,
theme: "dark",
maxConcurrency: 3,
};
const settingsPath = path.join(testDataDir, "settings.json");
await fs.writeFile(settingsPath, JSON.stringify(initial, null, 2));

const updates: Partial<GlobalSettings> = {
theme: "light",
};

const updated = await settingsService.updateGlobalSettings(updates);

expect(updated.theme).toBe("light");
expect(updated.maxConcurrency).toBe(3); // Preserved from initial
});

it("should deep merge keyboard shortcuts", async () => {
const updates: Partial<GlobalSettings> = {
keyboardShortcuts: {
board: "B",
},
};

const updated = await settingsService.updateGlobalSettings(updates);

expect(updated.keyboardShortcuts.board).toBe("B");
expect(updated.keyboardShortcuts.agent).toBe(
DEFAULT_GLOBAL_SETTINGS.keyboardShortcuts.agent
);
});

it("should create data directory if it does not exist", async () => {
const newDataDir = path.join(os.tmpdir(), `new-data-dir-${Date.now()}`);
const newService = new SettingsService(newDataDir);

await newService.updateGlobalSettings({ theme: "light" });

const stats = await fs.stat(newDataDir);
expect(stats.isDirectory()).toBe(true);

await fs.rm(newDataDir, { recursive: true, force: true });
});
});

describe("hasGlobalSettings", () => {
it("should return false when settings file does not exist", async () => {
const exists = await settingsService.hasGlobalSettings();
expect(exists).toBe(false);
});

it("should return true when settings file exists", async () => {
await settingsService.updateGlobalSettings({ theme: "light" });
const exists = await settingsService.hasGlobalSettings();
expect(exists).toBe(true);
});
});

describe("getCredentials", () => {
it("should return default credentials when file does not exist", async () => {
const credentials = await settingsService.getCredentials();
expect(credentials).toEqual(DEFAULT_CREDENTIALS);
});

it("should read and return existing credentials", async () => {
const customCredentials: Credentials = {
...DEFAULT_CREDENTIALS,
apiKeys: {
anthropic: "sk-test-key",
google: "",
openai: "",
},
};
const credentialsPath = path.join(testDataDir, "credentials.json");
await fs.writeFile(credentialsPath, JSON.stringify(customCredentials, null, 2));

const credentials = await settingsService.getCredentials();
expect(credentials.apiKeys.anthropic).toBe("sk-test-key");
});

it("should merge with defaults for missing api keys", async () => {
const partialCredentials = {
version: CREDENTIALS_VERSION,
apiKeys: {
anthropic: "sk-test",
},
};
const credentialsPath = path.join(testDataDir, "credentials.json");
await fs.writeFile(credentialsPath, JSON.stringify(partialCredentials, null, 2));

const credentials = await settingsService.getCredentials();
expect(credentials.apiKeys.anthropic).toBe("sk-test");
expect(credentials.apiKeys.google).toBe("");
expect(credentials.apiKeys.openai).toBe("");
});
});

describe("updateCredentials", () => {
|
||||
it("should create credentials file with updates", async () => {
|
||||
const updates: Partial<Credentials> = {
|
||||
apiKeys: {
|
||||
anthropic: "sk-test-key",
|
||||
google: "",
|
||||
openai: "",
|
||||
},
|
||||
};
|
||||
|
||||
const updated = await settingsService.updateCredentials(updates);
|
||||
|
||||
expect(updated.apiKeys.anthropic).toBe("sk-test-key");
|
||||
expect(updated.version).toBe(CREDENTIALS_VERSION);
|
||||
|
||||
const credentialsPath = path.join(testDataDir, "credentials.json");
|
||||
const fileContent = await fs.readFile(credentialsPath, "utf-8");
|
||||
const saved = JSON.parse(fileContent);
|
||||
expect(saved.apiKeys.anthropic).toBe("sk-test-key");
|
||||
});
|
||||
|
||||
it("should merge updates with existing credentials", async () => {
|
||||
const initial: Credentials = {
|
||||
...DEFAULT_CREDENTIALS,
|
||||
apiKeys: {
|
||||
anthropic: "sk-initial",
|
||||
google: "google-key",
|
||||
openai: "",
|
||||
},
|
||||
};
|
||||
const credentialsPath = path.join(testDataDir, "credentials.json");
|
||||
await fs.writeFile(credentialsPath, JSON.stringify(initial, null, 2));
|
||||
|
||||
const updates: Partial<Credentials> = {
|
||||
apiKeys: {
|
||||
anthropic: "sk-updated",
|
||||
},
|
||||
};
|
||||
|
||||
const updated = await settingsService.updateCredentials(updates);
|
||||
|
||||
expect(updated.apiKeys.anthropic).toBe("sk-updated");
|
||||
expect(updated.apiKeys.google).toBe("google-key"); // Preserved
|
||||
});
|
||||
|
||||
it("should deep merge api keys", async () => {
|
||||
const initial: Credentials = {
|
||||
...DEFAULT_CREDENTIALS,
|
||||
apiKeys: {
|
||||
anthropic: "sk-anthropic",
|
||||
google: "google-key",
|
||||
openai: "openai-key",
|
||||
},
|
||||
};
|
||||
const credentialsPath = path.join(testDataDir, "credentials.json");
|
||||
await fs.writeFile(credentialsPath, JSON.stringify(initial, null, 2));
|
||||
|
||||
const updates: Partial<Credentials> = {
|
||||
apiKeys: {
|
||||
openai: "new-openai-key",
|
||||
},
|
||||
};
|
||||
|
||||
const updated = await settingsService.updateCredentials(updates);
|
||||
|
||||
expect(updated.apiKeys.anthropic).toBe("sk-anthropic");
|
||||
expect(updated.apiKeys.google).toBe("google-key");
|
||||
expect(updated.apiKeys.openai).toBe("new-openai-key");
|
||||
});
|
||||
});
|
||||
|
||||
describe("getMaskedCredentials", () => {
|
||||
it("should return masked credentials for empty keys", async () => {
|
||||
const masked = await settingsService.getMaskedCredentials();
|
||||
expect(masked.anthropic.configured).toBe(false);
|
||||
expect(masked.anthropic.masked).toBe("");
|
||||
expect(masked.google.configured).toBe(false);
|
||||
expect(masked.openai.configured).toBe(false);
|
||||
});
|
||||
|
||||
it("should mask keys correctly", async () => {
|
||||
await settingsService.updateCredentials({
|
||||
apiKeys: {
|
||||
anthropic: "sk-ant-api03-1234567890abcdef",
|
||||
google: "AIzaSy1234567890abcdef",
|
||||
openai: "sk-1234567890abcdef",
|
||||
},
|
||||
});
|
||||
|
||||
const masked = await settingsService.getMaskedCredentials();
|
||||
expect(masked.anthropic.configured).toBe(true);
|
||||
expect(masked.anthropic.masked).toBe("sk-a...cdef");
|
||||
expect(masked.google.configured).toBe(true);
|
||||
expect(masked.google.masked).toBe("AIza...cdef");
|
||||
expect(masked.openai.configured).toBe(true);
|
||||
expect(masked.openai.masked).toBe("sk-1...cdef");
|
||||
});
|
||||
|
||||
it("should handle short keys", async () => {
|
||||
await settingsService.updateCredentials({
|
||||
apiKeys: {
|
||||
anthropic: "short",
|
||||
google: "",
|
||||
openai: "",
|
||||
},
|
||||
});
|
||||
|
||||
const masked = await settingsService.getMaskedCredentials();
|
||||
expect(masked.anthropic.configured).toBe(true);
|
||||
expect(masked.anthropic.masked).toBe("");
|
||||
});
|
||||
});
|
||||
|
||||
describe("hasCredentials", () => {
|
||||
it("should return false when credentials file does not exist", async () => {
|
||||
const exists = await settingsService.hasCredentials();
|
||||
expect(exists).toBe(false);
|
||||
});
|
||||
|
||||
it("should return true when credentials file exists", async () => {
|
||||
await settingsService.updateCredentials({
|
||||
apiKeys: { anthropic: "test", google: "", openai: "" },
|
||||
});
|
||||
const exists = await settingsService.hasCredentials();
|
||||
expect(exists).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getProjectSettings", () => {
|
||||
it("should return default settings when file does not exist", async () => {
|
||||
const settings = await settingsService.getProjectSettings(testProjectDir);
|
||||
expect(settings).toEqual(DEFAULT_PROJECT_SETTINGS);
|
||||
});
|
||||
|
||||
it("should read and return existing project settings", async () => {
|
||||
const customSettings: ProjectSettings = {
|
||||
...DEFAULT_PROJECT_SETTINGS,
|
||||
theme: "light",
|
||||
useWorktrees: true,
|
||||
};
|
||||
const automakerDir = path.join(testProjectDir, ".automaker");
|
||||
await fs.mkdir(automakerDir, { recursive: true });
|
||||
const settingsPath = path.join(automakerDir, "settings.json");
|
||||
await fs.writeFile(settingsPath, JSON.stringify(customSettings, null, 2));
|
||||
|
||||
const settings = await settingsService.getProjectSettings(testProjectDir);
|
||||
expect(settings.theme).toBe("light");
|
||||
expect(settings.useWorktrees).toBe(true);
|
||||
});
|
||||
|
||||
it("should merge with defaults for missing properties", async () => {
|
||||
const partialSettings = {
|
||||
version: PROJECT_SETTINGS_VERSION,
|
||||
theme: "dark",
|
||||
};
|
||||
const automakerDir = path.join(testProjectDir, ".automaker");
|
||||
await fs.mkdir(automakerDir, { recursive: true });
|
||||
const settingsPath = path.join(automakerDir, "settings.json");
|
||||
await fs.writeFile(settingsPath, JSON.stringify(partialSettings, null, 2));
|
||||
|
||||
const settings = await settingsService.getProjectSettings(testProjectDir);
|
||||
expect(settings.theme).toBe("dark");
|
||||
expect(settings.version).toBe(PROJECT_SETTINGS_VERSION);
|
||||
});
|
||||
});
|
||||
|
||||
describe("updateProjectSettings", () => {
|
||||
it("should create project settings file with updates", async () => {
|
||||
const updates: Partial<ProjectSettings> = {
|
||||
theme: "light",
|
||||
useWorktrees: true,
|
||||
};
|
||||
|
||||
const updated = await settingsService.updateProjectSettings(testProjectDir, updates);
|
||||
|
||||
expect(updated.theme).toBe("light");
|
||||
expect(updated.useWorktrees).toBe(true);
|
||||
expect(updated.version).toBe(PROJECT_SETTINGS_VERSION);
|
||||
|
||||
const automakerDir = path.join(testProjectDir, ".automaker");
|
||||
const settingsPath = path.join(automakerDir, "settings.json");
|
||||
const fileContent = await fs.readFile(settingsPath, "utf-8");
|
||||
const saved = JSON.parse(fileContent);
|
||||
expect(saved.theme).toBe("light");
|
||||
expect(saved.useWorktrees).toBe(true);
|
||||
});
|
||||
|
||||
it("should merge updates with existing project settings", async () => {
|
||||
const initial: ProjectSettings = {
|
||||
...DEFAULT_PROJECT_SETTINGS,
|
||||
theme: "dark",
|
||||
useWorktrees: false,
|
||||
};
|
||||
const automakerDir = path.join(testProjectDir, ".automaker");
|
||||
await fs.mkdir(automakerDir, { recursive: true });
|
||||
const settingsPath = path.join(automakerDir, "settings.json");
|
||||
await fs.writeFile(settingsPath, JSON.stringify(initial, null, 2));
|
||||
|
||||
const updates: Partial<ProjectSettings> = {
|
||||
theme: "light",
|
||||
};
|
||||
|
||||
const updated = await settingsService.updateProjectSettings(testProjectDir, updates);
|
||||
|
||||
expect(updated.theme).toBe("light");
|
||||
expect(updated.useWorktrees).toBe(false); // Preserved
|
||||
});
|
||||
|
||||
it("should deep merge board background", async () => {
|
||||
const initial: ProjectSettings = {
|
||||
...DEFAULT_PROJECT_SETTINGS,
|
||||
boardBackground: {
|
||||
imagePath: "/path/to/image.jpg",
|
||||
cardOpacity: 0.8,
|
||||
columnOpacity: 0.9,
|
||||
columnBorderEnabled: true,
|
||||
cardGlassmorphism: false,
|
||||
cardBorderEnabled: true,
|
||||
cardBorderOpacity: 0.5,
|
||||
hideScrollbar: false,
|
||||
},
|
||||
};
|
||||
const automakerDir = path.join(testProjectDir, ".automaker");
|
||||
await fs.mkdir(automakerDir, { recursive: true });
|
||||
const settingsPath = path.join(automakerDir, "settings.json");
|
||||
await fs.writeFile(settingsPath, JSON.stringify(initial, null, 2));
|
||||
|
||||
const updates: Partial<ProjectSettings> = {
|
||||
boardBackground: {
|
||||
cardOpacity: 0.9,
|
||||
},
|
||||
};
|
||||
|
||||
const updated = await settingsService.updateProjectSettings(testProjectDir, updates);
|
||||
|
||||
expect(updated.boardBackground?.imagePath).toBe("/path/to/image.jpg");
|
||||
expect(updated.boardBackground?.cardOpacity).toBe(0.9);
|
||||
expect(updated.boardBackground?.columnOpacity).toBe(0.9);
|
||||
});
|
||||
|
||||
it("should create .automaker directory if it does not exist", async () => {
|
||||
const newProjectDir = path.join(os.tmpdir(), `new-project-${Date.now()}`);
|
||||
|
||||
await settingsService.updateProjectSettings(newProjectDir, { theme: "light" });
|
||||
|
||||
const automakerDir = path.join(newProjectDir, ".automaker");
|
||||
const stats = await fs.stat(automakerDir);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
|
||||
await fs.rm(newProjectDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe("hasProjectSettings", () => {
|
||||
it("should return false when project settings file does not exist", async () => {
|
||||
const exists = await settingsService.hasProjectSettings(testProjectDir);
|
||||
expect(exists).toBe(false);
|
||||
});
|
||||
|
||||
it("should return true when project settings file exists", async () => {
|
||||
await settingsService.updateProjectSettings(testProjectDir, { theme: "light" });
|
||||
const exists = await settingsService.hasProjectSettings(testProjectDir);
|
||||
expect(exists).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("migrateFromLocalStorage", () => {
|
||||
it("should migrate global settings from localStorage data", async () => {
|
||||
const localStorageData = {
|
||||
"automaker-storage": JSON.stringify({
|
||||
state: {
|
||||
theme: "light",
|
||||
sidebarOpen: false,
|
||||
maxConcurrency: 5,
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
const result = await settingsService.migrateFromLocalStorage(localStorageData);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.migratedGlobalSettings).toBe(true);
|
||||
expect(result.migratedCredentials).toBe(false);
|
||||
expect(result.migratedProjectCount).toBe(0);
|
||||
|
||||
const settings = await settingsService.getGlobalSettings();
|
||||
expect(settings.theme).toBe("light");
|
||||
expect(settings.sidebarOpen).toBe(false);
|
||||
expect(settings.maxConcurrency).toBe(5);
|
||||
});
|
||||
|
||||
it("should migrate credentials from localStorage data", async () => {
|
||||
const localStorageData = {
|
||||
"automaker-storage": JSON.stringify({
|
||||
state: {
|
||||
apiKeys: {
|
||||
anthropic: "sk-test-key",
|
||||
google: "google-key",
|
||||
openai: "openai-key",
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
const result = await settingsService.migrateFromLocalStorage(localStorageData);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.migratedCredentials).toBe(true);
|
||||
|
||||
const credentials = await settingsService.getCredentials();
|
||||
expect(credentials.apiKeys.anthropic).toBe("sk-test-key");
|
||||
expect(credentials.apiKeys.google).toBe("google-key");
|
||||
expect(credentials.apiKeys.openai).toBe("openai-key");
|
||||
});
|
||||
|
||||
it("should migrate project settings from localStorage data", async () => {
|
||||
const localStorageData = {
|
||||
"automaker-storage": JSON.stringify({
|
||||
state: {
|
||||
projects: [
|
||||
{
|
||||
id: "proj1",
|
||||
name: "Project 1",
|
||||
path: testProjectDir,
|
||||
theme: "light",
|
||||
},
|
||||
],
|
||||
boardBackgroundByProject: {
|
||||
[testProjectDir]: {
|
||||
imagePath: "/path/to/image.jpg",
|
||||
cardOpacity: 0.8,
|
||||
columnOpacity: 0.9,
|
||||
columnBorderEnabled: true,
|
||||
cardGlassmorphism: false,
|
||||
cardBorderEnabled: true,
|
||||
cardBorderOpacity: 0.5,
|
||||
hideScrollbar: false,
|
||||
},
|
||||
},
|
||||
},
|
||||
}),
|
||||
};
|
||||
|
||||
const result = await settingsService.migrateFromLocalStorage(localStorageData);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.migratedProjectCount).toBe(1);
|
||||
|
||||
const projectSettings = await settingsService.getProjectSettings(testProjectDir);
|
||||
expect(projectSettings.theme).toBe("light");
|
||||
expect(projectSettings.boardBackground?.imagePath).toBe("/path/to/image.jpg");
|
||||
});
|
||||
|
||||
it("should handle direct localStorage values", async () => {
|
||||
const localStorageData = {
|
||||
"automaker:lastProjectDir": "/path/to/project",
|
||||
"file-browser-recent-folders": JSON.stringify(["/path1", "/path2"]),
|
||||
"worktree-panel-collapsed": "true",
|
||||
};
|
||||
|
||||
const result = await settingsService.migrateFromLocalStorage(localStorageData);
|
||||
|
||||
expect(result.success).toBe(true);
|
||||
const settings = await settingsService.getGlobalSettings();
|
||||
expect(settings.lastProjectDir).toBe("/path/to/project");
|
||||
expect(settings.recentFolders).toEqual(["/path1", "/path2"]);
|
||||
expect(settings.worktreePanelCollapsed).toBe(true);
|
||||
});
|
||||
|
||||
it("should handle invalid JSON gracefully", async () => {
|
||||
const localStorageData = {
|
||||
"automaker-storage": "invalid json",
|
||||
"file-browser-recent-folders": "invalid json",
|
||||
};
|
||||
|
||||
const result = await settingsService.migrateFromLocalStorage(localStorageData);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it("should handle migration errors gracefully", async () => {
|
||||
// Create a read-only directory to cause write errors
|
||||
const readOnlyDir = path.join(os.tmpdir(), `readonly-${Date.now()}`);
|
||||
await fs.mkdir(readOnlyDir, { recursive: true });
|
||||
await fs.chmod(readOnlyDir, 0o444);
|
||||
|
||||
const readOnlyService = new SettingsService(readOnlyDir);
|
||||
const localStorageData = {
|
||||
"automaker-storage": JSON.stringify({
|
||||
state: { theme: "light" },
|
||||
}),
|
||||
};
|
||||
|
||||
const result = await readOnlyService.migrateFromLocalStorage(localStorageData);
|
||||
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(0);
|
||||
|
||||
await fs.chmod(readOnlyDir, 0o755);
|
||||
await fs.rm(readOnlyDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
|
||||
describe("getDataDir", () => {
|
||||
it("should return the data directory path", () => {
|
||||
const dataDir = settingsService.getDataDir();
|
||||
expect(dataDir).toBe(testDataDir);
|
||||
});
|
||||
});
|
||||
|
||||
describe("atomicWriteJson", () => {
|
||||
it("should handle write errors and clean up temp file", async () => {
|
||||
// Create a read-only directory to cause write errors
|
||||
const readOnlyDir = path.join(os.tmpdir(), `readonly-${Date.now()}`);
|
||||
await fs.mkdir(readOnlyDir, { recursive: true });
|
||||
await fs.chmod(readOnlyDir, 0o444);
|
||||
|
||||
const readOnlyService = new SettingsService(readOnlyDir);
|
||||
|
||||
await expect(
|
||||
readOnlyService.updateGlobalSettings({ theme: "light" })
|
||||
).rejects.toThrow();
|
||||
|
||||
await fs.chmod(readOnlyDir, 0o755);
|
||||
await fs.rm(readOnlyDir, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||