Mirror of https://github.com/AutoMaker-Org/automaker.git (synced 2026-02-02 08:33:36 +00:00)
Merge origin/main into feature/shared-packages
Resolved conflicts:
- list.ts: Keep @automaker/git-utils import, add worktree-metadata import
- feature-loader.ts: Use Feature type from @automaker/types
- automaker-paths.test.ts: Import from @automaker/platform
- kanban-card.tsx: Accept deletion (split into components/)
- subprocess.test.ts: Keep libs/platform location

Added missing exports to @automaker/platform:
- getGlobalSettingsPath, getCredentialsPath, getProjectSettingsPath, ensureDataDir

Added title and titleGenerating fields to @automaker/types Feature interface.

🤖 Generated with [Claude Code](https://claude.com/claude-code)

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
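For reference, a minimal sketch of the two fields the commit message says were added to the shared Feature interface. The commit only names the fields; their optionality and types (string / boolean) are assumptions, not confirmed by this diff.

// Sketch only: fields added to the @automaker/types Feature interface.
export interface Feature {
  // ...existing fields...
  title?: string;            // assumed: display title for the feature
  titleGenerating?: boolean; // assumed: true while a title is being generated
}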
@@ -13,6 +13,10 @@ import {
  getAppSpecPath,
  getBranchTrackingPath,
  ensureAutomakerDir,
  getGlobalSettingsPath,
  getCredentialsPath,
  getProjectSettingsPath,
  ensureDataDir,
} from "@automaker/platform";

describe("automaker-paths.ts", () => {
@@ -136,4 +140,91 @@ describe("automaker-paths.ts", () => {
      expect(result).toBe(automakerDir);
    });
  });

  describe("getGlobalSettingsPath", () => {
    it("should return path to settings.json in data directory", () => {
      const dataDir = "/test/data";
      const result = getGlobalSettingsPath(dataDir);
      expect(result).toBe(path.join(dataDir, "settings.json"));
    });

    it("should handle paths with trailing slashes", () => {
      const dataDir = "/test/data" + path.sep;
      const result = getGlobalSettingsPath(dataDir);
      expect(result).toBe(path.join(dataDir, "settings.json"));
    });
  });

  describe("getCredentialsPath", () => {
    it("should return path to credentials.json in data directory", () => {
      const dataDir = "/test/data";
      const result = getCredentialsPath(dataDir);
      expect(result).toBe(path.join(dataDir, "credentials.json"));
    });

    it("should handle paths with trailing slashes", () => {
      const dataDir = "/test/data" + path.sep;
      const result = getCredentialsPath(dataDir);
      expect(result).toBe(path.join(dataDir, "credentials.json"));
    });
  });

  describe("getProjectSettingsPath", () => {
    it("should return path to settings.json in project .automaker directory", () => {
      const projectPath = "/test/project";
      const result = getProjectSettingsPath(projectPath);
      expect(result).toBe(
        path.join(projectPath, ".automaker", "settings.json")
      );
    });

    it("should handle paths with trailing slashes", () => {
      const projectPath = "/test/project" + path.sep;
      const result = getProjectSettingsPath(projectPath);
      expect(result).toBe(
        path.join(projectPath, ".automaker", "settings.json")
      );
    });
  });

  describe("ensureDataDir", () => {
    let testDir: string;

    beforeEach(async () => {
      testDir = path.join(os.tmpdir(), `data-dir-test-${Date.now()}`);
    });

    afterEach(async () => {
      try {
        await fs.rm(testDir, { recursive: true, force: true });
      } catch {
        // Ignore cleanup errors
      }
    });

    it("should create data directory and return path", async () => {
      const result = await ensureDataDir(testDir);

      expect(result).toBe(testDir);
      const stats = await fs.stat(testDir);
      expect(stats.isDirectory()).toBe(true);
    });

    it("should succeed if directory already exists", async () => {
      await fs.mkdir(testDir, { recursive: true });

      const result = await ensureDataDir(testDir);

      expect(result).toBe(testDir);
    });

    it("should create nested directories", async () => {
      const nestedDir = path.join(testDir, "nested", "deep");
      const result = await ensureDataDir(nestedDir);

      expect(result).toBe(nestedDir);
      const stats = await fs.stat(nestedDir);
      expect(stats.isDirectory()).toBe(true);
    });
  });
});
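The expectations above pin down the new @automaker/platform path helpers fairly completely. A minimal sketch consistent with these tests follows; the actual implementation in @automaker/platform may differ.

// Sketch only: shapes inferred from the tests above.
import path from "path";
import fs from "fs/promises";

export function getGlobalSettingsPath(dataDir: string): string {
  return path.join(dataDir, "settings.json");
}

export function getCredentialsPath(dataDir: string): string {
  return path.join(dataDir, "credentials.json");
}

export function getProjectSettingsPath(projectPath: string): string {
  return path.join(projectPath, ".automaker", "settings.json");
}

export async function ensureDataDir(dataDir: string): Promise<string> {
  // recursive: true covers the "already exists" and nested-directory cases.
  await fs.mkdir(dataDir, { recursive: true });
  return dataDir;
}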
@@ -2,6 +2,7 @@ import { describe, it, expect } from "vitest";
import {
  isAbortError,
  isAuthenticationError,
  isCancellationError,
  classifyError,
  getUserFriendlyErrorMessage,
  type ErrorType,
@@ -32,6 +33,34 @@ describe("error-handler.ts", () => {
    });
  });

  describe("isCancellationError", () => {
    it("should detect 'cancelled' message", () => {
      expect(isCancellationError("Operation was cancelled")).toBe(true);
    });

    it("should detect 'canceled' message", () => {
      expect(isCancellationError("Request was canceled")).toBe(true);
    });

    it("should detect 'stopped' message", () => {
      expect(isCancellationError("Process was stopped")).toBe(true);
    });

    it("should detect 'aborted' message", () => {
      expect(isCancellationError("Task was aborted")).toBe(true);
    });

    it("should be case insensitive", () => {
      expect(isCancellationError("CANCELLED")).toBe(true);
      expect(isCancellationError("Canceled")).toBe(true);
    });

    it("should return false for non-cancellation errors", () => {
      expect(isCancellationError("File not found")).toBe(false);
      expect(isCancellationError("Network error")).toBe(false);
    });
  });

  describe("isAuthenticationError", () => {
    it("should detect 'Authentication failed' message", () => {
      expect(isAuthenticationError("Authentication failed")).toBe(true);
@@ -91,6 +120,42 @@ describe("error-handler.ts", () => {
      expect(result.isAbort).toBe(true); // Still detected as abort too
    });

    it("should classify cancellation errors", () => {
      const error = new Error("Operation was cancelled");
      const result = classifyError(error);

      expect(result.type).toBe("cancellation");
      expect(result.isCancellation).toBe(true);
      expect(result.isAbort).toBe(false);
      expect(result.isAuth).toBe(false);
    });

    it("should prioritize abort over cancellation if both match", () => {
      const error = new Error("Operation aborted");
      error.name = "AbortError";
      const result = classifyError(error);

      expect(result.type).toBe("abort");
      expect(result.isAbort).toBe(true);
      expect(result.isCancellation).toBe(true); // Still detected as cancellation too
    });

    it("should classify cancellation errors with 'canceled' spelling", () => {
      const error = new Error("Request was canceled");
      const result = classifyError(error);

      expect(result.type).toBe("cancellation");
      expect(result.isCancellation).toBe(true);
    });

    it("should classify cancellation errors with 'stopped' message", () => {
      const error = new Error("Process was stopped");
      const result = classifyError(error);

      expect(result.type).toBe("cancellation");
      expect(result.isCancellation).toBe(true);
    });

    it("should classify generic Error as execution error", () => {
      const error = new Error("Something went wrong");
      const result = classifyError(error);
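These cases encode a small contract: cancellation is detected by case-insensitive keyword matching (cancelled, canceled, stopped, aborted), and an AbortError wins over a plain cancellation match while still being flagged as one. A rough sketch of that behavior, not the real module (which also handles authentication errors and more):

// Sketch only: behavior inferred from the tests above.
export function isCancellationError(message: string): boolean {
  const m = message.toLowerCase();
  return ["cancelled", "canceled", "stopped", "aborted"].some((k) => m.includes(k));
}

export function classifyError(error: Error) {
  const isAbort = error.name === "AbortError"; // assumed abort check
  const isCancellation = isCancellationError(error.message);
  // Abort takes priority over cancellation when both match; anything else is "execution".
  const type = isAbort ? "abort" : isCancellation ? "cancellation" : "execution";
  return { type, isAbort, isCancellation, isAuth: false /* auth check elided in this sketch */ };
}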
@@ -65,6 +65,47 @@ describe("fs-utils.ts", () => {
      // Should not throw
      await expect(mkdirSafe(symlinkPath)).resolves.toBeUndefined();
    });

    it("should handle ELOOP error gracefully when checking path", async () => {
      // Mock lstat to throw ELOOP error
      const originalLstat = fs.lstat;
      const mkdirSafePath = path.join(testDir, "eloop-path");

      vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });

      // Should not throw, should return gracefully
      await expect(mkdirSafe(mkdirSafePath)).resolves.toBeUndefined();

      vi.restoreAllMocks();
    });

    it("should handle EEXIST error gracefully when creating directory", async () => {
      const newDir = path.join(testDir, "race-condition-dir");

      // Mock lstat to return ENOENT (path doesn't exist)
      // Then mock mkdir to throw EEXIST (race condition)
      vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
      vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "EEXIST" });

      // Should not throw, should return gracefully
      await expect(mkdirSafe(newDir)).resolves.toBeUndefined();

      vi.restoreAllMocks();
    });

    it("should handle ELOOP error gracefully when creating directory", async () => {
      const newDir = path.join(testDir, "eloop-create-dir");

      // Mock lstat to return ENOENT (path doesn't exist)
      // Then mock mkdir to throw ELOOP
      vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ENOENT" });
      vi.spyOn(fs, "mkdir").mockRejectedValueOnce({ code: "ELOOP" });

      // Should not throw, should return gracefully
      await expect(mkdirSafe(newDir)).resolves.toBeUndefined();

      vi.restoreAllMocks();
    });
  });

  describe("existsSafe", () => {
@@ -109,5 +150,24 @@ describe("fs-utils.ts", () => {
      const exists = await existsSafe(symlinkPath);
      expect(exists).toBe(true);
    });

    it("should return true for ELOOP error (symlink loop)", async () => {
      // Mock lstat to throw ELOOP error
      vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "ELOOP" });

      const exists = await existsSafe("/some/path/with/loop");
      expect(exists).toBe(true);

      vi.restoreAllMocks();
    });

    it("should throw for other errors", async () => {
      // Mock lstat to throw a non-ENOENT, non-ELOOP error
      vi.spyOn(fs, "lstat").mockRejectedValueOnce({ code: "EACCES" });

      await expect(existsSafe("/some/path")).rejects.toMatchObject({ code: "EACCES" });

      vi.restoreAllMocks();
    });
  });
});
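The tests describe the error-swallowing contract of mkdirSafe and existsSafe: ELOOP and EEXIST are treated as benign, ENOENT on the check means "go ahead and create", and any other error from existsSafe propagates. A rough sketch consistent with that contract, under the assumption that both helpers probe with fs.lstat:

// Sketch only: error handling inferred from the tests above.
import fs from "fs/promises";

export async function mkdirSafe(dirPath: string): Promise<void> {
  try {
    await fs.lstat(dirPath);
    return; // path (or a symlink at that path) already exists
  } catch (err: any) {
    if (err?.code === "ELOOP") return; // symlink loop: treat as existing
    if (err?.code !== "ENOENT") throw err;
  }
  try {
    await fs.mkdir(dirPath, { recursive: true });
  } catch (err: any) {
    // EEXIST covers the create race; ELOOP again means a symlink loop.
    if (err?.code === "EEXIST" || err?.code === "ELOOP") return;
    throw err;
  }
}

export async function existsSafe(p: string): Promise<boolean> {
  try {
    await fs.lstat(p);
    return true;
  } catch (err: any) {
    if (err?.code === "ENOENT") return false;
    if (err?.code === "ELOOP") return true; // a looping symlink still "exists"
    throw err; // e.g. EACCES propagates to the caller
  }
}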
@@ -144,6 +144,40 @@ describe("sdk-options.ts", () => {
      expect(options.maxTurns).toBe(MAX_TURNS.extended);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
    });

    it("should include systemPrompt when provided", async () => {
      const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");

      const options = createSuggestionsOptions({
        cwd: "/test/path",
        systemPrompt: "Custom prompt",
      });

      expect(options.systemPrompt).toBe("Custom prompt");
    });

    it("should include abortController when provided", async () => {
      const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");

      const abortController = new AbortController();
      const options = createSuggestionsOptions({
        cwd: "/test/path",
        abortController,
      });

      expect(options.abortController).toBe(abortController);
    });

    it("should include outputFormat when provided", async () => {
      const { createSuggestionsOptions } = await import("@/lib/sdk-options.js");

      const options = createSuggestionsOptions({
        cwd: "/test/path",
        outputFormat: { type: "json" },
      });

      expect(options.outputFormat).toEqual({ type: "json" });
    });
  });

  describe("createChatOptions", () => {
@@ -205,6 +239,29 @@ describe("sdk-options.ts", () => {
        autoAllowBashIfSandboxed: true,
      });
    });

    it("should include systemPrompt when provided", async () => {
      const { createAutoModeOptions } = await import("@/lib/sdk-options.js");

      const options = createAutoModeOptions({
        cwd: "/test/path",
        systemPrompt: "Custom prompt",
      });

      expect(options.systemPrompt).toBe("Custom prompt");
    });

    it("should include abortController when provided", async () => {
      const { createAutoModeOptions } = await import("@/lib/sdk-options.js");

      const abortController = new AbortController();
      const options = createAutoModeOptions({
        cwd: "/test/path",
        abortController,
      });

      expect(options.abortController).toBe(abortController);
    });
  });

  describe("createCustomOptions", () => {
@@ -234,5 +291,42 @@ describe("sdk-options.ts", () => {
      expect(options.maxTurns).toBe(MAX_TURNS.maximum);
      expect(options.allowedTools).toEqual([...TOOL_PRESETS.readOnly]);
    });

    it("should include sandbox when provided", async () => {
      const { createCustomOptions } = await import("@/lib/sdk-options.js");

      const options = createCustomOptions({
        cwd: "/test/path",
        sandbox: { enabled: true, autoAllowBashIfSandboxed: false },
      });

      expect(options.sandbox).toEqual({
        enabled: true,
        autoAllowBashIfSandboxed: false,
      });
    });

    it("should include systemPrompt when provided", async () => {
      const { createCustomOptions } = await import("@/lib/sdk-options.js");

      const options = createCustomOptions({
        cwd: "/test/path",
        systemPrompt: "Custom prompt",
      });

      expect(options.systemPrompt).toBe("Custom prompt");
    });

    it("should include abortController when provided", async () => {
      const { createCustomOptions } = await import("@/lib/sdk-options.js");

      const abortController = new AbortController();
      const options = createCustomOptions({
        cwd: "/test/path",
        abortController,
      });

      expect(options.abortController).toBe(abortController);
    });
  });
});
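All three factories are exercised the same way: a required cwd plus optional fields that only appear on the returned options object when the caller supplies them. A hedged sketch of that conditional-passthrough shape; the option names come from the tests, while the full option set and defaults such as MAX_TURNS and TOOL_PRESETS live in the real module:

// Sketch only: the optional-passthrough pattern the tests above exercise.
interface CustomOptionsInput {
  cwd: string;
  systemPrompt?: string;
  abortController?: AbortController;
  outputFormat?: { type: string };
  sandbox?: { enabled: boolean; autoAllowBashIfSandboxed: boolean };
}

export function createCustomOptions(input: CustomOptionsInput) {
  return {
    cwd: input.cwd,
    // defaults like maxTurns and allowedTools are filled in by the real module
    ...(input.systemPrompt !== undefined && { systemPrompt: input.systemPrompt }),
    ...(input.abortController && { abortController: input.abortController }),
    ...(input.outputFormat && { outputFormat: input.outputFormat }),
    ...(input.sandbox && { sandbox: input.sandbox }),
  };
}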
@@ -53,9 +53,24 @@ describe("security.ts", () => {
      expect(allowed).toContain(path.resolve("/data/dir"));
    });

    it("should include WORKSPACE_DIR if set", async () => {
      process.env.ALLOWED_PROJECT_DIRS = "";
      process.env.DATA_DIR = "";
      process.env.WORKSPACE_DIR = "/workspace/dir";

      const { initAllowedPaths, getAllowedPaths } = await import(
        "@/lib/security.js"
      );
      initAllowedPaths();

      const allowed = getAllowedPaths();
      expect(allowed).toContain(path.resolve("/workspace/dir"));
    });

    it("should handle empty ALLOWED_PROJECT_DIRS", async () => {
      process.env.ALLOWED_PROJECT_DIRS = "";
      process.env.DATA_DIR = "/data";
      delete process.env.WORKSPACE_DIR;

      const { initAllowedPaths, getAllowedPaths } = await import(
        "@automaker/platform"
@@ -70,6 +85,7 @@ describe("security.ts", () => {
    it("should skip empty entries in comma list", async () => {
      process.env.ALLOWED_PROJECT_DIRS = "/path1,,/path2, ,/path3";
      process.env.DATA_DIR = "";
      delete process.env.WORKSPACE_DIR;

      const { initAllowedPaths, getAllowedPaths } = await import(
        "@automaker/platform"
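Taken together, these cases describe how the allow-list is initialized: ALLOWED_PROJECT_DIRS is split on commas with blank or whitespace-only entries dropped, DATA_DIR and WORKSPACE_DIR are appended when set, and everything is resolved to an absolute path. A rough sketch of that initialization; the module layout and state handling are assumptions:

// Sketch only: env-var handling inferred from the tests above.
import path from "path";

let allowedPaths: string[] = [];

export function initAllowedPaths(): void {
  const entries = (process.env.ALLOWED_PROJECT_DIRS ?? "")
    .split(",")
    .map((entry) => entry.trim())
    .filter((entry) => entry.length > 0); // skips empty entries in "/path1,,/path2, ,/path3"

  for (const dir of [process.env.DATA_DIR, process.env.WORKSPACE_DIR]) {
    if (dir) entries.push(dir);
  }

  allowedPaths = entries.map((entry) => path.resolve(entry));
}

export function getAllowedPaths(): string[] {
  return allowedPaths;
}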
apps/server/tests/unit/lib/worktree-metadata.test.ts (new file, 394 lines)
@@ -0,0 +1,394 @@
import { describe, it, expect, beforeEach, afterEach } from "vitest";
import {
  readWorktreeMetadata,
  writeWorktreeMetadata,
  updateWorktreePRInfo,
  getWorktreePRInfo,
  readAllWorktreeMetadata,
  deleteWorktreeMetadata,
  type WorktreeMetadata,
  type WorktreePRInfo,
} from "@/lib/worktree-metadata.js";
import fs from "fs/promises";
import path from "path";
import os from "os";

describe("worktree-metadata.ts", () => {
  let testProjectPath: string;

  beforeEach(async () => {
    testProjectPath = path.join(os.tmpdir(), `worktree-metadata-test-${Date.now()}`);
    await fs.mkdir(testProjectPath, { recursive: true });
  });

  afterEach(async () => {
    try {
      await fs.rm(testProjectPath, { recursive: true, force: true });
    } catch {
      // Ignore cleanup errors
    }
  });

  describe("sanitizeBranchName", () => {
    // Test through readWorktreeMetadata and writeWorktreeMetadata
    it("should sanitize branch names with invalid characters", async () => {
      const branch = "feature/test-branch";
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it("should sanitize branch names with Windows invalid characters", async () => {
      const branch = "feature:test*branch?";
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it("should sanitize Windows reserved names", async () => {
      const branch = "CON";
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it("should handle empty branch name", async () => {
      const branch = "";
      const metadata: WorktreeMetadata = {
        branch: "branch",
        createdAt: new Date().toISOString(),
      };

      // Empty branch name should be sanitized to "_branch"
      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it("should handle branch name that becomes empty after sanitization", async () => {
      // Test branch that would become empty after removing invalid chars
      const branch = "///";
      const metadata: WorktreeMetadata = {
        branch: "branch",
        createdAt: new Date().toISOString(),
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });
  });

  describe("readWorktreeMetadata", () => {
    it("should return null when metadata file doesn't exist", async () => {
      const result = await readWorktreeMetadata(testProjectPath, "nonexistent-branch");
      expect(result).toBeNull();
    });

    it("should read existing metadata", async () => {
      const branch = "test-branch";
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it("should read metadata with PR info", async () => {
      const branch = "pr-branch";
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
        pr: {
          number: 123,
          url: "https://github.com/owner/repo/pull/123",
          title: "Test PR",
          state: "open",
          createdAt: new Date().toISOString(),
        },
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });
  });

  describe("writeWorktreeMetadata", () => {
    it("should create metadata directory if it doesn't exist", async () => {
      const branch = "new-branch";
      const metadata: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata);
    });

    it("should overwrite existing metadata", async () => {
      const branch = "existing-branch";
      const metadata1: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
      };
      const metadata2: WorktreeMetadata = {
        branch,
        createdAt: new Date().toISOString(),
        pr: {
          number: 456,
          url: "https://github.com/owner/repo/pull/456",
          title: "Updated PR",
          state: "closed",
          createdAt: new Date().toISOString(),
        },
      };

      await writeWorktreeMetadata(testProjectPath, branch, metadata1);
      await writeWorktreeMetadata(testProjectPath, branch, metadata2);
      const result = await readWorktreeMetadata(testProjectPath, branch);
      expect(result).toEqual(metadata2);
    });
  });

describe("updateWorktreePRInfo", () => {
|
||||
it("should create new metadata if it doesn't exist", async () => {
|
||||
const branch = "new-pr-branch";
|
||||
const prInfo: WorktreePRInfo = {
|
||||
number: 789,
|
||||
url: "https://github.com/owner/repo/pull/789",
|
||||
title: "New PR",
|
||||
state: "open",
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
|
||||
const result = await readWorktreeMetadata(testProjectPath, branch);
|
||||
expect(result).not.toBeNull();
|
||||
expect(result?.branch).toBe(branch);
|
||||
expect(result?.pr).toEqual(prInfo);
|
||||
});
|
||||
|
||||
it("should update existing metadata with PR info", async () => {
|
||||
const branch = "existing-pr-branch";
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await writeWorktreeMetadata(testProjectPath, branch, metadata);
|
||||
|
||||
const prInfo: WorktreePRInfo = {
|
||||
number: 999,
|
||||
url: "https://github.com/owner/repo/pull/999",
|
||||
title: "Updated PR",
|
||||
state: "merged",
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
|
||||
const result = await readWorktreeMetadata(testProjectPath, branch);
|
||||
expect(result?.pr).toEqual(prInfo);
|
||||
});
|
||||
|
||||
it("should preserve existing metadata when updating PR info", async () => {
|
||||
const branch = "preserve-branch";
|
||||
const originalCreatedAt = new Date().toISOString();
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: originalCreatedAt,
|
||||
};
|
||||
|
||||
await writeWorktreeMetadata(testProjectPath, branch, metadata);
|
||||
|
||||
const prInfo: WorktreePRInfo = {
|
||||
number: 111,
|
||||
url: "https://github.com/owner/repo/pull/111",
|
||||
title: "PR",
|
||||
state: "open",
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
|
||||
const result = await readWorktreeMetadata(testProjectPath, branch);
|
||||
expect(result?.createdAt).toBe(originalCreatedAt);
|
||||
expect(result?.pr).toEqual(prInfo);
|
||||
});
|
||||
});
|
||||
|
||||
describe("getWorktreePRInfo", () => {
|
||||
it("should return null when metadata doesn't exist", async () => {
|
||||
const result = await getWorktreePRInfo(testProjectPath, "nonexistent");
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return null when metadata exists but has no PR info", async () => {
|
||||
const branch = "no-pr-branch";
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await writeWorktreeMetadata(testProjectPath, branch, metadata);
|
||||
const result = await getWorktreePRInfo(testProjectPath, branch);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should return PR info when it exists", async () => {
|
||||
const branch = "has-pr-branch";
|
||||
const prInfo: WorktreePRInfo = {
|
||||
number: 222,
|
||||
url: "https://github.com/owner/repo/pull/222",
|
||||
title: "Has PR",
|
||||
state: "open",
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await updateWorktreePRInfo(testProjectPath, branch, prInfo);
|
||||
const result = await getWorktreePRInfo(testProjectPath, branch);
|
||||
expect(result).toEqual(prInfo);
|
||||
});
|
||||
});
|
||||
|
||||
describe("readAllWorktreeMetadata", () => {
|
||||
it("should return empty map when worktrees directory doesn't exist", async () => {
|
||||
const result = await readAllWorktreeMetadata(testProjectPath);
|
||||
expect(result.size).toBe(0);
|
||||
});
|
||||
|
||||
it("should return empty map when worktrees directory is empty", async () => {
|
||||
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
|
||||
await fs.mkdir(worktreesDir, { recursive: true });
|
||||
|
||||
const result = await readAllWorktreeMetadata(testProjectPath);
|
||||
expect(result.size).toBe(0);
|
||||
});
|
||||
|
||||
it("should read all worktree metadata", async () => {
|
||||
const branch1 = "branch-1";
|
||||
const branch2 = "branch-2";
|
||||
const metadata1: WorktreeMetadata = {
|
||||
branch: branch1,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
const metadata2: WorktreeMetadata = {
|
||||
branch: branch2,
|
||||
createdAt: new Date().toISOString(),
|
||||
pr: {
|
||||
number: 333,
|
||||
url: "https://github.com/owner/repo/pull/333",
|
||||
title: "PR 3",
|
||||
state: "open",
|
||||
createdAt: new Date().toISOString(),
|
||||
},
|
||||
};
|
||||
|
||||
await writeWorktreeMetadata(testProjectPath, branch1, metadata1);
|
||||
await writeWorktreeMetadata(testProjectPath, branch2, metadata2);
|
||||
|
||||
const result = await readAllWorktreeMetadata(testProjectPath);
|
||||
expect(result.size).toBe(2);
|
||||
expect(result.get(branch1)).toEqual(metadata1);
|
||||
expect(result.get(branch2)).toEqual(metadata2);
|
||||
});
|
||||
|
||||
it("should skip directories without worktree.json", async () => {
|
||||
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
|
||||
const emptyDir = path.join(worktreesDir, "empty-dir");
|
||||
await fs.mkdir(emptyDir, { recursive: true });
|
||||
|
||||
const branch = "valid-branch";
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
await writeWorktreeMetadata(testProjectPath, branch, metadata);
|
||||
|
||||
const result = await readAllWorktreeMetadata(testProjectPath);
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(branch)).toEqual(metadata);
|
||||
});
|
||||
|
||||
it("should skip files in worktrees directory", async () => {
|
||||
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
|
||||
await fs.mkdir(worktreesDir, { recursive: true });
|
||||
const filePath = path.join(worktreesDir, "not-a-dir.txt");
|
||||
await fs.writeFile(filePath, "content");
|
||||
|
||||
const branch = "valid-branch";
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
await writeWorktreeMetadata(testProjectPath, branch, metadata);
|
||||
|
||||
const result = await readAllWorktreeMetadata(testProjectPath);
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(branch)).toEqual(metadata);
|
||||
});
|
||||
|
||||
it("should skip directories with malformed JSON", async () => {
|
||||
const worktreesDir = path.join(testProjectPath, ".automaker", "worktrees");
|
||||
const badDir = path.join(worktreesDir, "bad-dir");
|
||||
await fs.mkdir(badDir, { recursive: true });
|
||||
const badJsonPath = path.join(badDir, "worktree.json");
|
||||
await fs.writeFile(badJsonPath, "not valid json");
|
||||
|
||||
const branch = "valid-branch";
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
await writeWorktreeMetadata(testProjectPath, branch, metadata);
|
||||
|
||||
const result = await readAllWorktreeMetadata(testProjectPath);
|
||||
expect(result.size).toBe(1);
|
||||
expect(result.get(branch)).toEqual(metadata);
|
||||
});
|
||||
});
|
||||
|
||||
describe("deleteWorktreeMetadata", () => {
|
||||
it("should delete worktree metadata directory", async () => {
|
||||
const branch = "to-delete";
|
||||
const metadata: WorktreeMetadata = {
|
||||
branch,
|
||||
createdAt: new Date().toISOString(),
|
||||
};
|
||||
|
||||
await writeWorktreeMetadata(testProjectPath, branch, metadata);
|
||||
let result = await readWorktreeMetadata(testProjectPath, branch);
|
||||
expect(result).not.toBeNull();
|
||||
|
||||
await deleteWorktreeMetadata(testProjectPath, branch);
|
||||
result = await readWorktreeMetadata(testProjectPath, branch);
|
||||
expect(result).toBeNull();
|
||||
});
|
||||
|
||||
it("should handle deletion when metadata doesn't exist", async () => {
|
||||
// Should not throw
|
||||
await expect(
|
||||
deleteWorktreeMetadata(testProjectPath, "nonexistent")
|
||||
).resolves.toBeUndefined();
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
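The tests pin down the on-disk layout: one directory per branch under <project>/.automaker/worktrees, keyed by a sanitized branch name, with a worktree.json file inside. A minimal sketch of the write/read pair under those assumptions; the exact sanitization rules (the tests only hint at a "_branch" fallback and Windows-safe replacement) belong to the real module:

// Sketch only: storage layout inferred from the tests above.
import fs from "fs/promises";
import path from "path";

function sanitizeBranchName(branch: string): string {
  // Assumed rule: replace path separators and Windows-invalid characters,
  // and fall back to "_branch" when nothing usable remains.
  const cleaned = branch.replace(/[/\\:*?"<>|]/g, "_");
  return cleaned.length > 0 ? cleaned : "_branch";
}

function metadataFilePath(projectPath: string, branch: string): string {
  return path.join(
    projectPath, ".automaker", "worktrees", sanitizeBranchName(branch), "worktree.json"
  );
}

export async function writeWorktreeMetadata(
  projectPath: string, branch: string, metadata: unknown
): Promise<void> {
  const file = metadataFilePath(projectPath, branch);
  await fs.mkdir(path.dirname(file), { recursive: true });
  await fs.writeFile(file, JSON.stringify(metadata, null, 2));
}

export async function readWorktreeMetadata(
  projectPath: string, branch: string
): Promise<unknown | null> {
  try {
    return JSON.parse(await fs.readFile(metadataFilePath(projectPath, branch), "utf-8"));
  } catch {
    return null; // missing file or malformed JSON
  }
}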