Merge public-gh/master into review/pr-162
This commit is contained in:
@@ -1,5 +1,42 @@
|
||||
# @paperclipai/server
|
||||
|
||||
## 0.3.1
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Stable release preparation for 0.3.1
|
||||
- Updated dependencies
|
||||
- @paperclipai/adapter-utils@0.3.1
|
||||
- @paperclipai/adapter-claude-local@0.3.1
|
||||
- @paperclipai/adapter-codex-local@0.3.1
|
||||
- @paperclipai/adapter-cursor-local@0.3.1
|
||||
- @paperclipai/adapter-gemini-local@0.3.1
|
||||
- @paperclipai/adapter-openclaw-gateway@0.3.1
|
||||
- @paperclipai/adapter-opencode-local@0.3.1
|
||||
- @paperclipai/adapter-pi-local@0.3.1
|
||||
- @paperclipai/db@0.3.1
|
||||
- @paperclipai/shared@0.3.1
|
||||
|
||||
## 0.3.0
|
||||
|
||||
### Minor Changes
|
||||
|
||||
- Stable release preparation for 0.3.0
|
||||
|
||||
### Patch Changes
|
||||
|
||||
- Updated dependencies [6077ae6]
|
||||
- Updated dependencies
|
||||
- @paperclipai/shared@0.3.0
|
||||
- @paperclipai/adapter-utils@0.3.0
|
||||
- @paperclipai/adapter-claude-local@0.3.0
|
||||
- @paperclipai/adapter-codex-local@0.3.0
|
||||
- @paperclipai/adapter-cursor-local@0.3.0
|
||||
- @paperclipai/adapter-openclaw-gateway@0.3.0
|
||||
- @paperclipai/adapter-opencode-local@0.3.0
|
||||
- @paperclipai/adapter-pi-local@0.3.0
|
||||
- @paperclipai/db@0.3.0
|
||||
|
||||
## 0.2.7
|
||||
|
||||
### Patch Changes
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@paperclipai/server",
|
||||
"version": "0.2.7",
|
||||
"version": "0.3.1",
|
||||
"type": "module",
|
||||
"exports": {
|
||||
".": "./src/index.ts"
|
||||
@@ -23,26 +23,33 @@
|
||||
],
|
||||
"scripts": {
|
||||
"dev": "tsx src/index.ts",
|
||||
"dev:watch": "PAPERCLIP_MIGRATION_PROMPT=never tsx watch --ignore ../ui/node_modules --ignore ../ui/.vite --ignore ../ui/dist src/index.ts",
|
||||
"dev:watch": "cross-env PAPERCLIP_MIGRATION_PROMPT=never tsx watch --ignore ../ui/node_modules --ignore ../ui/.vite --ignore ../ui/dist src/index.ts",
|
||||
"prepare:ui-dist": "bash ../scripts/prepare-server-ui-dist.sh",
|
||||
"build": "tsc",
|
||||
"prepack": "pnpm run prepare:ui-dist",
|
||||
"postpack": "rm -rf ui-dist",
|
||||
"clean": "rm -rf dist",
|
||||
"start": "node dist/index.js",
|
||||
"typecheck": "tsc --noEmit"
|
||||
"typecheck": "pnpm --filter @paperclipai/plugin-sdk build && tsc --noEmit"
|
||||
},
|
||||
"dependencies": {
|
||||
"@aws-sdk/client-s3": "^3.888.0",
|
||||
"@paperclipai/adapter-claude-local": "workspace:*",
|
||||
"@paperclipai/adapter-codex-local": "workspace:*",
|
||||
"@paperclipai/adapter-cursor-local": "workspace:*",
|
||||
|
||||
"@paperclipai/adapter-gemini-local": "workspace:*",
|
||||
"@paperclipai/adapter-openclaw-gateway": "workspace:*",
|
||||
"@paperclipai/adapter-opencode-local": "workspace:*",
|
||||
"@paperclipai/adapter-pi-local": "workspace:*",
|
||||
"@paperclipai/adapter-openclaw": "workspace:*",
|
||||
"@paperclipai/adapter-opencode-local": "workspace:*",
|
||||
"hermes-paperclip-adapter": "0.1.1",
|
||||
"@paperclipai/adapter-utils": "workspace:*",
|
||||
"@paperclipai/db": "workspace:*",
|
||||
"@paperclipai/plugin-sdk": "workspace:*",
|
||||
"@paperclipai/shared": "workspace:*",
|
||||
"ajv": "^8.18.0",
|
||||
"ajv-formats": "^3.0.1",
|
||||
"better-auth": "1.4.18",
|
||||
"chokidar": "^4.0.3",
|
||||
"detect-port": "^2.1.0",
|
||||
"dompurify": "^3.3.2",
|
||||
"dotenv": "^17.0.1",
|
||||
@@ -66,6 +73,7 @@
|
||||
"@types/node": "^24.6.0",
|
||||
"@types/supertest": "^6.0.2",
|
||||
"@types/ws": "^8.18.1",
|
||||
"cross-env": "^10.1.0",
|
||||
"supertest": "^7.0.0",
|
||||
"tsx": "^4.19.2",
|
||||
"typescript": "^5.7.3",
|
||||
|
||||
70
server/src/__tests__/activity-routes.test.ts
Normal file
70
server/src/__tests__/activity-routes.test.ts
Normal file
@@ -0,0 +1,70 @@
|
||||
import express from "express";
|
||||
import request from "supertest";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { errorHandler } from "../middleware/index.js";
|
||||
import { activityRoutes } from "../routes/activity.js";
|
||||
|
||||
const mockActivityService = vi.hoisted(() => ({
|
||||
list: vi.fn(),
|
||||
forIssue: vi.fn(),
|
||||
runsForIssue: vi.fn(),
|
||||
issuesForRun: vi.fn(),
|
||||
create: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockIssueService = vi.hoisted(() => ({
|
||||
getById: vi.fn(),
|
||||
getByIdentifier: vi.fn(),
|
||||
}));
|
||||
|
||||
vi.mock("../services/activity.js", () => ({
|
||||
activityService: () => mockActivityService,
|
||||
}));
|
||||
|
||||
vi.mock("../services/index.js", () => ({
|
||||
issueService: () => mockIssueService,
|
||||
}));
|
||||
|
||||
function createApp() {
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
app.use((req, _res, next) => {
|
||||
(req as any).actor = {
|
||||
type: "board",
|
||||
userId: "user-1",
|
||||
companyIds: ["company-1"],
|
||||
source: "session",
|
||||
isInstanceAdmin: false,
|
||||
};
|
||||
next();
|
||||
});
|
||||
app.use("/api", activityRoutes({} as any));
|
||||
app.use(errorHandler);
|
||||
return app;
|
||||
}
|
||||
|
||||
describe("activity routes", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
});
|
||||
|
||||
it("resolves issue identifiers before loading runs", async () => {
|
||||
mockIssueService.getByIdentifier.mockResolvedValue({
|
||||
id: "issue-uuid-1",
|
||||
companyId: "company-1",
|
||||
});
|
||||
mockActivityService.runsForIssue.mockResolvedValue([
|
||||
{
|
||||
runId: "run-1",
|
||||
},
|
||||
]);
|
||||
|
||||
const res = await request(createApp()).get("/api/issues/PAP-475/runs");
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(mockIssueService.getByIdentifier).toHaveBeenCalledWith("PAP-475");
|
||||
expect(mockIssueService.getById).not.toHaveBeenCalled();
|
||||
expect(mockActivityService.runsForIssue).toHaveBeenCalledWith("company-1", "issue-uuid-1");
|
||||
expect(res.body).toEqual([{ runId: "run-1" }]);
|
||||
});
|
||||
});
|
||||
@@ -5,6 +5,10 @@ import {
|
||||
sessionCodec as cursorSessionCodec,
|
||||
isCursorUnknownSessionError,
|
||||
} from "@paperclipai/adapter-cursor-local/server";
|
||||
import {
|
||||
sessionCodec as geminiSessionCodec,
|
||||
isGeminiUnknownSessionError,
|
||||
} from "@paperclipai/adapter-gemini-local/server";
|
||||
import {
|
||||
sessionCodec as opencodeSessionCodec,
|
||||
isOpenCodeUnknownSessionError,
|
||||
@@ -82,6 +86,24 @@ describe("adapter session codecs", () => {
|
||||
});
|
||||
expect(cursorSessionCodec.getDisplayId?.(serialized ?? null)).toBe("cursor-session-1");
|
||||
});
|
||||
|
||||
it("normalizes gemini session params with cwd", () => {
|
||||
const parsed = geminiSessionCodec.deserialize({
|
||||
session_id: "gemini-session-1",
|
||||
cwd: "/tmp/gemini",
|
||||
});
|
||||
expect(parsed).toEqual({
|
||||
sessionId: "gemini-session-1",
|
||||
cwd: "/tmp/gemini",
|
||||
});
|
||||
|
||||
const serialized = geminiSessionCodec.serialize(parsed);
|
||||
expect(serialized).toEqual({
|
||||
sessionId: "gemini-session-1",
|
||||
cwd: "/tmp/gemini",
|
||||
});
|
||||
expect(geminiSessionCodec.getDisplayId?.(serialized ?? null)).toBe("gemini-session-1");
|
||||
});
|
||||
});
|
||||
|
||||
describe("codex resume recovery detection", () => {
|
||||
@@ -146,3 +168,26 @@ describe("cursor resume recovery detection", () => {
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
describe("gemini resume recovery detection", () => {
|
||||
it("detects unknown session errors from gemini output", () => {
|
||||
expect(
|
||||
isGeminiUnknownSessionError(
|
||||
"",
|
||||
"unknown session id abc",
|
||||
),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isGeminiUnknownSessionError(
|
||||
"",
|
||||
"checkpoint latest not found",
|
||||
),
|
||||
).toBe(true);
|
||||
expect(
|
||||
isGeminiUnknownSessionError(
|
||||
"{\"type\":\"result\",\"subtype\":\"success\"}",
|
||||
"",
|
||||
),
|
||||
).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
110
server/src/__tests__/approval-routes-idempotency.test.ts
Normal file
110
server/src/__tests__/approval-routes-idempotency.test.ts
Normal file
@@ -0,0 +1,110 @@
|
||||
import express from "express";
|
||||
import request from "supertest";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { approvalRoutes } from "../routes/approvals.js";
|
||||
import { errorHandler } from "../middleware/index.js";
|
||||
|
||||
const mockApprovalService = vi.hoisted(() => ({
|
||||
list: vi.fn(),
|
||||
getById: vi.fn(),
|
||||
create: vi.fn(),
|
||||
approve: vi.fn(),
|
||||
reject: vi.fn(),
|
||||
requestRevision: vi.fn(),
|
||||
resubmit: vi.fn(),
|
||||
listComments: vi.fn(),
|
||||
addComment: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockHeartbeatService = vi.hoisted(() => ({
|
||||
wakeup: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockIssueApprovalService = vi.hoisted(() => ({
|
||||
listIssuesForApproval: vi.fn(),
|
||||
linkManyForApproval: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockSecretService = vi.hoisted(() => ({
|
||||
normalizeHireApprovalPayloadForPersistence: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockLogActivity = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock("../services/index.js", () => ({
|
||||
approvalService: () => mockApprovalService,
|
||||
heartbeatService: () => mockHeartbeatService,
|
||||
issueApprovalService: () => mockIssueApprovalService,
|
||||
logActivity: mockLogActivity,
|
||||
secretService: () => mockSecretService,
|
||||
}));
|
||||
|
||||
function createApp() {
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
app.use((req, _res, next) => {
|
||||
(req as any).actor = {
|
||||
type: "board",
|
||||
userId: "user-1",
|
||||
companyIds: ["company-1"],
|
||||
source: "session",
|
||||
isInstanceAdmin: false,
|
||||
};
|
||||
next();
|
||||
});
|
||||
app.use("/api", approvalRoutes({} as any));
|
||||
app.use(errorHandler);
|
||||
return app;
|
||||
}
|
||||
|
||||
describe("approval routes idempotent retries", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockHeartbeatService.wakeup.mockResolvedValue({ id: "wake-1" });
|
||||
mockIssueApprovalService.listIssuesForApproval.mockResolvedValue([{ id: "issue-1" }]);
|
||||
mockLogActivity.mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
it("does not emit duplicate approval side effects when approve is already resolved", async () => {
|
||||
mockApprovalService.approve.mockResolvedValue({
|
||||
approval: {
|
||||
id: "approval-1",
|
||||
companyId: "company-1",
|
||||
type: "hire_agent",
|
||||
status: "approved",
|
||||
payload: {},
|
||||
requestedByAgentId: "agent-1",
|
||||
},
|
||||
applied: false,
|
||||
});
|
||||
|
||||
const res = await request(createApp())
|
||||
.post("/api/approvals/approval-1/approve")
|
||||
.send({});
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(mockIssueApprovalService.listIssuesForApproval).not.toHaveBeenCalled();
|
||||
expect(mockHeartbeatService.wakeup).not.toHaveBeenCalled();
|
||||
expect(mockLogActivity).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("does not emit duplicate rejection logs when reject is already resolved", async () => {
|
||||
mockApprovalService.reject.mockResolvedValue({
|
||||
approval: {
|
||||
id: "approval-1",
|
||||
companyId: "company-1",
|
||||
type: "hire_agent",
|
||||
status: "rejected",
|
||||
payload: {},
|
||||
},
|
||||
applied: false,
|
||||
});
|
||||
|
||||
const res = await request(createApp())
|
||||
.post("/api/approvals/approval-1/reject")
|
||||
.send({});
|
||||
|
||||
expect(res.status).toBe(200);
|
||||
expect(mockLogActivity).not.toHaveBeenCalled();
|
||||
});
|
||||
});
|
||||
107
server/src/__tests__/approvals-service.test.ts
Normal file
107
server/src/__tests__/approvals-service.test.ts
Normal file
@@ -0,0 +1,107 @@
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { approvalService } from "../services/approvals.ts";
|
||||
|
||||
const mockAgentService = vi.hoisted(() => ({
|
||||
activatePendingApproval: vi.fn(),
|
||||
create: vi.fn(),
|
||||
terminate: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockNotifyHireApproved = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock("../services/agents.js", () => ({
|
||||
agentService: vi.fn(() => mockAgentService),
|
||||
}));
|
||||
|
||||
vi.mock("../services/hire-hook.js", () => ({
|
||||
notifyHireApproved: mockNotifyHireApproved,
|
||||
}));
|
||||
|
||||
type ApprovalRecord = {
|
||||
id: string;
|
||||
companyId: string;
|
||||
type: string;
|
||||
status: string;
|
||||
payload: Record<string, unknown>;
|
||||
requestedByAgentId: string | null;
|
||||
};
|
||||
|
||||
function createApproval(status: string): ApprovalRecord {
|
||||
return {
|
||||
id: "approval-1",
|
||||
companyId: "company-1",
|
||||
type: "hire_agent",
|
||||
status,
|
||||
payload: { agentId: "agent-1" },
|
||||
requestedByAgentId: "requester-1",
|
||||
};
|
||||
}
|
||||
|
||||
function createDbStub(selectResults: ApprovalRecord[][], updateResults: ApprovalRecord[]) {
|
||||
const pendingSelectResults = [...selectResults];
|
||||
const selectWhere = vi.fn(async () => pendingSelectResults.shift() ?? []);
|
||||
const from = vi.fn(() => ({ where: selectWhere }));
|
||||
const select = vi.fn(() => ({ from }));
|
||||
|
||||
const returning = vi.fn(async () => updateResults);
|
||||
const updateWhere = vi.fn(() => ({ returning }));
|
||||
const set = vi.fn(() => ({ where: updateWhere }));
|
||||
const update = vi.fn(() => ({ set }));
|
||||
|
||||
return {
|
||||
db: { select, update },
|
||||
selectWhere,
|
||||
returning,
|
||||
};
|
||||
}
|
||||
|
||||
describe("approvalService resolution idempotency", () => {
|
||||
beforeEach(() => {
|
||||
vi.clearAllMocks();
|
||||
mockAgentService.activatePendingApproval.mockResolvedValue(undefined);
|
||||
mockAgentService.create.mockResolvedValue({ id: "agent-1" });
|
||||
mockAgentService.terminate.mockResolvedValue(undefined);
|
||||
mockNotifyHireApproved.mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
it("treats repeated approve retries as no-ops after another worker resolves the approval", async () => {
|
||||
const dbStub = createDbStub(
|
||||
[[createApproval("pending")], [createApproval("approved")]],
|
||||
[],
|
||||
);
|
||||
|
||||
const svc = approvalService(dbStub.db as any);
|
||||
const result = await svc.approve("approval-1", "board", "ship it");
|
||||
|
||||
expect(result.applied).toBe(false);
|
||||
expect(result.approval.status).toBe("approved");
|
||||
expect(mockAgentService.activatePendingApproval).not.toHaveBeenCalled();
|
||||
expect(mockNotifyHireApproved).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("treats repeated reject retries as no-ops after another worker resolves the approval", async () => {
|
||||
const dbStub = createDbStub(
|
||||
[[createApproval("pending")], [createApproval("rejected")]],
|
||||
[],
|
||||
);
|
||||
|
||||
const svc = approvalService(dbStub.db as any);
|
||||
const result = await svc.reject("approval-1", "board", "not now");
|
||||
|
||||
expect(result.applied).toBe(false);
|
||||
expect(result.approval.status).toBe("rejected");
|
||||
expect(mockAgentService.terminate).not.toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it("still performs side effects when the resolution update is newly applied", async () => {
|
||||
const approved = createApproval("approved");
|
||||
const dbStub = createDbStub([[createApproval("pending")]], [approved]);
|
||||
|
||||
const svc = approvalService(dbStub.db as any);
|
||||
const result = await svc.approve("approval-1", "board", "ship it");
|
||||
|
||||
expect(result.applied).toBe(true);
|
||||
expect(mockAgentService.activatePendingApproval).toHaveBeenCalledWith("agent-1");
|
||||
expect(mockNotifyHireApproved).toHaveBeenCalledTimes(1);
|
||||
});
|
||||
});
|
||||
97
server/src/__tests__/attachment-types.test.ts
Normal file
97
server/src/__tests__/attachment-types.test.ts
Normal file
@@ -0,0 +1,97 @@
|
||||
import { describe, it, expect } from "vitest";
|
||||
import {
|
||||
parseAllowedTypes,
|
||||
matchesContentType,
|
||||
DEFAULT_ALLOWED_TYPES,
|
||||
} from "../attachment-types.js";
|
||||
|
||||
describe("parseAllowedTypes", () => {
|
||||
it("returns default image types when input is undefined", () => {
|
||||
expect(parseAllowedTypes(undefined)).toEqual([...DEFAULT_ALLOWED_TYPES]);
|
||||
});
|
||||
|
||||
it("returns default image types when input is empty string", () => {
|
||||
expect(parseAllowedTypes("")).toEqual([...DEFAULT_ALLOWED_TYPES]);
|
||||
});
|
||||
|
||||
it("parses comma-separated types", () => {
|
||||
expect(parseAllowedTypes("image/*,application/pdf")).toEqual([
|
||||
"image/*",
|
||||
"application/pdf",
|
||||
]);
|
||||
});
|
||||
|
||||
it("trims whitespace", () => {
|
||||
expect(parseAllowedTypes(" image/png , application/pdf ")).toEqual([
|
||||
"image/png",
|
||||
"application/pdf",
|
||||
]);
|
||||
});
|
||||
|
||||
it("lowercases entries", () => {
|
||||
expect(parseAllowedTypes("Application/PDF")).toEqual(["application/pdf"]);
|
||||
});
|
||||
|
||||
it("filters empty segments", () => {
|
||||
expect(parseAllowedTypes("image/png,,application/pdf,")).toEqual([
|
||||
"image/png",
|
||||
"application/pdf",
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("matchesContentType", () => {
|
||||
it("matches exact types", () => {
|
||||
const patterns = ["application/pdf", "image/png"];
|
||||
expect(matchesContentType("application/pdf", patterns)).toBe(true);
|
||||
expect(matchesContentType("image/png", patterns)).toBe(true);
|
||||
expect(matchesContentType("text/plain", patterns)).toBe(false);
|
||||
});
|
||||
|
||||
it("matches /* wildcard patterns", () => {
|
||||
const patterns = ["image/*"];
|
||||
expect(matchesContentType("image/png", patterns)).toBe(true);
|
||||
expect(matchesContentType("image/jpeg", patterns)).toBe(true);
|
||||
expect(matchesContentType("image/svg+xml", patterns)).toBe(true);
|
||||
expect(matchesContentType("application/pdf", patterns)).toBe(false);
|
||||
});
|
||||
|
||||
it("matches .* wildcard patterns", () => {
|
||||
const patterns = ["application/vnd.openxmlformats-officedocument.*"];
|
||||
expect(
|
||||
matchesContentType(
|
||||
"application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
|
||||
patterns,
|
||||
),
|
||||
).toBe(true);
|
||||
expect(
|
||||
matchesContentType(
|
||||
"application/vnd.openxmlformats-officedocument.wordprocessingml.document",
|
||||
patterns,
|
||||
),
|
||||
).toBe(true);
|
||||
expect(matchesContentType("application/pdf", patterns)).toBe(false);
|
||||
});
|
||||
|
||||
it("is case-insensitive", () => {
|
||||
const patterns = ["application/pdf"];
|
||||
expect(matchesContentType("APPLICATION/PDF", patterns)).toBe(true);
|
||||
expect(matchesContentType("Application/Pdf", patterns)).toBe(true);
|
||||
});
|
||||
|
||||
it("combines exact and wildcard patterns", () => {
|
||||
const patterns = ["image/*", "application/pdf", "text/*"];
|
||||
expect(matchesContentType("image/webp", patterns)).toBe(true);
|
||||
expect(matchesContentType("application/pdf", patterns)).toBe(true);
|
||||
expect(matchesContentType("text/csv", patterns)).toBe(true);
|
||||
expect(matchesContentType("application/zip", patterns)).toBe(false);
|
||||
});
|
||||
|
||||
it("handles plain * as allow-all wildcard", () => {
|
||||
const patterns = ["*"];
|
||||
expect(matchesContentType("image/png", patterns)).toBe(true);
|
||||
expect(matchesContentType("application/pdf", patterns)).toBe(true);
|
||||
expect(matchesContentType("text/plain", patterns)).toBe(true);
|
||||
expect(matchesContentType("application/zip", patterns)).toBe(true);
|
||||
});
|
||||
});
|
||||
@@ -4,6 +4,8 @@ import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { testEnvironment } from "@paperclipai/adapter-codex-local/server";
|
||||
|
||||
const itWindows = process.platform === "win32" ? it : it.skip;
|
||||
|
||||
describe("codex_local environment diagnostics", () => {
|
||||
it("creates a missing working directory when cwd is absolute", async () => {
|
||||
const cwd = path.join(
|
||||
@@ -29,4 +31,45 @@ describe("codex_local environment diagnostics", () => {
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
await fs.rm(path.dirname(cwd), { recursive: true, force: true });
|
||||
});
|
||||
|
||||
itWindows("runs the hello probe when Codex is available via a Windows .cmd wrapper", async () => {
|
||||
const root = path.join(
|
||||
os.tmpdir(),
|
||||
`paperclip-codex-local-probe-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
);
|
||||
const binDir = path.join(root, "bin");
|
||||
const cwd = path.join(root, "workspace");
|
||||
const fakeCodex = path.join(binDir, "codex.cmd");
|
||||
const script = [
|
||||
"@echo off",
|
||||
"echo {\"type\":\"thread.started\",\"thread_id\":\"test-thread\"}",
|
||||
"echo {\"type\":\"item.completed\",\"item\":{\"type\":\"agent_message\",\"text\":\"hello\"}}",
|
||||
"echo {\"type\":\"turn.completed\",\"usage\":{\"input_tokens\":1,\"cached_input_tokens\":0,\"output_tokens\":1}}",
|
||||
"exit /b 0",
|
||||
"",
|
||||
].join("\r\n");
|
||||
|
||||
try {
|
||||
await fs.mkdir(binDir, { recursive: true });
|
||||
await fs.writeFile(fakeCodex, script, "utf8");
|
||||
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-1",
|
||||
adapterType: "codex_local",
|
||||
config: {
|
||||
command: "codex",
|
||||
cwd,
|
||||
env: {
|
||||
OPENAI_API_KEY: "test-key",
|
||||
PATH: `${binDir}${path.delimiter}${process.env.PATH ?? ""}`,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.status).toBe("pass");
|
||||
expect(result.checks.some((check) => check.code === "codex_hello_probe_passed")).toBe(true);
|
||||
} finally {
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
@@ -70,6 +70,7 @@ describe("codex_local ui stdout parser", () => {
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name: "command_execution",
|
||||
toolUseId: "item_2",
|
||||
input: { id: "item_2", command: "/bin/zsh -lc ls" },
|
||||
},
|
||||
]);
|
||||
@@ -106,7 +107,7 @@ describe("codex_local ui stdout parser", () => {
|
||||
item: {
|
||||
id: "item_52",
|
||||
type: "file_change",
|
||||
changes: [{ path: "/home/user/project/ui/src/pages/AgentDetail.tsx", kind: "update" }],
|
||||
changes: [{ path: "/Users/paperclipuser/project/ui/src/pages/AgentDetail.tsx", kind: "update" }],
|
||||
status: "completed",
|
||||
},
|
||||
}),
|
||||
@@ -116,7 +117,7 @@ describe("codex_local ui stdout parser", () => {
|
||||
{
|
||||
kind: "system",
|
||||
ts,
|
||||
text: "file changes: update /home/user/project/ui/src/pages/AgentDetail.tsx",
|
||||
text: "file changes: update /Users/[]/project/ui/src/pages/AgentDetail.tsx",
|
||||
},
|
||||
]);
|
||||
});
|
||||
|
||||
208
server/src/__tests__/codex-local-execute.test.ts
Normal file
208
server/src/__tests__/codex-local-execute.test.ts
Normal file
@@ -0,0 +1,208 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { execute } from "@paperclipai/adapter-codex-local/server";
|
||||
|
||||
async function writeFakeCodexCommand(commandPath: string): Promise<void> {
|
||||
const script = `#!/usr/bin/env node
|
||||
const fs = require("node:fs");
|
||||
|
||||
const capturePath = process.env.PAPERCLIP_TEST_CAPTURE_PATH;
|
||||
const payload = {
|
||||
argv: process.argv.slice(2),
|
||||
prompt: fs.readFileSync(0, "utf8"),
|
||||
codexHome: process.env.CODEX_HOME || null,
|
||||
paperclipEnvKeys: Object.keys(process.env)
|
||||
.filter((key) => key.startsWith("PAPERCLIP_"))
|
||||
.sort(),
|
||||
};
|
||||
if (capturePath) {
|
||||
fs.writeFileSync(capturePath, JSON.stringify(payload), "utf8");
|
||||
}
|
||||
console.log(JSON.stringify({ type: "thread.started", thread_id: "codex-session-1" }));
|
||||
console.log(JSON.stringify({ type: "item.completed", item: { type: "agent_message", text: "hello" } }));
|
||||
console.log(JSON.stringify({ type: "turn.completed", usage: { input_tokens: 1, cached_input_tokens: 0, output_tokens: 1 } }));
|
||||
`;
|
||||
await fs.writeFile(commandPath, script, "utf8");
|
||||
await fs.chmod(commandPath, 0o755);
|
||||
}
|
||||
|
||||
type CapturePayload = {
|
||||
argv: string[];
|
||||
prompt: string;
|
||||
codexHome: string | null;
|
||||
paperclipEnvKeys: string[];
|
||||
};
|
||||
|
||||
describe("codex execute", () => {
|
||||
it("uses a worktree-isolated CODEX_HOME while preserving shared auth and config", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-codex-execute-"));
|
||||
const workspace = path.join(root, "workspace");
|
||||
const commandPath = path.join(root, "codex");
|
||||
const capturePath = path.join(root, "capture.json");
|
||||
const sharedCodexHome = path.join(root, "shared-codex-home");
|
||||
const paperclipHome = path.join(root, "paperclip-home");
|
||||
const isolatedCodexHome = path.join(paperclipHome, "instances", "worktree-1", "codex-home");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(sharedCodexHome, { recursive: true });
|
||||
await fs.writeFile(path.join(sharedCodexHome, "auth.json"), '{"token":"shared"}\n', "utf8");
|
||||
await fs.writeFile(path.join(sharedCodexHome, "config.toml"), 'model = "codex-mini-latest"\n', "utf8");
|
||||
await writeFakeCodexCommand(commandPath);
|
||||
|
||||
const previousHome = process.env.HOME;
|
||||
const previousPaperclipHome = process.env.PAPERCLIP_HOME;
|
||||
const previousPaperclipInstanceId = process.env.PAPERCLIP_INSTANCE_ID;
|
||||
const previousPaperclipInWorktree = process.env.PAPERCLIP_IN_WORKTREE;
|
||||
const previousCodexHome = process.env.CODEX_HOME;
|
||||
process.env.HOME = root;
|
||||
process.env.PAPERCLIP_HOME = paperclipHome;
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "worktree-1";
|
||||
process.env.PAPERCLIP_IN_WORKTREE = "true";
|
||||
process.env.CODEX_HOME = sharedCodexHome;
|
||||
|
||||
try {
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Codex Coder",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: commandPath,
|
||||
cwd: workspace,
|
||||
env: {
|
||||
PAPERCLIP_TEST_CAPTURE_PATH: capturePath,
|
||||
},
|
||||
promptTemplate: "Follow the paperclip heartbeat.",
|
||||
},
|
||||
context: {},
|
||||
authToken: "run-jwt-token",
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.errorMessage).toBeNull();
|
||||
|
||||
const capture = JSON.parse(await fs.readFile(capturePath, "utf8")) as CapturePayload;
|
||||
expect(capture.codexHome).toBe(isolatedCodexHome);
|
||||
expect(capture.argv).toEqual(expect.arrayContaining(["exec", "--json", "-"]));
|
||||
expect(capture.prompt).toContain("Follow the paperclip heartbeat.");
|
||||
expect(capture.paperclipEnvKeys).toEqual(
|
||||
expect.arrayContaining([
|
||||
"PAPERCLIP_AGENT_ID",
|
||||
"PAPERCLIP_API_KEY",
|
||||
"PAPERCLIP_API_URL",
|
||||
"PAPERCLIP_COMPANY_ID",
|
||||
"PAPERCLIP_RUN_ID",
|
||||
]),
|
||||
);
|
||||
|
||||
const isolatedAuth = path.join(isolatedCodexHome, "auth.json");
|
||||
const isolatedConfig = path.join(isolatedCodexHome, "config.toml");
|
||||
const isolatedSkill = path.join(isolatedCodexHome, "skills", "paperclip");
|
||||
|
||||
expect((await fs.lstat(isolatedAuth)).isSymbolicLink()).toBe(true);
|
||||
expect(await fs.realpath(isolatedAuth)).toBe(await fs.realpath(path.join(sharedCodexHome, "auth.json")));
|
||||
expect((await fs.lstat(isolatedConfig)).isFile()).toBe(true);
|
||||
expect(await fs.readFile(isolatedConfig, "utf8")).toBe('model = "codex-mini-latest"\n');
|
||||
expect((await fs.lstat(isolatedSkill)).isSymbolicLink()).toBe(true);
|
||||
} finally {
|
||||
if (previousHome === undefined) delete process.env.HOME;
|
||||
else process.env.HOME = previousHome;
|
||||
if (previousPaperclipHome === undefined) delete process.env.PAPERCLIP_HOME;
|
||||
else process.env.PAPERCLIP_HOME = previousPaperclipHome;
|
||||
if (previousPaperclipInstanceId === undefined) delete process.env.PAPERCLIP_INSTANCE_ID;
|
||||
else process.env.PAPERCLIP_INSTANCE_ID = previousPaperclipInstanceId;
|
||||
if (previousPaperclipInWorktree === undefined) delete process.env.PAPERCLIP_IN_WORKTREE;
|
||||
else process.env.PAPERCLIP_IN_WORKTREE = previousPaperclipInWorktree;
|
||||
if (previousCodexHome === undefined) delete process.env.CODEX_HOME;
|
||||
else process.env.CODEX_HOME = previousCodexHome;
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("respects an explicit CODEX_HOME config override even in worktree mode", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-codex-execute-explicit-"));
|
||||
const workspace = path.join(root, "workspace");
|
||||
const commandPath = path.join(root, "codex");
|
||||
const capturePath = path.join(root, "capture.json");
|
||||
const sharedCodexHome = path.join(root, "shared-codex-home");
|
||||
const explicitCodexHome = path.join(root, "explicit-codex-home");
|
||||
const paperclipHome = path.join(root, "paperclip-home");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await fs.mkdir(sharedCodexHome, { recursive: true });
|
||||
await fs.writeFile(path.join(sharedCodexHome, "auth.json"), '{"token":"shared"}\n', "utf8");
|
||||
await writeFakeCodexCommand(commandPath);
|
||||
|
||||
const previousHome = process.env.HOME;
|
||||
const previousPaperclipHome = process.env.PAPERCLIP_HOME;
|
||||
const previousPaperclipInstanceId = process.env.PAPERCLIP_INSTANCE_ID;
|
||||
const previousPaperclipInWorktree = process.env.PAPERCLIP_IN_WORKTREE;
|
||||
const previousCodexHome = process.env.CODEX_HOME;
|
||||
process.env.HOME = root;
|
||||
process.env.PAPERCLIP_HOME = paperclipHome;
|
||||
process.env.PAPERCLIP_INSTANCE_ID = "worktree-1";
|
||||
process.env.PAPERCLIP_IN_WORKTREE = "true";
|
||||
process.env.CODEX_HOME = sharedCodexHome;
|
||||
|
||||
try {
|
||||
const result = await execute({
|
||||
runId: "run-2",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Codex Coder",
|
||||
adapterType: "codex_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: commandPath,
|
||||
cwd: workspace,
|
||||
env: {
|
||||
PAPERCLIP_TEST_CAPTURE_PATH: capturePath,
|
||||
CODEX_HOME: explicitCodexHome,
|
||||
},
|
||||
promptTemplate: "Follow the paperclip heartbeat.",
|
||||
},
|
||||
context: {},
|
||||
authToken: "run-jwt-token",
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.errorMessage).toBeNull();
|
||||
|
||||
const capture = JSON.parse(await fs.readFile(capturePath, "utf8")) as CapturePayload;
|
||||
expect(capture.codexHome).toBe(explicitCodexHome);
|
||||
await expect(fs.lstat(path.join(paperclipHome, "instances", "worktree-1", "codex-home"))).rejects.toThrow();
|
||||
} finally {
|
||||
if (previousHome === undefined) delete process.env.HOME;
|
||||
else process.env.HOME = previousHome;
|
||||
if (previousPaperclipHome === undefined) delete process.env.PAPERCLIP_HOME;
|
||||
else process.env.PAPERCLIP_HOME = previousPaperclipHome;
|
||||
if (previousPaperclipInstanceId === undefined) delete process.env.PAPERCLIP_INSTANCE_ID;
|
||||
else process.env.PAPERCLIP_INSTANCE_ID = previousPaperclipInstanceId;
|
||||
if (previousPaperclipInWorktree === undefined) delete process.env.PAPERCLIP_IN_WORKTREE;
|
||||
else process.env.PAPERCLIP_IN_WORKTREE = previousPaperclipInWorktree;
|
||||
if (previousCodexHome === undefined) delete process.env.CODEX_HOME;
|
||||
else process.env.CODEX_HOME = previousCodexHome;
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
91
server/src/__tests__/codex-local-skill-injection.test.ts
Normal file
91
server/src/__tests__/codex-local-skill-injection.test.ts
Normal file
@@ -0,0 +1,91 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { ensureCodexSkillsInjected } from "@paperclipai/adapter-codex-local/server";
|
||||
|
||||
async function makeTempDir(prefix: string): Promise<string> {
|
||||
return fs.mkdtemp(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
async function createPaperclipRepoSkill(root: string, skillName: string) {
|
||||
await fs.mkdir(path.join(root, "server"), { recursive: true });
|
||||
await fs.mkdir(path.join(root, "packages", "adapter-utils"), { recursive: true });
|
||||
await fs.mkdir(path.join(root, "skills", skillName), { recursive: true });
|
||||
await fs.writeFile(path.join(root, "pnpm-workspace.yaml"), "packages:\n - packages/*\n", "utf8");
|
||||
await fs.writeFile(path.join(root, "package.json"), '{"name":"paperclip"}\n', "utf8");
|
||||
await fs.writeFile(
|
||||
path.join(root, "skills", skillName, "SKILL.md"),
|
||||
`---\nname: ${skillName}\n---\n`,
|
||||
"utf8",
|
||||
);
|
||||
}
|
||||
|
||||
async function createCustomSkill(root: string, skillName: string) {
|
||||
await fs.mkdir(path.join(root, "custom", skillName), { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(root, "custom", skillName, "SKILL.md"),
|
||||
`---\nname: ${skillName}\n---\n`,
|
||||
"utf8",
|
||||
);
|
||||
}
|
||||
|
||||
describe("codex local adapter skill injection", () => {
|
||||
const cleanupDirs = new Set<string>();
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(Array.from(cleanupDirs).map((dir) => fs.rm(dir, { recursive: true, force: true })));
|
||||
cleanupDirs.clear();
|
||||
});
|
||||
|
||||
it("repairs a Codex Paperclip skill symlink that still points at another live checkout", async () => {
|
||||
const currentRepo = await makeTempDir("paperclip-codex-current-");
|
||||
const oldRepo = await makeTempDir("paperclip-codex-old-");
|
||||
const skillsHome = await makeTempDir("paperclip-codex-home-");
|
||||
cleanupDirs.add(currentRepo);
|
||||
cleanupDirs.add(oldRepo);
|
||||
cleanupDirs.add(skillsHome);
|
||||
|
||||
await createPaperclipRepoSkill(currentRepo, "paperclip");
|
||||
await createPaperclipRepoSkill(oldRepo, "paperclip");
|
||||
await fs.symlink(path.join(oldRepo, "skills", "paperclip"), path.join(skillsHome, "paperclip"));
|
||||
|
||||
const logs: string[] = [];
|
||||
await ensureCodexSkillsInjected(
|
||||
async (_stream, chunk) => {
|
||||
logs.push(chunk);
|
||||
},
|
||||
{
|
||||
skillsHome,
|
||||
skillsEntries: [{ name: "paperclip", source: path.join(currentRepo, "skills", "paperclip") }],
|
||||
},
|
||||
);
|
||||
|
||||
expect(await fs.realpath(path.join(skillsHome, "paperclip"))).toBe(
|
||||
await fs.realpath(path.join(currentRepo, "skills", "paperclip")),
|
||||
);
|
||||
expect(logs.some((line) => line.includes('Repaired Codex skill "paperclip"'))).toBe(true);
|
||||
});
|
||||
|
||||
it("preserves a custom Codex skill symlink outside Paperclip repo checkouts", async () => {
|
||||
const currentRepo = await makeTempDir("paperclip-codex-current-");
|
||||
const customRoot = await makeTempDir("paperclip-codex-custom-");
|
||||
const skillsHome = await makeTempDir("paperclip-codex-home-");
|
||||
cleanupDirs.add(currentRepo);
|
||||
cleanupDirs.add(customRoot);
|
||||
cleanupDirs.add(skillsHome);
|
||||
|
||||
await createPaperclipRepoSkill(currentRepo, "paperclip");
|
||||
await createCustomSkill(customRoot, "paperclip");
|
||||
await fs.symlink(path.join(customRoot, "custom", "paperclip"), path.join(skillsHome, "paperclip"));
|
||||
|
||||
await ensureCodexSkillsInjected(async () => {}, {
|
||||
skillsHome,
|
||||
skillsEntries: [{ name: "paperclip", source: path.join(currentRepo, "skills", "paperclip") }],
|
||||
});
|
||||
|
||||
expect(await fs.realpath(path.join(skillsHome, "paperclip"))).toBe(
|
||||
await fs.realpath(path.join(customRoot, "custom", "paperclip")),
|
||||
);
|
||||
});
|
||||
});
|
||||
@@ -165,6 +165,7 @@ describe("cursor ui stdout parser", () => {
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name: "shellToolCall",
|
||||
toolUseId: "call_shell_1",
|
||||
input: { command: longCommand },
|
||||
},
|
||||
]);
|
||||
@@ -254,7 +255,7 @@ describe("cursor ui stdout parser", () => {
|
||||
}),
|
||||
ts,
|
||||
),
|
||||
).toEqual([{ kind: "tool_call", ts, name: "readToolCall", input: { path: "README.md" } }]);
|
||||
).toEqual([{ kind: "tool_call", ts, name: "readToolCall", toolUseId: "call_1", input: { path: "README.md" } }]);
|
||||
|
||||
expect(
|
||||
parseCursorStdoutLine(
|
||||
|
||||
29
server/src/__tests__/documents.test.ts
Normal file
29
server/src/__tests__/documents.test.ts
Normal file
@@ -0,0 +1,29 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { extractLegacyPlanBody } from "../services/documents.js";
|
||||
|
||||
describe("extractLegacyPlanBody", () => {
|
||||
it("returns null when no plan block exists", () => {
|
||||
expect(extractLegacyPlanBody("hello world")).toBeNull();
|
||||
});
|
||||
|
||||
it("extracts plan body from legacy issue descriptions", () => {
|
||||
expect(
|
||||
extractLegacyPlanBody(`
|
||||
intro
|
||||
|
||||
<plan>
|
||||
|
||||
# Plan
|
||||
|
||||
- one
|
||||
- two
|
||||
|
||||
</plan>
|
||||
`),
|
||||
).toBe("# Plan\n\n- one\n- two");
|
||||
});
|
||||
|
||||
it("ignores empty plan blocks", () => {
|
||||
expect(extractLegacyPlanBody("<plan> </plan>")).toBeNull();
|
||||
});
|
||||
});
|
||||
53
server/src/__tests__/error-handler.test.ts
Normal file
53
server/src/__tests__/error-handler.test.ts
Normal file
@@ -0,0 +1,53 @@
|
||||
import type { NextFunction, Request, Response } from "express";
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import { HttpError } from "../errors.js";
|
||||
import { errorHandler } from "../middleware/error-handler.js";
|
||||
|
||||
function makeReq(): Request {
|
||||
return {
|
||||
method: "GET",
|
||||
originalUrl: "/api/test",
|
||||
body: { a: 1 },
|
||||
params: { id: "123" },
|
||||
query: { q: "x" },
|
||||
} as unknown as Request;
|
||||
}
|
||||
|
||||
function makeRes(): Response {
|
||||
const res = {
|
||||
status: vi.fn(),
|
||||
json: vi.fn(),
|
||||
} as unknown as Response;
|
||||
(res.status as unknown as ReturnType<typeof vi.fn>).mockReturnValue(res);
|
||||
return res;
|
||||
}
|
||||
|
||||
describe("errorHandler", () => {
|
||||
it("attaches the original Error to res.err for 500s", () => {
|
||||
const req = makeReq();
|
||||
const res = makeRes() as any;
|
||||
const next = vi.fn() as unknown as NextFunction;
|
||||
const err = new Error("boom");
|
||||
|
||||
errorHandler(err, req, res, next);
|
||||
|
||||
expect(res.status).toHaveBeenCalledWith(500);
|
||||
expect(res.json).toHaveBeenCalledWith({ error: "Internal server error" });
|
||||
expect(res.err).toBe(err);
|
||||
expect(res.__errorContext?.error?.message).toBe("boom");
|
||||
});
|
||||
|
||||
it("attaches HttpError instances for 500 responses", () => {
|
||||
const req = makeReq();
|
||||
const res = makeRes() as any;
|
||||
const next = vi.fn() as unknown as NextFunction;
|
||||
const err = new HttpError(500, "db exploded");
|
||||
|
||||
errorHandler(err, req, res, next);
|
||||
|
||||
expect(res.status).toHaveBeenCalledWith(500);
|
||||
expect(res.json).toHaveBeenCalledWith({ error: "db exploded" });
|
||||
expect(res.err).toBe(err);
|
||||
expect(res.__errorContext?.error?.message).toBe("db exploded");
|
||||
});
|
||||
});
|
||||
143
server/src/__tests__/execution-workspace-policy.test.ts
Normal file
143
server/src/__tests__/execution-workspace-policy.test.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
buildExecutionWorkspaceAdapterConfig,
|
||||
defaultIssueExecutionWorkspaceSettingsForProject,
|
||||
parseIssueExecutionWorkspaceSettings,
|
||||
parseProjectExecutionWorkspacePolicy,
|
||||
resolveExecutionWorkspaceMode,
|
||||
} from "../services/execution-workspace-policy.ts";
|
||||
|
||||
describe("execution workspace policy helpers", () => {
|
||||
it("defaults new issue settings from enabled project policy", () => {
|
||||
expect(
|
||||
defaultIssueExecutionWorkspaceSettingsForProject({
|
||||
enabled: true,
|
||||
defaultMode: "isolated",
|
||||
}),
|
||||
).toEqual({ mode: "isolated" });
|
||||
expect(
|
||||
defaultIssueExecutionWorkspaceSettingsForProject({
|
||||
enabled: true,
|
||||
defaultMode: "project_primary",
|
||||
}),
|
||||
).toEqual({ mode: "project_primary" });
|
||||
expect(defaultIssueExecutionWorkspaceSettingsForProject(null)).toBeNull();
|
||||
});
|
||||
|
||||
it("prefers explicit issue mode over project policy and legacy overrides", () => {
|
||||
expect(
|
||||
resolveExecutionWorkspaceMode({
|
||||
projectPolicy: { enabled: true, defaultMode: "project_primary" },
|
||||
issueSettings: { mode: "isolated" },
|
||||
legacyUseProjectWorkspace: false,
|
||||
}),
|
||||
).toBe("isolated");
|
||||
});
|
||||
|
||||
it("falls back to project policy before legacy project-workspace compatibility flag", () => {
|
||||
expect(
|
||||
resolveExecutionWorkspaceMode({
|
||||
projectPolicy: { enabled: true, defaultMode: "isolated" },
|
||||
issueSettings: null,
|
||||
legacyUseProjectWorkspace: false,
|
||||
}),
|
||||
).toBe("isolated");
|
||||
expect(
|
||||
resolveExecutionWorkspaceMode({
|
||||
projectPolicy: null,
|
||||
issueSettings: null,
|
||||
legacyUseProjectWorkspace: false,
|
||||
}),
|
||||
).toBe("agent_default");
|
||||
});
|
||||
|
||||
it("applies project policy strategy and runtime defaults when isolation is enabled", () => {
|
||||
const result = buildExecutionWorkspaceAdapterConfig({
|
||||
agentConfig: {
|
||||
workspaceStrategy: { type: "project_primary" },
|
||||
},
|
||||
projectPolicy: {
|
||||
enabled: true,
|
||||
defaultMode: "isolated",
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
baseRef: "origin/main",
|
||||
provisionCommand: "bash ./scripts/provision-worktree.sh",
|
||||
},
|
||||
workspaceRuntime: {
|
||||
services: [{ name: "web", command: "pnpm dev" }],
|
||||
},
|
||||
},
|
||||
issueSettings: null,
|
||||
mode: "isolated",
|
||||
legacyUseProjectWorkspace: null,
|
||||
});
|
||||
|
||||
expect(result.workspaceStrategy).toEqual({
|
||||
type: "git_worktree",
|
||||
baseRef: "origin/main",
|
||||
provisionCommand: "bash ./scripts/provision-worktree.sh",
|
||||
});
|
||||
expect(result.workspaceRuntime).toEqual({
|
||||
services: [{ name: "web", command: "pnpm dev" }],
|
||||
});
|
||||
});
|
||||
|
||||
it("clears managed workspace strategy when issue opts out to project primary or agent default", () => {
|
||||
const baseConfig = {
|
||||
workspaceStrategy: { type: "git_worktree", branchTemplate: "{{issue.identifier}}" },
|
||||
workspaceRuntime: { services: [{ name: "web" }] },
|
||||
};
|
||||
|
||||
expect(
|
||||
buildExecutionWorkspaceAdapterConfig({
|
||||
agentConfig: baseConfig,
|
||||
projectPolicy: { enabled: true, defaultMode: "isolated" },
|
||||
issueSettings: { mode: "project_primary" },
|
||||
mode: "project_primary",
|
||||
legacyUseProjectWorkspace: null,
|
||||
}).workspaceStrategy,
|
||||
).toBeUndefined();
|
||||
|
||||
const agentDefault = buildExecutionWorkspaceAdapterConfig({
|
||||
agentConfig: baseConfig,
|
||||
projectPolicy: null,
|
||||
issueSettings: { mode: "agent_default" },
|
||||
mode: "agent_default",
|
||||
legacyUseProjectWorkspace: null,
|
||||
});
|
||||
expect(agentDefault.workspaceStrategy).toBeUndefined();
|
||||
expect(agentDefault.workspaceRuntime).toBeUndefined();
|
||||
});
|
||||
|
||||
it("parses persisted JSON payloads into typed project and issue workspace settings", () => {
|
||||
expect(
|
||||
parseProjectExecutionWorkspacePolicy({
|
||||
enabled: true,
|
||||
defaultMode: "isolated",
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
worktreeParentDir: ".paperclip/worktrees",
|
||||
provisionCommand: "bash ./scripts/provision-worktree.sh",
|
||||
teardownCommand: "bash ./scripts/teardown-worktree.sh",
|
||||
},
|
||||
}),
|
||||
).toEqual({
|
||||
enabled: true,
|
||||
defaultMode: "isolated",
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
worktreeParentDir: ".paperclip/worktrees",
|
||||
provisionCommand: "bash ./scripts/provision-worktree.sh",
|
||||
teardownCommand: "bash ./scripts/teardown-worktree.sh",
|
||||
},
|
||||
});
|
||||
expect(
|
||||
parseIssueExecutionWorkspaceSettings({
|
||||
mode: "project_primary",
|
||||
}),
|
||||
).toEqual({
|
||||
mode: "project_primary",
|
||||
});
|
||||
});
|
||||
});
|
||||
77
server/src/__tests__/forbidden-tokens.test.ts
Normal file
77
server/src/__tests__/forbidden-tokens.test.ts
Normal file
@@ -0,0 +1,77 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
|
||||
const {
|
||||
resolveDynamicForbiddenTokens,
|
||||
resolveForbiddenTokens,
|
||||
runForbiddenTokenCheck,
|
||||
} = await import("../../../scripts/check-forbidden-tokens.mjs");
|
||||
|
||||
describe("forbidden token check", () => {
|
||||
it("derives username tokens without relying on whoami", () => {
|
||||
const tokens = resolveDynamicForbiddenTokens(
|
||||
{ USER: "paperclip", LOGNAME: "paperclip", USERNAME: "pc" },
|
||||
{
|
||||
userInfo: () => ({ username: "paperclip" }),
|
||||
},
|
||||
);
|
||||
|
||||
expect(tokens).toEqual(["paperclip", "pc"]);
|
||||
});
|
||||
|
||||
it("falls back cleanly when user resolution fails", () => {
|
||||
const tokens = resolveDynamicForbiddenTokens(
|
||||
{},
|
||||
{
|
||||
userInfo: () => {
|
||||
throw new Error("missing user");
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
expect(tokens).toEqual([]);
|
||||
});
|
||||
|
||||
it("merges dynamic and file-based forbidden tokens", async () => {
|
||||
const fs = await import("node:fs");
|
||||
const os = await import("node:os");
|
||||
const path = await import("node:path");
|
||||
|
||||
const tokensFile = path.join(os.tmpdir(), `forbidden-tokens-${Date.now()}.txt`);
|
||||
fs.writeFileSync(tokensFile, "# comment\npaperclip\ncustom-token\n");
|
||||
|
||||
try {
|
||||
const tokens = resolveForbiddenTokens(tokensFile, { USER: "paperclip" }, {
|
||||
userInfo: () => ({ username: "paperclip" }),
|
||||
});
|
||||
|
||||
expect(tokens).toEqual(["paperclip", "custom-token"]);
|
||||
} finally {
|
||||
fs.unlinkSync(tokensFile);
|
||||
}
|
||||
});
|
||||
|
||||
it("reports matches without leaking which token was searched", () => {
|
||||
const exec = vi
|
||||
.fn()
|
||||
.mockReturnValueOnce("server/file.ts:1:found\n")
|
||||
.mockImplementation(() => {
|
||||
throw new Error("not found");
|
||||
});
|
||||
const log = vi.fn();
|
||||
const error = vi.fn();
|
||||
|
||||
const exitCode = runForbiddenTokenCheck({
|
||||
repoRoot: "/repo",
|
||||
tokens: ["paperclip", "custom-token"],
|
||||
exec,
|
||||
log,
|
||||
error,
|
||||
});
|
||||
|
||||
expect(exitCode).toBe(1);
|
||||
expect(exec).toHaveBeenCalledTimes(2);
|
||||
expect(error).toHaveBeenCalledWith("ERROR: Forbidden tokens found in tracked files:\n");
|
||||
expect(error).toHaveBeenCalledWith(" server/file.ts:1:found");
|
||||
expect(error).toHaveBeenCalledWith("\nBuild blocked. Remove the forbidden token(s) before publishing.");
|
||||
});
|
||||
});
|
||||
@@ -0,0 +1,91 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { testEnvironment } from "@paperclipai/adapter-gemini-local/server";
|
||||
|
||||
async function writeFakeGeminiCommand(binDir: string, argsCapturePath: string): Promise<string> {
|
||||
const commandPath = path.join(binDir, "gemini");
|
||||
const script = `#!/usr/bin/env node
|
||||
const fs = require("node:fs");
|
||||
const outPath = process.env.PAPERCLIP_TEST_ARGS_PATH;
|
||||
if (outPath) {
|
||||
fs.writeFileSync(outPath, JSON.stringify(process.argv.slice(2)), "utf8");
|
||||
}
|
||||
console.log(JSON.stringify({
|
||||
type: "assistant",
|
||||
message: { content: [{ type: "output_text", text: "hello" }] },
|
||||
}));
|
||||
console.log(JSON.stringify({
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
result: "hello",
|
||||
}));
|
||||
`;
|
||||
await fs.writeFile(commandPath, script, "utf8");
|
||||
await fs.chmod(commandPath, 0o755);
|
||||
return commandPath;
|
||||
}
|
||||
|
||||
describe("gemini_local environment diagnostics", () => {
|
||||
it("creates a missing working directory when cwd is absolute", async () => {
|
||||
const cwd = path.join(
|
||||
os.tmpdir(),
|
||||
`paperclip-gemini-local-cwd-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
"workspace",
|
||||
);
|
||||
|
||||
await fs.rm(path.dirname(cwd), { recursive: true, force: true });
|
||||
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-1",
|
||||
adapterType: "gemini_local",
|
||||
config: {
|
||||
command: process.execPath,
|
||||
cwd,
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.checks.some((check) => check.code === "gemini_cwd_valid")).toBe(true);
|
||||
expect(result.checks.some((check) => check.level === "error")).toBe(false);
|
||||
const stats = await fs.stat(cwd);
|
||||
expect(stats.isDirectory()).toBe(true);
|
||||
await fs.rm(path.dirname(cwd), { recursive: true, force: true });
|
||||
});
|
||||
|
||||
it("passes model and yolo flags to the hello probe", async () => {
|
||||
const root = path.join(
|
||||
os.tmpdir(),
|
||||
`paperclip-gemini-local-probe-${Date.now()}-${Math.random().toString(16).slice(2)}`,
|
||||
);
|
||||
const binDir = path.join(root, "bin");
|
||||
const cwd = path.join(root, "workspace");
|
||||
const argsCapturePath = path.join(root, "args.json");
|
||||
await fs.mkdir(binDir, { recursive: true });
|
||||
await writeFakeGeminiCommand(binDir, argsCapturePath);
|
||||
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-1",
|
||||
adapterType: "gemini_local",
|
||||
config: {
|
||||
command: "gemini",
|
||||
cwd,
|
||||
model: "gemini-2.5-pro",
|
||||
yolo: true,
|
||||
env: {
|
||||
GEMINI_API_KEY: "test-key",
|
||||
PAPERCLIP_TEST_ARGS_PATH: argsCapturePath,
|
||||
PATH: `${binDir}${path.delimiter}${process.env.PATH ?? ""}`,
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.status).not.toBe("fail");
|
||||
const args = JSON.parse(await fs.readFile(argsCapturePath, "utf8")) as string[];
|
||||
expect(args).toContain("--model");
|
||||
expect(args).toContain("gemini-2.5-pro");
|
||||
expect(args).toContain("--approval-mode");
|
||||
expect(args).toContain("yolo");
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
});
|
||||
});
|
||||
189
server/src/__tests__/gemini-local-adapter.test.ts
Normal file
189
server/src/__tests__/gemini-local-adapter.test.ts
Normal file
@@ -0,0 +1,189 @@
|
||||
import { describe, expect, it, vi } from "vitest";
|
||||
import { isGeminiUnknownSessionError, parseGeminiJsonl } from "@paperclipai/adapter-gemini-local/server";
|
||||
import { parseGeminiStdoutLine } from "@paperclipai/adapter-gemini-local/ui";
|
||||
import { printGeminiStreamEvent } from "@paperclipai/adapter-gemini-local/cli";
|
||||
|
||||
describe("gemini_local parser", () => {
|
||||
it("extracts session, summary, usage, cost, and terminal error message", () => {
|
||||
const stdout = [
|
||||
JSON.stringify({ type: "system", subtype: "init", session_id: "gemini-session-1", model: "gemini-2.5-pro" }),
|
||||
JSON.stringify({
|
||||
type: "assistant",
|
||||
message: {
|
||||
content: [{ type: "output_text", text: "hello" }],
|
||||
},
|
||||
}),
|
||||
JSON.stringify({
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
session_id: "gemini-session-1",
|
||||
usage: {
|
||||
promptTokenCount: 12,
|
||||
cachedContentTokenCount: 3,
|
||||
candidatesTokenCount: 7,
|
||||
},
|
||||
total_cost_usd: 0.00123,
|
||||
result: "done",
|
||||
}),
|
||||
JSON.stringify({ type: "error", message: "model access denied" }),
|
||||
].join("\n");
|
||||
|
||||
const parsed = parseGeminiJsonl(stdout);
|
||||
expect(parsed.sessionId).toBe("gemini-session-1");
|
||||
expect(parsed.summary).toBe("hello");
|
||||
expect(parsed.usage).toEqual({
|
||||
inputTokens: 12,
|
||||
cachedInputTokens: 3,
|
||||
outputTokens: 7,
|
||||
});
|
||||
expect(parsed.costUsd).toBeCloseTo(0.00123, 6);
|
||||
expect(parsed.errorMessage).toBe("model access denied");
|
||||
});
|
||||
|
||||
it("extracts structured questions", () => {
|
||||
const stdout = [
|
||||
JSON.stringify({
|
||||
type: "assistant",
|
||||
message: {
|
||||
content: [
|
||||
{ type: "output_text", text: "I have a question." },
|
||||
{
|
||||
type: "question",
|
||||
prompt: "Which model?",
|
||||
choices: [
|
||||
{ key: "pro", label: "Gemini Pro", description: "Better" },
|
||||
{ key: "flash", label: "Gemini Flash" },
|
||||
],
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
].join("\n");
|
||||
|
||||
const parsed = parseGeminiJsonl(stdout);
|
||||
expect(parsed.summary).toBe("I have a question.");
|
||||
expect(parsed.question).toEqual({
|
||||
prompt: "Which model?",
|
||||
choices: [
|
||||
{ key: "pro", label: "Gemini Pro", description: "Better" },
|
||||
{ key: "flash", label: "Gemini Flash", description: undefined },
|
||||
],
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("gemini_local stale session detection", () => {
|
||||
it("treats missing session messages as an unknown session error", () => {
|
||||
expect(isGeminiUnknownSessionError("", "unknown session id abc")).toBe(true);
|
||||
expect(isGeminiUnknownSessionError("", "checkpoint latest not found")).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
describe("gemini_local ui stdout parser", () => {
|
||||
it("parses assistant, thinking, and result events", () => {
|
||||
const ts = "2026-03-08T00:00:00.000Z";
|
||||
|
||||
expect(
|
||||
parseGeminiStdoutLine(
|
||||
JSON.stringify({
|
||||
type: "assistant",
|
||||
message: {
|
||||
content: [
|
||||
{ type: "output_text", text: "I checked the repo." },
|
||||
{ type: "thinking", text: "Reviewing adapter registry" },
|
||||
{ type: "tool_call", name: "shell", input: { command: "ls -1" } },
|
||||
{ type: "tool_result", tool_use_id: "tool_1", output: "AGENTS.md\n", status: "ok" },
|
||||
],
|
||||
},
|
||||
}),
|
||||
ts,
|
||||
),
|
||||
).toEqual([
|
||||
{ kind: "assistant", ts, text: "I checked the repo." },
|
||||
{ kind: "thinking", ts, text: "Reviewing adapter registry" },
|
||||
{ kind: "tool_call", ts, name: "shell", input: { command: "ls -1" } },
|
||||
{ kind: "tool_result", ts, toolUseId: "tool_1", content: "AGENTS.md\n", isError: false },
|
||||
]);
|
||||
|
||||
expect(
|
||||
parseGeminiStdoutLine(
|
||||
JSON.stringify({
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
result: "Done",
|
||||
usage: {
|
||||
promptTokenCount: 10,
|
||||
candidatesTokenCount: 5,
|
||||
cachedContentTokenCount: 2,
|
||||
},
|
||||
total_cost_usd: 0.00042,
|
||||
is_error: false,
|
||||
}),
|
||||
ts,
|
||||
),
|
||||
).toEqual([
|
||||
{
|
||||
kind: "result",
|
||||
ts,
|
||||
text: "Done",
|
||||
inputTokens: 10,
|
||||
outputTokens: 5,
|
||||
cachedTokens: 2,
|
||||
costUsd: 0.00042,
|
||||
subtype: "success",
|
||||
isError: false,
|
||||
errors: [],
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
function stripAnsi(value: string): string {
|
||||
return value.replace(/\x1b\[[0-9;]*m/g, "");
|
||||
}
|
||||
|
||||
describe("gemini_local cli formatter", () => {
|
||||
it("prints init, assistant, result, and error events", () => {
|
||||
const spy = vi.spyOn(console, "log").mockImplementation(() => {});
|
||||
let joined = "";
|
||||
|
||||
try {
|
||||
printGeminiStreamEvent(
|
||||
JSON.stringify({ type: "system", subtype: "init", session_id: "gemini-session-1", model: "gemini-2.5-pro" }),
|
||||
false,
|
||||
);
|
||||
printGeminiStreamEvent(
|
||||
JSON.stringify({
|
||||
type: "assistant",
|
||||
message: { content: [{ type: "output_text", text: "hello" }] },
|
||||
}),
|
||||
false,
|
||||
);
|
||||
printGeminiStreamEvent(
|
||||
JSON.stringify({
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
usage: {
|
||||
promptTokenCount: 10,
|
||||
candidatesTokenCount: 5,
|
||||
cachedContentTokenCount: 2,
|
||||
},
|
||||
total_cost_usd: 0.00042,
|
||||
}),
|
||||
false,
|
||||
);
|
||||
printGeminiStreamEvent(
|
||||
JSON.stringify({ type: "error", message: "boom" }),
|
||||
false,
|
||||
);
|
||||
joined = spy.mock.calls.map((call) => stripAnsi(call.join(" "))).join("\n");
|
||||
} finally {
|
||||
spy.mockRestore();
|
||||
}
|
||||
|
||||
expect(joined).toContain("Gemini init");
|
||||
expect(joined).toContain("assistant: hello");
|
||||
expect(joined).toContain("tokens: in=10 out=5 cached=2 cost=$0.000420");
|
||||
expect(joined).toContain("error: boom");
|
||||
});
|
||||
});
|
||||
168
server/src/__tests__/gemini-local-execute.test.ts
Normal file
168
server/src/__tests__/gemini-local-execute.test.ts
Normal file
@@ -0,0 +1,168 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { execute } from "@paperclipai/adapter-gemini-local/server";
|
||||
|
||||
async function writeFakeGeminiCommand(commandPath: string): Promise<void> {
|
||||
const script = `#!/usr/bin/env node
|
||||
const fs = require("node:fs");
|
||||
|
||||
const capturePath = process.env.PAPERCLIP_TEST_CAPTURE_PATH;
|
||||
const payload = {
|
||||
argv: process.argv.slice(2),
|
||||
paperclipEnvKeys: Object.keys(process.env)
|
||||
.filter((key) => key.startsWith("PAPERCLIP_"))
|
||||
.sort(),
|
||||
};
|
||||
if (capturePath) {
|
||||
fs.writeFileSync(capturePath, JSON.stringify(payload), "utf8");
|
||||
}
|
||||
console.log(JSON.stringify({
|
||||
type: "system",
|
||||
subtype: "init",
|
||||
session_id: "gemini-session-1",
|
||||
model: "gemini-2.5-pro",
|
||||
}));
|
||||
console.log(JSON.stringify({
|
||||
type: "assistant",
|
||||
message: { content: [{ type: "output_text", text: "hello" }] },
|
||||
}));
|
||||
console.log(JSON.stringify({
|
||||
type: "result",
|
||||
subtype: "success",
|
||||
session_id: "gemini-session-1",
|
||||
result: "ok",
|
||||
}));
|
||||
`;
|
||||
await fs.writeFile(commandPath, script, "utf8");
|
||||
await fs.chmod(commandPath, 0o755);
|
||||
}
|
||||
|
||||
type CapturePayload = {
|
||||
argv: string[];
|
||||
paperclipEnvKeys: string[];
|
||||
};
|
||||
|
||||
describe("gemini execute", () => {
|
||||
it("passes prompt as final argument and injects paperclip env vars", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-gemini-execute-"));
|
||||
const workspace = path.join(root, "workspace");
|
||||
const commandPath = path.join(root, "gemini");
|
||||
const capturePath = path.join(root, "capture.json");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await writeFakeGeminiCommand(commandPath);
|
||||
|
||||
const previousHome = process.env.HOME;
|
||||
process.env.HOME = root;
|
||||
|
||||
let invocationPrompt = "";
|
||||
try {
|
||||
const result = await execute({
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
name: "Gemini Coder",
|
||||
adapterType: "gemini_local",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config: {
|
||||
command: commandPath,
|
||||
cwd: workspace,
|
||||
model: "gemini-2.5-pro",
|
||||
env: {
|
||||
PAPERCLIP_TEST_CAPTURE_PATH: capturePath,
|
||||
},
|
||||
promptTemplate: "Follow the paperclip heartbeat.",
|
||||
},
|
||||
context: {},
|
||||
authToken: "run-jwt-token",
|
||||
onLog: async () => {},
|
||||
onMeta: async (meta) => {
|
||||
invocationPrompt = meta.prompt ?? "";
|
||||
},
|
||||
});
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.errorMessage).toBeNull();
|
||||
|
||||
const capture = JSON.parse(await fs.readFile(capturePath, "utf8")) as CapturePayload;
|
||||
expect(capture.argv).toContain("--output-format");
|
||||
expect(capture.argv).toContain("stream-json");
|
||||
expect(capture.argv).toContain("--approval-mode");
|
||||
expect(capture.argv).toContain("yolo");
|
||||
expect(capture.argv.at(-1)).toContain("Follow the paperclip heartbeat.");
|
||||
expect(capture.argv.at(-1)).toContain("Paperclip runtime note:");
|
||||
expect(capture.paperclipEnvKeys).toEqual(
|
||||
expect.arrayContaining([
|
||||
"PAPERCLIP_AGENT_ID",
|
||||
"PAPERCLIP_API_KEY",
|
||||
"PAPERCLIP_API_URL",
|
||||
"PAPERCLIP_COMPANY_ID",
|
||||
"PAPERCLIP_RUN_ID",
|
||||
]),
|
||||
);
|
||||
expect(invocationPrompt).toContain("Paperclip runtime note:");
|
||||
expect(invocationPrompt).toContain("PAPERCLIP_API_URL");
|
||||
expect(invocationPrompt).toContain("Paperclip API access note:");
|
||||
expect(invocationPrompt).toContain("run_shell_command");
|
||||
expect(result.question).toBeNull();
|
||||
} finally {
|
||||
if (previousHome === undefined) {
|
||||
delete process.env.HOME;
|
||||
} else {
|
||||
process.env.HOME = previousHome;
|
||||
}
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
it("always passes --approval-mode yolo", async () => {
|
||||
const root = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-gemini-yolo-"));
|
||||
const workspace = path.join(root, "workspace");
|
||||
const commandPath = path.join(root, "gemini");
|
||||
const capturePath = path.join(root, "capture.json");
|
||||
await fs.mkdir(workspace, { recursive: true });
|
||||
await writeFakeGeminiCommand(commandPath);
|
||||
|
||||
const previousHome = process.env.HOME;
|
||||
process.env.HOME = root;
|
||||
|
||||
try {
|
||||
await execute({
|
||||
runId: "run-yolo",
|
||||
agent: { id: "a1", companyId: "c1", name: "G", adapterType: "gemini_local", adapterConfig: {} },
|
||||
runtime: { sessionId: null, sessionParams: null, sessionDisplayId: null, taskKey: null },
|
||||
config: {
|
||||
command: commandPath,
|
||||
cwd: workspace,
|
||||
env: { PAPERCLIP_TEST_CAPTURE_PATH: capturePath },
|
||||
},
|
||||
context: {},
|
||||
authToken: "t",
|
||||
onLog: async () => {},
|
||||
});
|
||||
|
||||
const capture = JSON.parse(await fs.readFile(capturePath, "utf8")) as CapturePayload;
|
||||
expect(capture.argv).toContain("--approval-mode");
|
||||
expect(capture.argv).toContain("yolo");
|
||||
expect(capture.argv).not.toContain("--policy");
|
||||
expect(capture.argv).not.toContain("--allow-all");
|
||||
expect(capture.argv).not.toContain("--allow-read");
|
||||
} finally {
|
||||
if (previousHome === undefined) {
|
||||
delete process.env.HOME;
|
||||
} else {
|
||||
process.env.HOME = previousHome;
|
||||
}
|
||||
await fs.rm(root, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
});
|
||||
33
server/src/__tests__/heartbeat-run-summary.test.ts
Normal file
33
server/src/__tests__/heartbeat-run-summary.test.ts
Normal file
@@ -0,0 +1,33 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { summarizeHeartbeatRunResultJson } from "../services/heartbeat-run-summary.js";
|
||||
|
||||
describe("summarizeHeartbeatRunResultJson", () => {
|
||||
it("truncates text fields and preserves cost aliases", () => {
|
||||
const summary = summarizeHeartbeatRunResultJson({
|
||||
summary: "a".repeat(600),
|
||||
result: "ok",
|
||||
message: "done",
|
||||
error: "failed",
|
||||
total_cost_usd: 1.23,
|
||||
cost_usd: 0.45,
|
||||
costUsd: 0.67,
|
||||
nested: { ignored: true },
|
||||
});
|
||||
|
||||
expect(summary).toEqual({
|
||||
summary: "a".repeat(500),
|
||||
result: "ok",
|
||||
message: "done",
|
||||
error: "failed",
|
||||
total_cost_usd: 1.23,
|
||||
cost_usd: 0.45,
|
||||
costUsd: 0.67,
|
||||
});
|
||||
});
|
||||
|
||||
it("returns null for non-object and irrelevant payloads", () => {
|
||||
expect(summarizeHeartbeatRunResultJson(null)).toBeNull();
|
||||
expect(summarizeHeartbeatRunResultJson(["nope"] as unknown as Record<string, unknown>)).toBeNull();
|
||||
expect(summarizeHeartbeatRunResultJson({ nested: { only: "ignored" } })).toBeNull();
|
||||
});
|
||||
});
|
||||
@@ -93,16 +93,26 @@ describe("shouldResetTaskSessionForWake", () => {
|
||||
expect(shouldResetTaskSessionForWake({ wakeReason: "issue_assigned" })).toBe(true);
|
||||
});
|
||||
|
||||
it("resets session context on timer heartbeats", () => {
|
||||
expect(shouldResetTaskSessionForWake({ wakeSource: "timer" })).toBe(true);
|
||||
it("preserves session context on timer heartbeats", () => {
|
||||
expect(shouldResetTaskSessionForWake({ wakeSource: "timer" })).toBe(false);
|
||||
});
|
||||
|
||||
it("resets session context on manual on-demand invokes", () => {
|
||||
it("preserves session context on manual on-demand invokes by default", () => {
|
||||
expect(
|
||||
shouldResetTaskSessionForWake({
|
||||
wakeSource: "on_demand",
|
||||
wakeTriggerDetail: "manual",
|
||||
}),
|
||||
).toBe(false);
|
||||
});
|
||||
|
||||
it("resets session context when a fresh session is explicitly requested", () => {
|
||||
expect(
|
||||
shouldResetTaskSessionForWake({
|
||||
wakeSource: "on_demand",
|
||||
wakeTriggerDetail: "manual",
|
||||
forceFreshSession: true,
|
||||
}),
|
||||
).toBe(true);
|
||||
});
|
||||
|
||||
|
||||
@@ -40,7 +40,7 @@ afterEach(() => {
|
||||
describe("notifyHireApproved", () => {
|
||||
it("writes success activity when adapter hook returns ok", async () => {
|
||||
vi.mocked(findServerAdapter).mockReturnValue({
|
||||
type: "openclaw",
|
||||
type: "openclaw_gateway",
|
||||
onHireApproved: vi.fn().mockResolvedValue({ ok: true }),
|
||||
} as any);
|
||||
|
||||
@@ -48,7 +48,7 @@ describe("notifyHireApproved", () => {
|
||||
id: "a1",
|
||||
companyId: "c1",
|
||||
name: "OpenClaw Agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
});
|
||||
|
||||
await expect(
|
||||
@@ -65,7 +65,7 @@ describe("notifyHireApproved", () => {
|
||||
expect.objectContaining({
|
||||
action: "hire_hook.succeeded",
|
||||
entityId: "a1",
|
||||
details: expect.objectContaining({ source: "approval", sourceId: "ap1", adapterType: "openclaw" }),
|
||||
details: expect.objectContaining({ source: "approval", sourceId: "ap1", adapterType: "openclaw_gateway" }),
|
||||
}),
|
||||
);
|
||||
});
|
||||
@@ -116,7 +116,7 @@ describe("notifyHireApproved", () => {
|
||||
|
||||
it("logs failed result when adapter onHireApproved returns ok=false", async () => {
|
||||
vi.mocked(findServerAdapter).mockReturnValue({
|
||||
type: "openclaw",
|
||||
type: "openclaw_gateway",
|
||||
onHireApproved: vi.fn().mockResolvedValue({ ok: false, error: "HTTP 500", detail: { status: 500 } }),
|
||||
} as any);
|
||||
|
||||
@@ -124,7 +124,7 @@ describe("notifyHireApproved", () => {
|
||||
id: "a1",
|
||||
companyId: "c1",
|
||||
name: "OpenClaw Agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
});
|
||||
|
||||
await expect(
|
||||
@@ -148,7 +148,7 @@ describe("notifyHireApproved", () => {
|
||||
|
||||
it("does not throw when adapter onHireApproved throws (non-fatal)", async () => {
|
||||
vi.mocked(findServerAdapter).mockReturnValue({
|
||||
type: "openclaw",
|
||||
type: "openclaw_gateway",
|
||||
onHireApproved: vi.fn().mockRejectedValue(new Error("Network error")),
|
||||
} as any);
|
||||
|
||||
@@ -156,7 +156,7 @@ describe("notifyHireApproved", () => {
|
||||
id: "a1",
|
||||
companyId: "c1",
|
||||
name: "OpenClaw Agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
});
|
||||
|
||||
await expect(
|
||||
|
||||
119
server/src/__tests__/invite-accept-gateway-defaults.test.ts
Normal file
119
server/src/__tests__/invite-accept-gateway-defaults.test.ts
Normal file
@@ -0,0 +1,119 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
buildJoinDefaultsPayloadForAccept,
|
||||
normalizeAgentDefaultsForJoin,
|
||||
} from "../routes/access.js";
|
||||
|
||||
describe("buildJoinDefaultsPayloadForAccept (openclaw_gateway)", () => {
|
||||
it("leaves non-gateway payloads unchanged", () => {
|
||||
const defaultsPayload = { command: "echo hello" };
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "process",
|
||||
defaultsPayload,
|
||||
inboundOpenClawAuthHeader: "ignored-token",
|
||||
});
|
||||
|
||||
expect(result).toEqual(defaultsPayload);
|
||||
});
|
||||
|
||||
it("normalizes wrapped x-openclaw-token header", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw_gateway",
|
||||
defaultsPayload: {
|
||||
url: "ws://127.0.0.1:18789",
|
||||
headers: {
|
||||
"x-openclaw-token": {
|
||||
value: "gateway-token-1234567890",
|
||||
},
|
||||
},
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
url: "ws://127.0.0.1:18789",
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token-1234567890",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts inbound x-openclaw-token for gateway joins", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw_gateway",
|
||||
defaultsPayload: {
|
||||
url: "ws://127.0.0.1:18789",
|
||||
},
|
||||
inboundOpenClawTokenHeader: "gateway-token-1234567890",
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token-1234567890",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("derives x-openclaw-token from authorization header", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw_gateway",
|
||||
defaultsPayload: {
|
||||
url: "ws://127.0.0.1:18789",
|
||||
headers: {
|
||||
authorization: "Bearer gateway-token-1234567890",
|
||||
},
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
authorization: "Bearer gateway-token-1234567890",
|
||||
"x-openclaw-token": "gateway-token-1234567890",
|
||||
},
|
||||
});
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeAgentDefaultsForJoin (openclaw_gateway)", () => {
|
||||
it("generates persistent device key when device auth is enabled", () => {
|
||||
const normalized = normalizeAgentDefaultsForJoin({
|
||||
adapterType: "openclaw_gateway",
|
||||
defaultsPayload: {
|
||||
url: "ws://127.0.0.1:18789",
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token-1234567890",
|
||||
},
|
||||
disableDeviceAuth: false,
|
||||
},
|
||||
deploymentMode: "authenticated",
|
||||
deploymentExposure: "private",
|
||||
bindHost: "127.0.0.1",
|
||||
allowedHostnames: [],
|
||||
});
|
||||
|
||||
expect(normalized.fatalErrors).toEqual([]);
|
||||
expect(normalized.normalized?.disableDeviceAuth).toBe(false);
|
||||
expect(typeof normalized.normalized?.devicePrivateKeyPem).toBe("string");
|
||||
expect((normalized.normalized?.devicePrivateKeyPem as string).length).toBeGreaterThan(64);
|
||||
});
|
||||
|
||||
it("does not generate device key when disableDeviceAuth=true", () => {
|
||||
const normalized = normalizeAgentDefaultsForJoin({
|
||||
adapterType: "openclaw_gateway",
|
||||
defaultsPayload: {
|
||||
url: "ws://127.0.0.1:18789",
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token-1234567890",
|
||||
},
|
||||
disableDeviceAuth: true,
|
||||
},
|
||||
deploymentMode: "authenticated",
|
||||
deploymentExposure: "private",
|
||||
bindHost: "127.0.0.1",
|
||||
allowedHostnames: [],
|
||||
});
|
||||
|
||||
expect(normalized.fatalErrors).toEqual([]);
|
||||
expect(normalized.normalized?.disableDeviceAuth).toBe(true);
|
||||
expect(normalized.normalized?.devicePrivateKeyPem).toBeUndefined();
|
||||
});
|
||||
});
|
||||
@@ -1,211 +0,0 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { buildJoinDefaultsPayloadForAccept } from "../routes/access.js";
|
||||
|
||||
describe("buildJoinDefaultsPayloadForAccept", () => {
|
||||
it("maps OpenClaw compatibility fields into agent defaults", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: null,
|
||||
responsesWebhookUrl: "http://localhost:18789/v1/responses",
|
||||
paperclipApiUrl: "http://host.docker.internal:3100",
|
||||
inboundOpenClawAuthHeader: "gateway-token",
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
url: "http://localhost:18789/v1/responses",
|
||||
paperclipApiUrl: "http://host.docker.internal:3100",
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
headers: {
|
||||
"x-openclaw-auth": "gateway-token",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("does not overwrite explicit OpenClaw endpoint defaults when already provided", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
url: "https://example.com/v1/responses",
|
||||
method: "POST",
|
||||
headers: {
|
||||
"x-openclaw-auth": "existing-token",
|
||||
},
|
||||
paperclipApiUrl: "https://paperclip.example.com",
|
||||
},
|
||||
responsesWebhookUrl: "https://legacy.example.com/v1/responses",
|
||||
responsesWebhookMethod: "PUT",
|
||||
paperclipApiUrl: "https://legacy-paperclip.example.com",
|
||||
inboundOpenClawAuthHeader: "legacy-token",
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
url: "https://example.com/v1/responses",
|
||||
method: "POST",
|
||||
paperclipApiUrl: "https://paperclip.example.com",
|
||||
webhookAuthHeader: "Bearer existing-token",
|
||||
headers: {
|
||||
"x-openclaw-auth": "existing-token",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("preserves explicit webhookAuthHeader when configured", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
url: "https://example.com/v1/responses",
|
||||
webhookAuthHeader: "Bearer explicit-token",
|
||||
headers: {
|
||||
"x-openclaw-auth": "existing-token",
|
||||
},
|
||||
},
|
||||
inboundOpenClawAuthHeader: "legacy-token",
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
webhookAuthHeader: "Bearer explicit-token",
|
||||
headers: {
|
||||
"x-openclaw-auth": "existing-token",
|
||||
},
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts auth from agentDefaultsPayload.headers.x-openclaw-auth", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
url: "http://127.0.0.1:18789/v1/responses",
|
||||
method: "POST",
|
||||
headers: {
|
||||
"x-openclaw-auth": "gateway-token",
|
||||
},
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-auth": "gateway-token",
|
||||
},
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts auth from agentDefaultsPayload.headers.x-openclaw-token", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
url: "http://127.0.0.1:18789/hooks/agent",
|
||||
method: "POST",
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token",
|
||||
},
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token",
|
||||
},
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts inbound x-openclaw-token compatibility header", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: null,
|
||||
inboundOpenClawTokenHeader: "gateway-token",
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token",
|
||||
},
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts wrapped auth values in headers for compatibility", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
headers: {
|
||||
"x-openclaw-auth": {
|
||||
value: "gateway-token",
|
||||
},
|
||||
},
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-auth": "gateway-token",
|
||||
},
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts auth headers provided as tuple entries", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
headers: [["x-openclaw-auth", "gateway-token"]],
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-auth": "gateway-token",
|
||||
},
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts auth headers provided as name/value entries", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
headers: [{ name: "x-openclaw-auth", value: { authToken: "gateway-token" } }],
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-auth": "gateway-token",
|
||||
},
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("accepts auth headers wrapped in a single unknown key", () => {
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
defaultsPayload: {
|
||||
headers: {
|
||||
"x-openclaw-auth": {
|
||||
gatewayToken: "gateway-token",
|
||||
},
|
||||
},
|
||||
},
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(result).toMatchObject({
|
||||
headers: {
|
||||
"x-openclaw-auth": "gateway-token",
|
||||
},
|
||||
webhookAuthHeader: "Bearer gateway-token",
|
||||
});
|
||||
});
|
||||
|
||||
it("leaves non-openclaw payloads unchanged", () => {
|
||||
const defaultsPayload = { command: "echo hello" };
|
||||
const result = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "process",
|
||||
defaultsPayload,
|
||||
responsesWebhookUrl: "https://ignored.example.com",
|
||||
inboundOpenClawAuthHeader: "ignored-token",
|
||||
});
|
||||
|
||||
expect(result).toEqual(defaultsPayload);
|
||||
});
|
||||
});
|
||||
@@ -1,63 +1,55 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
buildJoinDefaultsPayloadForAccept,
|
||||
canReplayOpenClawInviteAccept,
|
||||
canReplayOpenClawGatewayInviteAccept,
|
||||
mergeJoinDefaultsPayloadForReplay,
|
||||
} from "../routes/access.js";
|
||||
|
||||
describe("canReplayOpenClawInviteAccept", () => {
|
||||
it("allows replay only for openclaw agent joins in pending or approved state", () => {
|
||||
describe("canReplayOpenClawGatewayInviteAccept", () => {
|
||||
it("allows replay only for openclaw_gateway agent joins in pending or approved state", () => {
|
||||
expect(
|
||||
canReplayOpenClawInviteAccept({
|
||||
canReplayOpenClawGatewayInviteAccept({
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
existingJoinRequest: {
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
status: "pending_approval",
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
canReplayOpenClawInviteAccept({
|
||||
canReplayOpenClawGatewayInviteAccept({
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
existingJoinRequest: {
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
status: "approved",
|
||||
},
|
||||
}),
|
||||
).toBe(true);
|
||||
|
||||
expect(
|
||||
canReplayOpenClawInviteAccept({
|
||||
canReplayOpenClawGatewayInviteAccept({
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
existingJoinRequest: {
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
status: "rejected",
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
|
||||
expect(
|
||||
canReplayOpenClawInviteAccept({
|
||||
canReplayOpenClawGatewayInviteAccept({
|
||||
requestType: "human",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
existingJoinRequest: {
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
status: "pending_approval",
|
||||
},
|
||||
}),
|
||||
).toBe(false);
|
||||
expect(
|
||||
canReplayOpenClawInviteAccept({
|
||||
requestType: "agent",
|
||||
adapterType: "process",
|
||||
existingJoinRequest: {
|
||||
requestType: "agent",
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
status: "pending_approval",
|
||||
},
|
||||
}),
|
||||
@@ -66,36 +58,34 @@ describe("canReplayOpenClawInviteAccept", () => {
|
||||
});
|
||||
|
||||
describe("mergeJoinDefaultsPayloadForReplay", () => {
|
||||
it("merges replay payloads and preserves existing fields while allowing auth/header overrides", () => {
|
||||
it("merges replay payloads and allows gateway token override", () => {
|
||||
const merged = mergeJoinDefaultsPayloadForReplay(
|
||||
{
|
||||
url: "https://old.example/v1/responses",
|
||||
method: "POST",
|
||||
url: "ws://old.example:18789",
|
||||
paperclipApiUrl: "http://host.docker.internal:3100",
|
||||
headers: {
|
||||
"x-openclaw-auth": "old-token",
|
||||
"x-openclaw-token": "old-token-1234567890",
|
||||
"x-custom": "keep-me",
|
||||
},
|
||||
},
|
||||
{
|
||||
paperclipApiUrl: "https://paperclip.example.com",
|
||||
headers: {
|
||||
"x-openclaw-auth": "new-token",
|
||||
"x-openclaw-token": "new-token-1234567890",
|
||||
},
|
||||
},
|
||||
);
|
||||
|
||||
const normalized = buildJoinDefaultsPayloadForAccept({
|
||||
adapterType: "openclaw",
|
||||
adapterType: "openclaw_gateway",
|
||||
defaultsPayload: merged,
|
||||
inboundOpenClawAuthHeader: null,
|
||||
}) as Record<string, unknown>;
|
||||
|
||||
expect(normalized.url).toBe("https://old.example/v1/responses");
|
||||
expect(normalized.url).toBe("ws://old.example:18789");
|
||||
expect(normalized.paperclipApiUrl).toBe("https://paperclip.example.com");
|
||||
expect(normalized.webhookAuthHeader).toBe("Bearer new-token");
|
||||
expect(normalized.headers).toMatchObject({
|
||||
"x-openclaw-auth": "new-token",
|
||||
"x-openclaw-token": "new-token-1234567890",
|
||||
"x-custom": "keep-me",
|
||||
});
|
||||
});
|
||||
|
||||
@@ -37,21 +37,22 @@ describe("buildInviteOnboardingTextDocument", () => {
|
||||
allowedHostnames: [],
|
||||
});
|
||||
|
||||
expect(text).toContain("Paperclip OpenClaw Onboarding");
|
||||
expect(text).toContain("Paperclip OpenClaw Gateway Onboarding");
|
||||
expect(text).toContain("/api/invites/token-123/accept");
|
||||
expect(text).toContain("/api/join-requests/{requestId}/claim-api-key");
|
||||
expect(text).toContain("/api/invites/token-123/onboarding.txt");
|
||||
expect(text).toContain("/api/invites/token-123/test-resolution");
|
||||
expect(text).toContain("Suggested Paperclip base URLs to try");
|
||||
expect(text).toContain("http://localhost:3100");
|
||||
expect(text).toContain("host.docker.internal");
|
||||
expect(text).toContain("paperclipApiUrl");
|
||||
expect(text).toContain("You MUST include agentDefaultsPayload.headers.x-openclaw-auth");
|
||||
expect(text).toContain("will fail with 401 Unauthorized");
|
||||
expect(text).toContain("adapterType \"openclaw_gateway\"");
|
||||
expect(text).toContain("headers.x-openclaw-token");
|
||||
expect(text).toContain("Do NOT use /v1/responses or /hooks/*");
|
||||
expect(text).toContain("set the first reachable candidate as agentDefaultsPayload.paperclipApiUrl");
|
||||
expect(text).toContain("~/.openclaw/workspace/paperclip-claimed-api-key.json");
|
||||
expect(text).toContain("PAPERCLIP_API_KEY");
|
||||
expect(text).toContain("saved token field");
|
||||
expect(text).toContain("Gateway token unexpectedly short");
|
||||
});
|
||||
|
||||
it("includes loopback diagnostics for authenticated/private onboarding", () => {
|
||||
|
||||
59
server/src/__tests__/issue-goal-fallback.test.ts
Normal file
59
server/src/__tests__/issue-goal-fallback.test.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
resolveIssueGoalId,
|
||||
resolveNextIssueGoalId,
|
||||
} from "../services/issue-goal-fallback.ts";
|
||||
|
||||
describe("issue goal fallback", () => {
|
||||
it("assigns the company goal when creating an issue without project or goal", () => {
|
||||
expect(
|
||||
resolveIssueGoalId({
|
||||
projectId: null,
|
||||
goalId: null,
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-1");
|
||||
});
|
||||
|
||||
it("keeps an explicit goal when creating an issue", () => {
|
||||
expect(
|
||||
resolveIssueGoalId({
|
||||
projectId: null,
|
||||
goalId: "goal-2",
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-2");
|
||||
});
|
||||
|
||||
it("does not force a company goal when the issue belongs to a project", () => {
|
||||
expect(
|
||||
resolveIssueGoalId({
|
||||
projectId: "project-1",
|
||||
goalId: null,
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBeNull();
|
||||
});
|
||||
|
||||
it("backfills the company goal on update for legacy no-project issues", () => {
|
||||
expect(
|
||||
resolveNextIssueGoalId({
|
||||
currentProjectId: null,
|
||||
currentGoalId: null,
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBe("goal-1");
|
||||
});
|
||||
|
||||
it("clears the fallback when a project is added later", () => {
|
||||
expect(
|
||||
resolveNextIssueGoalId({
|
||||
currentProjectId: null,
|
||||
currentGoalId: "goal-1",
|
||||
projectId: "project-1",
|
||||
goalId: null,
|
||||
defaultGoalId: "goal-1",
|
||||
}),
|
||||
).toBeNull();
|
||||
});
|
||||
});
|
||||
66
server/src/__tests__/log-redaction.test.ts
Normal file
66
server/src/__tests__/log-redaction.test.ts
Normal file
@@ -0,0 +1,66 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
CURRENT_USER_REDACTION_TOKEN,
|
||||
redactCurrentUserText,
|
||||
redactCurrentUserValue,
|
||||
} from "../log-redaction.js";
|
||||
|
||||
describe("log redaction", () => {
|
||||
it("redacts the active username inside home-directory paths", () => {
|
||||
const userName = "paperclipuser";
|
||||
const input = [
|
||||
`cwd=/Users/${userName}/paperclip`,
|
||||
`home=/home/${userName}/workspace`,
|
||||
`win=C:\\Users\\${userName}\\paperclip`,
|
||||
].join("\n");
|
||||
|
||||
const result = redactCurrentUserText(input, {
|
||||
userNames: [userName],
|
||||
homeDirs: [`/Users/${userName}`, `/home/${userName}`, `C:\\Users\\${userName}`],
|
||||
});
|
||||
|
||||
expect(result).toContain(`cwd=/Users/${CURRENT_USER_REDACTION_TOKEN}/paperclip`);
|
||||
expect(result).toContain(`home=/home/${CURRENT_USER_REDACTION_TOKEN}/workspace`);
|
||||
expect(result).toContain(`win=C:\\Users\\${CURRENT_USER_REDACTION_TOKEN}\\paperclip`);
|
||||
expect(result).not.toContain(userName);
|
||||
});
|
||||
|
||||
it("redacts standalone username mentions without mangling larger tokens", () => {
|
||||
const userName = "paperclipuser";
|
||||
const result = redactCurrentUserText(
|
||||
`user ${userName} said ${userName}/project should stay but apaperclipuserz should not change`,
|
||||
{
|
||||
userNames: [userName],
|
||||
homeDirs: [],
|
||||
},
|
||||
);
|
||||
|
||||
expect(result).toBe(
|
||||
`user ${CURRENT_USER_REDACTION_TOKEN} said ${CURRENT_USER_REDACTION_TOKEN}/project should stay but apaperclipuserz should not change`,
|
||||
);
|
||||
});
|
||||
|
||||
it("recursively redacts nested event payloads", () => {
|
||||
const userName = "paperclipuser";
|
||||
const result = redactCurrentUserValue({
|
||||
cwd: `/Users/${userName}/paperclip`,
|
||||
prompt: `open /Users/${userName}/paperclip/ui`,
|
||||
nested: {
|
||||
author: userName,
|
||||
},
|
||||
values: [userName, `/home/${userName}/project`],
|
||||
}, {
|
||||
userNames: [userName],
|
||||
homeDirs: [`/Users/${userName}`, `/home/${userName}`],
|
||||
});
|
||||
|
||||
expect(result).toEqual({
|
||||
cwd: `/Users/${CURRENT_USER_REDACTION_TOKEN}/paperclip`,
|
||||
prompt: `open /Users/${CURRENT_USER_REDACTION_TOKEN}/paperclip/ui`,
|
||||
nested: {
|
||||
author: CURRENT_USER_REDACTION_TOKEN,
|
||||
},
|
||||
values: [CURRENT_USER_REDACTION_TOKEN, `/home/${CURRENT_USER_REDACTION_TOKEN}/project`],
|
||||
});
|
||||
});
|
||||
});
|
||||
File diff suppressed because it is too large
Load Diff
625
server/src/__tests__/openclaw-gateway-adapter.test.ts
Normal file
625
server/src/__tests__/openclaw-gateway-adapter.test.ts
Normal file
@@ -0,0 +1,625 @@
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { createServer } from "node:http";
|
||||
import { WebSocketServer } from "ws";
|
||||
import { execute, testEnvironment } from "@paperclipai/adapter-openclaw-gateway/server";
|
||||
import {
|
||||
buildOpenClawGatewayConfig,
|
||||
parseOpenClawGatewayStdoutLine,
|
||||
} from "@paperclipai/adapter-openclaw-gateway/ui";
|
||||
import type { AdapterExecutionContext } from "@paperclipai/adapter-utils";
|
||||
|
||||
function buildContext(
|
||||
config: Record<string, unknown>,
|
||||
overrides?: Partial<AdapterExecutionContext>,
|
||||
): AdapterExecutionContext {
|
||||
return {
|
||||
runId: "run-123",
|
||||
agent: {
|
||||
id: "agent-123",
|
||||
companyId: "company-123",
|
||||
name: "OpenClaw Gateway Agent",
|
||||
adapterType: "openclaw_gateway",
|
||||
adapterConfig: {},
|
||||
},
|
||||
runtime: {
|
||||
sessionId: null,
|
||||
sessionParams: null,
|
||||
sessionDisplayId: null,
|
||||
taskKey: null,
|
||||
},
|
||||
config,
|
||||
context: {
|
||||
taskId: "task-123",
|
||||
issueId: "issue-123",
|
||||
wakeReason: "issue_assigned",
|
||||
issueIds: ["issue-123"],
|
||||
},
|
||||
onLog: async () => {},
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
async function createMockGatewayServer(options?: {
|
||||
waitPayload?: Record<string, unknown>;
|
||||
}) {
|
||||
const server = createServer();
|
||||
const wss = new WebSocketServer({ server });
|
||||
|
||||
let agentPayload: Record<string, unknown> | null = null;
|
||||
|
||||
wss.on("connection", (socket) => {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "event",
|
||||
event: "connect.challenge",
|
||||
payload: { nonce: "nonce-123" },
|
||||
}),
|
||||
);
|
||||
|
||||
socket.on("message", (raw) => {
|
||||
const text = Buffer.isBuffer(raw) ? raw.toString("utf8") : String(raw);
|
||||
const frame = JSON.parse(text) as {
|
||||
type: string;
|
||||
id: string;
|
||||
method: string;
|
||||
params?: Record<string, unknown>;
|
||||
};
|
||||
|
||||
if (frame.type !== "req") return;
|
||||
|
||||
if (frame.method === "connect") {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: {
|
||||
type: "hello-ok",
|
||||
protocol: 3,
|
||||
server: { version: "test", connId: "conn-1" },
|
||||
features: { methods: ["connect", "agent", "agent.wait"], events: ["agent"] },
|
||||
snapshot: { version: 1, ts: Date.now() },
|
||||
policy: { maxPayload: 1_000_000, maxBufferedBytes: 1_000_000, tickIntervalMs: 30_000 },
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (frame.method === "agent") {
|
||||
agentPayload = frame.params ?? null;
|
||||
const runId =
|
||||
typeof frame.params?.idempotencyKey === "string"
|
||||
? frame.params.idempotencyKey
|
||||
: "run-123";
|
||||
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: {
|
||||
runId,
|
||||
status: "accepted",
|
||||
acceptedAt: Date.now(),
|
||||
},
|
||||
}),
|
||||
);
|
||||
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "event",
|
||||
event: "agent",
|
||||
payload: {
|
||||
runId,
|
||||
seq: 1,
|
||||
stream: "assistant",
|
||||
ts: Date.now(),
|
||||
data: { delta: "cha" },
|
||||
},
|
||||
}),
|
||||
);
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "event",
|
||||
event: "agent",
|
||||
payload: {
|
||||
runId,
|
||||
seq: 2,
|
||||
stream: "assistant",
|
||||
ts: Date.now(),
|
||||
data: { delta: "chacha" },
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (frame.method === "agent.wait") {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: options?.waitPayload ?? {
|
||||
runId: frame.params?.runId,
|
||||
status: "ok",
|
||||
startedAt: 1,
|
||||
endedAt: 2,
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
server.listen(0, "127.0.0.1", () => resolve());
|
||||
});
|
||||
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
throw new Error("Failed to resolve test server address");
|
||||
}
|
||||
|
||||
return {
|
||||
url: `ws://127.0.0.1:${address.port}`,
|
||||
getAgentPayload: () => agentPayload,
|
||||
close: async () => {
|
||||
await new Promise<void>((resolve) => wss.close(() => resolve()));
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
async function createMockGatewayServerWithPairing() {
|
||||
const server = createServer();
|
||||
const wss = new WebSocketServer({ server });
|
||||
|
||||
let agentPayload: Record<string, unknown> | null = null;
|
||||
let approved = false;
|
||||
let pendingRequestId = "req-1";
|
||||
let lastSeenDeviceId: string | null = null;
|
||||
|
||||
wss.on("connection", (socket) => {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "event",
|
||||
event: "connect.challenge",
|
||||
payload: { nonce: "nonce-123" },
|
||||
}),
|
||||
);
|
||||
|
||||
socket.on("message", (raw) => {
|
||||
const text = Buffer.isBuffer(raw) ? raw.toString("utf8") : String(raw);
|
||||
const frame = JSON.parse(text) as {
|
||||
type: string;
|
||||
id: string;
|
||||
method: string;
|
||||
params?: Record<string, unknown>;
|
||||
};
|
||||
|
||||
if (frame.type !== "req") return;
|
||||
|
||||
if (frame.method === "connect") {
|
||||
const device = frame.params?.device as Record<string, unknown> | undefined;
|
||||
const deviceId = typeof device?.id === "string" ? device.id : null;
|
||||
if (deviceId) {
|
||||
lastSeenDeviceId = deviceId;
|
||||
}
|
||||
|
||||
if (deviceId && !approved) {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: false,
|
||||
error: {
|
||||
code: "NOT_PAIRED",
|
||||
message: "pairing required",
|
||||
details: {
|
||||
code: "PAIRING_REQUIRED",
|
||||
requestId: pendingRequestId,
|
||||
reason: "not-paired",
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
socket.close(1008, "pairing required");
|
||||
return;
|
||||
}
|
||||
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: {
|
||||
type: "hello-ok",
|
||||
protocol: 3,
|
||||
server: { version: "test", connId: "conn-1" },
|
||||
features: {
|
||||
methods: ["connect", "agent", "agent.wait", "device.pair.list", "device.pair.approve"],
|
||||
events: ["agent"],
|
||||
},
|
||||
snapshot: { version: 1, ts: Date.now() },
|
||||
policy: { maxPayload: 1_000_000, maxBufferedBytes: 1_000_000, tickIntervalMs: 30_000 },
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (frame.method === "device.pair.list") {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: {
|
||||
pending: approved
|
||||
? []
|
||||
: [
|
||||
{
|
||||
requestId: pendingRequestId,
|
||||
deviceId: lastSeenDeviceId ?? "device-unknown",
|
||||
},
|
||||
],
|
||||
paired: approved && lastSeenDeviceId ? [{ deviceId: lastSeenDeviceId }] : [],
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (frame.method === "device.pair.approve") {
|
||||
const requestId = frame.params?.requestId;
|
||||
if (requestId !== pendingRequestId) {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: false,
|
||||
error: { code: "INVALID_REQUEST", message: "unknown requestId" },
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
approved = true;
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: {
|
||||
requestId: pendingRequestId,
|
||||
device: {
|
||||
deviceId: lastSeenDeviceId ?? "device-unknown",
|
||||
},
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (frame.method === "agent") {
|
||||
agentPayload = frame.params ?? null;
|
||||
const runId =
|
||||
typeof frame.params?.idempotencyKey === "string"
|
||||
? frame.params.idempotencyKey
|
||||
: "run-123";
|
||||
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: {
|
||||
runId,
|
||||
status: "accepted",
|
||||
acceptedAt: Date.now(),
|
||||
},
|
||||
}),
|
||||
);
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "event",
|
||||
event: "agent",
|
||||
payload: {
|
||||
runId,
|
||||
seq: 1,
|
||||
stream: "assistant",
|
||||
ts: Date.now(),
|
||||
data: { delta: "ok" },
|
||||
},
|
||||
}),
|
||||
);
|
||||
return;
|
||||
}
|
||||
|
||||
if (frame.method === "agent.wait") {
|
||||
socket.send(
|
||||
JSON.stringify({
|
||||
type: "res",
|
||||
id: frame.id,
|
||||
ok: true,
|
||||
payload: {
|
||||
runId: frame.params?.runId,
|
||||
status: "ok",
|
||||
startedAt: 1,
|
||||
endedAt: 2,
|
||||
},
|
||||
}),
|
||||
);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
await new Promise<void>((resolve) => {
|
||||
server.listen(0, "127.0.0.1", () => resolve());
|
||||
});
|
||||
|
||||
const address = server.address();
|
||||
if (!address || typeof address === "string") {
|
||||
throw new Error("Failed to resolve test server address");
|
||||
}
|
||||
|
||||
return {
|
||||
url: `ws://127.0.0.1:${address.port}`,
|
||||
getAgentPayload: () => agentPayload,
|
||||
close: async () => {
|
||||
await new Promise<void>((resolve) => wss.close(() => resolve()));
|
||||
await new Promise<void>((resolve) => server.close(() => resolve()));
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
afterEach(() => {
|
||||
// no global mocks
|
||||
});
|
||||
|
||||
describe("openclaw gateway ui stdout parser", () => {
|
||||
it("parses assistant deltas from gateway event lines", () => {
|
||||
const ts = "2026-03-06T15:00:00.000Z";
|
||||
const line =
|
||||
'[openclaw-gateway:event] run=run-1 stream=assistant data={"delta":"hello"}';
|
||||
|
||||
expect(parseOpenClawGatewayStdoutLine(line, ts)).toEqual([
|
||||
{
|
||||
kind: "assistant",
|
||||
ts,
|
||||
text: "hello",
|
||||
delta: true,
|
||||
},
|
||||
]);
|
||||
});
|
||||
});
|
||||
|
||||
describe("openclaw gateway adapter execute", () => {
|
||||
it("runs connect -> agent -> agent.wait and forwards wake payload", async () => {
|
||||
const gateway = await createMockGatewayServer();
|
||||
const logs: string[] = [];
|
||||
|
||||
try {
|
||||
const result = await execute(
|
||||
buildContext(
|
||||
{
|
||||
url: gateway.url,
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token",
|
||||
},
|
||||
payloadTemplate: {
|
||||
message: "wake now",
|
||||
},
|
||||
waitTimeoutMs: 2000,
|
||||
},
|
||||
{
|
||||
onLog: async (_stream, chunk) => {
|
||||
logs.push(chunk);
|
||||
},
|
||||
context: {
|
||||
taskId: "task-123",
|
||||
issueId: "issue-123",
|
||||
wakeReason: "issue_assigned",
|
||||
issueIds: ["issue-123"],
|
||||
paperclipWorkspace: {
|
||||
cwd: "/tmp/worktrees/pap-123",
|
||||
strategy: "git_worktree",
|
||||
branchName: "pap-123-test",
|
||||
},
|
||||
paperclipWorkspaces: [
|
||||
{
|
||||
id: "workspace-1",
|
||||
cwd: "/tmp/project",
|
||||
},
|
||||
],
|
||||
paperclipRuntimeServiceIntents: [
|
||||
{
|
||||
name: "preview",
|
||||
lifecycle: "ephemeral",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.timedOut).toBe(false);
|
||||
expect(result.summary).toContain("chachacha");
|
||||
expect(result.provider).toBe("openclaw");
|
||||
|
||||
const payload = gateway.getAgentPayload();
|
||||
expect(payload).toBeTruthy();
|
||||
expect(payload?.idempotencyKey).toBe("run-123");
|
||||
expect(payload?.sessionKey).toBe("paperclip:issue:issue-123");
|
||||
expect(String(payload?.message ?? "")).toContain("wake now");
|
||||
expect(String(payload?.message ?? "")).toContain("PAPERCLIP_RUN_ID=run-123");
|
||||
expect(String(payload?.message ?? "")).toContain("PAPERCLIP_TASK_ID=task-123");
|
||||
|
||||
expect(logs.some((entry) => entry.includes("[openclaw-gateway:event] run=run-123 stream=assistant"))).toBe(true);
|
||||
} finally {
|
||||
await gateway.close();
|
||||
}
|
||||
});
|
||||
|
||||
it("fails fast when url is missing", async () => {
|
||||
const result = await execute(buildContext({}));
|
||||
expect(result.exitCode).toBe(1);
|
||||
expect(result.errorCode).toBe("openclaw_gateway_url_missing");
|
||||
});
|
||||
|
||||
it("returns adapter-managed runtime services from gateway result meta", async () => {
|
||||
const gateway = await createMockGatewayServer({
|
||||
waitPayload: {
|
||||
runId: "run-123",
|
||||
status: "ok",
|
||||
startedAt: 1,
|
||||
endedAt: 2,
|
||||
meta: {
|
||||
runtimeServices: [
|
||||
{
|
||||
name: "preview",
|
||||
scopeType: "run",
|
||||
url: "https://preview.example/run-123",
|
||||
providerRef: "sandbox-123",
|
||||
lifecycle: "ephemeral",
|
||||
},
|
||||
],
|
||||
},
|
||||
},
|
||||
});
|
||||
|
||||
try {
|
||||
const result = await execute(
|
||||
buildContext({
|
||||
url: gateway.url,
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token",
|
||||
},
|
||||
waitTimeoutMs: 2000,
|
||||
}),
|
||||
);
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.runtimeServices).toEqual([
|
||||
expect.objectContaining({
|
||||
serviceName: "preview",
|
||||
scopeType: "run",
|
||||
url: "https://preview.example/run-123",
|
||||
providerRef: "sandbox-123",
|
||||
lifecycle: "ephemeral",
|
||||
status: "running",
|
||||
}),
|
||||
]);
|
||||
} finally {
|
||||
await gateway.close();
|
||||
}
|
||||
});
|
||||
|
||||
it("auto-approves pairing once and retries the run", async () => {
|
||||
const gateway = await createMockGatewayServerWithPairing();
|
||||
const logs: string[] = [];
|
||||
|
||||
try {
|
||||
const result = await execute(
|
||||
buildContext(
|
||||
{
|
||||
url: gateway.url,
|
||||
headers: {
|
||||
"x-openclaw-token": "gateway-token",
|
||||
},
|
||||
payloadTemplate: {
|
||||
message: "wake now",
|
||||
},
|
||||
waitTimeoutMs: 2000,
|
||||
},
|
||||
{
|
||||
onLog: async (_stream, chunk) => {
|
||||
logs.push(chunk);
|
||||
},
|
||||
},
|
||||
),
|
||||
);
|
||||
|
||||
expect(result.exitCode).toBe(0);
|
||||
expect(result.summary).toContain("ok");
|
||||
expect(logs.some((entry) => entry.includes("pairing required; attempting automatic pairing approval"))).toBe(
|
||||
true,
|
||||
);
|
||||
expect(logs.some((entry) => entry.includes("auto-approved pairing request"))).toBe(true);
|
||||
expect(gateway.getAgentPayload()).toBeTruthy();
|
||||
} finally {
|
||||
await gateway.close();
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe("openclaw gateway ui build config", () => {
|
||||
it("parses payload template and runtime services json", () => {
|
||||
const config = buildOpenClawGatewayConfig({
|
||||
adapterType: "openclaw_gateway",
|
||||
cwd: "",
|
||||
promptTemplate: "",
|
||||
model: "",
|
||||
thinkingEffort: "",
|
||||
chrome: false,
|
||||
dangerouslySkipPermissions: false,
|
||||
search: false,
|
||||
dangerouslyBypassSandbox: false,
|
||||
command: "",
|
||||
args: "",
|
||||
extraArgs: "",
|
||||
envVars: "",
|
||||
envBindings: {},
|
||||
url: "wss://gateway.example/ws",
|
||||
payloadTemplateJson: JSON.stringify({
|
||||
agentId: "remote-agent-123",
|
||||
metadata: { team: "platform" },
|
||||
}),
|
||||
runtimeServicesJson: JSON.stringify({
|
||||
services: [
|
||||
{
|
||||
name: "preview",
|
||||
lifecycle: "shared",
|
||||
},
|
||||
],
|
||||
}),
|
||||
bootstrapPrompt: "",
|
||||
maxTurnsPerRun: 0,
|
||||
heartbeatEnabled: true,
|
||||
intervalSec: 300,
|
||||
});
|
||||
|
||||
expect(config).toEqual(
|
||||
expect.objectContaining({
|
||||
url: "wss://gateway.example/ws",
|
||||
payloadTemplate: {
|
||||
agentId: "remote-agent-123",
|
||||
metadata: { team: "platform" },
|
||||
},
|
||||
workspaceRuntime: {
|
||||
services: [
|
||||
{
|
||||
name: "preview",
|
||||
lifecycle: "shared",
|
||||
},
|
||||
],
|
||||
},
|
||||
}),
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
describe("openclaw gateway testEnvironment", () => {
|
||||
it("reports missing url as failure", async () => {
|
||||
const result = await testEnvironment({
|
||||
companyId: "company-123",
|
||||
adapterType: "openclaw_gateway",
|
||||
config: {},
|
||||
});
|
||||
|
||||
expect(result.status).toBe("fail");
|
||||
expect(result.checks.some((check) => check.code === "openclaw_gateway_url_missing")).toBe(true);
|
||||
});
|
||||
});
|
||||
181
server/src/__tests__/openclaw-invite-prompt-route.test.ts
Normal file
181
server/src/__tests__/openclaw-invite-prompt-route.test.ts
Normal file
@@ -0,0 +1,181 @@
|
||||
import express from "express";
|
||||
import request from "supertest";
|
||||
import { beforeEach, describe, expect, it, vi } from "vitest";
|
||||
import { accessRoutes } from "../routes/access.js";
|
||||
import { errorHandler } from "../middleware/index.js";
|
||||
|
||||
const mockAccessService = vi.hoisted(() => ({
|
||||
hasPermission: vi.fn(),
|
||||
canUser: vi.fn(),
|
||||
isInstanceAdmin: vi.fn(),
|
||||
getMembership: vi.fn(),
|
||||
ensureMembership: vi.fn(),
|
||||
listMembers: vi.fn(),
|
||||
setMemberPermissions: vi.fn(),
|
||||
promoteInstanceAdmin: vi.fn(),
|
||||
demoteInstanceAdmin: vi.fn(),
|
||||
listUserCompanyAccess: vi.fn(),
|
||||
setUserCompanyAccess: vi.fn(),
|
||||
setPrincipalGrants: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockAgentService = vi.hoisted(() => ({
|
||||
getById: vi.fn(),
|
||||
}));
|
||||
|
||||
const mockLogActivity = vi.hoisted(() => vi.fn());
|
||||
|
||||
vi.mock("../services/index.js", () => ({
|
||||
accessService: () => mockAccessService,
|
||||
agentService: () => mockAgentService,
|
||||
deduplicateAgentName: vi.fn(),
|
||||
logActivity: mockLogActivity,
|
||||
notifyHireApproved: vi.fn(),
|
||||
}));
|
||||
|
||||
function createDbStub() {
|
||||
const createdInvite = {
|
||||
id: "invite-1",
|
||||
companyId: "company-1",
|
||||
inviteType: "company_join",
|
||||
allowedJoinTypes: "agent",
|
||||
defaultsPayload: null,
|
||||
expiresAt: new Date("2026-03-07T00:10:00.000Z"),
|
||||
invitedByUserId: null,
|
||||
tokenHash: "hash",
|
||||
revokedAt: null,
|
||||
acceptedAt: null,
|
||||
createdAt: new Date("2026-03-07T00:00:00.000Z"),
|
||||
updatedAt: new Date("2026-03-07T00:00:00.000Z"),
|
||||
};
|
||||
const returning = vi.fn().mockResolvedValue([createdInvite]);
|
||||
const values = vi.fn().mockReturnValue({ returning });
|
||||
const insert = vi.fn().mockReturnValue({ values });
|
||||
return {
|
||||
insert,
|
||||
};
|
||||
}
|
||||
|
||||
function createApp(actor: Record<string, unknown>, db: Record<string, unknown>) {
|
||||
const app = express();
|
||||
app.use(express.json());
|
||||
app.use((req, _res, next) => {
|
||||
(req as any).actor = actor;
|
||||
next();
|
||||
});
|
||||
app.use(
|
||||
"/api",
|
||||
accessRoutes(db as any, {
|
||||
deploymentMode: "local_trusted",
|
||||
deploymentExposure: "private",
|
||||
bindHost: "127.0.0.1",
|
||||
allowedHostnames: [],
|
||||
}),
|
||||
);
|
||||
app.use(errorHandler);
|
||||
return app;
|
||||
}
|
||||
|
||||
describe("POST /companies/:companyId/openclaw/invite-prompt", () => {
|
||||
beforeEach(() => {
|
||||
mockAccessService.canUser.mockResolvedValue(false);
|
||||
mockAgentService.getById.mockReset();
|
||||
mockLogActivity.mockResolvedValue(undefined);
|
||||
});
|
||||
|
||||
it("rejects non-CEO agent callers", async () => {
|
||||
const db = createDbStub();
|
||||
mockAgentService.getById.mockResolvedValue({
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
role: "engineer",
|
||||
});
|
||||
const app = createApp(
|
||||
{
|
||||
type: "agent",
|
||||
agentId: "agent-1",
|
||||
companyId: "company-1",
|
||||
source: "agent_key",
|
||||
},
|
||||
db,
|
||||
);
|
||||
|
||||
const res = await request(app)
|
||||
.post("/api/companies/company-1/openclaw/invite-prompt")
|
||||
.send({});
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
expect(res.body.error).toContain("Only CEO agents");
|
||||
});
|
||||
|
||||
it("allows CEO agent callers and creates an agent-only invite", async () => {
|
||||
const db = createDbStub();
|
||||
mockAgentService.getById.mockResolvedValue({
|
||||
id: "agent-1",
|
||||
companyId: "company-1",
|
||||
role: "ceo",
|
||||
});
|
||||
const app = createApp(
|
||||
{
|
||||
type: "agent",
|
||||
agentId: "agent-1",
|
||||
companyId: "company-1",
|
||||
source: "agent_key",
|
||||
},
|
||||
db,
|
||||
);
|
||||
|
||||
const res = await request(app)
|
||||
.post("/api/companies/company-1/openclaw/invite-prompt")
|
||||
.send({ agentMessage: "Join and configure OpenClaw gateway." });
|
||||
|
||||
expect(res.status).toBe(201);
|
||||
expect(res.body.allowedJoinTypes).toBe("agent");
|
||||
expect(typeof res.body.token).toBe("string");
|
||||
expect(res.body.onboardingTextPath).toContain("/api/invites/");
|
||||
});
|
||||
|
||||
it("allows board callers with invite permission", async () => {
|
||||
const db = createDbStub();
|
||||
mockAccessService.canUser.mockResolvedValue(true);
|
||||
const app = createApp(
|
||||
{
|
||||
type: "board",
|
||||
userId: "user-1",
|
||||
companyIds: ["company-1"],
|
||||
source: "session",
|
||||
isInstanceAdmin: false,
|
||||
},
|
||||
db,
|
||||
);
|
||||
|
||||
const res = await request(app)
|
||||
.post("/api/companies/company-1/openclaw/invite-prompt")
|
||||
.send({});
|
||||
|
||||
expect(res.status).toBe(201);
|
||||
expect(res.body.allowedJoinTypes).toBe("agent");
|
||||
});
|
||||
|
||||
it("rejects board callers without invite permission", async () => {
|
||||
const db = createDbStub();
|
||||
mockAccessService.canUser.mockResolvedValue(false);
|
||||
const app = createApp(
|
||||
{
|
||||
type: "board",
|
||||
userId: "user-1",
|
||||
companyIds: ["company-1"],
|
||||
source: "session",
|
||||
isInstanceAdmin: false,
|
||||
},
|
||||
db,
|
||||
);
|
||||
|
||||
const res = await request(app)
|
||||
.post("/api/companies/company-1/openclaw/invite-prompt")
|
||||
.send({});
|
||||
|
||||
expect(res.status).toBe(403);
|
||||
expect(res.body.error).toBe("Permission denied");
|
||||
});
|
||||
});
|
||||
@@ -103,6 +103,7 @@ describe("opencode_local ui stdout parser", () => {
|
||||
kind: "tool_call",
|
||||
ts,
|
||||
name: "bash",
|
||||
toolUseId: "call_1",
|
||||
input: { command: "ls -1" },
|
||||
},
|
||||
{
|
||||
|
||||
61
server/src/__tests__/paperclip-skill-utils.test.ts
Normal file
61
server/src/__tests__/paperclip-skill-utils.test.ts
Normal file
@@ -0,0 +1,61 @@
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
listPaperclipSkillEntries,
|
||||
removeMaintainerOnlySkillSymlinks,
|
||||
} from "@paperclipai/adapter-utils/server-utils";
|
||||
|
||||
async function makeTempDir(prefix: string): Promise<string> {
|
||||
return fs.mkdtemp(path.join(os.tmpdir(), prefix));
|
||||
}
|
||||
|
||||
describe("paperclip skill utils", () => {
|
||||
const cleanupDirs = new Set<string>();
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(Array.from(cleanupDirs).map((dir) => fs.rm(dir, { recursive: true, force: true })));
|
||||
cleanupDirs.clear();
|
||||
});
|
||||
|
||||
it("lists runtime skills from ./skills without pulling in .agents/skills", async () => {
|
||||
const root = await makeTempDir("paperclip-skill-roots-");
|
||||
cleanupDirs.add(root);
|
||||
|
||||
const moduleDir = path.join(root, "a", "b", "c", "d", "e");
|
||||
await fs.mkdir(moduleDir, { recursive: true });
|
||||
await fs.mkdir(path.join(root, "skills", "paperclip"), { recursive: true });
|
||||
await fs.mkdir(path.join(root, ".agents", "skills", "release"), { recursive: true });
|
||||
|
||||
const entries = await listPaperclipSkillEntries(moduleDir);
|
||||
|
||||
expect(entries.map((entry) => entry.name)).toEqual(["paperclip"]);
|
||||
expect(entries[0]?.source).toBe(path.join(root, "skills", "paperclip"));
|
||||
});
|
||||
|
||||
it("removes stale maintainer-only symlinks from a shared skills home", async () => {
|
||||
const root = await makeTempDir("paperclip-skill-cleanup-");
|
||||
cleanupDirs.add(root);
|
||||
|
||||
const skillsHome = path.join(root, "skills-home");
|
||||
const runtimeSkill = path.join(root, "skills", "paperclip");
|
||||
const customSkill = path.join(root, "custom", "release-notes");
|
||||
const staleMaintainerSkill = path.join(root, ".agents", "skills", "release");
|
||||
|
||||
await fs.mkdir(skillsHome, { recursive: true });
|
||||
await fs.mkdir(runtimeSkill, { recursive: true });
|
||||
await fs.mkdir(customSkill, { recursive: true });
|
||||
|
||||
await fs.symlink(runtimeSkill, path.join(skillsHome, "paperclip"));
|
||||
await fs.symlink(customSkill, path.join(skillsHome, "release-notes"));
|
||||
await fs.symlink(staleMaintainerSkill, path.join(skillsHome, "release"));
|
||||
|
||||
const removed = await removeMaintainerOnlySkillSymlinks(skillsHome, ["paperclip"]);
|
||||
|
||||
expect(removed).toEqual(["release"]);
|
||||
await expect(fs.lstat(path.join(skillsHome, "release"))).rejects.toThrow();
|
||||
expect((await fs.lstat(path.join(skillsHome, "paperclip"))).isSymbolicLink()).toBe(true);
|
||||
expect((await fs.lstat(path.join(skillsHome, "release-notes"))).isSymbolicLink()).toBe(true);
|
||||
});
|
||||
});
|
||||
68
server/src/__tests__/plugin-dev-watcher.test.ts
Normal file
68
server/src/__tests__/plugin-dev-watcher.test.ts
Normal file
@@ -0,0 +1,68 @@
|
||||
import { mkdtempSync, mkdirSync, rmSync, writeFileSync } from "node:fs";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import { resolvePluginWatchTargets } from "../services/plugin-dev-watcher.js";
|
||||
|
||||
const tempDirs: string[] = [];
|
||||
|
||||
afterEach(() => {
|
||||
while (tempDirs.length > 0) {
|
||||
const dir = tempDirs.pop();
|
||||
if (dir) rmSync(dir, { recursive: true, force: true });
|
||||
}
|
||||
});
|
||||
|
||||
function makeTempPluginDir(): string {
|
||||
const dir = mkdtempSync(path.join(os.tmpdir(), "paperclip-plugin-watch-"));
|
||||
tempDirs.push(dir);
|
||||
return dir;
|
||||
}
|
||||
|
||||
describe("resolvePluginWatchTargets", () => {
|
||||
it("watches package metadata plus concrete declared runtime files", () => {
|
||||
const pluginDir = makeTempPluginDir();
|
||||
mkdirSync(path.join(pluginDir, "dist", "ui"), { recursive: true });
|
||||
writeFileSync(
|
||||
path.join(pluginDir, "package.json"),
|
||||
JSON.stringify({
|
||||
name: "@acme/example",
|
||||
paperclipPlugin: {
|
||||
manifest: "./dist/manifest.js",
|
||||
worker: "./dist/worker.js",
|
||||
ui: "./dist/ui",
|
||||
},
|
||||
}),
|
||||
);
|
||||
writeFileSync(path.join(pluginDir, "dist", "manifest.js"), "export default {};\n");
|
||||
writeFileSync(path.join(pluginDir, "dist", "worker.js"), "export default {};\n");
|
||||
writeFileSync(path.join(pluginDir, "dist", "ui", "index.js"), "export default {};\n");
|
||||
writeFileSync(path.join(pluginDir, "dist", "ui", "index.css"), "body {}\n");
|
||||
|
||||
const targets = resolvePluginWatchTargets(pluginDir);
|
||||
|
||||
expect(targets).toEqual([
|
||||
{ path: path.join(pluginDir, "dist", "manifest.js"), recursive: false, kind: "file" },
|
||||
{ path: path.join(pluginDir, "dist", "ui", "index.css"), recursive: false, kind: "file" },
|
||||
{ path: path.join(pluginDir, "dist", "ui", "index.js"), recursive: false, kind: "file" },
|
||||
{ path: path.join(pluginDir, "dist", "worker.js"), recursive: false, kind: "file" },
|
||||
{ path: path.join(pluginDir, "package.json"), recursive: false, kind: "file" },
|
||||
]);
|
||||
});
|
||||
|
||||
it("falls back to dist when package metadata does not declare entrypoints", () => {
|
||||
const pluginDir = makeTempPluginDir();
|
||||
mkdirSync(path.join(pluginDir, "dist", "nested"), { recursive: true });
|
||||
writeFileSync(path.join(pluginDir, "package.json"), JSON.stringify({ name: "@acme/example" }));
|
||||
writeFileSync(path.join(pluginDir, "dist", "manifest.js"), "export default {};\n");
|
||||
writeFileSync(path.join(pluginDir, "dist", "nested", "chunk.js"), "export default {};\n");
|
||||
|
||||
const targets = resolvePluginWatchTargets(pluginDir);
|
||||
|
||||
expect(targets).toEqual([
|
||||
{ path: path.join(pluginDir, "package.json"), recursive: false, kind: "file" },
|
||||
{ path: path.join(pluginDir, "dist", "manifest.js"), recursive: false, kind: "file" },
|
||||
{ path: path.join(pluginDir, "dist", "nested", "chunk.js"), recursive: false, kind: "file" },
|
||||
]);
|
||||
});
|
||||
});
|
||||
43
server/src/__tests__/plugin-worker-manager.test.ts
Normal file
43
server/src/__tests__/plugin-worker-manager.test.ts
Normal file
@@ -0,0 +1,43 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { appendStderrExcerpt, formatWorkerFailureMessage } from "../services/plugin-worker-manager.js";
|
||||
|
||||
describe("plugin-worker-manager stderr failure context", () => {
|
||||
it("appends worker stderr context to failure messages", () => {
|
||||
expect(
|
||||
formatWorkerFailureMessage(
|
||||
"Worker process exited (code=1, signal=null)",
|
||||
"TypeError: Unknown file extension \".ts\"",
|
||||
),
|
||||
).toBe(
|
||||
"Worker process exited (code=1, signal=null)\n\nWorker stderr:\nTypeError: Unknown file extension \".ts\"",
|
||||
);
|
||||
});
|
||||
|
||||
it("does not duplicate stderr that is already present", () => {
|
||||
const message = [
|
||||
"Worker process exited (code=1, signal=null)",
|
||||
"",
|
||||
"Worker stderr:",
|
||||
"TypeError: Unknown file extension \".ts\"",
|
||||
].join("\n");
|
||||
|
||||
expect(
|
||||
formatWorkerFailureMessage(message, "TypeError: Unknown file extension \".ts\""),
|
||||
).toBe(message);
|
||||
});
|
||||
|
||||
it("keeps only the latest stderr excerpt", () => {
|
||||
let excerpt = "";
|
||||
excerpt = appendStderrExcerpt(excerpt, "first line");
|
||||
excerpt = appendStderrExcerpt(excerpt, "second line");
|
||||
|
||||
expect(excerpt).toContain("first line");
|
||||
expect(excerpt).toContain("second line");
|
||||
|
||||
excerpt = appendStderrExcerpt(excerpt, "x".repeat(9_000));
|
||||
|
||||
expect(excerpt).not.toContain("first line");
|
||||
expect(excerpt).not.toContain("second line");
|
||||
expect(excerpt.length).toBeLessThanOrEqual(8_000);
|
||||
});
|
||||
});
|
||||
@@ -52,5 +52,5 @@ describe("privateHostnameGuard", () => {
|
||||
const res = await request(app).get("/dashboard").set("Host", "dotta-macbook-pro:3100");
|
||||
expect(res.status).toBe(403);
|
||||
expect(res.text).toContain("please run pnpm paperclipai allowed-hostname dotta-macbook-pro");
|
||||
});
|
||||
}, 20_000);
|
||||
});
|
||||
|
||||
45
server/src/__tests__/project-shortname-resolution.test.ts
Normal file
45
server/src/__tests__/project-shortname-resolution.test.ts
Normal file
@@ -0,0 +1,45 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import { resolveProjectNameForUniqueShortname } from "../services/projects.ts";
|
||||
|
||||
describe("resolveProjectNameForUniqueShortname", () => {
|
||||
it("keeps name when shortname is not used", () => {
|
||||
const resolved = resolveProjectNameForUniqueShortname("Platform", [
|
||||
{ id: "p1", name: "Growth" },
|
||||
]);
|
||||
expect(resolved).toBe("Platform");
|
||||
});
|
||||
|
||||
it("appends numeric suffix when shortname collides", () => {
|
||||
const resolved = resolveProjectNameForUniqueShortname("Growth Team", [
|
||||
{ id: "p1", name: "growth-team" },
|
||||
]);
|
||||
expect(resolved).toBe("Growth Team 2");
|
||||
});
|
||||
|
||||
it("increments suffix until unique", () => {
|
||||
const resolved = resolveProjectNameForUniqueShortname("Growth Team", [
|
||||
{ id: "p1", name: "growth-team" },
|
||||
{ id: "p2", name: "growth-team-2" },
|
||||
]);
|
||||
expect(resolved).toBe("Growth Team 3");
|
||||
});
|
||||
|
||||
it("ignores excluded project id", () => {
|
||||
const resolved = resolveProjectNameForUniqueShortname(
|
||||
"Growth Team",
|
||||
[
|
||||
{ id: "p1", name: "growth-team" },
|
||||
{ id: "p2", name: "platform" },
|
||||
],
|
||||
{ excludeProjectId: "p1" },
|
||||
);
|
||||
expect(resolved).toBe("Growth Team");
|
||||
});
|
||||
|
||||
it("keeps non-normalizable names unchanged", () => {
|
||||
const resolved = resolveProjectNameForUniqueShortname("!!!", [
|
||||
{ id: "p1", name: "growth" },
|
||||
]);
|
||||
expect(resolved).toBe("!!!");
|
||||
});
|
||||
});
|
||||
82
server/src/__tests__/ui-branding.test.ts
Normal file
82
server/src/__tests__/ui-branding.test.ts
Normal file
@@ -0,0 +1,82 @@
|
||||
import { describe, expect, it } from "vitest";
|
||||
import {
|
||||
applyUiBranding,
|
||||
getWorktreeUiBranding,
|
||||
isWorktreeUiBrandingEnabled,
|
||||
renderFaviconLinks,
|
||||
renderRuntimeBrandingMeta,
|
||||
} from "../ui-branding.js";
|
||||
|
||||
const TEMPLATE = `<!doctype html>
|
||||
<head>
|
||||
<!-- PAPERCLIP_RUNTIME_BRANDING_START -->
|
||||
<!-- PAPERCLIP_RUNTIME_BRANDING_END -->
|
||||
<!-- PAPERCLIP_FAVICON_START -->
|
||||
<link rel="icon" href="/favicon.ico" sizes="48x48" />
|
||||
<link rel="icon" href="/favicon.svg" type="image/svg+xml" />
|
||||
<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />
|
||||
<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />
|
||||
<!-- PAPERCLIP_FAVICON_END -->
|
||||
</head>`;
|
||||
|
||||
describe("ui branding", () => {
|
||||
it("detects worktree mode from PAPERCLIP_IN_WORKTREE", () => {
|
||||
expect(isWorktreeUiBrandingEnabled({ PAPERCLIP_IN_WORKTREE: "true" })).toBe(true);
|
||||
expect(isWorktreeUiBrandingEnabled({ PAPERCLIP_IN_WORKTREE: "1" })).toBe(true);
|
||||
expect(isWorktreeUiBrandingEnabled({ PAPERCLIP_IN_WORKTREE: "false" })).toBe(false);
|
||||
});
|
||||
|
||||
it("resolves name, color, and text color for worktree branding", () => {
|
||||
const branding = getWorktreeUiBranding({
|
||||
PAPERCLIP_IN_WORKTREE: "true",
|
||||
PAPERCLIP_WORKTREE_NAME: "paperclip-pr-432",
|
||||
PAPERCLIP_WORKTREE_COLOR: "#4f86f7",
|
||||
});
|
||||
|
||||
expect(branding.enabled).toBe(true);
|
||||
expect(branding.name).toBe("paperclip-pr-432");
|
||||
expect(branding.color).toBe("#4f86f7");
|
||||
expect(branding.textColor).toMatch(/^#[0-9a-f]{6}$/);
|
||||
expect(branding.faviconHref).toContain("data:image/svg+xml,");
|
||||
});
|
||||
|
||||
it("renders a dynamic worktree favicon when enabled", () => {
|
||||
const links = renderFaviconLinks(
|
||||
getWorktreeUiBranding({
|
||||
PAPERCLIP_IN_WORKTREE: "true",
|
||||
PAPERCLIP_WORKTREE_NAME: "paperclip-pr-432",
|
||||
PAPERCLIP_WORKTREE_COLOR: "#4f86f7",
|
||||
}),
|
||||
);
|
||||
expect(links).toContain("data:image/svg+xml,");
|
||||
expect(links).toContain('rel="shortcut icon"');
|
||||
});
|
||||
|
||||
it("renders runtime branding metadata for the ui", () => {
|
||||
const meta = renderRuntimeBrandingMeta(
|
||||
getWorktreeUiBranding({
|
||||
PAPERCLIP_IN_WORKTREE: "true",
|
||||
PAPERCLIP_WORKTREE_NAME: "paperclip-pr-432",
|
||||
PAPERCLIP_WORKTREE_COLOR: "#4f86f7",
|
||||
}),
|
||||
);
|
||||
expect(meta).toContain('name="paperclip-worktree-name"');
|
||||
expect(meta).toContain('content="paperclip-pr-432"');
|
||||
expect(meta).toContain('name="paperclip-worktree-color"');
|
||||
});
|
||||
|
||||
it("rewrites the favicon and runtime branding blocks for worktree instances only", () => {
|
||||
const branded = applyUiBranding(TEMPLATE, {
|
||||
PAPERCLIP_IN_WORKTREE: "true",
|
||||
PAPERCLIP_WORKTREE_NAME: "paperclip-pr-432",
|
||||
PAPERCLIP_WORKTREE_COLOR: "#4f86f7",
|
||||
});
|
||||
expect(branded).toContain("data:image/svg+xml,");
|
||||
expect(branded).toContain('name="paperclip-worktree-name"');
|
||||
expect(branded).not.toContain('href="/favicon.svg"');
|
||||
|
||||
const defaultHtml = applyUiBranding(TEMPLATE, {});
|
||||
expect(defaultHtml).toContain('href="/favicon.svg"');
|
||||
expect(defaultHtml).not.toContain('name="paperclip-worktree-name"');
|
||||
});
|
||||
});
|
||||
386
server/src/__tests__/workspace-runtime.test.ts
Normal file
386
server/src/__tests__/workspace-runtime.test.ts
Normal file
@@ -0,0 +1,386 @@
|
||||
import { execFile } from "node:child_process";
|
||||
import fs from "node:fs/promises";
|
||||
import os from "node:os";
|
||||
import path from "node:path";
|
||||
import { promisify } from "node:util";
|
||||
import { afterEach, describe, expect, it } from "vitest";
|
||||
import {
|
||||
ensureRuntimeServicesForRun,
|
||||
normalizeAdapterManagedRuntimeServices,
|
||||
realizeExecutionWorkspace,
|
||||
releaseRuntimeServicesForRun,
|
||||
type RealizedExecutionWorkspace,
|
||||
} from "../services/workspace-runtime.ts";
|
||||
|
||||
const execFileAsync = promisify(execFile);
|
||||
const leasedRunIds = new Set<string>();
|
||||
|
||||
async function runGit(cwd: string, args: string[]) {
|
||||
await execFileAsync("git", args, { cwd });
|
||||
}
|
||||
|
||||
async function createTempRepo() {
|
||||
const repoRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-worktree-repo-"));
|
||||
await runGit(repoRoot, ["init"]);
|
||||
await runGit(repoRoot, ["config", "user.email", "paperclip@example.com"]);
|
||||
await runGit(repoRoot, ["config", "user.name", "Paperclip Test"]);
|
||||
await fs.writeFile(path.join(repoRoot, "README.md"), "hello\n", "utf8");
|
||||
await runGit(repoRoot, ["add", "README.md"]);
|
||||
await runGit(repoRoot, ["commit", "-m", "Initial commit"]);
|
||||
await runGit(repoRoot, ["checkout", "-B", "main"]);
|
||||
return repoRoot;
|
||||
}
|
||||
|
||||
function buildWorkspace(cwd: string): RealizedExecutionWorkspace {
|
||||
return {
|
||||
baseCwd: cwd,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
strategy: "project_primary",
|
||||
cwd,
|
||||
branchName: null,
|
||||
worktreePath: null,
|
||||
warnings: [],
|
||||
created: false,
|
||||
};
|
||||
}
|
||||
|
||||
afterEach(async () => {
|
||||
await Promise.all(
|
||||
Array.from(leasedRunIds).map(async (runId) => {
|
||||
await releaseRuntimeServicesForRun(runId);
|
||||
leasedRunIds.delete(runId);
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
describe("realizeExecutionWorkspace", () => {
|
||||
it("creates and reuses a git worktree for an issue-scoped branch", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
|
||||
const first = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-447",
|
||||
title: "Add Worktree Support",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
expect(first.strategy).toBe("git_worktree");
|
||||
expect(first.created).toBe(true);
|
||||
expect(first.branchName).toBe("PAP-447-add-worktree-support");
|
||||
expect(first.cwd).toContain(path.join(".paperclip", "worktrees"));
|
||||
await expect(fs.stat(path.join(first.cwd, ".git"))).resolves.toBeTruthy();
|
||||
|
||||
const second = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-447",
|
||||
title: "Add Worktree Support",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
expect(second.created).toBe(false);
|
||||
expect(second.cwd).toBe(first.cwd);
|
||||
expect(second.branchName).toBe(first.branchName);
|
||||
});
|
||||
|
||||
it("runs a configured provision command inside the derived worktree", async () => {
|
||||
const repoRoot = await createTempRepo();
|
||||
await fs.mkdir(path.join(repoRoot, "scripts"), { recursive: true });
|
||||
await fs.writeFile(
|
||||
path.join(repoRoot, "scripts", "provision.sh"),
|
||||
[
|
||||
"#!/usr/bin/env bash",
|
||||
"set -euo pipefail",
|
||||
"printf '%s\\n' \"$PAPERCLIP_WORKSPACE_BRANCH\" > .paperclip-provision-branch",
|
||||
"printf '%s\\n' \"$PAPERCLIP_WORKSPACE_BASE_CWD\" > .paperclip-provision-base",
|
||||
"printf '%s\\n' \"$PAPERCLIP_WORKSPACE_CREATED\" > .paperclip-provision-created",
|
||||
].join("\n"),
|
||||
"utf8",
|
||||
);
|
||||
await runGit(repoRoot, ["add", "scripts/provision.sh"]);
|
||||
await runGit(repoRoot, ["commit", "-m", "Add worktree provision script"]);
|
||||
|
||||
const workspace = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
provisionCommand: "bash ./scripts/provision.sh",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-448",
|
||||
title: "Run provision command",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
await expect(fs.readFile(path.join(workspace.cwd, ".paperclip-provision-branch"), "utf8")).resolves.toBe(
|
||||
"PAP-448-run-provision-command\n",
|
||||
);
|
||||
await expect(fs.readFile(path.join(workspace.cwd, ".paperclip-provision-base"), "utf8")).resolves.toBe(
|
||||
`${repoRoot}\n`,
|
||||
);
|
||||
await expect(fs.readFile(path.join(workspace.cwd, ".paperclip-provision-created"), "utf8")).resolves.toBe(
|
||||
"true\n",
|
||||
);
|
||||
|
||||
const reused = await realizeExecutionWorkspace({
|
||||
base: {
|
||||
baseCwd: repoRoot,
|
||||
source: "project_primary",
|
||||
projectId: "project-1",
|
||||
workspaceId: "workspace-1",
|
||||
repoUrl: null,
|
||||
repoRef: "HEAD",
|
||||
},
|
||||
config: {
|
||||
workspaceStrategy: {
|
||||
type: "git_worktree",
|
||||
branchTemplate: "{{issue.identifier}}-{{slug}}",
|
||||
provisionCommand: "bash ./scripts/provision.sh",
|
||||
},
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-448",
|
||||
title: "Run provision command",
|
||||
},
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
});
|
||||
|
||||
await expect(fs.readFile(path.join(reused.cwd, ".paperclip-provision-created"), "utf8")).resolves.toBe("false\n");
|
||||
});
|
||||
});
|
||||
|
||||
describe("ensureRuntimeServicesForRun", () => {
|
||||
it("reuses shared runtime services across runs and starts a new service after release", async () => {
|
||||
const workspaceRoot = await fs.mkdtemp(path.join(os.tmpdir(), "paperclip-runtime-workspace-"));
|
||||
const workspace = buildWorkspace(workspaceRoot);
|
||||
const serviceCommand =
|
||||
"node -e \"require('node:http').createServer((req,res)=>res.end('ok')).listen(Number(process.env.PORT), '127.0.0.1')\"";
|
||||
|
||||
const config = {
|
||||
workspaceRuntime: {
|
||||
services: [
|
||||
{
|
||||
name: "web",
|
||||
command: serviceCommand,
|
||||
port: { type: "auto" },
|
||||
readiness: {
|
||||
type: "http",
|
||||
urlTemplate: "http://127.0.0.1:{{port}}",
|
||||
timeoutSec: 10,
|
||||
intervalMs: 100,
|
||||
},
|
||||
expose: {
|
||||
type: "url",
|
||||
urlTemplate: "http://127.0.0.1:{{port}}",
|
||||
},
|
||||
lifecycle: "shared",
|
||||
reuseScope: "project_workspace",
|
||||
stopPolicy: {
|
||||
type: "on_run_finish",
|
||||
},
|
||||
},
|
||||
],
|
||||
},
|
||||
};
|
||||
|
||||
const run1 = "run-1";
|
||||
const run2 = "run-2";
|
||||
leasedRunIds.add(run1);
|
||||
leasedRunIds.add(run2);
|
||||
|
||||
const first = await ensureRuntimeServicesForRun({
|
||||
runId: run1,
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: null,
|
||||
workspace,
|
||||
config,
|
||||
adapterEnv: {},
|
||||
});
|
||||
|
||||
expect(first).toHaveLength(1);
|
||||
expect(first[0]?.reused).toBe(false);
|
||||
expect(first[0]?.url).toMatch(/^http:\/\/127\.0\.0\.1:\d+$/);
|
||||
const response = await fetch(first[0]!.url!);
|
||||
expect(await response.text()).toBe("ok");
|
||||
|
||||
const second = await ensureRuntimeServicesForRun({
|
||||
runId: run2,
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: null,
|
||||
workspace,
|
||||
config,
|
||||
adapterEnv: {},
|
||||
});
|
||||
|
||||
expect(second).toHaveLength(1);
|
||||
expect(second[0]?.reused).toBe(true);
|
||||
expect(second[0]?.id).toBe(first[0]?.id);
|
||||
|
||||
await releaseRuntimeServicesForRun(run1);
|
||||
leasedRunIds.delete(run1);
|
||||
await releaseRuntimeServicesForRun(run2);
|
||||
leasedRunIds.delete(run2);
|
||||
|
||||
const run3 = "run-3";
|
||||
leasedRunIds.add(run3);
|
||||
const third = await ensureRuntimeServicesForRun({
|
||||
runId: run3,
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Codex Coder",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: null,
|
||||
workspace,
|
||||
config,
|
||||
adapterEnv: {},
|
||||
});
|
||||
|
||||
expect(third).toHaveLength(1);
|
||||
expect(third[0]?.reused).toBe(false);
|
||||
expect(third[0]?.id).not.toBe(first[0]?.id);
|
||||
});
|
||||
});
|
||||
|
||||
describe("normalizeAdapterManagedRuntimeServices", () => {
|
||||
it("fills workspace defaults and derives stable ids for adapter-managed services", () => {
|
||||
const workspace = buildWorkspace("/tmp/project");
|
||||
const now = new Date("2026-03-09T12:00:00.000Z");
|
||||
|
||||
const first = normalizeAdapterManagedRuntimeServices({
|
||||
adapterType: "openclaw_gateway",
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Gateway Agent",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-447",
|
||||
title: "Worktree support",
|
||||
},
|
||||
workspace,
|
||||
reports: [
|
||||
{
|
||||
serviceName: "preview",
|
||||
url: "https://preview.example/run-1",
|
||||
providerRef: "sandbox-123",
|
||||
scopeType: "run",
|
||||
},
|
||||
],
|
||||
now,
|
||||
});
|
||||
|
||||
const second = normalizeAdapterManagedRuntimeServices({
|
||||
adapterType: "openclaw_gateway",
|
||||
runId: "run-1",
|
||||
agent: {
|
||||
id: "agent-1",
|
||||
name: "Gateway Agent",
|
||||
companyId: "company-1",
|
||||
},
|
||||
issue: {
|
||||
id: "issue-1",
|
||||
identifier: "PAP-447",
|
||||
title: "Worktree support",
|
||||
},
|
||||
workspace,
|
||||
reports: [
|
||||
{
|
||||
serviceName: "preview",
|
||||
url: "https://preview.example/run-1",
|
||||
providerRef: "sandbox-123",
|
||||
scopeType: "run",
|
||||
},
|
||||
],
|
||||
now,
|
||||
});
|
||||
|
||||
expect(first).toHaveLength(1);
|
||||
expect(first[0]).toMatchObject({
|
||||
companyId: "company-1",
|
||||
projectId: "project-1",
|
||||
projectWorkspaceId: "workspace-1",
|
||||
issueId: "issue-1",
|
||||
serviceName: "preview",
|
||||
provider: "adapter_managed",
|
||||
status: "running",
|
||||
healthStatus: "healthy",
|
||||
startedByRunId: "run-1",
|
||||
});
|
||||
expect(first[0]?.id).toBe(second[0]?.id);
|
||||
});
|
||||
});
|
||||
@@ -17,6 +17,12 @@ import {
|
||||
sessionCodec as cursorSessionCodec,
|
||||
} from "@paperclipai/adapter-cursor-local/server";
|
||||
import { agentConfigurationDoc as cursorAgentConfigurationDoc, models as cursorModels } from "@paperclipai/adapter-cursor-local";
|
||||
import {
|
||||
execute as geminiExecute,
|
||||
testEnvironment as geminiTestEnvironment,
|
||||
sessionCodec as geminiSessionCodec,
|
||||
} from "@paperclipai/adapter-gemini-local/server";
|
||||
import { agentConfigurationDoc as geminiAgentConfigurationDoc, models as geminiModels } from "@paperclipai/adapter-gemini-local";
|
||||
import {
|
||||
execute as openCodeExecute,
|
||||
testEnvironment as openCodeTestEnvironment,
|
||||
@@ -27,14 +33,13 @@ import {
|
||||
agentConfigurationDoc as openCodeAgentConfigurationDoc,
|
||||
} from "@paperclipai/adapter-opencode-local";
|
||||
import {
|
||||
execute as openclawExecute,
|
||||
testEnvironment as openclawTestEnvironment,
|
||||
onHireApproved as openclawOnHireApproved,
|
||||
} from "@paperclipai/adapter-openclaw/server";
|
||||
execute as openclawGatewayExecute,
|
||||
testEnvironment as openclawGatewayTestEnvironment,
|
||||
} from "@paperclipai/adapter-openclaw-gateway/server";
|
||||
import {
|
||||
agentConfigurationDoc as openclawAgentConfigurationDoc,
|
||||
models as openclawModels,
|
||||
} from "@paperclipai/adapter-openclaw";
|
||||
agentConfigurationDoc as openclawGatewayAgentConfigurationDoc,
|
||||
models as openclawGatewayModels,
|
||||
} from "@paperclipai/adapter-openclaw-gateway";
|
||||
import { listCodexModels } from "./codex-models.js";
|
||||
import { listCursorModels } from "./cursor-models.js";
|
||||
import {
|
||||
@@ -46,6 +51,15 @@ import {
|
||||
import {
|
||||
agentConfigurationDoc as piAgentConfigurationDoc,
|
||||
} from "@paperclipai/adapter-pi-local";
|
||||
import {
|
||||
execute as hermesExecute,
|
||||
testEnvironment as hermesTestEnvironment,
|
||||
sessionCodec as hermesSessionCodec,
|
||||
} from "hermes-paperclip-adapter/server";
|
||||
import {
|
||||
agentConfigurationDoc as hermesAgentConfigurationDoc,
|
||||
models as hermesModels,
|
||||
} from "hermes-paperclip-adapter";
|
||||
import { processAdapter } from "./process/index.js";
|
||||
import { httpAdapter } from "./http/index.js";
|
||||
|
||||
@@ -81,14 +95,23 @@ const cursorLocalAdapter: ServerAdapterModule = {
|
||||
agentConfigurationDoc: cursorAgentConfigurationDoc,
|
||||
};
|
||||
|
||||
const openclawAdapter: ServerAdapterModule = {
|
||||
type: "openclaw",
|
||||
execute: openclawExecute,
|
||||
testEnvironment: openclawTestEnvironment,
|
||||
onHireApproved: openclawOnHireApproved,
|
||||
models: openclawModels,
|
||||
const geminiLocalAdapter: ServerAdapterModule = {
|
||||
type: "gemini_local",
|
||||
execute: geminiExecute,
|
||||
testEnvironment: geminiTestEnvironment,
|
||||
sessionCodec: geminiSessionCodec,
|
||||
models: geminiModels,
|
||||
supportsLocalAgentJwt: true,
|
||||
agentConfigurationDoc: geminiAgentConfigurationDoc,
|
||||
};
|
||||
|
||||
const openclawGatewayAdapter: ServerAdapterModule = {
|
||||
type: "openclaw_gateway",
|
||||
execute: openclawGatewayExecute,
|
||||
testEnvironment: openclawGatewayTestEnvironment,
|
||||
models: openclawGatewayModels,
|
||||
supportsLocalAgentJwt: false,
|
||||
agentConfigurationDoc: openclawAgentConfigurationDoc,
|
||||
agentConfigurationDoc: openclawGatewayAgentConfigurationDoc,
|
||||
};
|
||||
|
||||
const openCodeLocalAdapter: ServerAdapterModule = {
|
||||
@@ -113,8 +136,29 @@ const piLocalAdapter: ServerAdapterModule = {
|
||||
agentConfigurationDoc: piAgentConfigurationDoc,
|
||||
};
|
||||
|
||||
const hermesLocalAdapter: ServerAdapterModule = {
|
||||
type: "hermes_local",
|
||||
execute: hermesExecute,
|
||||
testEnvironment: hermesTestEnvironment,
|
||||
sessionCodec: hermesSessionCodec,
|
||||
models: hermesModels,
|
||||
supportsLocalAgentJwt: true,
|
||||
agentConfigurationDoc: hermesAgentConfigurationDoc,
|
||||
};
|
||||
|
||||
const adaptersByType = new Map<string, ServerAdapterModule>(
|
||||
[claudeLocalAdapter, codexLocalAdapter, openCodeLocalAdapter, piLocalAdapter, cursorLocalAdapter, openclawAdapter, processAdapter, httpAdapter].map((a) => [a.type, a]),
|
||||
[
|
||||
claudeLocalAdapter,
|
||||
codexLocalAdapter,
|
||||
openCodeLocalAdapter,
|
||||
piLocalAdapter,
|
||||
cursorLocalAdapter,
|
||||
geminiLocalAdapter,
|
||||
openclawGatewayAdapter,
|
||||
hermesLocalAdapter,
|
||||
processAdapter,
|
||||
httpAdapter,
|
||||
].map((a) => [a.type, a]),
|
||||
);
|
||||
|
||||
export function getServerAdapter(type: string): ServerAdapterModule {
|
||||
|
||||
@@ -24,6 +24,24 @@ import { sidebarBadgeRoutes } from "./routes/sidebar-badges.js";
|
||||
import { llmRoutes } from "./routes/llms.js";
|
||||
import { assetRoutes } from "./routes/assets.js";
|
||||
import { accessRoutes } from "./routes/access.js";
|
||||
import { pluginRoutes } from "./routes/plugins.js";
|
||||
import { pluginUiStaticRoutes } from "./routes/plugin-ui-static.js";
|
||||
import { applyUiBranding } from "./ui-branding.js";
|
||||
import { logger } from "./middleware/logger.js";
|
||||
import { DEFAULT_LOCAL_PLUGIN_DIR, pluginLoader } from "./services/plugin-loader.js";
|
||||
import { createPluginWorkerManager } from "./services/plugin-worker-manager.js";
|
||||
import { createPluginJobScheduler } from "./services/plugin-job-scheduler.js";
|
||||
import { pluginJobStore } from "./services/plugin-job-store.js";
|
||||
import { createPluginToolDispatcher } from "./services/plugin-tool-dispatcher.js";
|
||||
import { pluginLifecycleManager } from "./services/plugin-lifecycle.js";
|
||||
import { createPluginJobCoordinator } from "./services/plugin-job-coordinator.js";
|
||||
import { buildHostServices, flushPluginLogBuffer } from "./services/plugin-host-services.js";
|
||||
import { createPluginEventBus } from "./services/plugin-event-bus.js";
|
||||
import { setPluginEventBus } from "./services/activity-log.js";
|
||||
import { createPluginDevWatcher } from "./services/plugin-dev-watcher.js";
|
||||
import { createPluginHostServiceCleanup } from "./services/plugin-host-service-cleanup.js";
|
||||
import { pluginRegistryService } from "./services/plugin-registry.js";
|
||||
import { createHostClientHandlers } from "@paperclipai/plugin-sdk";
|
||||
import type { BetterAuthSessionResult } from "./auth/better-auth.js";
|
||||
|
||||
type UiMode = "none" | "static" | "vite-dev";
|
||||
@@ -32,6 +50,7 @@ export async function createApp(
|
||||
db: Db,
|
||||
opts: {
|
||||
uiMode: UiMode;
|
||||
serverPort: number;
|
||||
storageService: StorageService;
|
||||
deploymentMode: DeploymentMode;
|
||||
deploymentExposure: DeploymentExposure;
|
||||
@@ -39,13 +58,20 @@ export async function createApp(
|
||||
bindHost: string;
|
||||
authReady: boolean;
|
||||
companyDeletionEnabled: boolean;
|
||||
instanceId?: string;
|
||||
hostVersion?: string;
|
||||
localPluginDir?: string;
|
||||
betterAuthHandler?: express.RequestHandler;
|
||||
resolveSession?: (req: ExpressRequest) => Promise<BetterAuthSessionResult | null>;
|
||||
},
|
||||
) {
|
||||
const app = express();
|
||||
|
||||
app.use(express.json());
|
||||
app.use(express.json({
|
||||
verify: (req, _res, buf) => {
|
||||
(req as unknown as { rawBody: Buffer }).rawBody = buf;
|
||||
},
|
||||
}));
|
||||
app.use(httpLogger);
|
||||
const privateHostnameGateEnabled =
|
||||
opts.deploymentMode === "authenticated" && opts.deploymentExposure === "private";
|
||||
@@ -112,6 +138,69 @@ export async function createApp(
|
||||
api.use(activityRoutes(db));
|
||||
api.use(dashboardRoutes(db));
|
||||
api.use(sidebarBadgeRoutes(db));
|
||||
const hostServicesDisposers = new Map<string, () => void>();
|
||||
const workerManager = createPluginWorkerManager();
|
||||
const pluginRegistry = pluginRegistryService(db);
|
||||
const eventBus = createPluginEventBus();
|
||||
setPluginEventBus(eventBus);
|
||||
const jobStore = pluginJobStore(db);
|
||||
const lifecycle = pluginLifecycleManager(db, { workerManager });
|
||||
const scheduler = createPluginJobScheduler({
|
||||
db,
|
||||
jobStore,
|
||||
workerManager,
|
||||
});
|
||||
const toolDispatcher = createPluginToolDispatcher({
|
||||
workerManager,
|
||||
lifecycleManager: lifecycle,
|
||||
db,
|
||||
});
|
||||
const jobCoordinator = createPluginJobCoordinator({
|
||||
db,
|
||||
lifecycle,
|
||||
scheduler,
|
||||
jobStore,
|
||||
});
|
||||
const hostServiceCleanup = createPluginHostServiceCleanup(lifecycle, hostServicesDisposers);
|
||||
const loader = pluginLoader(
|
||||
db,
|
||||
{ localPluginDir: opts.localPluginDir ?? DEFAULT_LOCAL_PLUGIN_DIR },
|
||||
{
|
||||
workerManager,
|
||||
eventBus,
|
||||
jobScheduler: scheduler,
|
||||
jobStore,
|
||||
toolDispatcher,
|
||||
lifecycleManager: lifecycle,
|
||||
instanceInfo: {
|
||||
instanceId: opts.instanceId ?? "default",
|
||||
hostVersion: opts.hostVersion ?? "0.0.0",
|
||||
},
|
||||
buildHostHandlers: (pluginId, manifest) => {
|
||||
const notifyWorker = (method: string, params: unknown) => {
|
||||
const handle = workerManager.getWorker(pluginId);
|
||||
if (handle) handle.notify(method, params);
|
||||
};
|
||||
const services = buildHostServices(db, pluginId, manifest.id, eventBus, notifyWorker);
|
||||
hostServicesDisposers.set(pluginId, () => services.dispose());
|
||||
return createHostClientHandlers({
|
||||
pluginId,
|
||||
capabilities: manifest.capabilities,
|
||||
services,
|
||||
});
|
||||
},
|
||||
},
|
||||
);
|
||||
api.use(
|
||||
pluginRoutes(
|
||||
db,
|
||||
loader,
|
||||
{ scheduler, jobStore },
|
||||
{ workerManager },
|
||||
{ toolDispatcher },
|
||||
{ workerManager },
|
||||
),
|
||||
);
|
||||
api.use(
|
||||
accessRoutes(db, {
|
||||
deploymentMode: opts.deploymentMode,
|
||||
@@ -121,6 +210,12 @@ export async function createApp(
|
||||
}),
|
||||
);
|
||||
app.use("/api", api);
|
||||
app.use("/api", (_req, res) => {
|
||||
res.status(404).json({ error: "API route not found" });
|
||||
});
|
||||
app.use(pluginUiStaticRoutes(db, {
|
||||
localPluginDir: opts.localPluginDir ?? DEFAULT_LOCAL_PLUGIN_DIR,
|
||||
}));
|
||||
|
||||
const __dirname = path.dirname(fileURLToPath(import.meta.url));
|
||||
if (opts.uiMode === "static") {
|
||||
@@ -131,9 +226,10 @@ export async function createApp(
|
||||
];
|
||||
const uiDist = candidates.find((p) => fs.existsSync(path.join(p, "index.html")));
|
||||
if (uiDist) {
|
||||
const indexHtml = applyUiBranding(fs.readFileSync(path.join(uiDist, "index.html"), "utf-8"));
|
||||
app.use(express.static(uiDist));
|
||||
app.get(/.*/, (_req, res) => {
|
||||
res.sendFile("index.html", { root: uiDist });
|
||||
res.status(200).set("Content-Type", "text/html").end(indexHtml);
|
||||
});
|
||||
} else {
|
||||
console.warn("[paperclip] UI dist not found; running in API-only mode");
|
||||
@@ -142,12 +238,18 @@ export async function createApp(
|
||||
|
||||
if (opts.uiMode === "vite-dev") {
|
||||
const uiRoot = path.resolve(__dirname, "../../ui");
|
||||
const hmrPort = opts.serverPort + 10000;
|
||||
const { createServer: createViteServer } = await import("vite");
|
||||
const vite = await createViteServer({
|
||||
root: uiRoot,
|
||||
appType: "spa",
|
||||
appType: "custom",
|
||||
server: {
|
||||
middlewareMode: true,
|
||||
hmr: {
|
||||
host: opts.bindHost,
|
||||
port: hmrPort,
|
||||
clientPort: hmrPort,
|
||||
},
|
||||
allowedHosts: privateHostnameGateEnabled ? Array.from(privateHostnameAllowSet) : undefined,
|
||||
},
|
||||
});
|
||||
@@ -157,7 +259,7 @@ export async function createApp(
|
||||
try {
|
||||
const templatePath = path.resolve(uiRoot, "index.html");
|
||||
const template = fs.readFileSync(templatePath, "utf-8");
|
||||
const html = await vite.transformIndexHtml(req.originalUrl, template);
|
||||
const html = applyUiBranding(await vite.transformIndexHtml(req.originalUrl, template));
|
||||
res.status(200).set({ "Content-Type": "text/html" }).end(html);
|
||||
} catch (err) {
|
||||
next(err);
|
||||
@@ -167,5 +269,35 @@ export async function createApp(
|
||||
|
||||
app.use(errorHandler);
|
||||
|
||||
jobCoordinator.start();
|
||||
scheduler.start();
|
||||
void toolDispatcher.initialize().catch((err) => {
|
||||
logger.error({ err }, "Failed to initialize plugin tool dispatcher");
|
||||
});
|
||||
const devWatcher = opts.uiMode === "vite-dev"
|
||||
? createPluginDevWatcher(
|
||||
lifecycle,
|
||||
async (pluginId) => (await pluginRegistry.getById(pluginId))?.packagePath ?? null,
|
||||
)
|
||||
: null;
|
||||
void loader.loadAll().then((result) => {
|
||||
if (!result) return;
|
||||
for (const loaded of result.results) {
|
||||
if (devWatcher && loaded.success && loaded.plugin.packagePath) {
|
||||
devWatcher.watch(loaded.plugin.id, loaded.plugin.packagePath);
|
||||
}
|
||||
}
|
||||
}).catch((err) => {
|
||||
logger.error({ err }, "Failed to load ready plugins on startup");
|
||||
});
|
||||
process.once("exit", () => {
|
||||
devWatcher?.close();
|
||||
hostServiceCleanup.disposeAll();
|
||||
hostServiceCleanup.teardown();
|
||||
});
|
||||
process.once("beforeExit", () => {
|
||||
void flushPluginLogBuffer();
|
||||
});
|
||||
|
||||
return app;
|
||||
}
|
||||
|
||||
74
server/src/attachment-types.ts
Normal file
74
server/src/attachment-types.ts
Normal file
@@ -0,0 +1,74 @@
|
||||
/**
|
||||
* Shared attachment content-type configuration.
|
||||
*
|
||||
* By default only image types are allowed. Set the
|
||||
* `PAPERCLIP_ALLOWED_ATTACHMENT_TYPES` environment variable to a
|
||||
* comma-separated list of MIME types or wildcard patterns to expand the
|
||||
* allowed set.
|
||||
*
|
||||
* Examples:
|
||||
* PAPERCLIP_ALLOWED_ATTACHMENT_TYPES=image/*,application/pdf
|
||||
* PAPERCLIP_ALLOWED_ATTACHMENT_TYPES=image/*,application/pdf,text/*
|
||||
*
|
||||
* Supported pattern syntax:
|
||||
* - Exact types: "application/pdf"
|
||||
* - Wildcards: "image/*" or "application/vnd.openxmlformats-officedocument.*"
|
||||
*/
|
||||
|
||||
export const DEFAULT_ALLOWED_TYPES: readonly string[] = [
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/jpg",
|
||||
"image/webp",
|
||||
"image/gif",
|
||||
"application/pdf",
|
||||
"text/markdown",
|
||||
"text/plain",
|
||||
"application/json",
|
||||
"text/csv",
|
||||
"text/html",
|
||||
];
|
||||
|
||||
/**
|
||||
* Parse a comma-separated list of MIME type patterns into a normalised array.
|
||||
* Returns the default image-only list when the input is empty or undefined.
|
||||
*/
|
||||
export function parseAllowedTypes(raw: string | undefined): string[] {
|
||||
if (!raw) return [...DEFAULT_ALLOWED_TYPES];
|
||||
const parsed = raw
|
||||
.split(",")
|
||||
.map((s) => s.trim().toLowerCase())
|
||||
.filter((s) => s.length > 0);
|
||||
return parsed.length > 0 ? parsed : [...DEFAULT_ALLOWED_TYPES];
|
||||
}
|
||||
|
||||
/**
|
||||
* Check whether `contentType` matches any entry in `allowedPatterns`.
|
||||
*
|
||||
* Supports exact matches ("application/pdf") and wildcard / prefix
|
||||
* patterns ("image/*", "application/vnd.openxmlformats-officedocument.*").
|
||||
*/
|
||||
export function matchesContentType(contentType: string, allowedPatterns: string[]): boolean {
|
||||
const ct = contentType.toLowerCase();
|
||||
return allowedPatterns.some((pattern) => {
|
||||
if (pattern === "*") return true;
|
||||
if (pattern.endsWith("/*") || pattern.endsWith(".*")) {
|
||||
return ct.startsWith(pattern.slice(0, -1));
|
||||
}
|
||||
return ct === pattern;
|
||||
});
|
||||
}
|
||||
|
||||
// ---------- Module-level singletons read once at startup ----------
|
||||
|
||||
const allowedPatterns: string[] = parseAllowedTypes(
|
||||
process.env.PAPERCLIP_ALLOWED_ATTACHMENT_TYPES,
|
||||
);
|
||||
|
||||
/** Convenience wrapper using the process-level allowed list. */
|
||||
export function isAllowedContentType(contentType: string): boolean {
|
||||
return matchesContentType(contentType, allowedPatterns);
|
||||
}
|
||||
|
||||
export const MAX_ATTACHMENT_BYTES =
|
||||
Number(process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES) || 10 * 1024 * 1024;
|
||||
@@ -70,6 +70,9 @@ export function createBetterAuthInstance(db: Db, config: Config, trustedOrigins?
|
||||
const secret = process.env.BETTER_AUTH_SECRET ?? process.env.PAPERCLIP_AGENT_JWT_SECRET ?? "paperclip-dev-secret";
|
||||
const effectiveTrustedOrigins = trustedOrigins ?? deriveAuthTrustedOrigins(config);
|
||||
|
||||
const publicUrl = process.env.PAPERCLIP_PUBLIC_URL ?? baseUrl;
|
||||
const isHttpOnly = publicUrl ? publicUrl.startsWith("http://") : false;
|
||||
|
||||
const authConfig = {
|
||||
baseURL: baseUrl,
|
||||
secret,
|
||||
@@ -86,7 +89,9 @@ export function createBetterAuthInstance(db: Db, config: Config, trustedOrigins?
|
||||
emailAndPassword: {
|
||||
enabled: true,
|
||||
requireEmailVerification: false,
|
||||
disableSignUp: config.authDisableSignUp,
|
||||
},
|
||||
...(isHttpOnly ? { advanced: { useSecureCookies: false } } : {}),
|
||||
};
|
||||
|
||||
if (!baseUrl) {
|
||||
|
||||
@@ -1,5 +1,6 @@
|
||||
import { readConfigFile } from "./config-file.js";
|
||||
import { existsSync } from "node:fs";
|
||||
import { existsSync, realpathSync } from "node:fs";
|
||||
import { resolve } from "node:path";
|
||||
import { config as loadDotenv } from "dotenv";
|
||||
import { resolvePaperclipEnvPath } from "./paths.js";
|
||||
import {
|
||||
@@ -27,6 +28,14 @@ if (existsSync(PAPERCLIP_ENV_FILE_PATH)) {
|
||||
loadDotenv({ path: PAPERCLIP_ENV_FILE_PATH, override: false, quiet: true });
|
||||
}
|
||||
|
||||
const CWD_ENV_PATH = resolve(process.cwd(), ".env");
|
||||
const isSameFile = existsSync(CWD_ENV_PATH) && existsSync(PAPERCLIP_ENV_FILE_PATH)
|
||||
? realpathSync(CWD_ENV_PATH) === realpathSync(PAPERCLIP_ENV_FILE_PATH)
|
||||
: CWD_ENV_PATH === PAPERCLIP_ENV_FILE_PATH;
|
||||
if (!isSameFile && existsSync(CWD_ENV_PATH)) {
|
||||
loadDotenv({ path: CWD_ENV_PATH, override: false, quiet: true });
|
||||
}
|
||||
|
||||
type DatabaseMode = "embedded-postgres" | "postgres";
|
||||
|
||||
export interface Config {
|
||||
@@ -37,6 +46,7 @@ export interface Config {
|
||||
allowedHostnames: string[];
|
||||
authBaseUrlMode: AuthBaseUrlMode;
|
||||
authPublicBaseUrl: string | undefined;
|
||||
authDisableSignUp: boolean;
|
||||
databaseMode: DatabaseMode;
|
||||
databaseUrl: string | undefined;
|
||||
embeddedPostgresDataDir: string;
|
||||
@@ -142,6 +152,11 @@ export function loadConfig(): Config {
|
||||
authBaseUrlModeFromEnv ??
|
||||
fileConfig?.auth?.baseUrlMode ??
|
||||
(authPublicBaseUrl ? "explicit" : "auto");
|
||||
const disableSignUpFromEnv = process.env.PAPERCLIP_AUTH_DISABLE_SIGN_UP;
|
||||
const authDisableSignUp: boolean =
|
||||
disableSignUpFromEnv !== undefined
|
||||
? disableSignUpFromEnv === "true"
|
||||
: (fileConfig?.auth?.disableSignUp ?? false);
|
||||
const allowedHostnamesFromEnvRaw = process.env.PAPERCLIP_ALLOWED_HOSTNAMES;
|
||||
const allowedHostnamesFromEnv = allowedHostnamesFromEnvRaw
|
||||
? allowedHostnamesFromEnvRaw
|
||||
@@ -203,6 +218,7 @@ export function loadConfig(): Config {
|
||||
allowedHostnames,
|
||||
authBaseUrlMode,
|
||||
authPublicBaseUrl,
|
||||
authDisableSignUp,
|
||||
databaseMode: fileDatabaseMode,
|
||||
databaseUrl: process.env.DATABASE_URL ?? fileDbUrl,
|
||||
embeddedPostgresDataDir: resolveHomeAwarePath(
|
||||
|
||||
1131
server/src/index.ts
1131
server/src/index.ts
File diff suppressed because it is too large
Load Diff
138
server/src/log-redaction.ts
Normal file
138
server/src/log-redaction.ts
Normal file
@@ -0,0 +1,138 @@
|
||||
import os from "node:os";
|
||||
|
||||
export const CURRENT_USER_REDACTION_TOKEN = "[]";
|
||||
|
||||
interface CurrentUserRedactionOptions {
|
||||
replacement?: string;
|
||||
userNames?: string[];
|
||||
homeDirs?: string[];
|
||||
}
|
||||
|
||||
type CurrentUserCandidates = {
|
||||
userNames: string[];
|
||||
homeDirs: string[];
|
||||
replacement: string;
|
||||
};
|
||||
|
||||
function isPlainObject(value: unknown): value is Record<string, unknown> {
|
||||
if (typeof value !== "object" || value === null || Array.isArray(value)) return false;
|
||||
const proto = Object.getPrototypeOf(value);
|
||||
return proto === Object.prototype || proto === null;
|
||||
}
|
||||
|
||||
function escapeRegExp(value: string) {
|
||||
return value.replace(/[.*+?^${}()|[\]\\]/g, "\\$&");
|
||||
}
|
||||
|
||||
function uniqueNonEmpty(values: Array<string | null | undefined>) {
|
||||
return Array.from(new Set(values.map((value) => value?.trim() ?? "").filter(Boolean)));
|
||||
}
|
||||
|
||||
function splitPathSegments(value: string) {
|
||||
return value.replace(/[\\/]+$/, "").split(/[\\/]+/).filter(Boolean);
|
||||
}
|
||||
|
||||
function replaceLastPathSegment(pathValue: string, replacement: string) {
|
||||
const normalized = pathValue.replace(/[\\/]+$/, "");
|
||||
const lastSeparator = Math.max(normalized.lastIndexOf("/"), normalized.lastIndexOf("\\"));
|
||||
if (lastSeparator < 0) return replacement;
|
||||
return `${normalized.slice(0, lastSeparator + 1)}${replacement}`;
|
||||
}
|
||||
|
||||
function defaultUserNames() {
|
||||
const candidates = [
|
||||
process.env.USER,
|
||||
process.env.LOGNAME,
|
||||
process.env.USERNAME,
|
||||
];
|
||||
|
||||
try {
|
||||
candidates.push(os.userInfo().username);
|
||||
} catch {
|
||||
// Some environments do not expose userInfo; env vars are enough fallback.
|
||||
}
|
||||
|
||||
return uniqueNonEmpty(candidates);
|
||||
}
|
||||
|
||||
function defaultHomeDirs(userNames: string[]) {
|
||||
const candidates: Array<string | null | undefined> = [
|
||||
process.env.HOME,
|
||||
process.env.USERPROFILE,
|
||||
];
|
||||
|
||||
try {
|
||||
candidates.push(os.homedir());
|
||||
} catch {
|
||||
// Ignore and fall back to env hints below.
|
||||
}
|
||||
|
||||
for (const userName of userNames) {
|
||||
candidates.push(`/Users/${userName}`);
|
||||
candidates.push(`/home/${userName}`);
|
||||
candidates.push(`C:\\Users\\${userName}`);
|
||||
}
|
||||
|
||||
return uniqueNonEmpty(candidates);
|
||||
}
|
||||
|
||||
let cachedCurrentUserCandidates: CurrentUserCandidates | null = null;
|
||||
|
||||
function getDefaultCurrentUserCandidates(): CurrentUserCandidates {
|
||||
if (cachedCurrentUserCandidates) return cachedCurrentUserCandidates;
|
||||
const userNames = defaultUserNames();
|
||||
cachedCurrentUserCandidates = {
|
||||
userNames,
|
||||
homeDirs: defaultHomeDirs(userNames),
|
||||
replacement: CURRENT_USER_REDACTION_TOKEN,
|
||||
};
|
||||
return cachedCurrentUserCandidates;
|
||||
}
|
||||
|
||||
function resolveCurrentUserCandidates(opts?: CurrentUserRedactionOptions) {
|
||||
const defaults = getDefaultCurrentUserCandidates();
|
||||
const userNames = uniqueNonEmpty(opts?.userNames ?? defaults.userNames);
|
||||
const homeDirs = uniqueNonEmpty(opts?.homeDirs ?? defaults.homeDirs);
|
||||
const replacement = opts?.replacement?.trim() || defaults.replacement;
|
||||
return { userNames, homeDirs, replacement };
|
||||
}
|
||||
|
||||
export function redactCurrentUserText(input: string, opts?: CurrentUserRedactionOptions) {
|
||||
if (!input) return input;
|
||||
|
||||
const { userNames, homeDirs, replacement } = resolveCurrentUserCandidates(opts);
|
||||
let result = input;
|
||||
|
||||
for (const homeDir of [...homeDirs].sort((a, b) => b.length - a.length)) {
|
||||
const lastSegment = splitPathSegments(homeDir).pop() ?? "";
|
||||
const replacementDir = userNames.includes(lastSegment)
|
||||
? replaceLastPathSegment(homeDir, replacement)
|
||||
: replacement;
|
||||
result = result.split(homeDir).join(replacementDir);
|
||||
}
|
||||
|
||||
for (const userName of [...userNames].sort((a, b) => b.length - a.length)) {
|
||||
const pattern = new RegExp(`(?<![A-Za-z0-9._-])${escapeRegExp(userName)}(?![A-Za-z0-9._-])`, "g");
|
||||
result = result.replace(pattern, replacement);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
export function redactCurrentUserValue<T>(value: T, opts?: CurrentUserRedactionOptions): T {
|
||||
if (typeof value === "string") {
|
||||
return redactCurrentUserText(value, opts) as T;
|
||||
}
|
||||
if (Array.isArray(value)) {
|
||||
return value.map((entry) => redactCurrentUserValue(entry, opts)) as T;
|
||||
}
|
||||
if (!isPlainObject(value)) {
|
||||
return value;
|
||||
}
|
||||
|
||||
const redacted: Record<string, unknown> = {};
|
||||
for (const [key, entry] of Object.entries(value)) {
|
||||
redacted[key] = redactCurrentUserValue(entry, opts);
|
||||
}
|
||||
return redacted as T;
|
||||
}
|
||||
@@ -11,6 +11,25 @@ export interface ErrorContext {
|
||||
reqQuery?: unknown;
|
||||
}
|
||||
|
||||
function attachErrorContext(
|
||||
req: Request,
|
||||
res: Response,
|
||||
payload: ErrorContext["error"],
|
||||
rawError?: Error,
|
||||
) {
|
||||
(res as any).__errorContext = {
|
||||
error: payload,
|
||||
method: req.method,
|
||||
url: req.originalUrl,
|
||||
reqBody: req.body,
|
||||
reqParams: req.params,
|
||||
reqQuery: req.query,
|
||||
} satisfies ErrorContext;
|
||||
if (rawError) {
|
||||
(res as any).err = rawError;
|
||||
}
|
||||
}
|
||||
|
||||
export function errorHandler(
|
||||
err: unknown,
|
||||
req: Request,
|
||||
@@ -19,14 +38,12 @@ export function errorHandler(
|
||||
) {
|
||||
if (err instanceof HttpError) {
|
||||
if (err.status >= 500) {
|
||||
(res as any).__errorContext = {
|
||||
error: { message: err.message, stack: err.stack, name: err.name, details: err.details },
|
||||
method: req.method,
|
||||
url: req.originalUrl,
|
||||
reqBody: req.body,
|
||||
reqParams: req.params,
|
||||
reqQuery: req.query,
|
||||
} satisfies ErrorContext;
|
||||
attachErrorContext(
|
||||
req,
|
||||
res,
|
||||
{ message: err.message, stack: err.stack, name: err.name, details: err.details },
|
||||
err,
|
||||
);
|
||||
}
|
||||
res.status(err.status).json({
|
||||
error: err.message,
|
||||
@@ -40,16 +57,15 @@ export function errorHandler(
|
||||
return;
|
||||
}
|
||||
|
||||
(res as any).__errorContext = {
|
||||
error: err instanceof Error
|
||||
const rootError = err instanceof Error ? err : new Error(String(err));
|
||||
attachErrorContext(
|
||||
req,
|
||||
res,
|
||||
err instanceof Error
|
||||
? { message: err.message, stack: err.stack, name: err.name }
|
||||
: { message: String(err), raw: err },
|
||||
method: req.method,
|
||||
url: req.originalUrl,
|
||||
reqBody: req.body,
|
||||
reqParams: req.params,
|
||||
reqQuery: req.query,
|
||||
} satisfies ErrorContext;
|
||||
: { message: String(err), raw: err, stack: rootError.stack, name: rootError.name },
|
||||
rootError,
|
||||
);
|
||||
|
||||
res.status(500).json({ error: "Internal server error" });
|
||||
}
|
||||
|
||||
@@ -23,6 +23,7 @@ const logFile = path.join(logDir, "server.log");
|
||||
const sharedOpts = {
|
||||
translateTime: "HH:MM:ss",
|
||||
ignore: "pid,hostname",
|
||||
singleLine: true,
|
||||
};
|
||||
|
||||
export const logger = pino({
|
||||
@@ -62,7 +63,7 @@ export const httpLogger = pinoHttp({
|
||||
const ctx = (res as any).__errorContext;
|
||||
if (ctx) {
|
||||
return {
|
||||
err: ctx.error,
|
||||
errorContext: ctx.error,
|
||||
reqBody: ctx.reqBody,
|
||||
reqParams: ctx.reqParams,
|
||||
reqQuery: ctx.reqQuery,
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -22,6 +22,13 @@ export function activityRoutes(db: Db) {
|
||||
const svc = activityService(db);
|
||||
const issueSvc = issueService(db);
|
||||
|
||||
async function resolveIssueByRef(rawId: string) {
|
||||
if (/^[A-Z]+-\d+$/i.test(rawId)) {
|
||||
return issueSvc.getByIdentifier(rawId);
|
||||
}
|
||||
return issueSvc.getById(rawId);
|
||||
}
|
||||
|
||||
router.get("/companies/:companyId/activity", async (req, res) => {
|
||||
const companyId = req.params.companyId as string;
|
||||
assertCompanyAccess(req, companyId);
|
||||
@@ -47,42 +54,27 @@ export function activityRoutes(db: Db) {
|
||||
res.status(201).json(event);
|
||||
});
|
||||
|
||||
// Resolve issue identifiers (e.g. "PAP-39") to UUIDs
|
||||
router.param("id", async (req, res, next, rawId) => {
|
||||
try {
|
||||
if (/^[A-Z]+-\d+$/i.test(rawId)) {
|
||||
const issue = await issueSvc.getByIdentifier(rawId);
|
||||
if (issue) {
|
||||
req.params.id = issue.id;
|
||||
}
|
||||
}
|
||||
next();
|
||||
} catch (err) {
|
||||
next(err);
|
||||
}
|
||||
});
|
||||
|
||||
router.get("/issues/:id/activity", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await issueSvc.getById(id);
|
||||
const rawId = req.params.id as string;
|
||||
const issue = await resolveIssueByRef(rawId);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const result = await svc.forIssue(id);
|
||||
const result = await svc.forIssue(issue.id);
|
||||
res.json(result);
|
||||
});
|
||||
|
||||
router.get("/issues/:id/runs", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await issueSvc.getById(id);
|
||||
const rawId = req.params.id as string;
|
||||
const issue = await resolveIssueByRef(rawId);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const result = await svc.runsForIssue(issue.companyId, id);
|
||||
const result = await svc.runsForIssue(issue.companyId, issue.id);
|
||||
res.json(result);
|
||||
});
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
import { Router, type Request } from "express";
|
||||
import { randomUUID } from "node:crypto";
|
||||
import { generateKeyPairSync, randomUUID } from "node:crypto";
|
||||
import path from "node:path";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { agents as agentsTable, companies, heartbeatRuns } from "@paperclipai/db";
|
||||
@@ -8,9 +8,11 @@ import {
|
||||
createAgentKeySchema,
|
||||
createAgentHireSchema,
|
||||
createAgentSchema,
|
||||
deriveAgentUrlKey,
|
||||
isUuidLike,
|
||||
resetAgentSessionSchema,
|
||||
testAdapterEnvironmentSchema,
|
||||
type InstanceSchedulerHeartbeatAgent,
|
||||
updateAgentPermissionsSchema,
|
||||
updateAgentInstructionsPathSchema,
|
||||
wakeAgentSchema,
|
||||
@@ -31,18 +33,21 @@ import { conflict, forbidden, notFound, unprocessable } from "../errors.js";
|
||||
import { assertBoard, assertCompanyAccess, getActorInfo } from "./authz.js";
|
||||
import { findServerAdapter, listAdapterModels } from "../adapters/index.js";
|
||||
import { redactEventPayload } from "../redaction.js";
|
||||
import { redactCurrentUserValue } from "../log-redaction.js";
|
||||
import { runClaudeLogin } from "@paperclipai/adapter-claude-local/server";
|
||||
import {
|
||||
DEFAULT_CODEX_LOCAL_BYPASS_APPROVALS_AND_SANDBOX,
|
||||
DEFAULT_CODEX_LOCAL_MODEL,
|
||||
} from "@paperclipai/adapter-codex-local";
|
||||
import { DEFAULT_CURSOR_LOCAL_MODEL } from "@paperclipai/adapter-cursor-local";
|
||||
import { DEFAULT_GEMINI_LOCAL_MODEL } from "@paperclipai/adapter-gemini-local";
|
||||
import { ensureOpenCodeModelConfiguredAndAvailable } from "@paperclipai/adapter-opencode-local/server";
|
||||
|
||||
export function agentRoutes(db: Db) {
|
||||
const DEFAULT_INSTRUCTIONS_PATH_KEYS: Record<string, string> = {
|
||||
claude_local: "instructionsFilePath",
|
||||
codex_local: "instructionsFilePath",
|
||||
gemini_local: "instructionsFilePath",
|
||||
opencode_local: "instructionsFilePath",
|
||||
cursor: "instructionsFilePath",
|
||||
};
|
||||
@@ -181,6 +186,55 @@ export function agentRoutes(db: Db) {
|
||||
return trimmed.length > 0 ? trimmed : null;
|
||||
}
|
||||
|
||||
function parseBooleanLike(value: unknown): boolean | null {
|
||||
if (typeof value === "boolean") return value;
|
||||
if (typeof value === "number") {
|
||||
if (value === 1) return true;
|
||||
if (value === 0) return false;
|
||||
return null;
|
||||
}
|
||||
if (typeof value !== "string") return null;
|
||||
const normalized = value.trim().toLowerCase();
|
||||
if (normalized === "true" || normalized === "1" || normalized === "yes" || normalized === "on") {
|
||||
return true;
|
||||
}
|
||||
if (normalized === "false" || normalized === "0" || normalized === "no" || normalized === "off") {
|
||||
return false;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function parseNumberLike(value: unknown): number | null {
|
||||
if (typeof value === "number" && Number.isFinite(value)) return value;
|
||||
if (typeof value !== "string") return null;
|
||||
const parsed = Number(value.trim());
|
||||
return Number.isFinite(parsed) ? parsed : null;
|
||||
}
|
||||
|
||||
function parseSchedulerHeartbeatPolicy(runtimeConfig: unknown) {
|
||||
const heartbeat = asRecord(asRecord(runtimeConfig)?.heartbeat) ?? {};
|
||||
return {
|
||||
enabled: parseBooleanLike(heartbeat.enabled) ?? true,
|
||||
intervalSec: Math.max(0, parseNumberLike(heartbeat.intervalSec) ?? 0),
|
||||
};
|
||||
}
|
||||
|
||||
function generateEd25519PrivateKeyPem(): string {
|
||||
const { privateKey } = generateKeyPairSync("ed25519");
|
||||
return privateKey.export({ type: "pkcs8", format: "pem" }).toString();
|
||||
}
|
||||
|
||||
function ensureGatewayDeviceKey(
|
||||
adapterType: string | null | undefined,
|
||||
adapterConfig: Record<string, unknown>,
|
||||
): Record<string, unknown> {
|
||||
if (adapterType !== "openclaw_gateway") return adapterConfig;
|
||||
const disableDeviceAuth = parseBooleanLike(adapterConfig.disableDeviceAuth) === true;
|
||||
if (disableDeviceAuth) return adapterConfig;
|
||||
if (asNonEmptyString(adapterConfig.devicePrivateKeyPem)) return adapterConfig;
|
||||
return { ...adapterConfig, devicePrivateKeyPem: generateEd25519PrivateKeyPem() };
|
||||
}
|
||||
|
||||
function applyCreateDefaultsByAdapterType(
|
||||
adapterType: string | null | undefined,
|
||||
adapterConfig: Record<string, unknown>,
|
||||
@@ -196,13 +250,17 @@ export function agentRoutes(db: Db) {
|
||||
if (!hasBypassFlag) {
|
||||
next.dangerouslyBypassApprovalsAndSandbox = DEFAULT_CODEX_LOCAL_BYPASS_APPROVALS_AND_SANDBOX;
|
||||
}
|
||||
return next;
|
||||
return ensureGatewayDeviceKey(adapterType, next);
|
||||
}
|
||||
if (adapterType === "gemini_local" && !asNonEmptyString(next.model)) {
|
||||
next.model = DEFAULT_GEMINI_LOCAL_MODEL;
|
||||
return ensureGatewayDeviceKey(adapterType, next);
|
||||
}
|
||||
// OpenCode requires explicit model selection — no default
|
||||
if (adapterType === "cursor" && !asNonEmptyString(next.model)) {
|
||||
next.model = DEFAULT_CURSOR_LOCAL_MODEL;
|
||||
}
|
||||
return next;
|
||||
return ensureGatewayDeviceKey(adapterType, next);
|
||||
}
|
||||
|
||||
async function assertAdapterConfigConstraints(
|
||||
@@ -211,7 +269,7 @@ export function agentRoutes(db: Db) {
|
||||
adapterConfig: Record<string, unknown>,
|
||||
) {
|
||||
if (adapterType !== "opencode_local") return;
|
||||
const runtimeConfig = await secretsSvc.resolveAdapterConfigForRuntime(companyId, adapterConfig);
|
||||
const { config: runtimeConfig } = await secretsSvc.resolveAdapterConfigForRuntime(companyId, adapterConfig);
|
||||
const runtimeEnv = asRecord(runtimeConfig.env) ?? {};
|
||||
try {
|
||||
await ensureOpenCodeModelConfiguredAndAvailable({
|
||||
@@ -386,7 +444,7 @@ export function agentRoutes(db: Db) {
|
||||
inputAdapterConfig,
|
||||
{ strictMode: strictSecretsMode },
|
||||
);
|
||||
const runtimeAdapterConfig = await secretsSvc.resolveAdapterConfigForRuntime(
|
||||
const { config: runtimeAdapterConfig } = await secretsSvc.resolveAdapterConfigForRuntime(
|
||||
companyId,
|
||||
normalizedAdapterConfig,
|
||||
);
|
||||
@@ -413,6 +471,81 @@ export function agentRoutes(db: Db) {
|
||||
res.json(result.map((agent) => redactForRestrictedAgentView(agent)));
|
||||
});
|
||||
|
||||
router.get("/instance/scheduler-heartbeats", async (req, res) => {
|
||||
assertBoard(req);
|
||||
|
||||
const accessConditions = [];
|
||||
if (req.actor.source !== "local_implicit" && !req.actor.isInstanceAdmin) {
|
||||
const allowedCompanyIds = req.actor.companyIds ?? [];
|
||||
if (allowedCompanyIds.length === 0) {
|
||||
res.json([]);
|
||||
return;
|
||||
}
|
||||
accessConditions.push(inArray(agentsTable.companyId, allowedCompanyIds));
|
||||
}
|
||||
|
||||
const rows = await db
|
||||
.select({
|
||||
id: agentsTable.id,
|
||||
companyId: agentsTable.companyId,
|
||||
agentName: agentsTable.name,
|
||||
role: agentsTable.role,
|
||||
title: agentsTable.title,
|
||||
status: agentsTable.status,
|
||||
adapterType: agentsTable.adapterType,
|
||||
runtimeConfig: agentsTable.runtimeConfig,
|
||||
lastHeartbeatAt: agentsTable.lastHeartbeatAt,
|
||||
companyName: companies.name,
|
||||
companyIssuePrefix: companies.issuePrefix,
|
||||
})
|
||||
.from(agentsTable)
|
||||
.innerJoin(companies, eq(agentsTable.companyId, companies.id))
|
||||
.where(accessConditions.length > 0 ? and(...accessConditions) : undefined)
|
||||
.orderBy(companies.name, agentsTable.name);
|
||||
|
||||
const items: InstanceSchedulerHeartbeatAgent[] = rows
|
||||
.map((row) => {
|
||||
const policy = parseSchedulerHeartbeatPolicy(row.runtimeConfig);
|
||||
const statusEligible =
|
||||
row.status !== "paused" &&
|
||||
row.status !== "terminated" &&
|
||||
row.status !== "pending_approval";
|
||||
|
||||
return {
|
||||
id: row.id,
|
||||
companyId: row.companyId,
|
||||
companyName: row.companyName,
|
||||
companyIssuePrefix: row.companyIssuePrefix,
|
||||
agentName: row.agentName,
|
||||
agentUrlKey: deriveAgentUrlKey(row.agentName, row.id),
|
||||
role: row.role as InstanceSchedulerHeartbeatAgent["role"],
|
||||
title: row.title,
|
||||
status: row.status as InstanceSchedulerHeartbeatAgent["status"],
|
||||
adapterType: row.adapterType,
|
||||
intervalSec: policy.intervalSec,
|
||||
heartbeatEnabled: policy.enabled,
|
||||
schedulerActive: statusEligible && policy.enabled && policy.intervalSec > 0,
|
||||
lastHeartbeatAt: row.lastHeartbeatAt,
|
||||
};
|
||||
})
|
||||
.filter((item) =>
|
||||
item.intervalSec > 0 &&
|
||||
item.status !== "paused" &&
|
||||
item.status !== "terminated" &&
|
||||
item.status !== "pending_approval",
|
||||
)
|
||||
.sort((left, right) => {
|
||||
if (left.schedulerActive !== right.schedulerActive) {
|
||||
return left.schedulerActive ? -1 : 1;
|
||||
}
|
||||
const companyOrder = left.companyName.localeCompare(right.companyName);
|
||||
if (companyOrder !== 0) return companyOrder;
|
||||
return left.agentName.localeCompare(right.agentName);
|
||||
});
|
||||
|
||||
res.json(items);
|
||||
});
|
||||
|
||||
router.get("/companies/:companyId/org", async (req, res) => {
|
||||
const companyId = req.params.companyId as string;
|
||||
assertCompanyAccess(req, companyId);
|
||||
@@ -442,6 +575,34 @@ export function agentRoutes(db: Db) {
|
||||
res.json({ ...agent, chainOfCommand });
|
||||
});
|
||||
|
||||
router.get("/agents/me/inbox-lite", async (req, res) => {
|
||||
if (req.actor.type !== "agent" || !req.actor.agentId || !req.actor.companyId) {
|
||||
res.status(401).json({ error: "Agent authentication required" });
|
||||
return;
|
||||
}
|
||||
|
||||
const issuesSvc = issueService(db);
|
||||
const rows = await issuesSvc.list(req.actor.companyId, {
|
||||
assigneeAgentId: req.actor.agentId,
|
||||
status: "todo,in_progress,blocked",
|
||||
});
|
||||
|
||||
res.json(
|
||||
rows.map((issue) => ({
|
||||
id: issue.id,
|
||||
identifier: issue.identifier,
|
||||
title: issue.title,
|
||||
status: issue.status,
|
||||
priority: issue.priority,
|
||||
projectId: issue.projectId,
|
||||
goalId: issue.goalId,
|
||||
parentId: issue.parentId,
|
||||
updatedAt: issue.updatedAt,
|
||||
activeRun: issue.activeRun,
|
||||
})),
|
||||
);
|
||||
});
|
||||
|
||||
router.get("/agents/:id", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const agent = await svc.getById(id);
|
||||
@@ -930,11 +1091,7 @@ export function agentRoutes(db: Db) {
|
||||
if (changingInstructionsPath) {
|
||||
await assertCanManageInstructionsPath(req, existing);
|
||||
}
|
||||
patchData.adapterConfig = await secretsSvc.normalizeAdapterConfigForPersistence(
|
||||
existing.companyId,
|
||||
adapterConfig,
|
||||
{ strictMode: strictSecretsMode },
|
||||
);
|
||||
patchData.adapterConfig = adapterConfig;
|
||||
}
|
||||
|
||||
const requestedAdapterType =
|
||||
@@ -942,15 +1099,23 @@ export function agentRoutes(db: Db) {
|
||||
const touchesAdapterConfiguration =
|
||||
Object.prototype.hasOwnProperty.call(patchData, "adapterType") ||
|
||||
Object.prototype.hasOwnProperty.call(patchData, "adapterConfig");
|
||||
if (touchesAdapterConfiguration && requestedAdapterType === "opencode_local") {
|
||||
if (touchesAdapterConfiguration) {
|
||||
const rawEffectiveAdapterConfig = Object.prototype.hasOwnProperty.call(patchData, "adapterConfig")
|
||||
? (asRecord(patchData.adapterConfig) ?? {})
|
||||
: (asRecord(existing.adapterConfig) ?? {});
|
||||
const effectiveAdapterConfig = await secretsSvc.normalizeAdapterConfigForPersistence(
|
||||
existing.companyId,
|
||||
const effectiveAdapterConfig = applyCreateDefaultsByAdapterType(
|
||||
requestedAdapterType,
|
||||
rawEffectiveAdapterConfig,
|
||||
);
|
||||
const normalizedEffectiveAdapterConfig = await secretsSvc.normalizeAdapterConfigForPersistence(
|
||||
existing.companyId,
|
||||
effectiveAdapterConfig,
|
||||
{ strictMode: strictSecretsMode },
|
||||
);
|
||||
patchData.adapterConfig = normalizedEffectiveAdapterConfig;
|
||||
}
|
||||
if (touchesAdapterConfiguration && requestedAdapterType === "opencode_local") {
|
||||
const effectiveAdapterConfig = asRecord(patchData.adapterConfig) ?? {};
|
||||
await assertAdapterConfigConstraints(
|
||||
existing.companyId,
|
||||
requestedAdapterType,
|
||||
@@ -1138,6 +1303,7 @@ export function agentRoutes(db: Db) {
|
||||
contextSnapshot: {
|
||||
triggeredBy: req.actor.type,
|
||||
actorId: req.actor.type === "agent" ? req.actor.agentId : req.actor.userId,
|
||||
forceFreshSession: req.body.forceFreshSession === true,
|
||||
},
|
||||
});
|
||||
|
||||
@@ -1226,7 +1392,7 @@ export function agentRoutes(db: Db) {
|
||||
}
|
||||
|
||||
const config = asRecord(agent.adapterConfig) ?? {};
|
||||
const runtimeConfig = await secretsSvc.resolveAdapterConfigForRuntime(agent.companyId, config);
|
||||
const { config: runtimeConfig } = await secretsSvc.resolveAdapterConfigForRuntime(agent.companyId, config);
|
||||
const result = await runClaudeLogin({
|
||||
runId: `claude-login-${randomUUID()}`,
|
||||
agent: {
|
||||
@@ -1308,6 +1474,17 @@ export function agentRoutes(db: Db) {
|
||||
res.json(liveRuns);
|
||||
});
|
||||
|
||||
router.get("/heartbeat-runs/:runId", async (req, res) => {
|
||||
const runId = req.params.runId as string;
|
||||
const run = await heartbeat.getRun(runId);
|
||||
if (!run) {
|
||||
res.status(404).json({ error: "Heartbeat run not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, run.companyId);
|
||||
res.json(redactCurrentUserValue(run));
|
||||
});
|
||||
|
||||
router.post("/heartbeat-runs/:runId/cancel", async (req, res) => {
|
||||
assertBoard(req);
|
||||
const runId = req.params.runId as string;
|
||||
@@ -1340,10 +1517,12 @@ export function agentRoutes(db: Db) {
|
||||
const afterSeq = Number(req.query.afterSeq ?? 0);
|
||||
const limit = Number(req.query.limit ?? 200);
|
||||
const events = await heartbeat.listEvents(runId, Number.isFinite(afterSeq) ? afterSeq : 0, Number.isFinite(limit) ? limit : 200);
|
||||
const redactedEvents = events.map((event) => ({
|
||||
...event,
|
||||
payload: redactEventPayload(event.payload),
|
||||
}));
|
||||
const redactedEvents = events.map((event) =>
|
||||
redactCurrentUserValue({
|
||||
...event,
|
||||
payload: redactEventPayload(event.payload),
|
||||
}),
|
||||
);
|
||||
res.json(redactedEvents);
|
||||
});
|
||||
|
||||
@@ -1440,7 +1619,7 @@ export function agentRoutes(db: Db) {
|
||||
}
|
||||
|
||||
res.json({
|
||||
...run,
|
||||
...redactCurrentUserValue(run),
|
||||
agentId: agent.id,
|
||||
agentName: agent.name,
|
||||
adapterType: agent.adapterType,
|
||||
|
||||
@@ -121,85 +121,92 @@ export function approvalRoutes(db: Db) {
|
||||
router.post("/approvals/:id/approve", validate(resolveApprovalSchema), async (req, res) => {
|
||||
assertBoard(req);
|
||||
const id = req.params.id as string;
|
||||
const approval = await svc.approve(id, req.body.decidedByUserId ?? "board", req.body.decisionNote);
|
||||
const linkedIssues = await issueApprovalsSvc.listIssuesForApproval(approval.id);
|
||||
const linkedIssueIds = linkedIssues.map((issue) => issue.id);
|
||||
const primaryIssueId = linkedIssueIds[0] ?? null;
|
||||
const { approval, applied } = await svc.approve(
|
||||
id,
|
||||
req.body.decidedByUserId ?? "board",
|
||||
req.body.decisionNote,
|
||||
);
|
||||
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.approved",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: {
|
||||
type: approval.type,
|
||||
requestedByAgentId: approval.requestedByAgentId,
|
||||
linkedIssueIds,
|
||||
},
|
||||
});
|
||||
if (applied) {
|
||||
const linkedIssues = await issueApprovalsSvc.listIssuesForApproval(approval.id);
|
||||
const linkedIssueIds = linkedIssues.map((issue) => issue.id);
|
||||
const primaryIssueId = linkedIssueIds[0] ?? null;
|
||||
|
||||
if (approval.requestedByAgentId) {
|
||||
try {
|
||||
const wakeRun = await heartbeat.wakeup(approval.requestedByAgentId, {
|
||||
source: "automation",
|
||||
triggerDetail: "system",
|
||||
reason: "approval_approved",
|
||||
payload: {
|
||||
approvalId: approval.id,
|
||||
approvalStatus: approval.status,
|
||||
issueId: primaryIssueId,
|
||||
issueIds: linkedIssueIds,
|
||||
},
|
||||
requestedByActorType: "user",
|
||||
requestedByActorId: req.actor.userId ?? "board",
|
||||
contextSnapshot: {
|
||||
source: "approval.approved",
|
||||
approvalId: approval.id,
|
||||
approvalStatus: approval.status,
|
||||
issueId: primaryIssueId,
|
||||
issueIds: linkedIssueIds,
|
||||
taskId: primaryIssueId,
|
||||
wakeReason: "approval_approved",
|
||||
},
|
||||
});
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.approved",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: {
|
||||
type: approval.type,
|
||||
requestedByAgentId: approval.requestedByAgentId,
|
||||
linkedIssueIds,
|
||||
},
|
||||
});
|
||||
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.requester_wakeup_queued",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: {
|
||||
requesterAgentId: approval.requestedByAgentId,
|
||||
wakeRunId: wakeRun?.id ?? null,
|
||||
linkedIssueIds,
|
||||
},
|
||||
});
|
||||
} catch (err) {
|
||||
logger.warn(
|
||||
{
|
||||
err,
|
||||
approvalId: approval.id,
|
||||
requestedByAgentId: approval.requestedByAgentId,
|
||||
},
|
||||
"failed to queue requester wakeup after approval",
|
||||
);
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.requester_wakeup_failed",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: {
|
||||
requesterAgentId: approval.requestedByAgentId,
|
||||
linkedIssueIds,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
},
|
||||
});
|
||||
if (approval.requestedByAgentId) {
|
||||
try {
|
||||
const wakeRun = await heartbeat.wakeup(approval.requestedByAgentId, {
|
||||
source: "automation",
|
||||
triggerDetail: "system",
|
||||
reason: "approval_approved",
|
||||
payload: {
|
||||
approvalId: approval.id,
|
||||
approvalStatus: approval.status,
|
||||
issueId: primaryIssueId,
|
||||
issueIds: linkedIssueIds,
|
||||
},
|
||||
requestedByActorType: "user",
|
||||
requestedByActorId: req.actor.userId ?? "board",
|
||||
contextSnapshot: {
|
||||
source: "approval.approved",
|
||||
approvalId: approval.id,
|
||||
approvalStatus: approval.status,
|
||||
issueId: primaryIssueId,
|
||||
issueIds: linkedIssueIds,
|
||||
taskId: primaryIssueId,
|
||||
wakeReason: "approval_approved",
|
||||
},
|
||||
});
|
||||
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.requester_wakeup_queued",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: {
|
||||
requesterAgentId: approval.requestedByAgentId,
|
||||
wakeRunId: wakeRun?.id ?? null,
|
||||
linkedIssueIds,
|
||||
},
|
||||
});
|
||||
} catch (err) {
|
||||
logger.warn(
|
||||
{
|
||||
err,
|
||||
approvalId: approval.id,
|
||||
requestedByAgentId: approval.requestedByAgentId,
|
||||
},
|
||||
"failed to queue requester wakeup after approval",
|
||||
);
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.requester_wakeup_failed",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: {
|
||||
requesterAgentId: approval.requestedByAgentId,
|
||||
linkedIssueIds,
|
||||
error: err instanceof Error ? err.message : String(err),
|
||||
},
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
@@ -209,17 +216,23 @@ export function approvalRoutes(db: Db) {
|
||||
router.post("/approvals/:id/reject", validate(resolveApprovalSchema), async (req, res) => {
|
||||
assertBoard(req);
|
||||
const id = req.params.id as string;
|
||||
const approval = await svc.reject(id, req.body.decidedByUserId ?? "board", req.body.decisionNote);
|
||||
const { approval, applied } = await svc.reject(
|
||||
id,
|
||||
req.body.decidedByUserId ?? "board",
|
||||
req.body.decisionNote,
|
||||
);
|
||||
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.rejected",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: { type: approval.type },
|
||||
});
|
||||
if (applied) {
|
||||
await logActivity(db, {
|
||||
companyId: approval.companyId,
|
||||
actorType: "user",
|
||||
actorId: req.actor.userId ?? "board",
|
||||
action: "approval.rejected",
|
||||
entityType: "approval",
|
||||
entityId: approval.id,
|
||||
details: { type: approval.type },
|
||||
});
|
||||
}
|
||||
|
||||
res.json(redactApprovalPayload(approval));
|
||||
});
|
||||
|
||||
@@ -6,19 +6,10 @@ import type { Db } from "@paperclipai/db";
|
||||
import { createAssetImageMetadataSchema } from "@paperclipai/shared";
|
||||
import type { StorageService } from "../storage/types.js";
|
||||
import { assetService, logActivity } from "../services/index.js";
|
||||
import { isAllowedContentType, MAX_ATTACHMENT_BYTES } from "../attachment-types.js";
|
||||
import { assertCompanyAccess, getActorInfo } from "./authz.js";
|
||||
|
||||
const MAX_ASSET_IMAGE_BYTES = Number(process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES) || 10 * 1024 * 1024;
|
||||
const MAX_COMPANY_LOGO_BYTES = 100 * 1024;
|
||||
const SVG_CONTENT_TYPE = "image/svg+xml";
|
||||
const ALLOWED_IMAGE_CONTENT_TYPES = new Set([
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/jpg",
|
||||
"image/webp",
|
||||
"image/gif",
|
||||
SVG_CONTENT_TYPE,
|
||||
]);
|
||||
|
||||
function sanitizeSvgBuffer(input: Buffer): Buffer | null {
|
||||
const raw = input.toString("utf8").trim();
|
||||
@@ -89,7 +80,7 @@ export function assetRoutes(db: Db, storage: StorageService) {
|
||||
const svc = assetService(db);
|
||||
const upload = multer({
|
||||
storage: multer.memoryStorage(),
|
||||
limits: { fileSize: MAX_ASSET_IMAGE_BYTES, files: 1 },
|
||||
limits: { fileSize: MAX_ATTACHMENT_BYTES, files: 1 },
|
||||
});
|
||||
|
||||
async function runSingleFileUpload(req: Request, res: Response) {
|
||||
@@ -110,7 +101,7 @@ export function assetRoutes(db: Db, storage: StorageService) {
|
||||
} catch (err) {
|
||||
if (err instanceof multer.MulterError) {
|
||||
if (err.code === "LIMIT_FILE_SIZE") {
|
||||
res.status(422).json({ error: `Image exceeds ${MAX_ASSET_IMAGE_BYTES} bytes` });
|
||||
res.status(422).json({ error: `File exceeds ${MAX_ATTACHMENT_BYTES} bytes` });
|
||||
return;
|
||||
}
|
||||
res.status(400).json({ error: err.message });
|
||||
@@ -125,9 +116,9 @@ export function assetRoutes(db: Db, storage: StorageService) {
|
||||
return;
|
||||
}
|
||||
|
||||
let contentType = (file.mimetype || "").toLowerCase();
|
||||
if (!ALLOWED_IMAGE_CONTENT_TYPES.has(contentType)) {
|
||||
res.status(422).json({ error: `Unsupported image type: ${contentType || "unknown"}` });
|
||||
const contentType = (file.mimetype || "").toLowerCase();
|
||||
if (contentType !== SVG_CONTENT_TYPE && !isAllowedContentType(contentType)) {
|
||||
res.status(422).json({ error: `Unsupported file type: ${contentType || "unknown"}` });
|
||||
return;
|
||||
}
|
||||
let fileBody = file.buffer;
|
||||
|
||||
@@ -1,7 +1,7 @@
|
||||
import { Router } from "express";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { count, sql } from "drizzle-orm";
|
||||
import { instanceUserRoles } from "@paperclipai/db";
|
||||
import { and, count, eq, gt, isNull, sql } from "drizzle-orm";
|
||||
import { instanceUserRoles, invites } from "@paperclipai/db";
|
||||
import type { DeploymentExposure, DeploymentMode } from "@paperclipai/shared";
|
||||
|
||||
export function healthRoutes(
|
||||
@@ -27,6 +27,7 @@ export function healthRoutes(
|
||||
}
|
||||
|
||||
let bootstrapStatus: "ready" | "bootstrap_pending" = "ready";
|
||||
let bootstrapInviteActive = false;
|
||||
if (opts.deploymentMode === "authenticated") {
|
||||
const roleCount = await db
|
||||
.select({ count: count() })
|
||||
@@ -34,6 +35,23 @@ export function healthRoutes(
|
||||
.where(sql`${instanceUserRoles.role} = 'instance_admin'`)
|
||||
.then((rows) => Number(rows[0]?.count ?? 0));
|
||||
bootstrapStatus = roleCount > 0 ? "ready" : "bootstrap_pending";
|
||||
|
||||
if (bootstrapStatus === "bootstrap_pending") {
|
||||
const now = new Date();
|
||||
const inviteCount = await db
|
||||
.select({ count: count() })
|
||||
.from(invites)
|
||||
.where(
|
||||
and(
|
||||
eq(invites.inviteType, "bootstrap_ceo"),
|
||||
isNull(invites.revokedAt),
|
||||
isNull(invites.acceptedAt),
|
||||
gt(invites.expiresAt, now),
|
||||
),
|
||||
)
|
||||
.then((rows) => Number(rows[0]?.count ?? 0));
|
||||
bootstrapInviteActive = inviteCount > 0;
|
||||
}
|
||||
}
|
||||
|
||||
res.json({
|
||||
@@ -42,6 +60,7 @@ export function healthRoutes(
|
||||
deploymentExposure: opts.deploymentExposure,
|
||||
authReady: opts.authReady,
|
||||
bootstrapStatus,
|
||||
bootstrapInviteActive,
|
||||
features: {
|
||||
companyDeletionEnabled: opts.companyDeletionEnabled,
|
||||
},
|
||||
|
||||
@@ -8,6 +8,8 @@ import {
|
||||
checkoutIssueSchema,
|
||||
createIssueSchema,
|
||||
linkIssueApprovalSchema,
|
||||
issueDocumentKeySchema,
|
||||
upsertIssueDocumentSchema,
|
||||
updateIssueSchema,
|
||||
} from "@paperclipai/shared";
|
||||
import type { StorageService } from "../storage/types.js";
|
||||
@@ -19,6 +21,7 @@ import {
|
||||
heartbeatService,
|
||||
issueApprovalService,
|
||||
issueService,
|
||||
documentService,
|
||||
logActivity,
|
||||
projectService,
|
||||
} from "../services/index.js";
|
||||
@@ -26,15 +29,9 @@ import { logger } from "../middleware/logger.js";
|
||||
import { forbidden, HttpError, unauthorized } from "../errors.js";
|
||||
import { assertCompanyAccess, getActorInfo } from "./authz.js";
|
||||
import { shouldWakeAssigneeOnCheckout } from "./issues-checkout-wakeup.js";
|
||||
import { isAllowedContentType, MAX_ATTACHMENT_BYTES } from "../attachment-types.js";
|
||||
|
||||
const MAX_ATTACHMENT_BYTES = Number(process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES) || 10 * 1024 * 1024;
|
||||
const ALLOWED_ATTACHMENT_CONTENT_TYPES = new Set([
|
||||
"image/png",
|
||||
"image/jpeg",
|
||||
"image/jpg",
|
||||
"image/webp",
|
||||
"image/gif",
|
||||
]);
|
||||
const MAX_ISSUE_COMMENT_LIMIT = 500;
|
||||
|
||||
export function issueRoutes(db: Db, storage: StorageService) {
|
||||
const router = Router();
|
||||
@@ -45,6 +42,7 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
const projectsSvc = projectService(db);
|
||||
const goalsSvc = goalService(db);
|
||||
const issueApprovalsSvc = issueApprovalService(db);
|
||||
const documentsSvc = documentService(db);
|
||||
const upload = multer({
|
||||
storage: multer.memoryStorage(),
|
||||
limits: { fileSize: MAX_ATTACHMENT_BYTES, files: 1 },
|
||||
@@ -184,6 +182,13 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
}
|
||||
});
|
||||
|
||||
// Common malformed path when companyId is empty in "/api/companies/{companyId}/issues".
|
||||
router.get("/issues", (_req, res) => {
|
||||
res.status(400).json({
|
||||
error: "Missing companyId in path. Use /api/companies/{companyId}/issues.",
|
||||
});
|
||||
});
|
||||
|
||||
router.get("/companies/:companyId/issues", async (req, res) => {
|
||||
const companyId = req.params.companyId as string;
|
||||
assertCompanyAccess(req, companyId);
|
||||
@@ -223,6 +228,7 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
touchedByUserId,
|
||||
unreadForUserId,
|
||||
projectId: req.query.projectId as string | undefined,
|
||||
parentId: req.query.parentId as string | undefined,
|
||||
labelId: req.query.labelId as string | undefined,
|
||||
q: req.query.q as string | undefined,
|
||||
});
|
||||
@@ -291,16 +297,242 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const [ancestors, project, goal, mentionedProjectIds] = await Promise.all([
|
||||
const [ancestors, project, goal, mentionedProjectIds, documentPayload] = await Promise.all([
|
||||
svc.getAncestors(issue.id),
|
||||
issue.projectId ? projectsSvc.getById(issue.projectId) : null,
|
||||
issue.goalId ? goalsSvc.getById(issue.goalId) : null,
|
||||
issue.goalId
|
||||
? goalsSvc.getById(issue.goalId)
|
||||
: !issue.projectId
|
||||
? goalsSvc.getDefaultCompanyGoal(issue.companyId)
|
||||
: null,
|
||||
svc.findMentionedProjectIds(issue.id),
|
||||
documentsSvc.getIssueDocumentPayload(issue),
|
||||
]);
|
||||
const mentionedProjects = mentionedProjectIds.length > 0
|
||||
? await projectsSvc.listByIds(issue.companyId, mentionedProjectIds)
|
||||
: [];
|
||||
res.json({ ...issue, ancestors, project: project ?? null, goal: goal ?? null, mentionedProjects });
|
||||
res.json({
|
||||
...issue,
|
||||
goalId: goal?.id ?? issue.goalId,
|
||||
ancestors,
|
||||
...documentPayload,
|
||||
project: project ?? null,
|
||||
goal: goal ?? null,
|
||||
mentionedProjects,
|
||||
});
|
||||
});
|
||||
|
||||
router.get("/issues/:id/heartbeat-context", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
|
||||
const wakeCommentId =
|
||||
typeof req.query.wakeCommentId === "string" && req.query.wakeCommentId.trim().length > 0
|
||||
? req.query.wakeCommentId.trim()
|
||||
: null;
|
||||
|
||||
const [ancestors, project, goal, commentCursor, wakeComment] = await Promise.all([
|
||||
svc.getAncestors(issue.id),
|
||||
issue.projectId ? projectsSvc.getById(issue.projectId) : null,
|
||||
issue.goalId
|
||||
? goalsSvc.getById(issue.goalId)
|
||||
: !issue.projectId
|
||||
? goalsSvc.getDefaultCompanyGoal(issue.companyId)
|
||||
: null,
|
||||
svc.getCommentCursor(issue.id),
|
||||
wakeCommentId ? svc.getComment(wakeCommentId) : null,
|
||||
]);
|
||||
|
||||
res.json({
|
||||
issue: {
|
||||
id: issue.id,
|
||||
identifier: issue.identifier,
|
||||
title: issue.title,
|
||||
description: issue.description,
|
||||
status: issue.status,
|
||||
priority: issue.priority,
|
||||
projectId: issue.projectId,
|
||||
goalId: goal?.id ?? issue.goalId,
|
||||
parentId: issue.parentId,
|
||||
assigneeAgentId: issue.assigneeAgentId,
|
||||
assigneeUserId: issue.assigneeUserId,
|
||||
updatedAt: issue.updatedAt,
|
||||
},
|
||||
ancestors: ancestors.map((ancestor) => ({
|
||||
id: ancestor.id,
|
||||
identifier: ancestor.identifier,
|
||||
title: ancestor.title,
|
||||
status: ancestor.status,
|
||||
priority: ancestor.priority,
|
||||
})),
|
||||
project: project
|
||||
? {
|
||||
id: project.id,
|
||||
name: project.name,
|
||||
status: project.status,
|
||||
targetDate: project.targetDate,
|
||||
}
|
||||
: null,
|
||||
goal: goal
|
||||
? {
|
||||
id: goal.id,
|
||||
title: goal.title,
|
||||
status: goal.status,
|
||||
level: goal.level,
|
||||
parentId: goal.parentId,
|
||||
}
|
||||
: null,
|
||||
commentCursor,
|
||||
wakeComment:
|
||||
wakeComment && wakeComment.issueId === issue.id
|
||||
? wakeComment
|
||||
: null,
|
||||
});
|
||||
});
|
||||
|
||||
router.get("/issues/:id/documents", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const docs = await documentsSvc.listIssueDocuments(issue.id);
|
||||
res.json(docs);
|
||||
});
|
||||
|
||||
router.get("/issues/:id/documents/:key", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const keyParsed = issueDocumentKeySchema.safeParse(String(req.params.key ?? "").trim().toLowerCase());
|
||||
if (!keyParsed.success) {
|
||||
res.status(400).json({ error: "Invalid document key", details: keyParsed.error.issues });
|
||||
return;
|
||||
}
|
||||
const doc = await documentsSvc.getIssueDocumentByKey(issue.id, keyParsed.data);
|
||||
if (!doc) {
|
||||
res.status(404).json({ error: "Document not found" });
|
||||
return;
|
||||
}
|
||||
res.json(doc);
|
||||
});
|
||||
|
||||
router.put("/issues/:id/documents/:key", validate(upsertIssueDocumentSchema), async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const keyParsed = issueDocumentKeySchema.safeParse(String(req.params.key ?? "").trim().toLowerCase());
|
||||
if (!keyParsed.success) {
|
||||
res.status(400).json({ error: "Invalid document key", details: keyParsed.error.issues });
|
||||
return;
|
||||
}
|
||||
|
||||
const actor = getActorInfo(req);
|
||||
const result = await documentsSvc.upsertIssueDocument({
|
||||
issueId: issue.id,
|
||||
key: keyParsed.data,
|
||||
title: req.body.title ?? null,
|
||||
format: req.body.format,
|
||||
body: req.body.body,
|
||||
changeSummary: req.body.changeSummary ?? null,
|
||||
baseRevisionId: req.body.baseRevisionId ?? null,
|
||||
createdByAgentId: actor.agentId ?? null,
|
||||
createdByUserId: actor.actorType === "user" ? actor.actorId : null,
|
||||
});
|
||||
const doc = result.document;
|
||||
|
||||
await logActivity(db, {
|
||||
companyId: issue.companyId,
|
||||
actorType: actor.actorType,
|
||||
actorId: actor.actorId,
|
||||
agentId: actor.agentId,
|
||||
runId: actor.runId,
|
||||
action: result.created ? "issue.document_created" : "issue.document_updated",
|
||||
entityType: "issue",
|
||||
entityId: issue.id,
|
||||
details: {
|
||||
key: doc.key,
|
||||
documentId: doc.id,
|
||||
title: doc.title,
|
||||
format: doc.format,
|
||||
revisionNumber: doc.latestRevisionNumber,
|
||||
},
|
||||
});
|
||||
|
||||
res.status(result.created ? 201 : 200).json(doc);
|
||||
});
|
||||
|
||||
router.get("/issues/:id/documents/:key/revisions", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const keyParsed = issueDocumentKeySchema.safeParse(String(req.params.key ?? "").trim().toLowerCase());
|
||||
if (!keyParsed.success) {
|
||||
res.status(400).json({ error: "Invalid document key", details: keyParsed.error.issues });
|
||||
return;
|
||||
}
|
||||
const revisions = await documentsSvc.listIssueDocumentRevisions(issue.id, keyParsed.data);
|
||||
res.json(revisions);
|
||||
});
|
||||
|
||||
router.delete("/issues/:id/documents/:key", async (req, res) => {
|
||||
const id = req.params.id as string;
|
||||
const issue = await svc.getById(id);
|
||||
if (!issue) {
|
||||
res.status(404).json({ error: "Issue not found" });
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
if (req.actor.type !== "board") {
|
||||
res.status(403).json({ error: "Board authentication required" });
|
||||
return;
|
||||
}
|
||||
const keyParsed = issueDocumentKeySchema.safeParse(String(req.params.key ?? "").trim().toLowerCase());
|
||||
if (!keyParsed.success) {
|
||||
res.status(400).json({ error: "Invalid document key", details: keyParsed.error.issues });
|
||||
return;
|
||||
}
|
||||
const removed = await documentsSvc.deleteIssueDocument(issue.id, keyParsed.data);
|
||||
if (!removed) {
|
||||
res.status(404).json({ error: "Document not found" });
|
||||
return;
|
||||
}
|
||||
const actor = getActorInfo(req);
|
||||
await logActivity(db, {
|
||||
companyId: issue.companyId,
|
||||
actorType: actor.actorType,
|
||||
actorId: actor.actorId,
|
||||
agentId: actor.agentId,
|
||||
runId: actor.runId,
|
||||
action: "issue.document_deleted",
|
||||
entityType: "issue",
|
||||
entityId: issue.id,
|
||||
details: {
|
||||
key: removed.key,
|
||||
documentId: removed.id,
|
||||
title: removed.title,
|
||||
},
|
||||
});
|
||||
res.json({ ok: true });
|
||||
});
|
||||
|
||||
router.post("/issues/:id/read", async (req, res) => {
|
||||
@@ -522,6 +754,7 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
}
|
||||
|
||||
const actor = getActorInfo(req);
|
||||
const hasFieldChanges = Object.keys(previous).length > 0;
|
||||
await logActivity(db, {
|
||||
companyId: issue.companyId,
|
||||
actorType: actor.actorType,
|
||||
@@ -531,7 +764,12 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
action: "issue.updated",
|
||||
entityType: "issue",
|
||||
entityId: issue.id,
|
||||
details: { ...updateFields, identifier: issue.identifier, _previous: Object.keys(previous).length > 0 ? previous : undefined },
|
||||
details: {
|
||||
...updateFields,
|
||||
identifier: issue.identifier,
|
||||
...(commentBody ? { source: "comment" } : {}),
|
||||
_previous: hasFieldChanges ? previous : undefined,
|
||||
},
|
||||
});
|
||||
|
||||
let comment = null;
|
||||
@@ -555,12 +793,17 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
bodySnippet: comment.body.slice(0, 120),
|
||||
identifier: issue.identifier,
|
||||
issueTitle: issue.title,
|
||||
...(hasFieldChanges ? { updated: true } : {}),
|
||||
},
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
const assigneeChanged = assigneeWillChange;
|
||||
const statusChangedFromBacklog =
|
||||
existing.status === "backlog" &&
|
||||
issue.status !== "backlog" &&
|
||||
req.body.status !== undefined;
|
||||
|
||||
// Merge all wakeups from this update into one enqueue per agent to avoid duplicate runs.
|
||||
void (async () => {
|
||||
@@ -578,6 +821,18 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
});
|
||||
}
|
||||
|
||||
if (!assigneeChanged && statusChangedFromBacklog && issue.assigneeAgentId) {
|
||||
wakeups.set(issue.assigneeAgentId, {
|
||||
source: "automation",
|
||||
triggerDetail: "system",
|
||||
reason: "issue_status_changed",
|
||||
payload: { issueId: issue.id, mutation: "update" },
|
||||
requestedByActorType: actor.actorType,
|
||||
requestedByActorId: actor.actorId,
|
||||
contextSnapshot: { issueId: issue.id, source: "issue.status_change" },
|
||||
});
|
||||
}
|
||||
|
||||
if (commentBody && comment) {
|
||||
let mentionedIds: string[] = [];
|
||||
try {
|
||||
@@ -757,7 +1012,29 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
return;
|
||||
}
|
||||
assertCompanyAccess(req, issue.companyId);
|
||||
const comments = await svc.listComments(id);
|
||||
const afterCommentId =
|
||||
typeof req.query.after === "string" && req.query.after.trim().length > 0
|
||||
? req.query.after.trim()
|
||||
: typeof req.query.afterCommentId === "string" && req.query.afterCommentId.trim().length > 0
|
||||
? req.query.afterCommentId.trim()
|
||||
: null;
|
||||
const order =
|
||||
typeof req.query.order === "string" && req.query.order.trim().toLowerCase() === "asc"
|
||||
? "asc"
|
||||
: "desc";
|
||||
const limitRaw =
|
||||
typeof req.query.limit === "string" && req.query.limit.trim().length > 0
|
||||
? Number(req.query.limit)
|
||||
: null;
|
||||
const limit =
|
||||
limitRaw && Number.isFinite(limitRaw) && limitRaw > 0
|
||||
? Math.min(Math.floor(limitRaw), MAX_ISSUE_COMMENT_LIMIT)
|
||||
: null;
|
||||
const comments = await svc.listComments(id, {
|
||||
afterCommentId,
|
||||
order,
|
||||
limit,
|
||||
});
|
||||
res.json(comments);
|
||||
});
|
||||
|
||||
@@ -1037,7 +1314,7 @@ export function issueRoutes(db: Db, storage: StorageService) {
|
||||
return;
|
||||
}
|
||||
const contentType = (file.mimetype || "").toLowerCase();
|
||||
if (!ALLOWED_ATTACHMENT_CONTENT_TYPES.has(contentType)) {
|
||||
if (!isAllowedContentType(contentType)) {
|
||||
res.status(422).json({ error: `Unsupported attachment type: ${contentType || "unknown"}` });
|
||||
return;
|
||||
}
|
||||
|
||||
496
server/src/routes/plugin-ui-static.ts
Normal file
496
server/src/routes/plugin-ui-static.ts
Normal file
@@ -0,0 +1,496 @@
|
||||
/**
|
||||
* @fileoverview Plugin UI static file serving route
|
||||
*
|
||||
* Serves plugin UI bundles from the plugin's dist/ui/ directory under the
|
||||
* `/_plugins/:pluginId/ui/*` namespace. This is specified in PLUGIN_SPEC.md
|
||||
* §19.0.3 (Bundle Serving).
|
||||
*
|
||||
* Plugin UI bundles are pre-built ESM that the host serves as static assets.
|
||||
* The host dynamically imports the plugin's UI entry module from this path,
|
||||
* resolves the named export declared in `ui.slots[].exportName`, and mounts
|
||||
* it into the extension slot.
|
||||
*
|
||||
* Security:
|
||||
* - Path traversal is prevented by resolving the requested path and verifying
|
||||
* it stays within the plugin's UI directory.
|
||||
* - Only plugins in 'ready' status have their UI served.
|
||||
* - Only plugins that declare `entrypoints.ui` serve UI bundles.
|
||||
*
|
||||
* Cache Headers:
|
||||
* - Files with content-hash patterns in their name (e.g., `index-a1b2c3d4.js`)
|
||||
* receive `Cache-Control: public, max-age=31536000, immutable`.
|
||||
* - Other files receive `Cache-Control: public, max-age=0, must-revalidate`
|
||||
* with ETag-based conditional request support.
|
||||
*
|
||||
* @module server/routes/plugin-ui-static
|
||||
* @see doc/plugins/PLUGIN_SPEC.md §19.0.3 — Bundle Serving
|
||||
* @see doc/plugins/PLUGIN_SPEC.md §25.4.5 — Frontend Cache Invalidation
|
||||
*/
|
||||
|
||||
import { Router } from "express";
|
||||
import path from "node:path";
|
||||
import fs from "node:fs";
|
||||
import crypto from "node:crypto";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { pluginRegistryService } from "../services/plugin-registry.js";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Constants
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Regex to detect content-hashed filenames.
|
||||
*
|
||||
* Matches patterns like:
|
||||
* - `index-a1b2c3d4.js`
|
||||
* - `styles.abc123def.css`
|
||||
* - `chunk-ABCDEF01.mjs`
|
||||
*
|
||||
* The hash portion must be at least 8 hex characters to avoid false positives.
|
||||
*/
|
||||
const CONTENT_HASH_PATTERN = /[.-][a-fA-F0-9]{8,}\.\w+$/;
|
||||
|
||||
/**
|
||||
* Cache-Control header for content-hashed files.
|
||||
* These files are immutable by definition (the hash changes when content changes).
|
||||
*/
|
||||
/** 1 year in seconds — standard for content-hashed immutable resources. */
|
||||
const ONE_YEAR_SECONDS = 365 * 24 * 60 * 60; // 31_536_000
|
||||
const CACHE_CONTROL_IMMUTABLE = `public, max-age=${ONE_YEAR_SECONDS}, immutable`;
|
||||
|
||||
/**
|
||||
* Cache-Control header for non-hashed files.
|
||||
* These files must be revalidated on each request (ETag-based).
|
||||
*/
|
||||
const CACHE_CONTROL_REVALIDATE = "public, max-age=0, must-revalidate";
|
||||
|
||||
/**
|
||||
* MIME types for common plugin UI bundle file extensions.
|
||||
*/
|
||||
const MIME_TYPES: Record<string, string> = {
|
||||
".js": "application/javascript; charset=utf-8",
|
||||
".mjs": "application/javascript; charset=utf-8",
|
||||
".css": "text/css; charset=utf-8",
|
||||
".json": "application/json; charset=utf-8",
|
||||
".map": "application/json; charset=utf-8",
|
||||
".html": "text/html; charset=utf-8",
|
||||
".svg": "image/svg+xml",
|
||||
".png": "image/png",
|
||||
".jpg": "image/jpeg",
|
||||
".jpeg": "image/jpeg",
|
||||
".gif": "image/gif",
|
||||
".webp": "image/webp",
|
||||
".woff": "font/woff",
|
||||
".woff2": "font/woff2",
|
||||
".ttf": "font/ttf",
|
||||
".eot": "application/vnd.ms-fontobject",
|
||||
".ico": "image/x-icon",
|
||||
".txt": "text/plain; charset=utf-8",
|
||||
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helper
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Resolve a plugin's UI directory from its package location.
|
||||
*
|
||||
* The plugin's `packageName` is stored in the DB. We resolve the package path
|
||||
* from the local plugin directory (DEFAULT_LOCAL_PLUGIN_DIR) by looking in
|
||||
* `node_modules`. If the plugin was installed from a local path, the manifest
|
||||
* `entrypoints.ui` path is resolved relative to the package directory.
|
||||
*
|
||||
* @param localPluginDir - The plugin installation directory
|
||||
* @param packageName - The npm package name
|
||||
* @param entrypointsUi - The UI entrypoint path from the manifest (e.g., "./dist/ui/")
|
||||
* @returns Absolute path to the UI directory, or null if not found
|
||||
*/
|
||||
export function resolvePluginUiDir(
|
||||
localPluginDir: string,
|
||||
packageName: string,
|
||||
entrypointsUi: string,
|
||||
packagePath?: string | null,
|
||||
): string | null {
|
||||
// For local-path installs, prefer the persisted package path.
|
||||
if (packagePath) {
|
||||
const resolvedPackagePath = path.resolve(packagePath);
|
||||
if (fs.existsSync(resolvedPackagePath)) {
|
||||
const uiDirFromPackagePath = path.resolve(resolvedPackagePath, entrypointsUi);
|
||||
if (
|
||||
uiDirFromPackagePath.startsWith(resolvedPackagePath)
|
||||
&& fs.existsSync(uiDirFromPackagePath)
|
||||
) {
|
||||
return uiDirFromPackagePath;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve the package root within the local plugin directory's node_modules.
|
||||
// npm installs go to <localPluginDir>/node_modules/<packageName>/
|
||||
let packageRoot: string;
|
||||
if (packageName.startsWith("@")) {
|
||||
// Scoped package: @scope/name -> node_modules/@scope/name
|
||||
packageRoot = path.join(localPluginDir, "node_modules", ...packageName.split("/"));
|
||||
} else {
|
||||
packageRoot = path.join(localPluginDir, "node_modules", packageName);
|
||||
}
|
||||
|
||||
// If the standard location doesn't exist, the plugin may have been installed
|
||||
// from a local path. Try to check if the package.json is accessible at the
|
||||
// computed path or if the package is found elsewhere.
|
||||
if (!fs.existsSync(packageRoot)) {
|
||||
// For local-path installs, the packageName may be a directory that doesn't
|
||||
// live inside node_modules. Check if the package exists directly at the
|
||||
// localPluginDir level.
|
||||
const directPath = path.join(localPluginDir, packageName);
|
||||
if (fs.existsSync(directPath)) {
|
||||
packageRoot = directPath;
|
||||
} else {
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
||||
// Resolve the UI directory relative to the package root
|
||||
const uiDir = path.resolve(packageRoot, entrypointsUi);
|
||||
|
||||
// Verify the resolved UI directory exists and is actually inside the package
|
||||
if (!fs.existsSync(uiDir)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return uiDir;
|
||||
}
|
||||
|
||||
/**
|
||||
* Compute an ETag from file stat (size + mtime).
|
||||
* This is a lightweight approach that avoids reading the file content.
|
||||
*/
|
||||
function computeETag(size: number, mtimeMs: number): string {
|
||||
const ETAG_VERSION = "v2";
|
||||
const hash = crypto
|
||||
.createHash("md5")
|
||||
.update(`${ETAG_VERSION}:${size}-${mtimeMs}`)
|
||||
.digest("hex")
|
||||
.slice(0, 16);
|
||||
return `"${hash}"`;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Route factory
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Options for the plugin UI static route.
|
||||
*/
|
||||
export interface PluginUiStaticRouteOptions {
|
||||
/**
|
||||
* The local plugin installation directory.
|
||||
* This is where plugins are installed via `npm install --prefix`.
|
||||
* Defaults to the standard `~/.paperclip/plugins/` location.
|
||||
*/
|
||||
localPluginDir: string;
|
||||
}
|
||||
|
||||
/**
|
||||
* Create an Express router that serves plugin UI static files.
|
||||
*
|
||||
* This route handles `GET /_plugins/:pluginId/ui/*` requests by:
|
||||
* 1. Looking up the plugin in the registry by ID or key
|
||||
* 2. Verifying the plugin is in 'ready' status with UI declared
|
||||
* 3. Resolving the file path within the plugin's dist/ui/ directory
|
||||
* 4. Serving the file with appropriate cache headers
|
||||
*
|
||||
* @param db - Database connection for plugin registry lookups
|
||||
* @param options - Configuration options
|
||||
* @returns Express router
|
||||
*/
|
||||
export function pluginUiStaticRoutes(db: Db, options: PluginUiStaticRouteOptions) {
|
||||
const router = Router();
|
||||
const registry = pluginRegistryService(db);
|
||||
const log = logger.child({ service: "plugin-ui-static" });
|
||||
|
||||
/**
|
||||
* GET /_plugins/:pluginId/ui/*
|
||||
*
|
||||
* Serve a static file from a plugin's UI bundle directory.
|
||||
*
|
||||
* The :pluginId parameter accepts either:
|
||||
* - Database UUID
|
||||
* - Plugin key (e.g., "acme.linear")
|
||||
*
|
||||
* The wildcard captures the relative file path within the UI directory.
|
||||
*
|
||||
* Cache strategy:
|
||||
* - Content-hashed filenames → immutable, 1-year max-age
|
||||
* - Other files → must-revalidate with ETag
|
||||
*/
|
||||
router.get("/_plugins/:pluginId/ui/*filePath", async (req, res) => {
|
||||
const { pluginId } = req.params;
|
||||
|
||||
// Extract the relative file path from the named wildcard.
|
||||
// In Express 5 with path-to-regexp v8, named wildcards may return
|
||||
// an array of path segments or a single string.
|
||||
const rawParam = req.params.filePath;
|
||||
const rawFilePath = Array.isArray(rawParam)
|
||||
? rawParam.join("/")
|
||||
: rawParam as string | undefined;
|
||||
|
||||
if (!rawFilePath || rawFilePath.length === 0) {
|
||||
res.status(400).json({ error: "File path is required" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 1: Look up the plugin
|
||||
let plugin = null;
|
||||
try {
|
||||
plugin = await registry.getById(pluginId);
|
||||
} catch (error) {
|
||||
const maybeCode =
|
||||
typeof error === "object" && error !== null && "code" in error
|
||||
? (error as { code?: unknown }).code
|
||||
: undefined;
|
||||
if (maybeCode !== "22P02") {
|
||||
throw error;
|
||||
}
|
||||
}
|
||||
if (!plugin) {
|
||||
plugin = await registry.getByKey(pluginId);
|
||||
}
|
||||
|
||||
if (!plugin) {
|
||||
res.status(404).json({ error: "Plugin not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 2: Verify the plugin is ready and has UI declared
|
||||
if (plugin.status !== "ready") {
|
||||
res.status(403).json({
|
||||
error: `Plugin UI is not available (status: ${plugin.status})`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const manifest = plugin.manifestJson;
|
||||
if (!manifest?.entrypoints?.ui) {
|
||||
res.status(404).json({ error: "Plugin does not declare a UI bundle" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 2b: Check for devUiUrl in plugin config — proxy to local dev server
|
||||
// when a plugin author has configured a dev server URL for hot-reload.
|
||||
// See PLUGIN_SPEC.md §27.2 — Local Development Workflow
|
||||
try {
|
||||
const configRow = await registry.getConfig(plugin.id);
|
||||
const devUiUrl =
|
||||
configRow &&
|
||||
typeof configRow === "object" &&
|
||||
"configJson" in configRow &&
|
||||
(configRow as { configJson: Record<string, unknown> }).configJson?.devUiUrl;
|
||||
|
||||
if (typeof devUiUrl === "string" && devUiUrl.length > 0) {
|
||||
// Dev proxy is only available in development mode
|
||||
if (process.env.NODE_ENV === "production") {
|
||||
log.warn(
|
||||
{ pluginId: plugin.id },
|
||||
"plugin-ui-static: devUiUrl ignored in production",
|
||||
);
|
||||
// Fall through to static file serving below
|
||||
} else {
|
||||
// Guard against rawFilePath overriding the base URL via protocol
|
||||
// scheme (e.g. "https://evil.com/x") or protocol-relative paths
|
||||
// (e.g. "//evil.com/x") which cause `new URL(path, base)` to
|
||||
// ignore the base entirely.
|
||||
// Normalize percent-encoding so encoded slashes (%2F) can't bypass
|
||||
// the protocol/path checks below.
|
||||
let decodedPath: string;
|
||||
try {
|
||||
decodedPath = decodeURIComponent(rawFilePath);
|
||||
} catch {
|
||||
res.status(400).json({ error: "Invalid file path" });
|
||||
return;
|
||||
}
|
||||
if (
|
||||
decodedPath.includes("://") ||
|
||||
decodedPath.startsWith("//") ||
|
||||
decodedPath.startsWith("\\\\")
|
||||
) {
|
||||
res.status(400).json({ error: "Invalid file path" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Proxy the request to the dev server
|
||||
const targetUrl = new URL(rawFilePath, devUiUrl.endsWith("/") ? devUiUrl : devUiUrl + "/");
|
||||
|
||||
// SSRF protection: only allow http/https and localhost targets for dev proxy
|
||||
if (targetUrl.protocol !== "http:" && targetUrl.protocol !== "https:") {
|
||||
res.status(400).json({ error: "devUiUrl must use http or https protocol" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Dev proxy is restricted to loopback addresses only.
|
||||
// Validate the *constructed* targetUrl hostname (not the base) to
|
||||
// catch any path-based override that slipped past the checks above.
|
||||
const devHost = targetUrl.hostname;
|
||||
const isLoopback =
|
||||
devHost === "localhost" ||
|
||||
devHost === "127.0.0.1" ||
|
||||
devHost === "::1" ||
|
||||
devHost === "[::1]";
|
||||
if (!isLoopback) {
|
||||
log.warn(
|
||||
{ pluginId: plugin.id, devUiUrl, host: devHost },
|
||||
"plugin-ui-static: devUiUrl must target localhost, rejecting proxy",
|
||||
);
|
||||
res.status(400).json({ error: "devUiUrl must target localhost" });
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug(
|
||||
{ pluginId: plugin.id, devUiUrl, targetUrl: targetUrl.href },
|
||||
"plugin-ui-static: proxying to devUiUrl",
|
||||
);
|
||||
|
||||
try {
|
||||
const controller = new AbortController();
|
||||
const timeout = setTimeout(() => controller.abort(), 10_000);
|
||||
try {
|
||||
const upstream = await fetch(targetUrl.href, { signal: controller.signal });
|
||||
if (!upstream.ok) {
|
||||
res.status(upstream.status).json({
|
||||
error: `Dev server returned ${upstream.status}`,
|
||||
});
|
||||
return;
|
||||
}
|
||||
|
||||
const contentType = upstream.headers.get("content-type");
|
||||
if (contentType) res.set("Content-Type", contentType);
|
||||
res.set("Cache-Control", "no-cache, no-store, must-revalidate");
|
||||
|
||||
const body = await upstream.arrayBuffer();
|
||||
res.send(Buffer.from(body));
|
||||
return;
|
||||
} finally {
|
||||
clearTimeout(timeout);
|
||||
}
|
||||
} catch (proxyErr) {
|
||||
log.warn(
|
||||
{
|
||||
pluginId: plugin.id,
|
||||
devUiUrl,
|
||||
err: proxyErr instanceof Error ? proxyErr.message : String(proxyErr),
|
||||
},
|
||||
"plugin-ui-static: failed to proxy to devUiUrl, falling back to static",
|
||||
);
|
||||
// Fall through to static serving below
|
||||
}
|
||||
}
|
||||
}
|
||||
} catch {
|
||||
// Config lookup failure is non-fatal — fall through to static serving
|
||||
}
|
||||
|
||||
// Step 3: Resolve the plugin's UI directory
|
||||
const uiDir = resolvePluginUiDir(
|
||||
options.localPluginDir,
|
||||
plugin.packageName,
|
||||
manifest.entrypoints.ui,
|
||||
plugin.packagePath,
|
||||
);
|
||||
|
||||
if (!uiDir) {
|
||||
log.warn(
|
||||
{ pluginId: plugin.id, pluginKey: plugin.pluginKey, packageName: plugin.packageName },
|
||||
"plugin-ui-static: UI directory not found on disk",
|
||||
);
|
||||
res.status(404).json({ error: "Plugin UI directory not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 4: Resolve the requested file path and prevent traversal (including symlinks)
|
||||
const resolvedFilePath = path.resolve(uiDir, rawFilePath);
|
||||
|
||||
// Step 5: Check that the file exists and is a regular file
|
||||
let fileStat: fs.Stats;
|
||||
try {
|
||||
fileStat = fs.statSync(resolvedFilePath);
|
||||
} catch {
|
||||
res.status(404).json({ error: "File not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Security: resolve symlinks via realpathSync and verify containment.
|
||||
// This prevents symlink-based traversal that string-based startsWith misses.
|
||||
let realFilePath: string;
|
||||
let realUiDir: string;
|
||||
try {
|
||||
realFilePath = fs.realpathSync(resolvedFilePath);
|
||||
realUiDir = fs.realpathSync(uiDir);
|
||||
} catch {
|
||||
res.status(404).json({ error: "File not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
const relative = path.relative(realUiDir, realFilePath);
|
||||
if (relative.startsWith("..") || path.isAbsolute(relative)) {
|
||||
res.status(403).json({ error: "Access denied" });
|
||||
return;
|
||||
}
|
||||
|
||||
if (!fileStat.isFile()) {
|
||||
res.status(404).json({ error: "File not found" });
|
||||
return;
|
||||
}
|
||||
|
||||
// Step 6: Determine cache strategy based on filename
|
||||
const basename = path.basename(resolvedFilePath);
|
||||
const isContentHashed = CONTENT_HASH_PATTERN.test(basename);
|
||||
|
||||
// Step 7: Set cache headers
|
||||
if (isContentHashed) {
|
||||
res.set("Cache-Control", CACHE_CONTROL_IMMUTABLE);
|
||||
} else {
|
||||
res.set("Cache-Control", CACHE_CONTROL_REVALIDATE);
|
||||
|
||||
// Compute and set ETag for conditional request support
|
||||
const etag = computeETag(fileStat.size, fileStat.mtimeMs);
|
||||
res.set("ETag", etag);
|
||||
|
||||
// Check If-None-Match for 304 Not Modified
|
||||
const ifNoneMatch = req.headers["if-none-match"];
|
||||
if (ifNoneMatch === etag) {
|
||||
res.status(304).end();
|
||||
return;
|
||||
}
|
||||
}
|
||||
|
||||
// Step 8: Set Content-Type
|
||||
const ext = path.extname(resolvedFilePath).toLowerCase();
|
||||
const contentType = MIME_TYPES[ext];
|
||||
if (contentType) {
|
||||
res.set("Content-Type", contentType);
|
||||
}
|
||||
|
||||
// Step 9: Set CORS headers (plugin UI may be loaded from different origin in dev)
|
||||
res.set("Access-Control-Allow-Origin", "*");
|
||||
|
||||
// Step 10: Send the file
|
||||
// The plugin source can live in Git worktrees (e.g. ".worktrees/...").
|
||||
// `send` defaults to dotfiles:"ignore", which treats dot-directories as
|
||||
// not found. We already enforce traversal safety above, so allow dot paths.
|
||||
res.sendFile(resolvedFilePath, { dotfiles: "allow" }, (err) => {
|
||||
if (err) {
|
||||
log.error(
|
||||
{ err, pluginId: plugin.id, filePath: resolvedFilePath },
|
||||
"plugin-ui-static: error sending file",
|
||||
);
|
||||
// Only send error if headers haven't been sent yet
|
||||
if (!res.headersSent) {
|
||||
res.status(500).json({ error: "Failed to serve file" });
|
||||
}
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
return router;
|
||||
}
|
||||
2219
server/src/routes/plugins.ts
Normal file
2219
server/src/routes/plugins.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -3,7 +3,6 @@ import type { Db } from "@paperclipai/db";
|
||||
import { and, eq, sql } from "drizzle-orm";
|
||||
import { joinRequests } from "@paperclipai/db";
|
||||
import { sidebarBadgeService } from "../services/sidebar-badges.js";
|
||||
import { issueService } from "../services/issues.js";
|
||||
import { accessService } from "../services/access.js";
|
||||
import { dashboardService } from "../services/dashboard.js";
|
||||
import { assertCompanyAccess } from "./authz.js";
|
||||
@@ -11,7 +10,6 @@ import { assertCompanyAccess } from "./authz.js";
|
||||
export function sidebarBadgeRoutes(db: Db) {
|
||||
const router = Router();
|
||||
const svc = sidebarBadgeService(db);
|
||||
const issueSvc = issueService(db);
|
||||
const access = accessService(db);
|
||||
const dashboard = dashboardService(db);
|
||||
|
||||
@@ -40,12 +38,11 @@ export function sidebarBadgeRoutes(db: Db) {
|
||||
joinRequests: joinRequestCount,
|
||||
});
|
||||
const summary = await dashboard.summary(companyId);
|
||||
const staleIssueCount = await issueSvc.staleCount(companyId, 24 * 60);
|
||||
const hasFailedRuns = badges.failedRuns > 0;
|
||||
const alertsCount =
|
||||
(summary.agents.error > 0 && !hasFailedRuns ? 1 : 0) +
|
||||
(summary.costs.monthBudgetCents > 0 && summary.costs.monthUtilizationPercent >= 80 ? 1 : 0);
|
||||
badges.inbox = badges.failedRuns + alertsCount + staleIssueCount + joinRequestCount + badges.approvals;
|
||||
badges.inbox = badges.failedRuns + alertsCount + joinRequestCount + badges.approvals;
|
||||
|
||||
res.json(badges);
|
||||
});
|
||||
|
||||
@@ -1,7 +1,25 @@
|
||||
import { randomUUID } from "node:crypto";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { activityLog } from "@paperclipai/db";
|
||||
import { PLUGIN_EVENT_TYPES, type PluginEventType } from "@paperclipai/shared";
|
||||
import type { PluginEvent } from "@paperclipai/plugin-sdk";
|
||||
import { publishLiveEvent } from "./live-events.js";
|
||||
import { redactCurrentUserValue } from "../log-redaction.js";
|
||||
import { sanitizeRecord } from "../redaction.js";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
import type { PluginEventBus } from "./plugin-event-bus.js";
|
||||
|
||||
const PLUGIN_EVENT_SET: ReadonlySet<string> = new Set(PLUGIN_EVENT_TYPES);
|
||||
|
||||
let _pluginEventBus: PluginEventBus | null = null;
|
||||
|
||||
/** Wire the plugin event bus so domain events are forwarded to plugins. */
|
||||
export function setPluginEventBus(bus: PluginEventBus): void {
|
||||
if (_pluginEventBus) {
|
||||
logger.warn("setPluginEventBus called more than once, replacing existing bus");
|
||||
}
|
||||
_pluginEventBus = bus;
|
||||
}
|
||||
|
||||
export interface LogActivityInput {
|
||||
companyId: string;
|
||||
@@ -17,6 +35,7 @@ export interface LogActivityInput {
|
||||
|
||||
export async function logActivity(db: Db, input: LogActivityInput) {
|
||||
const sanitizedDetails = input.details ? sanitizeRecord(input.details) : null;
|
||||
const redactedDetails = sanitizedDetails ? redactCurrentUserValue(sanitizedDetails) : null;
|
||||
await db.insert(activityLog).values({
|
||||
companyId: input.companyId,
|
||||
actorType: input.actorType,
|
||||
@@ -26,7 +45,7 @@ export async function logActivity(db: Db, input: LogActivityInput) {
|
||||
entityId: input.entityId,
|
||||
agentId: input.agentId ?? null,
|
||||
runId: input.runId ?? null,
|
||||
details: sanitizedDetails,
|
||||
details: redactedDetails,
|
||||
});
|
||||
|
||||
publishLiveEvent({
|
||||
@@ -40,7 +59,30 @@ export async function logActivity(db: Db, input: LogActivityInput) {
|
||||
entityId: input.entityId,
|
||||
agentId: input.agentId ?? null,
|
||||
runId: input.runId ?? null,
|
||||
details: sanitizedDetails,
|
||||
details: redactedDetails,
|
||||
},
|
||||
});
|
||||
|
||||
if (_pluginEventBus && PLUGIN_EVENT_SET.has(input.action)) {
|
||||
const event: PluginEvent = {
|
||||
eventId: randomUUID(),
|
||||
eventType: input.action as PluginEventType,
|
||||
occurredAt: new Date().toISOString(),
|
||||
actorId: input.actorId,
|
||||
actorType: input.actorType,
|
||||
entityId: input.entityId,
|
||||
entityType: input.entityType,
|
||||
companyId: input.companyId,
|
||||
payload: {
|
||||
...redactedDetails,
|
||||
agentId: input.agentId ?? null,
|
||||
runId: input.runId ?? null,
|
||||
},
|
||||
};
|
||||
void _pluginEventBus.emit(event).then(({ errors }) => {
|
||||
for (const { pluginId, error } of errors) {
|
||||
logger.warn({ pluginId, eventType: event.eventType, err: error }, "plugin event handler failed");
|
||||
}
|
||||
}).catch(() => {});
|
||||
}
|
||||
}
|
||||
|
||||
@@ -341,13 +341,17 @@ export function agentService(db: Db) {
|
||||
await ensureManager(companyId, data.reportsTo);
|
||||
}
|
||||
|
||||
await assertCompanyShortnameAvailable(companyId, data.name);
|
||||
const existingAgents = await db
|
||||
.select({ id: agents.id, name: agents.name, status: agents.status })
|
||||
.from(agents)
|
||||
.where(eq(agents.companyId, companyId));
|
||||
const uniqueName = deduplicateAgentName(data.name, existingAgents);
|
||||
|
||||
const role = data.role ?? "general";
|
||||
const normalizedPermissions = normalizeAgentPermissions(data.permissions, role);
|
||||
const created = await db
|
||||
.insert(agents)
|
||||
.values({ ...data, companyId, role, permissions: normalizedPermissions })
|
||||
.values({ ...data, name: uniqueName, companyId, role, permissions: normalizedPermissions })
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
|
||||
|
||||
@@ -1,13 +1,24 @@
|
||||
import { and, asc, eq } from "drizzle-orm";
|
||||
import { and, asc, eq, inArray } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { approvalComments, approvals } from "@paperclipai/db";
|
||||
import { notFound, unprocessable } from "../errors.js";
|
||||
import { redactCurrentUserText } from "../log-redaction.js";
|
||||
import { agentService } from "./agents.js";
|
||||
import { notifyHireApproved } from "./hire-hook.js";
|
||||
|
||||
function redactApprovalComment<T extends { body: string }>(comment: T): T {
|
||||
return {
|
||||
...comment,
|
||||
body: redactCurrentUserText(comment.body),
|
||||
};
|
||||
}
|
||||
|
||||
export function approvalService(db: Db) {
|
||||
const agentsSvc = agentService(db);
|
||||
const canResolveStatuses = new Set(["pending", "revision_requested"]);
|
||||
const resolvableStatuses = Array.from(canResolveStatuses);
|
||||
type ApprovalRecord = typeof approvals.$inferSelect;
|
||||
type ResolutionResult = { approval: ApprovalRecord; applied: boolean };
|
||||
|
||||
async function getExistingApproval(id: string) {
|
||||
const existing = await db
|
||||
@@ -19,6 +30,50 @@ export function approvalService(db: Db) {
|
||||
return existing;
|
||||
}
|
||||
|
||||
async function resolveApproval(
|
||||
id: string,
|
||||
targetStatus: "approved" | "rejected",
|
||||
decidedByUserId: string,
|
||||
decisionNote: string | null | undefined,
|
||||
): Promise<ResolutionResult> {
|
||||
const existing = await getExistingApproval(id);
|
||||
if (!canResolveStatuses.has(existing.status)) {
|
||||
if (existing.status === targetStatus) {
|
||||
return { approval: existing, applied: false };
|
||||
}
|
||||
throw unprocessable(
|
||||
`Only pending or revision requested approvals can be ${targetStatus === "approved" ? "approved" : "rejected"}`,
|
||||
);
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
const updated = await db
|
||||
.update(approvals)
|
||||
.set({
|
||||
status: targetStatus,
|
||||
decidedByUserId,
|
||||
decisionNote: decisionNote ?? null,
|
||||
decidedAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(and(eq(approvals.id, id), inArray(approvals.status, resolvableStatuses)))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
|
||||
if (updated) {
|
||||
return { approval: updated, applied: true };
|
||||
}
|
||||
|
||||
const latest = await getExistingApproval(id);
|
||||
if (latest.status === targetStatus) {
|
||||
return { approval: latest, applied: false };
|
||||
}
|
||||
|
||||
throw unprocessable(
|
||||
`Only pending or revision requested approvals can be ${targetStatus === "approved" ? "approved" : "rejected"}`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
list: (companyId: string, status?: string) => {
|
||||
const conditions = [eq(approvals.companyId, companyId)];
|
||||
@@ -41,27 +96,16 @@ export function approvalService(db: Db) {
|
||||
.then((rows) => rows[0]),
|
||||
|
||||
approve: async (id: string, decidedByUserId: string, decisionNote?: string | null) => {
|
||||
const existing = await getExistingApproval(id);
|
||||
if (!canResolveStatuses.has(existing.status)) {
|
||||
throw unprocessable("Only pending or revision requested approvals can be approved");
|
||||
}
|
||||
|
||||
const now = new Date();
|
||||
const updated = await db
|
||||
.update(approvals)
|
||||
.set({
|
||||
status: "approved",
|
||||
decidedByUserId,
|
||||
decisionNote: decisionNote ?? null,
|
||||
decidedAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(approvals.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
const { approval: updated, applied } = await resolveApproval(
|
||||
id,
|
||||
"approved",
|
||||
decidedByUserId,
|
||||
decisionNote,
|
||||
);
|
||||
|
||||
let hireApprovedAgentId: string | null = null;
|
||||
if (updated.type === "hire_agent") {
|
||||
const now = new Date();
|
||||
if (applied && updated.type === "hire_agent") {
|
||||
const payload = updated.payload as Record<string, unknown>;
|
||||
const payloadAgentId = typeof payload.agentId === "string" ? payload.agentId : null;
|
||||
if (payloadAgentId) {
|
||||
@@ -103,30 +147,18 @@ export function approvalService(db: Db) {
|
||||
}
|
||||
}
|
||||
|
||||
return updated;
|
||||
return { approval: updated, applied };
|
||||
},
|
||||
|
||||
reject: async (id: string, decidedByUserId: string, decisionNote?: string | null) => {
|
||||
const existing = await getExistingApproval(id);
|
||||
if (!canResolveStatuses.has(existing.status)) {
|
||||
throw unprocessable("Only pending or revision requested approvals can be rejected");
|
||||
}
|
||||
const { approval: updated, applied } = await resolveApproval(
|
||||
id,
|
||||
"rejected",
|
||||
decidedByUserId,
|
||||
decisionNote,
|
||||
);
|
||||
|
||||
const now = new Date();
|
||||
const updated = await db
|
||||
.update(approvals)
|
||||
.set({
|
||||
status: "rejected",
|
||||
decidedByUserId,
|
||||
decisionNote: decisionNote ?? null,
|
||||
decidedAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(approvals.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
|
||||
if (updated.type === "hire_agent") {
|
||||
if (applied && updated.type === "hire_agent") {
|
||||
const payload = updated.payload as Record<string, unknown>;
|
||||
const payloadAgentId = typeof payload.agentId === "string" ? payload.agentId : null;
|
||||
if (payloadAgentId) {
|
||||
@@ -134,7 +166,7 @@ export function approvalService(db: Db) {
|
||||
}
|
||||
}
|
||||
|
||||
return updated;
|
||||
return { approval: updated, applied };
|
||||
},
|
||||
|
||||
requestRevision: async (id: string, decidedByUserId: string, decisionNote?: string | null) => {
|
||||
@@ -191,7 +223,8 @@ export function approvalService(db: Db) {
|
||||
eq(approvalComments.companyId, existing.companyId),
|
||||
),
|
||||
)
|
||||
.orderBy(asc(approvalComments.createdAt));
|
||||
.orderBy(asc(approvalComments.createdAt))
|
||||
.then((comments) => comments.map(redactApprovalComment));
|
||||
},
|
||||
|
||||
addComment: async (
|
||||
@@ -200,6 +233,7 @@ export function approvalService(db: Db) {
|
||||
actor: { agentId?: string; userId?: string },
|
||||
) => {
|
||||
const existing = await getExistingApproval(approvalId);
|
||||
const redactedBody = redactCurrentUserText(body);
|
||||
return db
|
||||
.insert(approvalComments)
|
||||
.values({
|
||||
@@ -207,10 +241,10 @@ export function approvalService(db: Db) {
|
||||
approvalId,
|
||||
authorAgentId: actor.agentId ?? null,
|
||||
authorUserId: actor.userId ?? null,
|
||||
body,
|
||||
body: redactedBody,
|
||||
})
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
.then((rows) => redactApprovalComment(rows[0]));
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -70,6 +70,10 @@ const ADAPTER_DEFAULT_RULES_BY_TYPE: Record<string, Array<{ path: string[]; valu
|
||||
{ path: ["timeoutSec"], value: 0 },
|
||||
{ path: ["graceSec"], value: 15 },
|
||||
],
|
||||
gemini_local: [
|
||||
{ path: ["timeoutSec"], value: 0 },
|
||||
{ path: ["graceSec"], value: 15 },
|
||||
],
|
||||
opencode_local: [
|
||||
{ path: ["timeoutSec"], value: 0 },
|
||||
{ path: ["graceSec"], value: 15 },
|
||||
@@ -81,11 +85,15 @@ const ADAPTER_DEFAULT_RULES_BY_TYPE: Record<string, Array<{ path: string[]; valu
|
||||
claude_local: [
|
||||
{ path: ["timeoutSec"], value: 0 },
|
||||
{ path: ["graceSec"], value: 15 },
|
||||
{ path: ["maxTurnsPerRun"], value: 80 },
|
||||
{ path: ["maxTurnsPerRun"], value: 300 },
|
||||
],
|
||||
openclaw: [
|
||||
{ path: ["method"], value: "POST" },
|
||||
{ path: ["timeoutSec"], value: 30 },
|
||||
openclaw_gateway: [
|
||||
{ path: ["timeoutSec"], value: 120 },
|
||||
{ path: ["waitTimeoutMs"], value: 120000 },
|
||||
{ path: ["sessionKeyStrategy"], value: "fixed" },
|
||||
{ path: ["sessionKey"], value: "paperclip" },
|
||||
{ path: ["role"], value: "operator" },
|
||||
{ path: ["scopes"], value: ["operator.admin"] },
|
||||
],
|
||||
};
|
||||
|
||||
|
||||
373
server/src/services/cron.ts
Normal file
373
server/src/services/cron.ts
Normal file
@@ -0,0 +1,373 @@
|
||||
/**
|
||||
* Lightweight cron expression parser and next-run calculator.
|
||||
*
|
||||
* Supports standard 5-field cron expressions:
|
||||
*
|
||||
* ┌────────────── minute (0–59)
|
||||
* │ ┌──────────── hour (0–23)
|
||||
* │ │ ┌────────── day of month (1–31)
|
||||
* │ │ │ ┌──────── month (1–12)
|
||||
* │ │ │ │ ┌────── day of week (0–6, Sun=0)
|
||||
* │ │ │ │ │
|
||||
* * * * * *
|
||||
*
|
||||
* Supported syntax per field:
|
||||
* - `*` — any value
|
||||
* - `N` — exact value
|
||||
* - `N-M` — range (inclusive)
|
||||
* - `N/S` — start at N, step S (within field bounds)
|
||||
* - `* /S` — every S (from field min) [no space — shown to avoid comment termination]
|
||||
* - `N-M/S` — range with step
|
||||
* - `N,M,...` — list of values, ranges, or steps
|
||||
*
|
||||
* @module
|
||||
*/
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* A parsed cron schedule. Each field is a sorted array of valid integer values
|
||||
* for that field.
|
||||
*/
|
||||
export interface ParsedCron {
|
||||
minutes: number[];
|
||||
hours: number[];
|
||||
daysOfMonth: number[];
|
||||
months: number[];
|
||||
daysOfWeek: number[];
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Field bounds
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
interface FieldSpec {
|
||||
min: number;
|
||||
max: number;
|
||||
name: string;
|
||||
}
|
||||
|
||||
const FIELD_SPECS: FieldSpec[] = [
|
||||
{ min: 0, max: 59, name: "minute" },
|
||||
{ min: 0, max: 23, name: "hour" },
|
||||
{ min: 1, max: 31, name: "day of month" },
|
||||
{ min: 1, max: 12, name: "month" },
|
||||
{ min: 0, max: 6, name: "day of week" },
|
||||
];
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Parsing
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Parse a single cron field token (e.g. `"5"`, `"1-3"`, `"* /10"`, `"1,3,5"`).
|
||||
*
|
||||
* @returns Sorted deduplicated array of matching integer values within bounds.
|
||||
* @throws {Error} on invalid syntax or out-of-range values.
|
||||
*/
|
||||
function parseField(token: string, spec: FieldSpec): number[] {
|
||||
const values = new Set<number>();
|
||||
|
||||
// Split on commas first — each part can be a value, range, or step
|
||||
const parts = token.split(",");
|
||||
|
||||
for (const part of parts) {
|
||||
const trimmed = part.trim();
|
||||
if (trimmed === "") {
|
||||
throw new Error(`Empty element in cron ${spec.name} field`);
|
||||
}
|
||||
|
||||
// Check for step syntax: "X/S" where X is "*" or a range or a number
|
||||
const slashIdx = trimmed.indexOf("/");
|
||||
if (slashIdx !== -1) {
|
||||
const base = trimmed.slice(0, slashIdx);
|
||||
const stepStr = trimmed.slice(slashIdx + 1);
|
||||
const step = parseInt(stepStr, 10);
|
||||
if (isNaN(step) || step <= 0) {
|
||||
throw new Error(
|
||||
`Invalid step "${stepStr}" in cron ${spec.name} field`,
|
||||
);
|
||||
}
|
||||
|
||||
let rangeStart = spec.min;
|
||||
let rangeEnd = spec.max;
|
||||
|
||||
if (base === "*") {
|
||||
// */S — every S from field min
|
||||
} else if (base.includes("-")) {
|
||||
// N-M/S — range with step
|
||||
const [a, b] = base.split("-").map((s) => parseInt(s, 10));
|
||||
if (isNaN(a!) || isNaN(b!)) {
|
||||
throw new Error(
|
||||
`Invalid range "${base}" in cron ${spec.name} field`,
|
||||
);
|
||||
}
|
||||
rangeStart = a!;
|
||||
rangeEnd = b!;
|
||||
} else {
|
||||
// N/S — start at N, step S
|
||||
const start = parseInt(base, 10);
|
||||
if (isNaN(start)) {
|
||||
throw new Error(
|
||||
`Invalid start "${base}" in cron ${spec.name} field`,
|
||||
);
|
||||
}
|
||||
rangeStart = start;
|
||||
}
|
||||
|
||||
validateBounds(rangeStart, spec);
|
||||
validateBounds(rangeEnd, spec);
|
||||
|
||||
for (let i = rangeStart; i <= rangeEnd; i += step) {
|
||||
values.add(i);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check for range syntax: "N-M"
|
||||
if (trimmed.includes("-")) {
|
||||
const [aStr, bStr] = trimmed.split("-");
|
||||
const a = parseInt(aStr!, 10);
|
||||
const b = parseInt(bStr!, 10);
|
||||
if (isNaN(a) || isNaN(b)) {
|
||||
throw new Error(
|
||||
`Invalid range "${trimmed}" in cron ${spec.name} field`,
|
||||
);
|
||||
}
|
||||
validateBounds(a, spec);
|
||||
validateBounds(b, spec);
|
||||
if (a > b) {
|
||||
throw new Error(
|
||||
`Invalid range ${a}-${b} in cron ${spec.name} field (start > end)`,
|
||||
);
|
||||
}
|
||||
for (let i = a; i <= b; i++) {
|
||||
values.add(i);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Wildcard
|
||||
if (trimmed === "*") {
|
||||
for (let i = spec.min; i <= spec.max; i++) {
|
||||
values.add(i);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Single value
|
||||
const val = parseInt(trimmed, 10);
|
||||
if (isNaN(val)) {
|
||||
throw new Error(
|
||||
`Invalid value "${trimmed}" in cron ${spec.name} field`,
|
||||
);
|
||||
}
|
||||
validateBounds(val, spec);
|
||||
values.add(val);
|
||||
}
|
||||
|
||||
if (values.size === 0) {
|
||||
throw new Error(`Empty result for cron ${spec.name} field`);
|
||||
}
|
||||
|
||||
return [...values].sort((a, b) => a - b);
|
||||
}
|
||||
|
||||
function validateBounds(value: number, spec: FieldSpec): void {
|
||||
if (value < spec.min || value > spec.max) {
|
||||
throw new Error(
|
||||
`Value ${value} out of range [${spec.min}–${spec.max}] for cron ${spec.name} field`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Public API
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Parse a cron expression string into a structured {@link ParsedCron}.
|
||||
*
|
||||
* @param expression — A standard 5-field cron expression.
|
||||
* @returns Parsed cron with sorted valid values for each field.
|
||||
* @throws {Error} on invalid syntax.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const parsed = parseCron("0 * * * *"); // every hour at minute 0
|
||||
* // parsed.minutes === [0]
|
||||
* // parsed.hours === [0,1,2,...,23]
|
||||
* ```
|
||||
*/
|
||||
export function parseCron(expression: string): ParsedCron {
|
||||
const trimmed = expression.trim();
|
||||
if (!trimmed) {
|
||||
throw new Error("Cron expression must not be empty");
|
||||
}
|
||||
|
||||
const tokens = trimmed.split(/\s+/);
|
||||
if (tokens.length !== 5) {
|
||||
throw new Error(
|
||||
`Cron expression must have exactly 5 fields, got ${tokens.length}: "${trimmed}"`,
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
minutes: parseField(tokens[0]!, FIELD_SPECS[0]!),
|
||||
hours: parseField(tokens[1]!, FIELD_SPECS[1]!),
|
||||
daysOfMonth: parseField(tokens[2]!, FIELD_SPECS[2]!),
|
||||
months: parseField(tokens[3]!, FIELD_SPECS[3]!),
|
||||
daysOfWeek: parseField(tokens[4]!, FIELD_SPECS[4]!),
|
||||
};
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a cron expression string. Returns `null` if valid, or an error
|
||||
* message string if invalid.
|
||||
*
|
||||
* @param expression — A cron expression string to validate.
|
||||
* @returns `null` on success, error message on failure.
|
||||
*/
|
||||
export function validateCron(expression: string): string | null {
|
||||
try {
|
||||
parseCron(expression);
|
||||
return null;
|
||||
} catch (err) {
|
||||
return err instanceof Error ? err.message : String(err);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate the next run time after `after` for the given parsed cron schedule.
|
||||
*
|
||||
* Starts from the minute immediately following `after` and walks forward
|
||||
* until a matching minute is found (up to a safety limit of ~4 years to
|
||||
* prevent infinite loops on impossible schedules).
|
||||
*
|
||||
* @param cron — Parsed cron schedule.
|
||||
* @param after — The reference date. The returned date will be strictly after this.
|
||||
* @returns The next matching `Date`, or `null` if no match found within the search window.
|
||||
*/
|
||||
export function nextCronTick(cron: ParsedCron, after: Date): Date | null {
|
||||
// Work in local minutes — start from the minute after `after`
|
||||
const d = new Date(after.getTime());
|
||||
// Advance to the next whole minute
|
||||
d.setUTCSeconds(0, 0);
|
||||
d.setUTCMinutes(d.getUTCMinutes() + 1);
|
||||
|
||||
// Safety: search up to 4 years worth of minutes (~2.1M iterations max).
|
||||
// Uses 366 to account for leap years.
|
||||
const MAX_CRON_SEARCH_YEARS = 4;
|
||||
const maxIterations = MAX_CRON_SEARCH_YEARS * 366 * 24 * 60;
|
||||
|
||||
for (let i = 0; i < maxIterations; i++) {
|
||||
const month = d.getUTCMonth() + 1; // 1-12
|
||||
const dayOfMonth = d.getUTCDate(); // 1-31
|
||||
const dayOfWeek = d.getUTCDay(); // 0-6
|
||||
const hour = d.getUTCHours(); // 0-23
|
||||
const minute = d.getUTCMinutes(); // 0-59
|
||||
|
||||
// Check month
|
||||
if (!cron.months.includes(month)) {
|
||||
// Skip to the first day of the next matching month
|
||||
advanceToNextMonth(d, cron.months);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check day of month AND day of week (both must match)
|
||||
if (!cron.daysOfMonth.includes(dayOfMonth) || !cron.daysOfWeek.includes(dayOfWeek)) {
|
||||
// Advance one day
|
||||
d.setUTCDate(d.getUTCDate() + 1);
|
||||
d.setUTCHours(0, 0, 0, 0);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check hour
|
||||
if (!cron.hours.includes(hour)) {
|
||||
// Advance to next matching hour within the day
|
||||
const nextHour = findNext(cron.hours, hour);
|
||||
if (nextHour !== null) {
|
||||
d.setUTCHours(nextHour, 0, 0, 0);
|
||||
} else {
|
||||
// No matching hour left today — advance to next day
|
||||
d.setUTCDate(d.getUTCDate() + 1);
|
||||
d.setUTCHours(0, 0, 0, 0);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check minute
|
||||
if (!cron.minutes.includes(minute)) {
|
||||
const nextMin = findNext(cron.minutes, minute);
|
||||
if (nextMin !== null) {
|
||||
d.setUTCMinutes(nextMin, 0, 0);
|
||||
} else {
|
||||
// No matching minute left this hour — advance to next hour
|
||||
d.setUTCHours(d.getUTCHours() + 1, 0, 0, 0);
|
||||
}
|
||||
continue;
|
||||
}
|
||||
|
||||
// All fields match!
|
||||
return new Date(d.getTime());
|
||||
}
|
||||
|
||||
// No match found within the search window
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Convenience: parse a cron expression and compute the next run time.
|
||||
*
|
||||
* @param expression — 5-field cron expression string.
|
||||
* @param after — Reference date (defaults to `new Date()`).
|
||||
* @returns The next matching Date, or `null` if no match within 4 years.
|
||||
* @throws {Error} if the cron expression is invalid.
|
||||
*/
|
||||
export function nextCronTickFromExpression(
|
||||
expression: string,
|
||||
after: Date = new Date(),
|
||||
): Date | null {
|
||||
const cron = parseCron(expression);
|
||||
return nextCronTick(cron, after);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Internal helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Find the next value in `sortedValues` that is greater than `current`.
|
||||
* Returns `null` if no such value exists.
|
||||
*/
|
||||
function findNext(sortedValues: number[], current: number): number | null {
|
||||
for (const v of sortedValues) {
|
||||
if (v > current) return v;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
/**
|
||||
* Advance `d` (mutated in place) to midnight UTC of the first day of the next
|
||||
* month whose 1-based month number is in `months`.
|
||||
*/
|
||||
function advanceToNextMonth(d: Date, months: number[]): void {
|
||||
let year = d.getUTCFullYear();
|
||||
let month = d.getUTCMonth() + 1; // 1-based
|
||||
|
||||
// Walk months forward until we find one in the set (max 48 iterations = 4 years)
|
||||
for (let i = 0; i < 48; i++) {
|
||||
month++;
|
||||
if (month > 12) {
|
||||
month = 1;
|
||||
year++;
|
||||
}
|
||||
if (months.includes(month)) {
|
||||
d.setUTCFullYear(year, month - 1, 1);
|
||||
d.setUTCHours(0, 0, 0, 0);
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
@@ -32,19 +32,6 @@ export function dashboardService(db: Db) {
|
||||
.where(and(eq(approvals.companyId, companyId), eq(approvals.status, "pending")))
|
||||
.then((rows) => Number(rows[0]?.count ?? 0));
|
||||
|
||||
const staleCutoff = new Date(Date.now() - 60 * 60 * 1000);
|
||||
const staleTasks = await db
|
||||
.select({ count: sql<number>`count(*)` })
|
||||
.from(issues)
|
||||
.where(
|
||||
and(
|
||||
eq(issues.companyId, companyId),
|
||||
eq(issues.status, "in_progress"),
|
||||
sql`${issues.startedAt} < ${staleCutoff.toISOString()}`,
|
||||
),
|
||||
)
|
||||
.then((rows) => Number(rows[0]?.count ?? 0));
|
||||
|
||||
const agentCounts: Record<string, number> = {
|
||||
active: 0,
|
||||
running: 0,
|
||||
@@ -107,7 +94,6 @@ export function dashboardService(db: Db) {
|
||||
monthUtilizationPercent: Number(utilization.toFixed(2)),
|
||||
},
|
||||
pendingApprovals,
|
||||
staleTasks,
|
||||
};
|
||||
},
|
||||
};
|
||||
|
||||
433
server/src/services/documents.ts
Normal file
433
server/src/services/documents.ts
Normal file
@@ -0,0 +1,433 @@
|
||||
import { and, asc, desc, eq } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { documentRevisions, documents, issueDocuments, issues } from "@paperclipai/db";
|
||||
import { issueDocumentKeySchema } from "@paperclipai/shared";
|
||||
import { conflict, notFound, unprocessable } from "../errors.js";
|
||||
|
||||
function normalizeDocumentKey(key: string) {
|
||||
const normalized = key.trim().toLowerCase();
|
||||
const parsed = issueDocumentKeySchema.safeParse(normalized);
|
||||
if (!parsed.success) {
|
||||
throw unprocessable("Invalid document key", parsed.error.issues);
|
||||
}
|
||||
return parsed.data;
|
||||
}
|
||||
|
||||
function isUniqueViolation(error: unknown): boolean {
|
||||
return !!error && typeof error === "object" && "code" in error && (error as { code?: string }).code === "23505";
|
||||
}
|
||||
|
||||
export function extractLegacyPlanBody(description: string | null | undefined) {
|
||||
if (!description) return null;
|
||||
const match = /<plan>\s*([\s\S]*?)\s*<\/plan>/i.exec(description);
|
||||
if (!match) return null;
|
||||
const body = match[1]?.trim();
|
||||
return body ? body : null;
|
||||
}
|
||||
|
||||
function mapIssueDocumentRow(
|
||||
row: {
|
||||
id: string;
|
||||
companyId: string;
|
||||
issueId: string;
|
||||
key: string;
|
||||
title: string | null;
|
||||
format: string;
|
||||
latestBody: string;
|
||||
latestRevisionId: string | null;
|
||||
latestRevisionNumber: number;
|
||||
createdByAgentId: string | null;
|
||||
createdByUserId: string | null;
|
||||
updatedByAgentId: string | null;
|
||||
updatedByUserId: string | null;
|
||||
createdAt: Date;
|
||||
updatedAt: Date;
|
||||
},
|
||||
includeBody: boolean,
|
||||
) {
|
||||
return {
|
||||
id: row.id,
|
||||
companyId: row.companyId,
|
||||
issueId: row.issueId,
|
||||
key: row.key,
|
||||
title: row.title,
|
||||
format: row.format,
|
||||
...(includeBody ? { body: row.latestBody } : {}),
|
||||
latestRevisionId: row.latestRevisionId ?? null,
|
||||
latestRevisionNumber: row.latestRevisionNumber,
|
||||
createdByAgentId: row.createdByAgentId,
|
||||
createdByUserId: row.createdByUserId,
|
||||
updatedByAgentId: row.updatedByAgentId,
|
||||
updatedByUserId: row.updatedByUserId,
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
export function documentService(db: Db) {
|
||||
return {
|
||||
getIssueDocumentPayload: async (issue: { id: string; description: string | null }) => {
|
||||
const [planDocument, documentSummaries] = await Promise.all([
|
||||
db
|
||||
.select({
|
||||
id: documents.id,
|
||||
companyId: documents.companyId,
|
||||
issueId: issueDocuments.issueId,
|
||||
key: issueDocuments.key,
|
||||
title: documents.title,
|
||||
format: documents.format,
|
||||
latestBody: documents.latestBody,
|
||||
latestRevisionId: documents.latestRevisionId,
|
||||
latestRevisionNumber: documents.latestRevisionNumber,
|
||||
createdByAgentId: documents.createdByAgentId,
|
||||
createdByUserId: documents.createdByUserId,
|
||||
updatedByAgentId: documents.updatedByAgentId,
|
||||
updatedByUserId: documents.updatedByUserId,
|
||||
createdAt: documents.createdAt,
|
||||
updatedAt: documents.updatedAt,
|
||||
})
|
||||
.from(issueDocuments)
|
||||
.innerJoin(documents, eq(issueDocuments.documentId, documents.id))
|
||||
.where(and(eq(issueDocuments.issueId, issue.id), eq(issueDocuments.key, "plan")))
|
||||
.then((rows) => rows[0] ?? null),
|
||||
db
|
||||
.select({
|
||||
id: documents.id,
|
||||
companyId: documents.companyId,
|
||||
issueId: issueDocuments.issueId,
|
||||
key: issueDocuments.key,
|
||||
title: documents.title,
|
||||
format: documents.format,
|
||||
latestBody: documents.latestBody,
|
||||
latestRevisionId: documents.latestRevisionId,
|
||||
latestRevisionNumber: documents.latestRevisionNumber,
|
||||
createdByAgentId: documents.createdByAgentId,
|
||||
createdByUserId: documents.createdByUserId,
|
||||
updatedByAgentId: documents.updatedByAgentId,
|
||||
updatedByUserId: documents.updatedByUserId,
|
||||
createdAt: documents.createdAt,
|
||||
updatedAt: documents.updatedAt,
|
||||
})
|
||||
.from(issueDocuments)
|
||||
.innerJoin(documents, eq(issueDocuments.documentId, documents.id))
|
||||
.where(eq(issueDocuments.issueId, issue.id))
|
||||
.orderBy(asc(issueDocuments.key), desc(documents.updatedAt)),
|
||||
]);
|
||||
|
||||
const legacyPlanBody = planDocument ? null : extractLegacyPlanBody(issue.description);
|
||||
|
||||
return {
|
||||
planDocument: planDocument ? mapIssueDocumentRow(planDocument, true) : null,
|
||||
documentSummaries: documentSummaries.map((row) => mapIssueDocumentRow(row, false)),
|
||||
legacyPlanDocument: legacyPlanBody
|
||||
? {
|
||||
key: "plan" as const,
|
||||
body: legacyPlanBody,
|
||||
source: "issue_description" as const,
|
||||
}
|
||||
: null,
|
||||
};
|
||||
},
|
||||
|
||||
listIssueDocuments: async (issueId: string) => {
|
||||
const rows = await db
|
||||
.select({
|
||||
id: documents.id,
|
||||
companyId: documents.companyId,
|
||||
issueId: issueDocuments.issueId,
|
||||
key: issueDocuments.key,
|
||||
title: documents.title,
|
||||
format: documents.format,
|
||||
latestBody: documents.latestBody,
|
||||
latestRevisionId: documents.latestRevisionId,
|
||||
latestRevisionNumber: documents.latestRevisionNumber,
|
||||
createdByAgentId: documents.createdByAgentId,
|
||||
createdByUserId: documents.createdByUserId,
|
||||
updatedByAgentId: documents.updatedByAgentId,
|
||||
updatedByUserId: documents.updatedByUserId,
|
||||
createdAt: documents.createdAt,
|
||||
updatedAt: documents.updatedAt,
|
||||
})
|
||||
.from(issueDocuments)
|
||||
.innerJoin(documents, eq(issueDocuments.documentId, documents.id))
|
||||
.where(eq(issueDocuments.issueId, issueId))
|
||||
.orderBy(asc(issueDocuments.key), desc(documents.updatedAt));
|
||||
return rows.map((row) => mapIssueDocumentRow(row, true));
|
||||
},
|
||||
|
||||
getIssueDocumentByKey: async (issueId: string, rawKey: string) => {
|
||||
const key = normalizeDocumentKey(rawKey);
|
||||
const row = await db
|
||||
.select({
|
||||
id: documents.id,
|
||||
companyId: documents.companyId,
|
||||
issueId: issueDocuments.issueId,
|
||||
key: issueDocuments.key,
|
||||
title: documents.title,
|
||||
format: documents.format,
|
||||
latestBody: documents.latestBody,
|
||||
latestRevisionId: documents.latestRevisionId,
|
||||
latestRevisionNumber: documents.latestRevisionNumber,
|
||||
createdByAgentId: documents.createdByAgentId,
|
||||
createdByUserId: documents.createdByUserId,
|
||||
updatedByAgentId: documents.updatedByAgentId,
|
||||
updatedByUserId: documents.updatedByUserId,
|
||||
createdAt: documents.createdAt,
|
||||
updatedAt: documents.updatedAt,
|
||||
})
|
||||
.from(issueDocuments)
|
||||
.innerJoin(documents, eq(issueDocuments.documentId, documents.id))
|
||||
.where(and(eq(issueDocuments.issueId, issueId), eq(issueDocuments.key, key)))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
return row ? mapIssueDocumentRow(row, true) : null;
|
||||
},
|
||||
|
||||
listIssueDocumentRevisions: async (issueId: string, rawKey: string) => {
|
||||
const key = normalizeDocumentKey(rawKey);
|
||||
return db
|
||||
.select({
|
||||
id: documentRevisions.id,
|
||||
companyId: documentRevisions.companyId,
|
||||
documentId: documentRevisions.documentId,
|
||||
issueId: issueDocuments.issueId,
|
||||
key: issueDocuments.key,
|
||||
revisionNumber: documentRevisions.revisionNumber,
|
||||
body: documentRevisions.body,
|
||||
changeSummary: documentRevisions.changeSummary,
|
||||
createdByAgentId: documentRevisions.createdByAgentId,
|
||||
createdByUserId: documentRevisions.createdByUserId,
|
||||
createdAt: documentRevisions.createdAt,
|
||||
})
|
||||
.from(issueDocuments)
|
||||
.innerJoin(documents, eq(issueDocuments.documentId, documents.id))
|
||||
.innerJoin(documentRevisions, eq(documentRevisions.documentId, documents.id))
|
||||
.where(and(eq(issueDocuments.issueId, issueId), eq(issueDocuments.key, key)))
|
||||
.orderBy(desc(documentRevisions.revisionNumber));
|
||||
},
|
||||
|
||||
upsertIssueDocument: async (input: {
|
||||
issueId: string;
|
||||
key: string;
|
||||
title?: string | null;
|
||||
format: string;
|
||||
body: string;
|
||||
changeSummary?: string | null;
|
||||
baseRevisionId?: string | null;
|
||||
createdByAgentId?: string | null;
|
||||
createdByUserId?: string | null;
|
||||
}) => {
|
||||
const key = normalizeDocumentKey(input.key);
|
||||
const issue = await db
|
||||
.select({ id: issues.id, companyId: issues.companyId })
|
||||
.from(issues)
|
||||
.where(eq(issues.id, input.issueId))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (!issue) throw notFound("Issue not found");
|
||||
|
||||
try {
|
||||
return await db.transaction(async (tx) => {
|
||||
const now = new Date();
|
||||
const existing = await tx
|
||||
.select({
|
||||
id: documents.id,
|
||||
companyId: documents.companyId,
|
||||
issueId: issueDocuments.issueId,
|
||||
key: issueDocuments.key,
|
||||
title: documents.title,
|
||||
format: documents.format,
|
||||
latestBody: documents.latestBody,
|
||||
latestRevisionId: documents.latestRevisionId,
|
||||
latestRevisionNumber: documents.latestRevisionNumber,
|
||||
createdByAgentId: documents.createdByAgentId,
|
||||
createdByUserId: documents.createdByUserId,
|
||||
updatedByAgentId: documents.updatedByAgentId,
|
||||
updatedByUserId: documents.updatedByUserId,
|
||||
createdAt: documents.createdAt,
|
||||
updatedAt: documents.updatedAt,
|
||||
})
|
||||
.from(issueDocuments)
|
||||
.innerJoin(documents, eq(issueDocuments.documentId, documents.id))
|
||||
.where(and(eq(issueDocuments.issueId, issue.id), eq(issueDocuments.key, key)))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
|
||||
if (existing) {
|
||||
if (!input.baseRevisionId) {
|
||||
throw conflict("Document update requires baseRevisionId", {
|
||||
currentRevisionId: existing.latestRevisionId,
|
||||
});
|
||||
}
|
||||
if (input.baseRevisionId !== existing.latestRevisionId) {
|
||||
throw conflict("Document was updated by someone else", {
|
||||
currentRevisionId: existing.latestRevisionId,
|
||||
});
|
||||
}
|
||||
|
||||
const nextRevisionNumber = existing.latestRevisionNumber + 1;
|
||||
const [revision] = await tx
|
||||
.insert(documentRevisions)
|
||||
.values({
|
||||
companyId: issue.companyId,
|
||||
documentId: existing.id,
|
||||
revisionNumber: nextRevisionNumber,
|
||||
body: input.body,
|
||||
changeSummary: input.changeSummary ?? null,
|
||||
createdByAgentId: input.createdByAgentId ?? null,
|
||||
createdByUserId: input.createdByUserId ?? null,
|
||||
createdAt: now,
|
||||
})
|
||||
.returning();
|
||||
|
||||
await tx
|
||||
.update(documents)
|
||||
.set({
|
||||
title: input.title ?? null,
|
||||
format: input.format,
|
||||
latestBody: input.body,
|
||||
latestRevisionId: revision.id,
|
||||
latestRevisionNumber: nextRevisionNumber,
|
||||
updatedByAgentId: input.createdByAgentId ?? null,
|
||||
updatedByUserId: input.createdByUserId ?? null,
|
||||
updatedAt: now,
|
||||
})
|
||||
.where(eq(documents.id, existing.id));
|
||||
|
||||
await tx
|
||||
.update(issueDocuments)
|
||||
.set({ updatedAt: now })
|
||||
.where(eq(issueDocuments.documentId, existing.id));
|
||||
|
||||
return {
|
||||
created: false as const,
|
||||
document: {
|
||||
...existing,
|
||||
title: input.title ?? null,
|
||||
format: input.format,
|
||||
body: input.body,
|
||||
latestRevisionId: revision.id,
|
||||
latestRevisionNumber: nextRevisionNumber,
|
||||
updatedByAgentId: input.createdByAgentId ?? null,
|
||||
updatedByUserId: input.createdByUserId ?? null,
|
||||
updatedAt: now,
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
if (input.baseRevisionId) {
|
||||
throw conflict("Document does not exist yet", { key });
|
||||
}
|
||||
|
||||
const [document] = await tx
|
||||
.insert(documents)
|
||||
.values({
|
||||
companyId: issue.companyId,
|
||||
title: input.title ?? null,
|
||||
format: input.format,
|
||||
latestBody: input.body,
|
||||
latestRevisionId: null,
|
||||
latestRevisionNumber: 1,
|
||||
createdByAgentId: input.createdByAgentId ?? null,
|
||||
createdByUserId: input.createdByUserId ?? null,
|
||||
updatedByAgentId: input.createdByAgentId ?? null,
|
||||
updatedByUserId: input.createdByUserId ?? null,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
})
|
||||
.returning();
|
||||
|
||||
const [revision] = await tx
|
||||
.insert(documentRevisions)
|
||||
.values({
|
||||
companyId: issue.companyId,
|
||||
documentId: document.id,
|
||||
revisionNumber: 1,
|
||||
body: input.body,
|
||||
changeSummary: input.changeSummary ?? null,
|
||||
createdByAgentId: input.createdByAgentId ?? null,
|
||||
createdByUserId: input.createdByUserId ?? null,
|
||||
createdAt: now,
|
||||
})
|
||||
.returning();
|
||||
|
||||
await tx
|
||||
.update(documents)
|
||||
.set({ latestRevisionId: revision.id })
|
||||
.where(eq(documents.id, document.id));
|
||||
|
||||
await tx.insert(issueDocuments).values({
|
||||
companyId: issue.companyId,
|
||||
issueId: issue.id,
|
||||
documentId: document.id,
|
||||
key,
|
||||
createdAt: now,
|
||||
updatedAt: now,
|
||||
});
|
||||
|
||||
return {
|
||||
created: true as const,
|
||||
document: {
|
||||
id: document.id,
|
||||
companyId: issue.companyId,
|
||||
issueId: issue.id,
|
||||
key,
|
||||
title: document.title,
|
||||
format: document.format,
|
||||
body: document.latestBody,
|
||||
latestRevisionId: revision.id,
|
||||
latestRevisionNumber: 1,
|
||||
createdByAgentId: document.createdByAgentId,
|
||||
createdByUserId: document.createdByUserId,
|
||||
updatedByAgentId: document.updatedByAgentId,
|
||||
updatedByUserId: document.updatedByUserId,
|
||||
createdAt: document.createdAt,
|
||||
updatedAt: document.updatedAt,
|
||||
},
|
||||
};
|
||||
});
|
||||
} catch (error) {
|
||||
if (isUniqueViolation(error)) {
|
||||
throw conflict("Document key already exists on this issue", { key });
|
||||
}
|
||||
throw error;
|
||||
}
|
||||
},
|
||||
|
||||
deleteIssueDocument: async (issueId: string, rawKey: string) => {
|
||||
const key = normalizeDocumentKey(rawKey);
|
||||
return db.transaction(async (tx) => {
|
||||
const existing = await tx
|
||||
.select({
|
||||
id: documents.id,
|
||||
companyId: documents.companyId,
|
||||
issueId: issueDocuments.issueId,
|
||||
key: issueDocuments.key,
|
||||
title: documents.title,
|
||||
format: documents.format,
|
||||
latestBody: documents.latestBody,
|
||||
latestRevisionId: documents.latestRevisionId,
|
||||
latestRevisionNumber: documents.latestRevisionNumber,
|
||||
createdByAgentId: documents.createdByAgentId,
|
||||
createdByUserId: documents.createdByUserId,
|
||||
updatedByAgentId: documents.updatedByAgentId,
|
||||
updatedByUserId: documents.updatedByUserId,
|
||||
createdAt: documents.createdAt,
|
||||
updatedAt: documents.updatedAt,
|
||||
})
|
||||
.from(issueDocuments)
|
||||
.innerJoin(documents, eq(issueDocuments.documentId, documents.id))
|
||||
.where(and(eq(issueDocuments.issueId, issueId), eq(issueDocuments.key, key)))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
|
||||
if (!existing) return null;
|
||||
|
||||
await tx.delete(issueDocuments).where(eq(issueDocuments.documentId, existing.id));
|
||||
await tx.delete(documents).where(eq(documents.id, existing.id));
|
||||
|
||||
return {
|
||||
...existing,
|
||||
body: existing.latestBody,
|
||||
latestRevisionId: existing.latestRevisionId ?? null,
|
||||
};
|
||||
});
|
||||
},
|
||||
};
|
||||
}
|
||||
143
server/src/services/execution-workspace-policy.ts
Normal file
143
server/src/services/execution-workspace-policy.ts
Normal file
@@ -0,0 +1,143 @@
|
||||
import type {
|
||||
ExecutionWorkspaceMode,
|
||||
ExecutionWorkspaceStrategy,
|
||||
IssueExecutionWorkspaceSettings,
|
||||
ProjectExecutionWorkspacePolicy,
|
||||
} from "@paperclipai/shared";
|
||||
import { asString, parseObject } from "../adapters/utils.js";
|
||||
|
||||
/** A workspace mode after resolution — "inherit" has been resolved away. */
type ParsedExecutionWorkspaceMode = Exclude<ExecutionWorkspaceMode, "inherit">;
|
||||
|
||||
function cloneRecord(value: Record<string, unknown> | null | undefined): Record<string, unknown> | null {
|
||||
if (!value) return null;
|
||||
return { ...value };
|
||||
}
|
||||
|
||||
function parseExecutionWorkspaceStrategy(raw: unknown): ExecutionWorkspaceStrategy | null {
|
||||
const parsed = parseObject(raw);
|
||||
const type = asString(parsed.type, "");
|
||||
if (type !== "project_primary" && type !== "git_worktree") {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
type,
|
||||
...(typeof parsed.baseRef === "string" ? { baseRef: parsed.baseRef } : {}),
|
||||
...(typeof parsed.branchTemplate === "string" ? { branchTemplate: parsed.branchTemplate } : {}),
|
||||
...(typeof parsed.worktreeParentDir === "string" ? { worktreeParentDir: parsed.worktreeParentDir } : {}),
|
||||
...(typeof parsed.provisionCommand === "string" ? { provisionCommand: parsed.provisionCommand } : {}),
|
||||
...(typeof parsed.teardownCommand === "string" ? { teardownCommand: parsed.teardownCommand } : {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function parseProjectExecutionWorkspacePolicy(raw: unknown): ProjectExecutionWorkspacePolicy | null {
|
||||
const parsed = parseObject(raw);
|
||||
if (Object.keys(parsed).length === 0) return null;
|
||||
const enabled = typeof parsed.enabled === "boolean" ? parsed.enabled : false;
|
||||
const defaultMode = asString(parsed.defaultMode, "");
|
||||
const allowIssueOverride =
|
||||
typeof parsed.allowIssueOverride === "boolean" ? parsed.allowIssueOverride : undefined;
|
||||
return {
|
||||
enabled,
|
||||
...(defaultMode === "project_primary" || defaultMode === "isolated" ? { defaultMode } : {}),
|
||||
...(allowIssueOverride !== undefined ? { allowIssueOverride } : {}),
|
||||
...(parseExecutionWorkspaceStrategy(parsed.workspaceStrategy)
|
||||
? { workspaceStrategy: parseExecutionWorkspaceStrategy(parsed.workspaceStrategy) }
|
||||
: {}),
|
||||
...(parsed.workspaceRuntime && typeof parsed.workspaceRuntime === "object" && !Array.isArray(parsed.workspaceRuntime)
|
||||
? { workspaceRuntime: { ...(parsed.workspaceRuntime as Record<string, unknown>) } }
|
||||
: {}),
|
||||
...(parsed.branchPolicy && typeof parsed.branchPolicy === "object" && !Array.isArray(parsed.branchPolicy)
|
||||
? { branchPolicy: { ...(parsed.branchPolicy as Record<string, unknown>) } }
|
||||
: {}),
|
||||
...(parsed.pullRequestPolicy && typeof parsed.pullRequestPolicy === "object" && !Array.isArray(parsed.pullRequestPolicy)
|
||||
? { pullRequestPolicy: { ...(parsed.pullRequestPolicy as Record<string, unknown>) } }
|
||||
: {}),
|
||||
...(parsed.cleanupPolicy && typeof parsed.cleanupPolicy === "object" && !Array.isArray(parsed.cleanupPolicy)
|
||||
? { cleanupPolicy: { ...(parsed.cleanupPolicy as Record<string, unknown>) } }
|
||||
: {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function parseIssueExecutionWorkspaceSettings(raw: unknown): IssueExecutionWorkspaceSettings | null {
|
||||
const parsed = parseObject(raw);
|
||||
if (Object.keys(parsed).length === 0) return null;
|
||||
const mode = asString(parsed.mode, "");
|
||||
return {
|
||||
...(mode === "inherit" || mode === "project_primary" || mode === "isolated" || mode === "agent_default"
|
||||
? { mode }
|
||||
: {}),
|
||||
...(parseExecutionWorkspaceStrategy(parsed.workspaceStrategy)
|
||||
? { workspaceStrategy: parseExecutionWorkspaceStrategy(parsed.workspaceStrategy) }
|
||||
: {}),
|
||||
...(parsed.workspaceRuntime && typeof parsed.workspaceRuntime === "object" && !Array.isArray(parsed.workspaceRuntime)
|
||||
? { workspaceRuntime: { ...(parsed.workspaceRuntime as Record<string, unknown>) } }
|
||||
: {}),
|
||||
};
|
||||
}
|
||||
|
||||
export function defaultIssueExecutionWorkspaceSettingsForProject(
|
||||
projectPolicy: ProjectExecutionWorkspacePolicy | null,
|
||||
): IssueExecutionWorkspaceSettings | null {
|
||||
if (!projectPolicy?.enabled) return null;
|
||||
return {
|
||||
mode: projectPolicy.defaultMode === "isolated" ? "isolated" : "project_primary",
|
||||
};
|
||||
}
|
||||
|
||||
export function resolveExecutionWorkspaceMode(input: {
|
||||
projectPolicy: ProjectExecutionWorkspacePolicy | null;
|
||||
issueSettings: IssueExecutionWorkspaceSettings | null;
|
||||
legacyUseProjectWorkspace: boolean | null;
|
||||
}): ParsedExecutionWorkspaceMode {
|
||||
const issueMode = input.issueSettings?.mode;
|
||||
if (issueMode && issueMode !== "inherit") {
|
||||
return issueMode;
|
||||
}
|
||||
if (input.projectPolicy?.enabled) {
|
||||
return input.projectPolicy.defaultMode === "isolated" ? "isolated" : "project_primary";
|
||||
}
|
||||
if (input.legacyUseProjectWorkspace === false) {
|
||||
return "agent_default";
|
||||
}
|
||||
return "project_primary";
|
||||
}
|
||||
|
||||
export function buildExecutionWorkspaceAdapterConfig(input: {
|
||||
agentConfig: Record<string, unknown>;
|
||||
projectPolicy: ProjectExecutionWorkspacePolicy | null;
|
||||
issueSettings: IssueExecutionWorkspaceSettings | null;
|
||||
mode: ParsedExecutionWorkspaceMode;
|
||||
legacyUseProjectWorkspace: boolean | null;
|
||||
}): Record<string, unknown> {
|
||||
const nextConfig = { ...input.agentConfig };
|
||||
const projectHasPolicy = Boolean(input.projectPolicy?.enabled);
|
||||
const issueHasWorkspaceOverrides = Boolean(
|
||||
input.issueSettings?.mode ||
|
||||
input.issueSettings?.workspaceStrategy ||
|
||||
input.issueSettings?.workspaceRuntime,
|
||||
);
|
||||
const hasWorkspaceControl = projectHasPolicy || issueHasWorkspaceOverrides || input.legacyUseProjectWorkspace === false;
|
||||
|
||||
if (hasWorkspaceControl) {
|
||||
if (input.mode === "isolated") {
|
||||
const strategy =
|
||||
input.issueSettings?.workspaceStrategy ??
|
||||
input.projectPolicy?.workspaceStrategy ??
|
||||
parseExecutionWorkspaceStrategy(nextConfig.workspaceStrategy) ??
|
||||
({ type: "git_worktree" } satisfies ExecutionWorkspaceStrategy);
|
||||
nextConfig.workspaceStrategy = strategy as unknown as Record<string, unknown>;
|
||||
} else {
|
||||
delete nextConfig.workspaceStrategy;
|
||||
}
|
||||
|
||||
if (input.mode === "agent_default") {
|
||||
delete nextConfig.workspaceRuntime;
|
||||
} else if (input.issueSettings?.workspaceRuntime) {
|
||||
nextConfig.workspaceRuntime = cloneRecord(input.issueSettings.workspaceRuntime) ?? undefined;
|
||||
} else if (input.projectPolicy?.workspaceRuntime) {
|
||||
nextConfig.workspaceRuntime = cloneRecord(input.projectPolicy.workspaceRuntime) ?? undefined;
|
||||
}
|
||||
}
|
||||
|
||||
return nextConfig;
|
||||
}
|
||||
@@ -1,7 +1,47 @@
|
||||
import { eq } from "drizzle-orm";
|
||||
import { and, asc, eq, isNull } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { goals } from "@paperclipai/db";
|
||||
|
||||
/** Minimal read-only DB surface; satisfied by both `Db` and transaction handles. */
type GoalReader = Pick<Db, "select">;
|
||||
|
||||
export async function getDefaultCompanyGoal(db: GoalReader, companyId: string) {
|
||||
const activeRootGoal = await db
|
||||
.select()
|
||||
.from(goals)
|
||||
.where(
|
||||
and(
|
||||
eq(goals.companyId, companyId),
|
||||
eq(goals.level, "company"),
|
||||
eq(goals.status, "active"),
|
||||
isNull(goals.parentId),
|
||||
),
|
||||
)
|
||||
.orderBy(asc(goals.createdAt))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (activeRootGoal) return activeRootGoal;
|
||||
|
||||
const anyRootGoal = await db
|
||||
.select()
|
||||
.from(goals)
|
||||
.where(
|
||||
and(
|
||||
eq(goals.companyId, companyId),
|
||||
eq(goals.level, "company"),
|
||||
isNull(goals.parentId),
|
||||
),
|
||||
)
|
||||
.orderBy(asc(goals.createdAt))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (anyRootGoal) return anyRootGoal;
|
||||
|
||||
return db
|
||||
.select()
|
||||
.from(goals)
|
||||
.where(and(eq(goals.companyId, companyId), eq(goals.level, "company")))
|
||||
.orderBy(asc(goals.createdAt))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
}
|
||||
|
||||
export function goalService(db: Db) {
|
||||
return {
|
||||
list: (companyId: string) => db.select().from(goals).where(eq(goals.companyId, companyId)),
|
||||
@@ -13,6 +53,8 @@ export function goalService(db: Db) {
|
||||
.where(eq(goals.id, id))
|
||||
.then((rows) => rows[0] ?? null),
|
||||
|
||||
getDefaultCompanyGoal: (companyId: string) => getDefaultCompanyGoal(db, companyId),
|
||||
|
||||
create: (companyId: string, data: Omit<typeof goals.$inferInsert, "companyId">) =>
|
||||
db
|
||||
.insert(goals)
|
||||
|
||||
35
server/src/services/heartbeat-run-summary.ts
Normal file
35
server/src/services/heartbeat-run-summary.ts
Normal file
@@ -0,0 +1,35 @@
|
||||
function truncateSummaryText(value: unknown, maxLength = 500) {
|
||||
if (typeof value !== "string") return null;
|
||||
return value.length > maxLength ? value.slice(0, maxLength) : value;
|
||||
}
|
||||
|
||||
function readNumericField(record: Record<string, unknown>, key: string) {
|
||||
return key in record ? record[key] ?? null : undefined;
|
||||
}
|
||||
|
||||
export function summarizeHeartbeatRunResultJson(
|
||||
resultJson: Record<string, unknown> | null | undefined,
|
||||
): Record<string, unknown> | null {
|
||||
if (!resultJson || typeof resultJson !== "object" || Array.isArray(resultJson)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const summary: Record<string, unknown> = {};
|
||||
const textFields = ["summary", "result", "message", "error"] as const;
|
||||
for (const key of textFields) {
|
||||
const value = truncateSummaryText(resultJson[key]);
|
||||
if (value !== null) {
|
||||
summary[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
const numericFieldAliases = ["total_cost_usd", "cost_usd", "costUsd"] as const;
|
||||
for (const key of numericFieldAliases) {
|
||||
const value = readNumericField(resultJson, key);
|
||||
if (value !== undefined && value !== null) {
|
||||
summary[key] = value;
|
||||
}
|
||||
}
|
||||
|
||||
return Object.keys(summary).length > 0 ? summary : null;
|
||||
}
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,6 +1,7 @@
|
||||
export { companyService } from "./companies.js";
|
||||
export { agentService, deduplicateAgentName } from "./agents.js";
|
||||
export { assetService } from "./assets.js";
|
||||
export { documentService, extractLegacyPlanBody } from "./documents.js";
|
||||
export { projectService } from "./projects.js";
|
||||
export { issueService, type IssueFilters } from "./issues.js";
|
||||
export { issueApprovalService } from "./issue-approvals.js";
|
||||
@@ -17,4 +18,5 @@ export { companyPortabilityService } from "./company-portability.js";
|
||||
export { logActivity, type LogActivityInput } from "./activity-log.js";
|
||||
export { notifyHireApproved, type NotifyHireApprovedInput } from "./hire-hook.js";
|
||||
export { publishLiveEvent, subscribeCompanyLiveEvents } from "./live-events.js";
|
||||
export { reconcilePersistedRuntimeServicesOnStartup } from "./workspace-runtime.js";
|
||||
export { createStorageServiceFromConfig, getStorageService } from "../storage/index.js";
|
||||
|
||||
30
server/src/services/issue-goal-fallback.ts
Normal file
30
server/src/services/issue-goal-fallback.ts
Normal file
@@ -0,0 +1,30 @@
|
||||
/** An id value that may also be null or undefined (absent). */
type MaybeId = string | null | undefined;
|
||||
|
||||
export function resolveIssueGoalId(input: {
|
||||
projectId: MaybeId;
|
||||
goalId: MaybeId;
|
||||
defaultGoalId: MaybeId;
|
||||
}): string | null {
|
||||
if (!input.projectId && !input.goalId) {
|
||||
return input.defaultGoalId ?? null;
|
||||
}
|
||||
return input.goalId ?? null;
|
||||
}
|
||||
|
||||
export function resolveNextIssueGoalId(input: {
|
||||
currentProjectId: MaybeId;
|
||||
currentGoalId: MaybeId;
|
||||
projectId?: MaybeId;
|
||||
goalId?: MaybeId;
|
||||
defaultGoalId: MaybeId;
|
||||
}): string | null {
|
||||
const projectId =
|
||||
input.projectId !== undefined ? input.projectId : input.currentProjectId;
|
||||
const goalId =
|
||||
input.goalId !== undefined ? input.goalId : input.currentGoalId;
|
||||
|
||||
if (!projectId && !goalId) {
|
||||
return input.defaultGoalId ?? null;
|
||||
}
|
||||
return goalId ?? null;
|
||||
}
|
||||
@@ -5,11 +5,13 @@ import {
|
||||
assets,
|
||||
companies,
|
||||
companyMemberships,
|
||||
documents,
|
||||
goals,
|
||||
heartbeatRuns,
|
||||
issueAttachments,
|
||||
issueLabels,
|
||||
issueComments,
|
||||
issueDocuments,
|
||||
issueReadStates,
|
||||
issues,
|
||||
labels,
|
||||
@@ -18,8 +20,16 @@ import {
|
||||
} from "@paperclipai/db";
|
||||
import { extractProjectMentionIds } from "@paperclipai/shared";
|
||||
import { conflict, notFound, unprocessable } from "../errors.js";
|
||||
import {
|
||||
defaultIssueExecutionWorkspaceSettingsForProject,
|
||||
parseProjectExecutionWorkspacePolicy,
|
||||
} from "./execution-workspace-policy.js";
|
||||
import { redactCurrentUserText } from "../log-redaction.js";
|
||||
import { resolveIssueGoalId, resolveNextIssueGoalId } from "./issue-goal-fallback.js";
|
||||
import { getDefaultCompanyGoal } from "./goals.js";
|
||||
|
||||
const ALL_ISSUE_STATUSES = ["backlog", "todo", "in_progress", "in_review", "blocked", "done", "cancelled"];
|
||||
const MAX_ISSUE_COMMENT_PAGE_LIMIT = 500;
|
||||
|
||||
function assertTransition(from: string, to: string) {
|
||||
if (from === to) return;
|
||||
@@ -53,6 +63,7 @@ export interface IssueFilters {
|
||||
touchedByUserId?: string;
|
||||
unreadForUserId?: string;
|
||||
projectId?: string;
|
||||
parentId?: string;
|
||||
labelId?: string;
|
||||
q?: string;
|
||||
}
|
||||
@@ -83,6 +94,13 @@ type IssueUserContextInput = {
|
||||
updatedAt: Date | string;
|
||||
};
|
||||
|
||||
function redactIssueComment<T extends { body: string }>(comment: T): T {
|
||||
return {
|
||||
...comment,
|
||||
body: redactCurrentUserText(comment.body),
|
||||
};
|
||||
}
|
||||
|
||||
function sameRunLock(checkoutRunId: string | null, actorRunId: string | null) {
|
||||
if (actorRunId) return checkoutRunId === actorRunId;
|
||||
return checkoutRunId == null;
|
||||
@@ -458,6 +476,7 @@ export function issueService(db: Db) {
|
||||
conditions.push(unreadForUserCondition(companyId, unreadForUserId));
|
||||
}
|
||||
if (filters?.projectId) conditions.push(eq(issues.projectId, filters.projectId));
|
||||
if (filters?.parentId) conditions.push(eq(issues.parentId, filters.parentId));
|
||||
if (filters?.labelId) {
|
||||
const labeledIssueIds = await db
|
||||
.select({ issueId: issueLabels.issueId })
|
||||
@@ -635,6 +654,20 @@ export function issueService(db: Db) {
|
||||
throw unprocessable("in_progress issues require an assignee");
|
||||
}
|
||||
return db.transaction(async (tx) => {
|
||||
const defaultCompanyGoal = await getDefaultCompanyGoal(tx, companyId);
|
||||
let executionWorkspaceSettings =
|
||||
(issueData.executionWorkspaceSettings as Record<string, unknown> | null | undefined) ?? null;
|
||||
if (executionWorkspaceSettings == null && issueData.projectId) {
|
||||
const project = await tx
|
||||
.select({ executionWorkspacePolicy: projects.executionWorkspacePolicy })
|
||||
.from(projects)
|
||||
.where(and(eq(projects.id, issueData.projectId), eq(projects.companyId, companyId)))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
executionWorkspaceSettings =
|
||||
defaultIssueExecutionWorkspaceSettingsForProject(
|
||||
parseProjectExecutionWorkspacePolicy(project?.executionWorkspacePolicy),
|
||||
) as Record<string, unknown> | null;
|
||||
}
|
||||
const [company] = await tx
|
||||
.update(companies)
|
||||
.set({ issueCounter: sql`${companies.issueCounter} + 1` })
|
||||
@@ -644,7 +677,18 @@ export function issueService(db: Db) {
|
||||
const issueNumber = company.issueCounter;
|
||||
const identifier = `${company.issuePrefix}-${issueNumber}`;
|
||||
|
||||
const values = { ...issueData, companyId, issueNumber, identifier } as typeof issues.$inferInsert;
|
||||
const values = {
|
||||
...issueData,
|
||||
goalId: resolveIssueGoalId({
|
||||
projectId: issueData.projectId,
|
||||
goalId: issueData.goalId,
|
||||
defaultGoalId: defaultCompanyGoal?.id ?? null,
|
||||
}),
|
||||
...(executionWorkspaceSettings ? { executionWorkspaceSettings } : {}),
|
||||
companyId,
|
||||
issueNumber,
|
||||
identifier,
|
||||
} as typeof issues.$inferInsert;
|
||||
if (values.status === "in_progress" && !values.startedAt) {
|
||||
values.startedAt = new Date();
|
||||
}
|
||||
@@ -719,6 +763,14 @@ export function issueService(db: Db) {
|
||||
}
|
||||
|
||||
return db.transaction(async (tx) => {
|
||||
const defaultCompanyGoal = await getDefaultCompanyGoal(tx, existing.companyId);
|
||||
patch.goalId = resolveNextIssueGoalId({
|
||||
currentProjectId: existing.projectId,
|
||||
currentGoalId: existing.goalId,
|
||||
projectId: issueData.projectId,
|
||||
goalId: issueData.goalId,
|
||||
defaultGoalId: defaultCompanyGoal?.id ?? null,
|
||||
});
|
||||
const updated = await tx
|
||||
.update(issues)
|
||||
.set(patch)
|
||||
@@ -740,6 +792,10 @@ export function issueService(db: Db) {
|
||||
.select({ assetId: issueAttachments.assetId })
|
||||
.from(issueAttachments)
|
||||
.where(eq(issueAttachments.issueId, id));
|
||||
const issueDocumentIds = await tx
|
||||
.select({ documentId: issueDocuments.documentId })
|
||||
.from(issueDocuments)
|
||||
.where(eq(issueDocuments.issueId, id));
|
||||
|
||||
const removedIssue = await tx
|
||||
.delete(issues)
|
||||
@@ -753,6 +809,12 @@ export function issueService(db: Db) {
|
||||
.where(inArray(assets.id, attachmentAssetIds.map((row) => row.assetId)));
|
||||
}
|
||||
|
||||
if (removedIssue && issueDocumentIds.length > 0) {
|
||||
await tx
|
||||
.delete(documents)
|
||||
.where(inArray(documents.id, issueDocumentIds.map((row) => row.documentId)));
|
||||
}
|
||||
|
||||
if (!removedIssue) return null;
|
||||
const [enriched] = await withIssueLabels(tx, [removedIssue]);
|
||||
return enriched;
|
||||
@@ -1011,19 +1073,96 @@ export function issueService(db: Db) {
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null),
|
||||
|
||||
listComments: (issueId: string) =>
|
||||
db
|
||||
listComments: async (
|
||||
issueId: string,
|
||||
opts?: {
|
||||
afterCommentId?: string | null;
|
||||
order?: "asc" | "desc";
|
||||
limit?: number | null;
|
||||
},
|
||||
) => {
|
||||
const order = opts?.order === "asc" ? "asc" : "desc";
|
||||
const afterCommentId = opts?.afterCommentId?.trim() || null;
|
||||
const limit =
|
||||
opts?.limit && opts.limit > 0
|
||||
? Math.min(Math.floor(opts.limit), MAX_ISSUE_COMMENT_PAGE_LIMIT)
|
||||
: null;
|
||||
|
||||
const conditions = [eq(issueComments.issueId, issueId)];
|
||||
if (afterCommentId) {
|
||||
const anchor = await db
|
||||
.select({
|
||||
id: issueComments.id,
|
||||
createdAt: issueComments.createdAt,
|
||||
})
|
||||
.from(issueComments)
|
||||
.where(and(eq(issueComments.issueId, issueId), eq(issueComments.id, afterCommentId)))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
|
||||
if (!anchor) return [];
|
||||
conditions.push(
|
||||
order === "asc"
|
||||
? sql<boolean>`(
|
||||
${issueComments.createdAt} > ${anchor.createdAt}
|
||||
OR (${issueComments.createdAt} = ${anchor.createdAt} AND ${issueComments.id} > ${anchor.id})
|
||||
)`
|
||||
: sql<boolean>`(
|
||||
${issueComments.createdAt} < ${anchor.createdAt}
|
||||
OR (${issueComments.createdAt} = ${anchor.createdAt} AND ${issueComments.id} < ${anchor.id})
|
||||
)`,
|
||||
);
|
||||
}
|
||||
|
||||
const query = db
|
||||
.select()
|
||||
.from(issueComments)
|
||||
.where(eq(issueComments.issueId, issueId))
|
||||
.orderBy(desc(issueComments.createdAt)),
|
||||
.where(and(...conditions))
|
||||
.orderBy(
|
||||
order === "asc" ? asc(issueComments.createdAt) : desc(issueComments.createdAt),
|
||||
order === "asc" ? asc(issueComments.id) : desc(issueComments.id),
|
||||
);
|
||||
|
||||
const comments = limit ? await query.limit(limit) : await query;
|
||||
return comments.map(redactIssueComment);
|
||||
},
|
||||
|
||||
getCommentCursor: async (issueId: string) => {
|
||||
const [latest, countRow] = await Promise.all([
|
||||
db
|
||||
.select({
|
||||
latestCommentId: issueComments.id,
|
||||
latestCommentAt: issueComments.createdAt,
|
||||
})
|
||||
.from(issueComments)
|
||||
.where(eq(issueComments.issueId, issueId))
|
||||
.orderBy(desc(issueComments.createdAt), desc(issueComments.id))
|
||||
.limit(1)
|
||||
.then((rows) => rows[0] ?? null),
|
||||
db
|
||||
.select({
|
||||
totalComments: sql<number>`count(*)::int`,
|
||||
})
|
||||
.from(issueComments)
|
||||
.where(eq(issueComments.issueId, issueId))
|
||||
.then((rows) => rows[0] ?? null),
|
||||
]);
|
||||
|
||||
return {
|
||||
totalComments: Number(countRow?.totalComments ?? 0),
|
||||
latestCommentId: latest?.latestCommentId ?? null,
|
||||
latestCommentAt: latest?.latestCommentAt ?? null,
|
||||
};
|
||||
},
|
||||
|
||||
getComment: (commentId: string) =>
|
||||
db
|
||||
.select()
|
||||
.from(issueComments)
|
||||
.where(eq(issueComments.id, commentId))
|
||||
.then((rows) => rows[0] ?? null),
|
||||
.then((rows) => {
|
||||
const comment = rows[0] ?? null;
|
||||
return comment ? redactIssueComment(comment) : null;
|
||||
}),
|
||||
|
||||
addComment: async (issueId: string, body: string, actor: { agentId?: string; userId?: string }) => {
|
||||
const issue = await db
|
||||
@@ -1034,6 +1173,7 @@ export function issueService(db: Db) {
|
||||
|
||||
if (!issue) throw notFound("Issue not found");
|
||||
|
||||
const redactedBody = redactCurrentUserText(body);
|
||||
const [comment] = await db
|
||||
.insert(issueComments)
|
||||
.values({
|
||||
@@ -1041,7 +1181,7 @@ export function issueService(db: Db) {
|
||||
issueId,
|
||||
authorAgentId: actor.agentId ?? null,
|
||||
authorUserId: actor.userId ?? null,
|
||||
body,
|
||||
body: redactedBody,
|
||||
})
|
||||
.returning();
|
||||
|
||||
@@ -1051,7 +1191,7 @@ export function issueService(db: Db) {
|
||||
.set({ updatedAt: new Date() })
|
||||
.where(eq(issues.id, issueId));
|
||||
|
||||
return comment;
|
||||
return redactIssueComment(comment);
|
||||
},
|
||||
|
||||
createAttachment: async (input: {
|
||||
@@ -1386,23 +1526,5 @@ export function issueService(db: Db) {
|
||||
goal: a.goalId ? goalMap.get(a.goalId) ?? null : null,
|
||||
}));
|
||||
},
|
||||
|
||||
staleCount: async (companyId: string, minutes = 60) => {
|
||||
const cutoff = new Date(Date.now() - minutes * 60 * 1000);
|
||||
const result = await db
|
||||
.select({ count: sql<number>`count(*)` })
|
||||
.from(issues)
|
||||
.where(
|
||||
and(
|
||||
eq(issues.companyId, companyId),
|
||||
eq(issues.status, "in_progress"),
|
||||
isNull(issues.hiddenAt),
|
||||
sql`${issues.startedAt} < ${cutoff.toISOString()}`,
|
||||
),
|
||||
)
|
||||
.then((rows) => rows[0]);
|
||||
|
||||
return Number(result?.count ?? 0);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
@@ -34,7 +34,21 @@ export function publishLiveEvent(input: {
|
||||
return event;
|
||||
}
|
||||
|
||||
export function publishGlobalLiveEvent(input: {
|
||||
type: LiveEventType;
|
||||
payload?: LiveEventPayload;
|
||||
}) {
|
||||
const event = toLiveEvent({ companyId: "*", type: input.type, payload: input.payload });
|
||||
emitter.emit("*", event);
|
||||
return event;
|
||||
}
|
||||
|
||||
export function subscribeCompanyLiveEvents(companyId: string, listener: LiveEventListener) {
|
||||
emitter.on(companyId, listener);
|
||||
return () => emitter.off(companyId, listener);
|
||||
}
|
||||
|
||||
export function subscribeGlobalLiveEvents(listener: LiveEventListener) {
|
||||
emitter.on("*", listener);
|
||||
return () => emitter.off("*", listener);
|
||||
}
|
||||
|
||||
449
server/src/services/plugin-capability-validator.ts
Normal file
449
server/src/services/plugin-capability-validator.ts
Normal file
@@ -0,0 +1,449 @@
|
||||
/**
|
||||
* PluginCapabilityValidator — enforces the capability model at both
|
||||
* install-time and runtime.
|
||||
*
|
||||
* Every plugin declares the capabilities it requires in its manifest
|
||||
* (`manifest.capabilities`). This service checks those declarations
|
||||
* against a mapping of operations → required capabilities so that:
|
||||
*
|
||||
* 1. **Install-time validation** — `validateManifestCapabilities()`
|
||||
* ensures that declared features (tools, jobs, webhooks, UI slots)
|
||||
* have matching capability entries, giving operators clear feedback
|
||||
* before a plugin is activated.
|
||||
*
|
||||
* 2. **Runtime gating** — `checkOperation()` / `assertOperation()` are
|
||||
* called on every worker→host bridge call to enforce least-privilege
|
||||
* access. If a plugin attempts an operation it did not declare, the
|
||||
* call is rejected with a 403 error.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §15 — Capability Model
|
||||
* @see host-client-factory.ts — SDK-side capability gating
|
||||
*/
|
||||
import type {
|
||||
PluginCapability,
|
||||
PaperclipPluginManifestV1,
|
||||
PluginUiSlotType,
|
||||
PluginLauncherPlacementZone,
|
||||
} from "@paperclipai/shared";
|
||||
import { forbidden } from "../errors.js";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Capability requirement mappings
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Maps high-level operations to the capabilities they require.
 *
 * When the bridge receives a call from a plugin worker, the host looks up
 * the operation in this map and checks the plugin's declared capabilities.
 * If any required capability is missing, the call is rejected.
 *
 * Note: operations absent from this map are rejected outright by
 * `checkOperation` (fail-closed), so adding a new bridge operation requires
 * adding an entry here.
 *
 * @see PLUGIN_SPEC.md §15 — Capability Model
 */
const OPERATION_CAPABILITIES: Record<string, readonly PluginCapability[]> = {
  // Data read operations
  "companies.list": ["companies.read"],
  "companies.get": ["companies.read"],
  "projects.list": ["projects.read"],
  "projects.get": ["projects.read"],
  "project.workspaces.list": ["project.workspaces.read"],
  "project.workspaces.get": ["project.workspaces.read"],
  "issues.list": ["issues.read"],
  "issues.get": ["issues.read"],
  "issue.comments.list": ["issue.comments.read"],
  "issue.comments.get": ["issue.comments.read"],
  "agents.list": ["agents.read"],
  "agents.get": ["agents.read"],
  "goals.list": ["goals.read"],
  "goals.get": ["goals.read"],
  "activity.list": ["activity.read"],
  "activity.get": ["activity.read"],
  "costs.list": ["costs.read"],
  "costs.get": ["costs.read"],

  // Data write operations
  "issues.create": ["issues.create"],
  "issues.update": ["issues.update"],
  "issue.comments.create": ["issue.comments.create"],
  "activity.log": ["activity.log.write"],
  "metrics.write": ["metrics.write"],

  // Plugin state operations
  "plugin.state.get": ["plugin.state.read"],
  "plugin.state.list": ["plugin.state.read"],
  "plugin.state.set": ["plugin.state.write"],
  "plugin.state.delete": ["plugin.state.write"],

  // Runtime / Integration operations
  "events.subscribe": ["events.subscribe"],
  "events.emit": ["events.emit"],
  "jobs.schedule": ["jobs.schedule"],
  // jobs.cancel maps to jobs.schedule: a plugin able to schedule jobs may
  // also cancel them — there is no separate cancel capability.
  "jobs.cancel": ["jobs.schedule"],
  "webhooks.receive": ["webhooks.receive"],
  "http.request": ["http.outbound"],
  "secrets.resolve": ["secrets.read-ref"],

  // Agent tools
  "agent.tools.register": ["agent.tools.register"],
  // Executing a registered tool is covered by the same capability as
  // registering it.
  "agent.tools.execute": ["agent.tools.register"],
};
|
||||
|
||||
/**
 * Maps UI slot types to the capability required to register them.
 *
 * Several slot types deliberately share one capability (e.g. all sidebar
 * variants map to `ui.sidebar.register`, all action-style buttons/menus map
 * to `ui.action.register`).
 *
 * @see PLUGIN_SPEC.md §19 — UI Extension Model
 */
const UI_SLOT_CAPABILITIES: Record<PluginUiSlotType, PluginCapability> = {
  sidebar: "ui.sidebar.register",
  sidebarPanel: "ui.sidebar.register",
  projectSidebarItem: "ui.sidebar.register",
  page: "ui.page.register",
  detailTab: "ui.detailTab.register",
  taskDetailView: "ui.detailTab.register",
  dashboardWidget: "ui.dashboardWidget.register",
  globalToolbarButton: "ui.action.register",
  toolbarButton: "ui.action.register",
  contextMenuItem: "ui.action.register",
  commentAnnotation: "ui.commentAnnotation.register",
  commentContextMenuItem: "ui.action.register",
  settingsPage: "instance.settings.register",
};
|
||||
|
||||
/**
 * Launcher placement zones align with host UI surfaces and therefore inherit
 * the same capability requirements as the equivalent slot type.
 *
 * Keep this table in sync with UI_SLOT_CAPABILITIES above — every zone here
 * mirrors the slot type of the same name.
 */
const LAUNCHER_PLACEMENT_CAPABILITIES: Record<
  PluginLauncherPlacementZone,
  PluginCapability
> = {
  page: "ui.page.register",
  detailTab: "ui.detailTab.register",
  taskDetailView: "ui.detailTab.register",
  dashboardWidget: "ui.dashboardWidget.register",
  sidebar: "ui.sidebar.register",
  sidebarPanel: "ui.sidebar.register",
  projectSidebarItem: "ui.sidebar.register",
  globalToolbarButton: "ui.action.register",
  toolbarButton: "ui.action.register",
  contextMenuItem: "ui.action.register",
  commentAnnotation: "ui.commentAnnotation.register",
  commentContextMenuItem: "ui.action.register",
  settingsPage: "instance.settings.register",
};
|
||||
|
||||
/**
 * Maps feature declarations in the manifest to their required capabilities.
 *
 * Each key names a manifest field that `validateManifestCapabilities` reads
 * via `manifest[feature]`; when that field is a non-empty array, the plugin
 * must also declare the mapped capability.
 */
const FEATURE_CAPABILITIES: Record<string, PluginCapability> = {
  tools: "agent.tools.register",
  jobs: "jobs.schedule",
  webhooks: "webhooks.receive",
};
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Result types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Result of a capability check. When `allowed` is false, `missing` contains
 * the capabilities that the plugin does not declare but the operation requires.
 *
 * `missing` may also be empty on failure — e.g. when the operation itself is
 * unknown and rejected by default.
 */
export interface CapabilityCheckResult {
  // True when every required capability is declared by the plugin.
  allowed: boolean;
  // Required-but-undeclared capabilities; empty for unknown operations.
  missing: PluginCapability[];
  // The operation that was checked, when the check was operation-scoped.
  operation?: string;
  // Manifest id of the plugin being checked.
  pluginId?: string;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PluginCapabilityValidator interface
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
export interface PluginCapabilityValidator {
  /**
   * Check whether a plugin has a specific capability declared in its manifest.
   */
  hasCapability(
    manifest: PaperclipPluginManifestV1,
    capability: PluginCapability,
  ): boolean;

  /**
   * Check whether a plugin has all of the specified capabilities.
   * The result's `missing` lists every undeclared capability (no `operation`).
   */
  hasAllCapabilities(
    manifest: PaperclipPluginManifestV1,
    capabilities: PluginCapability[],
  ): CapabilityCheckResult;

  /**
   * Check whether a plugin has at least one of the specified capabilities.
   */
  hasAnyCapability(
    manifest: PaperclipPluginManifestV1,
    capabilities: PluginCapability[],
  ): boolean;

  /**
   * Check whether a plugin is allowed to perform the named operation.
   *
   * Operations are mapped to required capabilities via OPERATION_CAPABILITIES.
   * Unknown operations are rejected by default (fail-closed).
   */
  checkOperation(
    manifest: PaperclipPluginManifestV1,
    operation: string,
  ): CapabilityCheckResult;

  /**
   * Assert that a plugin is allowed to perform an operation.
   * Throws a 403 HttpError if the capability check fails.
   */
  assertOperation(
    manifest: PaperclipPluginManifestV1,
    operation: string,
  ): void;

  /**
   * Assert that a plugin has a specific capability.
   * Throws a 403 HttpError if the capability is missing.
   */
  assertCapability(
    manifest: PaperclipPluginManifestV1,
    capability: PluginCapability,
  ): void;

  /**
   * Check whether a plugin can register the given UI slot type.
   */
  checkUiSlot(
    manifest: PaperclipPluginManifestV1,
    slotType: PluginUiSlotType,
  ): CapabilityCheckResult;

  /**
   * Validate that a manifest's declared capabilities are consistent with its
   * declared features (tools, jobs, webhooks, UI slots).
   *
   * Returns all missing capabilities rather than failing on the first one.
   * This is useful for install-time validation to give comprehensive feedback.
   */
  validateManifestCapabilities(
    manifest: PaperclipPluginManifestV1,
  ): CapabilityCheckResult;

  /**
   * Get the capabilities required for a named operation.
   * Returns an empty array if the operation is unknown.
   */
  getRequiredCapabilities(operation: string): readonly PluginCapability[];

  /**
   * Get the capability required for a UI slot type.
   */
  getUiSlotCapability(slotType: PluginUiSlotType): PluginCapability;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Factory
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a PluginCapabilityValidator.
|
||||
*
|
||||
* This service enforces capability gates for plugin operations. The host
|
||||
* uses it to verify that a plugin's declared capabilities permit the
|
||||
* operation it is attempting, both at install time (manifest validation)
|
||||
* and at runtime (bridge call gating).
|
||||
*
|
||||
* Usage:
|
||||
* ```ts
|
||||
* const validator = pluginCapabilityValidator();
|
||||
*
|
||||
* // Runtime: gate a bridge call
|
||||
* validator.assertOperation(plugin.manifestJson, "issues.create");
|
||||
*
|
||||
* // Install time: validate manifest consistency
|
||||
* const result = validator.validateManifestCapabilities(manifest);
|
||||
* if (!result.allowed) {
|
||||
* throw badRequest("Missing capabilities", result.missing);
|
||||
* }
|
||||
* ```
|
||||
*/
|
||||
export function pluginCapabilityValidator(): PluginCapabilityValidator {
|
||||
const log = logger.child({ service: "plugin-capability-validator" });
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
function capabilitySet(manifest: PaperclipPluginManifestV1): Set<PluginCapability> {
|
||||
return new Set(manifest.capabilities);
|
||||
}
|
||||
|
||||
function buildForbiddenMessage(
|
||||
manifest: PaperclipPluginManifestV1,
|
||||
operation: string,
|
||||
missing: PluginCapability[],
|
||||
): string {
|
||||
return (
|
||||
`Plugin '${manifest.id}' is not allowed to perform '${operation}'. ` +
|
||||
`Missing required capabilities: ${missing.join(", ")}`
|
||||
);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
return {
|
||||
hasCapability(manifest, capability) {
|
||||
return manifest.capabilities.includes(capability);
|
||||
},
|
||||
|
||||
hasAllCapabilities(manifest, capabilities) {
|
||||
const declared = capabilitySet(manifest);
|
||||
const missing = capabilities.filter((cap) => !declared.has(cap));
|
||||
return {
|
||||
allowed: missing.length === 0,
|
||||
missing,
|
||||
pluginId: manifest.id,
|
||||
};
|
||||
},
|
||||
|
||||
hasAnyCapability(manifest, capabilities) {
|
||||
const declared = capabilitySet(manifest);
|
||||
return capabilities.some((cap) => declared.has(cap));
|
||||
},
|
||||
|
||||
checkOperation(manifest, operation) {
|
||||
const required = OPERATION_CAPABILITIES[operation];
|
||||
|
||||
if (!required) {
|
||||
log.warn(
|
||||
{ pluginId: manifest.id, operation },
|
||||
"capability check for unknown operation – rejecting by default",
|
||||
);
|
||||
return {
|
||||
allowed: false,
|
||||
missing: [],
|
||||
operation,
|
||||
pluginId: manifest.id,
|
||||
};
|
||||
}
|
||||
|
||||
const declared = capabilitySet(manifest);
|
||||
const missing = required.filter((cap) => !declared.has(cap));
|
||||
|
||||
if (missing.length > 0) {
|
||||
log.debug(
|
||||
{ pluginId: manifest.id, operation, missing },
|
||||
"capability check failed",
|
||||
);
|
||||
}
|
||||
|
||||
return {
|
||||
allowed: missing.length === 0,
|
||||
missing,
|
||||
operation,
|
||||
pluginId: manifest.id,
|
||||
};
|
||||
},
|
||||
|
||||
assertOperation(manifest, operation) {
|
||||
const result = this.checkOperation(manifest, operation);
|
||||
if (!result.allowed) {
|
||||
const msg = result.missing.length > 0
|
||||
? buildForbiddenMessage(manifest, operation, result.missing)
|
||||
: `Plugin '${manifest.id}' attempted unknown operation '${operation}'`;
|
||||
throw forbidden(msg);
|
||||
}
|
||||
},
|
||||
|
||||
assertCapability(manifest, capability) {
|
||||
if (!this.hasCapability(manifest, capability)) {
|
||||
throw forbidden(
|
||||
`Plugin '${manifest.id}' lacks required capability '${capability}'`,
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
checkUiSlot(manifest, slotType) {
|
||||
const required = UI_SLOT_CAPABILITIES[slotType];
|
||||
if (!required) {
|
||||
return {
|
||||
allowed: false,
|
||||
missing: [],
|
||||
operation: `ui.${slotType}.register`,
|
||||
pluginId: manifest.id,
|
||||
};
|
||||
}
|
||||
|
||||
const has = manifest.capabilities.includes(required);
|
||||
return {
|
||||
allowed: has,
|
||||
missing: has ? [] : [required],
|
||||
operation: `ui.${slotType}.register`,
|
||||
pluginId: manifest.id,
|
||||
};
|
||||
},
|
||||
|
||||
validateManifestCapabilities(manifest) {
|
||||
const declared = capabilitySet(manifest);
|
||||
const allMissing: PluginCapability[] = [];
|
||||
|
||||
// Check feature declarations → required capabilities
|
||||
for (const [feature, requiredCap] of Object.entries(FEATURE_CAPABILITIES)) {
|
||||
const featureValue = manifest[feature as keyof PaperclipPluginManifestV1];
|
||||
if (Array.isArray(featureValue) && featureValue.length > 0) {
|
||||
if (!declared.has(requiredCap)) {
|
||||
allMissing.push(requiredCap);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check UI slots → required capabilities
|
||||
const uiSlots = manifest.ui?.slots ?? [];
|
||||
if (uiSlots.length > 0) {
|
||||
for (const slot of uiSlots) {
|
||||
const requiredCap = UI_SLOT_CAPABILITIES[slot.type];
|
||||
if (requiredCap && !declared.has(requiredCap)) {
|
||||
if (!allMissing.includes(requiredCap)) {
|
||||
allMissing.push(requiredCap);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Check launcher declarations → required capabilities
|
||||
const launchers = [
|
||||
...(manifest.launchers ?? []),
|
||||
...(manifest.ui?.launchers ?? []),
|
||||
];
|
||||
if (launchers.length > 0) {
|
||||
for (const launcher of launchers) {
|
||||
const requiredCap = LAUNCHER_PLACEMENT_CAPABILITIES[launcher.placementZone];
|
||||
if (requiredCap && !declared.has(requiredCap) && !allMissing.includes(requiredCap)) {
|
||||
allMissing.push(requiredCap);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return {
|
||||
allowed: allMissing.length === 0,
|
||||
missing: allMissing,
|
||||
pluginId: manifest.id,
|
||||
};
|
||||
},
|
||||
|
||||
getRequiredCapabilities(operation) {
|
||||
return OPERATION_CAPABILITIES[operation] ?? [];
|
||||
},
|
||||
|
||||
getUiSlotCapability(slotType) {
|
||||
return UI_SLOT_CAPABILITIES[slotType];
|
||||
},
|
||||
};
|
||||
}
|
||||
54
server/src/services/plugin-config-validator.ts
Normal file
54
server/src/services/plugin-config-validator.ts
Normal file
@@ -0,0 +1,54 @@
|
||||
/**
|
||||
* @fileoverview Validates plugin instance configuration against its JSON Schema.
|
||||
*
|
||||
* Uses Ajv to validate `configJson` values against the `instanceConfigSchema`
|
||||
* declared in a plugin's manifest. This ensures that invalid configuration is
|
||||
* rejected at the API boundary, not discovered later at worker startup.
|
||||
*
|
||||
* @module server/services/plugin-config-validator
|
||||
*/
|
||||
|
||||
import Ajv, { type ErrorObject } from "ajv";
|
||||
import addFormats from "ajv-formats";
|
||||
import type { JsonSchema } from "@paperclipai/shared";
|
||||
|
||||
export interface ConfigValidationResult {
|
||||
valid: boolean;
|
||||
errors?: { field: string; message: string }[];
|
||||
}
|
||||
|
||||
/**
|
||||
* Validate a config object against a JSON Schema.
|
||||
*
|
||||
* @param configJson - The configuration values to validate.
|
||||
* @param schema - The JSON Schema from the plugin manifest's `instanceConfigSchema`.
|
||||
* @returns Validation result with structured field errors on failure.
|
||||
*/
|
||||
export function validateInstanceConfig(
|
||||
configJson: Record<string, unknown>,
|
||||
schema: JsonSchema,
|
||||
): ConfigValidationResult {
|
||||
// eslint-disable-next-line @typescript-eslint/no-explicit-any
|
||||
const AjvCtor = (Ajv as any).default ?? Ajv;
|
||||
const ajv = new AjvCtor({ allErrors: true });
|
||||
// ajv-formats v3 default export is a FormatsPlugin object; call it as a plugin.
|
||||
const applyFormats = (addFormats as any).default ?? addFormats;
|
||||
applyFormats(ajv);
|
||||
// Register the secret-ref format used by plugin manifests to mark fields that
|
||||
// hold a Paperclip secret UUID rather than a raw value. The format is a UI
|
||||
// hint only — UUID validation happens in the secrets handler at resolve time.
|
||||
ajv.addFormat("secret-ref", { validate: () => true });
|
||||
const validate = ajv.compile(schema);
|
||||
const valid = validate(configJson);
|
||||
|
||||
if (valid) {
|
||||
return { valid: true };
|
||||
}
|
||||
|
||||
const errors = (validate.errors ?? []).map((err: ErrorObject) => ({
|
||||
field: err.instancePath || "/",
|
||||
message: err.message ?? "validation failed",
|
||||
}));
|
||||
|
||||
return { valid: false, errors };
|
||||
}
|
||||
339
server/src/services/plugin-dev-watcher.ts
Normal file
339
server/src/services/plugin-dev-watcher.ts
Normal file
@@ -0,0 +1,339 @@
|
||||
/**
|
||||
* PluginDevWatcher — watches local-path plugin directories for file changes
|
||||
* and triggers worker restarts so plugin authors get a fast rebuild-and-reload
|
||||
* cycle without manually restarting the server.
|
||||
*
|
||||
* Only plugins installed from a local path (i.e. those with a non-null
|
||||
* `packagePath` in the DB) are watched. File changes in the plugin's package
|
||||
* directory trigger a debounced worker restart via the lifecycle manager.
|
||||
*
|
||||
* Uses chokidar rather than raw fs.watch so we get a production-grade watcher
|
||||
* backend across platforms and avoid exhausting file descriptors as quickly in
|
||||
* large dev workspaces.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §27.2 — Local Development Workflow
|
||||
*/
|
||||
import chokidar, { type FSWatcher } from "chokidar";
|
||||
import { existsSync, readFileSync, readdirSync, statSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
|
||||
|
||||
// Module-scoped logger for all dev-watcher messages.
const log = logger.child({ service: "plugin-dev-watcher" });

/** Debounce interval for file changes (ms). */
const DEBOUNCE_MS = 500;

/** Public surface of the dev watcher used by the plugin host. */
export interface PluginDevWatcher {
  /** Start watching a local-path plugin directory. */
  watch(pluginId: string, packagePath: string): void;
  /** Stop watching a specific plugin. */
  unwatch(pluginId: string): void;
  /** Stop all watchers and clean up. */
  close(): void;
}

/**
 * Resolves a plugin id to its local package path, or null/undefined when the
 * plugin was not installed from a local path.
 */
export type ResolvePluginPackagePath = (
  pluginId: string,
) => Promise<string | null | undefined>;

/** Injectable node:fs functions, primarily for testing without a real FS. */
export interface PluginDevWatcherFsDeps {
  existsSync?: typeof existsSync;
  readFileSync?: typeof readFileSync;
  readdirSync?: typeof readdirSync;
  statSync?: typeof statSync;
}

/** One path handed to chokidar; `recursive` is tracked for merged duplicates. */
type PluginWatchTarget = {
  path: string;
  recursive: boolean;
  kind: "file" | "dir";
};

/** Subset of a plugin's package.json that declares its entrypoints. */
type PluginPackageJson = {
  paperclipPlugin?: {
    manifest?: string;
    worker?: string;
    ui?: string;
  };
};
|
||||
|
||||
function shouldIgnorePath(filename: string | null | undefined): boolean {
|
||||
if (!filename) return false;
|
||||
const normalized = filename.replace(/\\/g, "/");
|
||||
const segments = normalized.split("/").filter(Boolean);
|
||||
return segments.some(
|
||||
(segment) =>
|
||||
segment === "node_modules" ||
|
||||
segment === ".git" ||
|
||||
segment === ".vite" ||
|
||||
segment === ".paperclip-sdk" ||
|
||||
segment.startsWith("."),
|
||||
);
|
||||
}
|
||||
|
||||
export function resolvePluginWatchTargets(
|
||||
packagePath: string,
|
||||
fsDeps?: Pick<PluginDevWatcherFsDeps, "existsSync" | "readFileSync" | "readdirSync" | "statSync">,
|
||||
): PluginWatchTarget[] {
|
||||
const fileExists = fsDeps?.existsSync ?? existsSync;
|
||||
const readFile = fsDeps?.readFileSync ?? readFileSync;
|
||||
const readDir = fsDeps?.readdirSync ?? readdirSync;
|
||||
const statFile = fsDeps?.statSync ?? statSync;
|
||||
const absPath = path.resolve(packagePath);
|
||||
const targets = new Map<string, PluginWatchTarget>();
|
||||
|
||||
function addWatchTarget(targetPath: string, recursive: boolean, kind?: "file" | "dir"): void {
|
||||
const resolved = path.resolve(targetPath);
|
||||
if (!fileExists(resolved)) return;
|
||||
const inferredKind = kind ?? (statFile(resolved).isDirectory() ? "dir" : "file");
|
||||
|
||||
const existing = targets.get(resolved);
|
||||
if (existing) {
|
||||
existing.recursive = existing.recursive || recursive;
|
||||
return;
|
||||
}
|
||||
|
||||
targets.set(resolved, { path: resolved, recursive, kind: inferredKind });
|
||||
}
|
||||
|
||||
function addRuntimeFilesFromDir(dirPath: string): void {
|
||||
if (!fileExists(dirPath)) return;
|
||||
|
||||
for (const entry of readDir(dirPath, { withFileTypes: true })) {
|
||||
const entryPath = path.join(dirPath, entry.name);
|
||||
if (entry.isDirectory()) {
|
||||
addRuntimeFilesFromDir(entryPath);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (!entry.isFile()) continue;
|
||||
if (!entry.name.endsWith(".js") && !entry.name.endsWith(".css")) continue;
|
||||
addWatchTarget(entryPath, false, "file");
|
||||
}
|
||||
}
|
||||
|
||||
const packageJsonPath = path.join(absPath, "package.json");
|
||||
addWatchTarget(packageJsonPath, false, "file");
|
||||
if (!fileExists(packageJsonPath)) {
|
||||
return [...targets.values()];
|
||||
}
|
||||
|
||||
let packageJson: PluginPackageJson | null = null;
|
||||
try {
|
||||
packageJson = JSON.parse(readFile(packageJsonPath, "utf8")) as PluginPackageJson;
|
||||
} catch {
|
||||
packageJson = null;
|
||||
}
|
||||
|
||||
const entrypointPaths = [
|
||||
packageJson?.paperclipPlugin?.manifest,
|
||||
packageJson?.paperclipPlugin?.worker,
|
||||
packageJson?.paperclipPlugin?.ui,
|
||||
].filter((value): value is string => typeof value === "string" && value.length > 0);
|
||||
|
||||
if (entrypointPaths.length === 0) {
|
||||
addRuntimeFilesFromDir(path.join(absPath, "dist"));
|
||||
return [...targets.values()];
|
||||
}
|
||||
|
||||
for (const relativeEntrypoint of entrypointPaths) {
|
||||
const resolvedEntrypoint = path.resolve(absPath, relativeEntrypoint);
|
||||
if (!fileExists(resolvedEntrypoint)) continue;
|
||||
|
||||
const stat = statFile(resolvedEntrypoint);
|
||||
if (stat.isDirectory()) {
|
||||
addRuntimeFilesFromDir(resolvedEntrypoint);
|
||||
} else {
|
||||
addWatchTarget(resolvedEntrypoint, false, "file");
|
||||
}
|
||||
}
|
||||
|
||||
return [...targets.values()].sort((a, b) => a.path.localeCompare(b.path));
|
||||
}
|
||||
|
||||
/**
 * Create a PluginDevWatcher that monitors local plugin directories and
 * restarts workers on file changes.
 *
 * Wires itself to the plugin lifecycle: loaded/enabled plugins are resolved
 * to a local package path (via `resolvePluginPackagePath`, when provided) and
 * watched; disabled/unloaded plugins are unwatched. Restarts are debounced
 * per plugin (one pending timer per plugin ID) so a burst of file events
 * produces a single worker restart.
 *
 * @param lifecycle Lifecycle manager used both to restart workers and as the
 *   event source for plugin loaded/enabled/disabled/unloaded notifications.
 * @param resolvePluginPackagePath Optional resolver from plugin ID to a local
 *   package directory. When omitted, lifecycle events never start a watch;
 *   explicit `watch()` calls still work.
 * @param fsDeps Optional filesystem overrides (e.g. `existsSync`) for tests.
 * @returns A handle exposing `watch`, `unwatch`, and `close`.
 */
export function createPluginDevWatcher(
  lifecycle: PluginLifecycleManager,
  resolvePluginPackagePath?: ResolvePluginPackagePath,
  fsDeps?: PluginDevWatcherFsDeps,
): PluginDevWatcher {
  // pluginId → active chokidar watcher.
  const watchers = new Map<string, FSWatcher>();
  // pluginId → pending debounce timer for a worker restart.
  const debounceTimers = new Map<string, ReturnType<typeof setTimeout>>();
  // Injectable existence check so tests can avoid touching the real fs.
  const fileExists = fsDeps?.existsSync ?? existsSync;

  /**
   * Start watching one plugin's package directory.
   * No-op if the plugin is already watched or the path does not exist.
   */
  function watchPlugin(pluginId: string, packagePath: string): void {
    // Don't double-watch
    if (watchers.has(pluginId)) return;

    const absPath = path.resolve(packagePath);
    if (!fileExists(absPath)) {
      log.warn(
        { pluginId, packagePath: absPath },
        "plugin-dev-watcher: package path does not exist, skipping watch",
      );
      return;
    }

    try {
      // resolvePluginWatchTargets picks the concrete files/dirs to watch
      // (module-level helper; see its definition earlier in this file).
      const watcherTargets = resolvePluginWatchTargets(absPath, fsDeps);
      if (watcherTargets.length === 0) {
        log.warn(
          { pluginId, packagePath: absPath },
          "plugin-dev-watcher: no valid watch targets found, skipping watch",
        );
        return;
      }

      const watcher = chokidar.watch(
        watcherTargets.map((target) => target.path),
        {
          // Only react to changes after startup, not the initial scan.
          ignoreInitial: true,
          // Wait for writes to settle so we don't restart on half-written files.
          awaitWriteFinish: {
            stabilityThreshold: 200,
            pollInterval: 100,
          },
          // Filter out paths (node_modules etc. — see shouldIgnorePath) at the
          // chokidar level so they never generate events.
          ignored: (watchedPath) => {
            const relativePath = path.relative(absPath, watchedPath);
            return shouldIgnorePath(relativePath);
          },
        },
      );

      watcher.on("all", (_eventName, changedPath) => {
        // Re-check the ignore rules: chokidar's `ignored` option does not
        // cover every event source, so this guards against stray events.
        const relativePath = path.relative(absPath, changedPath);
        if (shouldIgnorePath(relativePath)) return;

        // Debounce: reset the pending restart timer on every new event.
        const existing = debounceTimers.get(pluginId);
        if (existing) clearTimeout(existing);

        debounceTimers.set(
          pluginId,
          setTimeout(() => {
            debounceTimers.delete(pluginId);
            log.info(
              { pluginId, changedFile: relativePath || path.basename(changedPath) },
              "plugin-dev-watcher: file change detected, restarting worker",
            );

            // Fire-and-forget restart; failures are logged, not thrown.
            lifecycle.restartWorker(pluginId).catch((err) => {
              log.warn(
                {
                  pluginId,
                  err: err instanceof Error ? err.message : String(err),
                },
                "plugin-dev-watcher: failed to restart worker after file change",
              );
            });
          }, DEBOUNCE_MS),
        );
      });

      // A watcher error is treated as fatal for this plugin's watch: log and
      // tear the watcher down rather than keep a broken watcher around.
      watcher.on("error", (err) => {
        log.warn(
          {
            pluginId,
            packagePath: absPath,
            err: err instanceof Error ? err.message : String(err),
          },
          "plugin-dev-watcher: watcher error, stopping watch for this plugin",
        );
        unwatchPlugin(pluginId);
      });

      watchers.set(pluginId, watcher);
      log.info(
        {
          pluginId,
          packagePath: absPath,
          watchTargets: watcherTargets.map((target) => ({
            path: target.path,
            kind: target.kind,
          })),
        },
        "plugin-dev-watcher: watching local plugin for changes",
      );
    } catch (err) {
      // Watch setup is best-effort in dev mode: never let it crash the server.
      log.warn(
        {
          pluginId,
          packagePath: absPath,
          err: err instanceof Error ? err.message : String(err),
        },
        "plugin-dev-watcher: failed to start file watcher",
      );
    }
  }

  /**
   * Stop watching one plugin: close its watcher (fire-and-forget) and cancel
   * any pending debounced restart.
   */
  function unwatchPlugin(pluginId: string): void {
    const pluginWatcher = watchers.get(pluginId);
    if (pluginWatcher) {
      void pluginWatcher.close();
      watchers.delete(pluginId);
    }
    const timer = debounceTimers.get(pluginId);
    if (timer) {
      clearTimeout(timer);
      debounceTimers.delete(pluginId);
    }
  }

  /**
   * Detach all lifecycle listeners and stop every active watch.
   * Called on shutdown of the dev watcher.
   */
  function close(): void {
    lifecycle.off("plugin.loaded", handlePluginLoaded);
    lifecycle.off("plugin.enabled", handlePluginEnabled);
    lifecycle.off("plugin.disabled", handlePluginDisabled);
    lifecycle.off("plugin.unloaded", handlePluginUnloaded);

    // Deleting entries while iterating a Map is safe in JS.
    for (const [pluginId] of watchers) {
      unwatchPlugin(pluginId);
    }
  }

  /**
   * Resolve a plugin's local package path and start watching it.
   * Silently does nothing when no resolver was supplied or when the plugin
   * does not resolve to a local path (e.g. non-local installs).
   */
  async function watchLocalPluginById(pluginId: string): Promise<void> {
    if (!resolvePluginPackagePath) return;

    try {
      const packagePath = await resolvePluginPackagePath(pluginId);
      if (!packagePath) return;
      watchPlugin(pluginId, packagePath);
    } catch (err) {
      log.warn(
        {
          pluginId,
          err: err instanceof Error ? err.message : String(err),
        },
        "plugin-dev-watcher: failed to resolve plugin package path",
      );
    }
  }

  // Named (stable-reference) handlers so close() can lifecycle.off() them.
  function handlePluginLoaded(payload: { pluginId: string }): void {
    void watchLocalPluginById(payload.pluginId);
  }

  function handlePluginEnabled(payload: { pluginId: string }): void {
    void watchLocalPluginById(payload.pluginId);
  }

  function handlePluginDisabled(payload: { pluginId: string }): void {
    unwatchPlugin(payload.pluginId);
  }

  function handlePluginUnloaded(payload: { pluginId: string }): void {
    unwatchPlugin(payload.pluginId);
  }

  lifecycle.on("plugin.loaded", handlePluginLoaded);
  lifecycle.on("plugin.enabled", handlePluginEnabled);
  lifecycle.on("plugin.disabled", handlePluginDisabled);
  lifecycle.on("plugin.unloaded", handlePluginUnloaded);

  return {
    watch: watchPlugin,
    unwatch: unwatchPlugin,
    close,
  };
}
|
||||
412
server/src/services/plugin-event-bus.ts
Normal file
412
server/src/services/plugin-event-bus.ts
Normal file
@@ -0,0 +1,412 @@
|
||||
/**
|
||||
* PluginEventBus — typed in-process event bus for the Paperclip plugin system.
|
||||
*
|
||||
* Responsibilities:
|
||||
* - Deliver core domain events to subscribing plugin workers (server-side).
|
||||
* - Apply `EventFilter` server-side so filtered-out events never reach the handler.
|
||||
* - Namespace plugin-emitted events as `plugin.<pluginId>.<eventName>`.
|
||||
* - Guard the core namespace: plugins may not emit events with the `plugin.` prefix.
|
||||
* - Isolate subscriptions per plugin — a plugin cannot enumerate or interfere with
|
||||
* another plugin's subscriptions.
|
||||
* - Support wildcard subscriptions via prefix matching (e.g. `plugin.acme.linear.*`).
|
||||
*
|
||||
* The bus operates in-process. In the full out-of-process architecture the host
|
||||
* calls `bus.emit()` after receiving events from the DB/queue layer, and the bus
|
||||
* forwards to handlers that proxy the call to the relevant worker process via IPC.
|
||||
* That IPC layer is separate; this module only handles routing and filtering.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §16 — Event System
|
||||
* @see PLUGIN_SPEC.md §16.1 — Event Filtering
|
||||
* @see PLUGIN_SPEC.md §16.2 — Plugin-to-Plugin Events
|
||||
*/
|
||||
|
||||
import type { PluginEventType } from "@paperclipai/shared";
|
||||
import type { PluginEvent, EventFilter } from "@paperclipai/plugin-sdk";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Internal types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* A registered subscription record stored per plugin.
|
||||
*/
|
||||
interface Subscription {
|
||||
/** The event name or prefix pattern this subscription matches. */
|
||||
eventPattern: string;
|
||||
/** Optional server-side filter applied before delivery. */
|
||||
filter: EventFilter | null;
|
||||
/** Async handler to invoke when a matching event passes the filter. */
|
||||
handler: (event: PluginEvent) => Promise<void>;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Pattern matching helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Returns true if the event type matches the subscription pattern.
|
||||
*
|
||||
* Matching rules:
|
||||
* - Exact match: `"issue.created"` matches `"issue.created"`.
|
||||
* - Wildcard suffix: `"plugin.acme.*"` matches any event type that starts with
|
||||
* `"plugin.acme."`. The wildcard `*` is only supported as a trailing token.
|
||||
*
|
||||
* No full glob syntax is supported — only trailing `*` after a `.` separator.
|
||||
*/
|
||||
function matchesPattern(eventType: string, pattern: string): boolean {
|
||||
if (pattern === eventType) return true;
|
||||
|
||||
// Trailing wildcard: "plugin.foo.*" → prefix is "plugin.foo."
|
||||
if (pattern.endsWith(".*")) {
|
||||
const prefix = pattern.slice(0, -1); // remove the trailing "*", keep the "."
|
||||
return eventType.startsWith(prefix);
|
||||
}
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
/**
|
||||
* Returns true if the event passes all fields of the filter.
|
||||
* A `null` or empty filter object passes all events.
|
||||
*
|
||||
* **Resolution strategy per field:**
|
||||
*
|
||||
* - `projectId` — checked against `event.entityId` when `entityType === "project"`,
|
||||
* otherwise against `payload.projectId`. This covers both direct project events
|
||||
* (e.g. `project.created`) and secondary events that embed a project reference in
|
||||
* their payload (e.g. `issue.created` with `payload.projectId`).
|
||||
*
|
||||
* - `companyId` — always resolved from `payload.companyId`. Core domain events that
|
||||
* belong to a company embed the company ID in their payload.
|
||||
*
|
||||
* - `agentId` — checked against `event.entityId` when `entityType === "agent"`,
|
||||
* otherwise against `payload.agentId`. Covers both direct agent lifecycle events
|
||||
* (e.g. `agent.created`) and run-level events with `payload.agentId` (e.g.
|
||||
* `agent.run.started`).
|
||||
*
|
||||
* Multiple filter fields are ANDed — all specified fields must match.
|
||||
*/
|
||||
function passesFilter(event: PluginEvent, filter: EventFilter | null): boolean {
|
||||
if (!filter) return true;
|
||||
|
||||
const payload = event.payload as Record<string, unknown> | null;
|
||||
|
||||
if (filter.projectId !== undefined) {
|
||||
const projectId = event.entityType === "project"
|
||||
? event.entityId
|
||||
: (typeof payload?.projectId === "string" ? payload.projectId : undefined);
|
||||
if (projectId !== filter.projectId) return false;
|
||||
}
|
||||
|
||||
if (filter.companyId !== undefined) {
|
||||
if (event.companyId !== filter.companyId) return false;
|
||||
}
|
||||
|
||||
if (filter.agentId !== undefined) {
|
||||
const agentId = event.entityType === "agent"
|
||||
? event.entityId
|
||||
: (typeof payload?.agentId === "string" ? payload.agentId : undefined);
|
||||
if (agentId !== filter.agentId) return false;
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Event bus factory
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Creates and returns a new `PluginEventBus` instance.
|
||||
*
|
||||
* A single bus instance should be shared across the server process. Each
|
||||
* plugin interacts with the bus through a scoped handle obtained via
|
||||
* {@link PluginEventBus.forPlugin}.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const bus = createPluginEventBus();
|
||||
*
|
||||
* // Give the Linear plugin a scoped handle
|
||||
* const linearBus = bus.forPlugin("acme.linear");
|
||||
*
|
||||
* // Subscribe from the plugin's perspective
|
||||
* linearBus.subscribe("issue.created", async (event) => {
|
||||
* // handle event
|
||||
* });
|
||||
*
|
||||
* // Emit a core domain event (called by the host, not the plugin)
|
||||
* await bus.emit({
|
||||
* eventId: "evt-1",
|
||||
* eventType: "issue.created",
|
||||
* occurredAt: new Date().toISOString(),
|
||||
* entityId: "iss-1",
|
||||
* entityType: "issue",
|
||||
* payload: { title: "Fix login bug", projectId: "proj-1" },
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export function createPluginEventBus(): PluginEventBus {
|
||||
// Subscription registry: pluginKey → list of subscriptions
|
||||
const registry = new Map<string, Subscription[]>();
|
||||
|
||||
/**
|
||||
* Retrieve or create the subscription list for a plugin.
|
||||
*/
|
||||
function subsFor(pluginId: string): Subscription[] {
|
||||
let subs = registry.get(pluginId);
|
||||
if (!subs) {
|
||||
subs = [];
|
||||
registry.set(pluginId, subs);
|
||||
}
|
||||
return subs;
|
||||
}
|
||||
|
||||
/**
|
||||
* Emit an event envelope to all matching subscribers across all plugins.
|
||||
*
|
||||
* Subscribers are called concurrently (Promise.all). Each handler's errors
|
||||
* are caught individually and collected in the returned `errors` array so a
|
||||
* single misbehaving plugin cannot interrupt delivery to other plugins.
|
||||
*/
|
||||
async function emit(event: PluginEvent): Promise<PluginEventBusEmitResult> {
|
||||
const errors: Array<{ pluginId: string; error: unknown }> = [];
|
||||
const promises: Promise<void>[] = [];
|
||||
|
||||
for (const [pluginId, subs] of registry) {
|
||||
for (const sub of subs) {
|
||||
if (!matchesPattern(event.eventType, sub.eventPattern)) continue;
|
||||
if (!passesFilter(event, sub.filter)) continue;
|
||||
|
||||
// Use Promise.resolve().then() so that synchronous throws from handlers
|
||||
// are also caught inside the promise chain. Calling
|
||||
// Promise.resolve(syncThrowingFn()) does NOT catch sync throws — the
|
||||
// throw escapes before Promise.resolve() can wrap it. Using .then()
|
||||
// ensures the call is deferred into the microtask queue where all
|
||||
// exceptions become rejections. Each .catch() swallows the rejection
|
||||
// and records it — the promise always resolves, so Promise.all never rejects.
|
||||
promises.push(
|
||||
Promise.resolve().then(() => sub.handler(event)).catch((error: unknown) => {
|
||||
errors.push({ pluginId, error });
|
||||
}),
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
await Promise.all(promises);
|
||||
return { errors };
|
||||
}
|
||||
|
||||
/**
|
||||
* Remove all subscriptions for a plugin (e.g. on worker shutdown or uninstall).
|
||||
*/
|
||||
function clearPlugin(pluginId: string): void {
|
||||
registry.delete(pluginId);
|
||||
}
|
||||
|
||||
/**
|
||||
* Return a scoped handle for a specific plugin. The handle exposes only the
|
||||
* plugin's own subscription list and enforces the plugin namespace on `emit`.
|
||||
*/
|
||||
function forPlugin(pluginId: string): ScopedPluginEventBus {
|
||||
return {
|
||||
/**
|
||||
* Subscribe to a core domain event or a plugin-namespaced event.
|
||||
*
|
||||
* For wildcard subscriptions use a trailing `.*` pattern, e.g.
|
||||
* `"plugin.acme.linear.*"`.
|
||||
*
|
||||
* Requires the `events.subscribe` capability (capability enforcement is
|
||||
* done by the host layer before calling this method).
|
||||
*/
|
||||
subscribe(
|
||||
eventPattern: PluginEventType | `plugin.${string}`,
|
||||
fnOrFilter: EventFilter | ((event: PluginEvent) => Promise<void>),
|
||||
maybeFn?: (event: PluginEvent) => Promise<void>,
|
||||
): void {
|
||||
let filter: EventFilter | null = null;
|
||||
let handler: (event: PluginEvent) => Promise<void>;
|
||||
|
||||
if (typeof fnOrFilter === "function") {
|
||||
handler = fnOrFilter;
|
||||
} else {
|
||||
filter = fnOrFilter;
|
||||
if (!maybeFn) throw new Error("Handler function is required when a filter is provided");
|
||||
handler = maybeFn;
|
||||
}
|
||||
|
||||
subsFor(pluginId).push({ eventPattern, filter, handler });
|
||||
},
|
||||
|
||||
/**
|
||||
* Emit a plugin-namespaced event. The event type is automatically
|
||||
* prefixed with `plugin.<pluginId>.` so:
|
||||
* - `emit("sync-done", payload)` becomes `"plugin.acme.linear.sync-done"`.
|
||||
*
|
||||
* Requires the `events.emit` capability (enforced by the host layer).
|
||||
*
|
||||
* @throws {Error} if `name` already contains the `plugin.` prefix
|
||||
* (prevents cross-namespace spoofing).
|
||||
*/
|
||||
async emit(name: string, companyId: string, payload: unknown): Promise<PluginEventBusEmitResult> {
|
||||
if (!name || name.trim() === "") {
|
||||
throw new Error(`Plugin "${pluginId}" must provide a non-empty event name.`);
|
||||
}
|
||||
|
||||
if (!companyId || companyId.trim() === "") {
|
||||
throw new Error(`Plugin "${pluginId}" must provide a companyId when emitting events.`);
|
||||
}
|
||||
|
||||
if (name.startsWith("plugin.")) {
|
||||
throw new Error(
|
||||
`Plugin "${pluginId}" must not include the "plugin." prefix when emitting events. ` +
|
||||
`Emit the bare event name (e.g. "sync-done") and the bus will namespace it automatically.`,
|
||||
);
|
||||
}
|
||||
|
||||
const eventType = `plugin.${pluginId}.${name}` as const;
|
||||
const event: PluginEvent = {
|
||||
eventId: crypto.randomUUID(),
|
||||
eventType,
|
||||
companyId,
|
||||
occurredAt: new Date().toISOString(),
|
||||
actorType: "plugin",
|
||||
actorId: pluginId,
|
||||
payload,
|
||||
};
|
||||
|
||||
return emit(event);
|
||||
},
|
||||
|
||||
/** Remove all subscriptions registered by this plugin. */
|
||||
clear(): void {
|
||||
clearPlugin(pluginId);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
return {
|
||||
emit,
|
||||
forPlugin,
|
||||
clearPlugin,
|
||||
/** Expose subscription count for a plugin (useful for tests and diagnostics). */
|
||||
subscriptionCount(pluginId?: string): number {
|
||||
if (pluginId !== undefined) {
|
||||
return registry.get(pluginId)?.length ?? 0;
|
||||
}
|
||||
let total = 0;
|
||||
for (const subs of registry.values()) total += subs.length;
|
||||
return total;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
// Public types
// ---------------------------------------------------------------------------

/**
 * Result returned from `emit()`. Handler errors are collected and returned
 * rather than thrown so a single misbehaving plugin cannot block delivery to
 * other plugins. An empty `errors` array means every matching handler
 * completed successfully.
 */
export interface PluginEventBusEmitResult {
  /** Errors thrown by individual handlers, keyed by the plugin that failed. */
  errors: Array<{ pluginId: string; error: unknown }>;
}

/**
 * The full event bus — held by the host process.
 *
 * Call `forPlugin(id)` to obtain a `ScopedPluginEventBus` for each plugin worker.
 */
export interface PluginEventBus {
  /**
   * Emit a typed domain event to all matching subscribers.
   *
   * Called by the host when a domain event occurs (e.g. from the DB layer or
   * message queue). All registered subscriptions across all plugins are checked.
   */
  emit(event: PluginEvent): Promise<PluginEventBusEmitResult>;

  /**
   * Get a scoped handle for a specific plugin worker.
   *
   * The scoped handle isolates the plugin's subscriptions and enforces the
   * plugin namespace on outbound events.
   */
  forPlugin(pluginId: string): ScopedPluginEventBus;

  /**
   * Remove all subscriptions for a plugin (called on worker shutdown/uninstall).
   * Calling this for a plugin with no subscriptions is a no-op.
   */
  clearPlugin(pluginId: string): void;

  /**
   * Return the total number of active subscriptions, or the count for a
   * specific plugin if `pluginId` is provided.
   */
  subscriptionCount(pluginId?: string): number;
}

/**
 * A plugin-scoped view of the event bus. Handed to the plugin worker (or its
 * host-side proxy) during initialisation.
 *
 * Plugins use this to:
 * 1. Subscribe to domain events (with optional server-side filter).
 * 2. Emit plugin-namespaced events for other plugins to consume.
 *
 * Note: `subscribe` overloads mirror the `PluginEventsClient.on()` interface
 * from the SDK. `emit` intentionally returns `PluginEventBusEmitResult` rather
 * than `void` so the host layer can inspect handler errors; the SDK-facing
 * `PluginEventsClient.emit()` wraps this and returns `void`.
 */
export interface ScopedPluginEventBus {
  /**
   * Subscribe to a core domain event or a plugin-namespaced event.
   *
   * **Pattern syntax:**
   * - Exact match: `"issue.created"` — receives only that event type.
   * - Wildcard suffix: `"plugin.acme.linear.*"` — receives all events emitted by
   *   the `acme.linear` plugin. The `*` is supported only as a trailing token after
   *   a `.` separator; no other glob syntax is supported.
   * - Top-level plugin wildcard: `"plugin.*"` — receives all plugin-emitted events
   *   regardless of which plugin emitted them.
   *
   * Wildcards apply only to the `plugin.*` namespace. Core domain events must be
   * subscribed to by exact name (e.g. `"issue.created"`, not `"issue.*"`).
   * NOTE(review): the matcher itself accepts a trailing `.*` on any namespace —
   * the core-namespace exactness above is a documented convention, not enforced
   * in code; confirm whether enforcement is intended.
   *
   * An optional `EventFilter` can be passed as the second argument to perform
   * server-side pre-filtering; filtered-out events are never delivered to the handler.
   */
  subscribe(
    eventPattern: PluginEventType | `plugin.${string}`,
    fn: (event: PluginEvent) => Promise<void>,
  ): void;
  subscribe(
    eventPattern: PluginEventType | `plugin.${string}`,
    filter: EventFilter,
    fn: (event: PluginEvent) => Promise<void>,
  ): void;

  /**
   * Emit a plugin-namespaced event. The bus automatically prepends
   * `plugin.<pluginId>.` to the `name`, so passing `"sync-done"` from plugin
   * `"acme.linear"` produces the event type `"plugin.acme.linear.sync-done"`.
   *
   * @param name Bare event name (e.g. `"sync-done"`). Must be non-empty and
   *   must not include the `plugin.` prefix — the bus adds that automatically.
   * @param companyId UUID of the company this event belongs to (required,
   *   non-empty).
   * @param payload Arbitrary JSON-serializable data to attach to the event.
   *
   * @throws {Error} if `name` is empty or whitespace-only.
   * @throws {Error} if `companyId` is empty or whitespace-only.
   * @throws {Error} if `name` starts with `"plugin."` (namespace spoofing guard).
   */
  emit(name: string, companyId: string, payload: unknown): Promise<PluginEventBusEmitResult>;

  /**
   * Remove all subscriptions registered by this plugin.
   */
  clear(): void;
}
|
||||
59
server/src/services/plugin-host-service-cleanup.ts
Normal file
59
server/src/services/plugin-host-service-cleanup.ts
Normal file
@@ -0,0 +1,59 @@
|
||||
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
|
||||
|
||||
// The slice of the lifecycle manager this module needs: event (un)subscription.
type LifecycleLike = Pick<PluginLifecycleManager, "on" | "off">;

/**
 * A worker runtime notification forwarded by the host.
 *
 * Only `"plugin.worker.crashed"` triggers disposer cleanup; restart events
 * are delivered but intentionally ignored by the controller.
 */
export interface PluginWorkerRuntimeEvent {
  /** Which runtime transition occurred. */
  type: "plugin.worker.crashed" | "plugin.worker.restarted";
  /** ID of the plugin whose worker changed state. */
  pluginId: string;
}

/**
 * Controller returned by {@link createPluginHostServiceCleanup}.
 */
export interface PluginHostServiceCleanupController {
  /** Run the plugin's disposer on a worker crash event; restarts are ignored. */
  handleWorkerEvent(event: PluginWorkerRuntimeEvent): void;
  /** Run every registered disposer and clear the registry (full-shutdown path). */
  disposeAll(): void;
  /** Detach the lifecycle listeners installed at construction time. */
  teardown(): void;
}
|
||||
|
||||
export function createPluginHostServiceCleanup(
|
||||
lifecycle: LifecycleLike,
|
||||
disposers: Map<string, () => void>,
|
||||
): PluginHostServiceCleanupController {
|
||||
const runDispose = (pluginId: string, remove = false) => {
|
||||
const dispose = disposers.get(pluginId);
|
||||
if (!dispose) return;
|
||||
dispose();
|
||||
if (remove) {
|
||||
disposers.delete(pluginId);
|
||||
}
|
||||
};
|
||||
|
||||
const handleWorkerStopped = ({ pluginId }: { pluginId: string }) => {
|
||||
runDispose(pluginId);
|
||||
};
|
||||
|
||||
const handlePluginUnloaded = ({ pluginId }: { pluginId: string }) => {
|
||||
runDispose(pluginId, true);
|
||||
};
|
||||
|
||||
lifecycle.on("plugin.worker_stopped", handleWorkerStopped);
|
||||
lifecycle.on("plugin.unloaded", handlePluginUnloaded);
|
||||
|
||||
return {
|
||||
handleWorkerEvent(event) {
|
||||
if (event.type === "plugin.worker.crashed") {
|
||||
runDispose(event.pluginId);
|
||||
}
|
||||
},
|
||||
|
||||
disposeAll() {
|
||||
for (const dispose of disposers.values()) {
|
||||
dispose();
|
||||
}
|
||||
disposers.clear();
|
||||
},
|
||||
|
||||
teardown() {
|
||||
lifecycle.off("plugin.worker_stopped", handleWorkerStopped);
|
||||
lifecycle.off("plugin.unloaded", handlePluginUnloaded);
|
||||
},
|
||||
};
|
||||
}
|
||||
1094
server/src/services/plugin-host-services.ts
Normal file
1094
server/src/services/plugin-host-services.ts
Normal file
File diff suppressed because it is too large
Load Diff
260
server/src/services/plugin-job-coordinator.ts
Normal file
260
server/src/services/plugin-job-coordinator.ts
Normal file
@@ -0,0 +1,260 @@
|
||||
/**
|
||||
* PluginJobCoordinator — bridges the plugin lifecycle manager with the
|
||||
* job scheduler and job store.
|
||||
*
|
||||
* This service listens to lifecycle events and performs the corresponding
|
||||
* scheduler and job store operations:
|
||||
*
|
||||
* - **plugin.loaded** → sync job declarations from manifest, then register
|
||||
* the plugin with the scheduler (computes `nextRunAt` for active jobs).
|
||||
*
|
||||
* - **plugin.disabled / plugin.unloaded** → unregister the plugin from the
|
||||
* scheduler (cancels in-flight runs, clears tracking state).
|
||||
*
|
||||
* ## Why a separate coordinator?
|
||||
*
|
||||
* The lifecycle manager, scheduler, and job store are independent services
|
||||
* with clean single-responsibility boundaries. The coordinator provides
|
||||
* the "glue" between them without adding coupling. This pattern is used
|
||||
* throughout Paperclip (e.g. heartbeat service coordinates timers + runs).
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §17 — Scheduled Jobs
|
||||
* @see ./plugin-job-scheduler.ts — Scheduler service
|
||||
* @see ./plugin-job-store.ts — Persistence layer
|
||||
* @see ./plugin-lifecycle.ts — Plugin state machine
|
||||
*/
|
||||
|
||||
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
|
||||
import type { PluginJobScheduler } from "./plugin-job-scheduler.js";
|
||||
import type { PluginJobStore } from "./plugin-job-store.js";
|
||||
import { pluginRegistryService } from "./plugin-registry.js";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Options for creating a PluginJobCoordinator.
 */
export interface PluginJobCoordinatorOptions {
  /** Drizzle database instance (used to read plugin manifests via the registry). */
  db: Db;
  /** The plugin lifecycle manager to listen to. */
  lifecycle: PluginLifecycleManager;
  /** The job scheduler to register/unregister plugins with. */
  scheduler: PluginJobScheduler;
  /** The job store for syncing declarations. */
  jobStore: PluginJobStore;
}

/**
 * The public interface of the job coordinator.
 *
 * `start()` and `stop()` are idempotent — calling either twice in a row is
 * a no-op.
 */
export interface PluginJobCoordinator {
  /**
   * Start listening to lifecycle events.
   *
   * This wires up the `plugin.loaded`, `plugin.disabled`, and
   * `plugin.unloaded` event handlers.
   */
  start(): void;

  /**
   * Stop listening to lifecycle events.
   *
   * Removes all event subscriptions added by `start()`.
   */
  stop(): void;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Implementation
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a PluginJobCoordinator.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const coordinator = createPluginJobCoordinator({
|
||||
* db,
|
||||
* lifecycle,
|
||||
* scheduler,
|
||||
* jobStore,
|
||||
* });
|
||||
*
|
||||
* // Start listening to lifecycle events
|
||||
* coordinator.start();
|
||||
*
|
||||
* // On server shutdown
|
||||
* coordinator.stop();
|
||||
* ```
|
||||
*/
|
||||
export function createPluginJobCoordinator(
|
||||
options: PluginJobCoordinatorOptions,
|
||||
): PluginJobCoordinator {
|
||||
const { db, lifecycle, scheduler, jobStore } = options;
|
||||
const log = logger.child({ service: "plugin-job-coordinator" });
|
||||
const registry = pluginRegistryService(db);
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Event handlers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* When a plugin is loaded (transitions to `ready`):
|
||||
* 1. Look up the manifest from the registry
|
||||
* 2. Sync job declarations from the manifest into the DB
|
||||
* 3. Register the plugin with the scheduler (computes nextRunAt)
|
||||
*/
|
||||
async function onPluginLoaded(payload: { pluginId: string; pluginKey: string }): Promise<void> {
|
||||
const { pluginId, pluginKey } = payload;
|
||||
log.info({ pluginId, pluginKey }, "plugin loaded — syncing jobs and registering with scheduler");
|
||||
|
||||
try {
|
||||
// Get the manifest from the registry
|
||||
const plugin = await registry.getById(pluginId);
|
||||
if (!plugin?.manifestJson) {
|
||||
log.warn({ pluginId, pluginKey }, "plugin loaded but no manifest found — skipping job sync");
|
||||
return;
|
||||
}
|
||||
|
||||
// Sync job declarations from the manifest
|
||||
const manifest = plugin.manifestJson;
|
||||
const jobDeclarations = manifest.jobs ?? [];
|
||||
|
||||
if (jobDeclarations.length > 0) {
|
||||
log.info(
|
||||
{ pluginId, pluginKey, jobCount: jobDeclarations.length },
|
||||
"syncing job declarations from manifest",
|
||||
);
|
||||
await jobStore.syncJobDeclarations(pluginId, jobDeclarations);
|
||||
}
|
||||
|
||||
// Register with the scheduler (computes nextRunAt for active jobs)
|
||||
await scheduler.registerPlugin(pluginId);
|
||||
} catch (err) {
|
||||
log.error(
|
||||
{
|
||||
pluginId,
|
||||
pluginKey,
|
||||
err: err instanceof Error ? err.message : String(err),
|
||||
},
|
||||
"failed to sync jobs or register plugin with scheduler",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* When a plugin is disabled (transitions to `error` with "disabled by
|
||||
* operator" or genuine error): unregister from the scheduler.
|
||||
*/
|
||||
async function onPluginDisabled(payload: {
|
||||
pluginId: string;
|
||||
pluginKey: string;
|
||||
reason?: string;
|
||||
}): Promise<void> {
|
||||
const { pluginId, pluginKey, reason } = payload;
|
||||
log.info(
|
||||
{ pluginId, pluginKey, reason },
|
||||
"plugin disabled — unregistering from scheduler",
|
||||
);
|
||||
|
||||
try {
|
||||
await scheduler.unregisterPlugin(pluginId);
|
||||
} catch (err) {
|
||||
log.error(
|
||||
{
|
||||
pluginId,
|
||||
pluginKey,
|
||||
err: err instanceof Error ? err.message : String(err),
|
||||
},
|
||||
"failed to unregister plugin from scheduler",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* When a plugin is unloaded (uninstalled): unregister from the scheduler.
|
||||
*/
|
||||
async function onPluginUnloaded(payload: {
|
||||
pluginId: string;
|
||||
pluginKey: string;
|
||||
removeData: boolean;
|
||||
}): Promise<void> {
|
||||
const { pluginId, pluginKey, removeData } = payload;
|
||||
log.info(
|
||||
{ pluginId, pluginKey, removeData },
|
||||
"plugin unloaded — unregistering from scheduler",
|
||||
);
|
||||
|
||||
try {
|
||||
await scheduler.unregisterPlugin(pluginId);
|
||||
|
||||
// If data is being purged, also delete all job definitions and runs
|
||||
if (removeData) {
|
||||
log.info({ pluginId, pluginKey }, "purging job data for uninstalled plugin");
|
||||
await jobStore.deleteAllJobs(pluginId);
|
||||
}
|
||||
} catch (err) {
|
||||
log.error(
|
||||
{
|
||||
pluginId,
|
||||
pluginKey,
|
||||
err: err instanceof Error ? err.message : String(err),
|
||||
},
|
||||
"failed to unregister plugin from scheduler during unload",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// State
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
let attached = false;
|
||||
|
||||
// We need stable references for on/off since the lifecycle manager
|
||||
// uses them for matching. We wrap the async handlers in sync wrappers
|
||||
// that fire-and-forget (swallowing unhandled rejections via the try/catch
|
||||
// inside each handler).
|
||||
const boundOnLoaded = (payload: { pluginId: string; pluginKey: string }) => {
|
||||
void onPluginLoaded(payload);
|
||||
};
|
||||
const boundOnDisabled = (payload: { pluginId: string; pluginKey: string; reason?: string }) => {
|
||||
void onPluginDisabled(payload);
|
||||
};
|
||||
const boundOnUnloaded = (payload: { pluginId: string; pluginKey: string; removeData: boolean }) => {
|
||||
void onPluginUnloaded(payload);
|
||||
};
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
return {
|
||||
start(): void {
|
||||
if (attached) return;
|
||||
attached = true;
|
||||
|
||||
lifecycle.on("plugin.loaded", boundOnLoaded);
|
||||
lifecycle.on("plugin.disabled", boundOnDisabled);
|
||||
lifecycle.on("plugin.unloaded", boundOnUnloaded);
|
||||
|
||||
log.info("plugin job coordinator started — listening to lifecycle events");
|
||||
},
|
||||
|
||||
stop(): void {
|
||||
if (!attached) return;
|
||||
attached = false;
|
||||
|
||||
lifecycle.off("plugin.loaded", boundOnLoaded);
|
||||
lifecycle.off("plugin.disabled", boundOnDisabled);
|
||||
lifecycle.off("plugin.unloaded", boundOnUnloaded);
|
||||
|
||||
log.info("plugin job coordinator stopped");
|
||||
},
|
||||
};
|
||||
}
|
||||
752
server/src/services/plugin-job-scheduler.ts
Normal file
752
server/src/services/plugin-job-scheduler.ts
Normal file
@@ -0,0 +1,752 @@
|
||||
/**
|
||||
* PluginJobScheduler — tick-based scheduler for plugin scheduled jobs.
|
||||
*
|
||||
* The scheduler is the central coordinator for all plugin cron jobs. It
|
||||
* periodically ticks (default every 30 seconds), queries the `plugin_jobs`
|
||||
* table for jobs whose `nextRunAt` has passed, dispatches `runJob` RPC calls
|
||||
* to the appropriate worker processes, records each execution in the
|
||||
* `plugin_job_runs` table, and advances the scheduling pointer.
|
||||
*
|
||||
* ## Responsibilities
|
||||
*
|
||||
* 1. **Tick loop** — A `setInterval`-based loop fires every `tickIntervalMs`
|
||||
* (default 30s). Each tick scans for due jobs and dispatches them.
|
||||
*
|
||||
* 2. **Cron parsing & next-run calculation** — Uses the lightweight built-in
|
||||
* cron parser ({@link parseCron}, {@link nextCronTick}) to compute the
|
||||
* `nextRunAt` timestamp after each run or when a new job is registered.
|
||||
*
|
||||
* 3. **Overlap prevention** — Before dispatching a job, the scheduler checks
|
||||
* for an existing `running` run for the same job. If one exists, the job
|
||||
* is skipped for that tick.
|
||||
*
|
||||
* 4. **Job run recording** — Every execution creates a `plugin_job_runs` row:
|
||||
* `queued` → `running` → `succeeded` | `failed`. Duration and error are
|
||||
* captured.
|
||||
*
|
||||
* 5. **Lifecycle integration** — The scheduler exposes `registerPlugin()` and
|
||||
* `unregisterPlugin()` so the host lifecycle manager can wire up job
|
||||
* scheduling when plugins start/stop. On registration, the scheduler
|
||||
* computes `nextRunAt` for all active jobs that don't already have one.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §17 — Scheduled Jobs
|
||||
* @see ./plugin-job-store.ts — Persistence layer
|
||||
* @see ./cron.ts — Cron parsing utilities
|
||||
*/
|
||||
|
||||
import { and, eq, isNull, lte, or } from "drizzle-orm";
import type { Db } from "@paperclipai/db";
import { pluginJobs, pluginJobRuns } from "@paperclipai/db";
import type { PluginJobStore } from "./plugin-job-store.js";
import type { PluginWorkerManager } from "./plugin-worker-manager.js";
import { parseCron, nextCronTick, validateCron } from "./cron.js";
import { logger } from "../middleware/logger.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Constants
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Default interval between scheduler ticks (30 seconds). Overridable via `tickIntervalMs`. */
const DEFAULT_TICK_INTERVAL_MS = 30_000;

/** Default timeout for a runJob RPC call (5 minutes). Overridable via `jobTimeoutMs`. */
const DEFAULT_JOB_TIMEOUT_MS = 5 * 60 * 1_000;

/** Maximum number of concurrent job executions across all plugins. Overridable via `maxConcurrentJobs`. */
const DEFAULT_MAX_CONCURRENT_JOBS = 10;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Options for creating a PluginJobScheduler.
 */
export interface PluginJobSchedulerOptions {
  /** Drizzle database instance. */
  db: Db;
  /** Persistence layer for jobs and runs. */
  jobStore: PluginJobStore;
  /** Worker process manager for RPC calls. */
  workerManager: PluginWorkerManager;
  /** Interval between scheduler ticks in ms (default: 30s). */
  tickIntervalMs?: number;
  /** Timeout for individual job RPC calls in ms (default: 5min). */
  jobTimeoutMs?: number;
  /** Maximum number of concurrent job executions (default: 10). */
  maxConcurrentJobs?: number;
}

/**
 * Result of a manual job trigger.
 */
export interface TriggerJobResult {
  /** The created run ID. */
  runId: string;
  /** The job ID that was triggered. */
  jobId: string;
}

/**
 * Diagnostic information about the scheduler.
 */
export interface SchedulerDiagnostics {
  /** Whether the tick loop is running. */
  running: boolean;
  /** Number of jobs currently executing. */
  activeJobCount: number;
  /** Set of job IDs currently in-flight. */
  activeJobIds: string[];
  /** Total number of ticks executed since start. */
  tickCount: number;
  /** Timestamp of the last tick (ISO 8601), or null before the first tick. */
  lastTickAt: string | null;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Scheduler
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * The public interface of the job scheduler.
 */
export interface PluginJobScheduler {
  /**
   * Start the scheduler tick loop.
   *
   * Safe to call multiple times — subsequent calls are no-ops.
   */
  start(): void;

  /**
   * Stop the scheduler tick loop.
   *
   * In-flight job runs are NOT cancelled — they are allowed to finish
   * naturally. The tick loop simply stops firing.
   */
  stop(): void;

  /**
   * Register a plugin with the scheduler.
   *
   * Computes `nextRunAt` for all active jobs that are missing it. This is
   * typically called after a plugin's worker process starts and
   * `syncJobDeclarations()` has been called.
   *
   * @param pluginId - UUID of the plugin
   */
  registerPlugin(pluginId: string): Promise<void>;

  /**
   * Unregister a plugin from the scheduler.
   *
   * Cancels any in-flight runs for the plugin and removes tracking state.
   *
   * @param pluginId - UUID of the plugin
   */
  unregisterPlugin(pluginId: string): Promise<void>;

  /**
   * Manually trigger a specific job (outside of the cron schedule).
   *
   * Creates a run with `trigger: "manual"` and dispatches immediately,
   * respecting the overlap prevention check. The dispatch happens in the
   * background: the returned run may still be executing when this resolves.
   *
   * @param jobId - UUID of the job to trigger
   * @param trigger - What triggered this run (default: "manual")
   * @returns The created run info
   * @throws {Error} if the job is not found, not active, or already running
   */
  triggerJob(jobId: string, trigger?: "manual" | "retry"): Promise<TriggerJobResult>;

  /**
   * Run a single scheduler tick immediately (for testing).
   *
   * @internal
   */
  tick(): Promise<void>;

  /**
   * Get diagnostic information about the scheduler state.
   */
  diagnostics(): SchedulerDiagnostics;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Implementation
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a new PluginJobScheduler.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const scheduler = createPluginJobScheduler({
|
||||
* db,
|
||||
* jobStore,
|
||||
* workerManager,
|
||||
* });
|
||||
*
|
||||
* // Start the tick loop
|
||||
* scheduler.start();
|
||||
*
|
||||
* // When a plugin comes online, register it
|
||||
* await scheduler.registerPlugin(pluginId);
|
||||
*
|
||||
* // Manually trigger a job
|
||||
* const { runId } = await scheduler.triggerJob(jobId);
|
||||
*
|
||||
* // On server shutdown
|
||||
* scheduler.stop();
|
||||
* ```
|
||||
*/
|
||||
export function createPluginJobScheduler(
|
||||
options: PluginJobSchedulerOptions,
|
||||
): PluginJobScheduler {
|
||||
const {
  db,
  jobStore,
  workerManager,
  tickIntervalMs = DEFAULT_TICK_INTERVAL_MS,
  jobTimeoutMs = DEFAULT_JOB_TIMEOUT_MS,
  maxConcurrentJobs = DEFAULT_MAX_CONCURRENT_JOBS,
} = options;

const log = logger.child({ service: "plugin-job-scheduler" });

// -----------------------------------------------------------------------
// State
// -----------------------------------------------------------------------

/** Timer handle for the tick loop. */
let tickTimer: ReturnType<typeof setInterval> | null = null;

/** Whether the scheduler is running. */
let running = false;

/**
 * Set of job IDs currently being executed (for overlap prevention).
 * NOTE(review): this is per-process, in-memory state — the DB-level
 * running-run check in triggerJob is what covers multi-instance setups.
 */
const activeJobs = new Set<string>();

/** Total number of ticks since start. */
let tickCount = 0;

/** Timestamp of the last tick. */
let lastTickAt: Date | null = null;

/** Guard against concurrent tick execution. */
let tickInProgress = false;
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Core: tick
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* A single scheduler tick. Queries for due jobs and dispatches them.
|
||||
*/
|
||||
async function tick(): Promise<void> {
|
||||
// Prevent overlapping ticks (in case a tick takes longer than the interval)
|
||||
if (tickInProgress) {
|
||||
log.debug("skipping tick — previous tick still in progress");
|
||||
return;
|
||||
}
|
||||
|
||||
tickInProgress = true;
|
||||
tickCount++;
|
||||
lastTickAt = new Date();
|
||||
|
||||
try {
|
||||
const now = new Date();
|
||||
|
||||
// Query for jobs whose nextRunAt has passed and are active.
|
||||
// We include jobs with null nextRunAt since they may have just been
|
||||
// registered and need their first run calculated.
|
||||
const dueJobs = await db
|
||||
.select()
|
||||
.from(pluginJobs)
|
||||
.where(
|
||||
and(
|
||||
eq(pluginJobs.status, "active"),
|
||||
lte(pluginJobs.nextRunAt, now),
|
||||
),
|
||||
);
|
||||
|
||||
if (dueJobs.length === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
log.debug({ count: dueJobs.length }, "found due jobs");
|
||||
|
||||
// Dispatch each due job (respecting concurrency limits)
|
||||
const dispatches: Promise<void>[] = [];
|
||||
|
||||
for (const job of dueJobs) {
|
||||
// Concurrency limit
|
||||
if (activeJobs.size >= maxConcurrentJobs) {
|
||||
log.warn(
|
||||
{ maxConcurrentJobs, activeJobCount: activeJobs.size },
|
||||
"max concurrent jobs reached, deferring remaining jobs",
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
// Overlap prevention: skip if this job is already running
|
||||
if (activeJobs.has(job.id)) {
|
||||
log.debug(
|
||||
{ jobId: job.id, jobKey: job.jobKey, pluginId: job.pluginId },
|
||||
"skipping job — already running (overlap prevention)",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Check if the worker is available
|
||||
if (!workerManager.isRunning(job.pluginId)) {
|
||||
log.debug(
|
||||
{ jobId: job.id, pluginId: job.pluginId },
|
||||
"skipping job — worker not running",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Validate cron expression before dispatching
|
||||
if (!job.schedule) {
|
||||
log.warn(
|
||||
{ jobId: job.id, jobKey: job.jobKey },
|
||||
"skipping job — no schedule defined",
|
||||
);
|
||||
continue;
|
||||
}
|
||||
|
||||
dispatches.push(dispatchJob(job));
|
||||
}
|
||||
|
||||
if (dispatches.length > 0) {
|
||||
await Promise.allSettled(dispatches);
|
||||
}
|
||||
} catch (err) {
|
||||
log.error(
|
||||
{ err: err instanceof Error ? err.message : String(err) },
|
||||
"scheduler tick error",
|
||||
);
|
||||
} finally {
|
||||
tickInProgress = false;
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Core: dispatch a single job
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Dispatch a single job run — create the run record, call the worker,
 * record the result, and advance the schedule pointer.
 *
 * Run record lifecycle as written to the store: created → running →
 * succeeded | failed. The schedule pointer is advanced in `finally` so
 * that a failed run still moves `nextRunAt` forward and cannot wedge the
 * job into firing again on every tick.
 */
async function dispatchJob(
  job: typeof pluginJobs.$inferSelect,
): Promise<void> {
  const { id: jobId, pluginId, jobKey, schedule } = job;
  const jobLog = log.child({ jobId, pluginId, jobKey });

  // Mark as active (overlap prevention)
  activeJobs.add(jobId);

  // runId stays undefined if createRun itself throws; the catch branch
  // checks this before trying to record a failure.
  let runId: string | undefined;
  const startedAt = Date.now();

  try {
    // 1. Create run record
    const run = await jobStore.createRun({
      jobId,
      pluginId,
      trigger: "schedule",
    });
    runId = run.id;

    jobLog.info({ runId }, "dispatching scheduled job");

    // 2. Mark run as running
    await jobStore.markRunning(runId);

    // 3. Call worker via RPC
    await workerManager.call(
      pluginId,
      "runJob",
      {
        job: {
          jobKey,
          runId,
          trigger: "schedule" as const,
          // Fall back to "now" when nextRunAt is null (first-ever run)
          scheduledAt: (job.nextRunAt ?? new Date()).toISOString(),
        },
      },
      jobTimeoutMs,
    );

    // 4. Mark run as succeeded
    const durationMs = Date.now() - startedAt;
    await jobStore.completeRun(runId, {
      status: "succeeded",
      durationMs,
    });

    jobLog.info({ runId, durationMs }, "job completed successfully");
  } catch (err) {
    const durationMs = Date.now() - startedAt;
    const errorMessage = err instanceof Error ? err.message : String(err);

    jobLog.error(
      { runId, durationMs, err: errorMessage },
      "job execution failed",
    );

    // Record the failure (only possible when the run record was created)
    if (runId) {
      try {
        await jobStore.completeRun(runId, {
          status: "failed",
          error: errorMessage,
          durationMs,
        });
      } catch (completeErr) {
        jobLog.error(
          {
            runId,
            err: completeErr instanceof Error ? completeErr.message : String(completeErr),
          },
          "failed to record job failure",
        );
      }
    }
  } finally {
    // Remove from active set
    activeJobs.delete(jobId);

    // 5. Always advance the schedule pointer (even on failure)
    try {
      await advanceSchedulePointer(job);
    } catch (err) {
      jobLog.error(
        { err: err instanceof Error ? err.message : String(err) },
        "failed to advance schedule pointer",
      );
    }
  }
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Core: manual trigger
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Manually trigger a specific job outside of its cron schedule.
 *
 * Validates that the job exists, is active, is not already running (both
 * in the in-memory active set and in the DB), and that its plugin worker
 * is up, then creates the run record and dispatches it in the background.
 *
 * NOTE(review): the overlap checks are not atomic with the dispatch —
 * several awaits sit between them — so two near-simultaneous triggers for
 * the same job could both pass the checks. Confirm this is acceptable.
 *
 * @param jobId - UUID of the job to trigger
 * @param trigger - What triggered this run (default: "manual")
 * @returns The created run info (the run may still be executing)
 * @throws {Error} if the job is not found, not active, or already running
 */
async function triggerJob(
  jobId: string,
  trigger: "manual" | "retry" = "manual",
): Promise<TriggerJobResult> {
  const job = await jobStore.getJobById(jobId);
  if (!job) {
    throw new Error(`Job not found: ${jobId}`);
  }

  if (job.status !== "active") {
    throw new Error(
      `Job "${job.jobKey}" is not active (status: ${job.status})`,
    );
  }

  // Overlap prevention
  if (activeJobs.has(jobId)) {
    throw new Error(
      `Job "${job.jobKey}" is already running — cannot trigger while in progress`,
    );
  }

  // Also check DB for running runs (defensive — covers multi-instance)
  const existingRuns = await db
    .select()
    .from(pluginJobRuns)
    .where(
      and(
        eq(pluginJobRuns.jobId, jobId),
        eq(pluginJobRuns.status, "running"),
      ),
    );

  if (existingRuns.length > 0) {
    throw new Error(
      `Job "${job.jobKey}" already has a running execution — cannot trigger while in progress`,
    );
  }

  // Check worker availability
  if (!workerManager.isRunning(job.pluginId)) {
    throw new Error(
      `Worker for plugin "${job.pluginId}" is not running — cannot trigger job`,
    );
  }

  // Create the run and dispatch (non-blocking)
  const run = await jobStore.createRun({
    jobId,
    pluginId: job.pluginId,
    trigger,
  });

  // Dispatch in background — don't block the caller
  void dispatchManualRun(job, run.id, trigger);

  return { runId: run.id, jobId };
}
|
||||
|
||||
/**
|
||||
* Dispatch a manually triggered job run.
|
||||
*/
|
||||
async function dispatchManualRun(
|
||||
job: typeof pluginJobs.$inferSelect,
|
||||
runId: string,
|
||||
trigger: "manual" | "retry",
|
||||
): Promise<void> {
|
||||
const { id: jobId, pluginId, jobKey } = job;
|
||||
const jobLog = log.child({ jobId, pluginId, jobKey, runId, trigger });
|
||||
|
||||
activeJobs.add(jobId);
|
||||
const startedAt = Date.now();
|
||||
|
||||
try {
|
||||
await jobStore.markRunning(runId);
|
||||
|
||||
await workerManager.call(
|
||||
pluginId,
|
||||
"runJob",
|
||||
{
|
||||
job: {
|
||||
jobKey,
|
||||
runId,
|
||||
trigger,
|
||||
scheduledAt: new Date().toISOString(),
|
||||
},
|
||||
},
|
||||
jobTimeoutMs,
|
||||
);
|
||||
|
||||
const durationMs = Date.now() - startedAt;
|
||||
await jobStore.completeRun(runId, {
|
||||
status: "succeeded",
|
||||
durationMs,
|
||||
});
|
||||
|
||||
jobLog.info({ durationMs }, "manual job completed successfully");
|
||||
} catch (err) {
|
||||
const durationMs = Date.now() - startedAt;
|
||||
const errorMessage = err instanceof Error ? err.message : String(err);
|
||||
jobLog.error({ durationMs, err: errorMessage }, "manual job failed");
|
||||
|
||||
try {
|
||||
await jobStore.completeRun(runId, {
|
||||
status: "failed",
|
||||
error: errorMessage,
|
||||
durationMs,
|
||||
});
|
||||
} catch (completeErr) {
|
||||
jobLog.error(
|
||||
{
|
||||
err: completeErr instanceof Error ? completeErr.message : String(completeErr),
|
||||
},
|
||||
"failed to record manual job failure",
|
||||
);
|
||||
}
|
||||
} finally {
|
||||
activeJobs.delete(jobId);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Schedule pointer management
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Advance the `lastRunAt` and `nextRunAt` timestamps on a job after a run.
|
||||
*/
|
||||
async function advanceSchedulePointer(
|
||||
job: typeof pluginJobs.$inferSelect,
|
||||
): Promise<void> {
|
||||
const now = new Date();
|
||||
let nextRunAt: Date | null = null;
|
||||
|
||||
if (job.schedule) {
|
||||
const validationError = validateCron(job.schedule);
|
||||
if (validationError) {
|
||||
log.warn(
|
||||
{ jobId: job.id, schedule: job.schedule, error: validationError },
|
||||
"invalid cron schedule — cannot compute next run",
|
||||
);
|
||||
} else {
|
||||
const cron = parseCron(job.schedule);
|
||||
nextRunAt = nextCronTick(cron, now);
|
||||
}
|
||||
}
|
||||
|
||||
await jobStore.updateRunTimestamps(job.id, now, nextRunAt);
|
||||
}
|
||||
|
||||
/**
 * Ensure all active jobs for a plugin have a `nextRunAt` value.
 * Called when a plugin is registered with the scheduler.
 *
 * Jobs are skipped when their `nextRunAt` is already in the future, when
 * they have no schedule, or when the cron expression fails validation.
 *
 * NOTE(review): for a job that has never run, `job.lastRunAt ?? new Date(0)`
 * writes the Unix epoch as the last-run timestamp — confirm that
 * `updateRunTimestamps` treats the epoch as "never ran".
 */
async function ensureNextRunTimestamps(pluginId: string): Promise<void> {
  const jobs = await jobStore.listJobs(pluginId, "active");

  for (const job of jobs) {
    // Skip jobs that already have a valid nextRunAt in the future
    if (job.nextRunAt && job.nextRunAt.getTime() > Date.now()) {
      continue;
    }

    // Skip jobs without a schedule
    if (!job.schedule) {
      continue;
    }

    const validationError = validateCron(job.schedule);
    if (validationError) {
      log.warn(
        { jobId: job.id, jobKey: job.jobKey, schedule: job.schedule, error: validationError },
        "skipping job with invalid cron schedule",
      );
      continue;
    }

    const cron = parseCron(job.schedule);
    const nextRunAt = nextCronTick(cron, new Date());

    if (nextRunAt) {
      await jobStore.updateRunTimestamps(
        job.id,
        job.lastRunAt ?? new Date(0),
        nextRunAt,
      );
      log.debug(
        { jobId: job.id, jobKey: job.jobKey, nextRunAt: nextRunAt.toISOString() },
        "computed nextRunAt for job",
      );
    }
  }
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Plugin registration
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
 * Register a plugin: compute `nextRunAt` for any of its active jobs that
 * are missing one (or whose pointer has already passed).
 */
async function registerPlugin(pluginId: string): Promise<void> {
  log.info({ pluginId }, "registering plugin with job scheduler");
  await ensureNextRunTimestamps(pluginId);
}
|
||||
|
||||
async function unregisterPlugin(pluginId: string): Promise<void> {
|
||||
log.info({ pluginId }, "unregistering plugin from job scheduler");
|
||||
|
||||
// Cancel any in-flight run records for this plugin that are still
|
||||
// queued or running. Active jobs in-memory will finish naturally.
|
||||
try {
|
||||
const runningRuns = await db
|
||||
.select()
|
||||
.from(pluginJobRuns)
|
||||
.where(
|
||||
and(
|
||||
eq(pluginJobRuns.pluginId, pluginId),
|
||||
or(
|
||||
eq(pluginJobRuns.status, "running"),
|
||||
eq(pluginJobRuns.status, "queued"),
|
||||
),
|
||||
),
|
||||
);
|
||||
|
||||
for (const run of runningRuns) {
|
||||
await jobStore.completeRun(run.id, {
|
||||
status: "cancelled",
|
||||
error: "Plugin unregistered",
|
||||
durationMs: run.startedAt
|
||||
? Date.now() - run.startedAt.getTime()
|
||||
: null,
|
||||
});
|
||||
}
|
||||
} catch (err) {
|
||||
log.error(
|
||||
{
|
||||
pluginId,
|
||||
err: err instanceof Error ? err.message : String(err),
|
||||
},
|
||||
"error cancelling in-flight runs during unregister",
|
||||
);
|
||||
}
|
||||
|
||||
// Remove any active tracking for jobs owned by this plugin
|
||||
const jobs = await jobStore.listJobs(pluginId);
|
||||
for (const job of jobs) {
|
||||
activeJobs.delete(job.id);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Lifecycle: start / stop
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
function start(): void {
|
||||
if (running) {
|
||||
log.debug("scheduler already running");
|
||||
return;
|
||||
}
|
||||
|
||||
running = true;
|
||||
tickTimer = setInterval(() => {
|
||||
void tick();
|
||||
}, tickIntervalMs);
|
||||
|
||||
log.info(
|
||||
{ tickIntervalMs, maxConcurrentJobs },
|
||||
"plugin job scheduler started",
|
||||
);
|
||||
}
|
||||
|
||||
/**
 * Stop the scheduler tick loop. In-flight runs are not cancelled; only
 * the interval timer stops firing.
 */
function stop(): void {
  // Always clear the timer defensively, even if `running` is already false,
  // to prevent leaked interval timers.
  if (tickTimer !== null) {
    clearInterval(tickTimer);
    tickTimer = null;
  }

  if (!running) return;
  running = false;

  log.info(
    { activeJobCount: activeJobs.size },
    "plugin job scheduler stopped",
  );
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Diagnostics
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
function diagnostics(): SchedulerDiagnostics {
|
||||
return {
|
||||
running,
|
||||
activeJobCount: activeJobs.size,
|
||||
activeJobIds: [...activeJobs],
|
||||
tickCount,
|
||||
lastTickAt: lastTickAt?.toISOString() ?? null,
|
||||
};
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
// Assemble the public PluginJobScheduler surface.
return {
  start,
  stop,
  registerPlugin,
  unregisterPlugin,
  triggerJob,
  tick,
  diagnostics,
};
|
||||
}
|
||||
465
server/src/services/plugin-job-store.ts
Normal file
465
server/src/services/plugin-job-store.ts
Normal file
@@ -0,0 +1,465 @@
|
||||
/**
|
||||
* Plugin Job Store — persistence layer for scheduled plugin jobs and their
|
||||
* execution history.
|
||||
*
|
||||
* This service manages the `plugin_jobs` and `plugin_job_runs` tables. It is
|
||||
* the server-side backing store for the `ctx.jobs` SDK surface exposed to
|
||||
* plugin workers.
|
||||
*
|
||||
* ## Responsibilities
|
||||
*
|
||||
* 1. **Sync job declarations** — When a plugin is installed or started, the
|
||||
* host calls `syncJobDeclarations()` to upsert the manifest's declared jobs
|
||||
* into the `plugin_jobs` table. Jobs removed from the manifest are marked
|
||||
* `paused` (not deleted) to preserve history.
|
||||
*
|
||||
* 2. **Job CRUD** — List, get, pause, and resume jobs for a given plugin.
|
||||
*
|
||||
* 3. **Run lifecycle** — Create job run records, update their status, and
|
||||
* record results (duration, errors, logs).
|
||||
*
|
||||
* 4. **Next-run calculation** — After a run completes the host should call
|
||||
* `updateNextRunAt()` with the next cron tick so the scheduler knows when
|
||||
* to fire next.
|
||||
*
|
||||
* The capability check (`jobs.schedule`) is enforced upstream by the host
|
||||
* client factory and manifest validator — this store trusts that the caller
|
||||
* has already been authorised.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §17 — Scheduled Jobs
|
||||
* @see PLUGIN_SPEC.md §21.3 — `plugin_jobs` / `plugin_job_runs` tables
|
||||
*/
|
||||
|
||||
import { and, desc, eq } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { plugins, pluginJobs, pluginJobRuns } from "@paperclipai/db";
|
||||
import type {
|
||||
PluginJobDeclaration,
|
||||
PluginJobRunStatus,
|
||||
PluginJobRunTrigger,
|
||||
PluginJobRecord,
|
||||
} from "@paperclipai/shared";
|
||||
import { notFound } from "../errors.js";
|
||||
|
||||
/**
 * The statuses used for job *definitions* in the `plugin_jobs` table.
 * Aliased from `PluginJobRecord` to keep the store API aligned with
 * the domain type (`"active" | "paused" | "failed"`).
 */
type JobDefinitionStatus = PluginJobRecord["status"];

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

/**
 * Input for creating a job run record.
 */
export interface CreateJobRunInput {
  /** FK to the plugin_jobs row. */
  jobId: string;
  /** FK to the plugins row. */
  pluginId: string;
  /** What triggered this run. */
  trigger: PluginJobRunTrigger;
}

/**
 * Input for completing (or failing) a job run.
 */
export interface CompleteJobRunInput {
  /** Final run status. */
  status: PluginJobRunStatus;
  /** Error message if the run failed. */
  error?: string | null;
  /** Run duration in milliseconds; null when unknown (e.g. cancelled before starting). */
  durationMs?: number | null;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Service
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a PluginJobStore backed by the given Drizzle database instance.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const jobStore = pluginJobStore(db);
|
||||
*
|
||||
* // On plugin install/start — sync declared jobs into the DB
|
||||
* await jobStore.syncJobDeclarations(pluginId, manifest.jobs ?? []);
|
||||
*
|
||||
* // Before dispatching a runJob RPC — create a run record
|
||||
* const run = await jobStore.createRun({ jobId, pluginId, trigger: "schedule" });
|
||||
*
|
||||
* // After the RPC completes — record the result
|
||||
* await jobStore.completeRun(run.id, {
|
||||
* status: "succeeded",
|
||||
* durationMs: Date.now() - startedAt,
|
||||
* });
|
||||
* ```
|
||||
*/
|
||||
export function pluginJobStore(db: Db) {
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
async function assertPluginExists(pluginId: string): Promise<void> {
|
||||
const rows = await db
|
||||
.select({ id: plugins.id })
|
||||
.from(plugins)
|
||||
.where(eq(plugins.id, pluginId));
|
||||
if (rows.length === 0) {
|
||||
throw notFound(`Plugin not found: ${pluginId}`);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
return {
|
||||
// =====================================================================
|
||||
// Job declarations (plugin_jobs)
|
||||
// =====================================================================
|
||||
|
||||
/**
 * Sync declared jobs from a plugin manifest into the `plugin_jobs` table.
 *
 * This is called at plugin install and on each worker startup so the DB
 * always reflects the manifest's declared jobs:
 *
 * - **New jobs** are inserted with status `active`.
 * - **Existing jobs** have their `schedule` updated if it changed.
 * - **Removed jobs** (present in DB but absent from the manifest) are
 *   set to `paused` so their history is preserved.
 *
 * The unique constraint `(pluginId, jobKey)` is used for conflict
 * resolution.
 *
 * NOTE(review): a declared job whose DB row is `paused` is re-activated
 * here — including jobs a user may have paused manually. Confirm that is
 * the intended behavior.
 *
 * @param pluginId - UUID of the owning plugin
 * @param declarations - Job declarations from the plugin manifest
 */
async syncJobDeclarations(
  pluginId: string,
  declarations: PluginJobDeclaration[],
): Promise<void> {
  await assertPluginExists(pluginId);

  // Fetch existing jobs for this plugin
  const existingJobs = await db
    .select()
    .from(pluginJobs)
    .where(eq(pluginJobs.pluginId, pluginId));

  const existingByKey = new Map(
    existingJobs.map((j) => [j.jobKey, j]),
  );

  const declaredKeys = new Set<string>();

  // Upsert each declared job
  for (const decl of declarations) {
    declaredKeys.add(decl.jobKey);

    const existing = existingByKey.get(decl.jobKey);
    // A missing schedule is stored as an empty string
    const schedule = decl.schedule ?? "";

    if (existing) {
      // Update schedule if it changed; re-activate if it was paused
      const updates: Record<string, unknown> = {
        updatedAt: new Date(),
      };
      if (existing.schedule !== schedule) {
        updates.schedule = schedule;
      }
      if (existing.status === "paused") {
        updates.status = "active";
      }

      await db
        .update(pluginJobs)
        .set(updates)
        .where(eq(pluginJobs.id, existing.id));
    } else {
      // Insert new job
      await db.insert(pluginJobs).values({
        pluginId,
        jobKey: decl.jobKey,
        schedule,
        status: "active",
      });
    }
  }

  // Pause jobs that are no longer declared in the manifest
  for (const existing of existingJobs) {
    if (!declaredKeys.has(existing.jobKey) && existing.status !== "paused") {
      await db
        .update(pluginJobs)
        .set({ status: "paused", updatedAt: new Date() })
        .where(eq(pluginJobs.id, existing.id));
    }
  }
},
|
||||
|
||||
/**
|
||||
* List all jobs for a plugin, optionally filtered by status.
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
* @param status - Optional status filter
|
||||
*/
|
||||
async listJobs(
|
||||
pluginId: string,
|
||||
status?: JobDefinitionStatus,
|
||||
): Promise<(typeof pluginJobs.$inferSelect)[]> {
|
||||
const conditions = [eq(pluginJobs.pluginId, pluginId)];
|
||||
if (status) {
|
||||
conditions.push(eq(pluginJobs.status, status));
|
||||
}
|
||||
return db
|
||||
.select()
|
||||
.from(pluginJobs)
|
||||
.where(and(...conditions));
|
||||
},
|
||||
|
||||
/**
|
||||
* Get a single job by its composite key `(pluginId, jobKey)`.
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
* @param jobKey - Stable job identifier from the manifest
|
||||
* @returns The job row, or `null` if not found
|
||||
*/
|
||||
async getJobByKey(
|
||||
pluginId: string,
|
||||
jobKey: string,
|
||||
): Promise<(typeof pluginJobs.$inferSelect) | null> {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(pluginJobs)
|
||||
.where(
|
||||
and(
|
||||
eq(pluginJobs.pluginId, pluginId),
|
||||
eq(pluginJobs.jobKey, jobKey),
|
||||
),
|
||||
);
|
||||
return rows[0] ?? null;
|
||||
},
|
||||
|
||||
/**
|
||||
* Get a single job by its primary key (UUID).
|
||||
*
|
||||
* @param jobId - UUID of the job row
|
||||
* @returns The job row, or `null` if not found
|
||||
*/
|
||||
async getJobById(
|
||||
jobId: string,
|
||||
): Promise<(typeof pluginJobs.$inferSelect) | null> {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(pluginJobs)
|
||||
.where(eq(pluginJobs.id, jobId));
|
||||
return rows[0] ?? null;
|
||||
},
|
||||
|
||||
/**
|
||||
* Fetch a single job by ID, scoped to a specific plugin.
|
||||
*
|
||||
* Returns `null` if the job does not exist or does not belong to the
|
||||
* given plugin — callers should treat both cases as "not found".
|
||||
*/
|
||||
async getJobByIdForPlugin(
|
||||
pluginId: string,
|
||||
jobId: string,
|
||||
): Promise<(typeof pluginJobs.$inferSelect) | null> {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(pluginJobs)
|
||||
.where(and(eq(pluginJobs.id, jobId), eq(pluginJobs.pluginId, pluginId)));
|
||||
return rows[0] ?? null;
|
||||
},
|
||||
|
||||
/**
|
||||
* Update a job's status.
|
||||
*
|
||||
* @param jobId - UUID of the job row
|
||||
* @param status - New status
|
||||
*/
|
||||
async updateJobStatus(
|
||||
jobId: string,
|
||||
status: JobDefinitionStatus,
|
||||
): Promise<void> {
|
||||
await db
|
||||
.update(pluginJobs)
|
||||
.set({ status, updatedAt: new Date() })
|
||||
.where(eq(pluginJobs.id, jobId));
|
||||
},
|
||||
|
||||
/**
|
||||
* Update the `lastRunAt` and `nextRunAt` timestamps on a job.
|
||||
*
|
||||
* Called by the scheduler after a run completes to advance the
|
||||
* scheduling pointer.
|
||||
*
|
||||
* @param jobId - UUID of the job row
|
||||
* @param lastRunAt - When the last run started
|
||||
* @param nextRunAt - When the next run should fire
|
||||
*/
|
||||
async updateRunTimestamps(
|
||||
jobId: string,
|
||||
lastRunAt: Date,
|
||||
nextRunAt: Date | null,
|
||||
): Promise<void> {
|
||||
await db
|
||||
.update(pluginJobs)
|
||||
.set({
|
||||
lastRunAt,
|
||||
nextRunAt,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(pluginJobs.id, jobId));
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete all jobs (and cascaded runs) owned by a plugin.
|
||||
*
|
||||
* Called during plugin uninstall when `removeData = true`.
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
*/
|
||||
async deleteAllJobs(pluginId: string): Promise<void> {
|
||||
await db
|
||||
.delete(pluginJobs)
|
||||
.where(eq(pluginJobs.pluginId, pluginId));
|
||||
},
|
||||
|
||||
// =====================================================================
|
||||
// Job runs (plugin_job_runs)
|
||||
// =====================================================================
|
||||
|
||||
/**
|
||||
* Create a new job run record with status `queued`.
|
||||
*
|
||||
* The caller should create the run record *before* dispatching the
|
||||
* `runJob` RPC to the worker, then update it to `running` once the
|
||||
* worker begins execution.
|
||||
*
|
||||
* @param input - Job run input (jobId, pluginId, trigger)
|
||||
* @returns The newly created run row
|
||||
*/
|
||||
async createRun(
|
||||
input: CreateJobRunInput,
|
||||
): Promise<typeof pluginJobRuns.$inferSelect> {
|
||||
const rows = await db
|
||||
.insert(pluginJobRuns)
|
||||
.values({
|
||||
jobId: input.jobId,
|
||||
pluginId: input.pluginId,
|
||||
trigger: input.trigger,
|
||||
status: "queued",
|
||||
})
|
||||
.returning();
|
||||
|
||||
return rows[0]!;
|
||||
},
|
||||
|
||||
/**
|
||||
* Mark a run as `running` and set its `startedAt` timestamp.
|
||||
*
|
||||
* @param runId - UUID of the run row
|
||||
*/
|
||||
async markRunning(runId: string): Promise<void> {
|
||||
await db
|
||||
.update(pluginJobRuns)
|
||||
.set({
|
||||
status: "running" as PluginJobRunStatus,
|
||||
startedAt: new Date(),
|
||||
})
|
||||
.where(eq(pluginJobRuns.id, runId));
|
||||
},
|
||||
|
||||
/**
|
||||
* Complete a run — set its final status, error, duration, and
|
||||
* `finishedAt` timestamp.
|
||||
*
|
||||
* @param runId - UUID of the run row
|
||||
* @param input - Completion details
|
||||
*/
|
||||
async completeRun(
|
||||
runId: string,
|
||||
input: CompleteJobRunInput,
|
||||
): Promise<void> {
|
||||
await db
|
||||
.update(pluginJobRuns)
|
||||
.set({
|
||||
status: input.status,
|
||||
error: input.error ?? null,
|
||||
durationMs: input.durationMs ?? null,
|
||||
finishedAt: new Date(),
|
||||
})
|
||||
.where(eq(pluginJobRuns.id, runId));
|
||||
},
|
||||
|
||||
/**
|
||||
* Get a run by its primary key.
|
||||
*
|
||||
* @param runId - UUID of the run row
|
||||
* @returns The run row, or `null` if not found
|
||||
*/
|
||||
async getRunById(
|
||||
runId: string,
|
||||
): Promise<(typeof pluginJobRuns.$inferSelect) | null> {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(pluginJobRuns)
|
||||
.where(eq(pluginJobRuns.id, runId));
|
||||
return rows[0] ?? null;
|
||||
},
|
||||
|
||||
/**
|
||||
* List runs for a specific job, ordered by creation time descending.
|
||||
*
|
||||
* @param jobId - UUID of the job
|
||||
* @param limit - Maximum number of rows to return (default: 50)
|
||||
*/
|
||||
async listRunsByJob(
|
||||
jobId: string,
|
||||
limit = 50,
|
||||
): Promise<(typeof pluginJobRuns.$inferSelect)[]> {
|
||||
return db
|
||||
.select()
|
||||
.from(pluginJobRuns)
|
||||
.where(eq(pluginJobRuns.jobId, jobId))
|
||||
.orderBy(desc(pluginJobRuns.createdAt))
|
||||
.limit(limit);
|
||||
},
|
||||
|
||||
/**
|
||||
* List runs for a plugin, optionally filtered by status.
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
* @param status - Optional status filter
|
||||
* @param limit - Maximum number of rows to return (default: 50)
|
||||
*/
|
||||
async listRunsByPlugin(
|
||||
pluginId: string,
|
||||
status?: PluginJobRunStatus,
|
||||
limit = 50,
|
||||
): Promise<(typeof pluginJobRuns.$inferSelect)[]> {
|
||||
const conditions = [eq(pluginJobRuns.pluginId, pluginId)];
|
||||
if (status) {
|
||||
conditions.push(eq(pluginJobRuns.status, status));
|
||||
}
|
||||
return db
|
||||
.select()
|
||||
.from(pluginJobRuns)
|
||||
.where(and(...conditions))
|
||||
.orderBy(desc(pluginJobRuns.createdAt))
|
||||
.limit(limit);
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Type alias for the return value of `pluginJobStore()` — the store's
 * public surface, derived from the factory so the two can never drift.
 */
export type PluginJobStore = ReturnType<typeof pluginJobStore>;
|
||||
821
server/src/services/plugin-lifecycle.ts
Normal file
821
server/src/services/plugin-lifecycle.ts
Normal file
@@ -0,0 +1,821 @@
|
||||
/**
|
||||
* PluginLifecycleManager — state-machine controller for plugin status
|
||||
* transitions and worker process coordination.
|
||||
*
|
||||
* Each plugin moves through a well-defined state machine:
|
||||
*
|
||||
* ```
|
||||
* installed ──→ ready ──→ disabled
|
||||
* │ │ │
|
||||
* │ ├──→ error│
|
||||
* │ ↓ │
|
||||
* │ upgrade_pending │
|
||||
* │ │ │
|
||||
* ↓ ↓ ↓
|
||||
* uninstalled
|
||||
* ```
|
||||
*
|
||||
* The lifecycle manager:
|
||||
*
|
||||
* 1. **Validates transitions** — Only transitions defined in
|
||||
* `VALID_TRANSITIONS` are allowed; invalid transitions throw.
|
||||
*
|
||||
* 2. **Coordinates workers** — When a plugin moves to `ready`, its
|
||||
* worker process is started. When it moves out of `ready`, the
|
||||
* worker is stopped gracefully.
|
||||
*
|
||||
* 3. **Emits events** — `plugin.loaded`, `plugin.enabled`,
|
||||
* `plugin.disabled`, `plugin.unloaded`, `plugin.status_changed`
|
||||
* events are emitted so that other services (job coordinator,
|
||||
* tool dispatcher, event bus) can react accordingly.
|
||||
*
|
||||
* 4. **Persists state** — Status changes are written to the database
|
||||
* through the plugin registry service.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §12 — Process Model
|
||||
* @see PLUGIN_SPEC.md §12.5 — Graceful Shutdown Policy
|
||||
*/
|
||||
import { EventEmitter } from "node:events";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import type {
|
||||
PluginStatus,
|
||||
PluginRecord,
|
||||
PaperclipPluginManifestV1,
|
||||
} from "@paperclipai/shared";
|
||||
import { pluginRegistryService } from "./plugin-registry.js";
|
||||
import { pluginLoader, type PluginLoader } from "./plugin-loader.js";
|
||||
import type { PluginWorkerManager, WorkerStartOptions } from "./plugin-worker-manager.js";
|
||||
import { badRequest, notFound } from "../errors.js";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Lifecycle state machine
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Valid state transitions for the plugin lifecycle.
 *
 *   installed       → ready            (initial load succeeds)
 *   installed       → error            (initial load fails)
 *   installed       → uninstalled      (abort installation)
 *
 *   ready           → ready            (self-transition is permitted —
 *                                       NOTE(review): presumably a no-op
 *                                       refresh/re-enable; confirm intent)
 *   ready           → disabled         (operator disables plugin)
 *   ready           → error            (runtime failure)
 *   ready           → upgrade_pending  (upgrade with new capabilities)
 *   ready           → uninstalled      (uninstall)
 *
 *   disabled        → ready            (operator re-enables plugin)
 *   disabled        → uninstalled      (uninstall while disabled)
 *
 *   error           → ready            (retry / recovery)
 *   error           → uninstalled      (give up and uninstall)
 *
 *   upgrade_pending → ready            (operator approves new capabilities)
 *   upgrade_pending → error            (upgrade worker fails)
 *   upgrade_pending → uninstalled      (reject upgrade and uninstall)
 *
 *   uninstalled     → installed        (reinstall)
 *
 * Any pair not listed here is rejected by `assertTransition`.
 */
const VALID_TRANSITIONS: Record<string, readonly PluginStatus[]> = {
  installed: ["ready", "error", "uninstalled"],
  ready: ["ready", "disabled", "error", "upgrade_pending", "uninstalled"],
  disabled: ["ready", "uninstalled"],
  error: ["ready", "uninstalled"],
  upgrade_pending: ["ready", "error", "uninstalled"],
  uninstalled: ["installed"], // reinstall
};
|
||||
|
||||
/**
|
||||
* Check whether a transition from `from` → `to` is valid.
|
||||
*/
|
||||
function isValidTransition(from: PluginStatus, to: PluginStatus): boolean {
|
||||
return VALID_TRANSITIONS[from]?.includes(to) ?? false;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Lifecycle events
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Events emitted by the PluginLifecycleManager.
 *
 * Keys are event names; values are the payload shape delivered to each
 * listener. Consumers can subscribe to these for routing-table updates,
 * UI refresh notifications, and observability.
 */
export interface PluginLifecycleEvents {
  /** Emitted after a plugin is loaded (installed → ready). */
  "plugin.loaded": { pluginId: string; pluginKey: string };
  /** Emitted after a plugin transitions to ready (enabled). */
  "plugin.enabled": { pluginId: string; pluginKey: string };
  /** Emitted after a plugin is disabled (ready → disabled). */
  "plugin.disabled": { pluginId: string; pluginKey: string; reason?: string };
  /** Emitted after a plugin is unloaded (any → uninstalled). */
  "plugin.unloaded": { pluginId: string; pluginKey: string; removeData: boolean };
  /** Emitted on any status change, in addition to the specific event. */
  "plugin.status_changed": {
    pluginId: string;
    pluginKey: string;
    previousStatus: PluginStatus;
    newStatus: PluginStatus;
  };
  /** Emitted when a plugin enters an error state. */
  "plugin.error": { pluginId: string; pluginKey: string; error: string };
  /** Emitted when a plugin enters upgrade_pending. */
  "plugin.upgrade_pending": { pluginId: string; pluginKey: string };
  /** Emitted when a plugin worker process has been started. */
  "plugin.worker_started": { pluginId: string; pluginKey: string };
  /** Emitted when a plugin worker process has been stopped. */
  "plugin.worker_stopped": { pluginId: string; pluginKey: string };
}

/** Union of all lifecycle event names. */
type LifecycleEventName = keyof PluginLifecycleEvents;
/** Payload type carried by the given lifecycle event. */
type LifecycleEventPayload<K extends LifecycleEventName> = PluginLifecycleEvents[K];
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PluginLifecycleManager
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Public surface of the plugin lifecycle manager: state transitions,
 * worker coordination, and typed event subscription.
 */
export interface PluginLifecycleManager {
  /**
   * Load a newly installed plugin – transitions `installed` → `ready`.
   *
   * This is called after the registry has persisted the initial install record.
   * The caller should have already spawned the worker and performed health
   * checks before calling this. If the worker fails, call `markError` instead.
   *
   * @param pluginId - UUID of the plugin to load
   * @returns The updated plugin record
   */
  load(pluginId: string): Promise<PluginRecord>;

  /**
   * Enable a plugin that is in `disabled`, `error`, or `upgrade_pending` state.
   * Transitions → `ready`.
   *
   * @param pluginId - UUID of the plugin to enable
   * @returns The updated plugin record
   */
  enable(pluginId: string): Promise<PluginRecord>;

  /**
   * Disable a running plugin.
   * Transitions `ready` → `disabled`.
   *
   * @param pluginId - UUID of the plugin to disable
   * @param reason - Optional human-readable reason recorded with the change
   */
  disable(pluginId: string, reason?: string): Promise<PluginRecord>;

  /**
   * Unload (uninstall) a plugin from any active state.
   * Transitions → `uninstalled`.
   *
   * When `removeData` is true, the plugin row and cascaded config are
   * hard-deleted. Otherwise a soft-delete sets status to `uninstalled`.
   *
   * @returns The final record, or `null` after a hard delete
   */
  unload(pluginId: string, removeData?: boolean): Promise<PluginRecord | null>;

  /**
   * Mark a plugin as errored (e.g. worker crash, health-check failure).
   * Transitions → `error`.
   *
   * @param error - Error description persisted as the plugin's last error
   */
  markError(pluginId: string, error: string): Promise<PluginRecord>;

  /**
   * Mark a plugin as requiring upgrade approval.
   * Transitions `ready` → `upgrade_pending`.
   */
  markUpgradePending(pluginId: string): Promise<PluginRecord>;

  /**
   * Upgrade a plugin to a newer version.
   * This is a placeholder that handles the lifecycle state transition.
   * The actual package installation is handled by plugin-loader.
   *
   * If the upgrade adds new capabilities, transitions to `upgrade_pending`.
   * Otherwise, transitions to `ready` directly.
   *
   * @param version - Optional target version specifier
   */
  upgrade(pluginId: string, version?: string): Promise<PluginRecord>;

  /**
   * Start the worker process for a plugin that is already in `ready` state.
   *
   * This is used by the server startup orchestration to start workers for
   * plugins that were persisted as `ready`. It requires a `PluginWorkerManager`
   * to have been provided at construction time.
   *
   * @param pluginId - The UUID of the plugin to start
   * @param options - Worker start options (entrypoint path, config, etc.)
   * @throws if no worker manager is configured or the plugin is not ready
   */
  startWorker(pluginId: string, options: WorkerStartOptions): Promise<void>;

  /**
   * Stop the worker process for a plugin without changing lifecycle state.
   *
   * This is used during server shutdown to gracefully stop all workers.
   * It does not transition the plugin state — plugins remain in their
   * current status so they can be restarted on next server boot.
   *
   * @param pluginId - The UUID of the plugin to stop
   */
  stopWorker(pluginId: string): Promise<void>;

  /**
   * Restart the worker process for a running plugin.
   *
   * Stops and re-starts the worker process. The plugin remains in `ready`
   * state throughout. This is typically called after a config change.
   *
   * @param pluginId - The UUID of the plugin to restart
   * @throws if no worker manager is configured or the plugin is not ready
   */
  restartWorker(pluginId: string): Promise<void>;

  /**
   * Get the current lifecycle state for a plugin, or `null` if unknown.
   */
  getStatus(pluginId: string): Promise<PluginStatus | null>;

  /**
   * Check whether a transition is allowed from the plugin's current state.
   */
  canTransition(pluginId: string, to: PluginStatus): Promise<boolean>;

  /**
   * Subscribe to lifecycle events.
   */
  on<K extends LifecycleEventName>(
    event: K,
    listener: (payload: LifecycleEventPayload<K>) => void,
  ): void;

  /**
   * Unsubscribe from lifecycle events.
   */
  off<K extends LifecycleEventName>(
    event: K,
    listener: (payload: LifecycleEventPayload<K>) => void,
  ): void;

  /**
   * Subscribe to a lifecycle event once; the listener is removed after
   * the first matching emission.
   */
  once<K extends LifecycleEventName>(
    event: K,
    listener: (payload: LifecycleEventPayload<K>) => void,
  ): void;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Factory
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Options for constructing a PluginLifecycleManager.
 */
export interface PluginLifecycleManagerOptions {
  /** Plugin loader instance. Falls back to the default if omitted. */
  loader?: PluginLoader;

  /**
   * Worker process manager. When provided, lifecycle transitions that bring
   * a plugin online (load, enable, upgrade-to-ready) will start the worker
   * process, and transitions that take a plugin offline (disable, unload,
   * markError) will stop it.
   *
   * When omitted the lifecycle manager operates in state-only mode — the
   * caller is responsible for managing worker processes externally.
   */
  workerManager?: PluginWorkerManager;
}
|
||||
|
||||
/**
|
||||
* Create a PluginLifecycleManager.
|
||||
*
|
||||
* This service orchestrates plugin state transitions on top of the
|
||||
* `pluginRegistryService` (which handles raw DB persistence). It enforces
|
||||
* the lifecycle state machine, emits events for downstream consumers
|
||||
* (routing tables, UI, observability), and manages worker processes via
|
||||
* the `PluginWorkerManager` when one is provided.
|
||||
*
|
||||
* Usage:
|
||||
* ```ts
|
||||
* const lifecycle = pluginLifecycleManager(db, {
|
||||
* workerManager: createPluginWorkerManager(),
|
||||
* });
|
||||
* lifecycle.on("plugin.enabled", ({ pluginId }) => { ... });
|
||||
* await lifecycle.load(pluginId);
|
||||
* ```
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §21.3 — `plugins.status` column
|
||||
* @see PLUGIN_SPEC.md §12 — Process Model
|
||||
*/
|
||||
export function pluginLifecycleManager(
|
||||
db: Db,
|
||||
options?: PluginLoader | PluginLifecycleManagerOptions,
|
||||
): PluginLifecycleManager {
|
||||
// Support the legacy signature: pluginLifecycleManager(db, loader)
|
||||
// as well as the new options object form.
|
||||
let loaderArg: PluginLoader | undefined;
|
||||
let workerManager: PluginWorkerManager | undefined;
|
||||
|
||||
if (options && typeof options === "object" && "discoverAll" in options) {
|
||||
// Legacy: second arg is a PluginLoader directly
|
||||
loaderArg = options as PluginLoader;
|
||||
} else if (options && typeof options === "object") {
|
||||
const opts = options as PluginLifecycleManagerOptions;
|
||||
loaderArg = opts.loader;
|
||||
workerManager = opts.workerManager;
|
||||
}
|
||||
|
||||
const registry = pluginRegistryService(db);
|
||||
const pluginLoaderInstance = loaderArg ?? pluginLoader(db);
|
||||
const emitter = new EventEmitter();
|
||||
emitter.setMaxListeners(100); // plugins may have many listeners; 100 is a safe upper bound
|
||||
|
||||
const log = logger.child({ service: "plugin-lifecycle" });
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
async function requirePlugin(pluginId: string): Promise<PluginRecord> {
|
||||
const plugin = await registry.getById(pluginId);
|
||||
if (!plugin) throw notFound(`Plugin not found: ${pluginId}`);
|
||||
return plugin as PluginRecord;
|
||||
}
|
||||
|
||||
function assertTransition(plugin: PluginRecord, to: PluginStatus): void {
|
||||
if (!isValidTransition(plugin.status, to)) {
|
||||
throw badRequest(
|
||||
`Invalid lifecycle transition: ${plugin.status} → ${to} for plugin ${plugin.pluginKey}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
  /**
   * Core state-transition helper: validate the move, persist it through
   * the registry, log it, and emit `plugin.status_changed`.
   *
   * @param pluginId - UUID of the plugin to transition
   * @param to - Target lifecycle status
   * @param lastError - Error text to persist alongside the status (null clears it)
   * @param existingPlugin - Caller's already-fetched record, passed to skip
   *   a second registry lookup
   * @returns The updated plugin record
   * @throws notFound if the plugin does not exist (before or after update);
   *   badRequest if the transition is not permitted
   */
  async function transition(
    pluginId: string,
    to: PluginStatus,
    lastError: string | null = null,
    existingPlugin?: PluginRecord,
  ): Promise<PluginRecord> {
    const plugin = existingPlugin ?? await requirePlugin(pluginId);
    assertTransition(plugin, to);

    // Capture before the write — `plugin.status` is the pre-transition state.
    const previousStatus = plugin.status;

    const updated = await registry.updateStatus(pluginId, {
      status: to,
      lastError,
    });

    // Plugin could have been deleted between fetch and update.
    if (!updated) throw notFound(`Plugin not found after status update: ${pluginId}`);
    const result = updated as PluginRecord;

    log.info(
      { pluginId, pluginKey: result.pluginKey, from: previousStatus, to },
      `plugin lifecycle: ${previousStatus} → ${to}`,
    );

    // Emit the generic status_changed event (emitted only after the DB
    // write succeeds, so listeners observe committed state).
    emitter.emit("plugin.status_changed", {
      pluginId,
      pluginKey: result.pluginKey,
      previousStatus,
      newStatus: to,
    });

    return result;
  }
|
||||
|
||||
function emitDomain(
|
||||
event: LifecycleEventName,
|
||||
payload: PluginLifecycleEvents[LifecycleEventName],
|
||||
): void {
|
||||
emitter.emit(event, payload);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Worker management helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Stop the worker for a plugin if one is running.
|
||||
* This is a best-effort operation — if no worker manager is configured
|
||||
* or no worker is running, it silently succeeds.
|
||||
*/
|
||||
async function stopWorkerIfRunning(
|
||||
pluginId: string,
|
||||
pluginKey: string,
|
||||
): Promise<void> {
|
||||
if (!workerManager) return;
|
||||
if (!workerManager.isRunning(pluginId) && !workerManager.getWorker(pluginId)) return;
|
||||
|
||||
try {
|
||||
await workerManager.stopWorker(pluginId);
|
||||
log.info({ pluginId, pluginKey }, "plugin lifecycle: worker stopped");
|
||||
emitDomain("plugin.worker_stopped", { pluginId, pluginKey });
|
||||
} catch (err) {
|
||||
log.warn(
|
||||
{ pluginId, pluginKey, err: err instanceof Error ? err.message : String(err) },
|
||||
"plugin lifecycle: failed to stop worker (best-effort)",
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async function activateReadyPlugin(pluginId: string): Promise<void> {
|
||||
const supportsRuntimeActivation =
|
||||
typeof pluginLoaderInstance.hasRuntimeServices === "function"
|
||||
&& typeof pluginLoaderInstance.loadSingle === "function";
|
||||
if (!supportsRuntimeActivation || !pluginLoaderInstance.hasRuntimeServices()) {
|
||||
return;
|
||||
}
|
||||
|
||||
const loadResult = await pluginLoaderInstance.loadSingle(pluginId);
|
||||
if (!loadResult.success) {
|
||||
throw new Error(
|
||||
loadResult.error
|
||||
?? `Failed to activate plugin ${loadResult.plugin.pluginKey}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
async function deactivatePluginRuntime(
|
||||
pluginId: string,
|
||||
pluginKey: string,
|
||||
): Promise<void> {
|
||||
const supportsRuntimeDeactivation =
|
||||
typeof pluginLoaderInstance.hasRuntimeServices === "function"
|
||||
&& typeof pluginLoaderInstance.unloadSingle === "function";
|
||||
|
||||
if (supportsRuntimeDeactivation && pluginLoaderInstance.hasRuntimeServices()) {
|
||||
await pluginLoaderInstance.unloadSingle(pluginId, pluginKey);
|
||||
return;
|
||||
}
|
||||
|
||||
await stopWorkerIfRunning(pluginId, pluginKey);
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
return {
|
||||
// -- load -------------------------------------------------------------
|
||||
/**
|
||||
* load — Transitions a plugin to 'ready' status and starts its worker.
|
||||
*
|
||||
* This method is called after a plugin has been successfully installed and
|
||||
* validated. It marks the plugin as ready in the database and immediately
|
||||
* triggers the plugin loader to start the worker process.
|
||||
*
|
||||
* @param pluginId - The UUID of the plugin to load.
|
||||
* @returns The updated plugin record.
|
||||
*/
|
||||
async load(pluginId: string): Promise<PluginRecord> {
|
||||
const result = await transition(pluginId, "ready");
|
||||
await activateReadyPlugin(pluginId);
|
||||
|
||||
emitDomain("plugin.loaded", {
|
||||
pluginId,
|
||||
pluginKey: result.pluginKey,
|
||||
});
|
||||
emitDomain("plugin.enabled", {
|
||||
pluginId,
|
||||
pluginKey: result.pluginKey,
|
||||
});
|
||||
return result;
|
||||
},
|
||||
|
||||
// -- enable -----------------------------------------------------------
|
||||
/**
|
||||
* enable — Re-enables a plugin that was previously in an error or upgrade state.
|
||||
*
|
||||
* Similar to load(), this method transitions the plugin to 'ready' and starts
|
||||
* its worker, but it specifically targets plugins that are currently disabled.
|
||||
*
|
||||
* @param pluginId - The UUID of the plugin to enable.
|
||||
* @returns The updated plugin record.
|
||||
*/
|
||||
async enable(pluginId: string): Promise<PluginRecord> {
|
||||
const plugin = await requirePlugin(pluginId);
|
||||
|
||||
// Only allow enabling from disabled, error, or upgrade_pending states
|
||||
if (plugin.status !== "disabled" && plugin.status !== "error" && plugin.status !== "upgrade_pending") {
|
||||
throw badRequest(
|
||||
`Cannot enable plugin in status '${plugin.status}'. ` +
|
||||
`Plugin must be in 'disabled', 'error', or 'upgrade_pending' status to be enabled.`,
|
||||
);
|
||||
}
|
||||
|
||||
const result = await transition(pluginId, "ready", null, plugin);
|
||||
await activateReadyPlugin(pluginId);
|
||||
emitDomain("plugin.enabled", {
|
||||
pluginId,
|
||||
pluginKey: result.pluginKey,
|
||||
});
|
||||
return result;
|
||||
},
|
||||
|
||||
// -- disable ----------------------------------------------------------
|
||||
async disable(pluginId: string, reason?: string): Promise<PluginRecord> {
|
||||
const plugin = await requirePlugin(pluginId);
|
||||
|
||||
// Only allow disabling from ready state
|
||||
if (plugin.status !== "ready") {
|
||||
throw badRequest(
|
||||
`Cannot disable plugin in status '${plugin.status}'. ` +
|
||||
`Plugin must be in 'ready' status to be disabled.`,
|
||||
);
|
||||
}
|
||||
|
||||
await deactivatePluginRuntime(pluginId, plugin.pluginKey);
|
||||
|
||||
const result = await transition(pluginId, "disabled", reason ?? null, plugin);
|
||||
emitDomain("plugin.disabled", {
|
||||
pluginId,
|
||||
pluginKey: result.pluginKey,
|
||||
reason,
|
||||
});
|
||||
return result;
|
||||
},
|
||||
|
||||
// -- unload -----------------------------------------------------------
|
||||
    /**
     * unload — Uninstall a plugin from any state.
     *
     * Order of operations: deactivate the runtime, clean up install
     * artifacts, then uninstall via the registry (which handles
     * soft vs. hard delete based on `removeData`). Emits
     * `plugin.status_changed` and `plugin.unloaded` afterwards.
     *
     * @param pluginId - UUID of the plugin to unload
     * @param removeData - When true, hard-delete the plugin row (and
     *   cascaded data); when false, soft-delete to `uninstalled`
     * @returns The final record, or `null` after a hard delete
     * @throws badRequest when the plugin is already `uninstalled` and
     *   `removeData` is false
     */
    async unload(
      pluginId: string,
      removeData = false,
    ): Promise<PluginRecord | null> {
      const plugin = await requirePlugin(pluginId);

      // If already uninstalled and removeData, hard-delete
      if (plugin.status === "uninstalled") {
        if (removeData) {
          // No runtime deactivation here — the plugin is already offline.
          await pluginLoaderInstance.cleanupInstallArtifacts(plugin);
          const deleted = await registry.uninstall(pluginId, true);
          log.info(
            { pluginId, pluginKey: plugin.pluginKey },
            "plugin lifecycle: hard-deleted already-uninstalled plugin",
          );
          emitDomain("plugin.unloaded", {
            pluginId,
            pluginKey: plugin.pluginKey,
            removeData: true,
          });
          return deleted as PluginRecord | null;
        }
        // Soft-unloading an already-uninstalled plugin is a no-op mistake;
        // reject it and point the caller at removeData.
        throw badRequest(
          `Plugin ${plugin.pluginKey} is already uninstalled. ` +
          `Use removeData=true to permanently delete it.`,
        );
      }

      // Take the runtime offline before removing anything on disk.
      await deactivatePluginRuntime(pluginId, plugin.pluginKey);
      await pluginLoaderInstance.cleanupInstallArtifacts(plugin);

      // Perform the uninstall via registry (handles soft/hard delete)
      const result = await registry.uninstall(pluginId, removeData);

      log.info(
        { pluginId, pluginKey: plugin.pluginKey, removeData },
        `plugin lifecycle: ${plugin.status} → uninstalled${removeData ? " (hard delete)" : ""}`,
      );

      // Emitted directly (not via transition()) because a hard delete
      // leaves no row for transition() to update.
      emitter.emit("plugin.status_changed", {
        pluginId,
        pluginKey: plugin.pluginKey,
        previousStatus: plugin.status,
        newStatus: "uninstalled" as PluginStatus,
      });

      emitDomain("plugin.unloaded", {
        pluginId,
        pluginKey: plugin.pluginKey,
        removeData,
      });

      return result as PluginRecord | null;
    },
|
||||
|
||||
// -- markError --------------------------------------------------------
|
||||
async markError(pluginId: string, error: string): Promise<PluginRecord> {
|
||||
// Stop the worker — the plugin is in an error state and should not
|
||||
// continue running. The worker manager's auto-restart is disabled
|
||||
// because we are intentionally taking the plugin offline.
|
||||
const plugin = await requirePlugin(pluginId);
|
||||
await deactivatePluginRuntime(pluginId, plugin.pluginKey);
|
||||
|
||||
const result = await transition(pluginId, "error", error, plugin);
|
||||
emitDomain("plugin.error", {
|
||||
pluginId,
|
||||
pluginKey: result.pluginKey,
|
||||
error,
|
||||
});
|
||||
return result;
|
||||
},
|
||||
|
||||
// -- markUpgradePending -----------------------------------------------
|
||||
async markUpgradePending(pluginId: string): Promise<PluginRecord> {
|
||||
const plugin = await requirePlugin(pluginId);
|
||||
await deactivatePluginRuntime(pluginId, plugin.pluginKey);
|
||||
|
||||
const result = await transition(pluginId, "upgrade_pending", null, plugin);
|
||||
emitDomain("plugin.upgrade_pending", {
|
||||
pluginId,
|
||||
pluginKey: result.pluginKey,
|
||||
});
|
||||
return result;
|
||||
},
|
||||
|
||||
    // -- upgrade ----------------------------------------------------------
    /**
     * Upgrade a plugin to a newer version by performing a package update and
     * managing the lifecycle state transition.
     *
     * Following PLUGIN_SPEC.md §25.3, the upgrade process:
     * 1. Stops the current worker process (if running).
     * 2. Fetches and validates the new plugin package via the `PluginLoader`.
     * 3. Compares the capabilities declared in the new manifest against the old one.
     * 4. If new capabilities are added, transitions the plugin to `upgrade_pending`
     *    to await operator approval (worker stays stopped).
     * 5. If no new capabilities are added, transitions the plugin back to `ready`
     *    with the updated version and manifest metadata.
     *
     * Note: the package is upgraded on disk (step 2) *before* the state
     * transition, so a failed transition leaves the new package installed
     * with the old DB record — the plugin remains stopped in that case.
     *
     * @param pluginId - The UUID of the plugin to upgrade.
     * @param version - Optional target version specifier.
     * @returns The updated `PluginRecord`.
     * @throws {BadRequest} If the plugin is not in a ready or upgrade_pending state.
     */
    async upgrade(pluginId: string, version?: string): Promise<PluginRecord> {
      const plugin = await requirePlugin(pluginId);

      // Can only upgrade plugins that are ready or already in upgrade_pending
      if (plugin.status !== "ready" && plugin.status !== "upgrade_pending") {
        throw badRequest(
          `Cannot upgrade plugin in status '${plugin.status}'. ` +
            `Plugin must be in 'ready' or 'upgrade_pending' status to be upgraded.`,
        );
      }

      log.info(
        { pluginId, pluginKey: plugin.pluginKey, targetVersion: version },
        "plugin lifecycle: upgrade requested",
      );

      // Stop the worker before touching the package on disk.
      await deactivatePluginRuntime(pluginId, plugin.pluginKey);

      // 1. Download and validate new package via loader
      const { oldManifest, newManifest, discovered } =
        await pluginLoaderInstance.upgradePlugin(pluginId, { version });

      log.info(
        {
          pluginId,
          pluginKey: plugin.pluginKey,
          oldVersion: oldManifest.version,
          newVersion: newManifest.version,
        },
        "plugin lifecycle: package upgraded on disk",
      );

      // 2. Compare capabilities — only *added* capabilities matter; removed
      // ones never require approval.
      const addedCaps = newManifest.capabilities.filter(
        (cap) => !oldManifest.capabilities.includes(cap),
      );

      // 3. Transition state
      if (addedCaps.length > 0) {
        // New capabilities require operator approval — worker stays stopped
        log.info(
          { pluginId, pluginKey: plugin.pluginKey, addedCaps },
          "plugin lifecycle: new capabilities detected, transitioning to upgrade_pending",
        );
        // Skip the inner stopWorkerIfRunning since we already stopped above
        const result = await transition(pluginId, "upgrade_pending", null, plugin);
        emitDomain("plugin.upgrade_pending", {
          pluginId,
          pluginKey: result.pluginKey,
        });
        return result;
      } else {
        // No new capabilities: persist the new version/manifest and bring the
        // plugin straight back to ready.
        const result = await transition(pluginId, "ready", null, {
          ...plugin,
          version: discovered.version,
          manifestJson: newManifest,
        } as PluginRecord);
        await activateReadyPlugin(pluginId);

        // Emit loaded + enabled so subscribers observe the same event pair as
        // a fresh activation.
        emitDomain("plugin.loaded", {
          pluginId,
          pluginKey: result.pluginKey,
        });
        emitDomain("plugin.enabled", {
          pluginId,
          pluginKey: result.pluginKey,
        });

        return result;
      }
    },
|
||||
|
||||
// -- startWorker ------------------------------------------------------
|
||||
async startWorker(
|
||||
pluginId: string,
|
||||
options: WorkerStartOptions,
|
||||
): Promise<void> {
|
||||
if (!workerManager) {
|
||||
throw badRequest(
|
||||
"Cannot start worker: no PluginWorkerManager is configured. " +
|
||||
"Provide a workerManager option when constructing the lifecycle manager.",
|
||||
);
|
||||
}
|
||||
|
||||
const plugin = await requirePlugin(pluginId);
|
||||
if (plugin.status !== "ready") {
|
||||
throw badRequest(
|
||||
`Cannot start worker for plugin in status '${plugin.status}'. ` +
|
||||
`Plugin must be in 'ready' status.`,
|
||||
);
|
||||
}
|
||||
|
||||
log.info(
|
||||
{ pluginId, pluginKey: plugin.pluginKey },
|
||||
"plugin lifecycle: starting worker",
|
||||
);
|
||||
|
||||
await workerManager.startWorker(pluginId, options);
|
||||
emitDomain("plugin.worker_started", {
|
||||
pluginId,
|
||||
pluginKey: plugin.pluginKey,
|
||||
});
|
||||
|
||||
log.info(
|
||||
{ pluginId, pluginKey: plugin.pluginKey },
|
||||
"plugin lifecycle: worker started",
|
||||
);
|
||||
},
|
||||
|
||||
// -- stopWorker -------------------------------------------------------
|
||||
async stopWorker(pluginId: string): Promise<void> {
|
||||
if (!workerManager) return; // No worker manager — nothing to stop
|
||||
|
||||
const plugin = await requirePlugin(pluginId);
|
||||
await stopWorkerIfRunning(pluginId, plugin.pluginKey);
|
||||
},
|
||||
|
||||
// -- restartWorker ----------------------------------------------------
|
||||
async restartWorker(pluginId: string): Promise<void> {
|
||||
if (!workerManager) {
|
||||
throw badRequest(
|
||||
"Cannot restart worker: no PluginWorkerManager is configured.",
|
||||
);
|
||||
}
|
||||
|
||||
const plugin = await requirePlugin(pluginId);
|
||||
if (plugin.status !== "ready") {
|
||||
throw badRequest(
|
||||
`Cannot restart worker for plugin in status '${plugin.status}'. ` +
|
||||
`Plugin must be in 'ready' status.`,
|
||||
);
|
||||
}
|
||||
|
||||
const handle = workerManager.getWorker(pluginId);
|
||||
if (!handle) {
|
||||
throw badRequest(
|
||||
`Cannot restart worker for plugin "${plugin.pluginKey}": no worker is running.`,
|
||||
);
|
||||
}
|
||||
|
||||
log.info(
|
||||
{ pluginId, pluginKey: plugin.pluginKey },
|
||||
"plugin lifecycle: restarting worker",
|
||||
);
|
||||
|
||||
await handle.restart();
|
||||
|
||||
emitDomain("plugin.worker_stopped", { pluginId, pluginKey: plugin.pluginKey });
|
||||
emitDomain("plugin.worker_started", { pluginId, pluginKey: plugin.pluginKey });
|
||||
|
||||
log.info(
|
||||
{ pluginId, pluginKey: plugin.pluginKey },
|
||||
"plugin lifecycle: worker restarted",
|
||||
);
|
||||
},
|
||||
|
||||
// -- getStatus --------------------------------------------------------
|
||||
async getStatus(pluginId: string): Promise<PluginStatus | null> {
|
||||
const plugin = await registry.getById(pluginId);
|
||||
return plugin?.status ?? null;
|
||||
},
|
||||
|
||||
// -- canTransition ----------------------------------------------------
|
||||
async canTransition(pluginId: string, to: PluginStatus): Promise<boolean> {
|
||||
const plugin = await registry.getById(pluginId);
|
||||
if (!plugin) return false;
|
||||
return isValidTransition(plugin.status, to);
|
||||
},
|
||||
|
||||
    // -- Event subscriptions ----------------------------------------------
    /** Register a listener for a lifecycle event. */
    on(event, listener) {
      emitter.on(event, listener);
    },

    /** Remove a previously registered listener. */
    off(event, listener) {
      emitter.off(event, listener);
    },

    /** Register a listener that fires for the next occurrence only. */
    once(event, listener) {
      emitter.once(event, listener);
    },
|
||||
};
|
||||
}
|
||||
1954
server/src/services/plugin-loader.ts
Normal file
1954
server/src/services/plugin-loader.ts
Normal file
File diff suppressed because it is too large
Load Diff
86
server/src/services/plugin-log-retention.ts
Normal file
86
server/src/services/plugin-log-retention.ts
Normal file
@@ -0,0 +1,86 @@
|
||||
import { lt, sql } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { pluginLogs } from "@paperclipai/db";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
|
||||
/** Default retention period: 7 days. */
const DEFAULT_RETENTION_DAYS = 7;

/**
 * Maximum rows to delete per sweep to avoid long-running transactions.
 * NOTE(review): this only takes effect if the delete statement actually
 * limits itself to this many rows per pass — verify the sweep enforces it.
 */
const DELETE_BATCH_SIZE = 5_000;

/** Maximum number of batches per sweep to guard against unbounded loops. */
const MAX_ITERATIONS = 100;
|
||||
|
||||
/**
|
||||
* Delete plugin log rows older than `retentionDays`.
|
||||
*
|
||||
* Deletes in batches of `DELETE_BATCH_SIZE` to keep transaction sizes
|
||||
* bounded and avoid holding locks for extended periods.
|
||||
*
|
||||
* @returns The total number of rows deleted.
|
||||
*/
|
||||
export async function prunePluginLogs(
|
||||
db: Db,
|
||||
retentionDays: number = DEFAULT_RETENTION_DAYS,
|
||||
): Promise<number> {
|
||||
const cutoff = new Date();
|
||||
cutoff.setDate(cutoff.getDate() - retentionDays);
|
||||
|
||||
let totalDeleted = 0;
|
||||
let iterations = 0;
|
||||
|
||||
// Delete in batches to avoid long-running transactions
|
||||
while (iterations < MAX_ITERATIONS) {
|
||||
const deleted = await db
|
||||
.delete(pluginLogs)
|
||||
.where(lt(pluginLogs.createdAt, cutoff))
|
||||
.returning({ id: pluginLogs.id })
|
||||
.then((rows) => rows.length);
|
||||
|
||||
totalDeleted += deleted;
|
||||
iterations++;
|
||||
|
||||
if (deleted < DELETE_BATCH_SIZE) break;
|
||||
}
|
||||
|
||||
if (iterations >= MAX_ITERATIONS) {
|
||||
logger.warn(
|
||||
{ totalDeleted, iterations, cutoffDate: cutoff },
|
||||
"Plugin log retention hit iteration limit; some logs may remain",
|
||||
);
|
||||
}
|
||||
|
||||
if (totalDeleted > 0) {
|
||||
logger.info({ totalDeleted, retentionDays }, "Pruned expired plugin logs");
|
||||
}
|
||||
|
||||
return totalDeleted;
|
||||
}
|
||||
|
||||
/**
|
||||
* Start a periodic plugin log cleanup interval.
|
||||
*
|
||||
* @param db - Database connection
|
||||
* @param intervalMs - How often to run (default: 1 hour)
|
||||
* @param retentionDays - How many days of logs to keep (default: 7)
|
||||
* @returns A cleanup function that stops the interval
|
||||
*/
|
||||
export function startPluginLogRetention(
|
||||
db: Db,
|
||||
intervalMs: number = 60 * 60 * 1_000,
|
||||
retentionDays: number = DEFAULT_RETENTION_DAYS,
|
||||
): () => void {
|
||||
const timer = setInterval(() => {
|
||||
prunePluginLogs(db, retentionDays).catch((err) => {
|
||||
logger.warn({ err }, "Plugin log retention sweep failed");
|
||||
});
|
||||
}, intervalMs);
|
||||
|
||||
// Run once immediately on startup
|
||||
prunePluginLogs(db, retentionDays).catch((err) => {
|
||||
logger.warn({ err }, "Initial plugin log retention sweep failed");
|
||||
});
|
||||
|
||||
return () => clearInterval(timer);
|
||||
}
|
||||
163
server/src/services/plugin-manifest-validator.ts
Normal file
163
server/src/services/plugin-manifest-validator.ts
Normal file
@@ -0,0 +1,163 @@
|
||||
/**
|
||||
* PluginManifestValidator — schema validation for plugin manifest files.
|
||||
*
|
||||
* Uses the shared Zod schema (`pluginManifestV1Schema`) to validate
|
||||
* manifest payloads. Provides both a safe `parse()` variant (returns
|
||||
* a result union) and a throwing `parseOrThrow()` for HTTP error
|
||||
* propagation at install time.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §10 — Plugin Manifest
|
||||
* @see packages/shared/src/validators/plugin.ts — Zod schema definition
|
||||
*/
|
||||
import { pluginManifestV1Schema } from "@paperclipai/shared";
|
||||
import type { PaperclipPluginManifestV1 } from "@paperclipai/shared";
|
||||
import { PLUGIN_API_VERSION } from "@paperclipai/shared";
|
||||
import { badRequest } from "../errors.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Supported manifest API versions
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * The set of plugin API versions this host can accept.
 *
 * When a new API version is introduced, add it here. Old versions should be
 * retained until the host drops support for them. Exposed to callers via
 * `getSupportedVersions()`.
 */
const SUPPORTED_VERSIONS = [PLUGIN_API_VERSION] as const;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Parse result types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Successful parse result: the validated, typed manifest.
 */
export interface ManifestParseSuccess {
  success: true;
  manifest: PaperclipPluginManifestV1;
}

/**
 * Failed parse result. `errors` is a human-readable description of what went
 * wrong (all issues joined with "; "); `details` is the raw Zod error list
 * for programmatic inspection.
 */
export interface ManifestParseFailure {
  success: false;
  errors: string;
  details: Array<{ path: (string | number)[]; message: string }>;
}

/** Union of parse outcomes — discriminated on `success`. */
export type ManifestParseResult = ManifestParseSuccess | ManifestParseFailure;
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PluginManifestValidator interface
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Service for parsing and validating plugin manifests.
 *
 * @see PLUGIN_SPEC.md §10 — Plugin Manifest
 */
export interface PluginManifestValidator {
  /**
   * Try to parse `input` as a plugin manifest.
   *
   * Returns a {@link ManifestParseSuccess} when the input passes all
   * validation rules, or a {@link ManifestParseFailure} with human-readable
   * error messages when it does not.
   *
   * This is the "safe" variant — it never throws.
   */
  parse(input: unknown): ManifestParseResult;

  /**
   * Parse `input` as a plugin manifest, throwing a 400 HttpError on failure.
   *
   * Use this at install time when an invalid manifest should surface as an
   * HTTP error to the caller.
   *
   * @throws {HttpError} 400 Bad Request if the manifest is invalid.
   */
  parseOrThrow(input: unknown): PaperclipPluginManifestV1;

  /**
   * Return the list of plugin API versions supported by this host.
   *
   * Callers can use this to present the supported version range to operators
   * or to decide whether a candidate plugin can be installed.
   */
  getSupportedVersions(): readonly number[];
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Factory
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a {@link PluginManifestValidator}.
|
||||
*
|
||||
* Usage:
|
||||
* ```ts
|
||||
* const validator = pluginManifestValidator();
|
||||
*
|
||||
* // Safe parse — inspect the result
|
||||
* const result = validator.parse(rawManifest);
|
||||
* if (!result.success) {
|
||||
* console.error(result.errors);
|
||||
* return;
|
||||
* }
|
||||
* const manifest = result.manifest;
|
||||
*
|
||||
* // Throwing parse — use at install time
|
||||
* const manifest = validator.parseOrThrow(rawManifest);
|
||||
*
|
||||
* // Check supported versions
|
||||
* const versions = validator.getSupportedVersions(); // [1]
|
||||
* ```
|
||||
*/
|
||||
export function pluginManifestValidator(): PluginManifestValidator {
|
||||
return {
|
||||
parse(input: unknown): ManifestParseResult {
|
||||
const result = pluginManifestV1Schema.safeParse(input);
|
||||
|
||||
if (result.success) {
|
||||
return {
|
||||
success: true,
|
||||
manifest: result.data as PaperclipPluginManifestV1,
|
||||
};
|
||||
}
|
||||
|
||||
const details = result.error.errors.map((issue) => ({
|
||||
path: issue.path,
|
||||
message: issue.message,
|
||||
}));
|
||||
|
||||
const errors = details
|
||||
.map(({ path, message }) =>
|
||||
path.length > 0 ? `${path.join(".")}: ${message}` : message,
|
||||
)
|
||||
.join("; ");
|
||||
|
||||
return {
|
||||
success: false,
|
||||
errors,
|
||||
details,
|
||||
};
|
||||
},
|
||||
|
||||
parseOrThrow(input: unknown): PaperclipPluginManifestV1 {
|
||||
const result = this.parse(input);
|
||||
|
||||
if (!result.success) {
|
||||
throw badRequest(`Invalid plugin manifest: ${result.errors}`, result.details);
|
||||
}
|
||||
|
||||
return result.manifest;
|
||||
},
|
||||
|
||||
getSupportedVersions(): readonly number[] {
|
||||
return SUPPORTED_VERSIONS;
|
||||
},
|
||||
};
|
||||
}
|
||||
682
server/src/services/plugin-registry.ts
Normal file
682
server/src/services/plugin-registry.ts
Normal file
@@ -0,0 +1,682 @@
|
||||
import { asc, eq, ne, sql, and } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import {
|
||||
plugins,
|
||||
pluginConfig,
|
||||
pluginEntities,
|
||||
pluginJobs,
|
||||
pluginJobRuns,
|
||||
pluginWebhookDeliveries,
|
||||
} from "@paperclipai/db";
|
||||
import type {
|
||||
PaperclipPluginManifestV1,
|
||||
PluginStatus,
|
||||
InstallPlugin,
|
||||
UpdatePluginStatus,
|
||||
UpsertPluginConfig,
|
||||
PatchPluginConfig,
|
||||
PluginEntityRecord,
|
||||
PluginEntityQuery,
|
||||
PluginJobRecord,
|
||||
PluginJobRunRecord,
|
||||
PluginWebhookDeliveryRecord,
|
||||
PluginJobStatus,
|
||||
PluginJobRunStatus,
|
||||
PluginJobRunTrigger,
|
||||
PluginWebhookDeliveryStatus,
|
||||
} from "@paperclipai/shared";
|
||||
import { conflict, notFound } from "../errors.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Detect if a Postgres error is a unique-constraint violation on the
|
||||
* `plugins_plugin_key_idx` unique index.
|
||||
*/
|
||||
function isPluginKeyConflict(error: unknown): boolean {
|
||||
if (typeof error !== "object" || error === null) return false;
|
||||
const err = error as { code?: string; constraint?: string; constraint_name?: string };
|
||||
const constraint = err.constraint ?? err.constraint_name;
|
||||
return err.code === "23505" && constraint === "plugins_plugin_key_idx";
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Service
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* PluginRegistry – CRUD operations for the `plugins` and `plugin_config`
|
||||
* tables. Follows the same factory-function pattern used by the rest of
|
||||
* the Paperclip service layer.
|
||||
*
|
||||
* This is the lowest-level persistence layer for plugins. Higher-level
|
||||
* concerns such as lifecycle state-machine enforcement and capability
|
||||
* gating are handled by {@link pluginLifecycleManager} and
|
||||
* {@link pluginCapabilityValidator} respectively.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §21.3 — Required Tables
|
||||
*/
|
||||
export function pluginRegistryService(db: Db) {
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
async function getById(id: string) {
|
||||
return db
|
||||
.select()
|
||||
.from(plugins)
|
||||
.where(eq(plugins.id, id))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
}
|
||||
|
||||
async function getByKey(pluginKey: string) {
|
||||
return db
|
||||
.select()
|
||||
.from(plugins)
|
||||
.where(eq(plugins.pluginKey, pluginKey))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
}
|
||||
|
||||
async function nextInstallOrder(): Promise<number> {
|
||||
const result = await db
|
||||
.select({ maxOrder: sql<number>`coalesce(max(${plugins.installOrder}), 0)` })
|
||||
.from(plugins);
|
||||
return (result[0]?.maxOrder ?? 0) + 1;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
return {
|
||||
    // ----- Read -----------------------------------------------------------

    /** List all registered plugins (including uninstalled) ordered by install order. */
    list: () =>
      db
        .select()
        .from(plugins)
        .orderBy(asc(plugins.installOrder)),

    /**
     * List installed plugins (excludes soft-deleted/uninstalled).
     * Use for Plugin Manager and default API list so uninstalled plugins do not appear.
     */
    listInstalled: () =>
      db
        .select()
        .from(plugins)
        .where(ne(plugins.status, "uninstalled"))
        .orderBy(asc(plugins.installOrder)),

    /** List plugins filtered by lifecycle status, ordered by install order. */
    listByStatus: (status: PluginStatus) =>
      db
        .select()
        .from(plugins)
        .where(eq(plugins.status, status))
        .orderBy(asc(plugins.installOrder)),

    /** Get a single plugin by primary key; resolves to null when not found. */
    getById,

    /** Get a single plugin by its unique `pluginKey`; resolves to null when not found. */
    getByKey,
|
||||
|
||||
    // ----- Install / Register --------------------------------------------

    /**
     * Register (install) a new plugin.
     *
     * The caller is expected to have already resolved and validated the
     * manifest from the package. This method persists the plugin row and
     * assigns the next install order.
     *
     * @throws {Conflict} when a plugin with the same key is already installed
     *   (either found up front or surfaced as a unique-index violation on a
     *   concurrent insert).
     */
    install: async (input: InstallPlugin, manifest: PaperclipPluginManifestV1) => {
      const existing = await getByKey(manifest.id);
      if (existing) {
        if (existing.status !== "uninstalled") {
          throw conflict(`Plugin already installed: ${manifest.id}`);
        }

        // Reinstall after soft-delete: reactivate the existing row so plugin-scoped
        // data and references remain stable across uninstall/reinstall cycles.
        // Note: installOrder is intentionally preserved from the original install.
        return db
          .update(plugins)
          .set({
            packageName: input.packageName,
            packagePath: input.packagePath ?? null,
            version: manifest.version,
            apiVersion: manifest.apiVersion,
            categories: manifest.categories,
            manifestJson: manifest,
            status: "installed" as PluginStatus,
            lastError: null, // clear any error from the previous install
            updatedAt: new Date(),
          })
          .where(eq(plugins.id, existing.id))
          .returning()
          .then((rows) => rows[0] ?? null);
      }

      const installOrder = await nextInstallOrder();

      try {
        const rows = await db
          .insert(plugins)
          .values({
            pluginKey: manifest.id,
            packageName: input.packageName,
            version: manifest.version,
            apiVersion: manifest.apiVersion,
            categories: manifest.categories,
            manifestJson: manifest,
            status: "installed" as PluginStatus,
            installOrder,
            packagePath: input.packagePath ?? null,
          })
          .returning();
        return rows[0];
      } catch (error) {
        // A concurrent install of the same key slips past the getByKey check
        // and surfaces here as a unique-constraint violation.
        if (isPluginKeyConflict(error)) {
          throw conflict(`Plugin already installed: ${manifest.id}`);
        }
        throw error;
      }
    },
|
||||
|
||||
// ----- Update ---------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Update a plugin's manifest and version (e.g. on upgrade).
|
||||
* The plugin must already exist.
|
||||
*/
|
||||
update: async (
|
||||
id: string,
|
||||
data: {
|
||||
packageName?: string;
|
||||
version?: string;
|
||||
manifest?: PaperclipPluginManifestV1;
|
||||
},
|
||||
) => {
|
||||
const plugin = await getById(id);
|
||||
if (!plugin) throw notFound("Plugin not found");
|
||||
|
||||
const setClause: Partial<typeof plugins.$inferInsert> & { updatedAt: Date } = {
|
||||
updatedAt: new Date(),
|
||||
};
|
||||
if (data.packageName !== undefined) setClause.packageName = data.packageName;
|
||||
if (data.version !== undefined) setClause.version = data.version;
|
||||
if (data.manifest !== undefined) {
|
||||
setClause.manifestJson = data.manifest;
|
||||
setClause.apiVersion = data.manifest.apiVersion;
|
||||
setClause.categories = data.manifest.categories;
|
||||
}
|
||||
|
||||
return db
|
||||
.update(plugins)
|
||||
.set(setClause)
|
||||
.where(eq(plugins.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
},
|
||||
|
||||
// ----- Status ---------------------------------------------------------
|
||||
|
||||
/** Update a plugin's lifecycle status and optional error message. */
|
||||
updateStatus: async (id: string, input: UpdatePluginStatus) => {
|
||||
const plugin = await getById(id);
|
||||
if (!plugin) throw notFound("Plugin not found");
|
||||
|
||||
return db
|
||||
.update(plugins)
|
||||
.set({
|
||||
status: input.status,
|
||||
lastError: input.lastError ?? null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(plugins.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
},
|
||||
|
||||
// ----- Uninstall / Remove --------------------------------------------
|
||||
|
||||
/**
|
||||
* Uninstall a plugin.
|
||||
*
|
||||
* When `removeData` is true the plugin row (and cascaded config) is
|
||||
* hard-deleted. Otherwise the status is set to `"uninstalled"` for
|
||||
* a soft-delete that preserves the record.
|
||||
*/
|
||||
uninstall: async (id: string, removeData = false) => {
|
||||
const plugin = await getById(id);
|
||||
if (!plugin) throw notFound("Plugin not found");
|
||||
|
||||
if (removeData) {
|
||||
// Hard delete – plugin_config cascades via FK onDelete
|
||||
return db
|
||||
.delete(plugins)
|
||||
.where(eq(plugins.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
}
|
||||
|
||||
// Soft delete – mark as uninstalled
|
||||
return db
|
||||
.update(plugins)
|
||||
.set({
|
||||
status: "uninstalled" as PluginStatus,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(plugins.id, id))
|
||||
.returning()
|
||||
.then((rows) => rows[0] ?? null);
|
||||
},
|
||||
|
||||
// ----- Config ---------------------------------------------------------
|
||||
|
||||
/** Retrieve a plugin's instance configuration. */
|
||||
getConfig: (pluginId: string) =>
|
||||
db
|
||||
.select()
|
||||
.from(pluginConfig)
|
||||
.where(eq(pluginConfig.pluginId, pluginId))
|
||||
.then((rows) => rows[0] ?? null),
|
||||
|
||||
/**
|
||||
* Create or fully replace a plugin's instance configuration.
|
||||
* If a config row already exists for the plugin it is replaced;
|
||||
* otherwise a new row is inserted.
|
||||
*/
|
||||
upsertConfig: async (pluginId: string, input: UpsertPluginConfig) => {
|
||||
const plugin = await getById(pluginId);
|
||||
if (!plugin) throw notFound("Plugin not found");
|
||||
|
||||
const existing = await db
|
||||
.select()
|
||||
.from(pluginConfig)
|
||||
.where(eq(pluginConfig.pluginId, pluginId))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
|
||||
if (existing) {
|
||||
return db
|
||||
.update(pluginConfig)
|
||||
.set({
|
||||
configJson: input.configJson,
|
||||
lastError: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(pluginConfig.pluginId, pluginId))
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
}
|
||||
|
||||
return db
|
||||
.insert(pluginConfig)
|
||||
.values({
|
||||
pluginId,
|
||||
configJson: input.configJson,
|
||||
})
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
},
|
||||
|
||||
/**
|
||||
* Partially update a plugin's instance configuration via shallow merge.
|
||||
* If no config row exists yet one is created with the supplied values.
|
||||
*/
|
||||
patchConfig: async (pluginId: string, input: PatchPluginConfig) => {
|
||||
const plugin = await getById(pluginId);
|
||||
if (!plugin) throw notFound("Plugin not found");
|
||||
|
||||
const existing = await db
|
||||
.select()
|
||||
.from(pluginConfig)
|
||||
.where(eq(pluginConfig.pluginId, pluginId))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
|
||||
if (existing) {
|
||||
const merged = { ...existing.configJson, ...input.configJson };
|
||||
return db
|
||||
.update(pluginConfig)
|
||||
.set({
|
||||
configJson: merged,
|
||||
lastError: null,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.where(eq(pluginConfig.pluginId, pluginId))
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
}
|
||||
|
||||
return db
|
||||
.insert(pluginConfig)
|
||||
.values({
|
||||
pluginId,
|
||||
configJson: input.configJson,
|
||||
})
|
||||
.returning()
|
||||
.then((rows) => rows[0]);
|
||||
},
|
||||
|
||||
/**
|
||||
* Record an error against a plugin's config (e.g. validation failure
|
||||
* against the plugin's instanceConfigSchema).
|
||||
*/
|
||||
setConfigError: async (pluginId: string, lastError: string | null) => {
|
||||
const rows = await db
|
||||
.update(pluginConfig)
|
||||
.set({ lastError, updatedAt: new Date() })
|
||||
.where(eq(pluginConfig.pluginId, pluginId))
|
||||
.returning();
|
||||
|
||||
if (rows.length === 0) throw notFound("Plugin config not found");
|
||||
return rows[0];
|
||||
},
|
||||
|
||||
/** Delete a plugin's config row. */
|
||||
deleteConfig: async (pluginId: string) => {
|
||||
const rows = await db
|
||||
.delete(pluginConfig)
|
||||
.where(eq(pluginConfig.pluginId, pluginId))
|
||||
.returning();
|
||||
|
||||
return rows[0] ?? null;
|
||||
},
|
||||
|
||||
// ----- Entities -------------------------------------------------------
|
||||
|
||||
/**
|
||||
* List persistent entity mappings owned by a specific plugin, with filtering and pagination.
|
||||
*
|
||||
* @param pluginId - The UUID of the plugin.
|
||||
* @param query - Optional filters (type, externalId) and pagination (limit, offset).
|
||||
* @returns A list of matching `PluginEntityRecord` objects.
|
||||
*/
|
||||
listEntities: (pluginId: string, query?: PluginEntityQuery) => {
|
||||
const conditions = [eq(pluginEntities.pluginId, pluginId)];
|
||||
if (query?.entityType) conditions.push(eq(pluginEntities.entityType, query.entityType));
|
||||
if (query?.externalId) conditions.push(eq(pluginEntities.externalId, query.externalId));
|
||||
|
||||
return db
|
||||
.select()
|
||||
.from(pluginEntities)
|
||||
.where(and(...conditions))
|
||||
.orderBy(asc(pluginEntities.createdAt))
|
||||
.limit(query?.limit ?? 100)
|
||||
.offset(query?.offset ?? 0);
|
||||
},
|
||||
|
||||
/**
|
||||
* Look up a plugin-owned entity mapping by its external identifier.
|
||||
*
|
||||
* @param pluginId - The UUID of the plugin.
|
||||
* @param entityType - The type of entity (e.g., 'project', 'issue').
|
||||
* @param externalId - The identifier in the external system.
|
||||
* @returns The matching `PluginEntityRecord` or null.
|
||||
*/
|
||||
getEntityByExternalId: (
|
||||
pluginId: string,
|
||||
entityType: string,
|
||||
externalId: string,
|
||||
) =>
|
||||
db
|
||||
.select()
|
||||
.from(pluginEntities)
|
||||
.where(
|
||||
and(
|
||||
eq(pluginEntities.pluginId, pluginId),
|
||||
eq(pluginEntities.entityType, entityType),
|
||||
eq(pluginEntities.externalId, externalId),
|
||||
),
|
||||
)
|
||||
.then((rows) => rows[0] ?? null),
|
||||
|
||||
    /**
     * Create or update a persistent mapping between a Paperclip object and an
     * external entity.
     *
     * Performs a manual select-then-insert/update rather than a single
     * database-level upsert (see inline note), so two concurrent upserts of
     * the same (pluginId, entityType, externalId) triple can race.
     *
     * @param pluginId - The UUID of the plugin.
     * @param input - The entity data to persist.
     * @returns The newly created or updated `PluginEntityRecord`.
     */
    upsertEntity: async (
      pluginId: string,
      input: Omit<typeof pluginEntities.$inferInsert, "id" | "pluginId" | "createdAt" | "updatedAt">,
    ) => {
      // Drizzle doesn't support pg-specific onConflictDoUpdate easily in the insert() call
      // with complex where clauses, so we do it manually.
      // NOTE(review): `input.externalId ?? ""` looks up a missing externalId as
      // the empty string — confirm rows are stored with "" rather than NULL,
      // otherwise repeated upserts with no externalId create duplicates.
      const existing = await db
        .select()
        .from(pluginEntities)
        .where(
          and(
            eq(pluginEntities.pluginId, pluginId),
            eq(pluginEntities.entityType, input.entityType),
            eq(pluginEntities.externalId, input.externalId ?? ""),
          ),
        )
        .then((rows) => rows[0] ?? null);

      if (existing) {
        // Existing mapping — overwrite its fields and bump updatedAt.
        return db
          .update(pluginEntities)
          .set({
            ...input,
            updatedAt: new Date(),
          })
          .where(eq(pluginEntities.id, existing.id))
          .returning()
          .then((rows) => rows[0]);
      }

      return db
        .insert(pluginEntities)
        .values({
          ...input,
          pluginId,
          // NOTE(review): `as any` silences a type mismatch on the insert
          // values — worth tightening so the compiler checks this payload.
        } as any)
        .returning()
        .then((rows) => rows[0]);
    },
|
||||
|
||||
/**
 * Delete a specific plugin-owned entity mapping by its internal UUID.
 *
 * @param id - The UUID of the entity record.
 * @returns The deleted record, or null if no row had that id.
 */
deleteEntity: async (id: string) => {
  const [deleted] = await db
    .delete(pluginEntities)
    .where(eq(pluginEntities.id, id))
    .returning();
  return deleted ?? null;
},
|
||||
|
||||
// ----- Jobs -----------------------------------------------------------
|
||||
|
||||
/**
 * List all scheduled jobs registered for a specific plugin.
 *
 * @param pluginId - The UUID of the plugin.
 * @returns A list of `PluginJobRecord` objects ordered by job key.
 */
listJobs: (pluginId: string) => {
  const ownedByPlugin = eq(pluginJobs.pluginId, pluginId);
  return db.select().from(pluginJobs).where(ownedByPlugin).orderBy(asc(pluginJobs.jobKey));
},
|
||||
|
||||
/**
 * Look up a plugin job by its unique job key.
 *
 * @param pluginId - The UUID of the plugin.
 * @param jobKey - The key defined in the plugin manifest.
 * @returns The matching `PluginJobRecord` or null.
 */
getJobByKey: async (pluginId: string, jobKey: string) => {
  const rows = await db
    .select()
    .from(pluginJobs)
    .where(and(eq(pluginJobs.pluginId, pluginId), eq(pluginJobs.jobKey, jobKey)));
  return rows[0] ?? null;
},
|
||||
|
||||
/**
 * Register or update a scheduled job for a plugin.
 *
 * Keyed by `(pluginId, jobKey)`. Existing jobs keep their current status
 * unless `input.status` is provided; new jobs default to "active".
 *
 * NOTE(review): select-then-write is not atomic — concurrent upserts for the
 * same (pluginId, jobKey) may race and insert duplicates; confirm a unique
 * constraint exists or is acceptable.
 *
 * @param pluginId - The UUID of the plugin.
 * @param jobKey - The unique key for the job.
 * @param input - The schedule (cron) and optional status.
 * @returns The updated or created `PluginJobRecord`.
 */
upsertJob: async (
  pluginId: string,
  jobKey: string,
  input: { schedule: string; status?: PluginJobStatus },
) => {
  // Check for an existing row under the composite key.
  const existing = await db
    .select()
    .from(pluginJobs)
    .where(and(eq(pluginJobs.pluginId, pluginId), eq(pluginJobs.jobKey, jobKey)))
    .then((rows) => rows[0] ?? null);

  if (existing) {
    // Update path: replace schedule, keep prior status unless overridden.
    return db
      .update(pluginJobs)
      .set({
        schedule: input.schedule,
        status: input.status ?? existing.status,
        updatedAt: new Date(),
      })
      .where(eq(pluginJobs.id, existing.id))
      .returning()
      .then((rows) => rows[0]);
  }

  // Insert path: new jobs start "active" unless a status was supplied.
  return db
    .insert(pluginJobs)
    .values({
      pluginId,
      jobKey,
      schedule: input.schedule,
      status: input.status ?? "active",
    })
    .returning()
    .then((rows) => rows[0]);
},
|
||||
|
||||
/**
 * Record the start of a specific job execution.
 *
 * @param pluginId - The UUID of the plugin.
 * @param jobId - The UUID of the parent job record.
 * @param trigger - What triggered this run (e.g., 'schedule', 'manual').
 * @returns The newly created `PluginJobRunRecord` in 'pending' status.
 */
createJobRun: async (
  pluginId: string,
  jobId: string,
  trigger: PluginJobRunTrigger,
) => {
  const [run] = await db
    .insert(pluginJobRuns)
    .values({ pluginId, jobId, trigger, status: "pending" })
    .returning();
  return run;
},
|
||||
|
||||
/**
 * Update the status, duration, and logs of a job execution record.
 *
 * @param runId - The UUID of the job run.
 * @param input - The update fields (status, error, duration, etc.).
 * @returns The updated `PluginJobRunRecord`, or null when no row matched.
 */
updateJobRun: async (
  runId: string,
  input: {
    status: PluginJobRunStatus;
    durationMs?: number;
    error?: string;
    logs?: string[];
    startedAt?: Date;
    finishedAt?: Date;
  },
) => {
  const [updated] = await db
    .update(pluginJobRuns)
    .set(input)
    .where(eq(pluginJobRuns.id, runId))
    .returning();
  return updated ?? null;
},
|
||||
|
||||
// ----- Webhooks -------------------------------------------------------
|
||||
|
||||
/**
 * Create a record for an incoming webhook delivery.
 *
 * @param pluginId - The UUID of the receiving plugin.
 * @param webhookKey - The endpoint key defined in the manifest.
 * @param input - The payload, headers, and optional external ID.
 * @returns The newly created `PluginWebhookDeliveryRecord` in 'pending' status.
 */
createWebhookDelivery: async (
  pluginId: string,
  webhookKey: string,
  input: {
    externalId?: string;
    payload: Record<string, unknown>;
    headers?: Record<string, string>;
  },
) => {
  const [delivery] = await db
    .insert(pluginWebhookDeliveries)
    .values({
      pluginId,
      webhookKey,
      externalId: input.externalId,
      payload: input.payload,
      headers: input.headers ?? {},
      status: "pending",
    })
    .returning();
  return delivery;
},
|
||||
|
||||
/**
 * Update the status and processing metrics of a webhook delivery.
 *
 * @param deliveryId - The UUID of the delivery record.
 * @param input - The update fields (status, error, duration, etc.).
 * @returns The updated `PluginWebhookDeliveryRecord`, or null when no row matched.
 */
updateWebhookDelivery: async (
  deliveryId: string,
  input: {
    status: PluginWebhookDeliveryStatus;
    durationMs?: number;
    error?: string;
    startedAt?: Date;
    finishedAt?: Date;
  },
) => {
  const [updated] = await db
    .update(pluginWebhookDeliveries)
    .set(input)
    .where(eq(pluginWebhookDeliveries.id, deliveryId))
    .returning();
  return updated ?? null;
},
|
||||
};
|
||||
}
|
||||
221
server/src/services/plugin-runtime-sandbox.ts
Normal file
221
server/src/services/plugin-runtime-sandbox.ts
Normal file
@@ -0,0 +1,221 @@
|
||||
import { existsSync, readFileSync, realpathSync } from "node:fs";
|
||||
import path from "node:path";
|
||||
import vm from "node:vm";
|
||||
import type { PaperclipPluginManifestV1 } from "@paperclipai/shared";
|
||||
import type { PluginCapabilityValidator } from "./plugin-capability-validator.js";
|
||||
|
||||
export class PluginSandboxError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = "PluginSandboxError";
|
||||
}
|
||||
}
|
||||
|
||||
/**
 * Sandbox runtime options used when loading a plugin worker module.
 *
 * `allowedModuleSpecifiers` controls which bare module specifiers are permitted.
 * `allowedModules` provides concrete host-provided bindings for those specifiers.
 */
export interface PluginSandboxOptions {
  /** Path to the plugin's CJS entry file; resolved with `path.resolve` by the loader. */
  entrypointPath: string;
  /** Bare specifiers (non-relative, non-absolute) the sandboxed code may `require`. */
  allowedModuleSpecifiers?: ReadonlySet<string>;
  /** Host-provided export objects, keyed by allow-listed specifier. */
  allowedModules?: Readonly<Record<string, Record<string, unknown>>>;
  /** Extra globals merged over the default safe globals of the VM context. */
  allowedGlobals?: Record<string, unknown>;
  /** Per-script evaluation timeout in milliseconds (defaults to 2000). */
  timeoutMs?: number;
}
|
||||
|
||||
/**
 * Operation-level runtime gate for plugin host API calls.
 * Every host operation must be checked against manifest capabilities before execution.
 */
export interface CapabilityScopedInvoker {
  /**
   * Validate `operation` against the plugin's declared capabilities, then run
   * `fn` and return its (awaited) result. Throws before calling `fn` when the
   * capability check fails.
   */
  invoke<T>(operation: string, fn: () => Promise<T> | T): Promise<T>;
}
|
||||
|
||||
interface LoadedModule {
  /** Shallow copy of the loaded entry module's exports. */
  namespace: Record<string, unknown>;
}

/** Default per-script evaluation timeout (ms) when `options.timeoutMs` is unset. */
const DEFAULT_TIMEOUT_MS = 2_000;
/** Suffixes tried in order when resolving a relative import to a file on disk. */
const MODULE_PATH_SUFFIXES = ["", ".js", ".mjs", ".cjs", "/index.js", "/index.mjs", "/index.cjs"];
/**
 * Globals exposed to sandboxed code by default. Deliberately minimal: timers,
 * console, URL, and text codecs only — notably no `process`, `require`, or
 * `Buffer`.
 */
const DEFAULT_GLOBALS: Record<string, unknown> = {
  console,
  setTimeout,
  clearTimeout,
  setInterval,
  clearInterval,
  URL,
  URLSearchParams,
  TextEncoder,
  TextDecoder,
  AbortController,
  AbortSignal,
};
|
||||
|
||||
export function createCapabilityScopedInvoker(
|
||||
manifest: PaperclipPluginManifestV1,
|
||||
validator: PluginCapabilityValidator,
|
||||
): CapabilityScopedInvoker {
|
||||
return {
|
||||
async invoke<T>(operation: string, fn: () => Promise<T> | T): Promise<T> {
|
||||
validator.assertOperation(manifest, operation);
|
||||
return await fn();
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Load a CommonJS plugin module in a VM context with explicit module import allow-listing.
 *
 * Security properties:
 * - no implicit access to host globals like `process`
 * - no unrestricted built-in module imports
 * - relative imports are resolved only inside the plugin root directory
 *
 * @param options - Entrypoint path, allow-lists, extra globals, and timeout.
 * @returns The entry module's exports as a `LoadedModule` namespace.
 * @throws {PluginSandboxError} On path escapes, denied imports, ESM sources,
 *   unreadable files, or unresolvable module paths.
 */
export async function loadPluginModuleInSandbox(
  options: PluginSandboxOptions,
): Promise<LoadedModule> {
  const timeoutMs = options.timeoutMs ?? DEFAULT_TIMEOUT_MS;
  const allowedSpecifiers = options.allowedModuleSpecifiers ?? new Set<string>();
  const entrypointPath = path.resolve(options.entrypointPath);
  // The plugin root is the entrypoint's directory; all relative imports must
  // stay inside it (checked against the realpath to defeat symlink escapes).
  const pluginRoot = path.dirname(entrypointPath);

  // One context is shared by the entry module and everything it requires.
  const context = vm.createContext({
    ...DEFAULT_GLOBALS,
    ...options.allowedGlobals,
  });

  const moduleCache = new Map<string, Record<string, unknown>>();
  const allowedModules = options.allowedModules ?? {};

  const realPluginRoot = realpathSync(pluginRoot);

  const loadModuleSync = (modulePath: string): Record<string, unknown> => {
    const resolvedPath = resolveModulePathSync(path.resolve(modulePath));
    // Resolve symlinks before the containment check so a link cannot point
    // outside the plugin root.
    const realPath = realpathSync(resolvedPath);

    if (!isWithinRoot(realPath, realPluginRoot)) {
      throw new PluginSandboxError(
        `Import '${modulePath}' escapes plugin root and is not allowed`,
      );
    }

    const cached = moduleCache.get(realPath);
    if (cached) return cached;

    const code = readModuleSourceSync(realPath);

    if (looksLikeEsm(code)) {
      throw new PluginSandboxError(
        "Sandbox loader only supports CommonJS modules. Build plugin worker entrypoints as CJS for sandboxed loading.",
      );
    }

    const module = { exports: {} as Record<string, unknown> };
    // Cache the module before execution to preserve CommonJS cycle semantics.
    moduleCache.set(realPath, module.exports);

    // Sandboxed `require`: bare specifiers must be allow-listed AND have a
    // host binding; relative/absolute specifiers recurse through this loader.
    const requireInSandbox = (specifier: string): Record<string, unknown> => {
      if (!specifier.startsWith(".") && !specifier.startsWith("/")) {
        if (!allowedSpecifiers.has(specifier)) {
          throw new PluginSandboxError(
            `Import denied for module '${specifier}'. Add an explicit sandbox allow-list entry.`,
          );
        }

        const binding = allowedModules[specifier];
        if (!binding) {
          throw new PluginSandboxError(
            `Bare module '${specifier}' is allow-listed but no host binding is registered.`,
          );
        }

        return binding;
      }

      // Relative paths resolve against the importing module's directory.
      const candidatePath = path.resolve(path.dirname(realPath), specifier);
      return loadModuleSync(candidatePath);
    };

    // Inject the CJS module arguments into the context so the script can call
    // the wrapper immediately. This is critical: the timeout in runInContext
    // only applies during script evaluation. By including the self-invocation
    // `(fn)(exports, module, ...)` in the script text, the timeout also covers
    // the actual module body execution — preventing infinite loops from hanging.
    const sandboxArgs = {
      __paperclip_exports: module.exports,
      __paperclip_module: module,
      __paperclip_require: requireInSandbox,
      __paperclip_filename: realPath,
      __paperclip_dirname: path.dirname(realPath),
    };
    // Temporarily inject args into the context, run, then remove to avoid pollution.
    Object.assign(context, sandboxArgs);
    const wrapped = `(function (exports, module, require, __filename, __dirname) {\n${code}\n})(__paperclip_exports, __paperclip_module, __paperclip_require, __paperclip_filename, __paperclip_dirname)`;
    const script = new vm.Script(wrapped, { filename: realPath });
    try {
      script.runInContext(context, { timeout: timeoutMs });
    } finally {
      // Always strip the injected args, even when the script throws/times out.
      for (const key of Object.keys(sandboxArgs)) {
        // eslint-disable-next-line @typescript-eslint/no-dynamic-delete
        delete (context as Record<string, unknown>)[key];
      }
    }

    // Re-cache after execution: the module may have reassigned module.exports,
    // and non-object exports are normalized to `{ default: value }`.
    const normalizedExports = normalizeModuleExports(module.exports);
    moduleCache.set(realPath, normalizedExports);
    return normalizedExports;
  };

  const entryExports = loadModuleSync(entrypointPath);

  return {
    namespace: { ...entryExports },
  };
}
|
||||
|
||||
function resolveModulePathSync(candidatePath: string): string {
|
||||
for (const suffix of MODULE_PATH_SUFFIXES) {
|
||||
const fullPath = `${candidatePath}${suffix}`;
|
||||
if (existsSync(fullPath)) {
|
||||
return fullPath;
|
||||
}
|
||||
}
|
||||
|
||||
throw new PluginSandboxError(`Unable to resolve module import at path '${candidatePath}'`);
|
||||
}
|
||||
|
||||
/**
|
||||
* True when `targetPath` is inside `rootPath` (or equals rootPath), false otherwise.
|
||||
* Uses `path.relative` so sibling-prefix paths (e.g. `/root-a` vs `/root`) cannot bypass checks.
|
||||
*/
|
||||
function isWithinRoot(targetPath: string, rootPath: string): boolean {
|
||||
const relative = path.relative(rootPath, targetPath);
|
||||
return relative === "" || (!relative.startsWith("..") && !path.isAbsolute(relative));
|
||||
}
|
||||
|
||||
function readModuleSourceSync(modulePath: string): string {
|
||||
try {
|
||||
return readFileSync(modulePath, "utf8");
|
||||
} catch (error) {
|
||||
throw new PluginSandboxError(
|
||||
`Failed to read sandbox module '${modulePath}': ${error instanceof Error ? error.message : String(error)}`,
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
function normalizeModuleExports(exportsValue: unknown): Record<string, unknown> {
|
||||
if (typeof exportsValue === "object" && exportsValue !== null) {
|
||||
return exportsValue as Record<string, unknown>;
|
||||
}
|
||||
|
||||
return { default: exportsValue };
|
||||
}
|
||||
|
||||
/**
|
||||
* Lightweight guard to reject ESM syntax in the VM CommonJS loader.
|
||||
*/
|
||||
function looksLikeEsm(code: string): boolean {
|
||||
return /(^|\n)\s*import\s+/m.test(code) || /(^|\n)\s*export\s+/m.test(code);
|
||||
}
|
||||
354
server/src/services/plugin-secrets-handler.ts
Normal file
354
server/src/services/plugin-secrets-handler.ts
Normal file
@@ -0,0 +1,354 @@
|
||||
/**
|
||||
* Plugin secrets host-side handler — resolves secret references through the
|
||||
* Paperclip secret provider system.
|
||||
*
|
||||
* When a plugin worker calls `ctx.secrets.resolve(secretRef)`, the JSON-RPC
|
||||
* request arrives at the host with `{ secretRef }`. This module provides the
|
||||
* concrete `HostServices.secrets` adapter that:
|
||||
*
|
||||
* 1. Parses the `secretRef` string to identify the secret.
|
||||
* 2. Looks up the secret record and its latest version in the database.
|
||||
* 3. Delegates to the configured `SecretProviderModule` to decrypt /
|
||||
* resolve the raw value.
|
||||
* 4. Returns the resolved plaintext value to the worker.
|
||||
*
|
||||
* ## Secret Reference Format
|
||||
*
|
||||
* A `secretRef` is a **secret UUID** — the primary key (`id`) of a row in
|
||||
* the `company_secrets` table. Operators place these UUIDs into plugin
|
||||
* config values; plugin workers resolve them at execution time via
|
||||
* `ctx.secrets.resolve(secretId)`.
|
||||
*
|
||||
* ## Security Invariants
|
||||
*
|
||||
* - Resolved values are **never** logged, persisted, or included in error
|
||||
* messages (per PLUGIN_SPEC.md §22).
|
||||
* - The handler is capability-gated: only plugins with `secrets.read-ref`
|
||||
* declared in their manifest may call it (enforced by `host-client-factory`).
|
||||
* - The host handler itself does not cache resolved values. Each call goes
|
||||
* through the secret provider to honour rotation.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §22 — Secrets
|
||||
* @see host-client-factory.ts — capability gating
|
||||
* @see services/secrets.ts — secretService used by agent env bindings
|
||||
*/
|
||||
|
||||
import { eq, and, desc } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { companySecrets, companySecretVersions, pluginConfig } from "@paperclipai/db";
|
||||
import type { SecretProvider } from "@paperclipai/shared";
|
||||
import { getSecretProvider } from "../secrets/provider-registry.js";
|
||||
import { pluginRegistryService } from "./plugin-registry.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Error helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a sanitised error that never leaks secret material.
|
||||
* Only the ref identifier is included; never the resolved value.
|
||||
*/
|
||||
function secretNotFound(secretRef: string): Error {
|
||||
const err = new Error(`Secret not found: ${secretRef}`);
|
||||
err.name = "SecretNotFoundError";
|
||||
return err;
|
||||
}
|
||||
|
||||
function secretVersionNotFound(secretRef: string): Error {
|
||||
const err = new Error(`No version found for secret: ${secretRef}`);
|
||||
err.name = "SecretVersionNotFoundError";
|
||||
return err;
|
||||
}
|
||||
|
||||
function invalidSecretRef(secretRef: string): Error {
|
||||
const err = new Error(`Invalid secret reference: ${secretRef}`);
|
||||
err.name = "InvalidSecretRefError";
|
||||
return err;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Validation
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * UUID regex for validating secretRef format (8-4-4-4-12 hex groups,
 * case-insensitive). NOTE(review): this accepts any version/variant nibble,
 * not just UUID v4 — the previous "v4" label was inaccurate.
 */
const UUID_RE =
  /^[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}$/i;
|
||||
|
||||
/**
|
||||
* Check whether a secretRef looks like a valid UUID.
|
||||
*/
|
||||
function isUuid(value: string): boolean {
|
||||
return UUID_RE.test(value);
|
||||
}
|
||||
|
||||
/**
|
||||
* Collect the property paths (dot-separated keys) whose schema node declares
|
||||
* `format: "secret-ref"`. Only top-level and nested `properties` are walked —
|
||||
* this mirrors the flat/nested object shapes that `JsonSchemaForm` renders.
|
||||
*/
|
||||
function collectSecretRefPaths(
|
||||
schema: Record<string, unknown> | null | undefined,
|
||||
): Set<string> {
|
||||
const paths = new Set<string>();
|
||||
if (!schema || typeof schema !== "object") return paths;
|
||||
|
||||
function walk(node: Record<string, unknown>, prefix: string): void {
|
||||
const props = node.properties as Record<string, Record<string, unknown>> | undefined;
|
||||
if (!props || typeof props !== "object") return;
|
||||
for (const [key, propSchema] of Object.entries(props)) {
|
||||
if (!propSchema || typeof propSchema !== "object") continue;
|
||||
const path = prefix ? `${prefix}.${key}` : key;
|
||||
if (propSchema.format === "secret-ref") {
|
||||
paths.add(path);
|
||||
}
|
||||
// Recurse into nested object schemas
|
||||
if (propSchema.type === "object") {
|
||||
walk(propSchema, path);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
walk(schema, "");
|
||||
return paths;
|
||||
}
|
||||
|
||||
/**
|
||||
* Extract secret reference UUIDs from a plugin's configJson, scoped to only
|
||||
* the fields annotated with `format: "secret-ref"` in the schema.
|
||||
*
|
||||
* When no schema is provided, falls back to collecting all UUID-shaped strings
|
||||
* (backwards-compatible for plugins without a declared instanceConfigSchema).
|
||||
*/
|
||||
export function extractSecretRefsFromConfig(
|
||||
configJson: unknown,
|
||||
schema?: Record<string, unknown> | null,
|
||||
): Set<string> {
|
||||
const refs = new Set<string>();
|
||||
if (configJson == null || typeof configJson !== "object") return refs;
|
||||
|
||||
const secretPaths = collectSecretRefPaths(schema);
|
||||
|
||||
// If schema declares secret-ref paths, extract only those values.
|
||||
if (secretPaths.size > 0) {
|
||||
for (const dotPath of secretPaths) {
|
||||
const keys = dotPath.split(".");
|
||||
let current: unknown = configJson;
|
||||
for (const k of keys) {
|
||||
if (current == null || typeof current !== "object") { current = undefined; break; }
|
||||
current = (current as Record<string, unknown>)[k];
|
||||
}
|
||||
if (typeof current === "string" && isUuid(current)) {
|
||||
refs.add(current);
|
||||
}
|
||||
}
|
||||
return refs;
|
||||
}
|
||||
|
||||
// Fallback: no schema or no secret-ref annotations — collect all UUIDs.
|
||||
// This preserves backwards compatibility for plugins that omit
|
||||
// instanceConfigSchema.
|
||||
function walkAll(value: unknown): void {
|
||||
if (typeof value === "string") {
|
||||
if (isUuid(value)) refs.add(value);
|
||||
} else if (Array.isArray(value)) {
|
||||
for (const item of value) walkAll(item);
|
||||
} else if (value !== null && typeof value === "object") {
|
||||
for (const v of Object.values(value as Record<string, unknown>)) walkAll(v);
|
||||
}
|
||||
}
|
||||
|
||||
walkAll(configJson);
|
||||
return refs;
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Handler factory
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Input shape for the `secrets.resolve` handler.
 *
 * Matches `WorkerToHostMethods["secrets.resolve"][0]` from `protocol.ts`.
 */
export interface PluginSecretsResolveParams {
  /** The secret reference string (a secret UUID). */
  secretRef: string;
}

/**
 * Options for creating the plugin secrets handler.
 */
export interface PluginSecretsHandlerOptions {
  /** Database connection. */
  db: Db;
  /**
   * The plugin ID using this handler.
   * Used for logging context only; never included in error payloads
   * that reach the plugin worker.
   */
  pluginId: string;
}

/**
 * The `HostServices.secrets` adapter for the plugin host-client factory.
 */
export interface PluginSecretsService {
  /**
   * Resolve a secret reference to its current plaintext value.
   *
   * @param params - Contains the `secretRef` (UUID of the secret)
   * @returns The resolved secret value
   * @throws {Error} If the secret is not found, has no versions, or
   *   the provider fails to resolve
   */
  resolve(params: PluginSecretsResolveParams): Promise<string>;
}
|
||||
|
||||
/**
|
||||
* Create a `HostServices.secrets` adapter for a specific plugin.
|
||||
*
|
||||
* The returned service looks up secrets by UUID, fetches the latest version
|
||||
* material, and delegates to the appropriate `SecretProviderModule` for
|
||||
* decryption.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const secretsHandler = createPluginSecretsHandler({ db, pluginId });
|
||||
* const handlers = createHostClientHandlers({
|
||||
* pluginId,
|
||||
* capabilities: manifest.capabilities,
|
||||
* services: {
|
||||
* secrets: secretsHandler,
|
||||
* // ...
|
||||
* },
|
||||
* });
|
||||
* ```
|
||||
*
|
||||
* @param options - Database connection and plugin identity
|
||||
* @returns A `PluginSecretsService` suitable for `HostServices.secrets`
|
||||
*/
|
||||
/** Simple sliding-window rate limiter for secret resolution attempts. */
|
||||
function createRateLimiter(maxAttempts: number, windowMs: number) {
|
||||
const attempts = new Map<string, number[]>();
|
||||
|
||||
return {
|
||||
check(key: string): boolean {
|
||||
const now = Date.now();
|
||||
const windowStart = now - windowMs;
|
||||
const existing = (attempts.get(key) ?? []).filter((ts) => ts > windowStart);
|
||||
if (existing.length >= maxAttempts) return false;
|
||||
existing.push(now);
|
||||
attempts.set(key, existing);
|
||||
return true;
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Create a `HostServices.secrets` adapter bound to one plugin.
 *
 * Pipeline per resolve call: rate-limit check → ref format validation →
 * config-scope check (only refs present in this plugin's config) → DB lookup
 * of secret + latest version → provider resolution. Resolved values are
 * returned directly and never logged or embedded in errors.
 *
 * @param options - Database connection and plugin identity
 * @returns A `PluginSecretsService` suitable for `HostServices.secrets`
 */
export function createPluginSecretsHandler(
  options: PluginSecretsHandlerOptions,
): PluginSecretsService {
  const { db, pluginId } = options;
  const registry = pluginRegistryService(db);

  // Rate limit: max 30 resolution attempts per plugin per minute
  const rateLimiter = createRateLimiter(30, 60_000);

  // Per-handler cache of the refs this plugin's config is allowed to resolve.
  // NOTE(review): cache lives on the handler instance; a config change takes
  // up to CONFIG_CACHE_TTL_MS to be observed — confirm acceptable.
  let cachedAllowedRefs: Set<string> | null = null;
  let cachedAllowedRefsExpiry = 0;
  const CONFIG_CACHE_TTL_MS = 30_000; // 30 seconds, matches event bus TTL

  return {
    async resolve(params: PluginSecretsResolveParams): Promise<string> {
      const { secretRef } = params;

      // ---------------------------------------------------------------
      // 0. Rate limiting — prevent brute-force UUID enumeration
      // ---------------------------------------------------------------
      if (!rateLimiter.check(pluginId)) {
        const err = new Error("Rate limit exceeded for secret resolution");
        err.name = "RateLimitExceededError";
        throw err;
      }

      // ---------------------------------------------------------------
      // 1. Validate the ref format
      // ---------------------------------------------------------------
      if (!secretRef || typeof secretRef !== "string" || secretRef.trim().length === 0) {
        throw invalidSecretRef(secretRef ?? "<empty>");
      }

      const trimmedRef = secretRef.trim();

      if (!isUuid(trimmedRef)) {
        throw invalidSecretRef(trimmedRef);
      }

      // ---------------------------------------------------------------
      // 1b. Scope check — only allow secrets referenced in this plugin's config
      // ---------------------------------------------------------------
      const now = Date.now();
      if (!cachedAllowedRefs || now > cachedAllowedRefsExpiry) {
        // Refresh the allow-list: current config row + manifest schema
        // (used to restrict extraction to `format: "secret-ref"` fields).
        const [configRow, plugin] = await Promise.all([
          db
            .select()
            .from(pluginConfig)
            .where(eq(pluginConfig.pluginId, pluginId))
            .then((rows) => rows[0] ?? null),
          registry.getById(pluginId),
        ]);

        const schema = (plugin?.manifestJson as unknown as Record<string, unknown> | null)
          ?.instanceConfigSchema as Record<string, unknown> | undefined;
        cachedAllowedRefs = extractSecretRefsFromConfig(configRow?.configJson, schema);
        cachedAllowedRefsExpiry = now + CONFIG_CACHE_TTL_MS;
      }

      if (!cachedAllowedRefs.has(trimmedRef)) {
        // Return "not found" to avoid leaking whether the secret exists
        throw secretNotFound(trimmedRef);
      }

      // ---------------------------------------------------------------
      // 2. Look up the secret record by UUID
      // ---------------------------------------------------------------
      const secret = await db
        .select()
        .from(companySecrets)
        .where(eq(companySecrets.id, trimmedRef))
        .then((rows) => rows[0] ?? null);

      if (!secret) {
        throw secretNotFound(trimmedRef);
      }

      // ---------------------------------------------------------------
      // 3. Fetch the latest version's material
      // ---------------------------------------------------------------
      const versionRow = await db
        .select()
        .from(companySecretVersions)
        .where(
          and(
            eq(companySecretVersions.secretId, secret.id),
            eq(companySecretVersions.version, secret.latestVersion),
          ),
        )
        .then((rows) => rows[0] ?? null);

      if (!versionRow) {
        throw secretVersionNotFound(trimmedRef);
      }

      // ---------------------------------------------------------------
      // 4. Resolve through the appropriate secret provider
      // ---------------------------------------------------------------
      // No caching here by design: each call re-resolves so rotation is honoured.
      const provider = getSecretProvider(secret.provider as SecretProvider);
      const resolved = await provider.resolveVersion({
        material: versionRow.material as Record<string, unknown>,
        externalRef: secret.externalRef,
      });

      return resolved;
    },
  };
}
|
||||
237
server/src/services/plugin-state-store.ts
Normal file
237
server/src/services/plugin-state-store.ts
Normal file
@@ -0,0 +1,237 @@
|
||||
import { and, eq, isNull } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { plugins, pluginState } from "@paperclipai/db";
|
||||
import type {
|
||||
PluginStateScopeKind,
|
||||
SetPluginState,
|
||||
ListPluginState,
|
||||
} from "@paperclipai/shared";
|
||||
import { notFound } from "../errors.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Helpers
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/** Default namespace used when the plugin does not specify one. */
|
||||
const DEFAULT_NAMESPACE = "default";
|
||||
|
||||
/**
|
||||
* Build the WHERE clause conditions for a scoped state lookup.
|
||||
*
|
||||
* The five-part composite key is:
|
||||
* `(pluginId, scopeKind, scopeId, namespace, stateKey)`
|
||||
*
|
||||
* `scopeId` may be null (for `instance` scope) or a non-empty string.
|
||||
*/
|
||||
function scopeConditions(
|
||||
pluginId: string,
|
||||
scopeKind: PluginStateScopeKind,
|
||||
scopeId: string | undefined | null,
|
||||
namespace: string,
|
||||
stateKey: string,
|
||||
) {
|
||||
const conditions = [
|
||||
eq(pluginState.pluginId, pluginId),
|
||||
eq(pluginState.scopeKind, scopeKind),
|
||||
eq(pluginState.namespace, namespace),
|
||||
eq(pluginState.stateKey, stateKey),
|
||||
];
|
||||
|
||||
if (scopeId != null && scopeId !== "") {
|
||||
conditions.push(eq(pluginState.scopeId, scopeId));
|
||||
} else {
|
||||
conditions.push(isNull(pluginState.scopeId));
|
||||
}
|
||||
|
||||
return and(...conditions);
|
||||
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Service
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Plugin State Store — scoped key-value persistence for plugin workers.
|
||||
*
|
||||
* Provides `get`, `set`, `delete`, and `list` operations over the
|
||||
* `plugin_state` table. Each plugin's data is strictly namespaced by
|
||||
* `pluginId` so plugins cannot read or write each other's state.
|
||||
*
|
||||
* This service implements the server-side backing for the `ctx.state` SDK
|
||||
* client exposed to plugin workers. The host is responsible for:
|
||||
* - enforcing `plugin.state.read` capability before calling `get` / `list`
|
||||
* - enforcing `plugin.state.write` capability before calling `set` / `delete`
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §14 — SDK Surface (`ctx.state`)
|
||||
* @see PLUGIN_SPEC.md §15.1 — Capabilities: Plugin State
|
||||
* @see PLUGIN_SPEC.md §21.3 — `plugin_state` table
|
||||
*/
|
||||
export function pluginStateStore(db: Db) {
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
async function assertPluginExists(pluginId: string): Promise<void> {
|
||||
const rows = await db
|
||||
.select({ id: plugins.id })
|
||||
.from(plugins)
|
||||
.where(eq(plugins.id, pluginId));
|
||||
if (rows.length === 0) {
|
||||
throw notFound(`Plugin not found: ${pluginId}`);
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
return {
|
||||
/**
|
||||
* Read a state value.
|
||||
*
|
||||
* Returns the stored JSON value, or `null` if no entry exists for the
|
||||
* given scope and key.
|
||||
*
|
||||
* Requires `plugin.state.read` capability (enforced by the caller).
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
* @param scopeKind - Granularity of the scope
|
||||
* @param scopeId - Identifier for the scoped entity (null for `instance` scope)
|
||||
* @param stateKey - The key to read
|
||||
* @param namespace - Sub-namespace (defaults to `"default"`)
|
||||
*/
|
||||
get: async (
|
||||
pluginId: string,
|
||||
scopeKind: PluginStateScopeKind,
|
||||
stateKey: string,
|
||||
{
|
||||
scopeId,
|
||||
namespace = DEFAULT_NAMESPACE,
|
||||
}: { scopeId?: string; namespace?: string } = {},
|
||||
): Promise<unknown> => {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(pluginState)
|
||||
.where(scopeConditions(pluginId, scopeKind, scopeId, namespace, stateKey));
|
||||
|
||||
return rows[0]?.valueJson ?? null;
|
||||
},
|
||||
|
||||
/**
|
||||
* Write (create or replace) a state value.
|
||||
*
|
||||
* Uses an upsert so the caller does not need to check for prior existence.
|
||||
* On conflict (same composite key) the existing row's `value_json` and
|
||||
* `updated_at` are overwritten.
|
||||
*
|
||||
* Requires `plugin.state.write` capability (enforced by the caller).
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
* @param input - Scope key and value to store
|
||||
*/
|
||||
set: async (pluginId: string, input: SetPluginState): Promise<void> => {
|
||||
await assertPluginExists(pluginId);
|
||||
|
||||
const namespace = input.namespace ?? DEFAULT_NAMESPACE;
|
||||
const scopeId = input.scopeId ?? null;
|
||||
|
||||
await db
|
||||
.insert(pluginState)
|
||||
.values({
|
||||
pluginId,
|
||||
scopeKind: input.scopeKind,
|
||||
scopeId,
|
||||
namespace,
|
||||
stateKey: input.stateKey,
|
||||
valueJson: input.value,
|
||||
updatedAt: new Date(),
|
||||
})
|
||||
.onConflictDoUpdate({
|
||||
target: [
|
||||
pluginState.pluginId,
|
||||
pluginState.scopeKind,
|
||||
pluginState.scopeId,
|
||||
pluginState.namespace,
|
||||
pluginState.stateKey,
|
||||
],
|
||||
set: {
|
||||
valueJson: input.value,
|
||||
updatedAt: new Date(),
|
||||
},
|
||||
});
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete a state value.
|
||||
*
|
||||
* No-ops silently if the entry does not exist (idempotent by design).
|
||||
*
|
||||
* Requires `plugin.state.write` capability (enforced by the caller).
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
* @param scopeKind - Granularity of the scope
|
||||
* @param stateKey - The key to delete
|
||||
* @param scopeId - Identifier for the scoped entity (null for `instance` scope)
|
||||
* @param namespace - Sub-namespace (defaults to `"default"`)
|
||||
*/
|
||||
delete: async (
|
||||
pluginId: string,
|
||||
scopeKind: PluginStateScopeKind,
|
||||
stateKey: string,
|
||||
{
|
||||
scopeId,
|
||||
namespace = DEFAULT_NAMESPACE,
|
||||
}: { scopeId?: string; namespace?: string } = {},
|
||||
): Promise<void> => {
|
||||
await db
|
||||
.delete(pluginState)
|
||||
.where(scopeConditions(pluginId, scopeKind, scopeId, namespace, stateKey));
|
||||
},
|
||||
|
||||
/**
|
||||
* List all state entries for a plugin, optionally filtered by scope.
|
||||
*
|
||||
* Returns all matching rows as `PluginStateRecord`-shaped objects.
|
||||
* The `valueJson` field contains the stored value.
|
||||
*
|
||||
* Requires `plugin.state.read` capability (enforced by the caller).
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
* @param filter - Optional scope filters (scopeKind, scopeId, namespace)
|
||||
*/
|
||||
list: async (pluginId: string, filter: ListPluginState = {}): Promise<typeof pluginState.$inferSelect[]> => {
|
||||
const conditions = [eq(pluginState.pluginId, pluginId)];
|
||||
|
||||
if (filter.scopeKind !== undefined) {
|
||||
conditions.push(eq(pluginState.scopeKind, filter.scopeKind));
|
||||
}
|
||||
if (filter.scopeId !== undefined) {
|
||||
conditions.push(eq(pluginState.scopeId, filter.scopeId));
|
||||
}
|
||||
if (filter.namespace !== undefined) {
|
||||
conditions.push(eq(pluginState.namespace, filter.namespace));
|
||||
}
|
||||
|
||||
return db
|
||||
.select()
|
||||
.from(pluginState)
|
||||
.where(and(...conditions));
|
||||
},
|
||||
|
||||
/**
|
||||
* Delete all state entries owned by a plugin.
|
||||
*
|
||||
* Called during plugin uninstall when `removeData = true`. Also useful
|
||||
* for resetting a plugin's state during testing.
|
||||
*
|
||||
* @param pluginId - UUID of the owning plugin
|
||||
*/
|
||||
deleteAll: async (pluginId: string): Promise<void> => {
|
||||
await db
|
||||
.delete(pluginState)
|
||||
.where(eq(pluginState.pluginId, pluginId));
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
export type PluginStateStore = ReturnType<typeof pluginStateStore>;
|
||||
81
server/src/services/plugin-stream-bus.ts
Normal file
81
server/src/services/plugin-stream-bus.ts
Normal file
@@ -0,0 +1,81 @@
|
||||
/**
|
||||
* In-memory pub/sub bus for plugin SSE streams.
|
||||
*
|
||||
* Workers emit stream events via JSON-RPC notifications. The bus fans out
|
||||
* each event to all connected SSE clients that match the (pluginId, channel,
|
||||
* companyId) tuple.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §19.8 — Real-Time Streaming
|
||||
*/
|
||||
|
||||
/** Valid SSE event types for plugin streams. */
|
||||
export type StreamEventType = "message" | "open" | "close" | "error";
|
||||
|
||||
export type StreamSubscriber = (event: unknown, eventType: StreamEventType) => void;
|
||||
|
||||
/**
|
||||
* Composite key for stream subscriptions: pluginId:channel:companyId
|
||||
*/
|
||||
function streamKey(pluginId: string, channel: string, companyId: string): string {
|
||||
return `${pluginId}:${channel}:${companyId}`;
|
||||
}
|
||||
|
||||
export interface PluginStreamBus {
  /**
   * Subscribe to stream events for a specific (pluginId, channel, companyId).
   *
   * @param pluginId - The plugin whose stream to observe
   * @param channel - The stream channel name
   * @param companyId - Tenant scope for the subscription
   * @param listener - Invoked once per published event with `(event, eventType)`
   * @returns An unsubscribe function; calling it removes `listener` from
   *   this (pluginId, channel, companyId) tuple.
   */
  subscribe(
    pluginId: string,
    channel: string,
    companyId: string,
    listener: StreamSubscriber,
  ): () => void;

  /**
   * Publish an event to all subscribers of (pluginId, channel, companyId).
   * Called by the worker manager when it receives a stream notification.
   *
   * @param event - Arbitrary event payload forwarded to each listener
   * @param eventType - SSE event type; the in-memory implementation in this
   *   file treats an omitted value as `"message"`.
   */
  publish(
    pluginId: string,
    channel: string,
    companyId: string,
    event: unknown,
    eventType?: StreamEventType,
  ): void;
}
|
||||
|
||||
/**
|
||||
* Create a new PluginStreamBus instance.
|
||||
*/
|
||||
export function createPluginStreamBus(): PluginStreamBus {
|
||||
const subscribers = new Map<string, Set<StreamSubscriber>>();
|
||||
|
||||
return {
|
||||
subscribe(pluginId, channel, companyId, listener) {
|
||||
const key = streamKey(pluginId, channel, companyId);
|
||||
let set = subscribers.get(key);
|
||||
if (!set) {
|
||||
set = new Set();
|
||||
subscribers.set(key, set);
|
||||
}
|
||||
set.add(listener);
|
||||
|
||||
return () => {
|
||||
set!.delete(listener);
|
||||
if (set!.size === 0) {
|
||||
subscribers.delete(key);
|
||||
}
|
||||
};
|
||||
},
|
||||
|
||||
publish(pluginId, channel, companyId, event, eventType: StreamEventType = "message") {
|
||||
const key = streamKey(pluginId, channel, companyId);
|
||||
const set = subscribers.get(key);
|
||||
if (!set) return;
|
||||
for (const listener of set) {
|
||||
listener(event, eventType);
|
||||
}
|
||||
},
|
||||
};
|
||||
}
|
||||
448
server/src/services/plugin-tool-dispatcher.ts
Normal file
448
server/src/services/plugin-tool-dispatcher.ts
Normal file
@@ -0,0 +1,448 @@
|
||||
/**
|
||||
* PluginToolDispatcher — orchestrates plugin tool discovery, lifecycle
|
||||
* integration, and execution routing for the agent service.
|
||||
*
|
||||
* This service sits between the agent service and the lower-level
|
||||
* `PluginToolRegistry` + `PluginWorkerManager`, providing a clean API that:
|
||||
*
|
||||
* - Discovers tools from loaded plugin manifests and registers them
|
||||
* in the tool registry.
|
||||
* - Hooks into `PluginLifecycleManager` events to automatically register
|
||||
* and unregister tools when plugins are enabled or disabled.
|
||||
* - Exposes the tool list in an agent-friendly format (with namespaced
|
||||
* names, descriptions, parameter schemas).
|
||||
* - Routes `executeTool` calls to the correct plugin worker and returns
|
||||
* structured results.
|
||||
* - Validates tool parameters against declared schemas before dispatch.
|
||||
*
|
||||
* The dispatcher is created once at server startup and shared across
|
||||
* the application.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §11 — Agent Tools
|
||||
* @see PLUGIN_SPEC.md §13.10 — `executeTool`
|
||||
*/
|
||||
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import type {
|
||||
PaperclipPluginManifestV1,
|
||||
PluginRecord,
|
||||
} from "@paperclipai/shared";
|
||||
import type { ToolRunContext, ToolResult } from "@paperclipai/plugin-sdk";
|
||||
import type { PluginWorkerManager } from "./plugin-worker-manager.js";
|
||||
import type { PluginLifecycleManager } from "./plugin-lifecycle.js";
|
||||
import {
|
||||
createPluginToolRegistry,
|
||||
type PluginToolRegistry,
|
||||
type RegisteredTool,
|
||||
type ToolListFilter,
|
||||
type ToolExecutionResult,
|
||||
} from "./plugin-tool-registry.js";
|
||||
import { pluginRegistryService } from "./plugin-registry.js";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Types
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * An agent-facing tool descriptor — the shape returned when agents
 * query for available tools.
 *
 * This is intentionally simpler than `RegisteredTool`, exposing only
 * what agents need to decide whether and how to call a tool.
 */
export interface AgentToolDescriptor {
  /** Fully namespaced tool name (e.g. `"acme.linear:search-issues"`). */
  name: string;
  /** Human-readable display name. */
  displayName: string;
  /** Description for the agent — explains when and how to use this tool. */
  description: string;
  /** JSON Schema describing the tool's input parameters. */
  parametersSchema: Record<string, unknown>;
  /**
   * The plugin that provides this tool. Populated from the registry's
   * `pluginDbId` field (the database UUID, or the plugin key when no
   * DB id was supplied at registration).
   */
  pluginId: string;
}

/**
 * Options for creating the plugin tool dispatcher.
 *
 * All collaborators are optional so the dispatcher can be constructed in
 * tests or degraded environments; omitting one disables the corresponding
 * feature (see each field).
 */
export interface PluginToolDispatcherOptions {
  /**
   * The worker manager used to dispatch RPC calls to plugin workers.
   * When omitted, the underlying registry's `executeTool` cannot route
   * calls and will throw.
   */
  workerManager?: PluginWorkerManager;
  /**
   * The lifecycle manager to listen for plugin state changes. When
   * omitted, tools are not auto-registered/unregistered on plugin
   * enable/disable (a warning is logged at initialize).
   */
  lifecycleManager?: PluginLifecycleManager;
  /**
   * Database connection for looking up plugin records. When omitted,
   * `initialize` skips loading tools from ready plugins and lifecycle
   * enable events cannot re-register tools.
   */
  db?: Db;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PluginToolDispatcher interface
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * The plugin tool dispatcher — the primary integration point between the
 * agent service and the plugin tool system.
 *
 * Agents use this service to:
 * 1. List all available tools (for prompt construction / tool choice)
 * 2. Execute a specific tool by its namespaced name
 *
 * The dispatcher handles lifecycle management internally — when a plugin
 * is loaded or unloaded, its tools are automatically registered or removed.
 */
export interface PluginToolDispatcher {
  /**
   * Initialize the dispatcher — load tools from all currently-ready plugins
   * and start listening for lifecycle events.
   *
   * Must be called once at server startup after the lifecycle manager
   * and worker manager are ready. A second call is a logged no-op.
   */
  initialize(): Promise<void>;

  /**
   * Tear down the dispatcher — unregister lifecycle event listeners
   * and clear all tool registrations.
   *
   * Called during server shutdown.
   *
   * NOTE(review): the implementation in this file deliberately does NOT
   * clear the registry on teardown so in-flight tool calls can still
   * resolve their entries during graceful shutdown.
   */
  teardown(): void;

  /**
   * List all available tools for agents, optionally filtered.
   *
   * Returns tool descriptors in an agent-friendly format.
   *
   * @param filter - Optional filter criteria
   * @returns Array of agent tool descriptors
   */
  listToolsForAgent(filter?: ToolListFilter): AgentToolDescriptor[];

  /**
   * Look up a tool by its namespaced name.
   *
   * @param namespacedName - e.g. `"acme.linear:search-issues"`
   * @returns The registered tool, or `null` if not found
   */
  getTool(namespacedName: string): RegisteredTool | null;

  /**
   * Execute a tool by its namespaced name, routing to the correct
   * plugin worker.
   *
   * @param namespacedName - Fully qualified tool name
   * @param parameters - Input parameters matching the tool's schema
   * @param runContext - Agent run context
   * @returns The execution result with routing metadata
   * @throws {Error} if the tool is not found, the worker is not running,
   *   or the tool execution fails
   */
  executeTool(
    namespacedName: string,
    parameters: unknown,
    runContext: ToolRunContext,
  ): Promise<ToolExecutionResult>;

  /**
   * Register all tools from a plugin manifest.
   *
   * This is called automatically when a plugin transitions to `ready`.
   * Can also be called manually for testing or recovery scenarios.
   *
   * NOTE(review): the implementation forwards to the registry without a
   * `pluginDbId`, so the registry falls back to `pluginId` for worker
   * routing — confirm callers pass the DB UUID where routing matters.
   *
   * @param pluginId - The plugin's unique identifier
   * @param manifest - The plugin manifest containing tool declarations
   */
  registerPluginTools(
    pluginId: string,
    manifest: PaperclipPluginManifestV1,
  ): void;

  /**
   * Unregister all tools for a plugin.
   *
   * Called automatically when a plugin is disabled or unloaded (lifecycle
   * events pass the plugin *key*; this manual API takes whatever id the
   * tools were registered under).
   *
   * @param pluginId - The plugin to unregister
   */
  unregisterPluginTools(pluginId: string): void;

  /**
   * Get the total number of registered tools, optionally scoped to a plugin.
   *
   * @param pluginId - If provided, count only this plugin's tools
   */
  toolCount(pluginId?: string): number;

  /**
   * Access the underlying tool registry for advanced operations.
   *
   * This escape hatch exists for internal use (e.g. diagnostics).
   * Prefer the dispatcher's own methods for normal operations.
   */
  getRegistry(): PluginToolRegistry;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Factory: createPluginToolDispatcher
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Create a new `PluginToolDispatcher`.
 *
 * The dispatcher:
 * 1. Creates and owns a `PluginToolRegistry` backed by the given worker manager.
 * 2. Listens for lifecycle events (plugin.enabled, plugin.disabled, plugin.unloaded)
 *    to automatically register and unregister tools.
 * 3. On `initialize()`, loads tools from all currently-ready plugins via the DB.
 *
 * @param options - Configuration options
 *
 * @example
 * ```ts
 * // At server startup
 * const dispatcher = createPluginToolDispatcher({
 *   workerManager,
 *   lifecycleManager,
 *   db,
 * });
 * await dispatcher.initialize();
 *
 * // In agent service — list tools for prompt construction
 * const tools = dispatcher.listToolsForAgent();
 *
 * // In agent service — execute a tool
 * const result = await dispatcher.executeTool(
 *   "acme.linear:search-issues",
 *   { query: "auth bug" },
 *   { agentId: "a-1", runId: "r-1", companyId: "c-1", projectId: "p-1" },
 * );
 * ```
 */
export function createPluginToolDispatcher(
  options: PluginToolDispatcherOptions = {},
): PluginToolDispatcher {
  const { workerManager, lifecycleManager, db } = options;
  const log = logger.child({ service: "plugin-tool-dispatcher" });

  // Create the underlying tool registry, backed by the worker manager.
  // When workerManager is undefined, the registry cannot route executeTool.
  const registry = createPluginToolRegistry(workerManager);

  // Track lifecycle event listeners so we can remove them on teardown
  let enabledListener: ((payload: { pluginId: string; pluginKey: string }) => void) | null = null;
  let disabledListener: ((payload: { pluginId: string; pluginKey: string; reason?: string }) => void) | null = null;
  let unloadedListener: ((payload: { pluginId: string; pluginKey: string; removeData: boolean }) => void) | null = null;

  // Guards double-initialize and makes teardown a no-op before initialize.
  let initialized = false;

  // -----------------------------------------------------------------------
  // Internal helpers
  // -----------------------------------------------------------------------

  /**
   * Attempt to register tools for a plugin by looking up its manifest
   * from the DB. No-ops gracefully (with a warning) if the DB, plugin,
   * or manifest is missing.
   *
   * Registers under the plugin *key* (namespace prefix) with the DB UUID
   * as the routing id — mirroring the registration done in `initialize`.
   */
  async function registerFromDb(pluginId: string): Promise<void> {
    if (!db) {
      log.warn(
        { pluginId },
        "cannot register tools from DB — no database connection configured",
      );
      return;
    }

    const pluginRegistry = pluginRegistryService(db);
    // NOTE(review): `as PluginRecord | null` bypasses the checker —
    // presumably getById's declared return type is looser; verify.
    const plugin = await pluginRegistry.getById(pluginId) as PluginRecord | null;

    if (!plugin) {
      log.warn({ pluginId }, "plugin not found in registry, cannot register tools");
      return;
    }

    const manifest = plugin.manifestJson;
    if (!manifest) {
      log.warn({ pluginId }, "plugin has no manifest, cannot register tools");
      return;
    }

    registry.registerPlugin(plugin.pluginKey, manifest, plugin.id);
  }

  /**
   * Convert a `RegisteredTool` to an `AgentToolDescriptor`.
   * Note: the descriptor's `pluginId` is the registry's `pluginDbId`.
   */
  function toAgentDescriptor(tool: RegisteredTool): AgentToolDescriptor {
    return {
      name: tool.namespacedName,
      displayName: tool.displayName,
      description: tool.description,
      parametersSchema: tool.parametersSchema,
      pluginId: tool.pluginDbId,
    };
  }

  // -----------------------------------------------------------------------
  // Lifecycle event handlers
  // -----------------------------------------------------------------------

  function handlePluginEnabled(payload: { pluginId: string; pluginKey: string }): void {
    log.debug({ pluginId: payload.pluginId, pluginKey: payload.pluginKey }, "plugin enabled — registering tools");
    // Async registration from DB — we fire-and-forget since the lifecycle
    // event handler must be synchronous. Any errors are logged.
    void registerFromDb(payload.pluginId).catch((err) => {
      log.error(
        { pluginId: payload.pluginId, err: err instanceof Error ? err.message : String(err) },
        "failed to register tools after plugin enabled",
      );
    });
  }

  // Disabled/unloaded handlers unregister by plugin *key* — matching the
  // key used by registerFromDb/initialize when registering.
  function handlePluginDisabled(payload: { pluginId: string; pluginKey: string; reason?: string }): void {
    log.debug({ pluginId: payload.pluginId, pluginKey: payload.pluginKey }, "plugin disabled — unregistering tools");
    registry.unregisterPlugin(payload.pluginKey);
  }

  function handlePluginUnloaded(payload: { pluginId: string; pluginKey: string; removeData: boolean }): void {
    log.debug({ pluginId: payload.pluginId, pluginKey: payload.pluginKey }, "plugin unloaded — unregistering tools");
    registry.unregisterPlugin(payload.pluginKey);
  }

  // -----------------------------------------------------------------------
  // Public API
  // -----------------------------------------------------------------------

  return {
    async initialize(): Promise<void> {
      if (initialized) {
        log.warn("dispatcher already initialized, skipping");
        return;
      }

      log.info("initializing plugin tool dispatcher");

      // Step 1: Load tools from all currently-ready plugins
      if (db) {
        const pluginRegistry = pluginRegistryService(db);
        // NOTE(review): cast narrows listByStatus's declared type; verify.
        const readyPlugins = await pluginRegistry.listByStatus("ready") as PluginRecord[];

        let totalTools = 0;
        for (const plugin of readyPlugins) {
          const manifest = plugin.manifestJson;
          // Only plugins that declare at least one tool are registered here
          // (registerFromDb, by contrast, registers any manifest).
          if (manifest?.tools && manifest.tools.length > 0) {
            registry.registerPlugin(plugin.pluginKey, manifest, plugin.id);
            totalTools += manifest.tools.length;
          }
        }

        log.info(
          { readyPlugins: readyPlugins.length, registeredTools: totalTools },
          "loaded tools from ready plugins",
        );
      }

      // Step 2: Subscribe to lifecycle events for dynamic updates
      if (lifecycleManager) {
        enabledListener = handlePluginEnabled;
        disabledListener = handlePluginDisabled;
        unloadedListener = handlePluginUnloaded;

        lifecycleManager.on("plugin.enabled", enabledListener);
        lifecycleManager.on("plugin.disabled", disabledListener);
        lifecycleManager.on("plugin.unloaded", unloadedListener);

        log.debug("subscribed to lifecycle events");
      } else {
        log.warn("no lifecycle manager provided — tools will not auto-update on plugin state changes");
      }

      initialized = true;
      log.info(
        { totalTools: registry.toolCount() },
        "plugin tool dispatcher initialized",
      );
    },

    teardown(): void {
      if (!initialized) return;

      // Unsubscribe from lifecycle events
      if (lifecycleManager) {
        if (enabledListener) lifecycleManager.off("plugin.enabled", enabledListener);
        if (disabledListener) lifecycleManager.off("plugin.disabled", disabledListener);
        if (unloadedListener) lifecycleManager.off("plugin.unloaded", unloadedListener);

        enabledListener = null;
        disabledListener = null;
        unloadedListener = null;
      }

      // Note: we do NOT clear the registry here because teardown may be
      // called during graceful shutdown where in-flight tool calls should
      // still be able to resolve their tool entries.

      initialized = false;
      log.info("plugin tool dispatcher torn down");
    },

    listToolsForAgent(filter?: ToolListFilter): AgentToolDescriptor[] {
      return registry.listTools(filter).map(toAgentDescriptor);
    },

    getTool(namespacedName: string): RegisteredTool | null {
      return registry.getTool(namespacedName);
    },

    async executeTool(
      namespacedName: string,
      parameters: unknown,
      runContext: ToolRunContext,
    ): Promise<ToolExecutionResult> {
      log.debug(
        {
          tool: namespacedName,
          agentId: runContext.agentId,
          runId: runContext.runId,
        },
        "dispatching tool execution",
      );

      // Routing, lookup, and (per the registry contract) any validation
      // happen inside the registry; errors propagate to the caller.
      const result = await registry.executeTool(
        namespacedName,
        parameters,
        runContext,
      );

      log.debug(
        {
          tool: namespacedName,
          pluginId: result.pluginId,
          hasContent: !!result.result.content,
          hasError: !!result.result.error,
        },
        "tool execution completed",
      );

      return result;
    },

    registerPluginTools(
      pluginId: string,
      manifest: PaperclipPluginManifestV1,
    ): void {
      // No pluginDbId is passed — the registry falls back to `pluginId`
      // for routing. Lifecycle-driven registration DOES pass the DB UUID.
      registry.registerPlugin(pluginId, manifest);
    },

    unregisterPluginTools(pluginId: string): void {
      registry.unregisterPlugin(pluginId);
    },

    toolCount(pluginId?: string): number {
      return registry.toolCount(pluginId);
    },

    getRegistry(): PluginToolRegistry {
      return registry;
    },
  };
}
|
||||
449
server/src/services/plugin-tool-registry.ts
Normal file
449
server/src/services/plugin-tool-registry.ts
Normal file
@@ -0,0 +1,449 @@
|
||||
/**
|
||||
* PluginToolRegistry — host-side registry for plugin-contributed agent tools.
|
||||
*
|
||||
* Responsibilities:
|
||||
* - Store tool declarations (from plugin manifests) alongside routing metadata
|
||||
* so the host can resolve namespaced tool names to the owning plugin worker.
|
||||
* - Namespace tools automatically: a tool `"search-issues"` from plugin
|
||||
* `"acme.linear"` is exposed to agents as `"acme.linear:search-issues"`.
|
||||
* - Route `executeTool` calls to the correct plugin worker via the
|
||||
* `PluginWorkerManager`.
|
||||
* - Provide tool discovery queries so agents can list available tools.
|
||||
* - Clean up tool registrations when a plugin is unloaded or its worker stops.
|
||||
*
|
||||
* The registry is an in-memory structure — tool declarations are derived from
|
||||
* the plugin manifest at load time and do not need persistence. When a plugin
|
||||
* worker restarts, the host re-registers its manifest tools.
|
||||
*
|
||||
* @see PLUGIN_SPEC.md §11 — Agent Tools
|
||||
* @see PLUGIN_SPEC.md §13.10 — `executeTool`
|
||||
*/
|
||||
|
||||
import type {
|
||||
PaperclipPluginManifestV1,
|
||||
PluginToolDeclaration,
|
||||
} from "@paperclipai/shared";
|
||||
import type { ToolRunContext, ToolResult, ExecuteToolParams } from "@paperclipai/plugin-sdk";
|
||||
import type { PluginWorkerManager } from "./plugin-worker-manager.js";
|
||||
import { logger } from "../middleware/logger.js";
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Constants
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * Separator between plugin ID and tool name in the namespaced tool identifier.
 *
 * Example: `"acme.linear:search-issues"`
 */
export const TOOL_NAMESPACE_SEPARATOR = ":";

// ---------------------------------------------------------------------------
// Types
// ---------------------------------------------------------------------------

/**
 * A registered tool entry stored in the registry.
 *
 * Combines the manifest-level declaration with routing metadata so the host
 * can resolve a namespaced tool name → plugin worker in O(1).
 */
export interface RegisteredTool {
  /** The plugin key used for namespacing (e.g. `"acme.linear"`). */
  pluginId: string;
  /**
   * The plugin's database UUID, used for worker routing and availability
   * checks. Falls back to `pluginId` when not provided (e.g. in tests
   * where `id === pluginKey`).
   */
  pluginDbId: string;
  /** The tool's bare name (without namespace prefix). */
  name: string;
  /** Fully namespaced identifier: `"<pluginId>:<toolName>"`. */
  namespacedName: string;
  /** Human-readable display name. */
  displayName: string;
  /** Description provided to the agent so it knows when to use this tool. */
  description: string;
  /** JSON Schema describing the tool's input parameters. */
  parametersSchema: Record<string, unknown>;
}

/**
 * Filter criteria for listing available tools.
 */
export interface ToolListFilter {
  /**
   * Only return tools owned by this plugin.
   * NOTE(review): presumably matched against `RegisteredTool.pluginId`
   * (the plugin key) — confirm against the listTools implementation.
   */
  pluginId?: string;
}

/**
 * Result of executing a tool, extending `ToolResult` with routing metadata.
 */
export interface ToolExecutionResult {
  /** The plugin that handled the tool call. */
  pluginId: string;
  /** The bare tool name that was executed. */
  toolName: string;
  /** The result returned by the plugin's tool handler. */
  result: ToolResult;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// PluginToolRegistry interface
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
 * The host-side tool registry — held by the host process.
 *
 * Created once at server startup and shared across the application. Plugins
 * register their tools when their worker starts, and unregister when the
 * worker stops or the plugin is uninstalled.
 */
export interface PluginToolRegistry {
  /**
   * Register all tools declared in a plugin's manifest.
   *
   * Called when a plugin worker starts and its manifest is loaded. Any
   * previously registered tools for the same plugin are replaced (idempotent).
   *
   * @param pluginId - The plugin's unique identifier (e.g. `"acme.linear"`)
   * @param manifest - The plugin manifest containing the `tools` array
   * @param pluginDbId - The plugin's database UUID, used for worker routing
   *   and availability checks. If omitted, `pluginId` is used (backwards-compat).
   */
  registerPlugin(pluginId: string, manifest: PaperclipPluginManifestV1, pluginDbId?: string): void;

  /**
   * Remove all tool registrations for a plugin.
   *
   * Called when a plugin worker stops, crashes, or is uninstalled.
   * Must use the same identifier the tools were registered under.
   *
   * @param pluginId - The plugin to clear
   */
  unregisterPlugin(pluginId: string): void;

  /**
   * Look up a registered tool by its namespaced name.
   *
   * @param namespacedName - Fully qualified name, e.g. `"acme.linear:search-issues"`
   * @returns The registered tool entry, or `null` if not found
   */
  getTool(namespacedName: string): RegisteredTool | null;

  /**
   * Look up a registered tool by plugin ID and bare tool name.
   *
   * @param pluginId - The owning plugin
   * @param toolName - The bare tool name (without namespace prefix)
   * @returns The registered tool entry, or `null` if not found
   */
  getToolByPlugin(pluginId: string, toolName: string): RegisteredTool | null;

  /**
   * List all registered tools, optionally filtered.
   *
   * @param filter - Optional filter criteria
   * @returns Array of registered tool entries
   */
  listTools(filter?: ToolListFilter): RegisteredTool[];

  /**
   * Parse a namespaced tool name into plugin ID and bare tool name.
   *
   * @param namespacedName - e.g. `"acme.linear:search-issues"`
   * @returns `{ pluginId, toolName }` or `null` if the format is invalid
   */
  parseNamespacedName(namespacedName: string): { pluginId: string; toolName: string } | null;

  /**
   * Build a namespaced tool name from a plugin ID and bare tool name.
   * Inverse of `parseNamespacedName` for well-formed inputs.
   *
   * @param pluginId - e.g. `"acme.linear"`
   * @param toolName - e.g. `"search-issues"`
   * @returns The namespaced name, e.g. `"acme.linear:search-issues"`
   */
  buildNamespacedName(pluginId: string, toolName: string): string;

  /**
   * Execute a tool by its namespaced name, routing to the correct plugin worker.
   *
   * Resolves the namespaced name to the owning plugin, validates the tool
   * exists, and dispatches the `executeTool` RPC call to the worker.
   *
   * @param namespacedName - Fully qualified tool name (e.g. `"acme.linear:search-issues"`)
   * @param parameters - The parsed parameters matching the tool's schema
   * @param runContext - Agent run context
   * @returns The execution result with routing metadata
   * @throws {Error} if the tool is not found or the worker is not running
   */
  executeTool(
    namespacedName: string,
    parameters: unknown,
    runContext: ToolRunContext,
  ): Promise<ToolExecutionResult>;

  /**
   * Get the number of registered tools, optionally scoped to a plugin.
   *
   * @param pluginId - If provided, count only this plugin's tools
   */
  toolCount(pluginId?: string): number;
}
|
||||
|
||||
// ---------------------------------------------------------------------------
|
||||
// Factory: createPluginToolRegistry
|
||||
// ---------------------------------------------------------------------------
|
||||
|
||||
/**
|
||||
* Create a new `PluginToolRegistry`.
|
||||
*
|
||||
* The registry is backed by two in-memory maps:
|
||||
* - `byNamespace`: namespaced name → `RegisteredTool` for O(1) lookups.
|
||||
* - `byPlugin`: pluginId → Set of namespaced names for efficient per-plugin ops.
|
||||
*
|
||||
* @param workerManager - The worker manager used to dispatch `executeTool` RPC
|
||||
* calls to plugin workers. If not provided, `executeTool` will throw.
|
||||
*
|
||||
* @example
|
||||
* ```ts
|
||||
* const toolRegistry = createPluginToolRegistry(workerManager);
|
||||
*
|
||||
* // Register tools from a plugin manifest
|
||||
* toolRegistry.registerPlugin("acme.linear", linearManifest);
|
||||
*
|
||||
* // List all available tools for agents
|
||||
* const tools = toolRegistry.listTools();
|
||||
* // → [{ namespacedName: "acme.linear:search-issues", ... }]
|
||||
*
|
||||
* // Execute a tool
|
||||
* const result = await toolRegistry.executeTool(
|
||||
* "acme.linear:search-issues",
|
||||
* { query: "auth bug" },
|
||||
* { agentId: "agent-1", runId: "run-1", companyId: "co-1", projectId: "proj-1" },
|
||||
* );
|
||||
* ```
|
||||
*/
|
||||
export function createPluginToolRegistry(
|
||||
workerManager?: PluginWorkerManager,
|
||||
): PluginToolRegistry {
|
||||
const log = logger.child({ service: "plugin-tool-registry" });
|
||||
|
||||
// Primary index: namespaced name → tool entry
|
||||
const byNamespace = new Map<string, RegisteredTool>();
|
||||
|
||||
// Secondary index: pluginId → set of namespaced names (for bulk operations)
|
||||
const byPlugin = new Map<string, Set<string>>();
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Internal helpers
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
function buildName(pluginId: string, toolName: string): string {
|
||||
return `${pluginId}${TOOL_NAMESPACE_SEPARATOR}${toolName}`;
|
||||
}
|
||||
|
||||
function parseName(namespacedName: string): { pluginId: string; toolName: string } | null {
|
||||
const sepIndex = namespacedName.lastIndexOf(TOOL_NAMESPACE_SEPARATOR);
|
||||
if (sepIndex <= 0 || sepIndex >= namespacedName.length - 1) {
|
||||
return null;
|
||||
}
|
||||
return {
|
||||
pluginId: namespacedName.slice(0, sepIndex),
|
||||
toolName: namespacedName.slice(sepIndex + 1),
|
||||
};
|
||||
}
|
||||
|
||||
function addTool(pluginId: string, decl: PluginToolDeclaration, pluginDbId: string): void {
|
||||
const namespacedName = buildName(pluginId, decl.name);
|
||||
|
||||
const entry: RegisteredTool = {
|
||||
pluginId,
|
||||
pluginDbId,
|
||||
name: decl.name,
|
||||
namespacedName,
|
||||
displayName: decl.displayName,
|
||||
description: decl.description,
|
||||
parametersSchema: decl.parametersSchema,
|
||||
};
|
||||
|
||||
byNamespace.set(namespacedName, entry);
|
||||
|
||||
let pluginTools = byPlugin.get(pluginId);
|
||||
if (!pluginTools) {
|
||||
pluginTools = new Set();
|
||||
byPlugin.set(pluginId, pluginTools);
|
||||
}
|
||||
pluginTools.add(namespacedName);
|
||||
}
|
||||
|
||||
function removePluginTools(pluginId: string): number {
|
||||
const pluginTools = byPlugin.get(pluginId);
|
||||
if (!pluginTools) return 0;
|
||||
|
||||
const count = pluginTools.size;
|
||||
for (const name of pluginTools) {
|
||||
byNamespace.delete(name);
|
||||
}
|
||||
byPlugin.delete(pluginId);
|
||||
|
||||
return count;
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------
|
||||
// Public API
|
||||
// -----------------------------------------------------------------------
|
||||
|
||||
return {
|
||||
registerPlugin(pluginId: string, manifest: PaperclipPluginManifestV1, pluginDbId?: string): void {
|
||||
const dbId = pluginDbId ?? pluginId;
|
||||
|
||||
// Remove any previously registered tools for this plugin (idempotent)
|
||||
const previousCount = removePluginTools(pluginId);
|
||||
if (previousCount > 0) {
|
||||
log.debug(
|
||||
{ pluginId, previousCount },
|
||||
"cleared previous tool registrations before re-registering",
|
||||
);
|
||||
}
|
||||
|
||||
const tools = manifest.tools ?? [];
|
||||
if (tools.length === 0) {
|
||||
log.debug({ pluginId }, "plugin declares no tools");
|
||||
return;
|
||||
}
|
||||
|
||||
for (const decl of tools) {
|
||||
addTool(pluginId, decl, dbId);
|
||||
}
|
||||
|
||||
log.info(
|
||||
{
|
||||
pluginId,
|
||||
toolCount: tools.length,
|
||||
tools: tools.map((t) => buildName(pluginId, t.name)),
|
||||
},
|
||||
`registered ${tools.length} tool(s) for plugin`,
|
||||
);
|
||||
},
|
||||
|
||||
unregisterPlugin(pluginId: string): void {
|
||||
const removed = removePluginTools(pluginId);
|
||||
if (removed > 0) {
|
||||
log.info(
|
||||
{ pluginId, removedCount: removed },
|
||||
`unregistered ${removed} tool(s) for plugin`,
|
||||
);
|
||||
}
|
||||
},
|
||||
|
||||
getTool(namespacedName: string): RegisteredTool | null {
|
||||
return byNamespace.get(namespacedName) ?? null;
|
||||
},
|
||||
|
||||
getToolByPlugin(pluginId: string, toolName: string): RegisteredTool | null {
|
||||
const namespacedName = buildName(pluginId, toolName);
|
||||
return byNamespace.get(namespacedName) ?? null;
|
||||
},
|
||||
|
||||
listTools(filter?: ToolListFilter): RegisteredTool[] {
|
||||
if (filter?.pluginId) {
|
||||
const pluginTools = byPlugin.get(filter.pluginId);
|
||||
if (!pluginTools) return [];
|
||||
const result: RegisteredTool[] = [];
|
||||
for (const name of pluginTools) {
|
||||
const tool = byNamespace.get(name);
|
||||
if (tool) result.push(tool);
|
||||
}
|
||||
return result;
|
||||
}
|
||||
|
||||
return Array.from(byNamespace.values());
|
||||
},
|
||||
|
||||
parseNamespacedName(namespacedName: string): { pluginId: string; toolName: string } | null {
|
||||
return parseName(namespacedName);
|
||||
},
|
||||
|
||||
buildNamespacedName(pluginId: string, toolName: string): string {
|
||||
return buildName(pluginId, toolName);
|
||||
},
|
||||
|
||||
async executeTool(
|
||||
namespacedName: string,
|
||||
parameters: unknown,
|
||||
runContext: ToolRunContext,
|
||||
): Promise<ToolExecutionResult> {
|
||||
// 1. Resolve the namespaced name
|
||||
const parsed = parseName(namespacedName);
|
||||
if (!parsed) {
|
||||
throw new Error(
|
||||
`Invalid tool name "${namespacedName}". Expected format: "<pluginId>${TOOL_NAMESPACE_SEPARATOR}<toolName>"`,
|
||||
);
|
||||
}
|
||||
|
||||
const { pluginId, toolName } = parsed;
|
||||
|
||||
// 2. Verify the tool is registered
|
||||
const tool = byNamespace.get(namespacedName);
|
||||
if (!tool) {
|
||||
throw new Error(
|
||||
`Tool "${namespacedName}" is not registered. ` +
|
||||
`The plugin may not be installed or its worker may not be running.`,
|
||||
);
|
||||
}
|
||||
|
||||
// 3. Verify the worker manager is available
|
||||
if (!workerManager) {
|
||||
throw new Error(
|
||||
`Cannot execute tool "${namespacedName}" — no worker manager configured. ` +
|
||||
`Tool execution requires a PluginWorkerManager.`,
|
||||
);
|
||||
}
|
||||
|
||||
// 4. Verify the plugin worker is running (use DB UUID for worker lookup)
|
||||
const dbId = tool.pluginDbId;
|
||||
if (!workerManager.isRunning(dbId)) {
|
||||
throw new Error(
|
||||
`Cannot execute tool "${namespacedName}" — ` +
|
||||
`worker for plugin "${pluginId}" is not running.`,
|
||||
);
|
||||
}
|
||||
|
||||
// 5. Dispatch the executeTool RPC call to the worker
|
||||
log.debug(
|
||||
{ pluginId, pluginDbId: dbId, toolName, namespacedName, agentId: runContext.agentId, runId: runContext.runId },
|
||||
"executing tool via plugin worker",
|
||||
);
|
||||
|
||||
const rpcParams: ExecuteToolParams = {
|
||||
toolName,
|
||||
parameters,
|
||||
runContext,
|
||||
};
|
||||
|
||||
const result = await workerManager.call(dbId, "executeTool", rpcParams);
|
||||
|
||||
log.debug(
|
||||
{
|
||||
pluginId,
|
||||
toolName,
|
||||
namespacedName,
|
||||
hasContent: !!result.content,
|
||||
hasData: result.data !== undefined,
|
||||
hasError: !!result.error,
|
||||
},
|
||||
"tool execution completed",
|
||||
);
|
||||
|
||||
return { pluginId, toolName, result };
|
||||
},
|
||||
|
||||
toolCount(pluginId?: string): number {
|
||||
if (pluginId !== undefined) {
|
||||
return byPlugin.get(pluginId)?.size ?? 0;
|
||||
}
|
||||
return byNamespace.size;
|
||||
},
|
||||
};
|
||||
}
|
||||
1342
server/src/services/plugin-worker-manager.ts
Normal file
1342
server/src/services/plugin-worker-manager.ts
Normal file
File diff suppressed because it is too large
Load Diff
@@ -1,17 +1,22 @@
|
||||
import { and, asc, desc, eq, inArray } from "drizzle-orm";
|
||||
import type { Db } from "@paperclipai/db";
|
||||
import { projects, projectGoals, goals, projectWorkspaces } from "@paperclipai/db";
|
||||
import { projects, projectGoals, goals, projectWorkspaces, workspaceRuntimeServices } from "@paperclipai/db";
|
||||
import {
|
||||
PROJECT_COLORS,
|
||||
deriveProjectUrlKey,
|
||||
isUuidLike,
|
||||
normalizeProjectUrlKey,
|
||||
type ProjectExecutionWorkspacePolicy,
|
||||
type ProjectGoalRef,
|
||||
type ProjectWorkspace,
|
||||
type WorkspaceRuntimeService,
|
||||
} from "@paperclipai/shared";
|
||||
import { listWorkspaceRuntimeServicesForProjectWorkspaces } from "./workspace-runtime.js";
|
||||
import { parseProjectExecutionWorkspacePolicy } from "./execution-workspace-policy.js";
|
||||
|
||||
type ProjectRow = typeof projects.$inferSelect;
|
||||
type ProjectWorkspaceRow = typeof projectWorkspaces.$inferSelect;
|
||||
type WorkspaceRuntimeServiceRow = typeof workspaceRuntimeServices.$inferSelect;
|
||||
const REPO_ONLY_CWD_SENTINEL = "/__paperclip_repo_only__";
|
||||
type CreateWorkspaceInput = {
|
||||
name?: string | null;
|
||||
@@ -23,14 +28,24 @@ type CreateWorkspaceInput = {
|
||||
};
|
||||
type UpdateWorkspaceInput = Partial<CreateWorkspaceInput>;
|
||||
|
||||
interface ProjectWithGoals extends ProjectRow {
|
||||
interface ProjectWithGoals extends Omit<ProjectRow, "executionWorkspacePolicy"> {
|
||||
urlKey: string;
|
||||
goalIds: string[];
|
||||
goals: ProjectGoalRef[];
|
||||
executionWorkspacePolicy: ProjectExecutionWorkspacePolicy | null;
|
||||
workspaces: ProjectWorkspace[];
|
||||
primaryWorkspace: ProjectWorkspace | null;
|
||||
}
|
||||
|
||||
interface ProjectShortnameRow {
|
||||
id: string;
|
||||
name: string;
|
||||
}
|
||||
|
||||
interface ResolveProjectNameOptions {
|
||||
excludeProjectId?: string | null;
|
||||
}
|
||||
|
||||
/** Batch-load goal refs for a set of projects. */
|
||||
async function attachGoals(db: Db, rows: ProjectRow[]): Promise<ProjectWithGoals[]> {
|
||||
if (rows.length === 0) return [];
|
||||
@@ -65,11 +80,46 @@ async function attachGoals(db: Db, rows: ProjectRow[]): Promise<ProjectWithGoals
|
||||
urlKey: deriveProjectUrlKey(r.name, r.id),
|
||||
goalIds: g.map((x) => x.id),
|
||||
goals: g,
|
||||
executionWorkspacePolicy: parseProjectExecutionWorkspacePolicy(r.executionWorkspacePolicy),
|
||||
} as ProjectWithGoals;
|
||||
});
|
||||
}
|
||||
|
||||
function toWorkspace(row: ProjectWorkspaceRow): ProjectWorkspace {
|
||||
function toRuntimeService(row: WorkspaceRuntimeServiceRow): WorkspaceRuntimeService {
|
||||
return {
|
||||
id: row.id,
|
||||
companyId: row.companyId,
|
||||
projectId: row.projectId ?? null,
|
||||
projectWorkspaceId: row.projectWorkspaceId ?? null,
|
||||
issueId: row.issueId ?? null,
|
||||
scopeType: row.scopeType as WorkspaceRuntimeService["scopeType"],
|
||||
scopeId: row.scopeId ?? null,
|
||||
serviceName: row.serviceName,
|
||||
status: row.status as WorkspaceRuntimeService["status"],
|
||||
lifecycle: row.lifecycle as WorkspaceRuntimeService["lifecycle"],
|
||||
reuseKey: row.reuseKey ?? null,
|
||||
command: row.command ?? null,
|
||||
cwd: row.cwd ?? null,
|
||||
port: row.port ?? null,
|
||||
url: row.url ?? null,
|
||||
provider: row.provider as WorkspaceRuntimeService["provider"],
|
||||
providerRef: row.providerRef ?? null,
|
||||
ownerAgentId: row.ownerAgentId ?? null,
|
||||
startedByRunId: row.startedByRunId ?? null,
|
||||
lastUsedAt: row.lastUsedAt,
|
||||
startedAt: row.startedAt,
|
||||
stoppedAt: row.stoppedAt ?? null,
|
||||
stopPolicy: (row.stopPolicy as Record<string, unknown> | null) ?? null,
|
||||
healthStatus: row.healthStatus as WorkspaceRuntimeService["healthStatus"],
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
function toWorkspace(
|
||||
row: ProjectWorkspaceRow,
|
||||
runtimeServices: WorkspaceRuntimeService[] = [],
|
||||
): ProjectWorkspace {
|
||||
return {
|
||||
id: row.id,
|
||||
companyId: row.companyId,
|
||||
@@ -80,15 +130,20 @@ function toWorkspace(row: ProjectWorkspaceRow): ProjectWorkspace {
|
||||
repoRef: row.repoRef ?? null,
|
||||
metadata: (row.metadata as Record<string, unknown> | null) ?? null,
|
||||
isPrimary: row.isPrimary,
|
||||
runtimeServices,
|
||||
createdAt: row.createdAt,
|
||||
updatedAt: row.updatedAt,
|
||||
};
|
||||
}
|
||||
|
||||
function pickPrimaryWorkspace(rows: ProjectWorkspaceRow[]): ProjectWorkspace | null {
|
||||
function pickPrimaryWorkspace(
|
||||
rows: ProjectWorkspaceRow[],
|
||||
runtimeServicesByWorkspaceId?: Map<string, WorkspaceRuntimeService[]>,
|
||||
): ProjectWorkspace | null {
|
||||
if (rows.length === 0) return null;
|
||||
const explicitPrimary = rows.find((row) => row.isPrimary);
|
||||
return toWorkspace(explicitPrimary ?? rows[0]);
|
||||
const primary = explicitPrimary ?? rows[0];
|
||||
return toWorkspace(primary, runtimeServicesByWorkspaceId?.get(primary.id) ?? []);
|
||||
}
|
||||
|
||||
/** Batch-load workspace refs for a set of projects. */
|
||||
@@ -101,6 +156,17 @@ async function attachWorkspaces(db: Db, rows: ProjectWithGoals[]): Promise<Proje
|
||||
.from(projectWorkspaces)
|
||||
.where(inArray(projectWorkspaces.projectId, projectIds))
|
||||
.orderBy(desc(projectWorkspaces.isPrimary), asc(projectWorkspaces.createdAt), asc(projectWorkspaces.id));
|
||||
const runtimeServicesByWorkspaceId = await listWorkspaceRuntimeServicesForProjectWorkspaces(
|
||||
db,
|
||||
rows[0]!.companyId,
|
||||
workspaceRows.map((workspace) => workspace.id),
|
||||
);
|
||||
const sharedRuntimeServicesByWorkspaceId = new Map(
|
||||
Array.from(runtimeServicesByWorkspaceId.entries()).map(([workspaceId, services]) => [
|
||||
workspaceId,
|
||||
services.map(toRuntimeService),
|
||||
]),
|
||||
);
|
||||
|
||||
const map = new Map<string, ProjectWorkspaceRow[]>();
|
||||
for (const row of workspaceRows) {
|
||||
@@ -114,11 +180,16 @@ async function attachWorkspaces(db: Db, rows: ProjectWithGoals[]): Promise<Proje
|
||||
|
||||
return rows.map((row) => {
|
||||
const projectWorkspaceRows = map.get(row.id) ?? [];
|
||||
const workspaces = projectWorkspaceRows.map(toWorkspace);
|
||||
const workspaces = projectWorkspaceRows.map((workspace) =>
|
||||
toWorkspace(
|
||||
workspace,
|
||||
sharedRuntimeServicesByWorkspaceId.get(workspace.id) ?? [],
|
||||
),
|
||||
);
|
||||
return {
|
||||
...row,
|
||||
workspaces,
|
||||
primaryWorkspace: pickPrimaryWorkspace(projectWorkspaceRows),
|
||||
primaryWorkspace: pickPrimaryWorkspace(projectWorkspaceRows, sharedRuntimeServicesByWorkspaceId),
|
||||
};
|
||||
});
|
||||
}
|
||||
@@ -192,6 +263,34 @@ function deriveWorkspaceName(input: {
|
||||
return "Workspace";
|
||||
}
|
||||
|
||||
export function resolveProjectNameForUniqueShortname(
|
||||
requestedName: string,
|
||||
existingProjects: ProjectShortnameRow[],
|
||||
options?: ResolveProjectNameOptions,
|
||||
): string {
|
||||
const requestedShortname = normalizeProjectUrlKey(requestedName);
|
||||
if (!requestedShortname) return requestedName;
|
||||
|
||||
const usedShortnames = new Set(
|
||||
existingProjects
|
||||
.filter((project) => !(options?.excludeProjectId && project.id === options.excludeProjectId))
|
||||
.map((project) => normalizeProjectUrlKey(project.name))
|
||||
.filter((value): value is string => value !== null),
|
||||
);
|
||||
if (!usedShortnames.has(requestedShortname)) return requestedName;
|
||||
|
||||
for (let suffix = 2; suffix < 10_000; suffix += 1) {
|
||||
const candidateName = `${requestedName} ${suffix}`;
|
||||
const candidateShortname = normalizeProjectUrlKey(candidateName);
|
||||
if (candidateShortname && !usedShortnames.has(candidateShortname)) {
|
||||
return candidateName;
|
||||
}
|
||||
}
|
||||
|
||||
// Fallback guard for pathological naming collisions.
|
||||
return `${requestedName} ${Date.now()}`;
|
||||
}
|
||||
|
||||
async function ensureSinglePrimaryWorkspace(
|
||||
dbOrTx: any,
|
||||
input: {
|
||||
@@ -271,6 +370,12 @@ export function projectService(db: Db) {
|
||||
projectData.color = nextColor;
|
||||
}
|
||||
|
||||
const existingProjects = await db
|
||||
.select({ id: projects.id, name: projects.name })
|
||||
.from(projects)
|
||||
.where(eq(projects.companyId, companyId));
|
||||
projectData.name = resolveProjectNameForUniqueShortname(projectData.name, existingProjects);
|
||||
|
||||
// Also write goalId to the legacy column (first goal or null)
|
||||
const legacyGoalId = ids && ids.length > 0 ? ids[0] : projectData.goalId ?? null;
|
||||
|
||||
@@ -295,6 +400,26 @@ export function projectService(db: Db) {
|
||||
): Promise<ProjectWithGoals | null> => {
|
||||
const { goalIds: inputGoalIds, ...projectData } = data;
|
||||
const ids = resolveGoalIds({ goalIds: inputGoalIds, goalId: projectData.goalId });
|
||||
const existingProject = await db
|
||||
.select({ id: projects.id, companyId: projects.companyId, name: projects.name })
|
||||
.from(projects)
|
||||
.where(eq(projects.id, id))
|
||||
.then((rows) => rows[0] ?? null);
|
||||
if (!existingProject) return null;
|
||||
|
||||
if (projectData.name !== undefined) {
|
||||
const existingShortname = normalizeProjectUrlKey(existingProject.name);
|
||||
const nextShortname = normalizeProjectUrlKey(projectData.name);
|
||||
if (existingShortname !== nextShortname) {
|
||||
const existingProjects = await db
|
||||
.select({ id: projects.id, name: projects.name })
|
||||
.from(projects)
|
||||
.where(eq(projects.companyId, existingProject.companyId));
|
||||
projectData.name = resolveProjectNameForUniqueShortname(projectData.name, existingProjects, {
|
||||
excludeProjectId: id,
|
||||
});
|
||||
}
|
||||
}
|
||||
|
||||
// Keep legacy goalId column in sync
|
||||
const updates: Partial<typeof projects.$inferInsert> = {
|
||||
@@ -339,7 +464,18 @@ export function projectService(db: Db) {
|
||||
.from(projectWorkspaces)
|
||||
.where(eq(projectWorkspaces.projectId, projectId))
|
||||
.orderBy(desc(projectWorkspaces.isPrimary), asc(projectWorkspaces.createdAt), asc(projectWorkspaces.id));
|
||||
return rows.map(toWorkspace);
|
||||
if (rows.length === 0) return [];
|
||||
const runtimeServicesByWorkspaceId = await listWorkspaceRuntimeServicesForProjectWorkspaces(
|
||||
db,
|
||||
rows[0]!.companyId,
|
||||
rows.map((workspace) => workspace.id),
|
||||
);
|
||||
return rows.map((row) =>
|
||||
toWorkspace(
|
||||
row,
|
||||
(runtimeServicesByWorkspaceId.get(row.id) ?? []).map(toRuntimeService),
|
||||
),
|
||||
);
|
||||
},
|
||||
|
||||
createWorkspace: async (
|
||||
|
||||
@@ -2,6 +2,7 @@ import { createReadStream, promises as fs } from "node:fs";
|
||||
import path from "node:path";
|
||||
import { createHash } from "node:crypto";
|
||||
import { notFound } from "../errors.js";
|
||||
import { resolvePaperclipInstanceRoot } from "../home-paths.js";
|
||||
|
||||
export type RunLogStoreType = "local_file";
|
||||
|
||||
@@ -148,7 +149,7 @@ let cachedStore: RunLogStore | null = null;
|
||||
|
||||
export function getRunLogStore() {
|
||||
if (cachedStore) return cachedStore;
|
||||
const basePath = process.env.RUN_LOG_BASE_PATH ?? path.resolve(process.cwd(), "data/run-logs");
|
||||
const basePath = process.env.RUN_LOG_BASE_PATH ?? path.resolve(resolvePaperclipInstanceRoot(), "data", "run-logs");
|
||||
cachedStore = createLocalFileRunLogStore(basePath);
|
||||
return cachedStore;
|
||||
}
|
||||
|
||||
@@ -308,10 +308,11 @@ export function secretService(db: Db) {
|
||||
return normalized;
|
||||
},
|
||||
|
||||
resolveEnvBindings: async (companyId: string, envValue: unknown) => {
|
||||
resolveEnvBindings: async (companyId: string, envValue: unknown): Promise<{ env: Record<string, string>; secretKeys: Set<string> }> => {
|
||||
const record = asRecord(envValue);
|
||||
if (!record) return {} as Record<string, string>;
|
||||
if (!record) return { env: {} as Record<string, string>, secretKeys: new Set<string>() };
|
||||
const resolved: Record<string, string> = {};
|
||||
const secretKeys = new Set<string>();
|
||||
|
||||
for (const [key, rawBinding] of Object.entries(record)) {
|
||||
if (!ENV_KEY_RE.test(key)) {
|
||||
@@ -326,20 +327,22 @@ export function secretService(db: Db) {
|
||||
resolved[key] = binding.value;
|
||||
} else {
|
||||
resolved[key] = await resolveSecretValue(companyId, binding.secretId, binding.version);
|
||||
secretKeys.add(key);
|
||||
}
|
||||
}
|
||||
return resolved;
|
||||
return { env: resolved, secretKeys };
|
||||
},
|
||||
|
||||
resolveAdapterConfigForRuntime: async (companyId: string, adapterConfig: Record<string, unknown>) => {
|
||||
resolveAdapterConfigForRuntime: async (companyId: string, adapterConfig: Record<string, unknown>): Promise<{ config: Record<string, unknown>; secretKeys: Set<string> }> => {
|
||||
const resolved = { ...adapterConfig };
|
||||
const secretKeys = new Set<string>();
|
||||
if (!Object.prototype.hasOwnProperty.call(adapterConfig, "env")) {
|
||||
return resolved;
|
||||
return { config: resolved, secretKeys };
|
||||
}
|
||||
const record = asRecord(adapterConfig.env);
|
||||
if (!record) {
|
||||
resolved.env = {};
|
||||
return resolved;
|
||||
return { config: resolved, secretKeys };
|
||||
}
|
||||
const env: Record<string, string> = {};
|
||||
for (const [key, rawBinding] of Object.entries(record)) {
|
||||
@@ -355,10 +358,11 @@ export function secretService(db: Db) {
|
||||
env[key] = binding.value;
|
||||
} else {
|
||||
env[key] = await resolveSecretValue(companyId, binding.secretId, binding.version);
|
||||
secretKeys.add(key);
|
||||
}
|
||||
}
|
||||
resolved.env = env;
|
||||
return resolved;
|
||||
return { config: resolved, secretKeys };
|
||||
},
|
||||
};
|
||||
}
|
||||
|
||||
1076
server/src/services/workspace-runtime.ts
Normal file
1076
server/src/services/workspace-runtime.ts
Normal file
File diff suppressed because it is too large
Load Diff
217
server/src/ui-branding.ts
Normal file
217
server/src/ui-branding.ts
Normal file
@@ -0,0 +1,217 @@
|
||||
const FAVICON_BLOCK_START = "<!-- PAPERCLIP_FAVICON_START -->";
|
||||
const FAVICON_BLOCK_END = "<!-- PAPERCLIP_FAVICON_END -->";
|
||||
const RUNTIME_BRANDING_BLOCK_START = "<!-- PAPERCLIP_RUNTIME_BRANDING_START -->";
|
||||
const RUNTIME_BRANDING_BLOCK_END = "<!-- PAPERCLIP_RUNTIME_BRANDING_END -->";
|
||||
|
||||
const DEFAULT_FAVICON_LINKS = [
|
||||
'<link rel="icon" href="/favicon.ico" sizes="48x48" />',
|
||||
'<link rel="icon" href="/favicon.svg" type="image/svg+xml" />',
|
||||
'<link rel="icon" type="image/png" sizes="32x32" href="/favicon-32x32.png" />',
|
||||
'<link rel="icon" type="image/png" sizes="16x16" href="/favicon-16x16.png" />',
|
||||
].join("\n");
|
||||
|
||||
export type WorktreeUiBranding = {
|
||||
enabled: boolean;
|
||||
name: string | null;
|
||||
color: string | null;
|
||||
textColor: string | null;
|
||||
faviconHref: string | null;
|
||||
};
|
||||
|
||||
function isTruthyEnvValue(value: string | undefined): boolean {
|
||||
if (!value) return false;
|
||||
const normalized = value.trim().toLowerCase();
|
||||
return normalized === "1" || normalized === "true" || normalized === "yes" || normalized === "on";
|
||||
}
|
||||
|
||||
function nonEmpty(value: string | undefined): string | null {
|
||||
if (typeof value !== "string") return null;
|
||||
const normalized = value.trim();
|
||||
return normalized.length > 0 ? normalized : null;
|
||||
}
|
||||
|
||||
function normalizeHexColor(value: string | undefined): string | null {
|
||||
const raw = nonEmpty(value);
|
||||
if (!raw) return null;
|
||||
const hex = raw.startsWith("#") ? raw.slice(1) : raw;
|
||||
if (/^[0-9a-fA-F]{3}$/.test(hex)) {
|
||||
return `#${hex.split("").map((char) => `${char}${char}`).join("").toLowerCase()}`;
|
||||
}
|
||||
if (/^[0-9a-fA-F]{6}$/.test(hex)) {
|
||||
return `#${hex.toLowerCase()}`;
|
||||
}
|
||||
return null;
|
||||
}
|
||||
|
||||
function hslComponentToHex(n: number): string {
|
||||
return Math.round(Math.max(0, Math.min(255, n)))
|
||||
.toString(16)
|
||||
.padStart(2, "0");
|
||||
}
|
||||
|
||||
function hslToHex(hue: number, saturation: number, lightness: number): string {
|
||||
const s = Math.max(0, Math.min(100, saturation)) / 100;
|
||||
const l = Math.max(0, Math.min(100, lightness)) / 100;
|
||||
const c = (1 - Math.abs((2 * l) - 1)) * s;
|
||||
const h = ((hue % 360) + 360) % 360;
|
||||
const x = c * (1 - Math.abs(((h / 60) % 2) - 1));
|
||||
const m = l - (c / 2);
|
||||
|
||||
let r = 0;
|
||||
let g = 0;
|
||||
let b = 0;
|
||||
|
||||
if (h < 60) {
|
||||
r = c;
|
||||
g = x;
|
||||
} else if (h < 120) {
|
||||
r = x;
|
||||
g = c;
|
||||
} else if (h < 180) {
|
||||
g = c;
|
||||
b = x;
|
||||
} else if (h < 240) {
|
||||
g = x;
|
||||
b = c;
|
||||
} else if (h < 300) {
|
||||
r = x;
|
||||
b = c;
|
||||
} else {
|
||||
r = c;
|
||||
b = x;
|
||||
}
|
||||
|
||||
return `#${hslComponentToHex((r + m) * 255)}${hslComponentToHex((g + m) * 255)}${hslComponentToHex((b + m) * 255)}`;
|
||||
}
|
||||
|
||||
function deriveColorFromSeed(seed: string): string {
|
||||
let hash = 0;
|
||||
for (const char of seed) {
|
||||
hash = ((hash * 33) + char.charCodeAt(0)) >>> 0;
|
||||
}
|
||||
return hslToHex(hash % 360, 68, 56);
|
||||
}
|
||||
|
||||
function hexToRgb(color: string): { r: number; g: number; b: number } {
|
||||
const normalized = normalizeHexColor(color) ?? "#000000";
|
||||
return {
|
||||
r: Number.parseInt(normalized.slice(1, 3), 16),
|
||||
g: Number.parseInt(normalized.slice(3, 5), 16),
|
||||
b: Number.parseInt(normalized.slice(5, 7), 16),
|
||||
};
|
||||
}
|
||||
|
||||
function relativeLuminanceChannel(value: number): number {
|
||||
const normalized = value / 255;
|
||||
return normalized <= 0.03928 ? normalized / 12.92 : ((normalized + 0.055) / 1.055) ** 2.4;
|
||||
}
|
||||
|
||||
function relativeLuminance(color: string): number {
|
||||
const { r, g, b } = hexToRgb(color);
|
||||
return (
|
||||
(0.2126 * relativeLuminanceChannel(r)) +
|
||||
(0.7152 * relativeLuminanceChannel(g)) +
|
||||
(0.0722 * relativeLuminanceChannel(b))
|
||||
);
|
||||
}
|
||||
|
||||
function pickReadableTextColor(background: string): string {
|
||||
const backgroundLuminance = relativeLuminance(background);
|
||||
const whiteContrast = 1.05 / (backgroundLuminance + 0.05);
|
||||
const blackContrast = (backgroundLuminance + 0.05) / 0.05;
|
||||
return whiteContrast >= blackContrast ? "#f8fafc" : "#111827";
|
||||
}
|
||||
|
||||
function escapeHtmlAttribute(value: string): string {
|
||||
return value
|
||||
.replaceAll("&", "&")
|
||||
.replaceAll('"', """)
|
||||
.replaceAll("<", "<")
|
||||
.replaceAll(">", ">");
|
||||
}
|
||||
|
||||
function createFaviconDataUrl(background: string, foreground: string): string {
|
||||
const svg = [
|
||||
'<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" fill="none">',
|
||||
`<rect width="24" height="24" rx="6" fill="${background}"/>`,
|
||||
`<path stroke="${foreground}" stroke-linecap="round" stroke-linejoin="round" stroke-width="2.15" d="m16 6-8.414 8.586a2 2 0 0 0 2.829 2.829l8.414-8.586a4 4 0 1 0-5.657-5.657l-8.379 8.551a6 6 0 1 0 8.485 8.485l8.379-8.551"/>`,
|
||||
"</svg>",
|
||||
].join("");
|
||||
return `data:image/svg+xml,${encodeURIComponent(svg)}`;
|
||||
}
|
||||
|
||||
export function isWorktreeUiBrandingEnabled(env: NodeJS.ProcessEnv = process.env): boolean {
|
||||
return isTruthyEnvValue(env.PAPERCLIP_IN_WORKTREE);
|
||||
}
|
||||
|
||||
export function getWorktreeUiBranding(env: NodeJS.ProcessEnv = process.env): WorktreeUiBranding {
|
||||
if (!isWorktreeUiBrandingEnabled(env)) {
|
||||
return {
|
||||
enabled: false,
|
||||
name: null,
|
||||
color: null,
|
||||
textColor: null,
|
||||
faviconHref: null,
|
||||
};
|
||||
}
|
||||
|
||||
const name = nonEmpty(env.PAPERCLIP_WORKTREE_NAME) ?? nonEmpty(env.PAPERCLIP_INSTANCE_ID) ?? "worktree";
|
||||
const color = normalizeHexColor(env.PAPERCLIP_WORKTREE_COLOR) ?? deriveColorFromSeed(name);
|
||||
const textColor = pickReadableTextColor(color);
|
||||
|
||||
return {
|
||||
enabled: true,
|
||||
name,
|
||||
color,
|
||||
textColor,
|
||||
faviconHref: createFaviconDataUrl(color, textColor),
|
||||
};
|
||||
}
|
||||
|
||||
export function renderFaviconLinks(branding: WorktreeUiBranding): string {
|
||||
if (!branding.enabled || !branding.faviconHref) return DEFAULT_FAVICON_LINKS;
|
||||
|
||||
const href = escapeHtmlAttribute(branding.faviconHref);
|
||||
return [
|
||||
`<link rel="icon" href="${href}" type="image/svg+xml" sizes="any" />`,
|
||||
`<link rel="shortcut icon" href="${href}" type="image/svg+xml" />`,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
export function renderRuntimeBrandingMeta(branding: WorktreeUiBranding): string {
|
||||
if (!branding.enabled || !branding.name || !branding.color || !branding.textColor) return "";
|
||||
|
||||
return [
|
||||
'<meta name="paperclip-worktree-enabled" content="true" />',
|
||||
`<meta name="paperclip-worktree-name" content="${escapeHtmlAttribute(branding.name)}" />`,
|
||||
`<meta name="paperclip-worktree-color" content="${escapeHtmlAttribute(branding.color)}" />`,
|
||||
`<meta name="paperclip-worktree-text-color" content="${escapeHtmlAttribute(branding.textColor)}" />`,
|
||||
].join("\n");
|
||||
}
|
||||
|
||||
function replaceMarkedBlock(html: string, startMarker: string, endMarker: string, content: string): string {
|
||||
const start = html.indexOf(startMarker);
|
||||
const end = html.indexOf(endMarker);
|
||||
if (start === -1 || end === -1 || end < start) return html;
|
||||
|
||||
const before = html.slice(0, start + startMarker.length);
|
||||
const after = html.slice(end);
|
||||
const indentedContent = content
|
||||
? `\n${content
|
||||
.split("\n")
|
||||
.map((line) => ` ${line}`)
|
||||
.join("\n")}\n `
|
||||
: "\n ";
|
||||
return `${before}${indentedContent}${after}`;
|
||||
}
|
||||
|
||||
export function applyUiBranding(html: string, env: NodeJS.ProcessEnv = process.env): string {
|
||||
const branding = getWorktreeUiBranding(env);
|
||||
const withFavicon = replaceMarkedBlock(html, FAVICON_BLOCK_START, FAVICON_BLOCK_END, renderFaviconLinks(branding));
|
||||
return replaceMarkedBlock(
|
||||
withFavicon,
|
||||
RUNTIME_BRANDING_BLOCK_START,
|
||||
RUNTIME_BRANDING_BLOCK_END,
|
||||
renderRuntimeBrandingMeta(branding),
|
||||
);
|
||||
}
|
||||
@@ -1,5 +1,5 @@
|
||||
{
|
||||
"extends": "../tsconfig.json",
|
||||
"extends": "../tsconfig.base.json",
|
||||
"compilerOptions": {
|
||||
"outDir": "dist",
|
||||
"rootDir": "src"
|
||||
|
||||
Reference in New Issue
Block a user