Add secrets infrastructure: DB tables, shared types, env binding model, and migration improvements

Introduce company_secrets and company_secret_versions tables for
encrypted secret storage. Add EnvBinding discriminated union (plain vs
secret_ref) to replace raw string env values in adapter configs. Add
hiddenAt column to issues for soft-hiding. Improve migration system
with journal-ordered application and manual fallback when Drizzle
migrator can't reconcile history.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Forgotten
2026-02-19 15:43:43 -06:00
parent 3b81557f7c
commit d26b67ebc3
23 changed files with 7348 additions and 14 deletions

View File

@@ -129,6 +129,7 @@ export interface CreateConfigValues {
args: string; args: string;
extraArgs: string; extraArgs: string;
envVars: string; envVars: string;
envBindings: Record<string, unknown>;
url: string; url: string;
bootstrapPrompt: string; bootstrapPrompt: string;
maxTurnsPerRun: number; maxTurnsPerRun: number;

View File

@@ -54,12 +54,13 @@ async function listMigrationFiles(): Promise<string[]> {
} }
type MigrationJournalFile = { type MigrationJournalFile = {
entries?: Array<{ tag?: string; when?: number }>; entries?: Array<{ idx?: number; tag?: string; when?: number }>;
}; };
type JournalMigrationEntry = { type JournalMigrationEntry = {
fileName: string; fileName: string;
folderMillis: number; folderMillis: number;
order: number;
}; };
async function listJournalMigrationEntries(): Promise<JournalMigrationEntry[]> { async function listJournalMigrationEntries(): Promise<JournalMigrationEntry[]> {
@@ -68,10 +69,11 @@ async function listJournalMigrationEntries(): Promise<JournalMigrationEntry[]> {
const parsed = JSON.parse(raw) as MigrationJournalFile; const parsed = JSON.parse(raw) as MigrationJournalFile;
if (!Array.isArray(parsed.entries)) return []; if (!Array.isArray(parsed.entries)) return [];
return parsed.entries return parsed.entries
.map((entry) => { .map((entry, entryIndex) => {
if (typeof entry?.tag !== "string") return null; if (typeof entry?.tag !== "string") return null;
if (typeof entry?.when !== "number" || !Number.isFinite(entry.when)) return null; if (typeof entry?.when !== "number" || !Number.isFinite(entry.when)) return null;
return { fileName: `${entry.tag}.sql`, folderMillis: entry.when }; const order = Number.isInteger(entry.idx) ? Number(entry.idx) : entryIndex;
return { fileName: `${entry.tag}.sql`, folderMillis: entry.when, order };
}) })
.filter((entry): entry is JournalMigrationEntry => entry !== null); .filter((entry): entry is JournalMigrationEntry => entry !== null);
} catch { } catch {
@@ -88,6 +90,175 @@ async function readMigrationFileContent(migrationFile: string): Promise<string>
return readFile(new URL(`./migrations/${migrationFile}`, import.meta.url), "utf8"); return readFile(new URL(`./migrations/${migrationFile}`, import.meta.url), "utf8");
} }
/**
 * Sorts migration file names into the order recorded in the Drizzle journal.
 * Files the journal does not know about sort after all journalled files; any
 * tie falls back to a stable lexicographic comparison.
 */
async function orderMigrationsByJournal(migrationFiles: string[]): Promise<string[]> {
  const journalEntries = await listJournalMigrationEntries();
  const journalRank = new Map<string, number>();
  for (const entry of journalEntries) {
    journalRank.set(entry.fileName, entry.order);
  }
  // Unknown files rank as +Infinity so they land after every journalled one.
  const rankOf = (fileName: string): number =>
    journalRank.get(fileName) ?? Number.POSITIVE_INFINITY;
  return [...migrationFiles].sort((a, b) => {
    const rankA = rankOf(a);
    const rankB = rankOf(b);
    if (rankA !== rankB) return rankA < rankB ? -1 : 1;
    return a.localeCompare(b);
  });
}
type SqlExecutor = Pick<ReturnType<typeof postgres>, "unsafe">;

/**
 * Runs `action` inside an explicit BEGIN/COMMIT transaction.
 * If anything fails before COMMIT succeeds, a best-effort ROLLBACK is issued
 * and the original error is rethrown (rollback failures are swallowed so they
 * never mask the root cause).
 */
async function runInTransaction(sql: SqlExecutor, action: () => Promise<void>): Promise<void> {
  await sql.unsafe("BEGIN");
  let committed = false;
  try {
    await action();
    await sql.unsafe("COMMIT");
    committed = true;
  } finally {
    if (!committed) {
      try {
        await sql.unsafe("ROLLBACK");
      } catch {
        // Deliberately ignored: surface the original failure, not the rollback's.
      }
    }
  }
}
/**
 * Returns the newest created_at value in the migration history table, or null
 * when the table is empty or the stored value is not a finite number.
 * `qualifiedTable` must already be a safely quoted identifier.
 */
async function latestMigrationCreatedAt(
  sql: SqlExecutor,
  qualifiedTable: string,
): Promise<number | null> {
  const query = `SELECT created_at FROM ${qualifiedTable} ORDER BY created_at DESC NULLS LAST LIMIT 1`;
  const rows = await sql.unsafe<{ created_at: string | number | null }[]>(query);
  const raw = rows[0]?.created_at;
  const numeric = raw == null ? Number.NaN : Number(raw);
  return Number.isFinite(numeric) ? numeric : null;
}
/**
 * Coerces a journal "when" timestamp to a usable epoch-millis integer.
 * Non-numeric, non-finite, or negative inputs fall back to the current time.
 */
function normalizeFolderMillis(value: number | null | undefined): number {
  const usable = typeof value === "number" && Number.isFinite(value) && value >= 0;
  return usable ? Math.trunc(value) : Date.now();
}
async function ensureMigrationJournalTable(
sql: ReturnType<typeof postgres>,
): Promise<{ migrationTableSchema: string; columnNames: Set<string> }> {
let migrationTableSchema = await discoverMigrationTableSchema(sql);
if (!migrationTableSchema) {
const drizzleSchema = quoteIdentifier("drizzle");
const migrationTable = quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE);
await sql.unsafe(`CREATE SCHEMA IF NOT EXISTS ${drizzleSchema}`);
await sql.unsafe(
`CREATE TABLE IF NOT EXISTS ${drizzleSchema}.${migrationTable} (id SERIAL PRIMARY KEY, hash text NOT NULL, created_at bigint)`,
);
migrationTableSchema = (await discoverMigrationTableSchema(sql)) ?? "drizzle";
}
const columnNames = await getMigrationTableColumnNames(sql, migrationTableSchema);
return { migrationTableSchema, columnNames };
}
/**
 * Checks whether the history table already records this migration, matching
 * on whichever identifying columns (hash and/or name) the table actually has.
 * Returns false when neither column exists, since nothing can be matched.
 */
async function migrationHistoryEntryExists(
  sql: SqlExecutor,
  qualifiedTable: string,
  columnNames: Set<string>,
  migrationFile: string,
  hash: string,
): Promise<boolean> {
  const conditions = [
    columnNames.has("hash") ? `hash = ${quoteLiteral(hash)}` : null,
    columnNames.has("name") ? `name = ${quoteLiteral(migrationFile)}` : null,
  ].filter((clause): clause is string => clause !== null);
  if (conditions.length === 0) return false;
  const matches = await sql.unsafe<{ one: number }[]>(
    `SELECT 1 AS one FROM ${qualifiedTable} WHERE ${conditions.join(" OR ")} LIMIT 1`,
  );
  return matches.length > 0;
}
/**
 * Inserts a history row for an applied migration, writing only the columns
 * the table actually has. created_at is kept strictly increasing relative to
 * the existing rows so Drizzle's chronological ordering stays stable even if
 * a journal timestamp is older than what is already recorded.
 */
async function recordMigrationHistoryEntry(
  sql: SqlExecutor,
  qualifiedTable: string,
  columnNames: Set<string>,
  migrationFile: string,
  hash: string,
  folderMillis: number,
): Promise<void> {
  const assignments: Array<{ column: string; literal: string }> = [];
  if (columnNames.has("hash")) {
    assignments.push({ column: quoteIdentifier("hash"), literal: quoteLiteral(hash) });
  }
  if (columnNames.has("name")) {
    assignments.push({ column: quoteIdentifier("name"), literal: quoteLiteral(migrationFile) });
  }
  if (columnNames.has("created_at")) {
    const previous = await latestMigrationCreatedAt(sql, qualifiedTable);
    const normalized = normalizeFolderMillis(folderMillis);
    // Never reuse or go below an existing created_at value.
    const createdAt = previous === null ? normalized : Math.max(previous + 1, normalized);
    assignments.push({
      column: quoteIdentifier("created_at"),
      literal: quoteLiteral(String(createdAt)),
    });
  }
  if (assignments.length === 0) return;
  const columnsSql = assignments.map((a) => a.column).join(", ");
  const valuesSql = assignments.map((a) => a.literal).join(", ");
  await sql.unsafe(`INSERT INTO ${qualifiedTable} (${columnsSql}) VALUES (${valuesSql})`);
}
/**
 * Manual fallback applier for when the Drizzle migrator cannot reconcile
 * history: applies each pending migration file in journal order, each inside
 * its own transaction, and records a history row alongside the DDL so a
 * partial failure leaves neither applied-but-unrecorded nor
 * recorded-but-unapplied migrations.
 *
 * @param url - Postgres connection string.
 * @param pendingMigrations - Migration file names still missing from history.
 */
async function applyPendingMigrationsManually(
  url: string,
  pendingMigrations: string[],
): Promise<void> {
  if (pendingMigrations.length === 0) return;
  const orderedPendingMigrations = await orderMigrationsByJournal(pendingMigrations);
  // Pre-compute each file's journal timestamp for the history rows below.
  const journalEntries = await listJournalMigrationEntries();
  const folderMillisByFileName = new Map(
    journalEntries.map((entry) => [entry.fileName, normalizeFolderMillis(entry.folderMillis)]),
  );
  // Single connection: BEGIN/COMMIT issued via unsafe() must share a session.
  const sql = postgres(url, { max: 1 });
  try {
    const { migrationTableSchema, columnNames } = await ensureMigrationJournalTable(sql);
    const qualifiedTable = `${quoteIdentifier(migrationTableSchema)}.${quoteIdentifier(DRIZZLE_MIGRATIONS_TABLE)}`;
    for (const migrationFile of orderedPendingMigrations) {
      const migrationContent = await readMigrationFileContent(migrationFile);
      // Same hash scheme Drizzle uses to identify a migration's content.
      const hash = createHash("sha256").update(migrationContent).digest("hex");
      // Skip files some other path already recorded (idempotent re-runs).
      const existingEntry = await migrationHistoryEntryExists(
        sql,
        qualifiedTable,
        columnNames,
        migrationFile,
        hash,
      );
      if (existingEntry) continue;
      // DDL statements and the history insert commit (or roll back) together.
      await runInTransaction(sql, async () => {
        for (const statement of splitMigrationStatements(migrationContent)) {
          await sql.unsafe(statement);
        }
        await recordMigrationHistoryEntry(
          sql,
          qualifiedTable,
          columnNames,
          migrationFile,
          hash,
          folderMillisByFileName.get(migrationFile) ?? Date.now(),
        );
      });
    }
  } finally {
    await sql.end();
  }
}
async function mapHashesToMigrationFiles(migrationFiles: string[]): Promise<Map<string, string>> { async function mapHashesToMigrationFiles(migrationFiles: string[]): Promise<Map<string, string>> {
const mapped = new Map<string, string>(); const mapped = new Map<string, string>();
@@ -467,6 +638,9 @@ export async function inspectMigrations(url: string): Promise<MigrationState> {
} }
export async function applyPendingMigrations(url: string): Promise<void> { export async function applyPendingMigrations(url: string): Promise<void> {
const initialState = await inspectMigrations(url);
if (initialState.status === "upToDate") return;
const sql = postgres(url, { max: 1 }); const sql = postgres(url, { max: 1 });
try { try {
@@ -475,6 +649,28 @@ export async function applyPendingMigrations(url: string): Promise<void> {
} finally { } finally {
await sql.end(); await sql.end();
} }
let state = await inspectMigrations(url);
if (state.status === "upToDate") return;
const repair = await reconcilePendingMigrationHistory(url);
if (repair.repairedMigrations.length > 0) {
state = await inspectMigrations(url);
if (state.status === "upToDate") return;
}
if (state.status !== "needsMigrations" || state.reason !== "pending-migrations") {
throw new Error("Migrations are still pending after attempted apply; run inspectMigrations for details.");
}
await applyPendingMigrationsManually(url, state.pendingMigrations);
const finalState = await inspectMigrations(url);
if (finalState.status !== "upToDate") {
throw new Error(
`Failed to apply pending migrations: ${finalState.pendingMigrations.join(", ")}`,
);
}
} }
export type MigrationBootstrapResult = export type MigrationBootstrapResult =

View File

@@ -1,17 +1,21 @@
import { migrate as migratePg } from "drizzle-orm/postgres-js/migrator"; import { applyPendingMigrations, inspectMigrations } from "./client.js";
import postgres from "postgres";
import { drizzle as drizzlePg } from "drizzle-orm/postgres-js";
const migrationsFolder = new URL("./migrations", import.meta.url).pathname;
const url = process.env.DATABASE_URL; const url = process.env.DATABASE_URL;
if (!url) { if (!url) {
throw new Error("DATABASE_URL is required for db:migrate"); throw new Error("DATABASE_URL is required for db:migrate");
} }
const sql = postgres(url, { max: 1 }); const before = await inspectMigrations(url);
const db = drizzlePg(sql); if (before.status === "upToDate") {
await migratePg(db, { migrationsFolder }); console.log("No pending migrations");
await sql.end(); } else {
console.log(`Applying ${before.pendingMigrations.length} pending migration(s)...`);
await applyPendingMigrations(url);
console.log("Migrations complete"); const after = await inspectMigrations(url);
if (after.status !== "upToDate") {
throw new Error(`Migrations incomplete: ${after.pendingMigrations.join(", ")}`);
}
console.log("Migrations complete");
}

View File

@@ -0,0 +1 @@
ALTER TABLE "issues" ADD COLUMN "hidden_at" timestamp with time zone;

View File

@@ -0,0 +1,36 @@
CREATE TABLE "company_secret_versions" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"secret_id" uuid NOT NULL,
"version" integer NOT NULL,
"material" jsonb NOT NULL,
"value_sha256" text NOT NULL,
"created_by_agent_id" uuid,
"created_by_user_id" text,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"revoked_at" timestamp with time zone
);
--> statement-breakpoint
CREATE TABLE "company_secrets" (
"id" uuid PRIMARY KEY DEFAULT gen_random_uuid() NOT NULL,
"company_id" uuid NOT NULL,
"name" text NOT NULL,
"provider" text DEFAULT 'local_encrypted' NOT NULL,
"external_ref" text,
"latest_version" integer DEFAULT 1 NOT NULL,
"description" text,
"created_by_agent_id" uuid,
"created_by_user_id" text,
"created_at" timestamp with time zone DEFAULT now() NOT NULL,
"updated_at" timestamp with time zone DEFAULT now() NOT NULL
);
--> statement-breakpoint
ALTER TABLE "company_secret_versions" ADD CONSTRAINT "company_secret_versions_secret_id_company_secrets_id_fk" FOREIGN KEY ("secret_id") REFERENCES "public"."company_secrets"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "company_secret_versions" ADD CONSTRAINT "company_secret_versions_created_by_agent_id_agents_id_fk" FOREIGN KEY ("created_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "company_secrets" ADD CONSTRAINT "company_secrets_company_id_companies_id_fk" FOREIGN KEY ("company_id") REFERENCES "public"."companies"("id") ON DELETE no action ON UPDATE no action;--> statement-breakpoint
ALTER TABLE "company_secrets" ADD CONSTRAINT "company_secrets_created_by_agent_id_agents_id_fk" FOREIGN KEY ("created_by_agent_id") REFERENCES "public"."agents"("id") ON DELETE set null ON UPDATE no action;--> statement-breakpoint
CREATE INDEX "company_secret_versions_secret_idx" ON "company_secret_versions" USING btree ("secret_id","created_at");--> statement-breakpoint
CREATE INDEX "company_secret_versions_value_sha256_idx" ON "company_secret_versions" USING btree ("value_sha256");--> statement-breakpoint
CREATE UNIQUE INDEX "company_secret_versions_secret_version_uq" ON "company_secret_versions" USING btree ("secret_id","version");--> statement-breakpoint
CREATE INDEX "company_secrets_company_idx" ON "company_secrets" USING btree ("company_id");--> statement-breakpoint
CREATE INDEX "company_secrets_company_provider_idx" ON "company_secrets" USING btree ("company_id","provider");--> statement-breakpoint
CREATE UNIQUE INDEX "company_secrets_company_name_uq" ON "company_secrets" USING btree ("company_id","name");

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -57,6 +57,20 @@
"when": 1771545603000, "when": 1771545603000,
"tag": "0007_new_quentin_quire", "tag": "0007_new_quentin_quire",
"breakpoints": true "breakpoints": true
},
{
"idx": 8,
"version": "7",
"when": 1771534160426,
"tag": "0008_amused_zzzax",
"breakpoints": true
},
{
"idx": 9,
"version": "7",
"when": 1771534211029,
"tag": "0009_fast_jackal",
"breakpoints": true
} }
] ]
} }

View File

@@ -0,0 +1,23 @@
import { pgTable, uuid, text, timestamp, integer, jsonb, index, uniqueIndex } from "drizzle-orm/pg-core";
import { agents } from "./agents.js";
import { companySecrets } from "./company_secrets.js";
// One row per secret value version; rotation inserts a new row rather than
// overwriting, so prior material remains addressable until revoked.
// NOTE(review): must stay in exact sync with the company_secret_versions
// migration SQL — drizzle-kit diffs this definition against the database.
export const companySecretVersions = pgTable(
  "company_secret_versions",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    // Parent secret; versions are removed along with it (onDelete cascade).
    secretId: uuid("secret_id").notNull().references(() => companySecrets.id, { onDelete: "cascade" }),
    // Monotonic version number, unique per secret (see secretVersionUq).
    version: integer("version").notNull(),
    // Encrypted payload envelope; shape is provider-specific (opaque JSON here).
    material: jsonb("material").$type<Record<string, unknown>>().notNull(),
    // Digest of the plaintext value — presumably for dedupe/change detection;
    // confirm against the service layer before relying on it.
    valueSha256: text("value_sha256").notNull(),
    // Creator attribution survives agent deletion as NULL rather than cascading.
    createdByAgentId: uuid("created_by_agent_id").references(() => agents.id, { onDelete: "set null" }),
    createdByUserId: text("created_by_user_id"),
    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
    // Soft revocation marker; NULL means the version is still usable.
    revokedAt: timestamp("revoked_at", { withTimezone: true }),
  },
  (table) => ({
    secretIdx: index("company_secret_versions_secret_idx").on(table.secretId, table.createdAt),
    valueHashIdx: index("company_secret_versions_value_sha256_idx").on(table.valueSha256),
    secretVersionUq: uniqueIndex("company_secret_versions_secret_version_uq").on(table.secretId, table.version),
  }),
);

View File

@@ -0,0 +1,25 @@
import { pgTable, uuid, text, timestamp, integer, index, uniqueIndex } from "drizzle-orm/pg-core";
import { companies } from "./companies.js";
import { agents } from "./agents.js";
// Logical secret per company; actual values live in company_secret_versions.
// NOTE(review): must stay in exact sync with the company_secrets migration
// SQL — drizzle-kit diffs this definition against the database.
export const companySecrets = pgTable(
  "company_secrets",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    // No onDelete specified → default "no action": deleting a company with
    // secrets will fail at the FK, matching the migration SQL.
    companyId: uuid("company_id").notNull().references(() => companies.id),
    // Unique per company (companyNameUq below), not globally.
    name: text("name").notNull(),
    provider: text("provider").notNull().default("local_encrypted"),
    // Identifier in an external store (AWS/GCP/Vault); NULL for local storage.
    externalRef: text("external_ref"),
    // Denormalized pointer to the newest version row; starts at 1.
    latestVersion: integer("latest_version").notNull().default(1),
    description: text("description"),
    // Creator attribution survives agent deletion as NULL.
    createdByAgentId: uuid("created_by_agent_id").references(() => agents.id, { onDelete: "set null" }),
    createdByUserId: text("created_by_user_id"),
    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => ({
    companyIdx: index("company_secrets_company_idx").on(table.companyId),
    companyProviderIdx: index("company_secrets_company_provider_idx").on(table.companyId, table.provider),
    companyNameUq: uniqueIndex("company_secrets_company_name_uq").on(table.companyId, table.name),
  }),
);

View File

@@ -16,3 +16,5 @@ export { costEvents } from "./cost_events.js";
export { approvals } from "./approvals.js"; export { approvals } from "./approvals.js";
export { approvalComments } from "./approval_comments.js"; export { approvalComments } from "./approval_comments.js";
export { activityLog } from "./activity_log.js"; export { activityLog } from "./activity_log.js";
export { companySecrets } from "./company_secrets.js";
export { companySecretVersions } from "./company_secret_versions.js";

View File

@@ -35,6 +35,7 @@ export const issues = pgTable(
startedAt: timestamp("started_at", { withTimezone: true }), startedAt: timestamp("started_at", { withTimezone: true }),
completedAt: timestamp("completed_at", { withTimezone: true }), completedAt: timestamp("completed_at", { withTimezone: true }),
cancelledAt: timestamp("cancelled_at", { withTimezone: true }), cancelledAt: timestamp("cancelled_at", { withTimezone: true }),
hiddenAt: timestamp("hidden_at", { withTimezone: true }),
createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(), createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(), updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
}, },

View File

@@ -8,6 +8,7 @@ export const API = {
issues: `${API_PREFIX}/issues`, issues: `${API_PREFIX}/issues`,
goals: `${API_PREFIX}/goals`, goals: `${API_PREFIX}/goals`,
approvals: `${API_PREFIX}/approvals`, approvals: `${API_PREFIX}/approvals`,
secrets: `${API_PREFIX}/secrets`,
costs: `${API_PREFIX}/costs`, costs: `${API_PREFIX}/costs`,
activity: `${API_PREFIX}/activity`, activity: `${API_PREFIX}/activity`,
dashboard: `${API_PREFIX}/dashboard`, dashboard: `${API_PREFIX}/dashboard`,

View File

@@ -1,4 +1,5 @@
import { z } from "zod"; import { z } from "zod";
import { SECRET_PROVIDERS } from "./constants.js";
export const configMetaSchema = z.object({ export const configMetaSchema = z.object({
version: z.literal(1), version: z.literal(1),
@@ -28,12 +29,31 @@ export const serverConfigSchema = z.object({
serveUi: z.boolean().default(true), serveUi: z.boolean().default(true),
}); });
// Settings for the local_encrypted provider: where the master key file lives.
export const secretsLocalEncryptedConfigSchema = z.object({
  keyFilePath: z.string().default("./data/secrets/master.key"),
});

// Top-level secrets configuration. Every field has a default so the whole
// section may be omitted from a config file.
export const secretsConfigSchema = z.object({
  provider: z.enum(SECRET_PROVIDERS).default("local_encrypted"),
  // NOTE(review): strictMode semantics are not visible here — presumably it
  // rejects legacy plaintext env values; confirm at the consumer.
  strictMode: z.boolean().default(false),
  localEncrypted: secretsLocalEncryptedConfigSchema.default({
    keyFilePath: "./data/secrets/master.key",
  }),
});
export const paperclipConfigSchema = z.object({ export const paperclipConfigSchema = z.object({
$meta: configMetaSchema, $meta: configMetaSchema,
llm: llmConfigSchema.optional(), llm: llmConfigSchema.optional(),
database: databaseConfigSchema, database: databaseConfigSchema,
logging: loggingConfigSchema, logging: loggingConfigSchema,
server: serverConfigSchema, server: serverConfigSchema,
secrets: secretsConfigSchema.default({
provider: "local_encrypted",
strictMode: false,
localEncrypted: {
keyFilePath: "./data/secrets/master.key",
},
}),
}); });
export type PaperclipConfig = z.infer<typeof paperclipConfigSchema>; export type PaperclipConfig = z.infer<typeof paperclipConfigSchema>;
@@ -41,4 +61,6 @@ export type LlmConfig = z.infer<typeof llmConfigSchema>;
export type DatabaseConfig = z.infer<typeof databaseConfigSchema>; export type DatabaseConfig = z.infer<typeof databaseConfigSchema>;
export type LoggingConfig = z.infer<typeof loggingConfigSchema>; export type LoggingConfig = z.infer<typeof loggingConfigSchema>;
export type ServerConfig = z.infer<typeof serverConfigSchema>; export type ServerConfig = z.infer<typeof serverConfigSchema>;
export type SecretsConfig = z.infer<typeof secretsConfigSchema>;
export type SecretsLocalEncryptedConfig = z.infer<typeof secretsLocalEncryptedConfigSchema>;
export type ConfigMeta = z.infer<typeof configMetaSchema>; export type ConfigMeta = z.infer<typeof configMetaSchema>;

View File

@@ -71,6 +71,14 @@ export const APPROVAL_STATUSES = [
] as const; ] as const;
export type ApprovalStatus = (typeof APPROVAL_STATUSES)[number]; export type ApprovalStatus = (typeof APPROVAL_STATUSES)[number];
/** Storage backends a company secret value can be kept in. */
export const SECRET_PROVIDERS = ["local_encrypted", "aws_secrets_manager", "gcp_secret_manager", "vault"] as const;

/** One of the provider identifiers listed in SECRET_PROVIDERS. */
export type SecretProvider = (typeof SECRET_PROVIDERS)[number];
export const HEARTBEAT_INVOCATION_SOURCES = [ export const HEARTBEAT_INVOCATION_SOURCES = [
"timer", "timer",
"assignment", "assignment",

View File

@@ -10,6 +10,7 @@ export {
PROJECT_STATUSES, PROJECT_STATUSES,
APPROVAL_TYPES, APPROVAL_TYPES,
APPROVAL_STATUSES, APPROVAL_STATUSES,
SECRET_PROVIDERS,
HEARTBEAT_INVOCATION_SOURCES, HEARTBEAT_INVOCATION_SOURCES,
HEARTBEAT_RUN_STATUSES, HEARTBEAT_RUN_STATUSES,
WAKEUP_TRIGGER_DETAILS, WAKEUP_TRIGGER_DETAILS,
@@ -26,6 +27,7 @@ export {
type ProjectStatus, type ProjectStatus,
type ApprovalType, type ApprovalType,
type ApprovalStatus, type ApprovalStatus,
type SecretProvider,
type HeartbeatInvocationSource, type HeartbeatInvocationSource,
type HeartbeatRunStatus, type HeartbeatRunStatus,
type WakeupTriggerDetail, type WakeupTriggerDetail,
@@ -57,6 +59,10 @@ export type {
DashboardSummary, DashboardSummary,
ActivityEvent, ActivityEvent,
SidebarBadges, SidebarBadges,
EnvBinding,
AgentEnvConfig,
CompanySecret,
SecretProviderDescriptor,
} from "./types/index.js"; } from "./types/index.js";
export { export {
@@ -107,6 +113,16 @@ export {
type RequestApprovalRevision, type RequestApprovalRevision,
type ResubmitApproval, type ResubmitApproval,
type AddApprovalComment, type AddApprovalComment,
envBindingPlainSchema,
envBindingSecretRefSchema,
envBindingSchema,
envConfigSchema,
createSecretSchema,
rotateSecretSchema,
updateSecretSchema,
type CreateSecret,
type RotateSecret,
type UpdateSecret,
createCostEventSchema, createCostEventSchema,
updateBudgetSchema, updateBudgetSchema,
type CreateCostEvent, type CreateCostEvent,
@@ -122,10 +138,14 @@ export {
databaseConfigSchema, databaseConfigSchema,
loggingConfigSchema, loggingConfigSchema,
serverConfigSchema, serverConfigSchema,
secretsConfigSchema,
secretsLocalEncryptedConfigSchema,
type PaperclipConfig, type PaperclipConfig,
type LlmConfig, type LlmConfig,
type DatabaseConfig, type DatabaseConfig,
type LoggingConfig, type LoggingConfig,
type ServerConfig, type ServerConfig,
type SecretsConfig,
type SecretsLocalEncryptedConfig,
type ConfigMeta, type ConfigMeta,
} from "./config-schema.js"; } from "./config-schema.js";

View File

@@ -4,6 +4,16 @@ export type { Project } from "./project.js";
export type { Issue, IssueComment, IssueAncestor } from "./issue.js"; export type { Issue, IssueComment, IssueAncestor } from "./issue.js";
export type { Goal } from "./goal.js"; export type { Goal } from "./goal.js";
export type { Approval, ApprovalComment } from "./approval.js"; export type { Approval, ApprovalComment } from "./approval.js";
export type {
SecretProvider,
SecretVersionSelector,
EnvPlainBinding,
EnvSecretRefBinding,
EnvBinding,
AgentEnvConfig,
CompanySecret,
SecretProviderDescriptor,
} from "./secrets.js";
export type { CostEvent, CostSummary, CostByAgent } from "./cost.js"; export type { CostEvent, CostSummary, CostByAgent } from "./cost.js";
export type { export type {
HeartbeatRun, HeartbeatRun,

View File

@@ -32,6 +32,7 @@ export interface Issue {
startedAt: Date | null; startedAt: Date | null;
completedAt: Date | null; completedAt: Date | null;
cancelledAt: Date | null; cancelledAt: Date | null;
hiddenAt: Date | null;
createdAt: Date; createdAt: Date;
updatedAt: Date; updatedAt: Date;
} }

View File

@@ -0,0 +1,43 @@
/**
 * Storage backend for a company secret.
 * NOTE(review): duplicates the SecretProvider derived from SECRET_PROVIDERS
 * in constants — keep the two in sync.
 */
export type SecretProvider =
  | "local_encrypted"
  | "aws_secrets_manager"
  | "gcp_secret_manager"
  | "vault";

/** Picks a specific secret version by number, or the newest via "latest". */
export type SecretVersionSelector = number | "latest";

/** Env var whose value is stored inline as plaintext. */
export interface EnvPlainBinding {
  type: "plain";
  value: string;
}

/** Env var resolved from a stored secret at runtime. */
export interface EnvSecretRefBinding {
  type: "secret_ref";
  secretId: string;
  /** Defaults to the latest version when omitted. */
  version?: SecretVersionSelector;
}

// Backward-compatible: legacy plaintext string values are still accepted.
export type EnvBinding = string | EnvPlainBinding | EnvSecretRefBinding;

/** Env var name → binding, as stored in an agent's adapter config. */
export type AgentEnvConfig = Record<string, EnvBinding>;

/** A company-scoped secret record; values live in separate version rows. */
export interface CompanySecret {
  id: string;
  companyId: string;
  name: string;
  provider: SecretProvider;
  /** Identifier in an external store; null for locally stored secrets. */
  externalRef: string | null;
  latestVersion: number;
  description: string | null;
  /** Creator attribution; either id may be null. */
  createdByAgentId: string | null;
  createdByUserId: string | null;
  createdAt: Date;
  updatedAt: Date;
}

/** UI/API metadata describing one provider option. */
export interface SecretProviderDescriptor {
  id: SecretProvider;
  label: string;
  /** True when the provider needs an external reference to locate the value. */
  requiresExternalRef: boolean;
}

View File

@@ -4,11 +4,25 @@ import {
AGENT_ROLES, AGENT_ROLES,
AGENT_STATUSES, AGENT_STATUSES,
} from "../constants.js"; } from "../constants.js";
import { envConfigSchema } from "./secret.js";
export const agentPermissionsSchema = z.object({ export const agentPermissionsSchema = z.object({
canCreateAgents: z.boolean().optional().default(false), canCreateAgents: z.boolean().optional().default(false),
}); });
// Adapter config stays an open record, but when an `env` key is present its
// value must parse as a valid env-binding map.
const adapterConfigSchema = z.record(z.unknown()).superRefine((config, ctx) => {
  const { env } = config;
  if (env === undefined) return;
  const result = envConfigSchema.safeParse(env);
  if (result.success) return;
  ctx.addIssue({
    code: z.ZodIssueCode.custom,
    message: "adapterConfig.env must be a map of valid env bindings",
    path: ["env"],
  });
});
export const createAgentSchema = z.object({ export const createAgentSchema = z.object({
name: z.string().min(1), name: z.string().min(1),
role: z.enum(AGENT_ROLES).optional().default("general"), role: z.enum(AGENT_ROLES).optional().default("general"),
@@ -16,7 +30,7 @@ export const createAgentSchema = z.object({
reportsTo: z.string().uuid().optional().nullable(), reportsTo: z.string().uuid().optional().nullable(),
capabilities: z.string().optional().nullable(), capabilities: z.string().optional().nullable(),
adapterType: z.enum(AGENT_ADAPTER_TYPES).optional().default("process"), adapterType: z.enum(AGENT_ADAPTER_TYPES).optional().default("process"),
adapterConfig: z.record(z.unknown()).optional().default({}), adapterConfig: adapterConfigSchema.optional().default({}),
runtimeConfig: z.record(z.unknown()).optional().default({}), runtimeConfig: z.record(z.unknown()).optional().default({}),
budgetMonthlyCents: z.number().int().nonnegative().optional().default(0), budgetMonthlyCents: z.number().int().nonnegative().optional().default(0),
permissions: agentPermissionsSchema.optional(), permissions: agentPermissionsSchema.optional(),

View File

@@ -63,6 +63,19 @@ export {
type AddApprovalComment, type AddApprovalComment,
} from "./approval.js"; } from "./approval.js";
export {
envBindingPlainSchema,
envBindingSecretRefSchema,
envBindingSchema,
envConfigSchema,
createSecretSchema,
rotateSecretSchema,
updateSecretSchema,
type CreateSecret,
type RotateSecret,
type UpdateSecret,
} from "./secret.js";
export { export {
createCostEventSchema, createCostEventSchema,
updateBudgetSchema, updateBudgetSchema,

View File

@@ -18,6 +18,7 @@ export type CreateIssue = z.infer<typeof createIssueSchema>;
export const updateIssueSchema = createIssueSchema.partial().extend({ export const updateIssueSchema = createIssueSchema.partial().extend({
comment: z.string().min(1).optional(), comment: z.string().min(1).optional(),
hiddenAt: z.string().datetime().nullable().optional(),
}); });
export type UpdateIssue = z.infer<typeof updateIssueSchema>; export type UpdateIssue = z.infer<typeof updateIssueSchema>;

View File

@@ -0,0 +1,47 @@
import { z } from "zod";
import { SECRET_PROVIDERS } from "../constants.js";
// Env value stored inline as plaintext.
export const envBindingPlainSchema = z.object({
  type: z.literal("plain"),
  value: z.string(),
});

// Env value resolved from a stored secret; version defaults to latest when omitted.
export const envBindingSecretRefSchema = z.object({
  type: z.literal("secret_ref"),
  secretId: z.string().uuid(),
  version: z.union([z.literal("latest"), z.number().int().positive()]).optional(),
});

// Backward-compatible union that accepts legacy inline values.
export const envBindingSchema = z.union([
  z.string(),
  envBindingPlainSchema,
  envBindingSecretRefSchema,
]);

// Env var name -> binding, as stored in adapter configs.
export const envConfigSchema = z.record(envBindingSchema);

// Payload for creating a secret; provider defaults server-side when omitted.
export const createSecretSchema = z.object({
  name: z.string().min(1),
  provider: z.enum(SECRET_PROVIDERS).optional(),
  value: z.string().min(1),
  description: z.string().optional().nullable(),
  externalRef: z.string().optional().nullable(),
});
export type CreateSecret = z.infer<typeof createSecretSchema>;

// Payload for rotating a secret to a new value (creates a new version).
export const rotateSecretSchema = z.object({
  value: z.string().min(1),
  externalRef: z.string().optional().nullable(),
});
export type RotateSecret = z.infer<typeof rotateSecretSchema>;

// Metadata-only update; the stored value itself cannot be changed here.
export const updateSecretSchema = z.object({
  name: z.string().min(1).optional(),
  description: z.string().optional().nullable(),
  externalRef: z.string().optional().nullable(),
});
export type UpdateSecret = z.infer<typeof updateSecretSchema>;