feat: add storage system with local disk and S3 providers

Introduces a provider-agnostic storage subsystem for file attachments.
Includes local disk and S3 backends, asset/attachment DB schemas, issue
attachment CRUD routes with multer upload, CLI configure/doctor/env
integration, and enriched issue ancestors with project/goal resolution.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Forgotten
2026-02-20 10:31:56 -06:00
parent 32119f5c2f
commit fdd2ea6157
36 changed files with 1683 additions and 32 deletions

View File

@@ -14,3 +14,4 @@ export { llmCheck } from "./llm-check.js";
export { logCheck } from "./log-check.js"; export { logCheck } from "./log-check.js";
export { portCheck } from "./port-check.js"; export { portCheck } from "./port-check.js";
export { secretsCheck } from "./secrets-check.js"; export { secretsCheck } from "./secrets-check.js";
export { storageCheck } from "./storage-check.js";

View File

@@ -0,0 +1,60 @@
import fs from "node:fs";
import type { PaperclipConfig } from "../config/schema.js";
import type { CheckResult } from "./index.js";
import { resolveRuntimeLikePath } from "./path-resolver.js";
/**
 * Doctor check for the configured storage backend.
 *
 * local_disk: verifies the resolved base directory exists (repairable by
 * creating it) and is writable. s3: only validates that bucket/region are
 * non-empty — no network reachability check is attempted from doctor.
 */
export function storageCheck(config: PaperclipConfig, configPath?: string): CheckResult {
  if (config.storage.provider === "local_disk") {
    // Resolve ~ / relative paths the same way the runtime would.
    const baseDir = resolveRuntimeLikePath(config.storage.localDisk.baseDir, configPath);
    return localDiskResult(baseDir);
  }
  return s3Result(config.storage.s3.bucket.trim(), config.storage.s3.region.trim());
}

// Local-disk branch: a missing directory is a repairable warning; an existing
// but unwritable directory is a hard failure.
function localDiskResult(baseDir: string): CheckResult {
  if (!fs.existsSync(baseDir)) {
    return {
      name: "Storage",
      status: "warn",
      message: `Local storage directory does not exist: ${baseDir}`,
      canRepair: true,
      repair: () => {
        fs.mkdirSync(baseDir, { recursive: true });
      },
      repairHint: "Run with --repair to create local storage directory",
    };
  }
  try {
    fs.accessSync(baseDir, fs.constants.W_OK);
  } catch {
    return {
      name: "Storage",
      status: "fail",
      message: `Local storage directory is not writable: ${baseDir}`,
      canRepair: false,
      repairHint: "Check file permissions for storage.localDisk.baseDir",
    };
  }
  return {
    name: "Storage",
    status: "pass",
    message: `Local disk storage is writable: ${baseDir}`,
  };
}

// S3 branch: config-shape validation only; credentials/endpoint verification
// is deferred to the deployment environment.
function s3Result(bucket: string, region: string): CheckResult {
  if (!bucket || !region) {
    return {
      name: "Storage",
      status: "fail",
      message: "S3 storage requires non-empty bucket and region",
      canRepair: false,
      repairHint: "Run `paperclip configure --section storage`",
    };
  }
  return {
    name: "Storage",
    status: "warn",
    message: `S3 storage configured (bucket=${bucket}, region=${region}). Reachability check is skipped in doctor.`,
    canRepair: false,
    repairHint: "Verify credentials and endpoint in deployment environment",
  };
}

View File

@@ -7,6 +7,7 @@ import { promptDatabase } from "../prompts/database.js";
import { promptLlm } from "../prompts/llm.js"; import { promptLlm } from "../prompts/llm.js";
import { promptLogging } from "../prompts/logging.js"; import { promptLogging } from "../prompts/logging.js";
import { defaultSecretsConfig, promptSecrets } from "../prompts/secrets.js"; import { defaultSecretsConfig, promptSecrets } from "../prompts/secrets.js";
import { defaultStorageConfig, promptStorage } from "../prompts/storage.js";
import { promptServer } from "../prompts/server.js"; import { promptServer } from "../prompts/server.js";
import { import {
resolveDefaultEmbeddedPostgresDir, resolveDefaultEmbeddedPostgresDir,
@@ -14,13 +15,14 @@ import {
resolvePaperclipInstanceId, resolvePaperclipInstanceId,
} from "../config/home.js"; } from "../config/home.js";
type Section = "llm" | "database" | "logging" | "server" | "secrets"; type Section = "llm" | "database" | "logging" | "server" | "storage" | "secrets";
const SECTION_LABELS: Record<Section, string> = { const SECTION_LABELS: Record<Section, string> = {
llm: "LLM Provider", llm: "LLM Provider",
database: "Database", database: "Database",
logging: "Logging", logging: "Logging",
server: "Server", server: "Server",
storage: "Storage",
secrets: "Secrets", secrets: "Secrets",
}; };
@@ -45,6 +47,7 @@ function defaultConfig(): PaperclipConfig {
port: 3100, port: 3100,
serveUi: true, serveUi: true,
}, },
storage: defaultStorageConfig(),
secrets: defaultSecretsConfig(), secrets: defaultSecretsConfig(),
}; };
} }
@@ -123,6 +126,9 @@ export async function configure(opts: {
case "server": case "server":
config.server = await promptServer(); config.server = await promptServer();
break; break;
case "storage":
config.storage = await promptStorage(config.storage);
break;
case "secrets": case "secrets":
config.secrets = await promptSecrets(config.secrets); config.secrets = await promptSecrets(config.secrets);
{ {

View File

@@ -10,6 +10,7 @@ import {
logCheck, logCheck,
portCheck, portCheck,
secretsCheck, secretsCheck,
storageCheck,
type CheckResult, type CheckResult,
} from "../checks/index.js"; } from "../checks/index.js";
@@ -66,24 +67,30 @@ export async function doctor(opts: {
printResult(secretsResult); printResult(secretsResult);
await maybeRepair(secretsResult, opts); await maybeRepair(secretsResult, opts);
// 4. Database check // 4. Storage check
const storageResult = storageCheck(config, configPath);
results.push(storageResult);
printResult(storageResult);
await maybeRepair(storageResult, opts);
// 5. Database check
const dbResult = await databaseCheck(config, configPath); const dbResult = await databaseCheck(config, configPath);
results.push(dbResult); results.push(dbResult);
printResult(dbResult); printResult(dbResult);
await maybeRepair(dbResult, opts); await maybeRepair(dbResult, opts);
// 5. LLM check // 6. LLM check
const llmResult = await llmCheck(config); const llmResult = await llmCheck(config);
results.push(llmResult); results.push(llmResult);
printResult(llmResult); printResult(llmResult);
// 6. Log directory check // 7. Log directory check
const logResult = logCheck(config, configPath); const logResult = logCheck(config, configPath);
results.push(logResult); results.push(logResult);
printResult(logResult); printResult(logResult);
await maybeRepair(logResult, opts); await maybeRepair(logResult, opts);
// 7. Port check // 8. Port check
const portResult = await portCheck(config); const portResult = await portCheck(config);
results.push(portResult); results.push(portResult);
printResult(portResult); printResult(portResult);

View File

@@ -9,6 +9,7 @@ import {
} from "../config/env.js"; } from "../config/env.js";
import { import {
resolveDefaultSecretsKeyFilePath, resolveDefaultSecretsKeyFilePath,
resolveDefaultStorageDir,
resolvePaperclipInstanceId, resolvePaperclipInstanceId,
} from "../config/home.js"; } from "../config/home.js";
@@ -27,9 +28,13 @@ const DEFAULT_AGENT_JWT_ISSUER = "paperclip";
const DEFAULT_AGENT_JWT_AUDIENCE = "paperclip-api"; const DEFAULT_AGENT_JWT_AUDIENCE = "paperclip-api";
const DEFAULT_HEARTBEAT_SCHEDULER_INTERVAL_MS = "30000"; const DEFAULT_HEARTBEAT_SCHEDULER_INTERVAL_MS = "30000";
const DEFAULT_SECRETS_PROVIDER = "local_encrypted"; const DEFAULT_SECRETS_PROVIDER = "local_encrypted";
const DEFAULT_STORAGE_PROVIDER = "local_disk";
function defaultSecretsKeyFilePath(): string { function defaultSecretsKeyFilePath(): string {
return resolveDefaultSecretsKeyFilePath(resolvePaperclipInstanceId()); return resolveDefaultSecretsKeyFilePath(resolvePaperclipInstanceId());
} }
function defaultStorageBaseDir(): string {
return resolveDefaultStorageDir(resolvePaperclipInstanceId());
}
export async function envCommand(opts: { config?: string }): Promise<void> { export async function envCommand(opts: { config?: string }): Promise<void> {
p.intro(pc.bgCyan(pc.black(" paperclip env "))); p.intro(pc.bgCyan(pc.black(" paperclip env ")));
@@ -127,6 +132,33 @@ function collectDeploymentEnvRows(config: PaperclipConfig | null, configPath: st
process.env.PAPERCLIP_SECRETS_MASTER_KEY_FILE ?? process.env.PAPERCLIP_SECRETS_MASTER_KEY_FILE ??
config?.secrets?.localEncrypted?.keyFilePath ?? config?.secrets?.localEncrypted?.keyFilePath ??
defaultSecretsKeyFilePath(); defaultSecretsKeyFilePath();
const storageProvider =
process.env.PAPERCLIP_STORAGE_PROVIDER ??
config?.storage?.provider ??
DEFAULT_STORAGE_PROVIDER;
const storageLocalDir =
process.env.PAPERCLIP_STORAGE_LOCAL_DIR ??
config?.storage?.localDisk?.baseDir ??
defaultStorageBaseDir();
const storageS3Bucket =
process.env.PAPERCLIP_STORAGE_S3_BUCKET ??
config?.storage?.s3?.bucket ??
"paperclip";
const storageS3Region =
process.env.PAPERCLIP_STORAGE_S3_REGION ??
config?.storage?.s3?.region ??
"us-east-1";
const storageS3Endpoint =
process.env.PAPERCLIP_STORAGE_S3_ENDPOINT ??
config?.storage?.s3?.endpoint ??
"";
const storageS3Prefix =
process.env.PAPERCLIP_STORAGE_S3_PREFIX ??
config?.storage?.s3?.prefix ??
"";
const storageS3ForcePathStyle =
process.env.PAPERCLIP_STORAGE_S3_FORCE_PATH_STYLE ??
String(config?.storage?.s3?.forcePathStyle ?? false);
const rows: EnvVarRow[] = [ const rows: EnvVarRow[] = [
{ {
@@ -228,6 +260,83 @@ function collectDeploymentEnvRows(config: PaperclipConfig | null, configPath: st
required: false, required: false,
note: "Path to local encrypted secrets key file", note: "Path to local encrypted secrets key file",
}, },
{
key: "PAPERCLIP_STORAGE_PROVIDER",
value: storageProvider,
source: process.env.PAPERCLIP_STORAGE_PROVIDER
? "env"
: config?.storage?.provider
? "config"
: "default",
required: false,
note: "Storage provider (local_disk or s3)",
},
{
key: "PAPERCLIP_STORAGE_LOCAL_DIR",
value: storageLocalDir,
source: process.env.PAPERCLIP_STORAGE_LOCAL_DIR
? "env"
: config?.storage?.localDisk?.baseDir
? "config"
: "default",
required: false,
note: "Local storage base directory for local_disk provider",
},
{
key: "PAPERCLIP_STORAGE_S3_BUCKET",
value: storageS3Bucket,
source: process.env.PAPERCLIP_STORAGE_S3_BUCKET
? "env"
: config?.storage?.s3?.bucket
? "config"
: "default",
required: false,
note: "S3 bucket name for s3 provider",
},
{
key: "PAPERCLIP_STORAGE_S3_REGION",
value: storageS3Region,
source: process.env.PAPERCLIP_STORAGE_S3_REGION
? "env"
: config?.storage?.s3?.region
? "config"
: "default",
required: false,
note: "S3 region for s3 provider",
},
{
key: "PAPERCLIP_STORAGE_S3_ENDPOINT",
value: storageS3Endpoint,
source: process.env.PAPERCLIP_STORAGE_S3_ENDPOINT
? "env"
: config?.storage?.s3?.endpoint
? "config"
: "default",
required: false,
note: "Optional custom endpoint for S3-compatible providers",
},
{
key: "PAPERCLIP_STORAGE_S3_PREFIX",
value: storageS3Prefix,
source: process.env.PAPERCLIP_STORAGE_S3_PREFIX
? "env"
: config?.storage?.s3?.prefix
? "config"
: "default",
required: false,
note: "Optional object key prefix",
},
{
key: "PAPERCLIP_STORAGE_S3_FORCE_PATH_STYLE",
value: storageS3ForcePathStyle,
source: process.env.PAPERCLIP_STORAGE_S3_FORCE_PATH_STYLE
? "env"
: config?.storage?.s3?.forcePathStyle !== undefined
? "config"
: "default",
required: false,
note: "Set true for path-style access on compatible providers",
},
]; ];
const defaultConfigPath = resolveConfigPath(); const defaultConfigPath = resolveConfigPath();

View File

@@ -8,6 +8,7 @@ import { promptDatabase } from "../prompts/database.js";
import { promptLlm } from "../prompts/llm.js"; import { promptLlm } from "../prompts/llm.js";
import { promptLogging } from "../prompts/logging.js"; import { promptLogging } from "../prompts/logging.js";
import { defaultSecretsConfig } from "../prompts/secrets.js"; import { defaultSecretsConfig } from "../prompts/secrets.js";
import { defaultStorageConfig, promptStorage } from "../prompts/storage.js";
import { promptServer } from "../prompts/server.js"; import { promptServer } from "../prompts/server.js";
import { describeLocalInstancePaths, resolvePaperclipInstanceId } from "../config/home.js"; import { describeLocalInstancePaths, resolvePaperclipInstanceId } from "../config/home.js";
@@ -107,6 +108,10 @@ export async function onboard(opts: { config?: string }): Promise<void> {
p.log.step(pc.bold("Server")); p.log.step(pc.bold("Server"));
const server = await promptServer(); const server = await promptServer();
// Storage
p.log.step(pc.bold("Storage"));
const storage = await promptStorage(defaultStorageConfig());
// Secrets // Secrets
p.log.step(pc.bold("Secrets")); p.log.step(pc.bold("Secrets"));
const secrets = defaultSecretsConfig(); const secrets = defaultSecretsConfig();
@@ -137,6 +142,7 @@ export async function onboard(opts: { config?: string }): Promise<void> {
database, database,
logging, logging,
server, server,
storage,
secrets, secrets,
}; };
@@ -155,6 +161,7 @@ export async function onboard(opts: { config?: string }): Promise<void> {
llm ? `LLM: ${llm.provider}` : "LLM: not configured", llm ? `LLM: ${llm.provider}` : "LLM: not configured",
`Logging: ${logging.mode}${logging.logDir}`, `Logging: ${logging.mode}${logging.logDir}`,
`Server: port ${server.port}`, `Server: port ${server.port}`,
`Storage: ${storage.provider}`,
`Secrets: ${secrets.provider} (strict mode ${secrets.strictMode ? "on" : "off"})`, `Secrets: ${secrets.provider} (strict mode ${secrets.strictMode ? "on" : "off"})`,
`Agent auth: PAPERCLIP_AGENT_JWT_SECRET configured`, `Agent auth: PAPERCLIP_AGENT_JWT_SECRET configured`,
].join("\n"), ].join("\n"),

View File

@@ -45,6 +45,10 @@ export function resolveDefaultSecretsKeyFilePath(instanceId?: string): string {
return path.resolve(resolvePaperclipInstanceRoot(instanceId), "secrets", "master.key"); return path.resolve(resolvePaperclipInstanceRoot(instanceId), "secrets", "master.key");
} }
/**
 * Default on-disk storage root for a Paperclip instance:
 * `<instanceRoot>/data/storage` (matches the storage plan's
 * `~/.paperclip/instances/<instanceId>/data/storage` default).
 */
export function resolveDefaultStorageDir(instanceId?: string): string {
  return path.resolve(resolvePaperclipInstanceRoot(instanceId), "data", "storage");
}
export function expandHomePrefix(value: string): string { export function expandHomePrefix(value: string): string {
if (value === "~") return os.homedir(); if (value === "~") return os.homedir();
if (value.startsWith("~/")) return path.resolve(os.homedir(), value.slice(2)); if (value.startsWith("~/")) return path.resolve(os.homedir(), value.slice(2));
@@ -62,5 +66,6 @@ export function describeLocalInstancePaths(instanceId?: string) {
embeddedPostgresDataDir: resolveDefaultEmbeddedPostgresDir(resolvedInstanceId), embeddedPostgresDataDir: resolveDefaultEmbeddedPostgresDir(resolvedInstanceId),
logDir: resolveDefaultLogsDir(resolvedInstanceId), logDir: resolveDefaultLogsDir(resolvedInstanceId),
secretsKeyFilePath: resolveDefaultSecretsKeyFilePath(resolvedInstanceId), secretsKeyFilePath: resolveDefaultSecretsKeyFilePath(resolvedInstanceId),
storageDir: resolveDefaultStorageDir(resolvedInstanceId),
}; };
} }

View File

@@ -5,6 +5,9 @@ export {
databaseConfigSchema, databaseConfigSchema,
loggingConfigSchema, loggingConfigSchema,
serverConfigSchema, serverConfigSchema,
storageConfigSchema,
storageLocalDiskConfigSchema,
storageS3ConfigSchema,
secretsConfigSchema, secretsConfigSchema,
secretsLocalEncryptedConfigSchema, secretsLocalEncryptedConfigSchema,
type PaperclipConfig, type PaperclipConfig,
@@ -12,6 +15,9 @@ export {
type DatabaseConfig, type DatabaseConfig,
type LoggingConfig, type LoggingConfig,
type ServerConfig, type ServerConfig,
type StorageConfig,
type StorageLocalDiskConfig,
type StorageS3Config,
type SecretsConfig, type SecretsConfig,
type SecretsLocalEncryptedConfig, type SecretsLocalEncryptedConfig,
type ConfigMeta, type ConfigMeta,

View File

@@ -48,7 +48,7 @@ program
.command("configure") .command("configure")
.description("Update configuration sections") .description("Update configuration sections")
.option("-c, --config <path>", "Path to config file") .option("-c, --config <path>", "Path to config file")
.option("-s, --section <section>", "Section to configure (llm, database, logging, server, secrets)") .option("-s, --section <section>", "Section to configure (llm, database, logging, server, storage, secrets)")
.action(configure); .action(configure);
program program

146
cli/src/prompts/storage.ts Normal file
View File

@@ -0,0 +1,146 @@
import * as p from "@clack/prompts";
import type { StorageConfig } from "../config/schema.js";
import { resolveDefaultStorageDir, resolvePaperclipInstanceId } from "../config/home.js";
// Default local-disk base directory for the currently resolved instance id
// (used both as the prompt default and the initial config value).
function defaultStorageBaseDir(): string {
  return resolveDefaultStorageDir(resolvePaperclipInstanceId());
}
/**
 * Initial storage configuration: local disk under the instance's default
 * storage directory, with placeholder S3 settings kept alongside so switching
 * providers later does not lose either section.
 */
export function defaultStorageConfig(): StorageConfig {
  const localDisk = { baseDir: defaultStorageBaseDir() };
  const s3 = {
    bucket: "paperclip",
    region: "us-east-1",
    endpoint: undefined,
    prefix: "",
    forcePathStyle: false,
  };
  return { provider: "local_disk", localDisk, s3 };
}
/**
 * Guard for @clack/prompts results: exits the process cleanly when the user
 * cancels (Ctrl+C), otherwise returns the answered value.
 */
function unwrapOrExit<T>(value: T | symbol): T {
  if (p.isCancel(value)) {
    p.cancel("Setup cancelled.");
    process.exit(0);
  }
  return value as T;
}

/**
 * Interactive prompt flow for the `storage` config section.
 *
 * Starts from `current` (or the defaults) so re-running preserves previously
 * entered values, and always returns both the localDisk and s3 sub-configs so
 * switching providers never discards the other section's settings.
 */
export async function promptStorage(current?: StorageConfig): Promise<StorageConfig> {
  const base = current ?? defaultStorageConfig();

  const provider = unwrapOrExit(
    await p.select({
      message: "Storage provider",
      options: [
        {
          value: "local_disk" as const,
          label: "Local disk (recommended)",
          hint: "best for single-user local deployments",
        },
        {
          value: "s3" as const,
          label: "S3 compatible",
          hint: "for cloud/object storage backends",
        },
      ],
      initialValue: base.provider,
    }),
  );

  if (provider === "local_disk") {
    const baseDir = unwrapOrExit(
      await p.text({
        message: "Local storage base directory",
        defaultValue: base.localDisk.baseDir || defaultStorageBaseDir(),
        placeholder: defaultStorageBaseDir(),
        validate: (value) =>
          value && value.trim().length > 0 ? undefined : "Storage base directory is required",
      }),
    );
    return {
      provider: "local_disk",
      localDisk: { baseDir: baseDir.trim() },
      s3: base.s3,
    };
  }

  const bucket = unwrapOrExit(
    await p.text({
      message: "S3 bucket",
      defaultValue: base.s3.bucket || "paperclip",
      placeholder: "paperclip",
      validate: (value) => (value && value.trim().length > 0 ? undefined : "Bucket is required"),
    }),
  );
  const region = unwrapOrExit(
    await p.text({
      message: "S3 region",
      defaultValue: base.s3.region || "us-east-1",
      placeholder: "us-east-1",
      validate: (value) => (value && value.trim().length > 0 ? undefined : "Region is required"),
    }),
  );
  const endpoint = unwrapOrExit(
    await p.text({
      message: "S3 endpoint (optional for compatible backends)",
      defaultValue: base.s3.endpoint ?? "",
      placeholder: "https://s3.amazonaws.com",
    }),
  );
  const prefix = unwrapOrExit(
    await p.text({
      message: "Object key prefix (optional)",
      defaultValue: base.s3.prefix ?? "",
      placeholder: "paperclip/",
    }),
  );
  const forcePathStyle = unwrapOrExit(
    await p.confirm({
      message: "Use S3 path-style URLs?",
      initialValue: base.s3.forcePathStyle ?? false,
    }),
  );

  return {
    provider: "s3",
    localDisk: base.localDisk,
    s3: {
      bucket: bucket.trim(),
      region: region.trim(),
      // Empty input means "use the provider's standard endpoint".
      endpoint: endpoint.trim() || undefined,
      prefix: prefix.trim(),
      forcePathStyle,
    },
  };
}

View File

@@ -0,0 +1,206 @@
# Storage System Implementation Plan (V1)
Status: Draft
Owner: Backend + UI
Date: 2026-02-20
## Goal
Add a single storage subsystem for Paperclip that supports:
- local disk storage for single-user local deployment
- S3-compatible object storage for cloud deployment
- a provider-agnostic interface for issue images and future file attachments
## V1 Scope
- First consumer: issue attachments/images.
- Storage adapters: `local_disk` and `s3`.
- Files are always company-scoped and access-controlled.
- API serves attachment bytes through authenticated Paperclip endpoints.
## Out of Scope (This Draft)
- Public unauthenticated object URLs.
- CDN/signed URL optimization.
- Image transformations/thumbnails.
- Malware scanning pipeline.
## Key Decisions
- Default local path is under instance root: `~/.paperclip/instances/<instanceId>/data/storage`.
- Object bytes live in storage provider; metadata lives in Postgres.
- `assets` is generic metadata table; `issue_attachments` links assets to issues/comments.
- S3 credentials come from runtime environment/default AWS provider chain, not DB rows.
- All object keys include company prefix to preserve hard tenancy boundaries.
## Phase 1: Shared Config + Provider Contract
### Checklist (Per File)
- [ ] `packages/shared/src/constants.ts`: add `STORAGE_PROVIDERS` and `StorageProvider` type.
- [ ] `packages/shared/src/config-schema.ts`: add `storageConfigSchema` with:
- provider: `local_disk | s3`
- localDisk.baseDir
- s3.bucket, s3.region, s3.endpoint?, s3.prefix?, s3.forcePathStyle?
- [ ] `packages/shared/src/index.ts`: export new storage config/types.
- [ ] `cli/src/config/schema.ts`: ensure re-export includes new storage schema/types.
- [ ] `cli/src/commands/configure.ts`: add `storage` section support.
- [ ] `cli/src/commands/onboard.ts`: initialize default storage config.
- [ ] `cli/src/prompts/storage.ts`: new prompt flow for local disk vs s3 settings.
- [ ] `cli/src/prompts/index.ts` (if such a barrel file exists) — otherwise the commands' direct imports — wire up the new storage prompt.
- [ ] `server/src/config.ts`: load storage config and resolve home-aware local path.
- [ ] `server/src/home-paths.ts`: add `resolveDefaultStorageDir()`.
- [ ] `doc/CLI.md`: document `configure --section storage`.
- [ ] `doc/DEVELOPING.md`: document default local storage path and overrides.
### Acceptance Criteria
- `paperclip onboard` writes a valid `storage` config block by default.
- `paperclip configure --section storage` can switch between local and s3 modes.
- Server startup reads storage config without env-only hacks.
## Phase 2: Server Storage Subsystem + Providers
### Checklist (Per File)
- [ ] `server/src/storage/types.ts`: define provider + service interfaces.
- [ ] `server/src/storage/service.ts`: provider-agnostic service (key generation, validation, stream APIs).
- [ ] `server/src/storage/local-disk-provider.ts`: implement local disk provider with safe path resolution.
- [ ] `server/src/storage/s3-provider.ts`: implement S3-compatible provider (`@aws-sdk/client-s3`).
- [ ] `server/src/storage/provider-registry.ts`: provider lookup by configured id.
- [ ] `server/src/storage/index.ts`: export storage factory helpers.
- [ ] `server/src/services/index.ts`: export `storageService` factory.
- [ ] `server/src/app.ts` or route wiring point: inject/use storage service where needed.
- [ ] `server/package.json`: add AWS SDK dependency if not present.
### Acceptance Criteria
- In `local_disk` mode, uploading + reading a file round-trips bytes on disk.
- In `s3` mode, service can `put/get/delete` against S3-compatible endpoint.
- Invalid provider config yields clear startup/config errors.
## Phase 3: Database Metadata Model
### Checklist (Per File)
- [ ] `packages/db/src/schema/assets.ts`: new generic asset metadata table.
- [ ] `packages/db/src/schema/issue_attachments.ts`: issue-to-asset linking table.
- [ ] `packages/db/src/schema/index.ts`: export new tables.
- [ ] `packages/db/src/migrations/*`: generate migration for both tables and indexes.
- [ ] `packages/shared/src/types/issue.ts` (or new asset types file): add `IssueAttachment` type.
- [ ] `packages/shared/src/index.ts`: export new types.
### Suggested Columns
- `assets`:
- `id`, `company_id`, `provider`, `object_key`
- `content_type`, `byte_size`, `sha256`, `original_filename`
- `created_by_agent_id`, `created_by_user_id`, timestamps
- `issue_attachments`:
- `id`, `company_id`, `issue_id`, `asset_id`, `issue_comment_id` (nullable), timestamps
### Acceptance Criteria
- Migration applies cleanly on empty and existing local dev DB.
- Metadata rows are company-scoped and indexed for issue lookup.
## Phase 4: Issue Attachment API
### Checklist (Per File)
- [ ] `packages/shared/src/validators/issue.ts`: add schemas for upload/list/delete attachment operations.
- [ ] `server/src/services/issues.ts`: add attachment CRUD helpers with company checks.
- [ ] `server/src/routes/issues.ts`: add endpoints:
- `POST /companies/:companyId/issues/:issueId/attachments` (multipart)
- `GET /issues/:issueId/attachments`
- `GET /attachments/:attachmentId/content`
- `DELETE /attachments/:attachmentId`
- [ ] `server/src/routes/authz.ts`: reuse/enforce company access for attachment endpoints.
- [ ] `server/src/services/activity-log.ts` usage callsites: log attachment add/remove mutations.
- [ ] `server/src/app.ts`: ensure multipart parsing middleware is in place for upload route.
### API Behavior
- Enforce max size and image/content-type allowlist in V1.
- Return consistent errors: `400/401/403/404/409/422/500`.
- Stream bytes instead of buffering large payloads in memory.
### Acceptance Criteria
- Board and same-company agents can upload and read attachments per issue permissions.
- Cross-company access is denied even with valid attachment id.
- Activity log records attachment add/remove actions.
## Phase 5: UI Issue Attachment Integration
### Checklist (Per File)
- [ ] `ui/src/api/issues.ts`: add attachment API client methods.
- [ ] `ui/src/api/client.ts`: support multipart upload helper (no JSON `Content-Type` for `FormData`).
- [ ] `ui/src/lib/queryKeys.ts`: add issue attachment query keys.
- [ ] `ui/src/pages/IssueDetail.tsx`: add upload UI + attachment list/query invalidation.
- [ ] `ui/src/components/CommentThread.tsx`: optional comment image attach or display linked images.
- [ ] `packages/shared/src/types/index.ts`: ensure attachment types are consumed cleanly in UI.
### Acceptance Criteria
- User can upload an image from issue detail and see it listed immediately.
- Uploaded image can be opened/rendered via authenticated API route.
- Upload and fetch failures are visible to users (no silent errors).
## Phase 6: CLI Doctor + Operational Hardening
### Checklist (Per File)
- [ ] `cli/src/checks/storage-check.ts`: add storage check (local writable dir, optional S3 reachability check).
- [ ] `cli/src/checks/index.ts`: export new storage check.
- [ ] `cli/src/commands/doctor.ts`: include storage check in doctor sequence.
- [ ] `doc/DATABASE.md` or `doc/DEVELOPING.md`: mention storage backend behavior by deployment mode.
- [ ] `doc/SPEC-implementation.md`: add storage subsystem and issue-attachment endpoint contract.
### Acceptance Criteria
- `paperclip doctor` reports actionable storage status.
- Local single-user install works without extra cloud credentials.
- Cloud config supports S3-compatible endpoint without code changes.
## Test Plan
### Server Integration Tests
- [ ] `server/src/__tests__/issue-attachments.auth.test.ts`: company boundary and permission tests.
- [ ] `server/src/__tests__/issue-attachments.lifecycle.test.ts`: upload/list/read/delete flow.
- [ ] `server/src/__tests__/storage-local-provider.test.ts`: local provider path safety and round-trip.
- [ ] `server/src/__tests__/storage-s3-provider.test.ts`: s3 provider contract (mocked client).
- [ ] `server/src/__tests__/activity-log.attachments.test.ts`: mutation logging assertions.
### CLI Tests
- [ ] `cli/src/__tests__/configure-storage.test.ts`: configure section writes valid config.
- [ ] `cli/src/__tests__/doctor-storage-check.test.ts`: storage health output and repair behavior.
### UI Tests (if present in current stack)
- [ ] `ui/src/...`: issue detail upload and error handling tests.
## Verification Gate Before Merge
Run:
```sh
pnpm -r typecheck
pnpm test:run
pnpm build
```
If any command is skipped, document exactly what was skipped and why.
## Implementation Order
1. Phase 1 and Phase 2 (foundation, no user-visible breakage)
2. Phase 3 (DB contract)
3. Phase 4 (API)
4. Phase 5 (UI consumer)
5. Phase 6 (doctor/docs hardening)

View File

@@ -0,0 +1,26 @@
import { pgTable, uuid, text, integer, timestamp, index, uniqueIndex } from "drizzle-orm/pg-core";
import { companies } from "./companies.js";
import { agents } from "./agents.js";
// Generic binary-asset metadata. The object bytes live in the configured
// storage provider; this table records where they are and what they contain.
export const assets = pgTable(
  "assets",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    // Tenancy boundary: every asset belongs to exactly one company.
    companyId: uuid("company_id").notNull().references(() => companies.id),
    // Storage provider id the object was written with (e.g. "local_disk", "s3").
    provider: text("provider").notNull(),
    // Provider-scoped object key; unique per company (see assets_company_object_key_uq).
    objectKey: text("object_key").notNull(),
    contentType: text("content_type").notNull(),
    byteSize: integer("byte_size").notNull(),
    // Content hash of the stored bytes.
    sha256: text("sha256").notNull(),
    originalFilename: text("original_filename"),
    // Uploader attribution; both are nullable. created_by_user_id is plain
    // text rather than a FK — presumably an external auth user identifier
    // (NOTE(review): confirm against the auth layer).
    createdByAgentId: uuid("created_by_agent_id").references(() => agents.id),
    createdByUserId: text("created_by_user_id"),
    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => ({
    // Supports company-scoped "recent assets" listings.
    companyCreatedIdx: index("assets_company_created_idx").on(table.companyId, table.createdAt),
    companyProviderIdx: index("assets_company_provider_idx").on(table.companyId, table.provider),
    // Object keys may not repeat within a company (keys include a company
    // prefix per the storage plan, so collisions indicate a bug).
    companyObjectKeyUq: uniqueIndex("assets_company_object_key_uq").on(table.companyId, table.objectKey),
  }),
);

View File

@@ -10,6 +10,8 @@ export { goals } from "./goals.js";
export { issues } from "./issues.js"; export { issues } from "./issues.js";
export { issueApprovals } from "./issue_approvals.js"; export { issueApprovals } from "./issue_approvals.js";
export { issueComments } from "./issue_comments.js"; export { issueComments } from "./issue_comments.js";
export { assets } from "./assets.js";
export { issueAttachments } from "./issue_attachments.js";
export { heartbeatRuns } from "./heartbeat_runs.js"; export { heartbeatRuns } from "./heartbeat_runs.js";
export { heartbeatRunEvents } from "./heartbeat_run_events.js"; export { heartbeatRunEvents } from "./heartbeat_run_events.js";
export { costEvents } from "./cost_events.js"; export { costEvents } from "./cost_events.js";

View File

@@ -0,0 +1,23 @@
import { pgTable, uuid, timestamp, index, uniqueIndex } from "drizzle-orm/pg-core";
import { companies } from "./companies.js";
import { issues } from "./issues.js";
import { assets } from "./assets.js";
import { issueComments } from "./issue_comments.js";
// Join table linking uploaded assets to issues (and optionally to a specific
// issue comment). Asset bytes/metadata live in `assets`; this table scopes
// them to an issue within a company.
export const issueAttachments = pgTable(
  "issue_attachments",
  {
    id: uuid("id").primaryKey().defaultRandom(),
    // Denormalized tenancy column so attachment queries can filter by company
    // without joining; presumably kept consistent with the issue's company by
    // the service layer (NOTE(review): confirm).
    companyId: uuid("company_id").notNull().references(() => companies.id),
    // Deleting an issue removes its attachment links.
    issueId: uuid("issue_id").notNull().references(() => issues.id, { onDelete: "cascade" }),
    // Deleting the underlying asset removes the link as well.
    assetId: uuid("asset_id").notNull().references(() => assets.id, { onDelete: "cascade" }),
    // Optional association with one comment; the link is kept (comment id
    // nulled) when that comment is deleted.
    issueCommentId: uuid("issue_comment_id").references(() => issueComments.id, { onDelete: "set null" }),
    createdAt: timestamp("created_at", { withTimezone: true }).notNull().defaultNow(),
    updatedAt: timestamp("updated_at", { withTimezone: true }).notNull().defaultNow(),
  },
  (table) => ({
    // Primary lookup path: attachments for an issue, company-scoped.
    companyIssueIdx: index("issue_attachments_company_issue_idx").on(table.companyId, table.issueId),
    issueCommentIdx: index("issue_attachments_issue_comment_idx").on(table.issueCommentId),
    // Each asset can back at most one issue attachment row.
    assetUq: uniqueIndex("issue_attachments_asset_uq").on(table.assetId),
  }),
);

View File

@@ -1,5 +1,5 @@
import { z } from "zod"; import { z } from "zod";
import { SECRET_PROVIDERS } from "./constants.js"; import { SECRET_PROVIDERS, STORAGE_PROVIDERS } from "./constants.js";
export const configMetaSchema = z.object({ export const configMetaSchema = z.object({
version: z.literal(1), version: z.literal(1),
@@ -29,6 +29,31 @@ export const serverConfigSchema = z.object({
serveUi: z.boolean().default(true), serveUi: z.boolean().default(true),
}); });
// Single source of truth for the schema-level fallback storage path. The real
// per-instance default is computed by the CLI (resolveDefaultStorageDir); this
// literal only applies when a config is parsed without CLI context. Previously
// this string was duplicated in two defaults below, risking drift.
const DEFAULT_STORAGE_LOCAL_BASE_DIR = "~/.paperclip/instances/default/data/storage";

// Settings for the `local_disk` provider.
export const storageLocalDiskConfigSchema = z.object({
  baseDir: z.string().default(DEFAULT_STORAGE_LOCAL_BASE_DIR),
});

// Settings for the `s3` provider (S3-compatible endpoints supported via
// `endpoint` + `forcePathStyle`).
export const storageS3ConfigSchema = z.object({
  bucket: z.string().min(1).default("paperclip"),
  region: z.string().min(1).default("us-east-1"),
  endpoint: z.string().optional(),
  prefix: z.string().default(""),
  forcePathStyle: z.boolean().default(false),
});

// Full storage section. Both sub-configs default from an empty object so each
// field's default above is the single place its fallback value is declared
// (zod applies per-field defaults when parsing the `{}` default).
export const storageConfigSchema = z.object({
  provider: z.enum(STORAGE_PROVIDERS).default("local_disk"),
  localDisk: storageLocalDiskConfigSchema.default({}),
  s3: storageS3ConfigSchema.default({}),
});
export const secretsLocalEncryptedConfigSchema = z.object({ export const secretsLocalEncryptedConfigSchema = z.object({
keyFilePath: z.string().default("~/.paperclip/instances/default/secrets/master.key"), keyFilePath: z.string().default("~/.paperclip/instances/default/secrets/master.key"),
}); });
@@ -47,6 +72,18 @@ export const paperclipConfigSchema = z.object({
database: databaseConfigSchema, database: databaseConfigSchema,
logging: loggingConfigSchema, logging: loggingConfigSchema,
server: serverConfigSchema, server: serverConfigSchema,
storage: storageConfigSchema.default({
provider: "local_disk",
localDisk: {
baseDir: "~/.paperclip/instances/default/data/storage",
},
s3: {
bucket: "paperclip",
region: "us-east-1",
prefix: "",
forcePathStyle: false,
},
}),
secrets: secretsConfigSchema.default({ secrets: secretsConfigSchema.default({
provider: "local_encrypted", provider: "local_encrypted",
strictMode: false, strictMode: false,
@@ -61,6 +98,9 @@ export type LlmConfig = z.infer<typeof llmConfigSchema>;
export type DatabaseConfig = z.infer<typeof databaseConfigSchema>; export type DatabaseConfig = z.infer<typeof databaseConfigSchema>;
export type LoggingConfig = z.infer<typeof loggingConfigSchema>; export type LoggingConfig = z.infer<typeof loggingConfigSchema>;
export type ServerConfig = z.infer<typeof serverConfigSchema>; export type ServerConfig = z.infer<typeof serverConfigSchema>;
export type StorageConfig = z.infer<typeof storageConfigSchema>;
export type StorageLocalDiskConfig = z.infer<typeof storageLocalDiskConfigSchema>;
export type StorageS3Config = z.infer<typeof storageS3ConfigSchema>;
export type SecretsConfig = z.infer<typeof secretsConfigSchema>; export type SecretsConfig = z.infer<typeof secretsConfigSchema>;
export type SecretsLocalEncryptedConfig = z.infer<typeof secretsLocalEncryptedConfigSchema>; export type SecretsLocalEncryptedConfig = z.infer<typeof secretsLocalEncryptedConfigSchema>;
export type ConfigMeta = z.infer<typeof configMetaSchema>; export type ConfigMeta = z.infer<typeof configMetaSchema>;

View File

@@ -79,6 +79,9 @@ export const SECRET_PROVIDERS = [
] as const; ] as const;
export type SecretProvider = (typeof SECRET_PROVIDERS)[number]; export type SecretProvider = (typeof SECRET_PROVIDERS)[number];
// Supported storage backends; referenced by the config schema and by
// env-var validation in the server config loader.
export const STORAGE_PROVIDERS = ["local_disk", "s3"] as const;
export type StorageProvider = (typeof STORAGE_PROVIDERS)[number];
export const HEARTBEAT_INVOCATION_SOURCES = [ export const HEARTBEAT_INVOCATION_SOURCES = [
"timer", "timer",
"assignment", "assignment",

View File

@@ -11,6 +11,7 @@ export {
APPROVAL_TYPES, APPROVAL_TYPES,
APPROVAL_STATUSES, APPROVAL_STATUSES,
SECRET_PROVIDERS, SECRET_PROVIDERS,
STORAGE_PROVIDERS,
HEARTBEAT_INVOCATION_SOURCES, HEARTBEAT_INVOCATION_SOURCES,
HEARTBEAT_RUN_STATUSES, HEARTBEAT_RUN_STATUSES,
WAKEUP_TRIGGER_DETAILS, WAKEUP_TRIGGER_DETAILS,
@@ -28,6 +29,7 @@ export {
type ApprovalType, type ApprovalType,
type ApprovalStatus, type ApprovalStatus,
type SecretProvider, type SecretProvider,
type StorageProvider,
type HeartbeatInvocationSource, type HeartbeatInvocationSource,
type HeartbeatRunStatus, type HeartbeatRunStatus,
type WakeupTriggerDetail, type WakeupTriggerDetail,
@@ -44,6 +46,7 @@ export type {
Project, Project,
Issue, Issue,
IssueComment, IssueComment,
IssueAttachment,
Goal, Goal,
Approval, Approval,
ApprovalComment, ApprovalComment,
@@ -94,11 +97,13 @@ export {
checkoutIssueSchema, checkoutIssueSchema,
addIssueCommentSchema, addIssueCommentSchema,
linkIssueApprovalSchema, linkIssueApprovalSchema,
createIssueAttachmentMetadataSchema,
type CreateIssue, type CreateIssue,
type UpdateIssue, type UpdateIssue,
type CheckoutIssue, type CheckoutIssue,
type AddIssueComment, type AddIssueComment,
type LinkIssueApproval, type LinkIssueApproval,
type CreateIssueAttachmentMetadata,
createGoalSchema, createGoalSchema,
updateGoalSchema, updateGoalSchema,
type CreateGoal, type CreateGoal,
@@ -139,12 +144,18 @@ export {
loggingConfigSchema, loggingConfigSchema,
serverConfigSchema, serverConfigSchema,
secretsConfigSchema, secretsConfigSchema,
storageConfigSchema,
storageLocalDiskConfigSchema,
storageS3ConfigSchema,
secretsLocalEncryptedConfigSchema, secretsLocalEncryptedConfigSchema,
type PaperclipConfig, type PaperclipConfig,
type LlmConfig, type LlmConfig,
type DatabaseConfig, type DatabaseConfig,
type LoggingConfig, type LoggingConfig,
type ServerConfig, type ServerConfig,
type StorageConfig,
type StorageLocalDiskConfig,
type StorageS3Config,
type SecretsConfig, type SecretsConfig,
type SecretsLocalEncryptedConfig, type SecretsLocalEncryptedConfig,
type ConfigMeta, type ConfigMeta,

View File

@@ -1,7 +1,14 @@
export type { Company } from "./company.js"; export type { Company } from "./company.js";
export type { Agent, AgentPermissions, AgentKeyCreated, AgentConfigRevision } from "./agent.js"; export type { Agent, AgentPermissions, AgentKeyCreated, AgentConfigRevision } from "./agent.js";
export type { Project } from "./project.js"; export type { Project } from "./project.js";
export type { Issue, IssueComment, IssueAncestor } from "./issue.js"; export type {
Issue,
IssueComment,
IssueAncestor,
IssueAncestorProject,
IssueAncestorGoal,
IssueAttachment,
} from "./issue.js";
export type { Goal } from "./goal.js"; export type { Goal } from "./goal.js";
export type { Approval, ApprovalComment } from "./approval.js"; export type { Approval, ApprovalComment } from "./approval.js";
export type { export type {

View File

@@ -1,5 +1,21 @@
import type { IssuePriority, IssueStatus } from "../constants.js"; import type { IssuePriority, IssueStatus } from "../constants.js";
// Minimal projection of a project row used to enrich issue ancestors.
export interface IssueAncestorProject {
  id: string;
  name: string;
  description: string | null;
  status: string;
  goalId: string | null;
}

// Minimal projection of a goal row used to enrich issue ancestors.
export interface IssueAncestorGoal {
  id: string;
  title: string;
  description: string | null;
  level: string;
  status: string;
}
export interface IssueAncestor { export interface IssueAncestor {
id: string; id: string;
title: string; title: string;
@@ -9,6 +25,8 @@ export interface IssueAncestor {
assigneeAgentId: string | null; assigneeAgentId: string | null;
projectId: string | null; projectId: string | null;
goalId: string | null; goalId: string | null;
project: IssueAncestorProject | null;
goal: IssueAncestorGoal | null;
} }
export interface Issue { export interface Issue {
@@ -47,3 +65,22 @@ export interface IssueComment {
createdAt: Date; createdAt: Date;
updatedAt: Date; updatedAt: Date;
} }
// Issue attachment joined with its backing asset row, as served by the API.
export interface IssueAttachment {
  id: string;
  companyId: string;
  issueId: string;
  // Optional link to the comment the file was attached under.
  issueCommentId: string | null;
  assetId: string;
  // Storage backend holding the bytes ("local_disk" or "s3").
  provider: string;
  objectKey: string;
  contentType: string;
  byteSize: number;
  sha256: string;
  originalFilename: string | null;
  createdByAgentId: string | null;
  createdByUserId: string | null;
  createdAt: Date;
  updatedAt: Date;
  // Server-relative download URL ("/api/attachments/<id>/content"),
  // added by the API layer rather than stored in the database.
  contentPath: string;
}

View File

@@ -1,4 +1,5 @@
export interface SidebarBadges { export interface SidebarBadges {
inbox: number; inbox: number;
approvals: number; approvals: number;
failedRuns: number;
} }

View File

@@ -36,11 +36,13 @@ export {
checkoutIssueSchema, checkoutIssueSchema,
addIssueCommentSchema, addIssueCommentSchema,
linkIssueApprovalSchema, linkIssueApprovalSchema,
createIssueAttachmentMetadataSchema,
type CreateIssue, type CreateIssue,
type UpdateIssue, type UpdateIssue,
type CheckoutIssue, type CheckoutIssue,
type AddIssueComment, type AddIssueComment,
type LinkIssueApproval, type LinkIssueApproval,
type CreateIssueAttachmentMetadata,
} from "./issue.js"; } from "./issue.js";
export { export {

View File

@@ -42,3 +42,9 @@ export const linkIssueApprovalSchema = z.object({
}); });
export type LinkIssueApproval = z.infer<typeof linkIssueApprovalSchema>; export type LinkIssueApproval = z.infer<typeof linkIssueApprovalSchema>;
// Extra multipart form fields accepted alongside the uploaded file.
export const createIssueAttachmentMetadataSchema = z.object({
  // Optional comment to associate the attachment with; the service layer
  // verifies the comment belongs to the same issue and company.
  issueCommentId: z.string().uuid().optional().nullable(),
});
export type CreateIssueAttachmentMetadata = z.infer<typeof createIssueAttachmentMetadataSchema>;

View File

@@ -16,10 +16,12 @@
"@paperclip/adapter-utils": "workspace:*", "@paperclip/adapter-utils": "workspace:*",
"@paperclip/db": "workspace:*", "@paperclip/db": "workspace:*",
"@paperclip/shared": "workspace:*", "@paperclip/shared": "workspace:*",
"@aws-sdk/client-s3": "^3.888.0",
"detect-port": "^2.1.0", "detect-port": "^2.1.0",
"dotenv": "^17.0.1", "dotenv": "^17.0.1",
"drizzle-orm": "^0.38.4", "drizzle-orm": "^0.38.4",
"express": "^5.1.0", "express": "^5.1.0",
"multer": "^2.0.2",
"pino": "^9.6.0", "pino": "^9.6.0",
"pino-http": "^10.4.0", "pino-http": "^10.4.0",
"pino-pretty": "^13.1.3", "pino-pretty": "^13.1.3",
@@ -32,6 +34,7 @@
"devDependencies": { "devDependencies": {
"@types/express": "^5.0.0", "@types/express": "^5.0.0",
"@types/express-serve-static-core": "^5.0.0", "@types/express-serve-static-core": "^5.0.0",
"@types/multer": "^2.0.0",
"@types/supertest": "^6.0.2", "@types/supertest": "^6.0.2",
"supertest": "^7.0.0", "supertest": "^7.0.0",
"tsx": "^4.19.2", "tsx": "^4.19.2",

View File

@@ -3,6 +3,7 @@ import path from "node:path";
import fs from "node:fs"; import fs from "node:fs";
import { fileURLToPath } from "node:url"; import { fileURLToPath } from "node:url";
import type { Db } from "@paperclip/db"; import type { Db } from "@paperclip/db";
import type { StorageService } from "./storage/types.js";
import { httpLogger, errorHandler } from "./middleware/index.js"; import { httpLogger, errorHandler } from "./middleware/index.js";
import { actorMiddleware } from "./middleware/auth.js"; import { actorMiddleware } from "./middleware/auth.js";
import { healthRoutes } from "./routes/health.js"; import { healthRoutes } from "./routes/health.js";
@@ -21,7 +22,7 @@ import { llmRoutes } from "./routes/llms.js";
type UiMode = "none" | "static" | "vite-dev"; type UiMode = "none" | "static" | "vite-dev";
export async function createApp(db: Db, opts: { uiMode: UiMode }) { export async function createApp(db: Db, opts: { uiMode: UiMode; storageService: StorageService }) {
const app = express(); const app = express();
app.use(express.json()); app.use(express.json());
@@ -35,7 +36,7 @@ export async function createApp(db: Db, opts: { uiMode: UiMode }) {
api.use("/companies", companyRoutes(db)); api.use("/companies", companyRoutes(db));
api.use(agentRoutes(db)); api.use(agentRoutes(db));
api.use(projectRoutes(db)); api.use(projectRoutes(db));
api.use(issueRoutes(db)); api.use(issueRoutes(db, opts.storageService));
api.use(goalRoutes(db)); api.use(goalRoutes(db));
api.use(approvalRoutes(db)); api.use(approvalRoutes(db));
api.use(secretRoutes(db)); api.use(secretRoutes(db));

View File

@@ -2,10 +2,11 @@ import { readConfigFile } from "./config-file.js";
import { existsSync } from "node:fs"; import { existsSync } from "node:fs";
import { config as loadDotenv } from "dotenv"; import { config as loadDotenv } from "dotenv";
import { resolvePaperclipEnvPath } from "./paths.js"; import { resolvePaperclipEnvPath } from "./paths.js";
import { SECRET_PROVIDERS, type SecretProvider } from "@paperclip/shared"; import { SECRET_PROVIDERS, STORAGE_PROVIDERS, type SecretProvider, type StorageProvider } from "@paperclip/shared";
import { import {
resolveDefaultEmbeddedPostgresDir, resolveDefaultEmbeddedPostgresDir,
resolveDefaultSecretsKeyFilePath, resolveDefaultSecretsKeyFilePath,
resolveDefaultStorageDir,
resolveHomeAwarePath, resolveHomeAwarePath,
} from "./home-paths.js"; } from "./home-paths.js";
@@ -27,6 +28,13 @@ export interface Config {
secretsProvider: SecretProvider; secretsProvider: SecretProvider;
secretsStrictMode: boolean; secretsStrictMode: boolean;
secretsMasterKeyFilePath: string; secretsMasterKeyFilePath: string;
storageProvider: StorageProvider;
storageLocalDiskBaseDir: string;
storageS3Bucket: string;
storageS3Region: string;
storageS3Endpoint: string | undefined;
storageS3Prefix: string;
storageS3ForcePathStyle: boolean;
heartbeatSchedulerEnabled: boolean; heartbeatSchedulerEnabled: boolean;
heartbeatSchedulerIntervalMs: number; heartbeatSchedulerIntervalMs: number;
} }
@@ -41,6 +49,7 @@ export function loadConfig(): Config {
? fileConfig?.database.connectionString ? fileConfig?.database.connectionString
: undefined; : undefined;
const fileSecrets = fileConfig?.secrets; const fileSecrets = fileConfig?.secrets;
const fileStorage = fileConfig?.storage;
const strictModeFromEnv = process.env.PAPERCLIP_SECRETS_STRICT_MODE; const strictModeFromEnv = process.env.PAPERCLIP_SECRETS_STRICT_MODE;
const secretsStrictMode = const secretsStrictMode =
strictModeFromEnv !== undefined strictModeFromEnv !== undefined
@@ -55,6 +64,26 @@ export function loadConfig(): Config {
const providerFromFile = fileSecrets?.provider; const providerFromFile = fileSecrets?.provider;
const secretsProvider: SecretProvider = providerFromEnv ?? providerFromFile ?? "local_encrypted"; const secretsProvider: SecretProvider = providerFromEnv ?? providerFromFile ?? "local_encrypted";
// Storage provider resolution: env var wins, then config file, then local disk.
// An env value that is not a known provider is silently ignored (treated as unset).
const storageProviderFromEnvRaw = process.env.PAPERCLIP_STORAGE_PROVIDER;
const storageProviderFromEnv =
  storageProviderFromEnvRaw && STORAGE_PROVIDERS.includes(storageProviderFromEnvRaw as StorageProvider)
    ? (storageProviderFromEnvRaw as StorageProvider)
    : null;
const storageProvider: StorageProvider = storageProviderFromEnv ?? fileStorage?.provider ?? "local_disk";
// Local-disk base directory; a leading "~" is expanded to the user's home.
const storageLocalDiskBaseDir = resolveHomeAwarePath(
  process.env.PAPERCLIP_STORAGE_LOCAL_DIR ??
    fileStorage?.localDisk?.baseDir ??
    resolveDefaultStorageDir(),
);
// S3 settings, each individually overridable via environment variables.
const storageS3Bucket = process.env.PAPERCLIP_STORAGE_S3_BUCKET ?? fileStorage?.s3?.bucket ?? "paperclip";
const storageS3Region = process.env.PAPERCLIP_STORAGE_S3_REGION ?? fileStorage?.s3?.region ?? "us-east-1";
const storageS3Endpoint = process.env.PAPERCLIP_STORAGE_S3_ENDPOINT ?? fileStorage?.s3?.endpoint ?? undefined;
const storageS3Prefix = process.env.PAPERCLIP_STORAGE_S3_PREFIX ?? fileStorage?.s3?.prefix ?? "";
// Boolean env parsing: only the literal string "true" enables path-style URLs;
// any other set value disables it, and unset falls back to the config file.
const storageS3ForcePathStyle =
  process.env.PAPERCLIP_STORAGE_S3_FORCE_PATH_STYLE !== undefined
    ? process.env.PAPERCLIP_STORAGE_S3_FORCE_PATH_STYLE === "true"
    : (fileStorage?.s3?.forcePathStyle ?? false);
return { return {
port: Number(process.env.PORT) || fileConfig?.server.port || 3100, port: Number(process.env.PORT) || fileConfig?.server.port || 3100,
databaseMode: fileDatabaseMode, databaseMode: fileDatabaseMode,
@@ -76,6 +105,13 @@ export function loadConfig(): Config {
fileSecrets?.localEncrypted.keyFilePath ?? fileSecrets?.localEncrypted.keyFilePath ??
resolveDefaultSecretsKeyFilePath(), resolveDefaultSecretsKeyFilePath(),
), ),
storageProvider,
storageLocalDiskBaseDir,
storageS3Bucket,
storageS3Region,
storageS3Endpoint,
storageS3Prefix,
storageS3ForcePathStyle,
heartbeatSchedulerEnabled: process.env.HEARTBEAT_SCHEDULER_ENABLED !== "false", heartbeatSchedulerEnabled: process.env.HEARTBEAT_SCHEDULER_ENABLED !== "false",
heartbeatSchedulerIntervalMs: Math.max(10000, Number(process.env.HEARTBEAT_SCHEDULER_INTERVAL_MS) || 30000), heartbeatSchedulerIntervalMs: Math.max(10000, Number(process.env.HEARTBEAT_SCHEDULER_INTERVAL_MS) || 30000),
}; };

View File

@@ -44,6 +44,10 @@ export function resolveDefaultSecretsKeyFilePath(): string {
return path.resolve(resolvePaperclipInstanceRoot(), "secrets", "master.key"); return path.resolve(resolvePaperclipInstanceRoot(), "secrets", "master.key");
} }
// Default on-disk location for uploaded files: <instance root>/data/storage.
export function resolveDefaultStorageDir(): string {
  const instanceRoot = resolvePaperclipInstanceRoot();
  return path.resolve(instanceRoot, "data", "storage");
}
export function resolveHomeAwarePath(value: string): string { export function resolveHomeAwarePath(value: string): string {
return path.resolve(expandHomePrefix(value)); return path.resolve(expandHomePrefix(value));
} }

View File

@@ -16,6 +16,7 @@ import { loadConfig } from "./config.js";
import { logger } from "./middleware/logger.js"; import { logger } from "./middleware/logger.js";
import { setupLiveEventsWebSocketServer } from "./realtime/live-events-ws.js"; import { setupLiveEventsWebSocketServer } from "./realtime/live-events-ws.js";
import { heartbeatService } from "./services/index.js"; import { heartbeatService } from "./services/index.js";
import { createStorageServiceFromConfig } from "./storage/index.js";
import { printStartupBanner } from "./startup-banner.js"; import { printStartupBanner } from "./startup-banner.js";
type EmbeddedPostgresInstance = { type EmbeddedPostgresInstance = {
@@ -217,7 +218,8 @@ if (config.databaseUrl) {
} }
const uiMode = config.uiDevMiddleware ? "vite-dev" : config.serveUi ? "static" : "none"; const uiMode = config.uiDevMiddleware ? "vite-dev" : config.serveUi ? "static" : "none";
const app = await createApp(db as any, { uiMode }); const storageService = createStorageServiceFromConfig(config);
const app = await createApp(db as any, { uiMode, storageService });
const server = createServer(app); const server = createServer(app);
const listenPort = await detectPort(config.port); const listenPort = await detectPort(config.port);

View File

@@ -1,29 +1,65 @@
import { Router, type Request, type Response } from "express"; import { Router, type Request, type Response } from "express";
import multer from "multer";
import type { Db } from "@paperclip/db"; import type { Db } from "@paperclip/db";
import { import {
addIssueCommentSchema, addIssueCommentSchema,
createIssueAttachmentMetadataSchema,
checkoutIssueSchema, checkoutIssueSchema,
createIssueSchema, createIssueSchema,
linkIssueApprovalSchema, linkIssueApprovalSchema,
updateIssueSchema, updateIssueSchema,
} from "@paperclip/shared"; } from "@paperclip/shared";
import type { StorageService } from "../storage/types.js";
import { validate } from "../middleware/validate.js"; import { validate } from "../middleware/validate.js";
import { import {
agentService, agentService,
goalService,
heartbeatService, heartbeatService,
issueApprovalService, issueApprovalService,
issueService, issueService,
logActivity, logActivity,
projectService,
} from "../services/index.js"; } from "../services/index.js";
import { logger } from "../middleware/logger.js"; import { logger } from "../middleware/logger.js";
import { assertCompanyAccess, getActorInfo } from "./authz.js"; import { assertCompanyAccess, getActorInfo } from "./authz.js";
export function issueRoutes(db: Db) { const MAX_ATTACHMENT_BYTES = Number(process.env.PAPERCLIP_ATTACHMENT_MAX_BYTES) || 10 * 1024 * 1024;
// Only image uploads are accepted. "image/jpg" is not a registered MIME type
// but is listed alongside "image/jpeg" — presumably to tolerate clients that
// send it; confirm before removing.
const ALLOWED_ATTACHMENT_CONTENT_TYPES = new Set([
  "image/png",
  "image/jpeg",
  "image/jpg",
  "image/webp",
  "image/gif",
]);
export function issueRoutes(db: Db, storage: StorageService) {
const router = Router(); const router = Router();
const svc = issueService(db); const svc = issueService(db);
const heartbeat = heartbeatService(db); const heartbeat = heartbeatService(db);
const agentsSvc = agentService(db); const agentsSvc = agentService(db);
const projectsSvc = projectService(db);
const goalsSvc = goalService(db);
const issueApprovalsSvc = issueApprovalService(db); const issueApprovalsSvc = issueApprovalService(db);
// Buffer each upload in memory; multer enforces the size cap and a
// single file per request before the route handler runs.
const upload = multer({
  storage: multer.memoryStorage(),
  limits: { fileSize: MAX_ATTACHMENT_BYTES, files: 1 },
});
// Attach the server-relative download URL to an attachment record before
// serializing it; the record itself is not mutated.
function withContentPath<T extends { id: string }>(attachment: T) {
  const contentPath = `/api/attachments/${attachment.id}/content`;
  return { ...attachment, contentPath };
}
// Promisified wrapper around multer's single-file middleware so routes can
// use try/catch instead of a callback. Rejects with the multer error (e.g.
// MulterError for size-limit violations); resolves once parsing finishes.
function runSingleFileUpload(req: Request, res: Response): Promise<void> {
  return new Promise((resolve, reject) => {
    upload.single("file")(req, res, (err: unknown) => (err ? reject(err) : resolve()));
  });
}
async function assertCanManageIssueApprovalLinks(req: Request, res: Response, companyId: string) { async function assertCanManageIssueApprovalLinks(req: Request, res: Response, companyId: string) {
assertCompanyAccess(req, companyId); assertCompanyAccess(req, companyId);
@@ -62,8 +98,12 @@ export function issueRoutes(db: Db) {
return; return;
} }
assertCompanyAccess(req, issue.companyId); assertCompanyAccess(req, issue.companyId);
const ancestors = await svc.getAncestors(issue.id); const [ancestors, project, goal] = await Promise.all([
res.json({ ...issue, ancestors }); svc.getAncestors(issue.id),
issue.projectId ? projectsSvc.getById(issue.projectId) : null,
issue.goalId ? goalsSvc.getById(issue.goalId) : null,
]);
res.json({ ...issue, ancestors, project: project ?? null, goal: goal ?? null });
}); });
router.get("/issues/:id/approvals", async (req, res) => { router.get("/issues/:id/approvals", async (req, res) => {
@@ -254,20 +294,17 @@ export function issueRoutes(db: Db) {
const assigneeChanged = const assigneeChanged =
req.body.assigneeAgentId !== undefined && req.body.assigneeAgentId !== existing.assigneeAgentId; req.body.assigneeAgentId !== undefined && req.body.assigneeAgentId !== existing.assigneeAgentId;
const reopened =
(existing.status === "done" || existing.status === "cancelled") &&
issue.status !== "done" && issue.status !== "cancelled";
if ((assigneeChanged || reopened) && issue.assigneeAgentId) { if (assigneeChanged && issue.assigneeAgentId) {
void heartbeat void heartbeat
.wakeup(issue.assigneeAgentId, { .wakeup(issue.assigneeAgentId, {
source: reopened ? "automation" : "assignment", source: "assignment",
triggerDetail: "system", triggerDetail: "system",
reason: reopened ? "issue_reopened" : "issue_assigned", reason: "issue_assigned",
payload: { issueId: issue.id, mutation: "update" }, payload: { issueId: issue.id, mutation: "update" },
requestedByActorType: actor.actorType, requestedByActorType: actor.actorType,
requestedByActorId: actor.actorId, requestedByActorId: actor.actorId,
contextSnapshot: { issueId: issue.id, source: reopened ? "issue.reopen" : "issue.update" }, contextSnapshot: { issueId: issue.id, source: "issue.update" },
}) })
.catch((err) => logger.warn({ err, issueId: issue.id }, "failed to wake assignee on issue update")); .catch((err) => logger.warn({ err, issueId: issue.id }, "failed to wake assignee on issue update"));
} }
@@ -518,5 +555,169 @@ export function issueRoutes(db: Db) {
res.status(201).json(comment); res.status(201).json(comment);
}); });
// List an issue's attachments, newest first, each enriched with a download URL.
router.get("/issues/:id/attachments", async (req, res) => {
  const issueId = req.params.id as string;
  const issue = await svc.getById(issueId);
  if (!issue) {
    res.status(404).json({ error: "Issue not found" });
    return;
  }
  // Authorization is keyed off the issue's owning company, not a URL param.
  assertCompanyAccess(req, issue.companyId);
  const attachments = await svc.listAttachments(issueId);
  res.json(attachments.map(withContentPath));
});
// Upload a single image attachment to an issue. Access and issue existence are
// checked before the multipart body is parsed, so unauthorized or invalid
// requests never cause the file to be buffered.
router.post("/companies/:companyId/issues/:issueId/attachments", async (req, res) => {
  const companyId = req.params.companyId as string;
  const issueId = req.params.issueId as string;
  assertCompanyAccess(req, companyId);
  const issue = await svc.getById(issueId);
  if (!issue) {
    res.status(404).json({ error: "Issue not found" });
    return;
  }
  // The URL names both company and issue; reject mismatched pairs.
  if (issue.companyId !== companyId) {
    res.status(422).json({ error: "Issue does not belong to company" });
    return;
  }
  // Parse the multipart body; map multer errors to HTTP status codes
  // (422 for over-size files, 400 for other multer failures).
  try {
    await runSingleFileUpload(req, res);
  } catch (err) {
    if (err instanceof multer.MulterError) {
      if (err.code === "LIMIT_FILE_SIZE") {
        res.status(422).json({ error: `Attachment exceeds ${MAX_ATTACHMENT_BYTES} bytes` });
        return;
      }
      res.status(400).json({ error: err.message });
      return;
    }
    throw err;
  }
  const file = (req as Request & { file?: { mimetype: string; buffer: Buffer; originalname: string } }).file;
  if (!file) {
    res.status(400).json({ error: "Missing file field 'file'" });
    return;
  }
  // Content-type allowlist check is case-insensitive.
  const contentType = (file.mimetype || "").toLowerCase();
  if (!ALLOWED_ATTACHMENT_CONTENT_TYPES.has(contentType)) {
    res.status(422).json({ error: `Unsupported attachment type: ${contentType || "unknown"}` });
    return;
  }
  if (file.buffer.length <= 0) {
    res.status(422).json({ error: "Attachment is empty" });
    return;
  }
  // Remaining form fields (e.g. issueCommentId) are validated via zod.
  const parsedMeta = createIssueAttachmentMetadataSchema.safeParse(req.body ?? {});
  if (!parsedMeta.success) {
    res.status(400).json({ error: "Invalid attachment metadata", details: parsedMeta.error.issues });
    return;
  }
  const actor = getActorInfo(req);
  // Write the bytes to the storage backend first, then record the DB rows.
  // NOTE(review): if createAttachment fails below, the stored object is not
  // cleaned up — confirm whether orphaned objects are acceptable.
  const stored = await storage.putFile({
    companyId,
    namespace: `issues/${issueId}`,
    originalFilename: file.originalname || null,
    contentType,
    body: file.buffer,
  });
  const attachment = await svc.createAttachment({
    issueId,
    issueCommentId: parsedMeta.data.issueCommentId ?? null,
    provider: stored.provider,
    objectKey: stored.objectKey,
    contentType: stored.contentType,
    byteSize: stored.byteSize,
    sha256: stored.sha256,
    originalFilename: stored.originalFilename,
    createdByAgentId: actor.agentId,
    createdByUserId: actor.actorType === "user" ? actor.actorId : null,
  });
  await logActivity(db, {
    companyId,
    actorType: actor.actorType,
    actorId: actor.actorId,
    agentId: actor.agentId,
    runId: actor.runId,
    action: "issue.attachment_added",
    entityType: "issue",
    entityId: issueId,
    details: {
      attachmentId: attachment.id,
      originalFilename: attachment.originalFilename,
      contentType: attachment.contentType,
      byteSize: attachment.byteSize,
    },
  });
  res.status(201).json(withContentPath(attachment));
});
// Stream an attachment's bytes. Access is checked against the owning company.
router.get("/attachments/:attachmentId/content", async (req, res, next) => {
  const attachmentId = req.params.attachmentId as string;
  const attachment = await svc.getAttachmentById(attachmentId);
  if (!attachment) {
    res.status(404).json({ error: "Attachment not found" });
    return;
  }
  assertCompanyAccess(req, attachment.companyId);
  const object = await storage.getObject(attachment.companyId, attachment.objectKey);
  // Prefer DB metadata for headers; fall back to what the backend reports.
  res.setHeader("Content-Type", attachment.contentType || object.contentType || "application/octet-stream");
  res.setHeader("Content-Length", String(attachment.byteSize || object.contentLength || 0));
  res.setHeader("Cache-Control", "private, max-age=60");
  const filename = attachment.originalFilename ?? "attachment";
  // Double quotes are stripped so the quoted filename cannot break the header.
  // NOTE(review): other control characters are not sanitized here — confirm
  // upstream filename handling before trusting this fully.
  res.setHeader("Content-Disposition", `inline; filename=\"${filename.replaceAll("\"", "")}\"`);
  // Forward stream failures to the express error handler instead of hanging
  // the response mid-pipe.
  object.stream.on("error", (err) => {
    next(err);
  });
  object.stream.pipe(res);
});
// Delete an attachment: best-effort removal from storage, then the DB rows.
router.delete("/attachments/:attachmentId", async (req, res) => {
  const attachmentId = req.params.attachmentId as string;
  const attachment = await svc.getAttachmentById(attachmentId);
  if (!attachment) {
    res.status(404).json({ error: "Attachment not found" });
    return;
  }
  assertCompanyAccess(req, attachment.companyId);
  // Storage failures are logged but not fatal: the DB rows are removed anyway.
  // NOTE(review): storage is deleted before the DB rows, so a DB failure here
  // leaves a row pointing at a missing object — confirm this trade-off.
  try {
    await storage.deleteObject(attachment.companyId, attachment.objectKey);
  } catch (err) {
    logger.warn({ err, attachmentId }, "storage delete failed while removing attachment");
  }
  const removed = await svc.removeAttachment(attachmentId);
  // Can only be null if a concurrent delete raced the lookup above.
  if (!removed) {
    res.status(404).json({ error: "Attachment not found" });
    return;
  }
  const actor = getActorInfo(req);
  await logActivity(db, {
    companyId: removed.companyId,
    actorType: actor.actorType,
    actorId: actor.actorId,
    agentId: actor.agentId,
    runId: actor.runId,
    action: "issue.attachment_removed",
    entityType: "issue",
    entityId: removed.issueId,
    details: {
      attachmentId: removed.id,
    },
  });
  res.json({ ok: true });
});
return router; return router;
} }

View File

@@ -13,3 +13,4 @@ export { dashboardService } from "./dashboard.js";
export { sidebarBadgeService } from "./sidebar-badges.js"; export { sidebarBadgeService } from "./sidebar-badges.js";
export { logActivity, type LogActivityInput } from "./activity-log.js"; export { logActivity, type LogActivityInput } from "./activity-log.js";
export { publishLiveEvent, subscribeCompanyLiveEvents } from "./live-events.js"; export { publishLiveEvent, subscribeCompanyLiveEvents } from "./live-events.js";
export { createStorageServiceFromConfig, getStorageService } from "../storage/index.js";

View File

@@ -1,6 +1,15 @@
import { and, asc, desc, eq, inArray, isNull, or, sql } from "drizzle-orm"; import { and, asc, desc, eq, inArray, isNull, or, sql } from "drizzle-orm";
import type { Db } from "@paperclip/db"; import type { Db } from "@paperclip/db";
import { agents, companies, issues, issueComments } from "@paperclip/db"; import {
agents,
assets,
companies,
goals,
issueAttachments,
issueComments,
issues,
projects,
} from "@paperclip/db";
import { conflict, notFound, unprocessable } from "../errors.js"; import { conflict, notFound, unprocessable } from "../errors.js";
const ALL_ISSUE_STATUSES = ["backlog", "todo", "in_progress", "in_review", "blocked", "done", "cancelled"]; const ALL_ISSUE_STATUSES = ["backlog", "todo", "in_progress", "in_review", "blocked", "done", "cancelled"];
@@ -162,11 +171,26 @@ export function issueService(db: Db) {
}, },
remove: (id: string) => remove: (id: string) =>
db db.transaction(async (tx) => {
.delete(issues) const attachmentAssetIds = await tx
.where(eq(issues.id, id)) .select({ assetId: issueAttachments.assetId })
.returning() .from(issueAttachments)
.then((rows) => rows[0] ?? null), .where(eq(issueAttachments.issueId, id));
const removedIssue = await tx
.delete(issues)
.where(eq(issues.id, id))
.returning()
.then((rows) => rows[0] ?? null);
if (removedIssue && attachmentAssetIds.length > 0) {
await tx
.delete(assets)
.where(inArray(assets.id, attachmentAssetIds.map((row) => row.assetId)));
}
return removedIssue;
}),
checkout: async (id: string, agentId: string, expectedStatuses: string[]) => { checkout: async (id: string, agentId: string, expectedStatuses: string[]) => {
const issueCompany = await db const issueCompany = await db
@@ -275,6 +299,162 @@ export function issueService(db: Db) {
.then((rows) => rows[0]); .then((rows) => rows[0]);
}, },
// Record an uploaded attachment: inserts the backing asset row and the
// issue-attachment link in one transaction, then returns the joined shape the
// API serves. Throws notFound/unprocessable for invalid issue/comment refs.
createAttachment: async (input: {
  issueId: string;
  issueCommentId?: string | null;
  provider: string;
  objectKey: string;
  contentType: string;
  byteSize: number;
  sha256: string;
  originalFilename?: string | null;
  createdByAgentId?: string | null;
  createdByUserId?: string | null;
}) => {
  // The issue must exist; its companyId scopes both inserted rows.
  const issue = await db
    .select({ id: issues.id, companyId: issues.companyId })
    .from(issues)
    .where(eq(issues.id, input.issueId))
    .then((rows) => rows[0] ?? null);
  if (!issue) throw notFound("Issue not found");
  // When linking to a comment, require it to belong to the same issue and company.
  if (input.issueCommentId) {
    const comment = await db
      .select({ id: issueComments.id, companyId: issueComments.companyId, issueId: issueComments.issueId })
      .from(issueComments)
      .where(eq(issueComments.id, input.issueCommentId))
      .then((rows) => rows[0] ?? null);
    if (!comment) throw notFound("Issue comment not found");
    if (comment.companyId !== issue.companyId || comment.issueId !== issue.id) {
      throw unprocessable("Attachment comment must belong to same issue and company");
    }
  }
  return db.transaction(async (tx) => {
    // Asset row holds the storage metadata (provider, key, hash, size).
    const [asset] = await tx
      .insert(assets)
      .values({
        companyId: issue.companyId,
        provider: input.provider,
        objectKey: input.objectKey,
        contentType: input.contentType,
        byteSize: input.byteSize,
        sha256: input.sha256,
        originalFilename: input.originalFilename ?? null,
        createdByAgentId: input.createdByAgentId ?? null,
        createdByUserId: input.createdByUserId ?? null,
      })
      .returning();
    // Link row ties the asset to the issue (and optionally a comment).
    const [attachment] = await tx
      .insert(issueAttachments)
      .values({
        companyId: issue.companyId,
        issueId: issue.id,
        assetId: asset.id,
        issueCommentId: input.issueCommentId ?? null,
      })
      .returning();
    // Flatten both rows into the joined shape the routes expose.
    return {
      id: attachment.id,
      companyId: attachment.companyId,
      issueId: attachment.issueId,
      issueCommentId: attachment.issueCommentId,
      assetId: attachment.assetId,
      provider: asset.provider,
      objectKey: asset.objectKey,
      contentType: asset.contentType,
      byteSize: asset.byteSize,
      sha256: asset.sha256,
      originalFilename: asset.originalFilename,
      createdByAgentId: asset.createdByAgentId,
      createdByUserId: asset.createdByUserId,
      createdAt: attachment.createdAt,
      updatedAt: attachment.updatedAt,
    };
  });
},
// All attachments for an issue, newest first, joined with asset metadata.
listAttachments: async (issueId: string) =>
  db
    .select({
      id: issueAttachments.id,
      companyId: issueAttachments.companyId,
      issueId: issueAttachments.issueId,
      issueCommentId: issueAttachments.issueCommentId,
      assetId: issueAttachments.assetId,
      provider: assets.provider,
      objectKey: assets.objectKey,
      contentType: assets.contentType,
      byteSize: assets.byteSize,
      sha256: assets.sha256,
      originalFilename: assets.originalFilename,
      createdByAgentId: assets.createdByAgentId,
      createdByUserId: assets.createdByUserId,
      createdAt: issueAttachments.createdAt,
      updatedAt: issueAttachments.updatedAt,
    })
    .from(issueAttachments)
    .innerJoin(assets, eq(issueAttachments.assetId, assets.id))
    .where(eq(issueAttachments.issueId, issueId))
    .orderBy(desc(issueAttachments.createdAt)),
// Single attachment by id, joined with asset metadata; null when absent.
getAttachmentById: async (id: string) =>
  db
    .select({
      id: issueAttachments.id,
      companyId: issueAttachments.companyId,
      issueId: issueAttachments.issueId,
      issueCommentId: issueAttachments.issueCommentId,
      assetId: issueAttachments.assetId,
      provider: assets.provider,
      objectKey: assets.objectKey,
      contentType: assets.contentType,
      byteSize: assets.byteSize,
      sha256: assets.sha256,
      originalFilename: assets.originalFilename,
      createdByAgentId: assets.createdByAgentId,
      createdByUserId: assets.createdByUserId,
      createdAt: issueAttachments.createdAt,
      updatedAt: issueAttachments.updatedAt,
    })
    .from(issueAttachments)
    .innerJoin(assets, eq(issueAttachments.assetId, assets.id))
    .where(eq(issueAttachments.id, id))
    .then((rows) => rows[0] ?? null),
// Delete an attachment and its asset row in one transaction; returns the
// joined record that was removed, or null when it did not exist.
removeAttachment: async (id: string) =>
  db.transaction(async (tx) => {
    // Capture the joined shape before deleting so the caller can log it.
    const existing = await tx
      .select({
        id: issueAttachments.id,
        companyId: issueAttachments.companyId,
        issueId: issueAttachments.issueId,
        issueCommentId: issueAttachments.issueCommentId,
        assetId: issueAttachments.assetId,
        provider: assets.provider,
        objectKey: assets.objectKey,
        contentType: assets.contentType,
        byteSize: assets.byteSize,
        sha256: assets.sha256,
        originalFilename: assets.originalFilename,
        createdByAgentId: assets.createdByAgentId,
        createdByUserId: assets.createdByUserId,
        createdAt: issueAttachments.createdAt,
        updatedAt: issueAttachments.updatedAt,
      })
      .from(issueAttachments)
      .innerJoin(assets, eq(issueAttachments.assetId, assets.id))
      .where(eq(issueAttachments.id, id))
      .then((rows) => rows[0] ?? null);
    if (!existing) return null;
    // Delete the link row first, then the asset it pointed at.
    await tx.delete(issueAttachments).where(eq(issueAttachments.id, id));
    await tx.delete(assets).where(eq(assets.id, existing.assetId));
    return existing;
  }),
findMentionedAgents: async (companyId: string, body: string) => { findMentionedAgents: async (companyId: string, body: string) => {
const re = /\B@([^\s@,!?.]+)/g; const re = /\B@([^\s@,!?.]+)/g;
const tokens = new Set<string>(); const tokens = new Set<string>();
@@ -287,7 +467,7 @@ export function issueService(db: Db) {
}, },
getAncestors: async (issueId: string) => { getAncestors: async (issueId: string) => {
const ancestors: Array<{ const raw: Array<{
id: string; title: string; description: string | null; id: string; title: string; description: string | null;
status: string; priority: string; status: string; priority: string;
assigneeAgentId: string | null; projectId: string | null; goalId: string | null; assigneeAgentId: string | null; projectId: string | null; goalId: string | null;
@@ -295,7 +475,7 @@ export function issueService(db: Db) {
const visited = new Set<string>([issueId]); const visited = new Set<string>([issueId]);
const start = await db.select().from(issues).where(eq(issues.id, issueId)).then(r => r[0] ?? null); const start = await db.select().from(issues).where(eq(issues.id, issueId)).then(r => r[0] ?? null);
let currentId = start?.parentId ?? null; let currentId = start?.parentId ?? null;
while (currentId && !visited.has(currentId) && ancestors.length < 50) { while (currentId && !visited.has(currentId) && raw.length < 50) {
visited.add(currentId); visited.add(currentId);
const parent = await db.select({ const parent = await db.select({
id: issues.id, title: issues.title, description: issues.description, id: issues.id, title: issues.title, description: issues.description,
@@ -304,7 +484,7 @@ export function issueService(db: Db) {
goalId: issues.goalId, parentId: issues.parentId, goalId: issues.goalId, parentId: issues.parentId,
}).from(issues).where(eq(issues.id, currentId)).then(r => r[0] ?? null); }).from(issues).where(eq(issues.id, currentId)).then(r => r[0] ?? null);
if (!parent) break; if (!parent) break;
ancestors.push({ raw.push({
id: parent.id, title: parent.title, description: parent.description ?? null, id: parent.id, title: parent.title, description: parent.description ?? null,
status: parent.status, priority: parent.priority, status: parent.status, priority: parent.priority,
assigneeAgentId: parent.assigneeAgentId ?? null, assigneeAgentId: parent.assigneeAgentId ?? null,
@@ -312,7 +492,39 @@ export function issueService(db: Db) {
}); });
currentId = parent.parentId ?? null; currentId = parent.parentId ?? null;
} }
return ancestors;
// Batch-fetch referenced projects and goals
const projectIds = [...new Set(raw.map(a => a.projectId).filter((id): id is string => id != null))];
const goalIds = [...new Set(raw.map(a => a.goalId).filter((id): id is string => id != null))];
const projectMap = new Map<string, { id: string; name: string; description: string | null; status: string; goalId: string | null }>();
const goalMap = new Map<string, { id: string; title: string; description: string | null; level: string; status: string }>();
if (projectIds.length > 0) {
const rows = await db.select({
id: projects.id, name: projects.name, description: projects.description,
status: projects.status, goalId: projects.goalId,
}).from(projects).where(inArray(projects.id, projectIds));
for (const r of rows) {
projectMap.set(r.id, r);
// Also collect goalIds from projects
if (r.goalId && !goalIds.includes(r.goalId)) goalIds.push(r.goalId);
}
}
if (goalIds.length > 0) {
const rows = await db.select({
id: goals.id, title: goals.title, description: goals.description,
level: goals.level, status: goals.status,
}).from(goals).where(inArray(goals.id, goalIds));
for (const r of rows) goalMap.set(r.id, r);
}
return raw.map(a => ({
...a,
project: a.projectId ? projectMap.get(a.projectId) ?? null : null,
goal: a.goalId ? goalMap.get(a.goalId) ?? null : null,
}));
}, },
staleCount: async (companyId: string, minutes = 60) => { staleCount: async (companyId: string, minutes = 60) => {

View File

@@ -0,0 +1,35 @@
import { loadConfig, type Config } from "../config.js";
import { createStorageProviderFromConfig } from "./provider-registry.js";
import { createStorageService } from "./service.js";
import type { StorageService } from "./types.js";
let cachedStorageService: StorageService | null = null;
let cachedSignature: string | null = null;
// Serializes every storage-related config field into a stable string so
// the cached service can be invalidated when storage settings change.
function signatureForConfig(config: Config): string {
  const fingerprint = {
    provider: config.storageProvider,
    localDisk: config.storageLocalDiskBaseDir,
    s3Bucket: config.storageS3Bucket,
    s3Region: config.storageS3Region,
    s3Endpoint: config.storageS3Endpoint,
    s3Prefix: config.storageS3Prefix,
    s3ForcePathStyle: config.storageS3ForcePathStyle,
  };
  return JSON.stringify(fingerprint);
}
// Builds a fresh (uncached) storage service for the given config.
export function createStorageServiceFromConfig(config: Config): StorageService {
  const provider = createStorageProviderFromConfig(config);
  return createStorageService(provider);
}
// Returns a module-level cached storage service, rebuilding it only when
// the storage-related portion of the config has changed since last call.
export function getStorageService(): StorageService {
  const config = loadConfig();
  const signature = signatureForConfig(config);
  if (cachedStorageService && cachedSignature === signature) {
    return cachedStorageService;
  }
  // Build first, then record the signature, so a failed build does not
  // poison the cache key.
  const service = createStorageServiceFromConfig(config);
  cachedStorageService = service;
  cachedSignature = signature;
  return service;
}
export type { StorageService, PutFileResult } from "./types.js";

View File

@@ -0,0 +1,89 @@
import { createReadStream, promises as fs } from "node:fs";
import path from "node:path";
import type { StorageProvider, GetObjectResult, HeadObjectResult } from "./types.js";
import { notFound, badRequest } from "../errors.js";
// Canonicalizes an object key: backslashes become slashes, surrounding
// whitespace and empty segments are dropped. Rejects absolute keys and
// any "."/".." traversal segments.
function normalizeObjectKey(objectKey: string): string {
  const slashed = objectKey.replace(/\\/g, "/").trim();
  if (slashed.length === 0 || slashed[0] === "/") {
    throw badRequest("Invalid object key");
  }
  const segments = slashed.split("/").filter((segment) => segment !== "");
  const hasTraversal = segments.some((segment) => segment === "." || segment === "..");
  if (segments.length === 0 || hasTraversal) {
    throw badRequest("Invalid object key");
  }
  return segments.join("/");
}
function resolveWithin(baseDir: string, objectKey: string): string {
const normalizedKey = normalizeObjectKey(objectKey);
const resolved = path.resolve(baseDir, normalizedKey);
const base = path.resolve(baseDir);
if (resolved !== base && !resolved.startsWith(base + path.sep)) {
throw badRequest("Invalid object key path");
}
return resolved;
}
// Stats a path, mapping any filesystem error (missing file, permission
// denied, …) to null instead of throwing.
async function statOrNull(filePath: string) {
  return fs.stat(filePath).catch(() => null);
}
/**
 * Creates a StorageProvider that stores objects as files under baseDir.
 * Object keys map to relative paths; all paths are validated to stay
 * inside the base directory.
 */
export function createLocalDiskStorageProvider(baseDir: string): StorageProvider {
  const root = path.resolve(baseDir);
  return {
    id: "local_disk",
    // Writes atomically: the payload lands in a unique temp file first and
    // a rename makes it visible, so readers never see a partial object.
    async putObject(input) {
      const targetPath = resolveWithin(root, input.objectKey);
      await fs.mkdir(path.dirname(targetPath), { recursive: true });
      const tempPath = `${targetPath}.tmp-${Date.now()}-${Math.random().toString(36).slice(2, 8)}`;
      try {
        await fs.writeFile(tempPath, input.body);
        await fs.rename(tempPath, targetPath);
      } catch (err) {
        // Fix: previously a failed write/rename leaked the temp file.
        // Best-effort cleanup; the original error still propagates.
        await fs.unlink(tempPath).catch(() => {});
        throw err;
      }
    },
    // Opens a read stream for an existing regular file, or throws notFound.
    async getObject(input): Promise<GetObjectResult> {
      const filePath = resolveWithin(root, input.objectKey);
      const stat = await statOrNull(filePath);
      if (!stat || !stat.isFile()) {
        throw notFound("Object not found");
      }
      return {
        stream: createReadStream(filePath),
        contentLength: stat.size,
        lastModified: stat.mtime,
      };
    },
    // Metadata-only lookup; never throws for a missing object.
    async headObject(input): Promise<HeadObjectResult> {
      const filePath = resolveWithin(root, input.objectKey);
      const stat = await statOrNull(filePath);
      if (!stat || !stat.isFile()) {
        return { exists: false };
      }
      return {
        exists: true,
        contentLength: stat.size,
        lastModified: stat.mtime,
      };
    },
    // Idempotent delete: a missing file is a no-op, but real failures
    // (e.g. EACCES) now surface instead of being silently swallowed.
    async deleteObject(input): Promise<void> {
      const filePath = resolveWithin(root, input.objectKey);
      try {
        await fs.unlink(filePath);
      } catch (err) {
        if ((err as NodeJS.ErrnoException).code !== "ENOENT") throw err;
      }
    },
  };
}

View File

@@ -0,0 +1,18 @@
import type { Config } from "../config.js";
import type { StorageProvider } from "./types.js";
import { createLocalDiskStorageProvider } from "./local-disk-provider.js";
import { createS3StorageProvider } from "./s3-provider.js";
// Maps the configured storage provider id to a concrete backend.
// Anything other than "local_disk" is treated as S3.
export function createStorageProviderFromConfig(config: Config): StorageProvider {
  switch (config.storageProvider) {
    case "local_disk":
      return createLocalDiskStorageProvider(config.storageLocalDiskBaseDir);
    default:
      return createS3StorageProvider({
        bucket: config.storageS3Bucket,
        region: config.storageS3Region,
        endpoint: config.storageS3Endpoint,
        prefix: config.storageS3Prefix,
        forcePathStyle: config.storageS3ForcePathStyle,
      });
  }
}

View File

@@ -0,0 +1,145 @@
import {
S3Client,
DeleteObjectCommand,
GetObjectCommand,
HeadObjectCommand,
PutObjectCommand,
} from "@aws-sdk/client-s3";
import { Readable } from "node:stream";
import type { StorageProvider, GetObjectResult, HeadObjectResult } from "./types.js";
import { notFound, unprocessable } from "../errors.js";
/** Connection settings for the S3 (or S3-compatible) storage backend. */
interface S3ProviderConfig {
  bucket: string;
  region: string;
  /** Custom endpoint URL (e.g. for MinIO/R2); omit to use the default. */
  endpoint?: string;
  /** Optional key prefix prepended to every object key in the bucket. */
  prefix?: string;
  /** Use path-style addressing (often needed for non-AWS endpoints). */
  forcePathStyle?: boolean;
}
// Trims a configured key prefix and strips leading/trailing slashes so it
// can be joined with "/" without producing empty segments.
function normalizePrefix(prefix: string | undefined): string {
  if (!prefix) {
    return "";
  }
  let result = prefix.trim();
  result = result.replace(/^\/+/, "");
  result = result.replace(/\/+$/, "");
  return result;
}
// Prepends the bucket prefix (if any) to an object key.
function buildKey(prefix: string, objectKey: string): string {
  return prefix.length > 0 ? `${prefix}/${objectKey}` : objectKey;
}
// Coerces the various body shapes the S3 SDK may return into a Node
// Readable. Prefers an existing Readable, then a web stream, then a
// buffered arrayBuffer fallback.
async function toReadableStream(body: unknown): Promise<Readable> {
  if (!body) {
    throw notFound("Object not found");
  }
  if (body instanceof Readable) {
    return body;
  }
  const source = body as {
    transformToWebStream?: () => ReadableStream<Uint8Array>;
    arrayBuffer?: () => Promise<ArrayBuffer>;
  };
  if (typeof source.transformToWebStream === "function") {
    const webStream = source.transformToWebStream();
    // Cast needed: Node's stream/web ReadableStream type differs from the DOM one.
    return Readable.fromWeb(webStream as globalThis.ReadableStream<any>);
  }
  if (typeof source.arrayBuffer === "function") {
    return Readable.from(Buffer.from(await source.arrayBuffer()));
  }
  throw unprocessable("Unsupported S3 body stream type");
}
// Passes through real Date instances; anything else becomes undefined.
function toDate(value: Date | undefined): Date | undefined {
  if (value instanceof Date) {
    return value;
  }
  return undefined;
}
/**
 * Creates a StorageProvider backed by an S3 (or S3-compatible) bucket.
 * Bucket and region are validated up front; no credentials are passed
 * here, so the SDK resolves them itself (env/config — confirm deployment).
 * All object keys are transparently prefixed with the configured prefix.
 * @throws unprocessable when bucket or region is blank.
 */
export function createS3StorageProvider(config: S3ProviderConfig): StorageProvider {
  const bucket = config.bucket.trim();
  const region = config.region.trim();
  if (!bucket) throw unprocessable("S3 storage bucket is required");
  if (!region) throw unprocessable("S3 storage region is required");
  const prefix = normalizePrefix(config.prefix);
  const client = new S3Client({
    region,
    endpoint: config.endpoint,
    forcePathStyle: Boolean(config.forcePathStyle),
  });
  return {
    id: "s3",
    async putObject(input) {
      const key = buildKey(prefix, input.objectKey);
      await client.send(
        new PutObjectCommand({
          Bucket: bucket,
          Key: key,
          Body: input.body,
          ContentType: input.contentType,
          ContentLength: input.contentLength,
        }),
      );
    },
    // Streams an object back; missing-key SDK errors ("NoSuchKey"/"NotFound")
    // are mapped to our notFound error so callers get a uniform 404.
    async getObject(input): Promise<GetObjectResult> {
      const key = buildKey(prefix, input.objectKey);
      try {
        const output = await client.send(
          new GetObjectCommand({
            Bucket: bucket,
            Key: key,
          }),
        );
        return {
          stream: await toReadableStream(output.Body),
          contentType: output.ContentType,
          contentLength: output.ContentLength,
          etag: output.ETag,
          lastModified: toDate(output.LastModified),
        };
      } catch (err) {
        const code = (err as { name?: string }).name;
        if (code === "NoSuchKey" || code === "NotFound") throw notFound("Object not found");
        throw err;
      }
    },
    // Metadata-only lookup; a missing key yields { exists: false } rather
    // than an error.
    async headObject(input): Promise<HeadObjectResult> {
      const key = buildKey(prefix, input.objectKey);
      try {
        const output = await client.send(
          new HeadObjectCommand({
            Bucket: bucket,
            Key: key,
          }),
        );
        return {
          exists: true,
          contentType: output.ContentType,
          contentLength: output.ContentLength,
          etag: output.ETag,
          lastModified: toDate(output.LastModified),
        };
      } catch (err) {
        const code = (err as { name?: string }).name;
        if (code === "NoSuchKey" || code === "NotFound") return { exists: false };
        throw err;
      }
    },
    // S3 DeleteObject succeeds for missing keys, so this is idempotent.
    async deleteObject(input): Promise<void> {
      const key = buildKey(prefix, input.objectKey);
      await client.send(
        new DeleteObjectCommand({
          Bucket: bucket,
          Key: key,
        }),
      );
    },
  };
}

View File

@@ -0,0 +1,131 @@
import { createHash, randomUUID } from "node:crypto";
import path from "node:path";
import type { StorageService, StorageProvider, PutFileInput, PutFileResult } from "./types.js";
import { badRequest, forbidden, unprocessable } from "../errors.js";
const MAX_SEGMENT_LENGTH = 120;
// Reduces an arbitrary string to a safe path segment: whitelisted chars
// only, runs of disallowed chars collapsed to "_", edge underscores
// trimmed, capped at MAX_SEGMENT_LENGTH, with "file" as the fallback.
function sanitizeSegment(value: string): string {
  let segment = value.trim();
  segment = segment.replace(/[^a-zA-Z0-9._-]+/g, "_");
  segment = segment.replace(/_{2,}/g, "_");
  segment = segment.replace(/^_+|_+$/g, "");
  return segment === "" ? "file" : segment.slice(0, MAX_SEGMENT_LENGTH);
}
// Splits a "/"-separated namespace, sanitizes each non-empty part, and
// rejoins. An effectively empty namespace falls back to "misc".
function normalizeNamespace(namespace: string): string {
  const segments: string[] = [];
  for (const part of namespace.split("/")) {
    const trimmed = part.trim();
    if (trimmed.length > 0) {
      segments.push(sanitizeSegment(trimmed));
    }
  }
  return segments.length > 0 ? segments.join("/") : "misc";
}
// Splits a filename into a sanitized stem and a lowercased, whitelisted
// extension (max 16 chars, including the dot). Null/blank names become
// { stem: "file", ext: "" }.
function splitFilename(filename: string | null): { stem: string; ext: string } {
  if (!filename) return { stem: "file", ext: "" };
  const base = path.basename(filename).trim();
  if (!base) return { stem: "file", ext: "" };
  const rawExt = path.extname(base);
  const rawStem = rawExt ? base.slice(0, -rawExt.length) : base;
  const ext = rawExt
    .toLowerCase()
    .replace(/[^a-z0-9.]/g, "")
    .slice(0, 16);
  return { stem: sanitizeSegment(rawStem), ext };
}
// Tenancy guard: an object key must live under "{companyId}/" and must
// not contain any ".." sequence.
function ensureCompanyPrefix(companyId: string, objectKey: string): void {
  const ownsKey = objectKey.startsWith(`${companyId}/`);
  if (!ownsKey) {
    throw forbidden("Object does not belong to company");
  }
  if (objectKey.includes("..")) {
    throw badRequest("Invalid object key");
  }
}
// Hex-encoded SHA-256 digest of a buffer.
function hashBuffer(input: Buffer): string {
  const digest = createHash("sha256");
  digest.update(input);
  return digest.digest("hex");
}
/**
 * Builds a date-partitioned object key of the form
 * `{companyId}/{namespace}/{yyyy}/{mm}/{dd}/{uuid}-{stem}{ext}` (UTC date).
 * The UUID guarantees uniqueness; the sanitized original filename is kept
 * for human readability.
 */
function buildObjectKey(companyId: string, namespace: string, originalFilename: string | null): string {
  const ns = normalizeNamespace(namespace);
  const now = new Date();
  const year = String(now.getUTCFullYear());
  const month = String(now.getUTCMonth() + 1).padStart(2, "0");
  const day = String(now.getUTCDate()).padStart(2, "0");
  const { stem, ext } = splitFilename(originalFilename);
  const suffix = randomUUID();
  const filename = `${suffix}-${stem}${ext}`;
  // Fix: the key previously ended with the literal text "$(unknown)" —
  // `filename` was computed but never interpolated.
  return `${companyId}/${ns}/${year}/${month}/${day}/${filename}`;
}
// Validates a PutFileInput before any provider I/O: required non-blank
// string fields, a real Buffer body, and a non-empty payload.
function assertPutFileInput(input: PutFileInput): void {
  const requireNonBlank = (value: string, message: string): void => {
    if (!value || value.trim().length === 0) {
      throw unprocessable(message);
    }
  };
  requireNonBlank(input.companyId, "companyId is required");
  requireNonBlank(input.namespace, "namespace is required");
  requireNonBlank(input.contentType, "contentType is required");
  if (!(input.body instanceof Buffer)) {
    throw unprocessable("body must be a Buffer");
  }
  if (input.body.length <= 0) {
    throw unprocessable("File is empty");
  }
}
/**
 * Wraps a raw StorageProvider with tenancy-aware semantics: uploads get a
 * generated company-scoped key plus content hashing, and every read/delete
 * verifies the key belongs to the requesting company.
 */
export function createStorageService(provider: StorageProvider): StorageService {
  return {
    provider: provider.id,
    async putFile(input: PutFileInput): Promise<PutFileResult> {
      assertPutFileInput(input);
      const contentType = input.contentType.trim().toLowerCase();
      const byteSize = input.body.length;
      const objectKey = buildObjectKey(input.companyId, input.namespace, input.originalFilename);
      await provider.putObject({
        objectKey,
        body: input.body,
        contentType,
        contentLength: byteSize,
      });
      return {
        provider: provider.id,
        objectKey,
        contentType,
        byteSize,
        sha256: hashBuffer(input.body),
        originalFilename: input.originalFilename,
      };
    },
    async getObject(companyId: string, objectKey: string) {
      ensureCompanyPrefix(companyId, objectKey);
      return provider.getObject({ objectKey });
    },
    async headObject(companyId: string, objectKey: string) {
      ensureCompanyPrefix(companyId, objectKey);
      return provider.headObject({ objectKey });
    },
    async deleteObject(companyId: string, objectKey: string) {
      ensureCompanyPrefix(companyId, objectKey);
      await provider.deleteObject({ objectKey });
    },
  };
}

View File

@@ -0,0 +1,62 @@
import type { StorageProvider as StorageProviderId } from "@paperclip/shared";
import type { Readable } from "node:stream";
/** Payload for writing a single object to a storage backend. */
export interface PutObjectInput {
  objectKey: string;
  body: Buffer;
  contentType: string;
  contentLength: number;
}
/** Identifies an object for read/head/delete operations. */
export interface GetObjectInput {
  objectKey: string;
}
/** A readable object plus whatever metadata the backend reported. */
export interface GetObjectResult {
  stream: Readable;
  contentType?: string;
  contentLength?: number;
  etag?: string;
  lastModified?: Date;
}
/** Metadata-only lookup result; fields are present only when exists is true. */
export interface HeadObjectResult {
  exists: boolean;
  contentType?: string;
  contentLength?: number;
  etag?: string;
  lastModified?: Date;
}
/**
 * Low-level backend contract (local disk, S3, …). Implementations work in
 * raw object keys and know nothing about companies or key generation.
 */
export interface StorageProvider {
  id: StorageProviderId;
  putObject(input: PutObjectInput): Promise<void>;
  getObject(input: GetObjectInput): Promise<GetObjectResult>;
  headObject(input: GetObjectInput): Promise<HeadObjectResult>;
  deleteObject(input: GetObjectInput): Promise<void>;
}
/** High-level upload request; the service generates the object key. */
export interface PutFileInput {
  companyId: string;
  namespace: string;
  originalFilename: string | null;
  contentType: string;
  body: Buffer;
}
/** Result of a completed upload, including the SHA-256 of the payload. */
export interface PutFileResult {
  provider: StorageProviderId;
  objectKey: string;
  contentType: string;
  byteSize: number;
  sha256: string;
  originalFilename: string | null;
}
/**
 * Company-scoped storage facade: read/delete operations require the key to
 * belong to the given company.
 */
export interface StorageService {
  provider: StorageProviderId;
  putFile(input: PutFileInput): Promise<PutFileResult>;
  getObject(companyId: string, objectKey: string): Promise<GetObjectResult>;
  headObject(companyId: string, objectKey: string): Promise<HeadObjectResult>;
  deleteObject(companyId: string, objectKey: string): Promise<void>;
}