Initial commit — Drive, an S3 file browser with WOPI editing

Lightweight replacement for the upstream La Suite Numérique drive
(Django/Celery/Next.js) built as a single Deno binary.

Server (Deno + Hono):
- S3 file operations via AWS SigV4 (no SDK) with pre-signed URLs
- WOPI host for Collabora Online (CheckFileInfo, GetFile, PutFile, locks)
- Ory Kratos session auth + CSRF protection
- Ory Keto permission model (OPL namespaces, not yet wired to routes)
- PostgreSQL metadata with recursive folder sizes
- S3 backfill API for registering files uploaded outside the UI
- OpenTelemetry tracing + metrics (opt-in via OTEL_ENABLED)

Frontend (React 19 + Cunningham v4 + react-aria):
- File browser with GridList, keyboard nav, multi-select
- Collabora editor iframe (full-screen, form POST, postMessage)
- Profile menu, waffle menu, drag-drop upload, asset type badges
- La Suite integration service theming (runtime CSS)

Testing (549 tests):
- 235 server unit tests (Deno) — 90%+ coverage
- 278 UI unit tests (Vitest) — 90%+ coverage
- 11 E2E tests (Playwright)
- 12 integration service tests (Playwright)
- 13 WOPI integration tests (Playwright + Docker Compose + Collabora)

MIT licensed.
This commit is contained in:
2026-03-25 18:28:37 +00:00
commit 58237d9e44
112 changed files with 26841 additions and 0 deletions

162
server/auth.ts Normal file
View File

@@ -0,0 +1,162 @@
import type { Context, Next } from "hono";
// Kratos public API base; the default targets the in-cluster Ory service.
const KRATOS_PUBLIC_URL =
  Deno.env.get("KRATOS_PUBLIC_URL") ??
  "http://kratos-public.ory.svc.cluster.local:80";
// Externally visible base URL of this app (used to build return_to redirects).
const PUBLIC_URL = Deno.env.get("PUBLIC_URL") ?? "http://localhost:3000";
// E2E escape hatch: when "1", authMiddleware injects TEST_IDENTITY and skips Kratos.
const TEST_MODE = Deno.env.get("DRIVER_TEST_MODE") === "1";
// Refuse to boot with the auth bypass enabled in production.
if (TEST_MODE && Deno.env.get("DEPLOYMENT_ENVIRONMENT") === "production") {
  throw new Error("DRIVER_TEST_MODE=1 is forbidden when DEPLOYMENT_ENVIRONMENT=production");
}
if (TEST_MODE) {
  console.warn("⚠️ DRIVER_TEST_MODE is ON — authentication is bypassed. Do not use in production.");
}
// Fixed identity attached to every request while in test mode.
const TEST_IDENTITY: SessionInfo = {
  id: "e2e-test-user-00000000",
  email: "e2e@test.local",
  name: "E2E Test User",
  picture: undefined,
  session: { active: true },
};
// Routes that require no authentication
const PUBLIC_ROUTES = new Set([
  "/health",
]);
// Routes with their own auth (WOPI access tokens)
const TOKEN_AUTH_PREFIXES = ["/wopi/"];
/** Normalized view of a Kratos session used across the app. */
export interface SessionInfo {
  id: string;
  email: string;
  name: string;
  picture?: string;
  // Raw Kratos session payload; intentionally left untyped here.
  session: unknown;
}
/**
 * Pick the Ory session cookie (as a "name=value" pair) out of a raw Cookie
 * header. Returns null when no Ory session cookie is present.
 */
function extractSessionCookie(cookieHeader: string): string | null {
  const match = cookieHeader
    .split(";")
    .map((part) => part.trim())
    .find((part) =>
      part.startsWith("ory_session_") ||
      part.startsWith("ory_kratos_session")
    );
  return match ?? null;
}
/**
 * Resolve the caller's Kratos session from a raw Cookie header.
 *
 * Returns info=null for anonymous callers and on any fetch/parse failure.
 * A 403 from Kratos signals "session exists but needs second factor": then
 * needsAal2 is true and redirectTo (when present) points at the AAL2 flow.
 */
export async function getSession(
  cookieHeader: string,
): Promise<{ info: SessionInfo | null; needsAal2: boolean; redirectTo?: string }> {
  const anonymous = () => ({ info: null, needsAal2: false });
  const sessionCookie = extractSessionCookie(cookieHeader);
  if (!sessionCookie) return anonymous();
  try {
    const resp = await fetch(`${KRATOS_PUBLIC_URL}/sessions/whoami`, {
      headers: { cookie: sessionCookie },
    });
    if (resp.status === 403) {
      // AAL2 required — Kratos puts the browser redirect target in the body.
      const payload = await resp.json().catch(() => null);
      return {
        info: null,
        needsAal2: true,
        redirectTo: payload?.redirect_browser_to ??
          payload?.error?.details?.redirect_browser_to,
      };
    }
    if (resp.status !== 200) return anonymous();
    const session = await resp.json();
    const traits = session?.identity?.traits ?? {};
    // Support both OIDC-standard (given_name/family_name) and legacy (name.first/name.last)
    const first = traits.given_name ?? traits.name?.first ?? "";
    const last = traits.family_name ?? traits.name?.last ?? "";
    const displayName = [first, last].filter(Boolean).join(" ") || traits.email || "";
    return {
      info: {
        id: session?.identity?.id ?? "",
        email: traits.email ?? "",
        name: displayName,
        picture: traits.picture,
        session,
      },
      needsAal2: false,
    };
  } catch {
    // Network or JSON failure — treat as unauthenticated rather than erroring.
    return anonymous();
  }
}
/** True for routes served without any authentication (health + static assets). */
function isPublicRoute(path: string): boolean {
  const matchesListed = [...PUBLIC_ROUTES].some(
    (route) => path === route || path.startsWith(`${route}/`),
  );
  if (matchesListed) return true;
  // Static assets
  return path.startsWith("/assets/") || path === "/index.html" || path === "/favicon.ico";
}
/** True for routes that carry their own token-based auth (WOPI). */
function isTokenAuthRoute(path: string): boolean {
  return TOKEN_AUTH_PREFIXES.some((prefix) => path.startsWith(prefix));
}
/**
 * Global authentication middleware.
 *
 * Check order is deliberate: public routes → WOPI token routes → test-mode
 * bypass → Kratos session. On success the normalized identity is stored on
 * the context under "identity" for downstream handlers.
 */
export async function authMiddleware(c: Context, next: Next) {
  const path = c.req.path;
  // Public routes: no auth needed
  if (isPublicRoute(path)) return await next();
  // WOPI routes: handled by their own token auth
  if (isTokenAuthRoute(path)) return await next();
  // Test mode: inject a fake identity for E2E testing
  if (TEST_MODE) {
    c.set("identity", TEST_IDENTITY);
    return await next();
  }
  // All other routes need a Kratos session
  const cookieHeader = c.req.header("cookie") ?? "";
  const { info: sessionInfo, needsAal2, redirectTo } = await getSession(cookieHeader);
  if (needsAal2) {
    // API/JSON callers get a structured 403; browsers are redirected into the
    // Kratos second-factor (AAL2) login flow.
    if (path.startsWith("/api/") || c.req.header("accept")?.includes("application/json")) {
      return c.json({ error: "AAL2 required", redirectTo }, 403);
    }
    if (redirectTo) return c.redirect(redirectTo, 302);
    const returnTo = encodeURIComponent(PUBLIC_URL + path);
    return c.redirect(
      `/kratos/self-service/login/browser?aal=aal2&return_to=${returnTo}`,
      302,
    );
  }
  if (!sessionInfo) {
    // Unauthenticated: JSON 401 for API clients, login redirect for browsers.
    if (path.startsWith("/api/") || c.req.header("accept")?.includes("application/json")) {
      return c.json({ error: "Unauthorized" }, 401);
    }
    const loginUrl = `${PUBLIC_URL}/login?return_to=${encodeURIComponent(PUBLIC_URL + path)}`;
    return c.redirect(loginUrl, 302);
  }
  c.set("identity", sessionInfo);
  await next();
}
/** GET /api/auth/session — return the current user plus raw Kratos session, or an auth error. */
export async function sessionHandler(c: Context): Promise<Response> {
  if (TEST_MODE) {
    const { session: testSession, ...testUser } = TEST_IDENTITY;
    return c.json({ session: testSession, user: testUser });
  }
  const { info, needsAal2, redirectTo } = await getSession(c.req.header("cookie") ?? "");
  if (needsAal2) {
    return c.json({ error: "AAL2 required", needsAal2: true, redirectTo }, 403);
  }
  if (!info) return c.json({ error: "Unauthorized" }, 401);
  const { session, ...user } = info;
  return c.json({ session, user });
}

308
server/backfill.ts Normal file
View File

@@ -0,0 +1,308 @@
/**
* S3 Backfill API
*
* Scans the SeaweedFS bucket and registers any S3 objects that don't have
* a corresponding row in the PostgreSQL `files` table.
*
* Exposed as POST /api/admin/backfill — requires an authenticated session.
* Not exposed via ingress (internal use only).
*
* Request body (all optional):
* { prefix?: string, dry_run?: boolean }
*
* Key layout convention:
 *   {identity-id}/my-files/{path}/{filename} → personal files, owner = identity-id
 *   shared/{path}/{filename}                 → shared files, owner = "shared"
*/
import type { Context } from "hono";
import sql from "./db.ts";
import { listObjects, headObject } from "./s3.ts";
// Extension → mimetype lookup used when S3 metadata is missing or generic.
const EXT_MIMETYPES: Record<string, string> = {
  // Office / documents
  docx: "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
  doc: "application/msword",
  xlsx: "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
  xls: "application/vnd.ms-excel",
  pptx: "application/vnd.openxmlformats-officedocument.presentationml.presentation",
  ppt: "application/vnd.ms-powerpoint",
  odt: "application/vnd.oasis.opendocument.text",
  ods: "application/vnd.oasis.opendocument.spreadsheet",
  odp: "application/vnd.oasis.opendocument.presentation",
  pdf: "application/pdf",
  txt: "text/plain",
  csv: "text/csv",
  md: "text/markdown",
  // Images
  png: "image/png",
  jpg: "image/jpeg",
  jpeg: "image/jpeg",
  gif: "image/gif",
  webp: "image/webp",
  svg: "image/svg+xml",
  tga: "image/x-tga",
  psd: "image/vnd.adobe.photoshop",
  exr: "image/x-exr",
  // Audio / video
  mp4: "video/mp4",
  webm: "video/webm",
  mov: "video/quicktime",
  avi: "video/x-msvideo",
  mkv: "video/x-matroska",
  mp3: "audio/mpeg",
  wav: "audio/wav",
  ogg: "audio/ogg",
  flac: "audio/flac",
  aac: "audio/aac",
  // 3D / game assets
  fbx: "application/octet-stream",
  gltf: "model/gltf+json",
  glb: "model/gltf-binary",
  obj: "model/obj",
  blend: "application/x-blender",
  dds: "image/vnd-ms.dds",
  ktx: "image/ktx",
  ktx2: "image/ktx2",
  // Archives
  zip: "application/zip",
  tar: "application/x-tar",
  gz: "application/gzip",
  "7z": "application/x-7z-compressed",
  // Text / code
  json: "application/json",
  yaml: "text/yaml",
  yml: "text/yaml",
  xml: "application/xml",
  js: "text/javascript",
  ts: "text/typescript",
  py: "text/x-python",
  lua: "text/x-lua",
  glsl: "text/x-glsl",
  hlsl: "text/x-hlsl",
};
/**
 * Best-effort mimetype from the filename's last dot-separated segment
 * (case-insensitive); falls back to application/octet-stream.
 */
function inferMimetype(filename: string): string {
  const pieces = filename.split(".");
  const ext = pieces[pieces.length - 1].toLowerCase();
  return EXT_MIMETYPES[ext] ?? "application/octet-stream";
}
/**
 * Split an S3 key into owner, folder path, and filename.
 *
 * Recognized layouts:
 *   {identity-id}/my-files/{path...}/{name} → owner = identity-id
 *   shared/{path...}/{name}                 → owner = "shared"
 * Any other first segment is treated as the owner with the rest as path.
 * A trailing "/" marks a folder. Returns null for empty/unusable keys.
 */
export function parseKey(key: string): {
  ownerId: string;
  pathParts: string[];
  filename: string;
  isFolder: boolean;
} | null {
  if (!key || key === "/") return null;
  const isFolder = key.endsWith("/");
  const segments = key.replace(/\/$/, "").split("/").filter(Boolean);
  if (segments.length === 0) return null;
  // Decide where the owner prefix ends and the user-visible path begins.
  let ownerId: string;
  let pathStart: number;
  if (segments[0] === "shared") {
    ownerId = "shared";
    pathStart = 1;
  } else if (segments.length >= 2 && segments[1] === "my-files") {
    ownerId = segments[0];
    pathStart = 2;
  } else {
    ownerId = segments[0];
    pathStart = 1;
  }
  const rest = segments.slice(pathStart);
  // A bare owner prefix with no file component is only meaningful as a folder.
  if (rest.length === 0 && !isFolder) return null;
  const filename = isFolder
    ? (rest[rest.length - 1] ?? segments[segments.length - 1])
    : rest[rest.length - 1];
  return {
    ownerId,
    pathParts: rest.slice(0, -1),
    filename,
    isFolder,
  };
}
/** Aggregate counters for one backfill run. */
interface BackfillResult {
  scanned: number;            // S3 objects examined
  already_registered: number; // objects that already had a DB row
  folders_created: number;
  files_created: number;
  errors: string[];           // per-object failures as "key: message"
  dry_run: boolean;
}
/**
 * Scan S3 under `prefix` and register any objects missing from `files`.
 *
 * Folder rows are created on demand for each path segment (recursively);
 * file rows use ON CONFLICT DO NOTHING so concurrent registration is safe.
 * With `dryRun` nothing is written — counters are still incremented so the
 * caller can preview the effect.
 */
async function runBackfill(prefix: string, dryRun: boolean): Promise<BackfillResult> {
  const result: BackfillResult = {
    scanned: 0,
    already_registered: 0,
    folders_created: 0,
    files_created: 0,
    errors: [],
    dry_run: dryRun,
  };
  // Load all existing keys once so the scan can skip registered objects cheaply.
  const existingRows = await sql`SELECT s3_key FROM files`;
  const existingKeys = new Set(existingRows.map((r: Record<string, unknown>) => r.s3_key as string));
  // Folder ID cache: s3_key → uuid (avoids re-querying for every child)
  const folderIdCache = new Map<string, string>();
  const existingFolders = await sql`SELECT id, s3_key FROM files WHERE is_folder = true`;
  for (const f of existingFolders) {
    folderIdCache.set(f.s3_key, f.id);
  }
  // Look up or create the folder row for `s3Key`, recursively ensuring its
  // ancestors first. Returns the folder's row id (a fake uuid when dry-running).
  async function ensureFolder(s3Key: string, ownerId: string, filename: string): Promise<string> {
    const cached = folderIdCache.get(s3Key);
    if (cached) return cached;
    if (existingKeys.has(s3Key)) {
      const [row] = await sql`SELECT id FROM files WHERE s3_key = ${s3Key}`;
      if (row) {
        folderIdCache.set(s3Key, row.id);
        return row.id;
      }
    }
    // Resolve parent folder (anything above the owner/my-files prefix).
    let parentId: string | null = null;
    const segments = s3Key.replace(/\/$/, "").split("/");
    if (segments.length > 2) {
      const parentS3Key = segments.slice(0, -1).join("/") + "/";
      const parentName = segments[segments.length - 2];
      parentId = await ensureFolder(parentS3Key, ownerId, parentName);
    }
    if (dryRun) {
      const fakeId = crypto.randomUUID();
      folderIdCache.set(s3Key, fakeId);
      result.folders_created++;
      return fakeId;
    }
    // No-op DO UPDATE lets RETURNING yield the id even when the row existed.
    const [row] = await sql`
      INSERT INTO files (s3_key, filename, mimetype, size, owner_id, parent_id, is_folder)
      VALUES (${s3Key}, ${filename}, ${"inode/directory"}, ${0}, ${ownerId}, ${parentId}, ${true})
      ON CONFLICT (s3_key) DO UPDATE SET s3_key = files.s3_key
      RETURNING id
    `;
    folderIdCache.set(s3Key, row.id);
    existingKeys.add(s3Key);
    result.folders_created++;
    return row.id;
  }
  // Walk the bucket page by page (listing capped at 1000 keys per call).
  let continuationToken: string | undefined;
  do {
    const listing = await listObjects(prefix, undefined, 1000, continuationToken);
    for (const obj of listing.contents) {
      result.scanned++;
      if (existingKeys.has(obj.key)) {
        result.already_registered++;
        continue;
      }
      const parsed = parseKey(obj.key);
      if (!parsed) continue;
      try {
        // Prefer HEAD metadata over extension inference when it carries
        // something more specific than octet-stream.
        let size = obj.size;
        let mimetype = inferMimetype(parsed.filename);
        const head = await headObject(obj.key);
        if (head) {
          size = head.contentLength;
          if (head.contentType && head.contentType !== "application/octet-stream") {
            mimetype = head.contentType;
          }
        }
        // Ensure parent folder chain
        let parentId: string | null = null;
        if (parsed.pathParts.length > 0) {
          const keySegments = obj.key.split("/");
          const parentSegments = keySegments.slice(0, -1);
          const parentS3Key = parentSegments.join("/") + "/";
          const parentFilename = parentSegments[parentSegments.length - 1];
          parentId = await ensureFolder(parentS3Key, parsed.ownerId, parentFilename);
        }
        if (parsed.isFolder) {
          await ensureFolder(obj.key, parsed.ownerId, parsed.filename);
          continue;
        }
        if (dryRun) {
          result.files_created++;
          continue;
        }
        const [row] = await sql`
          INSERT INTO files (s3_key, filename, mimetype, size, owner_id, parent_id, is_folder)
          VALUES (${obj.key}, ${parsed.filename}, ${mimetype}, ${size}, ${parsed.ownerId}, ${parentId}, ${false})
          ON CONFLICT (s3_key) DO NOTHING
          RETURNING id
        `;
        if (row) {
          existingKeys.add(obj.key);
          result.files_created++;
        } else {
          // Row appeared concurrently — ON CONFLICT DO NOTHING returned no id.
          result.already_registered++;
        }
      } catch (err) {
        result.errors.push(`${obj.key}: ${err instanceof Error ? err.message : String(err)}`);
      }
    }
    continuationToken = listing.nextContinuationToken;
  } while (continuationToken);
  // Recompute folder sizes after inserting new rows.
  // NOTE(review): this walks every folder in the table, not just the ones
  // touched by this run — acceptable for an admin batch job, but confirm.
  if (result.folders_created > 0 && !dryRun) {
    const folders = await sql`SELECT id FROM files WHERE is_folder = true`;
    for (const f of folders) {
      await sql`SELECT propagate_folder_sizes(${f.id}::uuid)`;
    }
  }
  return result;
}
/** Identity ids allowed to run the backfill (comma-separated env; empty ⇒ deny everyone). */
const ADMIN_IDS = (Deno.env.get("ADMIN_IDENTITY_IDS") ?? "").split(",").map((s) => s.trim()).filter(Boolean);
/** POST /api/admin/backfill — requires authenticated session + admin identity */
export async function backfillHandler(c: Context): Promise<Response> {
  const identity = c.get("identity");
  if (!identity?.id) return c.json({ error: "Unauthorized" }, 401);
  // Admin check: ADMIN_IDENTITY_IDS must be set and caller must be in the list
  if (ADMIN_IDS.length === 0 || !ADMIN_IDS.includes(identity.id)) {
    return c.json({ error: "Forbidden — admin access required" }, 403);
  }
  let prefix = "";
  let dryRun = false;
  try {
    const body = await c.req.json();
    prefix = body.prefix ?? "";
    dryRun = body.dry_run ?? false;
  } catch {
    // No body or invalid JSON — use defaults
  }
  const result = await runBackfill(prefix, dryRun);
  return c.json(result);
}

90
server/csrf.ts Normal file
View File

@@ -0,0 +1,90 @@
import type { Context, Next } from "hono";
// Secret for HMAC-signing CSRF tokens. Test mode gets a fixed value;
// otherwise it must be configured explicitly.
const CSRF_COOKIE_SECRET = Deno.env.get("CSRF_COOKIE_SECRET")
  ?? (Deno.env.get("DRIVER_TEST_MODE") === "1" ? "test-csrf-secret" : "");
// Fail fast at startup rather than signing tokens with an empty secret.
if (!CSRF_COOKIE_SECRET) {
  throw new Error("CSRF_COOKIE_SECRET must be set (or run with DRIVER_TEST_MODE=1)");
}
const CSRF_COOKIE_NAME = "driver-csrf-token";
// Shared text encoder for all HMAC operations.
const encoder = new TextEncoder();
/**
 * HMAC-SHA256 sign `data` with `secret` and return lowercase hex.
 * The WebCrypto key is imported non-extractable and sign-only.
 */
async function hmacSign(data: string, secret: string): Promise<string> {
  const hmacKey = await crypto.subtle.importKey(
    "raw",
    encoder.encode(secret),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["sign"],
  );
  const mac = new Uint8Array(
    await crypto.subtle.sign("HMAC", hmacKey, encoder.encode(data)),
  );
  let hex = "";
  for (const byte of mac) hex += byte.toString(16).padStart(2, "0");
  return hex;
}
/**
 * Compare `signature` against the expected HMAC of `data` in constant time:
 * XOR-accumulating char codes avoids leaking the mismatch position via timing.
 */
async function hmacVerify(data: string, signature: string, secret: string): Promise<boolean> {
  const expected = await hmacSign(data, secret);
  if (expected.length !== signature.length) return false;
  const diff = [...expected].reduce(
    (acc, ch, i) => acc | (ch.charCodeAt(0) ^ signature.charCodeAt(i)),
    0,
  );
  return diff === 0;
}
/**
 * Mint a fresh CSRF token ("nonce.hmac") plus its Set-Cookie value.
 * The Secure flag is dropped in test mode so plain-HTTP test runs work.
 */
export async function generateCsrfToken(): Promise<{ token: string; cookie: string }> {
  const nonce = crypto.randomUUID();
  const token = `${nonce}.${await hmacSign(nonce, CSRF_COOKIE_SECRET)}`;
  const secureFlag = Deno.env.get("DRIVER_TEST_MODE") === "1" ? "" : "; Secure";
  return {
    token,
    cookie: `${CSRF_COOKIE_NAME}=${token}; Path=/; HttpOnly; SameSite=Strict${secureFlag}`,
  };
}
/** Return the value of cookie `name` from a raw Cookie header, or null. */
function extractCookie(cookieHeader: string, name: string): string | null {
  const prefix = `${name}=`;
  for (const part of cookieHeader.split(";")) {
    const trimmed = part.trim();
    if (trimmed.startsWith(prefix)) return trimmed.slice(prefix.length);
  }
  return null;
}
/**
 * Double-submit check: the x-csrf-token header must match the CSRF cookie
 * exactly, and the token's "nonce.hmac" signature must verify.
 */
async function verifyCsrfToken(req: Request): Promise<boolean> {
  const fromHeader = req.headers.get("x-csrf-token");
  if (!fromHeader) return false;
  const fromCookie = extractCookie(req.headers.get("cookie") ?? "", CSRF_COOKIE_NAME);
  // A missing cookie also fails here (null !== non-empty header value).
  if (fromCookie !== fromHeader) return false;
  const pieces = fromHeader.split(".");
  if (pieces.length !== 2) return false;
  const [nonce, signature] = pieces;
  return await hmacVerify(nonce, signature, CSRF_COOKIE_SECRET);
}
// HTTP methods that can mutate state and therefore need CSRF protection.
const MUTATING_METHODS = new Set(["POST", "PUT", "PATCH", "DELETE"]);
/**
 * CSRF middleware: mutating /api/ requests must pass the double-submit token
 * check in verifyCsrfToken. All other paths/methods pass through untouched.
 */
export async function csrfMiddleware(c: Context, next: Next) {
  // Skip CSRF entirely in test mode (checked at call time so tests can control it)
  if (Deno.env.get("DRIVER_TEST_MODE") === "1") return await next();
  // Skip CSRF for WOPI endpoints — they authenticate with their own access tokens.
  if (c.req.path.startsWith("/wopi/")) {
    return await next();
  }
  if (MUTATING_METHODS.has(c.req.method) && c.req.path.startsWith("/api/")) {
    const valid = await verifyCsrfToken(c.req.raw);
    if (!valid) {
      return c.text("CSRF token invalid or missing", 403);
    }
  }
  await next();
}
export { CSRF_COOKIE_NAME };

41
server/db.ts Normal file
View File

@@ -0,0 +1,41 @@
import postgres from "postgres";
import { OTEL_ENABLED, withSpan } from "./telemetry.ts";
// Connection string; the default points at a local development database.
const DATABASE_URL =
  Deno.env.get("DATABASE_URL") ??
  "postgres://driver:driver@localhost:5432/driver_db";
// Shared postgres.js connection pool.
const _sql = postgres(DATABASE_URL, {
  max: 10,
  idle_timeout: 20,
  connect_timeout: 10,
});
/**
 * Traced SQL tagged-template proxy.
 *
 * When OTEL is enabled every query is wrapped in a `db.query` span whose
 * `db.statement` attribute contains the SQL template (parameters replaced
 * with `$?` placeholders — no user data leaks into traces).
 *
 * When OTEL is disabled this is the raw postgres.js instance.
 *
 * Only direct tagged-template calls (sql`...`) are traced: the Proxy traps
 * `apply`, so property-based calls such as sql.unsafe(...) pass through
 * untraced.
 */
// NOTE(review): the lint directive below looks stale — no `any` appears on
// the following line; confirm before removing.
// deno-lint-ignore no-explicit-any
const sql: typeof _sql = OTEL_ENABLED
  ? new Proxy(_sql, {
    apply(_target, _thisArg, args) {
      // Tagged-template call: sql`SELECT ...`
      const [strings] = args as [TemplateStringsArray, ...unknown[]];
      const statement = Array.isArray(strings)
        ? strings.join("$?")
        : "unknown";
      return withSpan(
        "db.query",
        { "db.statement": statement, "db.system": "postgresql" },
        () => Reflect.apply(_target, _thisArg, args),
      );
    },
  })
  : _sql;
export default sql;

444
server/files.ts Normal file
View File

@@ -0,0 +1,444 @@
/**
* File CRUD handlers for Hono routes.
* Uses server/db.ts for metadata and server/s3.ts for storage.
*/
import type { Context } from "hono";
import sql from "./db.ts";
import { copyObject, deleteObject, getObject, putObject } from "./s3.ts";
import { presignGetUrl, presignPutUrl, createMultipartUpload, presignUploadPart, completeMultipartUpload as s3CompleteMultipart } from "./s3-presign.ts";
// ── Helpers ─────────────────────────────────────────────────────────────────
/** Identity attached to the context by authMiddleware. */
interface Identity {
  id: string;
  email: string;
}
/** Pull the authenticated identity off the context; null when missing or malformed. */
function getIdentity(c: Context): Identity | null {
  const candidate = c.get("identity");
  return candidate?.id ? (candidate as Identity) : null;
}
/** Compose the canonical object key: {owner}/my-files[/{path}]/{filename}. */
function buildS3Key(ownerId: string, path: string, filename: string): string {
  const segments = path
    ? [ownerId, "my-files", path, filename]
    : [ownerId, "my-files", filename];
  return segments.join("/");
}
/**
 * Recompute folder sizes up the ancestor chain after a file mutation.
 * Delegates to the Postgres function `propagate_folder_sizes`; a null
 * parent (root-level file) is a no-op.
 */
async function propagateFolderSizes(parentId: string | null) {
  if (!parentId) return;
  await sql`SELECT propagate_folder_sizes(${parentId}::uuid)`;
}
// ── File operations ────────────────────────────────────────────────────────
/**
 * GET /api/files?parent_id=&sort=&search=&limit=&offset=
 *
 * Lists the caller's non-deleted files under a parent folder (root when
 * parent_id is absent), folders first, with an optional ILIKE filename
 * search. `sort` is validated against a whitelist before being interpolated
 * into ORDER BY, so sql.unsafe cannot be injected through it.
 */
export async function listFiles(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const parentId = c.req.query("parent_id") || null;
  const sort = c.req.query("sort") || "filename";
  const search = c.req.query("search") || "";
  // Clamp pagination inputs; non-numeric values fall back to defaults
  // instead of leaking NaN into the query.
  const rawLimit = parseInt(c.req.query("limit") || "50", 10);
  const limit = Math.min(Number.isFinite(rawLimit) ? rawLimit : 50, 200);
  const rawOffset = parseInt(c.req.query("offset") || "0", 10);
  const offset = Number.isFinite(rawOffset) ? rawOffset : 0;
  const allowedSorts: Record<string, string> = {
    filename: "filename ASC",
    "-filename": "filename DESC",
    size: "size ASC",
    "-size": "size DESC",
    created_at: "created_at ASC",
    "-created_at": "created_at DESC",
    updated_at: "updated_at ASC",
    "-updated_at": "updated_at DESC",
  };
  const orderBy = allowedSorts[sort] ?? "filename ASC";
  // Build one parameterized query instead of four near-identical branches.
  const params: (string | number)[] = [identity.id];
  const where = ["owner_id = $1", "deleted_at IS NULL"];
  if (parentId) {
    params.push(parentId);
    where.push(`parent_id = $${params.length}`);
  } else {
    where.push("parent_id IS NULL");
  }
  if (search) {
    params.push(`%${search}%`);
    where.push(`filename ILIKE $${params.length}`);
  }
  params.push(limit, offset);
  const rows = await sql.unsafe(
    `SELECT * FROM files
     WHERE ${where.join(" AND ")}
     ORDER BY is_folder DESC, ${orderBy}
     LIMIT $${params.length - 1} OFFSET $${params.length}`,
    params,
  );
  return c.json({ files: rows });
}
/**
 * GET /api/files/:id — fetch one file row (the query does not filter
 * deleted_at, so trashed files are included) and stamp the caller's
 * last_opened time for the "recent" listing.
 */
export async function getFile(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const fileId = c.req.param("id");
  const rows = await sql`
    SELECT * FROM files WHERE id = ${fileId} AND owner_id = ${identity.id}
  `;
  if (rows.length === 0) return c.json({ error: "Not found" }, 404);
  // Record the visit (upsert into per-user state).
  await sql`
    INSERT INTO user_file_state (user_id, file_id, last_opened)
    VALUES (${identity.id}, ${fileId}, now())
    ON CONFLICT (user_id, file_id) DO UPDATE SET last_opened = now()
  `;
  return c.json({ file: rows[0] });
}
/**
 * POST /api/files — create a file.
 *
 * Two modes, selected by Content-Type:
 *  - multipart/form-data: the file body is streamed through the server to S3;
 *  - JSON: a metadata-only row is created and the content is uploaded later
 *    via a pre-signed URL (see getUploadUrl).
 */
export async function createFile(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const contentType = c.req.header("content-type") ?? "";
  if (contentType.includes("multipart/form-data")) {
    const formData = await c.req.formData();
    const file = formData.get("file");
    if (!(file instanceof File)) {
      return c.json({ error: "No file provided" }, 400);
    }
    const parentId = formData.get("parent_id") as string | null;
    const filename = (formData.get("filename") as string) || file.name;
    // Browsers send an empty type for unknown extensions — store a concrete default.
    const mimetype = file.type || "application/octet-stream";
    // S3 key mirrors the folder path so objects stay browsable outside the app.
    const path = parentId ? await buildPathFromParent(parentId, identity.id) : "";
    const s3Key = buildS3Key(identity.id, path, filename);
    const bytes = new Uint8Array(await file.arrayBuffer());
    await putObject(s3Key, bytes, mimetype);
    const [row] = await sql`
      INSERT INTO files (s3_key, filename, mimetype, size, owner_id, parent_id)
      VALUES (${s3Key}, ${filename}, ${mimetype}, ${bytes.length}, ${identity.id}, ${parentId})
      RETURNING *
    `;
    await propagateFolderSizes(parentId);
    return c.json({ file: row }, 201);
  }
  // JSON body — metadata-only (the actual upload happens via presigned URL)
  const body = await c.req.json();
  const { filename, mimetype, size, parent_id } = body;
  if (!filename) return c.json({ error: "filename required" }, 400);
  const path = parent_id ? await buildPathFromParent(parent_id, identity.id) : "";
  const s3Key = buildS3Key(identity.id, path, filename);
  const [row] = await sql`
    INSERT INTO files (s3_key, filename, mimetype, size, owner_id, parent_id)
    VALUES (${s3Key}, ${filename}, ${mimetype || "application/octet-stream"}, ${size || 0}, ${identity.id}, ${parent_id || null})
    RETURNING *
  `;
  await propagateFolderSizes(parent_id || null);
  return c.json({ file: row }, 201);
}
/**
 * PUT /api/files/:id — rename and/or move (and optionally resize) a file.
 *
 * The S3 key is derived from owner + folder path + filename, so a rename or
 * move recomputes it and relocates the object (copy, then delete) when the
 * file actually has content.
 * NOTE(review): moving/renaming a folder updates only that row's s3_key —
 * descendants keep their old keys; confirm whether a deeper rewrite is
 * handled elsewhere.
 */
export async function updateFile(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const id = c.req.param("id");
  const [file] = await sql`
    SELECT * FROM files WHERE id = ${id} AND owner_id = ${identity.id} AND deleted_at IS NULL
  `;
  if (!file) return c.json({ error: "Not found" }, 404);
  const body = await c.req.json();
  // Absent body fields keep their current values.
  const newFilename = body.filename ?? file.filename;
  const newParentId = body.parent_id !== undefined ? body.parent_id : file.parent_id;
  const newSize = body.size !== undefined ? body.size : file.size;
  // Compute new S3 key
  const path = newParentId ? await buildPathFromParent(newParentId, identity.id) : "";
  const newS3Key = buildS3Key(identity.id, path, newFilename);
  // If S3 key changed, copy + delete in S3 (only if content exists —
  // folders and zero-byte placeholders have no object to move)
  if (newS3Key !== file.s3_key && !file.is_folder && Number(file.size) > 0) {
    await copyObject(file.s3_key, newS3Key);
    await deleteObject(file.s3_key);
  }
  const oldParentId = file.parent_id;
  const [updated] = await sql`
    UPDATE files
    SET filename = ${newFilename},
        parent_id = ${newParentId},
        s3_key = ${newS3Key},
        size = ${newSize},
        updated_at = now()
    WHERE id = ${id}
    RETURNING *
  `;
  // Propagate folder sizes if parent changed or file was moved
  await propagateFolderSizes(newParentId);
  if (oldParentId && oldParentId !== newParentId) {
    await propagateFolderSizes(oldParentId);
  }
  return c.json({ file: updated });
}
/** DELETE /api/files/:id — soft delete: stamps deleted_at, keeps the S3 object for restore. */
export async function deleteFile(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const fileId = c.req.param("id");
  const rows = await sql`
    UPDATE files
    SET deleted_at = now()
    WHERE id = ${fileId} AND owner_id = ${identity.id} AND deleted_at IS NULL
    RETURNING *
  `;
  const trashed = rows[0];
  if (!trashed) return c.json({ error: "Not found" }, 404);
  await propagateFolderSizes(trashed.parent_id);
  return c.json({ file: trashed });
}
/** POST /api/files/:id/restore — undo a soft delete. */
export async function restoreFile(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const fileId = c.req.param("id");
  const rows = await sql`
    UPDATE files
    SET deleted_at = NULL, updated_at = now()
    WHERE id = ${fileId} AND owner_id = ${identity.id} AND deleted_at IS NOT NULL
    RETURNING *
  `;
  const restored = rows[0];
  if (!restored) return c.json({ error: "Not found" }, 404);
  await propagateFolderSizes(restored.parent_id);
  return c.json({ file: restored });
}
/** GET /api/files/:id/download — returns a pre-signed S3 GET URL for the file. */
export async function downloadFile(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const fileId = c.req.param("id");
  const [found] = await sql`
    SELECT * FROM files WHERE id = ${fileId} AND owner_id = ${identity.id} AND deleted_at IS NULL
  `;
  if (!found) return c.json({ error: "Not found" }, 404);
  if (found.is_folder) return c.json({ error: "Cannot download a folder" }, 400);
  return c.json({ url: await presignGetUrl(found.s3_key) });
}
/**
 * POST /api/files/:id/upload-url — issue pre-signed upload URL(s).
 *
 * Body: { content_type?: string, parts?: number }. With parts > 1 a
 * multipart upload is created and one pre-signed URL is returned per part
 * (callers finish via /complete-upload); otherwise a single pre-signed PUT
 * URL is returned.
 */
export async function getUploadUrl(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const id = c.req.param("id");
  const [file] = await sql`
    SELECT * FROM files WHERE id = ${id} AND owner_id = ${identity.id} AND deleted_at IS NULL
  `;
  if (!file) return c.json({ error: "Not found" }, 404);
  const body = await c.req.json();
  const contentType = body.content_type || file.mimetype;
  const parts = body.parts as number | undefined;
  // 10000 is the S3 hard limit on multipart part count.
  if (parts && (parts < 1 || parts > 10000)) {
    return c.json({ error: "parts must be between 1 and 10000" }, 400);
  }
  if (parts && parts > 1) {
    // Multipart upload
    const uploadId = await createMultipartUpload(file.s3_key, contentType);
    // Presign all part URLs concurrently — each part is independent, so the
    // sequential await-in-a-loop was pure overhead. Order is preserved.
    const urls = await Promise.all(
      Array.from({ length: parts }, (_, i) =>
        presignUploadPart(file.s3_key, uploadId, i + 1)),
    );
    return c.json({ multipart: true, upload_id: uploadId, urls });
  }
  // Single-part pre-signed PUT
  const url = await presignPutUrl(file.s3_key, contentType);
  return c.json({ multipart: false, url });
}
/** POST /api/files/:id/complete-upload — finish a multipart S3 upload and record the size. */
export async function completeUpload(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const fileId = c.req.param("id");
  const [target] = await sql`
    SELECT * FROM files WHERE id = ${fileId} AND owner_id = ${identity.id} AND deleted_at IS NULL
  `;
  if (!target) return c.json({ error: "Not found" }, 404);
  const { upload_id, parts, size } = await c.req.json();
  if (!upload_id || !Array.isArray(parts)) {
    return c.json({ error: "upload_id and parts[] required" }, 400);
  }
  await s3CompleteMultipart(target.s3_key, upload_id, parts);
  // Update size if provided
  if (size) {
    await sql`UPDATE files SET size = ${size}, updated_at = now() WHERE id = ${fileId}`;
    await propagateFolderSizes(target.parent_id);
  }
  return c.json({ ok: true });
}
// ── User state handlers ────────────────────────────────────────────────────
/** GET /api/recent — files the caller has opened, most recent first. */
export async function listRecent(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const pageSize = Math.min(parseInt(c.req.query("limit") || "50", 10), 200);
  const pageStart = parseInt(c.req.query("offset") || "0", 10);
  const files = await sql`
    SELECT f.*, ufs.last_opened
    FROM files f
    JOIN user_file_state ufs ON ufs.file_id = f.id
    WHERE ufs.user_id = ${identity.id}
      AND ufs.last_opened IS NOT NULL
      AND f.deleted_at IS NULL
    ORDER BY ufs.last_opened DESC
    LIMIT ${pageSize} OFFSET ${pageStart}
  `;
  return c.json({ files });
}
/** GET /api/favorites — the caller's favorited, non-deleted files, A→Z. */
export async function listFavorites(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const pageSize = Math.min(parseInt(c.req.query("limit") || "50", 10), 200);
  const pageStart = parseInt(c.req.query("offset") || "0", 10);
  const files = await sql`
    SELECT f.*, ufs.favorited
    FROM files f
    JOIN user_file_state ufs ON ufs.file_id = f.id
    WHERE ufs.user_id = ${identity.id}
      AND ufs.favorited = true
      AND f.deleted_at IS NULL
    ORDER BY f.filename ASC
    LIMIT ${pageSize} OFFSET ${pageStart}
  `;
  return c.json({ files });
}
/** PUT /api/files/:id/favorite — flip the caller's favorite flag for a file. */
export async function toggleFavorite(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const fileId = c.req.param("id");
  const owned = await sql`
    SELECT id FROM files WHERE id = ${fileId} AND owner_id = ${identity.id} AND deleted_at IS NULL
  `;
  if (owned.length === 0) return c.json({ error: "Not found" }, 404);
  // Upsert: the first favorite inserts true; later calls toggle the stored value.
  const [state] = await sql`
    INSERT INTO user_file_state (user_id, file_id, favorited)
    VALUES (${identity.id}, ${fileId}, true)
    ON CONFLICT (user_id, file_id)
    DO UPDATE SET favorited = NOT user_file_state.favorited
    RETURNING favorited
  `;
  return c.json({ favorited: state.favorited });
}
/** GET /api/trash — soft-deleted files, most recently deleted first. */
export async function listTrash(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const pageSize = Math.min(parseInt(c.req.query("limit") || "50", 10), 200);
  const pageStart = parseInt(c.req.query("offset") || "0", 10);
  const files = await sql`
    SELECT * FROM files
    WHERE owner_id = ${identity.id} AND deleted_at IS NOT NULL
    ORDER BY deleted_at DESC
    LIMIT ${pageSize} OFFSET ${pageStart}
  `;
  return c.json({ files });
}
// ── Internal helpers ────────────────────────────────────────────────────────
/**
 * Walk the parent chain upward from `parentId` and return the slash-joined
 * folder path (root-most segment first), e.g. "docs/reports".
 *
 * Stops at a missing, foreign-owned, or non-folder row. Also guards against
 * parent_id cycles in the database, which would otherwise loop forever.
 */
async function buildPathFromParent(parentId: string, ownerId: string): Promise<string> {
  const parts: string[] = [];
  const seen = new Set<string>();
  let currentId: string | null = parentId;
  while (currentId) {
    // Defensive: corrupt data with a parent cycle must not hang the request.
    if (seen.has(currentId)) break;
    seen.add(currentId);
    const [folder] = await sql`
      SELECT id, filename, parent_id FROM files
      WHERE id = ${currentId} AND owner_id = ${ownerId} AND is_folder = true
    `;
    if (!folder) break;
    parts.unshift(folder.filename);
    currentId = folder.parent_id;
  }
  return parts.join("/");
}

103
server/folders.ts Normal file
View File

@@ -0,0 +1,103 @@
/**
* Folder operation handlers for Hono routes.
*/
import type { Context } from "hono";
import sql from "./db.ts";
// Minimal identity shape the auth middleware stores on the Hono context.
interface Identity {
  id: string; // identity id used as owner_id in the files table
  email: string;
}
/** Pull the authenticated identity off the request context, or null if absent. */
function getIdentity(c: Context): Identity | null {
  const value = c.get("identity");
  return value?.id ? (value as Identity) : null;
}
/** POST /api/folders — create a folder (DB record only, no S3 object) */
export async function createFolder(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const body = await c.req.json();
  const { name, parent_id } = body;
  if (!name || typeof name !== "string") return c.json({ error: "name required" }, 400);
  // A "/" inside the name would silently create extra levels in the s3_key
  // hierarchy built below — reject it up front.
  if (name.includes("/")) return c.json({ error: "name must not contain '/'" }, 400);
  // Build s3_key for the folder (convention: ends with /)
  const pathParts = [identity.id, "my-files"];
  if (parent_id) {
    const parentPath = await buildPathFromParent(parent_id, identity.id);
    if (parentPath) pathParts.push(parentPath);
  }
  pathParts.push(name);
  const s3Key = pathParts.join("/") + "/";
  const [folder] = await sql`
    INSERT INTO files (s3_key, filename, mimetype, size, owner_id, parent_id, is_folder)
    VALUES (${s3Key}, ${name}, ${"inode/directory"}, ${0}, ${identity.id}, ${parent_id || null}, ${true})
    RETURNING *
  `;
  return c.json({ folder }, 201);
}
/** GET /api/folders/:id/children — list folder contents (sorted, paginated) */
export async function listFolderChildren(c: Context): Promise<Response> {
  const identity = getIdentity(c);
  if (!identity) return c.json({ error: "Unauthorized" }, 401);
  const id = c.req.param("id");
  const sort = c.req.query("sort") || "filename";
  // Sanitize pagination: parseInt returns NaN for junk input, which would be
  // bound as a SQL parameter ($3/$4 below) and fail with a 500.
  const rawLimit = parseInt(c.req.query("limit") || "50", 10);
  const limit = Math.min(Number.isFinite(rawLimit) && rawLimit > 0 ? rawLimit : 50, 200);
  const rawOffset = parseInt(c.req.query("offset") || "0", 10);
  const offset = Number.isFinite(rawOffset) && rawOffset > 0 ? rawOffset : 0;
  // Verify folder exists and belongs to user
  const [folder] = await sql`
    SELECT id FROM files
    WHERE id = ${id} AND owner_id = ${identity.id} AND is_folder = true AND deleted_at IS NULL
  `;
  if (!folder) return c.json({ error: "Folder not found" }, 404);
  // Allowlist of ORDER BY fragments — `sort` is user input and is only ever
  // mapped through this table before reaching sql.unsafe (no injection path).
  const allowedSorts: Record<string, string> = {
    filename: "filename ASC",
    "-filename": "filename DESC",
    size: "size ASC",
    "-size": "size DESC",
    created_at: "created_at ASC",
    "-created_at": "created_at DESC",
    updated_at: "updated_at ASC",
    "-updated_at": "updated_at DESC",
  };
  const orderBy = allowedSorts[sort] ?? "filename ASC";
  const rows = await sql.unsafe(
    `SELECT * FROM files
     WHERE parent_id = $1 AND owner_id = $2 AND deleted_at IS NULL
     ORDER BY is_folder DESC, ${orderBy}
     LIMIT $3 OFFSET $4`,
    [id, identity.id, limit, offset],
  );
  return c.json({ files: rows });
}
// ── Internal helpers ────────────────────────────────────────────────────────
/**
 * Walk parent_id links upward from `parentId` and return the joined
 * "grandparent/parent/child" path (root-most first).
 * Guards against parent_id cycles in corrupt data, which would otherwise
 * loop forever; a cycle simply terminates the walk.
 */
async function buildPathFromParent(parentId: string, ownerId: string): Promise<string> {
  const parts: string[] = [];
  const seen = new Set<string>();
  let currentId: string | null = parentId;
  while (currentId && !seen.has(currentId)) {
    seen.add(currentId);
    const [folder] = await sql`
      SELECT id, filename, parent_id FROM files
      WHERE id = ${currentId} AND owner_id = ${ownerId} AND is_folder = true
    `;
    if (!folder) break;
    parts.unshift(folder.filename);
    currentId = folder.parent_id;
  }
  return parts.join("/");
}

227
server/keto.ts Normal file
View File

@@ -0,0 +1,227 @@
/**
* Lightweight HTTP client for Ory Keto — no SDK, just fetch.
*/
import { withSpan } from "./telemetry.ts";
// Keto's split API: the read endpoint serves checks/lists/expands, the write
// (admin) endpoint mutates relation tuples. Defaults target in-cluster services.
const KETO_READ_URL = Deno.env.get("KETO_READ_URL") ??
  "http://keto-read.ory.svc.cluster.local:4466";
const KETO_WRITE_URL = Deno.env.get("KETO_WRITE_URL") ??
  "http://keto-write.ory.svc.cluster.local:4467";
// ── Types ────────────────────────────────────────────────────────────────────
/** A single Keto relation tuple: namespace:object#relation@subject. */
export interface RelationTuple {
  namespace: string;
  object: string;
  relation: string;
  // Direct subject (an identity id). Used mutually exclusively with
  // subject_set by the helpers below.
  subject_id?: string;
  // Indirect subject: everyone holding `relation` on namespace:object.
  subject_set?: {
    namespace: string;
    object: string;
    relation: string;
  };
}
/** One entry in a batch PATCH against the Keto admin API. */
export interface RelationPatch {
  action: "insert" | "delete";
  relation_tuple: RelationTuple;
}
// ── Check ────────────────────────────────────────────────────────────────────
/**
 * Check whether `subjectId` has `relation` on `namespace:object`.
 * Returns false on network errors instead of throwing.
 */
export async function checkPermission(
  namespace: string,
  object: string,
  relation: string,
  subjectId: string,
): Promise<boolean> {
  return withSpan("keto.checkPermission", { "keto.namespace": namespace, "keto.relation": relation }, async () => {
    const query = { namespace, object, relation, subject_id: subjectId };
    try {
      const response = await fetch(`${KETO_READ_URL}/relation-tuples/check/openapi`, {
        method: "POST",
        headers: { "Content-Type": "application/json" },
        body: JSON.stringify(query),
      });
      if (!response.ok) return false;
      const result = await response.json();
      return result.allowed === true;
    } catch {
      // Fail closed: any network/parse failure counts as "not allowed".
      return false;
    }
  });
}
// ── Write ────────────────────────────────────────────────────────────────────
/**
 * Create a relationship tuple with a direct subject.
 */
export async function createRelationship(
  namespace: string,
  object: string,
  relation: string,
  subjectId: string,
): Promise<void> {
  return withSpan("keto.createRelationship", { "keto.namespace": namespace, "keto.relation": relation }, async () => {
    const tuple = { namespace, object, relation, subject_id: subjectId };
    const response = await fetch(`${KETO_WRITE_URL}/admin/relation-tuples`, {
      method: "PUT",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(tuple),
    });
    if (response.ok) return;
    const detail = await response.text();
    throw new Error(
      `Keto createRelationship failed (${response.status}): ${detail}`,
    );
  });
}
/**
 * Create a relationship tuple with a subject set (e.g. parent folder).
 */
export async function createRelationshipWithSubjectSet(
  namespace: string,
  object: string,
  relation: string,
  subjectSetNamespace: string,
  subjectSetObject: string,
  subjectSetRelation: string,
): Promise<void> {
  return withSpan("keto.createRelationshipWithSubjectSet", { "keto.namespace": namespace, "keto.relation": relation }, async () => {
    // Subject is an indirection: everyone holding subjectSetRelation on the
    // subject-set object inherits this tuple.
    const tuple = {
      namespace,
      object,
      relation,
      subject_set: {
        namespace: subjectSetNamespace,
        object: subjectSetObject,
        relation: subjectSetRelation,
      },
    };
    const response = await fetch(`${KETO_WRITE_URL}/admin/relation-tuples`, {
      method: "PUT",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(tuple),
    });
    if (response.ok) return;
    const detail = await response.text();
    throw new Error(
      `Keto createRelationshipWithSubjectSet failed (${response.status}): ${detail}`,
    );
  });
}
// ── Delete ───────────────────────────────────────────────────────────────────
/**
 * Delete a relationship tuple.
 */
export async function deleteRelationship(
  namespace: string,
  object: string,
  relation: string,
  subjectId: string,
): Promise<void> {
  return withSpan("keto.deleteRelationship", { "keto.namespace": namespace, "keto.relation": relation }, async () => {
    // Keto's admin DELETE identifies the tuple via query parameters.
    const query = new URLSearchParams({ namespace, object, relation, subject_id: subjectId });
    const response = await fetch(`${KETO_WRITE_URL}/admin/relation-tuples?${query}`, {
      method: "DELETE",
    });
    if (response.ok) return;
    const detail = await response.text();
    throw new Error(
      `Keto deleteRelationship failed (${response.status}): ${detail}`,
    );
  });
}
// ── Batch ────────────────────────────────────────────────────────────────────
/**
 * Apply a batch of insert/delete patches atomically.
 */
export async function batchWriteRelationships(
  patches: RelationPatch[],
): Promise<void> {
  return withSpan("keto.batchWriteRelationships", { "keto.patch_count": patches.length }, async () => {
    const response = await fetch(`${KETO_WRITE_URL}/admin/relation-tuples`, {
      method: "PATCH",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(patches),
    });
    if (response.ok) return;
    const detail = await response.text();
    throw new Error(
      `Keto batchWriteRelationships failed (${response.status}): ${detail}`,
    );
  });
}
// ── List ─────────────────────────────────────────────────────────────────────
/**
 * List relationship tuples matching the given filters.
 */
export async function listRelationships(
  namespace: string,
  object?: string,
  relation?: string,
  subjectId?: string,
): Promise<RelationTuple[]> {
  return withSpan("keto.listRelationships", { "keto.namespace": namespace }, async () => {
    // Only namespace is mandatory; the rest narrow the result set.
    const query = new URLSearchParams({ namespace });
    if (object) query.set("object", object);
    if (relation) query.set("relation", relation);
    if (subjectId) query.set("subject_id", subjectId);
    const response = await fetch(`${KETO_READ_URL}/relation-tuples?${query}`);
    if (!response.ok) {
      const detail = await response.text();
      throw new Error(`Keto listRelationships failed (${response.status}): ${detail}`);
    }
    const payload = await response.json();
    return payload.relation_tuples ?? [];
  });
}
// ── Expand ───────────────────────────────────────────────────────────────────
/**
 * Expand a permission tree for debugging / UI display.
 */
export async function expandPermission(
  namespace: string,
  object: string,
  relation: string,
  maxDepth?: number,
): Promise<unknown> {
  return withSpan("keto.expandPermission", { "keto.namespace": namespace, "keto.relation": relation }, async () => {
    const request = { namespace, object, relation, max_depth: maxDepth ?? 3 };
    const response = await fetch(`${KETO_READ_URL}/relation-tuples/expand`, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(request),
    });
    if (!response.ok) {
      const detail = await response.text();
      throw new Error(`Keto expandPermission failed (${response.status}): ${detail}`);
    }
    return await response.json();
  });
}

116
server/migrate.ts Normal file
View File

@@ -0,0 +1,116 @@
import sql from "./db.ts";
// Ordered list of migrations. migrate() applies them in array order, so keep
// the array sorted by the numeric name prefix (003 previously sat after 004).
const MIGRATIONS = [
  {
    name: "001_create_files",
    up: `
      CREATE TABLE IF NOT EXISTS files (
        id UUID PRIMARY KEY DEFAULT gen_random_uuid(),
        s3_key TEXT NOT NULL UNIQUE,
        filename TEXT NOT NULL,
        mimetype TEXT NOT NULL DEFAULT 'application/octet-stream',
        size BIGINT NOT NULL DEFAULT 0,
        owner_id TEXT NOT NULL,
        parent_id UUID REFERENCES files(id) ON DELETE CASCADE,
        is_folder BOOLEAN NOT NULL DEFAULT false,
        created_at TIMESTAMPTZ NOT NULL DEFAULT now(),
        updated_at TIMESTAMPTZ NOT NULL DEFAULT now(),
        deleted_at TIMESTAMPTZ
      );
      CREATE INDEX IF NOT EXISTS idx_files_parent ON files(parent_id) WHERE deleted_at IS NULL;
      CREATE INDEX IF NOT EXISTS idx_files_owner ON files(owner_id) WHERE deleted_at IS NULL;
      CREATE INDEX IF NOT EXISTS idx_files_s3key ON files(s3_key);
    `,
  },
  {
    name: "002_create_user_file_state",
    up: `
      CREATE TABLE IF NOT EXISTS user_file_state (
        user_id TEXT NOT NULL,
        file_id UUID NOT NULL REFERENCES files(id) ON DELETE CASCADE,
        favorited BOOLEAN NOT NULL DEFAULT false,
        last_opened TIMESTAMPTZ,
        PRIMARY KEY (user_id, file_id)
      );
    `,
  },
  {
    // Bookkeeping table; also bootstrapped directly in migrate() so the
    // applied-check works on databases created before this migration existed.
    name: "003_create_migrations_table",
    up: `
      CREATE TABLE IF NOT EXISTS _migrations (
        name TEXT PRIMARY KEY,
        applied_at TIMESTAMPTZ NOT NULL DEFAULT now()
      );
    `,
  },
  {
    name: "004_folder_sizes",
    up: `
      -- Recompute a single folder's size from its direct + nested descendants.
      -- Called after any file size change, create, delete, or move.
      CREATE OR REPLACE FUNCTION recompute_folder_size(folder_id UUID)
      RETURNS BIGINT LANGUAGE SQL AS $$
        WITH RECURSIVE descendants AS (
          -- Direct children of this folder
          SELECT id, size, is_folder
          FROM files
          WHERE parent_id = folder_id AND deleted_at IS NULL
          UNION ALL
          -- Recurse into subfolders
          SELECT f.id, f.size, f.is_folder
          FROM files f
          JOIN descendants d ON f.parent_id = d.id
          WHERE f.deleted_at IS NULL
        )
        SELECT COALESCE(SUM(size) FILTER (WHERE NOT is_folder), 0)
        FROM descendants;
      $$;
      -- Propagate size updates from a file up through all ancestor folders.
      CREATE OR REPLACE FUNCTION propagate_folder_sizes(start_parent_id UUID)
      RETURNS VOID LANGUAGE plpgsql AS $$
      DECLARE
        current_id UUID := start_parent_id;
        computed BIGINT;
      BEGIN
        WHILE current_id IS NOT NULL LOOP
          computed := recompute_folder_size(current_id);
          UPDATE files SET size = computed WHERE id = current_id AND is_folder = true;
          SELECT parent_id INTO current_id FROM files WHERE id = current_id;
        END LOOP;
      END;
      $$;
    `,
  },
];
async function migrate() {
// Ensure migrations table exists first
await sql.unsafe(`
CREATE TABLE IF NOT EXISTS _migrations (
name TEXT PRIMARY KEY,
applied_at TIMESTAMPTZ NOT NULL DEFAULT now()
);
`);
for (const migration of MIGRATIONS) {
const [existing] = await sql`
SELECT name FROM _migrations WHERE name = ${migration.name}
`;
if (existing) {
console.log(` skip: ${migration.name}`);
continue;
}
console.log(` apply: ${migration.name}`);
await sql.unsafe(migration.up);
await sql`INSERT INTO _migrations (name) VALUES (${migration.name})`;
}
console.log("Migrations complete.");
}
// CLI entrypoint: running this file directly (Deno's import.meta.main)
// applies all migrations and closes the connection pool before exit.
if (import.meta.main) {
  await migrate();
  await sql.end();
}
export { migrate };

216
server/permissions.ts Normal file
View File

@@ -0,0 +1,216 @@
/**
* Hono middleware and helpers for Keto-based permission checks.
*/
import type { Context, Next } from "hono";
import {
checkPermission,
createRelationship,
createRelationshipWithSubjectSet,
deleteRelationship,
batchWriteRelationships,
type RelationPatch,
} from "./keto.ts";
// ── Middleware ────────────────────────────────────────────────────────────────
/**
 * Permission middleware for /api/files/* routes.
 *
 * - Extracts identity from `c.get("identity")`.
 * - For GET requests, checks `read` permission.
 * - For POST/PUT/DELETE, checks `write` or `delete` permission.
 * - Returns 403 if denied.
 * - Passes through for list operations (no file ID in route).
 */
export async function permissionMiddleware(
  c: Context,
  next: Next,
): Promise<Response | void> {
  const identity = c.get("identity") as { id: string } | undefined;
  if (!identity?.id) {
    return c.json({ error: "Unauthorized" }, 401);
  }
  // Routes without a UUID segment are list operations — per-item filtering
  // happens in the handler instead.
  const idMatch = /\/api\/(?:files|folders)\/([0-9a-f-]{36})/.exec(c.req.path);
  if (!idMatch) {
    return await next();
  }
  const resourceId = idMatch[1];
  const namespace = c.req.path.includes("/folders/") ? "folders" : "files";
  // Map HTTP verb onto a Keto relation; anything non-GET/DELETE needs write.
  const method = c.req.method.toUpperCase();
  const relation = method === "GET" ? "read" : method === "DELETE" ? "delete" : "write";
  const allowed = await checkPermission(namespace, resourceId, relation, identity.id);
  if (!allowed) {
    return c.json({ error: "Forbidden" }, 403);
  }
  return await next();
}
// ── Tuple lifecycle helpers ──────────────────────────────────────────────────
/**
 * Write permission tuples when a file is created.
 * - Creates owner relationship: files:{fileId}#owner@{ownerId}
 * - If parentFolderId, creates parent relationship:
 *   files:{fileId}#parent@folders:{parentFolderId}#...
 */
export async function writeFilePermissions(
  fileId: string,
  ownerId: string,
  parentFolderId?: string,
): Promise<void> {
  await createRelationship("files", fileId, "owners", ownerId);
  if (!parentFolderId) return;
  await createRelationshipWithSubjectSet(
    "files",
    fileId,
    "parents",
    "folders",
    parentFolderId,
    "",
  );
}
/**
 * Write permission tuples when a folder is created.
 * Links the folder to its parent folder when nested, otherwise to the
 * owning bucket when a bucketId is supplied.
 */
export async function writeFolderPermissions(
  folderId: string,
  ownerId: string,
  parentFolderId?: string,
  bucketId?: string,
): Promise<void> {
  await createRelationship("folders", folderId, "owners", ownerId);
  // Exactly one parent link: folder takes precedence over bucket.
  const parent = parentFolderId
    ? { namespace: "folders", object: parentFolderId }
    : bucketId
    ? { namespace: "buckets", object: bucketId }
    : null;
  if (!parent) return;
  await createRelationshipWithSubjectSet(
    "folders",
    folderId,
    "parents",
    parent.namespace,
    parent.object,
    "",
  );
}
/**
 * Remove all relationships for a file.
 * We cannot enumerate subjects without listing, so each known relation is
 * listed first and everything found is deleted in one batch.
 */
export async function deleteFilePermissions(fileId: string): Promise<void> {
  const relations = ["owners", "editors", "viewers", "parents"];
  const { listRelationships } = await import("./keto.ts");
  // The four list calls are independent — run them in parallel.
  const tupleLists = await Promise.all(
    relations.map((relation) => listRelationships("files", fileId, relation)),
  );
  const patches: RelationPatch[] = tupleLists.flat().map((tuple) => ({
    action: "delete" as const,
    relation_tuple: tuple,
  }));
  if (patches.length > 0) {
    await batchWriteRelationships(patches);
  }
}
/**
 * Update parent relationship when a file is moved.
 * Deletes old parent tuple and creates new one.
 */
export async function moveFilePermissions(
  fileId: string,
  newParentId: string,
): Promise<void> {
  const { listRelationships } = await import("./keto.ts");
  // Stage a deletion for every existing parent tuple…
  const stale = await listRelationships("files", fileId, "parents");
  const patches: RelationPatch[] = stale.map((relation_tuple) => ({
    action: "delete" as const,
    relation_tuple,
  }));
  // …then stage the single insert pointing at the new parent folder.
  patches.push({
    action: "insert",
    relation_tuple: {
      namespace: "files",
      object: fileId,
      relation: "parents",
      subject_set: {
        namespace: "folders",
        object: newParentId,
        relation: "",
      },
    },
  });
  await batchWriteRelationships(patches);
}
/**
 * Filter a list of files/folders by permission.
 * Checks permissions in parallel and returns only the allowed items.
 */
export async function filterByPermission<
  T extends { id: string; is_folder?: boolean },
>(
  files: T[],
  userId: string,
  relation: string,
): Promise<T[]> {
  // One parallel Keto check per item; verdicts align with `files` by index.
  const verdicts = await Promise.all(
    files.map((item) => {
      const namespace = item.is_folder ? "folders" : "files";
      return checkPermission(namespace, item.id, relation, userId);
    }),
  );
  return files.filter((_, index) => verdicts[index]);
}

215
server/s3-presign.ts Normal file
View File

@@ -0,0 +1,215 @@
/**
* Pre-signed URL generation for S3 (AWS Signature V4 query-string auth).
* Supports single-object GET/PUT and multipart upload lifecycle.
*/
import {
ACCESS_KEY,
BUCKET,
getSigningKey,
hmacSha256,
REGION,
SECRET_KEY,
SEAWEEDFS_S3_URL,
sha256Hex,
toHex,
} from "./s3.ts";
// Shared UTF-8 encoder for hashing canonical requests below.
const encoder = new TextEncoder();
/**
 * Build a pre-signed URL using AWS SigV4 query-string signing.
 *
 * @param method HTTP method the URL will be used with (part of the signature).
 * @param key Object key within BUCKET.
 * @param expiresIn Validity window in seconds (becomes X-Amz-Expires).
 * @param extraQuery Extra query params to include and sign (multipart etc.).
 * @param extraSignedHeaders Headers the client must send verbatim (e.g. content-type).
 * @returns The fully signed URL as a string.
 */
export async function presignUrl(
  method: string,
  key: string,
  expiresIn: number,
  extraQuery?: Record<string, string>,
  extraSignedHeaders?: Record<string, string>,
): Promise<string> {
  const url = new URL(`/${BUCKET}/${key}`, SEAWEEDFS_S3_URL);
  const now = new Date();
  // X-Amz-Date format YYYYMMDD'T'HHMMSS'Z': ISO timestamp with "-"/":" stripped
  // and sub-second precision dropped.
  const dateStamp =
    now.toISOString().replace(/[-:]/g, "").split(".")[0] + "Z";
  const shortDate = dateStamp.slice(0, 8);
  const scope = `${shortDate}/${REGION}/s3/aws4_request`;
  // Query parameters required for pre-signed URL
  url.searchParams.set("X-Amz-Algorithm", "AWS4-HMAC-SHA256");
  url.searchParams.set("X-Amz-Credential", `${ACCESS_KEY}/${scope}`);
  url.searchParams.set("X-Amz-Date", dateStamp);
  url.searchParams.set("X-Amz-Expires", String(expiresIn));
  // Extra query params (for multipart etc.)
  if (extraQuery) {
    for (const [k, v] of Object.entries(extraQuery)) {
      url.searchParams.set(k, v);
    }
  }
  // Headers to sign — `host` is always part of a SigV4 signature.
  const headers: Record<string, string> = {
    host: url.host,
    ...extraSignedHeaders,
  };
  const signedHeaderKeys = Object.keys(headers)
    .map((k) => k.toLowerCase())
    .sort();
  const signedHeadersStr = signedHeaderKeys.join(";");
  url.searchParams.set("X-Amz-SignedHeaders", signedHeadersStr);
  // Sort query params for canonical request (SigV4 mandates byte-order by key).
  const sortedParams = [...url.searchParams.entries()].sort((a, b) =>
    a[0] < b[0] ? -1 : a[0] > b[0] ? 1 : 0,
  );
  // NOTE(review): encodeURIComponent leaves "!*'()" unescaped, which differs
  // from AWS canonical URI-encoding for values containing those characters —
  // confirm SeaweedFS accepts this for the keys/params actually used here.
  const canonicalQs = sortedParams
    .map(
      ([k, v]) =>
        `${encodeURIComponent(k)}=${encodeURIComponent(v)}`,
    )
    .join("&");
  // Canonical headers: one "lowercase-name:value\n" line per signed header.
  const canonicalHeaders =
    signedHeaderKeys
      .map((k) => {
        const originalKey = Object.keys(headers).find(
          (h) => h.toLowerCase() === k,
        )!;
        return `${k}:${headers[originalKey]}`;
      })
      .join("\n") + "\n";
  // Pre-signed URLs never hash the request body — payload is declared unsigned.
  const canonicalRequest = [
    method,
    url.pathname,
    canonicalQs,
    canonicalHeaders,
    signedHeadersStr,
    "UNSIGNED-PAYLOAD",
  ].join("\n");
  const stringToSign = [
    "AWS4-HMAC-SHA256",
    dateStamp,
    scope,
    await sha256Hex(encoder.encode(canonicalRequest)),
  ].join("\n");
  const signingKey = await getSigningKey(SECRET_KEY, shortDate, REGION);
  const signature = toHex(await hmacSha256(signingKey, stringToSign));
  url.searchParams.set("X-Amz-Signature", signature);
  return url.toString();
}
// ── Public helpers ──────────────────────────────────────────────────────────
const DEFAULT_EXPIRES = 3600; // 1 hour
/** Pre-sign a GET (download) URL for `key`. */
export async function presignGetUrl(
  key: string,
  expiresIn = DEFAULT_EXPIRES,
): Promise<string> {
  return await presignUrl("GET", key, expiresIn);
}
/** Pre-sign a PUT (upload) URL; content-type is a signed header, so the
 * client must upload with exactly this content type. */
export async function presignPutUrl(
  key: string,
  contentType: string,
  expiresIn = DEFAULT_EXPIRES,
): Promise<string> {
  const signedHeaders = { "content-type": contentType };
  return await presignUrl("PUT", key, expiresIn, undefined, signedHeaders);
}
// ── Multipart upload ────────────────────────────────────────────────────────
/**
 * Initiate a multipart upload and return its UploadId.
 *
 * @param key Destination object key.
 * @param contentType Content type recorded for the final assembled object.
 * @returns The UploadId to pass to presignUploadPart / completeMultipartUpload.
 * @throws Error when S3 rejects the request or the response lacks an UploadId.
 */
export async function createMultipartUpload(
  key: string,
  contentType: string,
): Promise<string> {
  // POST /{bucket}/{key}?uploads to initiate
  const url = new URL(`/${BUCKET}/${key}`, SEAWEEDFS_S3_URL);
  url.searchParams.set("uploads", "");
  const headers: Record<string, string> = {
    host: url.host,
    "content-type": contentType,
  };
  // Empty request body — the hash of zero bytes goes into the signature.
  const bodyHash = await sha256Hex(new Uint8Array(0));
  // We need to import signRequest from s3.ts
  const { signRequest } = await import("./s3.ts");
  await signRequest("POST", url, headers, bodyHash);
  const resp = await fetch(url.toString(), {
    method: "POST",
    headers,
  });
  if (!resp.ok) {
    const text = await resp.text();
    throw new Error(`CreateMultipartUpload failed ${resp.status}: ${text}`);
  }
  // Minimal regex scrape — the response XML is tiny and machine-generated.
  const xml = await resp.text();
  const uploadId = xml.match(/<UploadId>(.*?)<\/UploadId>/)?.[1];
  if (!uploadId) {
    throw new Error("No UploadId in CreateMultipartUpload response");
  }
  return uploadId;
}
/** Pre-sign the PUT URL for a single part of a multipart upload. */
export async function presignUploadPart(
  key: string,
  uploadId: string,
  partNumber: number,
  expiresIn = DEFAULT_EXPIRES,
): Promise<string> {
  const query = {
    uploadId,
    partNumber: String(partNumber),
  };
  return await presignUrl("PUT", key, expiresIn, query);
}
/**
 * Complete a multipart upload by POSTing the part manifest.
 *
 * S3's CompleteMultipartUpload requires parts listed in ascending
 * PartNumber order, so a sorted copy is built rather than trusting the
 * caller's ordering (parts may finish uploading out of order).
 *
 * @throws Error when S3 rejects the completion request.
 */
export async function completeMultipartUpload(
  key: string,
  uploadId: string,
  parts: { partNumber: number; etag: string }[],
): Promise<void> {
  const url = new URL(`/${BUCKET}/${key}`, SEAWEEDFS_S3_URL);
  url.searchParams.set("uploadId", uploadId);
  // Sort a copy — do not mutate the caller's array.
  const ordered = [...parts].sort((a, b) => a.partNumber - b.partNumber);
  const xmlParts = ordered
    .map(
      (p) =>
        `<Part><PartNumber>${p.partNumber}</PartNumber><ETag>${p.etag}</ETag></Part>`,
    )
    .join("");
  const body = encoder.encode(
    `<CompleteMultipartUpload>${xmlParts}</CompleteMultipartUpload>`,
  );
  const headers: Record<string, string> = {
    host: url.host,
    "content-type": "application/xml",
  };
  const bodyHash = await sha256Hex(body);
  const { signRequest } = await import("./s3.ts");
  await signRequest("POST", url, headers, bodyHash);
  const resp = await fetch(url.toString(), {
    method: "POST",
    headers,
    body,
  });
  if (!resp.ok) {
    const text = await resp.text();
    throw new Error(`CompleteMultipartUpload failed ${resp.status}: ${text}`);
  }
  await resp.text();
}

338
server/s3.ts Normal file
View File

@@ -0,0 +1,338 @@
/**
* S3 client using AWS Signature V4 (Web Crypto API, no external SDK).
* Generalised from kratos-admin/server/s3.ts to support full CRUD + list + copy.
*/
import { withSpan } from "./telemetry.ts";
// S3 endpoint + credentials, resolved once at module load from env vars.
const SEAWEEDFS_S3_URL =
  Deno.env.get("SEAWEEDFS_S3_URL") ??
  "http://seaweedfs-filer.storage.svc.cluster.local:8333";
const ACCESS_KEY = Deno.env.get("SEAWEEDFS_ACCESS_KEY") ?? "";
const SECRET_KEY = Deno.env.get("SEAWEEDFS_SECRET_KEY") ?? "";
const BUCKET = Deno.env.get("S3_BUCKET") ?? "sunbeam-driver";
const REGION = Deno.env.get("S3_REGION") ?? "us-east-1";
// Shared UTF-8 encoder used by the signing helpers below.
const encoder = new TextEncoder();
// ── Crypto helpers ──────────────────────────────────────────────────────────
export async function hmacSha256(
key: ArrayBuffer | Uint8Array,
data: string,
): Promise<ArrayBuffer> {
const keyBuf =
key instanceof Uint8Array
? key.buffer.slice(key.byteOffset, key.byteOffset + key.byteLength)
: key;
const cryptoKey = await crypto.subtle.importKey(
"raw",
keyBuf as ArrayBuffer,
{ name: "HMAC", hash: "SHA-256" },
false,
["sign"],
);
return crypto.subtle.sign("HMAC", cryptoKey, encoder.encode(data));
}
/** SHA-256 digest of `data`, rendered as lowercase hex. */
export async function sha256Hex(data: Uint8Array): Promise<string> {
  // Copy out the view's exact byte range — digest() needs a plain buffer.
  const exactBytes = data.buffer.slice(
    data.byteOffset,
    data.byteOffset + data.byteLength,
  ) as ArrayBuffer;
  const digest = await crypto.subtle.digest("SHA-256", exactBytes);
  return toHex(digest);
}
/** Render an ArrayBuffer as a lowercase hex string. */
export function toHex(buf: ArrayBuffer): string {
  let out = "";
  for (const byte of new Uint8Array(buf)) {
    out += byte.toString(16).padStart(2, "0");
  }
  return out;
}
// ── Signing ────────────────────────────────────────────────────────────────
/**
 * Derive the SigV4 signing key: an HMAC chain seeded with "AWS4"+secret over
 * the short date, then region, service ("s3"), and the literal "aws4_request".
 */
export async function getSigningKey(
  secretKey: string,
  shortDate: string,
  region: string,
): Promise<ArrayBuffer> {
  let key: ArrayBuffer = await hmacSha256(
    encoder.encode("AWS4" + secretKey),
    shortDate,
  );
  for (const stage of [region, "s3", "aws4_request"]) {
    key = await hmacSha256(key, stage);
  }
  return key;
}
/**
 * Build canonical query string from URLSearchParams, sorted by key.
 */
function canonicalQueryString(params: URLSearchParams): string {
  const byKey = [...params.entries()].sort(([a], [b]) =>
    a < b ? -1 : a > b ? 1 : 0,
  );
  const encodedPairs: string[] = [];
  for (const [name, value] of byKey) {
    encodedPairs.push(`${encodeURIComponent(name)}=${encodeURIComponent(value)}`);
  }
  return encodedPairs.join("&");
}
// Map of header name → value participating in a SigV4 signature.
export interface SignedHeaders {
  [key: string]: string;
}
/**
 * Sign an S3 request with AWS Signature V4 (header-based auth).
 *
 * MUTATES `headers` in place: adds x-amz-date, x-amz-content-sha256 and the
 * final Authorization header, then returns the same object.
 *
 * @param method HTTP method of the request being signed.
 * @param url Full request URL; its query string enters the canonical request.
 * @param headers Headers to sign — must already include `host`.
 * @param bodyHash Hex SHA-256 of the body, or "UNSIGNED-PAYLOAD" for streams.
 * @returns The mutated `headers` object, ready to pass to fetch.
 */
export async function signRequest(
  method: string,
  url: URL,
  headers: Record<string, string>,
  bodyHash: string,
  accessKey: string = ACCESS_KEY,
  secretKey: string = SECRET_KEY,
  region: string = REGION,
): Promise<Record<string, string>> {
  const now = new Date();
  // X-Amz-Date format YYYYMMDD'T'HHMMSS'Z' (ISO timestamp, separators stripped).
  const dateStamp =
    now.toISOString().replace(/[-:]/g, "").split(".")[0] + "Z";
  const shortDate = dateStamp.slice(0, 8);
  const scope = `${shortDate}/${region}/s3/aws4_request`;
  headers["x-amz-date"] = dateStamp;
  headers["x-amz-content-sha256"] = bodyHash;
  // SigV4 requires header names lowercased and sorted for the signature.
  const signedHeaderKeys = Object.keys(headers)
    .map((k) => k.toLowerCase())
    .sort();
  const signedHeaders = signedHeaderKeys.join(";");
  // One "lowercase-name:value\n" line per signed header.
  const canonicalHeaders =
    signedHeaderKeys
      .map((k) => {
        const originalKey = Object.keys(headers).find(
          (h) => h.toLowerCase() === k,
        )!;
        return `${k}:${headers[originalKey]}`;
      })
      .join("\n") + "\n";
  const qs = canonicalQueryString(url.searchParams);
  const canonicalRequest = [
    method,
    url.pathname,
    qs,
    canonicalHeaders,
    signedHeaders,
    bodyHash,
  ].join("\n");
  const stringToSign = [
    "AWS4-HMAC-SHA256",
    dateStamp,
    scope,
    await sha256Hex(encoder.encode(canonicalRequest)),
  ].join("\n");
  const signingKey = await getSigningKey(secretKey, shortDate, region);
  const signature = toHex(await hmacSha256(signingKey, stringToSign));
  headers[
    "Authorization"
  ] = `AWS4-HMAC-SHA256 Credential=${accessKey}/${scope}, SignedHeaders=${signedHeaders}, Signature=${signature}`;
  return headers;
}
// ── Low-level S3 request ────────────────────────────────────────────────────
/**
 * Perform one signed request against the S3 endpoint.
 * Byte bodies are hashed for the signature; streaming bodies cannot be
 * hashed upfront and are declared UNSIGNED-PAYLOAD instead.
 */
async function s3Fetch(
  method: string,
  path: string,
  opts: {
    query?: Record<string, string>;
    body?: Uint8Array | ReadableStream<Uint8Array> | null;
    contentType?: string;
    extraHeaders?: Record<string, string>;
  } = {},
): Promise<Response> {
  const url = new URL(path, SEAWEEDFS_S3_URL);
  for (const [name, value] of Object.entries(opts.query ?? {})) {
    url.searchParams.set(name, value);
  }
  const headers: Record<string, string> = {
    host: url.host,
    ...opts.extraHeaders,
  };
  if (opts.contentType) headers["content-type"] = opts.contentType;
  let bodyHash: string;
  let fetchBody: BodyInit | null = null;
  if (opts.body instanceof ReadableStream) {
    bodyHash = "UNSIGNED-PAYLOAD";
    fetchBody = opts.body;
  } else if (opts.body) {
    bodyHash = await sha256Hex(opts.body);
    fetchBody = opts.body as unknown as BodyInit;
  } else {
    bodyHash = await sha256Hex(new Uint8Array(0));
  }
  await signRequest(method, url, headers, bodyHash);
  return fetch(url.toString(), { method, headers, body: fetchBody });
}
// ── Public API ──────────────────────────────────────────────────────────────
/** Parsed result of a ListObjectsV2 call. */
export interface ListObjectsResult {
  contents: { key: string; lastModified: string; size: number }[];
  // "Directory"-style prefixes returned when a delimiter was supplied.
  commonPrefixes: string[];
  isTruncated: boolean;
  // Present when isTruncated — pass back to listObjects to fetch the next page.
  nextContinuationToken?: string;
}
/**
 * ListObjectsV2 over the bucket, scoped to `prefix`.
 *
 * @param prefix Key prefix to list under.
 * @param delimiter Optional delimiter (typically "/") to group CommonPrefixes.
 * @param maxKeys Optional page size cap.
 * @param continuationToken Token from a previous truncated page.
 * @throws Error on any non-2xx S3 response.
 */
export async function listObjects(
  prefix: string,
  delimiter?: string,
  maxKeys?: number,
  continuationToken?: string,
): Promise<ListObjectsResult> {
  return withSpan("s3.listObjects", { "s3.prefix": prefix }, async () => {
    const query: Record<string, string> = {
      "list-type": "2",
      prefix,
    };
    if (delimiter) query["delimiter"] = delimiter;
    if (maxKeys) query["max-keys"] = String(maxKeys);
    if (continuationToken) query["continuation-token"] = continuationToken;
    const resp = await s3Fetch("GET", `/${BUCKET}/`, { query });
    const text = await resp.text();
    if (!resp.ok) throw new Error(`ListObjects failed ${resp.status}: ${text}`);
    // Minimal XML parsing (no external deps)
    const contents: ListObjectsResult["contents"] = [];
    const contentMatches = text.matchAll(
      /<Contents>([\s\S]*?)<\/Contents>/g,
    );
    for (const m of contentMatches) {
      const block = m[1];
      const key = block.match(/<Key>(.*?)<\/Key>/)?.[1] ?? "";
      const lastModified =
        block.match(/<LastModified>(.*?)<\/LastModified>/)?.[1] ?? "";
      const size = parseInt(block.match(/<Size>(.*?)<\/Size>/)?.[1] ?? "0", 10);
      contents.push({ key, lastModified, size });
    }
    const commonPrefixes: string[] = [];
    const prefixMatches = text.matchAll(
      /<CommonPrefixes>\s*<Prefix>(.*?)<\/Prefix>\s*<\/CommonPrefixes>/g,
    );
    for (const m of prefixMatches) {
      commonPrefixes.push(m[1]);
    }
    const isTruncated = /<IsTruncated>true<\/IsTruncated>/.test(text);
    const nextToken =
      text.match(/<NextContinuationToken>(.*?)<\/NextContinuationToken>/)?.[1];
    return {
      contents,
      commonPrefixes,
      isTruncated,
      nextContinuationToken: nextToken,
    };
  });
}
/** HEAD an object; returns its metadata, or null when the key does not exist. */
export async function headObject(
  key: string,
): Promise<{ contentType: string; contentLength: number; lastModified: string } | null> {
  return withSpan("s3.headObject", { "s3.key": key }, async () => {
    const resp = await s3Fetch("HEAD", `/${BUCKET}/${key}`);
    if (resp.status === 404) return null;
    if (!resp.ok) throw new Error(`HeadObject failed ${resp.status}`);
    const contentType = resp.headers.get("content-type") ?? "application/octet-stream";
    const contentLength = parseInt(resp.headers.get("content-length") ?? "0", 10);
    const lastModified = resp.headers.get("last-modified") ?? "";
    return { contentType, contentLength, lastModified };
  });
}
/** GET an object; the caller inspects status and streams the raw Response. */
export async function getObject(key: string): Promise<Response> {
  return withSpan("s3.getObject", { "s3.key": key }, () =>
    s3Fetch("GET", `/${BUCKET}/${key}`),
  );
}
/** PUT a full object body; throws on any non-2xx response. */
export async function putObject(
  key: string,
  body: Uint8Array,
  contentType: string,
): Promise<void> {
  return withSpan("s3.putObject", { "s3.key": key, "s3.content_type": contentType }, async () => {
    const resp = await s3Fetch("PUT", `/${BUCKET}/${key}`, { body, contentType });
    if (resp.ok) {
      // Drain response body
      await resp.text();
      return;
    }
    const text = await resp.text();
    throw new Error(`PutObject failed ${resp.status}: ${text}`);
  });
}
/** DELETE an object; a 404 is tolerated (the key is already gone). */
export async function deleteObject(key: string): Promise<void> {
  return withSpan("s3.deleteObject", { "s3.key": key }, async () => {
    const resp = await s3Fetch("DELETE", `/${BUCKET}/${key}`);
    const acceptable = resp.ok || resp.status === 404;
    if (!acceptable) {
      const text = await resp.text();
      throw new Error(`DeleteObject failed ${resp.status}: ${text}`);
    }
    await resp.text();
  });
}
/** Server-side object copy via the x-amz-copy-source header (no data transit). */
export async function copyObject(
  sourceKey: string,
  destKey: string,
): Promise<void> {
  return withSpan("s3.copyObject", { "s3.source_key": sourceKey, "s3.dest_key": destKey }, async () => {
    const copyHeaders = { "x-amz-copy-source": `/${BUCKET}/${sourceKey}` };
    const resp = await s3Fetch("PUT", `/${BUCKET}/${destKey}`, {
      extraHeaders: copyHeaders,
    });
    if (!resp.ok) {
      const text = await resp.text();
      throw new Error(`CopyObject failed ${resp.status}: ${text}`);
    }
    await resp.text();
  });
}
// Re-export config for use in presigning
export {
ACCESS_KEY,
BUCKET,
REGION,
SECRET_KEY,
SEAWEEDFS_S3_URL,
};

311
server/telemetry.ts Normal file
View File

@@ -0,0 +1,311 @@
/**
* OpenTelemetry instrumentation for the Drive service.
*
* When OTEL_ENABLED=true, initialises the SDK with OTLP export and exposes:
* - tracingMiddleware — Hono middleware that creates per-request spans
* - metricsMiddleware — Hono middleware that records HTTP metrics
* - withSpan — utility to wrap any async function in a child span
* - traceDbQuery — utility to wrap a DB query with a span
* - shutdown — graceful SDK shutdown
*
* When OTEL_ENABLED is not "true" every export is a lightweight no-op.
*/
import { trace, context, SpanKind, SpanStatusCode, metrics, propagation } from "npm:@opentelemetry/api@1.9.0";
import type { Span, Tracer, Context as OtelContext } from "npm:@opentelemetry/api@1.9.0";
import type { Context, Next } from "hono";
// ---------------------------------------------------------------------------
// Configuration
// ---------------------------------------------------------------------------
// All settings follow the standard OTEL_* environment variable names.
const OTEL_ENABLED = Deno.env.get("OTEL_ENABLED") === "true";
const OTEL_SERVICE_NAME = Deno.env.get("OTEL_SERVICE_NAME") ?? "drive";
// OTLP/gRPC collector endpoint used for both traces and metrics.
const OTEL_ENDPOINT =
  Deno.env.get("OTEL_EXPORTER_OTLP_ENDPOINT") ??
  "http://localhost:4317";
const OTEL_SAMPLER = Deno.env.get("OTEL_TRACES_SAMPLER") ?? "parentbased_traceidratio";
// Sampling ratio in [0, 1]; only consulted by ratio-based samplers.
const OTEL_SAMPLER_ARG = parseFloat(Deno.env.get("OTEL_TRACES_SAMPLER_ARG") ?? "1.0");
const DEPLOYMENT_ENV = Deno.env.get("DEPLOYMENT_ENVIRONMENT") ?? "development";
const SERVICE_VERSION = Deno.env.get("SERVICE_VERSION") ?? "0.0.0";
// Re-export so tests/other modules can check
export { OTEL_ENABLED };
// ---------------------------------------------------------------------------
// SDK initialisation (only when enabled)
// ---------------------------------------------------------------------------
// Populated by initSdk(); remain null while OTEL is disabled or still starting.
let _shutdownFn: (() => Promise<void>) | null = null;
let _tracer: Tracer | null = null;
// Metric instruments (initialised lazily)
let _requestDuration: ReturnType<ReturnType<typeof metrics.getMeter>["createHistogram"]> | null = null;
let _activeRequests: ReturnType<ReturnType<typeof metrics.getMeter>["createUpDownCounter"]> | null = null;
let _requestTotal: ReturnType<ReturnType<typeof metrics.getMeter>["createCounter"]> | null = null;
/**
 * Initialise the OpenTelemetry NodeSDK with OTLP/gRPC trace + metric export.
 * Runs once at module load when OTEL_ENABLED=true; populates the module-level
 * tracer and metric instruments consumed by the middlewares.
 */
async function initSdk(): Promise<void> {
  // Dynamic imports so the heavy SDK packages are never loaded when disabled
  const { NodeSDK } = await import("npm:@opentelemetry/sdk-node@0.57.2");
  const { OTLPTraceExporter } = await import("npm:@opentelemetry/exporter-trace-otlp-grpc@0.57.2");
  const { OTLPMetricExporter } = await import("npm:@opentelemetry/exporter-metrics-otlp-grpc@0.57.2");
  const { PeriodicExportingMetricReader } = await import("npm:@opentelemetry/sdk-metrics@1.30.1");
  const { Resource } = await import("npm:@opentelemetry/resources@1.30.1");
  const { ATTR_SERVICE_NAME, ATTR_SERVICE_VERSION } = await import(
    "npm:@opentelemetry/semantic-conventions@1.28.0"
  );
  const { ParentBasedSampler, TraceIdRatioBasedSampler, AlwaysOnSampler, AlwaysOffSampler } = await import(
    "npm:@opentelemetry/sdk-trace-base@1.30.1"
  );
  const { W3CTraceContextPropagator } = await import("npm:@opentelemetry/core@1.30.1");
  // Build sampler: OTEL_TRACES_SAMPLER picks the root sampler; a
  // "parentbased_" prefix wraps it so child spans follow the parent decision.
  let innerSampler;
  if (OTEL_SAMPLER === "always_on") {
    innerSampler = new AlwaysOnSampler();
  } else if (OTEL_SAMPLER === "always_off") {
    innerSampler = new AlwaysOffSampler();
  } else {
    // Default: ratio-based sampling with OTEL_TRACES_SAMPLER_ARG.
    innerSampler = new TraceIdRatioBasedSampler(OTEL_SAMPLER_ARG);
  }
  const sampler = OTEL_SAMPLER.startsWith("parentbased_")
    ? new ParentBasedSampler({ root: innerSampler })
    : innerSampler;
  const resource = new Resource({
    [ATTR_SERVICE_NAME]: OTEL_SERVICE_NAME,
    [ATTR_SERVICE_VERSION]: SERVICE_VERSION,
    "deployment.environment": DEPLOYMENT_ENV,
  });
  const traceExporter = new OTLPTraceExporter({ url: OTEL_ENDPOINT });
  const metricExporter = new OTLPMetricExporter({ url: OTEL_ENDPOINT });
  const metricReader = new PeriodicExportingMetricReader({
    exporter: metricExporter,
    exportIntervalMillis: 15_000,
  });
  const sdk = new NodeSDK({
    resource,
    traceExporter,
    metricReader,
    sampler,
  });
  // Set the propagator globally before starting the SDK
  propagation.setGlobalPropagator(new W3CTraceContextPropagator());
  sdk.start();
  _shutdownFn = () => sdk.shutdown();
  // Grab tracer
  _tracer = trace.getTracer(OTEL_SERVICE_NAME, SERVICE_VERSION);
  // Init metric instruments
  const meter = metrics.getMeter(OTEL_SERVICE_NAME, SERVICE_VERSION);
  _requestDuration = meter.createHistogram("http.server.request.duration", {
    description: "Duration of HTTP server requests",
    unit: "ms",
  });
  _activeRequests = meter.createUpDownCounter("http.server.active_requests", {
    description: "Number of active HTTP requests",
  });
  _requestTotal = meter.createCounter("http.server.request.total", {
    description: "Total HTTP requests",
  });
}
// Kick off init if enabled (fire-and-forget; middleware awaits the promise)
const _initPromise: Promise<void> | null = OTEL_ENABLED ? initSdk() : null;
// ---------------------------------------------------------------------------
// Helpers
// ---------------------------------------------------------------------------
/** Return the SDK tracer when initialised, otherwise a global (no-op) tracer. */
function getTracer(): Tracer {
  if (_tracer) return _tracer;
  return trace.getTracer(OTEL_SERVICE_NAME);
}
/**
 * Best-effort route template for a request: the matched Hono route pattern
 * when available (`routePath`, Hono v4), otherwise the raw request path.
 */
function routeOf(c: Context): string {
  try {
    // deno-lint-ignore no-explicit-any
    const pattern = (c.req as any).routePath;
    if (pattern) return pattern;
  } catch {
    // fall through to the raw path
  }
  return c.req.path;
}
// ---------------------------------------------------------------------------
// Hono middleware — tracing
// ---------------------------------------------------------------------------
/**
 * Per-request server span. Extracts W3C trace context from incoming request
 * headers, names the span "<METHOD> <route>", records status and (if present)
 * the authenticated identity, and injects trace context into the response
 * headers for client-side correlation. No-op when OTEL_ENABLED is false.
 */
export async function tracingMiddleware(c: Context, next: Next): Promise<void | Response> {
  if (!OTEL_ENABLED) return await next();
  // Ensure SDK is ready
  if (_initPromise) await _initPromise;
  const tracer = getTracer();
  const req = c.req;
  // Extract incoming trace context from request headers
  const carrier: Record<string, string> = {};
  req.raw.headers.forEach((value, key) => {
    carrier[key] = value;
  });
  const parentCtx = propagation.extract(context.active(), carrier);
  const route = routeOf(c);
  const spanName = `${req.method} ${route}`;
  return await tracer.startActiveSpan(
    spanName,
    {
      kind: SpanKind.SERVER,
      attributes: {
        "http.method": req.method,
        // WOPI access tokens travel in the query string — never record them.
        "http.url": req.url.replace(/access_token=[^&]+/g, "access_token=REDACTED"),
        "http.route": route,
        "http.user_agent": req.header("user-agent") ?? "",
      },
    },
    parentCtx,
    async (span: Span) => {
      try {
        await next();
        const status = c.res.status;
        span.setAttribute("http.status_code", status);
        // Attach user identity if set by auth middleware
        try {
          const identity = c.get("identity");
          if (identity?.id) {
            span.setAttribute("enduser.id", identity.id);
          }
        } catch { /* identity not set */ }
        // Only 5xx marks the span as an error; 4xx is a normal outcome.
        if (status >= 500) {
          span.setStatus({ code: SpanStatusCode.ERROR, message: `HTTP ${status}` });
        } else {
          span.setStatus({ code: SpanStatusCode.OK });
        }
        // Inject trace context into response headers
        const responseCarrier: Record<string, string> = {};
        propagation.inject(context.active(), responseCarrier);
        for (const [k, v] of Object.entries(responseCarrier)) {
          c.res.headers.set(k, v);
        }
      } catch (err) {
        span.setStatus({ code: SpanStatusCode.ERROR, message: String(err) });
        span.recordException(err instanceof Error ? err : new Error(String(err)));
        throw err;
      } finally {
        span.end();
      }
    },
  );
}
// ---------------------------------------------------------------------------
// Hono middleware — metrics
// ---------------------------------------------------------------------------
/**
 * Record HTTP metrics per request: in-flight gauge, duration histogram and
 * total counter, all labelled by method/route (+ status on completion).
 * No-op when OTEL_ENABLED is false.
 */
export async function metricsMiddleware(c: Context, next: Next): Promise<void | Response> {
  if (!OTEL_ENABLED) return await next();
  if (_initPromise) await _initPromise;
  const method = c.req.method;
  const route = routeOf(c);
  const baseAttrs = { "http.method": method, "http.route": route };
  _activeRequests?.add(1, baseAttrs);
  const startedAt = performance.now();
  try {
    await next();
  } finally {
    const elapsedMs = performance.now() - startedAt;
    // A missing response (thrown error) is accounted as a 500.
    const status = c.res?.status ?? 500;
    const fullAttrs = { ...baseAttrs, "http.status_code": status };
    _activeRequests?.add(-1, baseAttrs);
    _requestDuration?.record(elapsedMs, fullAttrs);
    _requestTotal?.add(1, fullAttrs);
  }
}
// ---------------------------------------------------------------------------
// withSpan — wrap any async function in a child span
// ---------------------------------------------------------------------------
/**
* Run `fn` inside a new child span. Attributes can be set inside `fn` via the
* span argument. If OTEL is disabled this simply calls `fn` with a no-op span.
*/
export async function withSpan<T>(
name: string,
attributes: Record<string, string | number | boolean>,
fn: (span: Span) => Promise<T>,
): Promise<T> {
if (!OTEL_ENABLED) {
// Provide a no-op span
const noopSpan = trace.getTracer("noop").startSpan("noop");
noopSpan.end();
return await fn(noopSpan);
}
if (_initPromise) await _initPromise;
const tracer = getTracer();
return await tracer.startActiveSpan(name, { attributes }, async (span: Span) => {
try {
const result = await fn(span);
span.setStatus({ code: SpanStatusCode.OK });
return result;
} catch (err) {
span.setStatus({ code: SpanStatusCode.ERROR, message: String(err) });
span.recordException(err instanceof Error ? err : new Error(String(err)));
throw err;
} finally {
span.end();
}
});
}
// ---------------------------------------------------------------------------
// traceDbQuery — wrap a database call with a span
// ---------------------------------------------------------------------------
/**
 * Wrap a database call in a `db.query` span.
 * `statement` must be the SQL template only — never interpolated values.
 */
export async function traceDbQuery<T>(
  statement: string,
  fn: () => Promise<T>,
): Promise<T> {
  const attrs = { "db.statement": statement, "db.system": "postgresql" };
  return withSpan("db.query", attrs, () => fn());
}
// ---------------------------------------------------------------------------
// Graceful shutdown
// ---------------------------------------------------------------------------
/** Flush and stop the OTEL SDK if it was ever started; otherwise a no-op. */
export async function shutdown(): Promise<void> {
  await _shutdownFn?.();
}

120
server/wopi/discovery.ts Normal file
View File

@@ -0,0 +1,120 @@
/**
* Collabora WOPI discovery — fetch and cache discovery.xml.
* Parses XML to extract urlsrc for each mimetype/action pair.
*/
import { withSpan } from "../telemetry.ts";
const COLLABORA_URL =
Deno.env.get("COLLABORA_URL") ??
"http://collabora.lasuite.svc.cluster.local:9980";
const CACHE_TTL_MS = 60 * 60 * 1000; // 1 hour
interface ActionEntry {
name: string;
ext: string;
urlsrc: string;
}
interface DiscoveryCache {
/** Map: mimetype -> ActionEntry[] */
actions: Map<string, ActionEntry[]>;
fetchedAt: number;
}
let cache: DiscoveryCache | null = null;
/**
 * Parse Collabora discovery XML into a map of mimetype -> action entries.
 *
 * Regex-based on purpose: the discovery document is machine-generated and
 * well-formed. Unlike the original, attribute ORDER inside <app> tags is not
 * assumed — `name` (the mimetype) is extracted wherever it appears, so apps
 * whose first attribute is e.g. favIconUrl are no longer silently dropped.
 */
export function parseDiscoveryXml(
  xml: string,
): Map<string, ActionEntry[]> {
  const result = new Map<string, ActionEntry[]>();
  // Match <app ...>...</app> blocks.
  const appRegex = /<app\b([^>]*)>([\s\S]*?)<\/app>/g;
  for (const appMatch of xml.matchAll(appRegex)) {
    const appAttrs = appMatch[1];
    const appBody = appMatch[2];
    // The app's name attribute holds the mimetype; skip unnamed apps.
    const mimetype = appAttrs.match(/name="([^"]*)"/)?.[1] ?? "";
    if (!mimetype) continue;
    const actions: ActionEntry[] = [];
    // Match <action name="..." ext="..." urlsrc="..." /> in any attribute order.
    const actionRegex = /<action\s+([^>]*?)\/?\s*>/g;
    for (const actionMatch of appBody.matchAll(actionRegex)) {
      const attrs = actionMatch[1];
      const name = attrs.match(/name="([^"]*)"/)?.[1] ?? "";
      const ext = attrs.match(/ext="([^"]*)"/)?.[1] ?? "";
      const urlsrc = attrs.match(/urlsrc="([^"]*)"/)?.[1] ?? "";
      if (name && urlsrc) {
        actions.push({ name, ext, urlsrc });
      }
    }
    // Only record apps that expose at least one usable action.
    if (actions.length > 0) {
      result.set(mimetype, actions);
    }
  }
  return result;
}
/** Fetch and parse /hosting/discovery from the Collabora server. */
async function fetchDiscovery(): Promise<Map<string, ActionEntry[]>> {
  const resp = await fetch(`${COLLABORA_URL}/hosting/discovery`);
  if (!resp.ok) {
    throw new Error(
      `Collabora discovery fetch failed: ${resp.status} ${resp.statusText}`,
    );
  }
  const xml = await resp.text();
  return parseDiscoveryXml(xml);
}
/**
 * Return the (possibly cached) discovery map, refetching with up to three
 * attempts and linear backoff once the cache TTL has expired.
 */
async function getDiscovery(): Promise<Map<string, ActionEntry[]>> {
  if (cache && Date.now() - cache.fetchedAt < CACHE_TTL_MS) {
    return cache.actions;
  }
  let lastError: Error | null = null;
  for (let attempt = 0; attempt < 3; attempt++) {
    try {
      const actions = await fetchDiscovery();
      cache = { actions, fetchedAt: Date.now() };
      return actions;
    } catch (e) {
      lastError = e as Error;
      const isFinalAttempt = attempt === 2;
      if (!isFinalAttempt) {
        // Back off 1s, then 2s.
        await new Promise((r) => setTimeout(r, 1000 * (attempt + 1)));
      }
    }
  }
  throw lastError;
}
/**
* Get the Collabora editor URL for a given mimetype and action.
* Returns the urlsrc template or null if not found.
*/
export async function getCollaboraActionUrl(
mimetype: string,
action = "edit",
): Promise<string | null> {
const cacheHit = !!(cache && Date.now() - cache.fetchedAt < CACHE_TTL_MS);
return withSpan("collabora.discovery", { "collabora.mimetype": mimetype, "collabora.action": action, "collabora.cache_hit": cacheHit }, async () => {
const discovery = await getDiscovery();
const actions = discovery.get(mimetype);
if (!actions) return null;
const match = actions.find((a) => a.name === action);
return match?.urlsrc ?? null;
});
}
/** Drop the cached discovery document so the next call refetches (test helper). */
export function clearDiscoveryCache(): void {
  cache = null;
}

260
server/wopi/handler.ts Normal file
View File

@@ -0,0 +1,260 @@
/**
* WOPI endpoint handlers.
*/
import type { Context } from "hono";
import sql from "../db.ts";
import { getObject, putObject } from "../s3.ts";
import { withSpan } from "../telemetry.ts";
import { verifyWopiToken, generateWopiToken } from "./token.ts";
import type { WopiTokenPayload } from "./token.ts";
import { getCollaboraActionUrl } from "./discovery.ts";
import {
acquireLock,
getLock,
refreshLock,
releaseLock,
unlockAndRelock,
} from "./lock.ts";
// ── Token validation helper ─────────────────────────────────────────────────
/**
 * Extract and verify the `access_token` query parameter.
 * Returns the decoded payload, or null when absent/invalid/expired.
 */
async function validateToken(c: Context): Promise<WopiTokenPayload | null> {
  const token = c.req.query("access_token");
  return token ? verifyWopiToken(token) : null;
}
// Uniform plain-text error response for WOPI endpoints.
function wopiError(c: Context, status: number, msg: string): Response {
  return c.text(msg, status);
}
// ── CheckFileInfo ───────────────────────────────────────────────────────────
/**
 * GET /wopi/files/:id — WOPI CheckFileInfo.
 *
 * Validates the access token, confirms it was minted for this exact file id,
 * and returns the metadata Collabora needs (name, size, version, lock/update
 * capabilities). 401 on token problems, 404 when the row is missing.
 */
export async function wopiCheckFileInfo(c: Context): Promise<Response> {
  return withSpan("wopi.checkFileInfo", { "wopi.file_id": c.req.param("id") ?? "" }, async () => {
    const payload = await validateToken(c);
    if (!payload) return wopiError(c, 401, "Invalid access token");
    const fileId = c.req.param("id");
    // Tokens are scoped to one file; reject cross-file reuse.
    if (payload.fid !== fileId) return wopiError(c, 401, "Token/file mismatch");
    const [file] = await sql`
      SELECT * FROM files WHERE id = ${fileId}
    `;
    if (!file) return wopiError(c, 404, "File not found");
    return c.json({
      BaseFileName: file.filename,
      OwnerId: file.owner_id,
      // size may arrive as a non-number from the driver — WOPI needs a JSON number.
      Size: Number(file.size),
      UserId: payload.uid,
      UserFriendlyName: payload.unm,
      // updated_at may be a Date or a string depending on the driver; handle both.
      Version: file.updated_at?.toISOString?.() ?? String(file.updated_at),
      UserCanWrite: payload.wr,
      // Save-As (PutRelativeFile) is not implemented by this host.
      UserCanNotWriteRelative: true,
      SupportsLocks: true,
      SupportsUpdate: payload.wr,
      SupportsGetLock: true,
      LastModifiedTime: file.updated_at?.toISOString?.() ?? String(file.updated_at),
    });
  });
}
// ── GetFile ─────────────────────────────────────────────────────────────────
/**
 * GET /wopi/files/:id/contents — WOPI GetFile.
 * Streams the object bytes from S3 back to Collabora.
 */
export async function wopiGetFile(c: Context): Promise<Response> {
  return withSpan("wopi.getFile", { "wopi.file_id": c.req.param("id") ?? "" }, async () => {
    const payload = await validateToken(c);
    if (!payload) return wopiError(c, 401, "Invalid access token");
    const fileId = c.req.param("id");
    if (payload.fid !== fileId) return wopiError(c, 401, "Token/file mismatch");
    const [file] = await sql`
      SELECT s3_key, mimetype FROM files WHERE id = ${fileId}
    `;
    if (!file) return wopiError(c, 404, "File not found");
    const s3Resp = await getObject(file.s3_key);
    if (!s3Resp.ok) return wopiError(c, 500, "Failed to retrieve file from storage");
    const headers = new Headers();
    headers.set("Content-Type", file.mimetype);
    const length = s3Resp.headers.get("content-length");
    if (length) headers.set("Content-Length", length);
    // Pass the S3 body stream straight through without buffering.
    return new Response(s3Resp.body, { status: 200, headers });
  });
}
// ── PutFile ─────────────────────────────────────────────────────────────────
/**
 * POST /wopi/files/:id/contents — WOPI PutFile.
 *
 * Checks write permission and the X-WOPI-Lock header against the current
 * lock, then overwrites the object in S3 and refreshes size/updated_at.
 */
export async function wopiPutFile(c: Context): Promise<Response> {
  return withSpan("wopi.putFile", { "wopi.file_id": c.req.param("id") ?? "" }, async () => {
    const payload = await validateToken(c);
    if (!payload) return wopiError(c, 401, "Invalid access token");
    if (!payload.wr) return wopiError(c, 403, "No write permission");
    const fileId = c.req.param("id");
    if (payload.fid !== fileId) return wopiError(c, 401, "Token/file mismatch");
    // Verify lock: a mismatching lock yields 409 carrying the current lock id.
    // NOTE(review): the WOPI spec additionally expects 409 for PutFile on an
    // UNLOCKED, non-empty file; this host currently allows that write —
    // confirm whether stricter behaviour is needed for other WOPI clients.
    const requestLock = c.req.header("X-WOPI-Lock") ?? "";
    const currentLock = await getLock(fileId);
    if (currentLock && currentLock !== requestLock) {
      const headers = new Headers();
      headers.set("X-WOPI-Lock", currentLock);
      return new Response("Lock mismatch", { status: 409, headers });
    }
    const [file] = await sql`
      SELECT s3_key, mimetype FROM files WHERE id = ${fileId}
    `;
    if (!file) return wopiError(c, 404, "File not found");
    // Whole-body buffering: WOPI PutFile replaces the full document.
    const body = new Uint8Array(await c.req.arrayBuffer());
    await putObject(file.s3_key, body, file.mimetype);
    // Update size + timestamp
    await sql`
      UPDATE files SET size = ${body.length}, updated_at = now() WHERE id = ${fileId}
    `;
    return c.text("", 200);
  });
}
// ── Lock/Unlock/RefreshLock/GetLock (routed by X-WOPI-Override) ─────────────
/**
 * POST /wopi/files/:id — lock-family WOPI operations, dispatched on the
 * X-WOPI-Override header: LOCK (incl. UnlockAndRelock), GET_LOCK,
 * REFRESH_LOCK, UNLOCK, PUT_RELATIVE (501) and RENAME_FILE.
 * Lock conflicts return 409 with the current lock id in X-WOPI-Lock.
 */
export async function wopiPostAction(c: Context): Promise<Response> {
  const override = c.req.header("X-WOPI-Override")?.toUpperCase() ?? "UNKNOWN";
  return withSpan(`wopi.${override.toLowerCase()}`, { "wopi.file_id": c.req.param("id") ?? "", "wopi.override": override }, async () => {
    const payload = await validateToken(c);
    if (!payload) return wopiError(c, 401, "Invalid access token");
    const fileId = c.req.param("id");
    if (payload.fid !== fileId) return wopiError(c, 401, "Token/file mismatch");
    const lockId = c.req.header("X-WOPI-Lock") ?? "";
    const oldLockId = c.req.header("X-WOPI-OldLock") ?? "";
    switch (override) {
      case "LOCK": {
        // An X-WOPI-OldLock header turns LOCK into UnlockAndRelock.
        if (oldLockId) {
          const result = await unlockAndRelock(fileId, oldLockId, lockId);
          if (!result.success) {
            const headers = new Headers();
            if (result.existingLockId) headers.set("X-WOPI-Lock", result.existingLockId);
            return new Response("Lock mismatch", { status: 409, headers });
          }
          return c.text("", 200);
        }
        const result = await acquireLock(fileId, lockId);
        if (!result.success) {
          const headers = new Headers();
          if (result.existingLockId) headers.set("X-WOPI-Lock", result.existingLockId);
          return new Response("Lock conflict", { status: 409, headers });
        }
        return c.text("", 200);
      }
      case "GET_LOCK": {
        // Always 200; an unlocked file reports an empty X-WOPI-Lock value.
        const current = await getLock(fileId);
        const headers = new Headers();
        headers.set("X-WOPI-Lock", current ?? "");
        return new Response("", { status: 200, headers });
      }
      case "REFRESH_LOCK": {
        const result = await refreshLock(fileId, lockId);
        if (!result.success) {
          const headers = new Headers();
          if (result.existingLockId) headers.set("X-WOPI-Lock", result.existingLockId);
          return new Response("Lock mismatch", { status: 409, headers });
        }
        return c.text("", 200);
      }
      case "UNLOCK": {
        const result = await releaseLock(fileId, lockId);
        if (!result.success) {
          const headers = new Headers();
          if (result.existingLockId) headers.set("X-WOPI-Lock", result.existingLockId);
          return new Response("Lock mismatch", { status: 409, headers });
        }
        return c.text("", 200);
      }
      case "PUT_RELATIVE":
        // Save-As is deliberately unsupported (CheckFileInfo advertises
        // UserCanNotWriteRelative: true).
        return wopiError(c, 501, "PutRelative not supported");
      case "RENAME_FILE": {
        if (!payload.wr) return wopiError(c, 403, "No write permission");
        const newName = c.req.header("X-WOPI-RequestedName") ?? "";
        if (!newName) return wopiError(c, 400, "Missing X-WOPI-RequestedName");
        await sql`UPDATE files SET filename = ${newName}, updated_at = now() WHERE id = ${fileId}`;
        return c.json({ Name: newName });
      }
      default:
        return wopiError(c, 501, `Unknown override: ${override}`);
    }
  });
}
// ── Token generation endpoint ───────────────────────────────────────────────
/**
 * POST /api/wopi/token — session-authenticated endpoint that issues a WOPI
 * access token for a file the caller owns, plus (best effort) the Collabora
 * editor URL with WOPISrc filled in.
 *
 * Returns 400 on a missing or malformed JSON body, 401 without a session,
 * 404 when the file does not exist or is not owned by the caller.
 */
export async function generateWopiTokenHandler(c: Context): Promise<Response> {
  const identity = c.get("identity");
  if (!identity?.id) return c.json({ error: "Unauthorized" }, 401);
  // A malformed body previously threw out of req.json() and surfaced as an
  // unhandled 500 — report it as a client error instead.
  let body: { file_id?: string };
  try {
    body = await c.req.json();
  } catch {
    return c.json({ error: "Invalid JSON body" }, 400);
  }
  const fileId = body.file_id;
  if (!fileId) return c.json({ error: "file_id required" }, 400);
  // Verify file exists and user has access (owner check)
  // TODO: Replace owner_id check with Keto permission check when permissions are wired up
  const [file] = await sql`
    SELECT id, owner_id, mimetype FROM files
    WHERE id = ${fileId} AND deleted_at IS NULL AND owner_id = ${identity.id}
  `;
  if (!file) return c.json({ error: "File not found" }, 404);
  // The WHERE clause already guarantees ownership, hence write access today;
  // kept explicit so a future Keto check can slot in here.
  const canWrite = file.owner_id === identity.id;
  const token = await generateWopiToken(
    fileId,
    identity.id,
    identity.name || identity.email,
    canWrite,
  );
  // Absolute expiry (ms since epoch); matches the token's 8-hour lifetime.
  const tokenTtl = Date.now() + 8 * 3600 * 1000;
  // Build the Collabora editor URL with WOPISrc
  let editorUrl: string | null = null;
  try {
    const urlsrc = await getCollaboraActionUrl(file.mimetype ?? "", "edit");
    if (urlsrc) {
      const PUBLIC_URL = Deno.env.get("PUBLIC_URL") ?? "http://localhost:3000";
      const wopiSrc = encodeURIComponent(`${PUBLIC_URL}/wopi/files/${fileId}`);
      editorUrl = `${urlsrc}WOPISrc=${wopiSrc}`;
    }
  } catch {
    // Discovery not available — editorUrl stays null
  }
  return c.json({
    access_token: token,
    access_token_ttl: tokenTtl,
    editor_url: editorUrl,
  });
}

216
server/wopi/lock.ts Normal file
View File

@@ -0,0 +1,216 @@
/**
* Valkey (Redis)-backed WOPI lock service with TTL.
* Uses an injectable store interface so tests can use an in-memory Map.
*/
const LOCK_TTL_SECONDS = 30 * 60; // 30 minutes
const KEY_PREFIX = "wopi:lock:";
// ── Store interface ─────────────────────────────────────────────────────────
export interface LockStore {
/** Get value for key, or null if missing/expired. */
get(key: string): Promise<string | null>;
/**
* Set key=value only if key does not exist. Returns true if set, false if conflict.
* TTL in seconds.
*/
setNX(key: string, value: string, ttlSeconds: number): Promise<boolean>;
/** Set key=value unconditionally with TTL. */
set(key: string, value: string, ttlSeconds: number): Promise<void>;
/** Delete key. */
del(key: string): Promise<void>;
/** Set TTL on existing key (returns false if key doesn't exist). */
expire(key: string, ttlSeconds: number): Promise<boolean>;
}
// ── In-memory store (for tests + development) ──────────────────────────────
/**
 * Map-backed lock store with millisecond-granularity expiry.
 * Used by tests and as the local-development fallback.
 */
export class InMemoryLockStore implements LockStore {
  private entries = new Map<string, { value: string; expiresAt: number }>();

  /** Return the live entry for `key`, purging it first if expired. */
  private live(key: string): { value: string; expiresAt: number } | null {
    const entry = this.entries.get(key);
    if (!entry) return null;
    if (entry.expiresAt < Date.now()) {
      this.entries.delete(key);
      return null;
    }
    return entry;
  }

  get(key: string): Promise<string | null> {
    return Promise.resolve(this.live(key)?.value ?? null);
  }

  async setNX(key: string, value: string, ttlSeconds: number): Promise<boolean> {
    // Only set when no live entry exists (Redis SET ... NX semantics).
    if (this.live(key) !== null) return false;
    this.entries.set(key, { value, expiresAt: Date.now() + ttlSeconds * 1000 });
    return true;
  }

  async set(key: string, value: string, ttlSeconds: number): Promise<void> {
    this.entries.set(key, { value, expiresAt: Date.now() + ttlSeconds * 1000 });
  }

  async del(key: string): Promise<void> {
    this.entries.delete(key);
  }

  async expire(key: string, ttlSeconds: number): Promise<boolean> {
    const entry = this.live(key);
    if (!entry) return false;
    entry.expiresAt = Date.now() + ttlSeconds * 1000;
    return true;
  }
}
// ── Valkey (Redis) store using ioredis ──────────────────────────────────────
/**
 * Lock store backed by Valkey/Redis via an ioredis-compatible client.
 *
 * NOTE(review): the client class is looked up on `globalThis.Redis` rather
 * than imported — confirm something actually installs it there at startup;
 * otherwise the constructor always throws and getStore() falls back to the
 * in-memory store.
 */
export class ValkeyLockStore implements LockStore {
  // Minimal structural type covering the ioredis subset we call.
  private client: {
    get(key: string): Promise<string | null>;
    set(key: string, value: string, ex: string, ttl: number, nx: string): Promise<string | null>;
    set(key: string, value: string, ex: string, ttl: number): Promise<string | null>;
    del(key: string): Promise<number>;
    expire(key: string, ttl: number): Promise<number>;
  };
  constructor() {
    // Lazy-init to avoid import issues in tests
    const url = Deno.env.get("VALKEY_URL") ?? "redis://localhost:6379/2";
    // deno-lint-ignore no-explicit-any
    const Redis = (globalThis as any).Redis ?? null;
    if (!Redis) {
      throw new Error("ioredis not available — use InMemoryLockStore for tests");
    }
    this.client = new Redis(url);
  }
  async get(key: string): Promise<string | null> {
    return this.client.get(key);
  }
  // SET key value EX ttl NX — Redis replies "OK" only when the key was absent.
  async setNX(key: string, value: string, ttlSeconds: number): Promise<boolean> {
    const result = await this.client.set(key, value, "EX", ttlSeconds, "NX");
    return result === "OK";
  }
  async set(key: string, value: string, ttlSeconds: number): Promise<void> {
    await this.client.set(key, value, "EX", ttlSeconds);
  }
  async del(key: string): Promise<void> {
    await this.client.del(key);
  }
  // EXPIRE replies 1 when the key existed and the TTL was applied.
  async expire(key: string, ttlSeconds: number): Promise<boolean> {
    const result = await this.client.expire(key, ttlSeconds);
    return result === 1;
  }
}
// ── Lock service ────────────────────────────────────────────────────────────
// Active lock store; resolved lazily by getStore(), injectable for tests.
let _store: LockStore | null = null;
/** Override the lock store (tests inject an InMemoryLockStore). */
export function setLockStore(store: LockStore): void {
  _store = store;
}
/**
 * Lazily resolve the lock store: Valkey when a client is available,
 * otherwise fall back to the in-memory implementation (with a warning).
 */
function getStore(): LockStore {
  if (_store) return _store;
  try {
    _store = new ValkeyLockStore();
  } catch {
    console.warn("WOPI lock: falling back to in-memory store");
    _store = new InMemoryLockStore();
  }
  return _store;
}
export interface LockResult {
success: boolean;
existingLockId?: string;
}
/**
 * Take the WOPI lock for a file. Re-locking with the same id succeeds and
 * refreshes the TTL; a different holder is a conflict that reports the
 * current lock id.
 */
export async function acquireLock(
  fileId: string,
  lockId: string,
): Promise<LockResult> {
  const key = KEY_PREFIX + fileId;
  const store = getStore();
  if (await store.setNX(key, lockId, LOCK_TTL_SECONDS)) {
    return { success: true };
  }
  const current = await store.get(key);
  if (current !== lockId) {
    return { success: false, existingLockId: current ?? undefined };
  }
  // Same holder — just extend the TTL.
  await store.expire(key, LOCK_TTL_SECONDS);
  return { success: true };
}
/** Current lock id for a file, or null when unlocked/expired. */
export async function getLock(fileId: string): Promise<string | null> {
  return getStore().get(KEY_PREFIX + fileId);
}
/**
 * Extend the TTL of an existing lock. Fails when no lock is held, or when
 * it is held under a different id (reported via existingLockId).
 */
export async function refreshLock(
  fileId: string,
  lockId: string,
): Promise<LockResult> {
  const key = KEY_PREFIX + fileId;
  const store = getStore();
  const current = await store.get(key);
  if (current === null) return { success: false };
  if (current !== lockId) return { success: false, existingLockId: current };
  await store.expire(key, LOCK_TTL_SECONDS);
  return { success: true };
}
/**
 * Release a lock. Releasing an already-unlocked file succeeds (idempotent);
 * a mismatching holder fails and reports the current lock id.
 */
export async function releaseLock(
  fileId: string,
  lockId: string,
): Promise<LockResult> {
  const key = KEY_PREFIX + fileId;
  const store = getStore();
  const current = await store.get(key);
  if (current === null) return { success: true };
  if (current !== lockId) return { success: false, existingLockId: current };
  await store.del(key);
  return { success: true };
}
/**
 * Replace `oldLockId` with `newLockId` in one call (WOPI UnlockAndRelock).
 * Fails unless the current holder is exactly `oldLockId`.
 */
export async function unlockAndRelock(
  fileId: string,
  oldLockId: string,
  newLockId: string,
): Promise<LockResult> {
  const key = KEY_PREFIX + fileId;
  const store = getStore();
  const current = await store.get(key);
  if (current !== oldLockId) {
    return { success: false, existingLockId: current ?? undefined };
  }
  await store.set(key, newLockId, LOCK_TTL_SECONDS);
  return { success: true };
}

132
server/wopi/token.ts Normal file
View File

@@ -0,0 +1,132 @@
/**
* JWT-based WOPI access tokens using Web Crypto (HMAC-SHA256).
*/
// Test mode ships a fixed secret so the test suite needs no env configuration.
const TEST_MODE = Deno.env.get("DRIVER_TEST_MODE") === "1";
const WOPI_JWT_SECRET = Deno.env.get("WOPI_JWT_SECRET") ?? (TEST_MODE ? "test-wopi-secret" : "");
// Fail fast at import time: tokens signed with an empty secret would be forgeable.
if (!WOPI_JWT_SECRET && !TEST_MODE) {
  throw new Error("WOPI_JWT_SECRET must be set in production");
}
const encoder = new TextEncoder();
const decoder = new TextDecoder();
// ── Base64url helpers ───────────────────────────────────────────────────────
/** Encode bytes as unpadded base64url (RFC 4648 §5). */
function base64urlEncode(data: Uint8Array): string {
  let binary = "";
  for (const byte of data) binary += String.fromCharCode(byte);
  return btoa(binary)
    .replace(/\+/g, "-")
    .replace(/\//g, "_")
    .replace(/=+$/, "");
}
/** Decode unpadded base64url into bytes, restoring '=' padding as needed. */
function base64urlDecode(str: string): Uint8Array {
  const padLen = (4 - (str.length % 4)) % 4;
  const b64 = str.replace(/-/g, "+").replace(/_/g, "/") + "=".repeat(padLen);
  const binary = atob(b64);
  const out = new Uint8Array(binary.length);
  for (let i = 0; i < binary.length; i++) out[i] = binary.charCodeAt(i);
  return out;
}
// ── HMAC helpers ────────────────────────────────────────────────────────────
/**
 * Compute an HMAC-SHA256 signature over `data`, keyed by the UTF-8 bytes
 * of `secret`.
 */
async function hmacSign(data: Uint8Array, secret: string): Promise<Uint8Array> {
  const key = await crypto.subtle.importKey(
    "raw",
    encoder.encode(secret),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["sign"],
  );
  // Cast: the lib types want BufferSource; Uint8Array qualifies at runtime.
  const sig = await crypto.subtle.sign("HMAC", key, data as unknown as BufferSource);
  return new Uint8Array(sig);
}
/**
 * Verify an HMAC-SHA256 signature, delegating the comparison to WebCrypto's
 * verify rather than a hand-rolled (potentially timing-leaky) byte compare.
 */
async function hmacVerify(data: Uint8Array, signature: Uint8Array, secret: string): Promise<boolean> {
  const key = await crypto.subtle.importKey(
    "raw",
    encoder.encode(secret),
    { name: "HMAC", hash: "SHA-256" },
    false,
    ["verify"],
  );
  return crypto.subtle.verify("HMAC", key, signature as unknown as BufferSource, data as unknown as BufferSource);
}
// ── Token types ─────────────────────────────────────────────────────────────
export interface WopiTokenPayload {
/** File UUID */
fid: string;
/** User ID (Kratos identity) */
uid: string;
/** User display name */
unm: string;
/** Can write */
wr: boolean;
/** Issued at (unix seconds) */
iat: number;
/** Expires at (unix seconds) */
exp: number;
}
// ── Public API ──────────────────────────────────────────────────────────────
const DEFAULT_EXPIRES_SECONDS = 8 * 3600; // 8 hours
/**
 * Mint a signed WOPI access token (HS256 JWT) for one file/user pair.
 * Lifetime defaults to 8 hours; `secret` is overridable for tests.
 */
export async function generateWopiToken(
  fileId: string,
  userId: string,
  userName: string,
  canWrite: boolean,
  expiresInSeconds = DEFAULT_EXPIRES_SECONDS,
  secret = WOPI_JWT_SECRET,
): Promise<string> {
  const issuedAt = Math.floor(Date.now() / 1000);
  // Note: key order is preserved so emitted tokens are byte-identical.
  const payload: WopiTokenPayload = {
    fid: fileId,
    uid: userId,
    unm: userName,
    wr: canWrite,
    iat: issuedAt,
    exp: issuedAt + expiresInSeconds,
  };
  const headerPart = base64urlEncode(
    encoder.encode(JSON.stringify({ alg: "HS256", typ: "JWT" })),
  );
  const payloadPart = base64urlEncode(encoder.encode(JSON.stringify(payload)));
  const signature = await hmacSign(
    encoder.encode(`${headerPart}.${payloadPart}`),
    secret,
  );
  return [headerPart, payloadPart, base64urlEncode(signature)].join(".");
}
export async function verifyWopiToken(
token: string,
secret = WOPI_JWT_SECRET,
): Promise<WopiTokenPayload | null> {
const parts = token.split(".");
if (parts.length !== 3) return null;
const [header, body, sig] = parts;
// Verify signature
const sigInput = encoder.encode(`${header}.${body}`);
const sigBytes = base64urlDecode(sig);
const valid = await hmacVerify(sigInput, sigBytes, secret);
if (!valid) return null;
// Parse payload
let payload: WopiTokenPayload;
try {
payload = JSON.parse(decoder.decode(base64urlDecode(body)));
} catch {
return null;
}
// Check expiry
const now = Math.floor(Date.now() / 1000);
if (payload.exp < now) return null;
return payload;
}