every.channel: sanitized baseline

This commit is contained in:
every.channel 2026-02-15 16:17:27 -05:00
commit 897e556bea
No known key found for this signature in database
258 changed files with 74298 additions and 0 deletions

View file

@ -0,0 +1,575 @@
/**
 * Serialize `data` as JSON into a Response.
 * Caller-supplied headers in `init` win over the default content-type.
 */
function json(data: unknown, init?: ResponseInit): Response {
  const headers = {
    "content-type": "application/json; charset=utf-8",
    ...(init?.headers ?? {}),
  };
  return new Response(JSON.stringify(data), { ...init, headers });
}
/**
 * Like `json`, but marks the response uncacheable by default.
 * Headers passed in `init` still take precedence over `cache-control`.
 */
function jsonNoStore(data: unknown, init?: ResponseInit): Response {
  const merged: ResponseInit = {
    ...init,
    headers: {
      "cache-control": "no-store",
      ...(init?.headers ?? {}),
    },
  };
  return json(data, merged);
}
/**
 * Compute base64(HMAC(secret, msg)) with the requested digest via Web Crypto.
 * Used to mint short-lived TURN REST credentials.
 */
async function hmacBase64(
  secret: string,
  msg: string,
  hash: "SHA-1" | "SHA-256",
): Promise<string> {
  const encoder = new TextEncoder();
  const key = await crypto.subtle.importKey(
    "raw",
    encoder.encode(secret),
    { name: "HMAC", hash: { name: hash } },
    false,
    ["sign"],
  );
  const signature = await crypto.subtle.sign("HMAC", key, encoder.encode(msg));
  // btoa wants a binary string, so map each byte to its char code first.
  const binary = Array.from(new Uint8Array(signature), (byte) =>
    String.fromCharCode(byte),
  ).join("");
  return btoa(binary);
}
/**
 * Build the ICE-server bootstrap payload for `/api/turn`.
 * STUN is always included; TURN entries appear only when a shared secret is
 * configured, with short-lived TURN REST credentials derived from it.
 * Response shape is compatible with `just-webrtc` types.
 */
async function handleTurn(env: Env): Promise<Response> {
  type IceServer = {
    urls: string[];
    username: string;
    credential: string;
    // `just-webrtc` uses serde defaults on enum variants (Password/Oauth).
    credential_type: "Password" | "Oauth";
  };
  const ice_servers: IceServer[] = [
    {
      urls: [
        "stun:stun.cloudflare.com:3478",
        "stun:stun.l.google.com:19302",
        "stun:stun1.l.google.com:19302",
      ],
      username: "",
      credential: "",
      credential_type: "Password",
    },
  ];
  const shared = env.EC_TURN_SHARED_SECRET?.trim();
  if (shared) {
    // Username is "<unix-expiry>:<prefix>"; credential is HMAC(secret, username).
    const ttlSec = Number(env.EC_TURN_TTL_SECS ?? "3600") || 3600;
    const exp = Math.floor(Date.now() / 1000) + Math.max(60, ttlSec);
    const prefix = (env.EC_TURN_USER_PREFIX ?? "every.channel").trim();
    const username = `${exp}:${prefix}`;
    const useSha256 = (env.EC_TURN_HMAC ?? "sha1").toLowerCase() === "sha256";
    const credential = await hmacBase64(shared, username, useSha256 ? "SHA-256" : "SHA-1");
    const host = (env.EC_TURN_HOST ?? "turn.cloudflare.com").trim();
    ice_servers.push({
      urls: [
        `turn:${host}:3478?transport=udp`,
        `turn:${host}:3478?transport=tcp`,
        `turns:${host}:5349?transport=tcp`,
      ],
      username,
      credential,
      credential_type: "Password",
    });
  }
  return json({ ice_servers });
}
export default {
  /**
   * Worker entry point. Routes, in order:
   *  - /api/turn       → ICE bootstrap (STUN always, TURN when configured)
   *  - /api/stream/ws  → per-stream relay DO (one instance per stream_id)
   *  - /api/*          → single "global" rendezvous DO
   *  - everything else → static assets with SPA fallback to /index.html
   */
  async fetch(request: Request, env: Env): Promise<Response> {
    const url = new URL(request.url);
    const path = url.pathname;

    // ICE bootstrap for WebRTC clients (browser + native).
    if (path === "/api/turn") {
      return handleTurn(env);
    }

    // Stream relay (one-to-many) for CMAF object frames transported via
    // direct-wire chunks, so browsers can watch live streams without P2P.
    if (path === "/api/stream/ws") {
      const streamId = url.searchParams.get("stream_id") ?? "";
      if (streamId === "") {
        return jsonNoStore({ error: "missing stream_id" }, { status: 400 });
      }
      return env.EC_STREAM.get(env.EC_STREAM.idFromName(streamId)).fetch(request);
    }

    // Remaining /api/* traffic rendezvouses at one durable object ("global"),
    // used only to exchange WebRTC offers/answers and list "live" entries.
    if (path.startsWith("/api/")) {
      return env.EC_API.get(env.EC_API.idFromName("global")).fetch(request);
    }

    // Serve static assets; unknown paths fall back to the app shell.
    const assets = (env as unknown as { ASSETS?: Fetcher }).ASSETS;
    if (!assets || typeof (assets as any).fetch !== "function") {
      return new Response("Assets binding not configured", { status: 500 });
    }
    const res = await assets.fetch(request);
    if (res.status !== 404) return res;
    url.pathname = "/index.html";
    return assets.fetch(new Request(url.toString(), request));
  },
};
// Worker bindings + configuration. TURN settings are read by `handleTurn`.
interface Env {
  // Workers Assets binding serving the static app shell.
  ASSETS: Fetcher;
  // Durable Object namespace for the bootstrap rendezvous API (single "global" instance).
  EC_API: DurableObjectNamespace;
  // Durable Object namespace for stream relay fanout (one instance per stream_id).
  EC_STREAM: DurableObjectNamespace;
  // Optional TURN REST shared secret (Worker secret). When set, `/api/turn` includes TURN URLs
  // with short-lived credentials derived from this shared secret.
  EC_TURN_SHARED_SECRET?: string;
  // Credential lifetime in seconds, as a string. Default "3600"; floored to 60s by handleTurn.
  EC_TURN_TTL_SECS?: string;
  // Suffix placed after the expiry in the TURN username. Default "every.channel".
  EC_TURN_USER_PREFIX?: string;
  // TURN server hostname. Default "turn.cloudflare.com".
  EC_TURN_HOST?: string;
  // HMAC selector: "sha256" picks SHA-256; any other value falls back to SHA-1.
  EC_TURN_HMAC?: string;
}
// A live-stream announcement stored under "e:<stream_id>" and listed by /api/directory.
type DirectoryEntry = {
  stream_id: string;
  // Human-readable title (clamped to 128 chars on write).
  title: string;
  // WebRTC offer blob a viewer answers against.
  offer: string;
  // Last announce time (ms since epoch); used for newest-first ordering and cap eviction.
  updated_ms: number;
  // Entry is pruned once this wall-clock ms timestamp has passed.
  expires_ms: number;
};
// A viewer's WebRTC answer stored under "a:<stream_id>"; consumed one-shot by the publisher.
type AnswerEntry = {
  stream_id: string;
  answer: string;
  updated_ms: number;
  expires_ms: number;
};
// Response body of /api/directory.
type DirectoryList = {
  // Server clock at listing time, so clients can interpret expires_ms.
  now_ms: number;
  entries: DirectoryEntry[];
};
/** Current wall-clock time in milliseconds since the Unix epoch. */
function nowMs(): number {
  const ms = Date.now();
  return ms;
}
/** Truncate `s` to at most `maxLen` characters; shorter strings pass through unchanged. */
function clampStr(s: string, maxLen: number): string {
  return s.length > maxLen ? s.slice(0, maxLen) : s;
}
/** Storage key for a directory entry ("e:" namespace). */
function entryKey(streamId: string): string {
  return "e:" + streamId;
}
/** Storage key for a buffered WebRTC answer ("a:" namespace). */
function answerKey(streamId: string): string {
  return "a:" + streamId;
}
/**
 * Collect every `[key, value]` stored under `prefix`.
 *
 * Durable Object storage `list()` returns a `Map` and pages via `limit` +
 * `startAfter`; there is no cursor. The previous code read a non-existent
 * `page.cursor` property, which is always `undefined` on a Map, so listing
 * silently stopped after the first 256 rows. We now page by feeding the last
 * key seen back as `startAfter` and stop on a short page.
 *
 * Pages stay small on purpose; this backs a bootstrap rendezvous, not a
 * global index.
 */
async function listWithPrefix<T>(
  storage: DurableObjectStorage,
  prefix: string,
): Promise<Array<[string, T]>> {
  const out: Array<[string, T]> = [];
  const pageSize = 256;
  let startAfter: string | undefined = undefined;
  for (;;) {
    const page = await storage.list<T>({ prefix, startAfter, limit: pageSize });
    let lastKey: string | undefined = undefined;
    for (const [k, v] of page) {
      out.push([k, v]);
      lastKey = k;
    }
    // A short (or empty) page means everything under the prefix is drained.
    if (lastKey === undefined || page.size < pageSize) break;
    startAfter = lastKey;
  }
  return out;
}
async function pruneAndCap(
storage: DurableObjectStorage,
now: number,
): Promise<void> {
const entries = await listWithPrefix<DirectoryEntry>(storage, "e:");
const answers = await listWithPrefix<AnswerEntry>(storage, "a:");
const toDelete: string[] = [];
for (const [k, v] of entries) if (v.expires_ms <= now) toDelete.push(k);
for (const [k, v] of answers) if (v.expires_ms <= now) toDelete.push(k);
// Cap growth defensively. This is not spam-resistant; it's a bootstrap rendezvous.
if (entries.length > 200) {
const sorted = entries
.map(([, v]) => v)
.sort((a, b) => b.updated_ms - a.updated_ms)
.slice(0, 200);
const keep = new Set(sorted.map((e) => entryKey(e.stream_id)));
for (const [k] of entries) if (!keep.has(k)) toDelete.push(k);
}
if (answers.length > 500) {
const sorted = answers
.map(([, v]) => v)
.sort((a, b) => b.updated_ms - a.updated_ms)
.slice(0, 500);
const keep = new Set(sorted.map((a) => answerKey(a.stream_id)));
for (const [k] of answers) if (!keep.has(k)) toDelete.push(k);
}
if (toDelete.length > 0) {
// Delete in chunks to avoid oversized requests.
for (let i = 0; i < toDelete.length; i += 128) {
await storage.delete(toDelete.slice(i, i + 128));
}
}
}
// POST /api/announce request body: a publisher advertising a live stream.
type AnnounceReq = {
  stream_id: string;
  title: string;
  // WebRTC offer blob for viewers.
  offer: string;
  // Requested expiry (ms since epoch); the server clamps the resulting TTL.
  expires_ms?: number;
};
// POST /api/answer request body: a viewer's WebRTC answer for a stream.
type AnswerPostReq = {
  stream_id: string;
  answer: string;
};
// Minimal bootstrap API Durable Object. The binding name is historical; we keep it stable so
// existing migrations and wrangler config remain valid while removing Cloudflare Containers.
export class EcApiContainer implements DurableObject {
  private state: DurableObjectState;

  constructor(state: DurableObjectState) {
    this.state = state;
  }

  /** True when `v` is a non-empty string; guards fields of untyped JSON bodies. */
  private static isNonEmptyString(v: unknown): v is string {
    return typeof v === "string" && v.length > 0;
  }

  /**
   * Bootstrap rendezvous routes:
   *  - GET  /api/health     → liveness probe
   *  - GET  /api/directory  → unexpired entries, newest first
   *  - POST /api/announce   → upsert a directory entry (TTL clamped to 5–60s)
   *  - POST /api/answer     → store a WebRTC answer (2 min TTL)
   *  - GET  /api/answer     → consume an answer (one-shot: deleted on read)
   */
  async fetch(request: Request): Promise<Response> {
    const url = new URL(request.url);
    const now = nowMs();
    // Best-effort pruning on any request.
    await pruneAndCap(this.state.storage, now);

    if (url.pathname === "/api/health") {
      return jsonNoStore({ ok: true });
    }

    if (url.pathname === "/api/directory") {
      const items = await listWithPrefix<DirectoryEntry>(this.state.storage, "e:");
      const entries = items
        .map(([, v]) => v)
        .filter((v) => v.expires_ms > now)
        .sort((a, b) => b.updated_ms - a.updated_ms);
      const resp: DirectoryList = { now_ms: now, entries };
      return jsonNoStore(resp);
    }

    if (url.pathname === "/api/announce") {
      if (request.method !== "POST") {
        return jsonNoStore({ error: "method not allowed" }, { status: 405 });
      }
      let body: AnnounceReq;
      try {
        body = (await request.json()) as AnnounceReq;
      } catch {
        return jsonNoStore({ error: "invalid json" }, { status: 400 });
      }
      // Type-check fields instead of relying on truthiness: a non-string value
      // (e.g. a number) previously passed the `!body.x` check and then threw
      // inside clampStr, turning malformed client input into a 500.
      if (
        !EcApiContainer.isNonEmptyString(body.stream_id) ||
        !EcApiContainer.isNonEmptyString(body.title) ||
        !EcApiContainer.isNonEmptyString(body.offer)
      ) {
        return jsonNoStore({ error: "missing stream_id/title/offer" }, { status: 400 });
      }
      if (body.offer.length > 64_000) {
        return jsonNoStore({ error: "offer too large" }, { status: 413 });
      }
      // TTL is clamped to [5s, 60s]; a missing or malformed expires_ms gets 20s.
      // (A non-finite expires_ms previously propagated NaN into the stored
      // expires_ms, making the entry immune to expiry pruning.)
      const requestedExpires =
        typeof body.expires_ms === "number" && Number.isFinite(body.expires_ms)
          ? body.expires_ms
          : now + 20_000;
      const requestedTtl = Math.max(0, requestedExpires - now);
      const ttlMs = Math.min(60_000, Math.max(5_000, requestedTtl));
      const entry: DirectoryEntry = {
        stream_id: clampStr(body.stream_id, 256),
        title: clampStr(body.title, 128),
        offer: body.offer,
        updated_ms: now,
        expires_ms: now + ttlMs,
      };
      await this.state.storage.put(entryKey(entry.stream_id), entry);
      return jsonNoStore({ ok: true, ttl_ms: ttlMs, entry });
    }

    if (url.pathname === "/api/answer") {
      if (request.method === "POST") {
        let body: AnswerPostReq;
        try {
          body = (await request.json()) as AnswerPostReq;
        } catch {
          return jsonNoStore({ error: "invalid json" }, { status: 400 });
        }
        if (
          !EcApiContainer.isNonEmptyString(body.stream_id) ||
          !EcApiContainer.isNonEmptyString(body.answer)
        ) {
          return jsonNoStore({ error: "missing stream_id/answer" }, { status: 400 });
        }
        if (body.answer.length > 64_000) {
          return jsonNoStore({ error: "answer too large" }, { status: 413 });
        }
        const entry: AnswerEntry = {
          stream_id: clampStr(body.stream_id, 256),
          answer: body.answer,
          updated_ms: now,
          expires_ms: now + 2 * 60_000,
        };
        await this.state.storage.put(answerKey(entry.stream_id), entry);
        return jsonNoStore({ ok: true });
      }
      if (request.method === "GET") {
        const streamId = url.searchParams.get("stream_id") ?? "";
        if (!streamId) {
          return jsonNoStore({ error: "missing stream_id" }, { status: 400 });
        }
        const key = answerKey(streamId);
        const ans = await this.state.storage.get<AnswerEntry>(key);
        if (!ans || ans.expires_ms <= now) {
          await this.state.storage.delete(key);
          return jsonNoStore({ error: "not found" }, { status: 404 });
        }
        // One-shot: first reader consumes.
        await this.state.storage.delete(key);
        return jsonNoStore(ans);
      }
      return jsonNoStore({ error: "method not allowed" }, { status: 405 });
    }

    return jsonNoStore({ error: "not found" }, { status: 404 });
  }
}
// Historical class referenced by older migrations. It is not bound anymore,
// but exporting it keeps workerd/wrangler happy if migrations mention it.
export class DirectoryDO implements DurableObject {
  /** Always respond 410 Gone; this DO class is retired and serves nothing. */
  async fetch(): Promise<Response> {
    const gone: ResponseInit = { status: 410 };
    return new Response("gone", gone);
  }
}
// direct-wire tag bytes: the first byte of every websocket message.
const DIRECT_WIRE_TAG_FRAME = 0x00; // payload is one complete object frame
const DIRECT_WIRE_TAG_STREAM = 0x01; // payload is a chunk of a length-prefixed frame stream
const DIRECT_WIRE_TAG_PING = 0x02; // keepalive; relay ignores it
// Max payload bytes per STREAM-tagged websocket message when replaying frames.
const DIRECT_WIRE_CHUNK_BYTES = 16 * 1024;
// Subset of the object-frame metadata the relay inspects.
type TimingMeta = {
  // Media chunk index; 0 marks the init segment (see StreamRelayDO.bufferFrame).
  chunk_index: number;
};
type ObjectMeta = {
  content_type: string;
  timing?: TimingMeta;
};
// One-to-many fanout for direct-wire message chunks. Publisher sends direct-wire messages,
// subscribers receive the same stream, plus a buffered init + recent segments upon join.
export class StreamRelayDO implements DurableObject {
  /** Active publisher socket; a newly connecting publisher replaces it. */
  private publisher: WebSocket | null = null;
  /** Connected subscriber sockets. */
  private subs = new Set<WebSocket>();
  /** Pending bytes of the publisher's length-prefixed frame stream. */
  private buf = new Uint8Array(0);
  /** Byte length of the frame being reassembled (null = reading the header). */
  private want: number | null = null;
  /** Buffered init frame (chunk_index 0), replayed first to late joiners. */
  private initFrame: Uint8Array | null = null;
  /** Recent segments keyed by chunk_index, replayed in order to late joiners. */
  private segFrames = new Map<number, Uint8Array>();
  private maxSegments = 12;
  /** Cap on buffered reassembly bytes — and on a single frame's declared length. */
  private static readonly MAX_PENDING_BYTES = 4 * 1024 * 1024;

  async fetch(request: Request): Promise<Response> {
    const upgrade = request.headers.get("Upgrade")?.toLowerCase();
    if (upgrade !== "websocket") {
      return json({ error: "expected websocket" }, { status: 400 });
    }
    const url = new URL(request.url);
    const role = (url.searchParams.get("role") ?? "sub").toLowerCase();
    // eslint-disable-next-line @typescript-eslint/no-explicit-any
    const pair = new (WebSocketPair as any)();
    const client: WebSocket = pair[0];
    const server: WebSocket = pair[1];
    server.accept();

    if (role === "pub") {
      // At most one publisher: kick the previous socket if it lingers.
      if (this.publisher) {
        try {
          this.publisher.close(1013, "publisher replaced");
        } catch {}
      }
      this.publisher = server;
    } else {
      this.subs.add(server);
      // Best-effort fast start: send init + last segments in index order.
      try {
        if (this.initFrame) {
          this.sendFrame(server, this.initFrame);
        }
        const idxs = Array.from(this.segFrames.keys()).sort((a, b) => a - b);
        for (const idx of idxs) {
          const fr = this.segFrames.get(idx);
          if (fr) this.sendFrame(server, fr);
        }
      } catch {}
    }

    server.addEventListener("message", (evt: MessageEvent) => {
      const data = evt.data;
      if (!(data instanceof ArrayBuffer)) {
        return;
      }
      const msg = new Uint8Array(data);
      if (role === "pub") {
        // Fanout immediately (no buffering delay).
        for (const sub of this.subs) {
          try {
            sub.send(data);
          } catch {}
        }
        // Also decode for buffered join.
        this.handlePublisherMsg(msg);
      } else {
        // Subscribers can send pings; ignore.
        if (msg.length > 0 && msg[0] === DIRECT_WIRE_TAG_PING) {
          return;
        }
      }
    });
    server.addEventListener("close", () => {
      if (server === this.publisher) {
        this.publisher = null;
        // Keep buffered init/segments; publisher may reconnect soon.
      }
      this.subs.delete(server);
    });
    server.addEventListener("error", () => {
      if (server === this.publisher) {
        this.publisher = null;
      }
      this.subs.delete(server);
    });
    return new Response(null, { status: 101, webSocket: client });
  }

  /**
   * Decode one publisher websocket message for the join buffer.
   * FRAME messages carry a whole frame; STREAM messages carry chunks of a
   * `[u32be len][frame bytes...]` stream reassembled here. The `frame` bytes
   * are `encode_object_frame(meta, data)`.
   */
  private handlePublisherMsg(msg: Uint8Array) {
    if (msg.length === 0) return;
    const tag = msg[0];
    if (tag === DIRECT_WIRE_TAG_PING) return;
    if (tag === DIRECT_WIRE_TAG_FRAME) {
      this.bufferFrame(msg.subarray(1));
      return;
    }
    if (tag !== DIRECT_WIRE_TAG_STREAM) {
      // Legacy: assume the whole thing is a frame.
      this.bufferFrame(msg);
      return;
    }
    // Append to reassembly buffer, then drain as many complete frames as possible.
    this.buf = concatU8(this.buf, msg.subarray(1));
    while (true) {
      if (this.want === null) {
        if (this.buf.length < 4) return;
        // `>>> 0` keeps the parsed length unsigned: without it, a first byte
        // >= 0x80 makes `<< 24` produce a negative number, and the negative
        // `want` would slice garbage frames out of the buffer.
        this.want =
          ((this.buf[0] << 24) | (this.buf[1] << 16) | (this.buf[2] << 8) | this.buf[3]) >>> 0;
        this.buf = this.buf.subarray(4);
      }
      const want = this.want;
      // A declared frame larger than the pending cap can never complete.
      // Previously the growth guard only ran after a frame was extracted, so a
      // bogus huge length let `buf` grow without bound; reset instead.
      if (want > StreamRelayDO.MAX_PENDING_BYTES) {
        this.resetReassembly();
        return;
      }
      if (this.buf.length < want) return;
      const frame = this.buf.subarray(0, want);
      this.buf = this.buf.subarray(want);
      this.want = null;
      this.bufferFrame(frame);
      // Avoid unbounded growth if publisher sends junk.
      if (this.buf.length > StreamRelayDO.MAX_PENDING_BYTES) {
        this.resetReassembly();
        return;
      }
    }
  }

  /** Drop all partially reassembled stream state. */
  private resetReassembly(): void {
    this.buf = new Uint8Array(0);
    this.want = null;
  }

  /** Stash a decoded frame for late joiners: index 0 is init, others are segments. */
  private bufferFrame(frame: Uint8Array) {
    const meta = tryDecodeObjectMeta(frame);
    const idx = meta?.timing?.chunk_index;
    if (typeof idx !== "number") return;
    if (idx === 0) {
      this.initFrame = frame.slice();
      return;
    }
    this.segFrames.set(idx, frame.slice());
    // Evict the lowest-numbered segments beyond the retention window.
    while (this.segFrames.size > this.maxSegments) {
      const oldest = Math.min(...this.segFrames.keys());
      this.segFrames.delete(oldest);
    }
  }

  /**
   * Send one frame to a socket, mirroring the Rust `direct_wire_send_frame`
   * format: stream bytes are `[u32be frame_len][frame]`, split into
   * STREAM-tagged chunks of at most DIRECT_WIRE_CHUNK_BYTES payload bytes.
   */
  private sendFrame(ws: WebSocket, frame: Uint8Array) {
    const out = new Uint8Array(4 + frame.length);
    const len = frame.length >>> 0;
    out[0] = (len >>> 24) & 0xff;
    out[1] = (len >>> 16) & 0xff;
    out[2] = (len >>> 8) & 0xff;
    out[3] = len & 0xff;
    out.set(frame, 4);
    for (let i = 0; i < out.length; i += DIRECT_WIRE_CHUNK_BYTES) {
      const chunk = out.subarray(i, Math.min(out.length, i + DIRECT_WIRE_CHUNK_BYTES));
      const msg = new Uint8Array(1 + chunk.length);
      msg[0] = DIRECT_WIRE_TAG_STREAM;
      msg.set(chunk, 1);
      ws.send(msg);
    }
  }
}
/** Concatenate two byte arrays; always returns a freshly allocated array. */
function concatU8(a: Uint8Array, b: Uint8Array): Uint8Array {
  // Preserve the copy contract even when one side is empty.
  if (a.length === 0) return b.slice();
  if (b.length === 0) return a.slice();
  const joined = new Uint8Array(a.length + b.length);
  joined.set(a, 0);
  joined.set(b, a.length);
  return joined;
}
/**
 * Decode the JSON metadata prefix of an object frame:
 * `[u32be metaLen][metaLen bytes of JSON][payload...]`.
 * Returns null when the frame is too short, the length is implausible,
 * or the JSON does not parse.
 */
function tryDecodeObjectMeta(frame: Uint8Array): ObjectMeta | null {
  if (frame.length < 4) return null;
  const metaLen = (frame[0] << 24) | (frame[1] << 16) | (frame[2] << 8) | frame[3];
  if (metaLen < 2 || frame.length < 4 + metaLen) return null;
  try {
    const text = new TextDecoder().decode(frame.subarray(4, 4 + metaLen));
    return JSON.parse(text) as ObjectMeta;
  } catch {
    return null;
  }
}