Bridge iroh control announcements into web stream discovery
This commit is contained in:
parent
74842eb25e
commit
2778715304
8 changed files with 528 additions and 26 deletions
|
|
@ -151,6 +151,7 @@ interface Env {
|
|||
EC_TURN_USER_PREFIX?: string;
|
||||
EC_TURN_HOST?: string;
|
||||
EC_TURN_HMAC?: string;
|
||||
EC_STREAM_UPSERT_TOKEN?: string;
|
||||
}
|
||||
|
||||
type DirectoryEntry = {
|
||||
|
|
@ -173,6 +174,21 @@ type DirectoryList = {
|
|||
entries: DirectoryEntry[];
|
||||
};
|
||||
|
||||
// A publicly discoverable live stream announced via POST /api/stream-upsert.
// Entries are persisted in Durable Object storage under streamKey(stream_id)
// and are pruned once expired (see pruneAndCap).
type PublicStreamEntry = {
  // Unique stream identifier; also used to derive the storage key ("s:" prefix).
  stream_id: string;
  // Human-readable title shown in discovery listings.
  title: string;
  // URL of the relay serving the stream media.
  relay_url: string;
  // Broadcast name within the relay.
  broadcast_name: string;
  // Track to play; upsert defaults this to "video0.m4s" when not supplied.
  track_name: string;
  // Last upsert time (ms since epoch); used for newest-first sorting and capping.
  updated_ms: number;
  // Absolute expiry time (ms since epoch); entries at or past this are deleted.
  expires_ms: number;
};
|
||||
|
||||
// Response payload for GET /api/public-streams.
type PublicStreamList = {
  // Server clock (ms since epoch) at the moment the list was generated.
  now_ms: number;
  // Non-expired entries, sorted newest-updated first.
  entries: PublicStreamEntry[];
};
|
||||
|
||||
function nowMs(): number {
|
||||
return Date.now();
|
||||
}
|
||||
|
|
@ -190,6 +206,10 @@ function answerKey(streamId: string): string {
|
|||
return `a:${streamId}`;
|
||||
}
|
||||
|
||||
function streamKey(streamId: string): string {
|
||||
return `s:${streamId}`;
|
||||
}
|
||||
|
||||
async function listWithPrefix<T>(
|
||||
storage: DurableObjectStorage,
|
||||
prefix: string,
|
||||
|
|
@ -212,10 +232,12 @@ async function pruneAndCap(
|
|||
): Promise<void> {
|
||||
const entries = await listWithPrefix<DirectoryEntry>(storage, "e:");
|
||||
const answers = await listWithPrefix<AnswerEntry>(storage, "a:");
|
||||
const streams = await listWithPrefix<PublicStreamEntry>(storage, "s:");
|
||||
|
||||
const toDelete: string[] = [];
|
||||
for (const [k, v] of entries) if (v.expires_ms <= now) toDelete.push(k);
|
||||
for (const [k, v] of answers) if (v.expires_ms <= now) toDelete.push(k);
|
||||
for (const [k, v] of streams) if (v.expires_ms <= now) toDelete.push(k);
|
||||
|
||||
// Cap growth defensively. This is not spam-resistant; it's a bootstrap rendezvous.
|
||||
if (entries.length > 200) {
|
||||
|
|
@ -234,6 +256,14 @@ async function pruneAndCap(
|
|||
const keep = new Set(sorted.map((a) => answerKey(a.stream_id)));
|
||||
for (const [k] of answers) if (!keep.has(k)) toDelete.push(k);
|
||||
}
|
||||
if (streams.length > 1000) {
|
||||
const sorted = streams
|
||||
.map(([, v]) => v)
|
||||
.sort((a, b) => b.updated_ms - a.updated_ms)
|
||||
.slice(0, 1000);
|
||||
const keep = new Set(sorted.map((s) => streamKey(s.stream_id)));
|
||||
for (const [k] of streams) if (!keep.has(k)) toDelete.push(k);
|
||||
}
|
||||
|
||||
if (toDelete.length > 0) {
|
||||
// Delete in chunks to avoid oversized requests.
|
||||
|
|
@ -255,13 +285,32 @@ type AnswerPostReq = {
|
|||
answer: string;
|
||||
};
|
||||
|
||||
// Request body for POST /api/stream-upsert.
// stream_id, title, relay_url and broadcast_name are required; the handler
// rejects the request with 400 when any of them is missing.
type StreamUpsertReq = {
  stream_id: string;
  title: string;
  relay_url: string;
  broadcast_name: string;
  // Optional; the handler substitutes "video0.m4s" when missing or empty.
  track_name?: string;
  // Optional requested absolute expiry (ms since epoch). The server clamps the
  // resulting TTL into the 5s–60s range; when omitted it defaults to now+20s.
  expires_ms?: number;
};
|
||||
|
||||
function authBearerToken(request: Request): string | null {
|
||||
const auth = request.headers.get("authorization");
|
||||
if (!auth) return null;
|
||||
const m = /^Bearer\s+(.+)$/i.exec(auth.trim());
|
||||
if (!m) return null;
|
||||
return m[1];
|
||||
}
|
||||
|
||||
// Minimal bootstrap API Durable Object. The binding name is historical; we keep it stable so
|
||||
// existing migrations and wrangler config remain valid while removing Cloudflare Containers.
|
||||
export class EcApiContainer implements DurableObject {
|
||||
private state: DurableObjectState;
|
||||
private env: Env;
|
||||
|
||||
constructor(state: DurableObjectState) {
|
||||
constructor(state: DurableObjectState, env: Env) {
|
||||
this.state = state;
|
||||
this.env = env;
|
||||
}
|
||||
|
||||
async fetch(request: Request): Promise<Response> {
|
||||
|
|
@ -275,6 +324,61 @@ export class EcApiContainer implements DurableObject {
|
|||
return jsonNoStore({ ok: true });
|
||||
}
|
||||
|
||||
if (url.pathname === "/api/public-streams") {
|
||||
const items = await listWithPrefix<PublicStreamEntry>(this.state.storage, "s:");
|
||||
const entries = items
|
||||
.map(([, v]) => v)
|
||||
.filter((v) => v.expires_ms > now)
|
||||
.sort((a, b) => b.updated_ms - a.updated_ms);
|
||||
const resp: PublicStreamList = { now_ms: now, entries };
|
||||
return jsonNoStore(resp);
|
||||
}
|
||||
|
||||
if (url.pathname === "/api/stream-upsert") {
|
||||
if (request.method !== "POST") {
|
||||
return jsonNoStore({ error: "method not allowed" }, { status: 405 });
|
||||
}
|
||||
|
||||
const requiredToken = this.env.EC_STREAM_UPSERT_TOKEN?.trim();
|
||||
if (requiredToken) {
|
||||
const supplied = authBearerToken(request);
|
||||
if (!supplied || supplied !== requiredToken) {
|
||||
return jsonNoStore({ error: "unauthorized" }, { status: 401 });
|
||||
}
|
||||
}
|
||||
|
||||
let body: StreamUpsertReq;
|
||||
try {
|
||||
body = (await request.json()) as StreamUpsertReq;
|
||||
} catch {
|
||||
return jsonNoStore({ error: "invalid json" }, { status: 400 });
|
||||
}
|
||||
|
||||
if (!body.stream_id || !body.title || !body.relay_url || !body.broadcast_name) {
|
||||
return jsonNoStore(
|
||||
{ error: "missing stream_id/title/relay_url/broadcast_name" },
|
||||
{ status: 400 },
|
||||
);
|
||||
}
|
||||
|
||||
const requestedExpires = body.expires_ms ?? now + 20_000;
|
||||
const requestedTtl = Math.max(0, requestedExpires - now);
|
||||
const ttlMs = Math.min(60_000, Math.max(5_000, requestedTtl));
|
||||
|
||||
const entry: PublicStreamEntry = {
|
||||
stream_id: clampStr(body.stream_id, 256),
|
||||
title: clampStr(body.title, 128),
|
||||
relay_url: clampStr(body.relay_url, 512),
|
||||
broadcast_name: clampStr(body.broadcast_name, 256),
|
||||
track_name: clampStr(body.track_name || "video0.m4s", 256),
|
||||
updated_ms: now,
|
||||
expires_ms: now + ttlMs,
|
||||
};
|
||||
|
||||
await this.state.storage.put(streamKey(entry.stream_id), entry);
|
||||
return jsonNoStore({ ok: true, ttl_ms: ttlMs, entry });
|
||||
}
|
||||
|
||||
if (url.pathname === "/api/directory") {
|
||||
const items = await listWithPrefix<DirectoryEntry>(this.state.storage, "e:");
|
||||
const entries = items
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue