Fix hosted live playback
Some checks are pending
ci-gates / checks (push) Waiting to run
deploy-cloudflare / checks (push) Waiting to run
deploy-cloudflare / deploy (push) Blocked by required conditions

This commit is contained in:
every.channel 2026-05-03 22:10:41 -07:00
parent 340e2346ba
commit 6739b424ab
No known key found for this signature in database
4 changed files with 142 additions and 72 deletions

View file

@ -88,8 +88,8 @@ function destroyArchivePlayer() {
activeHlsPlayer = null; activeHlsPlayer = null;
} }
function bindPlayerSignals(watch, name) { function bindPlayerSignals(watch, name, extraCleanup) {
const cleanup = []; const cleanup = Array.isArray(extraCleanup) ? [...extraCleanup] : [];
let offlineTimer = null; let offlineTimer = null;
const clearOfflineTimer = () => { const clearOfflineTimer = () => {
@ -164,7 +164,6 @@ function mountPlayer(relayUrl, name) {
watch.setAttribute("name", name); watch.setAttribute("name", name);
watch.setAttribute("path", name); watch.setAttribute("path", name);
watch.setAttribute("volume", "1"); watch.setAttribute("volume", "1");
watch.setAttribute("muted", "");
watch.setAttribute("jitter", String(LIVE_JITTER_MS)); watch.setAttribute("jitter", String(LIVE_JITTER_MS));
// Force WebTransport in-browser; websocket fallback has shown degraded // Force WebTransport in-browser; websocket fallback has shown degraded
@ -184,28 +183,44 @@ function mountPlayer(relayUrl, name) {
video.playsInline = true; video.playsInline = true;
watch.appendChild(video); watch.appendChild(video);
mount.appendChild(watch); mount.appendChild(watch);
const cleanup = [];
let audioUnlocked = false;
const forceAudioOn = () => { const forceAudioOn = () => {
try { try {
watch.backend?.audio?.muted?.set?.(false); watch.backend?.audio?.muted?.set?.(false);
watch.backend?.audio?.volume?.set?.(1); watch.backend?.audio?.volume?.set?.(1);
watch.backend?.muted?.set?.(false);
watch.backend?.volume?.set?.(1);
} catch (_) { } catch (_) {
// Best effort only. // Best effort only.
} }
watch.removeAttribute("muted");
watch.muted = false;
video.muted = false;
video.volume = 1;
};
const keepAudioUnlocked = () => {
if (audioUnlocked) forceAudioOn();
}; };
const unlockAudio = () => { const unlockAudio = () => {
audioUnlocked = true;
forceAudioOn(); forceAudioOn();
watch.backend?.paused?.set?.(true); watch.backend?.paused?.set?.(true);
watch.backend?.paused?.set?.(false); watch.backend?.paused?.set?.(false);
video.muted = false;
video.volume = 1;
void video.play().catch(() => {}); void video.play().catch(() => {});
setHint(`Live: subscribed to ${name} (audio unlocked)`, "ok"); setHint(`Live: subscribed to ${name} (audio unlocked)`, "ok");
}; };
document.addEventListener("pointerdown", unlockAudio, { once: true }); document.addEventListener("pointerdown", unlockAudio, { once: true });
video.addEventListener("pointerdown", unlockAudio, { once: true }); video.addEventListener("pointerdown", unlockAudio, { once: true });
video.addEventListener("volumechange", keepAudioUnlocked);
cleanup.push(() => {
document.removeEventListener("pointerdown", unlockAudio);
video.removeEventListener("pointerdown", unlockAudio);
video.removeEventListener("volumechange", keepAudioUnlocked);
});
setHint(`Live: subscribed to ${name} (tap video to unmute)`, "warn"); setHint(`Live: subscribed to ${name} (tap video to unmute)`, "warn");
void video.play().catch(() => {}); void video.play().catch(() => {});
bindPlayerSignals(watch, name); bindPlayerSignals(watch, name, cleanup);
} }
async function ensureMoqWatchElement() { async function ensureMoqWatchElement() {

View file

@ -66,6 +66,7 @@ const DIRECT_HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(8);
const DIRECT_WIRE_CHUNK_BYTES: usize = 16 * 1024; const DIRECT_WIRE_CHUNK_BYTES: usize = 16 * 1024;
const WT_ARCHIVE_DEFAULT_TRACKS: &[&str] = const WT_ARCHIVE_DEFAULT_TRACKS: &[&str] =
&["catalog.json", "init.mp4", "video0.m4s", "audio0.m4s"]; &["catalog.json", "init.mp4", "video0.m4s", "audio0.m4s"];
const WT_PUBLISH_MOVFLAGS: &str = "empty_moov+frag_keyframe+separate_moof+omit_tfhd_offset";
use tokio::sync::mpsc; use tokio::sync::mpsc;
use tokio::sync::oneshot; use tokio::sync::oneshot;
use tokio::sync::RwLock; use tokio::sync::RwLock;
@ -471,7 +472,7 @@ struct WtPublishArgs {
transcode: bool, transcode: bool,
/// Transmit fMP4 fragments directly (passthrough mode). /// Transmit fMP4 fragments directly (passthrough mode).
/// When false, the importer may reframe into CMAF fragments. /// When false, the importer may reframe into CMAF fragments.
#[arg(long, default_value_t = true, action = clap::ArgAction::Set)] #[arg(long, default_value_t = false, action = clap::ArgAction::Set)]
passthrough: bool, passthrough: bool,
/// Danger: disable TLS verification for the relay. /// Danger: disable TLS verification for the relay.
#[arg(long, default_value_t = false)] #[arg(long, default_value_t = false)]
@ -528,7 +529,7 @@ struct NbcWtPublishArgs {
#[arg(long)] #[arg(long)]
source_url: String, source_url: String,
/// Transmit fMP4 fragments directly (passthrough mode). /// Transmit fMP4 fragments directly (passthrough mode).
#[arg(long, default_value_t = true, action = clap::ArgAction::Set)] #[arg(long, default_value_t = false, action = clap::ArgAction::Set)]
passthrough: bool, passthrough: bool,
/// Danger: disable TLS verification for the relay. /// Danger: disable TLS verification for the relay.
#[arg(long, default_value_t = false)] #[arg(long, default_value_t = false)]
@ -6829,13 +6830,7 @@ async fn wt_publish(args: WtPublishArgs) -> Result<()> {
cmd.args(["-c", "copy"]); cmd.args(["-c", "copy"]);
} }
cmd.args([ cmd.args(["-f", "mp4", "-movflags", WT_PUBLISH_MOVFLAGS, "pipe:1"]);
"-f",
"mp4",
"-movflags",
"empty_moov+frag_every_frame+separate_moof+omit_tfhd_offset",
"pipe:1",
]);
cmd.stdout(Stdio::piped()); cmd.stdout(Stdio::piped());
cmd.stderr(Stdio::inherit()); cmd.stderr(Stdio::inherit());
@ -6970,7 +6965,7 @@ async fn nbc_wt_publish(args: NbcWtPublishArgs) -> Result<()> {
"-f", "-f",
"mp4", "mp4",
"-movflags", "-movflags",
"empty_moov+frag_every_frame+separate_moof+omit_tfhd_offset", WT_PUBLISH_MOVFLAGS,
"pipe:1", "pipe:1",
]); ]);

View file

@ -16,33 +16,6 @@ fn chrome_path() -> Option<std::path::PathBuf> {
.or_else(|| which("chromium")) .or_else(|| which("chromium"))
} }
fn click_css(tab: &headless_chrome::Tab, css: &str) -> anyhow::Result<()> {
tab.wait_for_element(css)?.click()?;
Ok(())
}
fn wait_for_text(
tab: &headless_chrome::Tab,
needle: &str,
timeout: Duration,
) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout;
while Instant::now() < deadline {
let js = format!(
r#"(function() {{
return document.body && (document.body.innerText || '').includes({n});
}})();"#,
n = serde_json::to_string(needle).unwrap()
);
let v = tab.evaluate(&js, false)?;
if v.value.and_then(|v| v.as_bool()).unwrap_or(false) {
return Ok(());
}
std::thread::sleep(Duration::from_millis(200));
}
anyhow::bail!("timed out waiting for text: {needle}");
}
fn wait_for_blob_video(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> { fn wait_for_blob_video(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout; let deadline = Instant::now() + timeout;
while Instant::now() < deadline { while Instant::now() < deadline {
@ -80,12 +53,16 @@ fn debug_player_state(tab: &headless_chrome::Tab) -> anyhow::Result<String> {
let js = r#"(function() { let js = r#"(function() {
let v = document.querySelector('video'); let v = document.querySelector('video');
let src = v ? (v.src || '') : null; let src = v ? (v.src || '') : null;
let currentTime = v ? v.currentTime : null;
let muted = v ? v.muted : null;
let readyState = v ? v.readyState : null;
let buffered = v ? Array.from({ length: v.buffered.length }, (_, i) => [v.buffered.start(i), v.buffered.end(i)]) : [];
let placeholder = document.querySelector('.placeholder'); let placeholder = document.querySelector('.placeholder');
let placeholderText = placeholder ? (placeholder.innerText || '') : null; let placeholderText = placeholder ? (placeholder.innerText || '') : null;
let status = document.querySelector('.source-status'); let status = document.querySelector('.source-status');
let statusText = status ? (status.innerText || '') : null; let statusText = status ? (status.innerText || '') : null;
let sources = Array.from(document.querySelectorAll('button[data-testid="global-watch"]')).length; let sources = Array.from(document.querySelectorAll('button[data-testid="global-watch"]')).length;
return JSON.stringify({ hasVideo: !!v, videoSrc: src, placeholderText, statusText, sources }); return JSON.stringify({ hasVideo: !!v, videoSrc: src, currentTime, muted, readyState, buffered, placeholderText, statusText, sources });
})();"#; })();"#;
let v = tab.evaluate(js, false)?; let v = tab.evaluate(js, false)?;
Ok(v.value Ok(v.value
@ -93,20 +70,84 @@ fn debug_player_state(tab: &headless_chrome::Tab) -> anyhow::Result<String> {
.unwrap_or_default()) .unwrap_or_default())
} }
fn click_global_watch(tab: &headless_chrome::Tab, stream_id: &str) -> anyhow::Result<bool> { fn video_motion_sample(tab: &headless_chrome::Tab) -> anyhow::Result<Option<(f64, u32)>> {
let js = format!( let js = r#"(function() {
r#"(function() {{ let v = document.querySelector('video');
let target = {sid}; if (!v || !v.videoWidth || !v.videoHeight) return null;
let btn = document.querySelector(`button[data-stream-id="${{target}}"]`); let canvas = window.__ec_motion_canvas || (window.__ec_motion_canvas = document.createElement('canvas'));
if (!btn) return false; canvas.width = 160;
// Some SPA frameworks attach delegated listeners; dispatch a real click event. canvas.height = 90;
btn.dispatchEvent(new MouseEvent('click', {{ bubbles: true, cancelable: true, view: window }})); let ctx = canvas.getContext('2d', { willReadFrequently: true });
return true; ctx.drawImage(v, 0, 0, canvas.width, canvas.height);
}})();"#, let data = ctx.getImageData(0, 0, canvas.width, canvas.height).data;
sid = serde_json::to_string(stream_id).unwrap() let hash = 2166136261 >>> 0;
); for (let i = 0; i < data.length; i += 16) {
hash ^= data[i]; hash = Math.imul(hash, 16777619);
hash ^= data[i + 1]; hash = Math.imul(hash, 16777619);
hash ^= data[i + 2]; hash = Math.imul(hash, 16777619);
}
return JSON.stringify({ currentTime: v.currentTime, hash: hash >>> 0 });
})();"#;
let v = tab.evaluate(&js, false)?; let v = tab.evaluate(&js, false)?;
Ok(v.value.and_then(|v| v.as_bool()).unwrap_or(false)) let Some(s) = v.value.and_then(|v| v.as_str().map(|s| s.to_string())) else {
return Ok(None);
};
let value: serde_json::Value = serde_json::from_str(&s)?;
let current_time = value
.get("currentTime")
.and_then(|v| v.as_f64())
.unwrap_or_default();
let hash = value
.get("hash")
.and_then(|v| v.as_u64())
.unwrap_or_default() as u32;
Ok(Some((current_time, hash)))
}
fn wait_for_video_motion(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout;
let mut first: Option<(f64, u32)> = None;
while Instant::now() < deadline {
if let Some(sample) = video_motion_sample(tab)? {
if let Some((first_time, first_hash)) = first {
if sample.0 > first_time + 0.5 && sample.1 != first_hash {
return Ok(());
}
} else {
first = Some(sample);
}
}
std::thread::sleep(Duration::from_millis(500));
}
let st = debug_player_state(tab).unwrap_or_default();
anyhow::bail!("timed out waiting for changing video frames\nplayer_state={st}");
}
fn wait_for_unmuted_video(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout;
while Instant::now() < deadline {
let js = r#"(function() {
let v = document.querySelector('video');
return !!v && v.muted === false && v.volume > 0;
})();"#;
let v = tab.evaluate(js, false)?;
if v.value.and_then(|v| v.as_bool()).unwrap_or(false) {
return Ok(());
}
std::thread::sleep(Duration::from_millis(200));
}
let st = debug_player_state(tab).unwrap_or_default();
anyhow::bail!("timed out waiting for unmuted video\nplayer_state={st}");
}
fn watch_url(site_url: &str, relay_url: &str, stream_id: &str) -> anyhow::Result<String> {
let mut url = url::Url::parse(site_url)?;
url.set_path("/watch");
url.query_pairs_mut()
.clear()
.append_pair("url", relay_url)
.append_pair("name", stream_id);
Ok(url.to_string())
} }
#[test] #[test]
@ -123,6 +164,8 @@ fn e2e_remote_website_watch_existing_stream_id() -> anyhow::Result<()> {
let site_url = std::env::var("EVERY_CHANNEL_SITE_URL") let site_url = std::env::var("EVERY_CHANNEL_SITE_URL")
.unwrap_or_else(|_| "https://every.channel/".to_string()); .unwrap_or_else(|_| "https://every.channel/".to_string());
let relay_url = std::env::var("EVERY_CHANNEL_RELAY_URL")
.unwrap_or_else(|_| "https://cdn.moq.dev/anon".to_string());
let stream_id = match std::env::var("EVERY_CHANNEL_STREAM_ID") { let stream_id = match std::env::var("EVERY_CHANNEL_STREAM_ID") {
Ok(v) if !v.trim().is_empty() => v, Ok(v) if !v.trim().is_empty() => v,
_ => return Ok(()), // skip _ => return Ok(()), // skip
@ -133,7 +176,6 @@ fn e2e_remote_website_watch_existing_stream_id() -> anyhow::Result<()> {
.headless(true) .headless(true)
.args(vec![ .args(vec![
OsStr::new("--autoplay-policy=no-user-gesture-required"), OsStr::new("--autoplay-policy=no-user-gesture-required"),
OsStr::new("--mute-audio"),
OsStr::new("--disable-application-cache"), OsStr::new("--disable-application-cache"),
OsStr::new("--disable-service-worker"), OsStr::new("--disable-service-worker"),
OsStr::new("--disk-cache-size=0"), OsStr::new("--disk-cache-size=0"),
@ -142,22 +184,9 @@ fn e2e_remote_website_watch_existing_stream_id() -> anyhow::Result<()> {
.unwrap(); .unwrap();
let browser = headless_chrome::Browser::new(launch_options)?; let browser = headless_chrome::Browser::new(launch_options)?;
let tab = browser.new_tab()?; let tab = browser.new_tab()?;
tab.navigate_to(&site_url)?; tab.navigate_to(&watch_url(&site_url, &relay_url, &stream_id)?)?;
tab.wait_until_navigated()?; tab.wait_until_navigated()?;
click_css(&tab, "button[data-testid='global-refresh']")?;
let deadline = Instant::now() + Duration::from_secs(60);
loop {
if click_global_watch(&tab, &stream_id)? {
break;
}
if Instant::now() > deadline {
anyhow::bail!("timed out waiting for stream_id to appear in global list");
}
std::thread::sleep(Duration::from_millis(250));
}
// Ensure the player is instantiated. // Ensure the player is instantiated.
if let Err(err) = wait_for_video_element(&tab, Duration::from_secs(90)) { if let Err(err) = wait_for_video_element(&tab, Duration::from_secs(90)) {
let st = debug_player_state(&tab).unwrap_or_default(); let st = debug_player_state(&tab).unwrap_or_default();
@ -170,5 +199,9 @@ fn e2e_remote_website_watch_existing_stream_id() -> anyhow::Result<()> {
anyhow::bail!("{err}\nplayer_state={st}"); anyhow::bail!("{err}\nplayer_state={st}");
} }
tab.wait_for_element("video")?.click()?;
wait_for_unmuted_video(&tab, Duration::from_secs(10))?;
wait_for_video_motion(&tab, Duration::from_secs(30))?;
Ok(()) Ok(())
} }

View file

@ -0,0 +1,27 @@
# ECP-0117: Live Fragment Duration and Audio Unlock
Status: Draft
## Problem / context
Hosted live playback can subscribe to a local HDHomeRun stream while the visible frame stays frozen and audio stays muted. Browser inspection showed `currentTime` advancing through keyframe-spaced buffered ranges, but each range was only one microsecond long. The web wrapper also left the `<moq-watch muted>` attribute in place, so the unmuted state set by a user gesture could later be overwritten back to muted.
## Decision
Publish WebTransport fMP4 with keyframe-duration fragments (`frag_keyframe`) instead of one fragment per frame, and default `wt-publish` / `nbc-wt-publish` to the non-passthrough CMAF sample path. On the hosted web player, remove the `muted` attribute and reapply unmuted state to both the wrapper and underlying video element after a user gesture.
## Consequences
- Browser MSE receives continuous segment-duration ranges instead of isolated zero-length frame ranges.
- Live playback accepts GOP-sized fragment latency, matching the current 48-frame live GOP.
- Relay subscribers receive `video0.m4s` and `audio0.m4s` media groups by default instead of catalog-only passthrough announcements.
- Audio remains gesture-gated for autoplay policy, but the gesture now actually unmutes the player.
## Alternatives considered
- Raise web jitter again. Rejected because the buffered media ranges were effectively zero-length; adding more latency does not turn zero-length ranges into playable media.
- Keep passthrough mode as the default. Rejected because relay probes received only `catalog.json` while the non-passthrough sample path delivered video and audio media groups.
## Rollout / teardown
Rebuild/restart local and hosted publishers, deploy the updated web asset, and verify hosted playback by checking frame hashes over time. Teardown is restoring `WT_PUBLISH_MOVFLAGS` to `frag_every_frame`, restoring passthrough defaults to true, and restoring the prior muted wrapper behavior.