Stabilize hosted live video playback
Some checks are pending
ci-gates / checks (push) Waiting to run
deploy-cloudflare / checks (push) Waiting to run
deploy-cloudflare / deploy (push) Blocked by required conditions

This commit is contained in:
every.channel 2026-05-03 22:38:39 -07:00
parent 6739b424ab
commit bd5d9857ed
No known key found for this signature in database
4 changed files with 83 additions and 67 deletions

View file

@@ -172,16 +172,11 @@ function mountPlayer(relayUrl, name) {
watch.connection.websocket = { enabled: false };
}
// Prefer a video element for native controls/audio routing.
// Start muted to satisfy autoplay policy, then unlock audio on user gesture.
const video = document.createElement("video");
video.className = "archiveVideo";
video.controls = true;
video.autoplay = true;
video.muted = true;
video.volume = 1;
video.playsInline = true;
watch.appendChild(video);
const canvas = document.createElement("canvas");
canvas.className = "canvas";
canvas.width = 1280;
canvas.height = 720;
watch.appendChild(canvas);
mount.appendChild(watch);
const cleanup = [];
let audioUnlocked = false;
@@ -196,30 +191,21 @@ function mountPlayer(relayUrl, name) {
}
watch.removeAttribute("muted");
watch.muted = false;
video.muted = false;
video.volume = 1;
};
const keepAudioUnlocked = () => {
if (audioUnlocked) forceAudioOn();
};
const unlockAudio = () => {
audioUnlocked = true;
forceAudioOn();
watch.backend?.paused?.set?.(true);
watch.backend?.paused?.set?.(false);
void video.play().catch(() => {});
setHint(`Live: subscribed to ${name} (audio unlocked)`, "ok");
};
document.addEventListener("pointerdown", unlockAudio, { once: true });
video.addEventListener("pointerdown", unlockAudio, { once: true });
video.addEventListener("volumechange", keepAudioUnlocked);
canvas.addEventListener("pointerdown", unlockAudio, { once: true });
cleanup.push(() => {
document.removeEventListener("pointerdown", unlockAudio);
video.removeEventListener("pointerdown", unlockAudio);
video.removeEventListener("volumechange", keepAudioUnlocked);
canvas.removeEventListener("pointerdown", unlockAudio);
});
setHint(`Live: subscribed to ${name} (tap video to unmute)`, "warn");
void video.play().catch(() => {});
setHint(`Live: subscribed to ${name} (tap player to unmute)`, "warn");
bindPlayerSignals(watch, name, cleanup);
}

View file

@@ -66,6 +66,8 @@ const DIRECT_HEARTBEAT_TIMEOUT: Duration = Duration::from_secs(8);
const DIRECT_WIRE_CHUNK_BYTES: usize = 16 * 1024;
const WT_ARCHIVE_DEFAULT_TRACKS: &[&str] =
&["catalog.json", "init.mp4", "video0.m4s", "audio0.m4s"];
const WT_PUBLISH_GOP_FRAMES: u32 = 1;
const WT_PUBLISH_VIDEO_FILTER: &str = "fps=6";
const WT_PUBLISH_MOVFLAGS: &str = "empty_moov+frag_keyframe+separate_moof+omit_tfhd_offset";
use tokio::sync::mpsc;
use tokio::sync::oneshot;
@@ -470,6 +472,15 @@ struct WtPublishArgs {
/// If set, transcode to H.264/AAC before fragmenting to fMP4.
#[arg(long, default_value_t = true, action = clap::ArgAction::Set)]
transcode: bool,
/// ffmpeg video filter used by the transcode path.
#[arg(long, default_value = WT_PUBLISH_VIDEO_FILTER)]
video_filter: String,
/// H.264 GOP/keyframe interval in frames for the transcode path.
#[arg(long, default_value_t = WT_PUBLISH_GOP_FRAMES)]
gop_frames: u32,
/// fMP4 movflags used for WebTransport publishing.
#[arg(long, default_value = WT_PUBLISH_MOVFLAGS)]
movflags: String,
/// Transmit fMP4 fragments directly (passthrough mode).
/// When false, the importer may reframe into CMAF fragments.
#[arg(long, default_value_t = false, action = clap::ArgAction::Set)]
@@ -531,6 +542,12 @@ struct NbcWtPublishArgs {
/// Transmit fMP4 fragments directly (passthrough mode).
#[arg(long, default_value_t = false, action = clap::ArgAction::Set)]
passthrough: bool,
/// H.264 GOP/keyframe interval in frames.
#[arg(long, default_value_t = WT_PUBLISH_GOP_FRAMES)]
gop_frames: u32,
/// fMP4 movflags used for WebTransport publishing.
#[arg(long, default_value = WT_PUBLISH_MOVFLAGS)]
movflags: String,
/// Danger: disable TLS verification for the relay.
#[arg(long, default_value_t = false)]
tls_disable_verify: bool,
@@ -6795,6 +6812,8 @@ async fn wt_publish(args: WtPublishArgs) -> Result<()> {
"0:a:0?",
"-c:v",
"libx264",
"-vf",
args.video_filter.as_str(),
"-preset",
"veryfast",
"-tune",
@@ -6804,9 +6823,9 @@ async fn wt_publish(args: WtPublishArgs) -> Result<()> {
"-profile:v",
"main",
"-g",
"48",
&args.gop_frames.to_string(),
"-keyint_min",
"48",
&args.gop_frames.to_string(),
"-sc_threshold",
"0",
"-threads",
@@ -6830,7 +6849,7 @@ async fn wt_publish(args: WtPublishArgs) -> Result<()> {
cmd.args(["-c", "copy"]);
}
cmd.args(["-f", "mp4", "-movflags", WT_PUBLISH_MOVFLAGS, "pipe:1"]);
cmd.args(["-f", "mp4", "-movflags", args.movflags.as_str(), "pipe:1"]);
cmd.stdout(Stdio::piped());
cmd.stderr(Stdio::inherit());
@@ -6921,7 +6940,6 @@ async fn nbc_wt_publish(args: NbcWtPublishArgs) -> Result<()> {
.with_context(|| format!("failed to open NBC browser session for {}", args.source_url))?;
let fps = nbc_capture_fps().max(1);
let gop = (fps * 4).clamp(12, 48);
let mut cmd = TokioCommand::new("ffmpeg");
cmd.arg("-hide_banner")
@@ -6954,9 +6972,9 @@ async fn nbc_wt_publish(args: NbcWtPublishArgs) -> Result<()> {
"main",
"-g",
])
.arg(gop.to_string())
.arg(args.gop_frames.to_string())
.arg("-keyint_min")
.arg(gop.to_string())
.arg(args.gop_frames.to_string())
.args([
"-sc_threshold",
"0",
@@ -6965,7 +6983,7 @@ async fn nbc_wt_publish(args: NbcWtPublishArgs) -> Result<()> {
"-f",
"mp4",
"-movflags",
WT_PUBLISH_MOVFLAGS,
args.movflags.as_str(),
"pipe:1",
]);

View file

@@ -16,14 +16,11 @@ fn chrome_path() -> Option<std::path::PathBuf> {
.or_else(|| which("chromium"))
}
fn wait_for_blob_video(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
fn wait_for_canvas_element(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout;
while Instant::now() < deadline {
let js = r#"(function() {
let v = document.querySelector('video');
if (!v) return false;
if (typeof v.src !== 'string') return false;
return v.src.startsWith('blob:');
return !!document.querySelector('moq-watch canvas');
})();"#;
let v = tab.evaluate(js, false)?;
if v.value.and_then(|v| v.as_bool()).unwrap_or(false) {
@@ -31,14 +28,14 @@ fn wait_for_blob_video(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow:
}
std::thread::sleep(Duration::from_millis(200));
}
anyhow::bail!("timed out waiting for video blob src");
anyhow::bail!("timed out waiting for canvas player");
}
fn wait_for_video_element(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
fn wait_for_moq_watch_element(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout;
while Instant::now() < deadline {
let js = r#"(function() {
return !!document.querySelector('video');
return !!document.querySelector('moq-watch');
})();"#;
let v = tab.evaluate(js, false)?;
if v.value.and_then(|v| v.as_bool()).unwrap_or(false) {
@@ -46,23 +43,32 @@ fn wait_for_video_element(tab: &headless_chrome::Tab, timeout: Duration) -> anyh
}
std::thread::sleep(Duration::from_millis(200));
}
anyhow::bail!("timed out waiting for <video> element");
anyhow::bail!("timed out waiting for <moq-watch> element");
}
fn debug_player_state(tab: &headless_chrome::Tab) -> anyhow::Result<String> {
let js = r#"(function() {
let v = document.querySelector('video');
let src = v ? (v.src || '') : null;
let currentTime = v ? v.currentTime : null;
let muted = v ? v.muted : null;
let readyState = v ? v.readyState : null;
let buffered = v ? Array.from({ length: v.buffered.length }, (_, i) => [v.buffered.start(i), v.buffered.end(i)]) : [];
let watch = document.querySelector('moq-watch');
let canvas = document.querySelector('moq-watch canvas');
let placeholder = document.querySelector('.placeholder');
let placeholderText = placeholder ? (placeholder.innerText || '') : null;
let status = document.querySelector('.source-status');
let statusText = status ? (status.innerText || '') : null;
let sources = Array.from(document.querySelectorAll('button[data-testid="global-watch"]')).length;
return JSON.stringify({ hasVideo: !!v, videoSrc: src, currentTime, muted, readyState, buffered, placeholderText, statusText, sources });
let hint = document.querySelector('#hint');
let hintText = hint ? (hint.innerText || '') : null;
return JSON.stringify({
hasWatch: !!watch,
hasCanvas: !!canvas,
canvasWidth: canvas ? canvas.width : null,
canvasHeight: canvas ? canvas.height : null,
muted: watch ? watch.muted : null,
volume: watch ? watch.volume : null,
hintText,
placeholderText,
statusText,
sources
});
})();"#;
let v = tab.evaluate(js, false)?;
Ok(v.value
@@ -70,15 +76,15 @@ fn debug_player_state(tab: &headless_chrome::Tab) -> anyhow::Result<String> {
.unwrap_or_default())
}
fn video_motion_sample(tab: &headless_chrome::Tab) -> anyhow::Result<Option<(f64, u32)>> {
fn canvas_motion_sample(tab: &headless_chrome::Tab) -> anyhow::Result<Option<(f64, u32)>> {
let js = r#"(function() {
let v = document.querySelector('video');
if (!v || !v.videoWidth || !v.videoHeight) return null;
let source = document.querySelector('moq-watch canvas');
if (!source || !source.width || !source.height) return null;
let canvas = window.__ec_motion_canvas || (window.__ec_motion_canvas = document.createElement('canvas'));
canvas.width = 160;
canvas.height = 90;
let ctx = canvas.getContext('2d', { willReadFrequently: true });
ctx.drawImage(v, 0, 0, canvas.width, canvas.height);
ctx.drawImage(source, 0, 0, canvas.width, canvas.height);
let data = ctx.getImageData(0, 0, canvas.width, canvas.height).data;
let hash = 2166136261 >>> 0;
for (let i = 0; i < data.length; i += 16) {
@@ -86,7 +92,7 @@ fn video_motion_sample(tab: &headless_chrome::Tab) -> anyhow::Result<Option<(f64
hash ^= data[i + 1]; hash = Math.imul(hash, 16777619);
hash ^= data[i + 2]; hash = Math.imul(hash, 16777619);
}
return JSON.stringify({ currentTime: v.currentTime, hash: hash >>> 0 });
return JSON.stringify({ currentTime: performance.now() / 1000, hash: hash >>> 0 });
})();"#;
let v = tab.evaluate(&js, false)?;
let Some(s) = v.value.and_then(|v| v.as_str().map(|s| s.to_string())) else {
@@ -104,11 +110,11 @@ fn video_motion_sample(tab: &headless_chrome::Tab) -> anyhow::Result<Option<(f64
Ok(Some((current_time, hash)))
}
fn wait_for_video_motion(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
fn wait_for_canvas_motion(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout;
let mut first: Option<(f64, u32)> = None;
while Instant::now() < deadline {
if let Some(sample) = video_motion_sample(tab)? {
if let Some(sample) = canvas_motion_sample(tab)? {
if let Some((first_time, first_hash)) = first {
if sample.0 > first_time + 0.5 && sample.1 != first_hash {
return Ok(());
@@ -120,15 +126,15 @@ fn wait_for_video_motion(tab: &headless_chrome::Tab, timeout: Duration) -> anyho
std::thread::sleep(Duration::from_millis(500));
}
let st = debug_player_state(tab).unwrap_or_default();
anyhow::bail!("timed out waiting for changing video frames\nplayer_state={st}");
anyhow::bail!("timed out waiting for changing canvas frames\nplayer_state={st}");
}
fn wait_for_unmuted_video(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
fn wait_for_unmuted_player(tab: &headless_chrome::Tab, timeout: Duration) -> anyhow::Result<()> {
let deadline = Instant::now() + timeout;
while Instant::now() < deadline {
let js = r#"(function() {
let v = document.querySelector('video');
return !!v && v.muted === false && v.volume > 0;
let watch = document.querySelector('moq-watch');
return !!watch && watch.muted === false && watch.volume > 0 && !watch.hasAttribute('muted');
})();"#;
let v = tab.evaluate(js, false)?;
if v.value.and_then(|v| v.as_bool()).unwrap_or(false) {
@@ -137,7 +143,7 @@ fn wait_for_unmuted_video(tab: &headless_chrome::Tab, timeout: Duration) -> anyh
std::thread::sleep(Duration::from_millis(200));
}
let st = debug_player_state(tab).unwrap_or_default();
anyhow::bail!("timed out waiting for unmuted video\nplayer_state={st}");
anyhow::bail!("timed out waiting for unmuted player\nplayer_state={st}");
}
fn watch_url(site_url: &str, relay_url: &str, stream_id: &str) -> anyhow::Result<String> {
@@ -188,20 +194,19 @@ fn e2e_remote_website_watch_existing_stream_id() -> anyhow::Result<()> {
tab.wait_until_navigated()?;
// Ensure the player is instantiated.
if let Err(err) = wait_for_video_element(&tab, Duration::from_secs(90)) {
if let Err(err) = wait_for_moq_watch_element(&tab, Duration::from_secs(90)) {
let st = debug_player_state(&tab).unwrap_or_default();
anyhow::bail!("{err}\nplayer_state={st}");
}
// We consider playback "started" when the video uses a blob: URL (MSE).
if let Err(err) = wait_for_blob_video(&tab, Duration::from_secs(90)) {
if let Err(err) = wait_for_canvas_element(&tab, Duration::from_secs(90)) {
let st = debug_player_state(&tab).unwrap_or_default();
anyhow::bail!("{err}\nplayer_state={st}");
}
tab.wait_for_element("video")?.click()?;
wait_for_unmuted_video(&tab, Duration::from_secs(10))?;
wait_for_video_motion(&tab, Duration::from_secs(30))?;
tab.wait_for_element("moq-watch canvas")?.click()?;
wait_for_unmuted_player(&tab, Duration::from_secs(10))?;
wait_for_canvas_motion(&tab, Duration::from_secs(30))?;
Ok(())
}

View file

@@ -8,12 +8,15 @@ Hosted live playback can subscribe to a local HDHomeRun stream while the visible
## Decision
Publish WebTransport fMP4 with keyframe-duration fragments (`frag_keyframe`) instead of one fragment per frame, and default `wt-publish` / `nbc-wt-publish` to the non-passthrough CMAF sample path. On the hosted web player, remove the `muted` attribute and reapply unmuted state to both the wrapper and underlying video element after a user gesture.
Publish WebTransport fMP4 with keyframe fragments where every emitted video frame is a keyframe (`g=1`, `keyint_min=1`). For HDHomeRun-style live input, cap the default WebTransport transcode to 6 fps so the hosted watcher receives independently decodable video groups at a sustainable cadence. Expose publisher knobs for the ffmpeg video filter, GOP interval, and fMP4 movflags so runtime operators can tune without another code edit. Keep `wt-publish` / `nbc-wt-publish` on the non-passthrough CMAF sample path. On the hosted web player, render live video through the `<moq-watch>` canvas/WebCodecs path instead of the `<video>`/MSE path, remove the `muted` attribute, and reapply unmuted state to the watcher backend after a user gesture.
## Consequences
- Browser MSE receives continuous segment-duration ranges instead of isolated zero-length frame ranges.
- Live playback accepts GOP-sized fragment latency, matching the current 48-frame live GOP.
- Browser playback receives continuous media ranges without turning each GOP into a single playback jump.
- Live playback and observation diffing receive independently decodable video groups at frame cadence.
- WebTransport video publishing uses more bandwidth per frame, but the 6 fps cap keeps group churn lower than full-rate all-intra OTA publishing.
- Operators can raise or lower `--video-filter`, `--gop-frames`, and `--movflags` from publisher configuration instead of rebuilding.
- Hosted live rendering avoids the upstream MSE path that side-browser validation showed repeatedly skipping slow groups.
- Relay subscribers receive `video0.m4s` and `audio0.m4s` media groups by default instead of catalog-only passthrough announcements.
- Audio remains gesture-gated for autoplay policy, but the gesture now actually unmutes the player.
@@ -21,7 +24,11 @@ Publish WebTransport fMP4 with keyframe-duration fragments (`frag_keyframe`) ins
- Raise web jitter again. Rejected because the buffered media ranges were effectively zero-length; more latency does not turn still ranges into playable media.
- Keep passthrough mode as the default. Rejected because relay probes received only `catalog.json` while the non-passthrough sample path delivered video and audio media groups.
- Use keyframe-duration fragments. Rejected after side-browser validation showed repeated `seeking forward/backward` corrections and GOP-sized visual jumps even though audio was healthy.
- Keep 48-frame GOPs with every-frame fMP4 fragments. Rejected because relay archive proof still showed 48-frame video groups; `moq-mux` groups video by keyframe in the non-passthrough path.
- Use full-rate or 12 fps all-intra every-frame fragments. Rejected because relay proof showed one-frame video groups, but side-browser validation produced heavy `skipping slow group` churn from too many tiny media groups.
- Keep the `<video>` child renderer. Rejected because the hosted side browser showed the MSE renderer repeating frames and emitting slow-group skips while subscribed.
## Rollout / teardown
Rebuild/restart local and hosted publishers, deploy the updated web asset, and verify hosted playback by checking frame hashes over time. Teardown is restoring `WT_PUBLISH_MOVFLAGS` to `frag_every_frame`, restoring passthrough defaults to true, and restoring the prior muted wrapper behavior.
Rebuild/restart local and hosted publishers, deploy the updated web asset, and verify hosted playback by checking canvas frame hashes over time plus side-browser console seek corrections. Teardown is setting `--gop-frames 48`, setting `--video-filter` back to the prior source cadence, restoring passthrough defaults to true, and restoring the prior `<video>` child plus muted wrapper behavior.