every.channel: sanitized baseline

This commit is contained in:
every.channel 2026-02-15 16:17:27 -05:00
commit 897e556bea
No known key found for this signature in database
258 changed files with 74298 additions and 0 deletions

View file

@ -0,0 +1,94 @@
[package]
name = "iroh-live"
version = "0.1.0"
edition = "2024"
description = "audio and video live streaming over iroh"
authors = ["Franz Heinzmann <frando@n0.computer>"]
repository = "https://github.com/n0-computer/iroh-live"
license = "MIT OR Apache-2.0"

[dependencies]
anyhow = "1.0.100"
bytemuck = "1.24.0"
byte-unit = { version = "5.1", features = ["bit"] }
data-encoding = "2.9.0"
derive_more = { version = "2.0.1", features = ["display", "debug", "eq"] }
# ffmpeg handles encode/decode; per-OS hardware acceleration features are
# enabled in the target-specific tables at the bottom of this file.
ffmpeg-next = { version = "8.0.0", default-features = false, features = ["device", "format", "filter", "software-resampling", "software-scaling"] }
ffmpeg-sys-next = { version = "8.0.1", optional = true }
firewheel = { version = "0.9.1", features = ["cpal", "peak_meter_node", "std", "stream_nodes", "cpal_resample_inputs"] }
hang = "0.9.0"
image = { version = "0.25.8", default-features = false }
iroh = "0.95.1"
iroh-gossip = "0.95.0"
# Sibling workspace crate (path dependency).
iroh-moq = { path = "../iroh-moq" }
iroh-tickets = "0.2.0"
moq-lite = "0.10.1"
moq-media = { version = "0.1.0", path = "../moq-media" }
n0-error = { version = "0.1.2", features = ["anyhow"] }
n0-future = "0.3.1"
n0-watcher = "0.6.0"
# Camera capture backend.
nokhwa = { version = "0.10", features = [
    "input-native",
    "input-v4l",
    "output-threaded",
] }
postcard = "1.1.3"
rand = "0.9.2"
serde = { version = "1.0.228", features = ["derive"] }
strum = { version = "0.27", features = ["derive"] }
tokio = { version = "1.48.0", features = ["sync"] }
tokio-util = "0.7.17"
tracing = "0.1.41"
# Screen capture backend.
xcap = "0.8"
webrtc-audio-processing = { version = "0.5.0", features = ["bundled"] }
bytes = "1.11.0"
# Git dependency pinned to a branch compatible with iroh 0.95.
iroh-smol-kv = { git = "https://github.com/Frando/iroh-smol-kv", branch = "iroh-095", version = "0.3.1", default-features = false }
buf-list = "1.1.2"

# Only used by the examples.
[dev-dependencies]
clap = { version = "4.5", features = ["derive"] }
eframe = "0.33.0"
postcard = "1.1.3"
tokio = { version = "1.48.0", features = ["full"] }
tracing-subscriber = "0.3.20"

[features]
default = []
# Enable static build of ffmpeg
static = [
    "ffmpeg-next/static",
    "ffmpeg-next/build-lib-openssl",
    "ffmpeg-next/build-license-version3",
    "ffmpeg-next/build-lib-opus",
    "ffmpeg-next/build-lib-x264",
    "ffmpeg-next/build-license-gpl",
    "dep:ffmpeg-sys-next",
]

# Per-OS hardware acceleration features for the static ffmpeg build.
[target.'cfg(target_os = "macos")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-videotoolbox",
    "build-audiotoolbox",
] }

[target.'cfg(target_os = "linux")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-vaapi",
    # "build-vulkan",
    # "build-lib-libmfx",
] }

[target.'cfg(target_os = "windows")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-lib-d3d11va",
    "build-lib-dxva2",
    # "build-nvidia",
    # "build-amf",
] }

[target.'cfg(target_os = "android")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    # "build-mediacodec",
] }

View file

@ -0,0 +1,3 @@
# iroh-live
See [../README.md](../README.md)

View file

@ -0,0 +1,143 @@
use std::{
path::{Path, PathBuf},
process::Stdio,
};
use bytes::BytesMut;
use clap::ValueEnum;
use moq_lite::BroadcastProducer;
use n0_error::Result;
use tokio::{
io::{AsyncRead, AsyncReadExt},
process::Command,
};
use tracing::info;
/// Container format accepted by the importer.
#[derive(ValueEnum, Debug, Clone, Default, Copy)]
pub enum ImportType {
    /// Fragmented mp4 / CMAF (the default).
    #[default]
    Cmaf,
    /// Raw h264 annex-b byte stream.
    AnnexB,
}

impl ImportType {
    /// The format name as passed to `hang::import::Decoder`.
    pub fn as_str(&self) -> &'static str {
        match self {
            Self::Cmaf => "cmaf",
            Self::AnnexB => "annex-b",
        }
    }
}
// Taken from
// https://github.com/moq-dev/moq/blob/30c28b8c3b6bd941fe1279c0fd8855139a1d4f6a/rs/hang-cli/src/import.rs
// License: Apache-2.0

/// Feeds a raw byte stream (CMAF or annex-b) into a hang decoder that
/// publishes the parsed media onto a moq `BroadcastProducer`.
pub struct Import {
    // Parses the container format and forwards media to the broadcast.
    decoder: hang::import::Decoder,
    // Bytes read from the input that have not been consumed by the decoder yet.
    buffer: BytesMut,
}

impl Import {
    /// Creates an importer that decodes `format` and publishes into `broadcast`.
    ///
    /// Panics if the decoder rejects `format.as_str()`; both `ImportType`
    /// variants are expected to be supported.
    pub fn new(broadcast: BroadcastProducer, format: ImportType) -> Self {
        let decoder = hang::import::Decoder::new(broadcast.into(), format.as_str())
            .expect("supported format");
        Self {
            decoder,
            buffer: BytesMut::new(),
        }
    }
}
impl Import {
    /// Reads and decodes from `input` until the decoder reports it is
    /// initialized, or until EOF (whichever comes first).
    pub async fn init_from<T: AsyncRead + Unpin>(&mut self, input: &mut T) -> anyhow::Result<()> {
        while !self.decoder.is_initialized() && input.read_buf(&mut self.buffer).await? > 0 {
            self.decoder.decode_stream(&mut self.buffer)?;
        }
        Ok(())
    }

    /// Reads and decodes from `input` until EOF, then flushes the decoder.
    pub async fn read_from<T: AsyncRead + Unpin>(&mut self, input: &mut T) -> anyhow::Result<()> {
        while input.read_buf(&mut self.buffer).await? > 0 {
            self.decoder.decode_stream(&mut self.buffer)?;
        }
        // Flush the final frame.
        self.decoder.decode_frame(&mut self.buffer, None)
    }
}
/// Spawns an `ffmpeg` child process that loops `input` forever and writes the
/// requested container `format` to stdout, returned as an async reader.
///
/// Video is stream-copied when the input is already h264 (probed via
/// [`is_h264`]), otherwise transcoded to h264. Requires `ffmpeg` and
/// `ffprobe` on `PATH`.
pub async fn transcode(input: PathBuf, format: ImportType) -> Result<impl AsyncRead> {
    let copy_video = is_h264(&input).await?;
    let mut cmd = Command::new("ffmpeg");
    cmd.args([
        "-hide_banner",
        "-loglevel",
        "error",
        // Loop the input indefinitely, reading at native frame rate.
        "-stream_loop",
        "-1",
        "-re",
        "-i",
    ]);
    cmd.arg(input.as_os_str());
    if copy_video {
        info!("input is h264, copy video");
        cmd.args(["-c:v", "copy"]);
    } else {
        info!("input is not h264, transcode");
        cmd.args(["-c:v", "libx264", "-pix_fmt", "yuv420p"]);
    }
    match format {
        ImportType::Cmaf => {
            cmd.args(["-c:a", "libopus", "-b:a", "128k"]);
            cmd.args([
                "-movflags",
                "cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame",
                "-f",
                "mp4",
            ]);
        }
        ImportType::AnnexB => {
            // Fixed: the original passed the invalid flag pair `-a n` (ffmpeg
            // has no `-a` option; `-an` disables audio) and then appended
            // `-movflags … -f mp4`, which overrode `-f h264` so the output
            // was fragmented mp4 instead of a raw annex-b stream.
            cmd.args(["-an", "-bsf:v", "h264_mp4toannexb", "-f", "h264"]);
        }
    }
    cmd.arg("-");
    let mut child = cmd.stdout(Stdio::piped()).spawn()?;
    // The child handle is dropped here; without kill_on_drop the ffmpeg
    // process keeps running for as long as its stdout pipe is being read.
    let stdout = child.stdout.take().unwrap();
    Ok(stdout)
}
/// Returns whether the first video stream of `input` is h264, by running
/// `ffprobe` and comparing the codec name it prints.
pub async fn is_h264(input: &Path) -> Result<bool> {
    let mut probe = Command::new("ffprobe");
    probe
        .args([
            "-v",
            "error",
            "-select_streams",
            "v:0",
            "-show_entries",
            "stream=codec_name",
            "-of",
            "default=nokey=1:noprint_wrappers=1",
        ])
        .arg(input.as_os_str());
    let output = probe.output().await?;
    let codec = String::from_utf8_lossy(&output.stdout);
    Ok(codec.trim() == "h264")
}

View file

@ -0,0 +1 @@
//! Shared helpers used by the examples.

/// File import (CMAF / annex-b stream → moq broadcast) utilities.
pub mod import;

View file

@ -0,0 +1,95 @@
use clap::Parser;
use iroh::{Endpoint, SecretKey, protocol::Router};
use iroh_live::{
Live,
media::{
audio::AudioBackend,
av::{AudioPreset, VideoCodec, VideoPreset},
capture::CameraCapturer,
ffmpeg::{H264Encoder, OpusEncoder},
publish::{AudioRenditions, PublishBroadcast, VideoRenditions},
},
ticket::LiveTicket,
};
use n0_error::StdResultExt;
/// Publish example: capture microphone and camera, encode them, publish a
/// broadcast named "hello", print connection tickets, and run until ctrl-c.
#[tokio::main]
async fn main() -> n0_error::Result {
    tracing_subscriber::fmt::init();
    let cli = Cli::parse();
    // Setup audio backend.
    let audio_ctx = AudioBackend::new();
    // Setup iroh and iroh-live.
    let endpoint = Endpoint::builder()
        .secret_key(secret_key_from_env()?)
        .bind()
        .await?;
    let live = Live::new(endpoint.clone());
    let router = Router::builder(endpoint)
        .accept(iroh_live::ALPN, live.moq.protocol_handler())
        .spawn();
    // Create a publish broadcast.
    let mut broadcast = PublishBroadcast::new();
    // Capture audio, and encode with the cli-provided preset.
    if !cli.no_audio {
        let mic = audio_ctx.default_input().await?;
        let audio = AudioRenditions::new::<OpusEncoder>(mic, [cli.audio_preset]);
        broadcast.set_audio(Some(audio))?;
    }
    // Capture camera, and encode with the cli-provided presets.
    if !cli.no_video {
        let camera = CameraCapturer::new()?;
        let video = VideoRenditions::new::<H264Encoder>(camera, cli.video_presets);
        broadcast.set_video(Some(video))?;
    }
    // Publish under the name "hello".
    let name = "hello";
    live.publish(name, broadcast.producer()).await?;
    // Create a ticket string and print. The short ticket carries only the
    // endpoint id; the long one includes the endpoint's known addresses.
    let ticket = LiveTicket::new(router.endpoint().id(), name);
    println!("publishing at {ticket}");
    let long_ticket = LiveTicket::new(router.endpoint().addr(), name);
    println!("\nticket with addrs: {long_ticket}");
    // Wait for ctrl-c and then shutdown.
    tokio::signal::ctrl_c().await?;
    live.shutdown();
    router.shutdown().await.std_context("router shutdown")?;
    Ok(())
}
/// CLI options for the publish example.
#[derive(Parser, Debug)]
struct Cli {
    /// Video codec to encode with.
    // NOTE(review): this flag is parsed but never read — `main` hardcodes
    // `H264Encoder`. Wire it up or drop the flag.
    #[arg(long, default_value_t=VideoCodec::H264)]
    codec: VideoCodec,
    /// Comma-separated list of video renditions to encode.
    #[arg(long, value_delimiter=',', default_values_t=[VideoPreset::P180, VideoPreset::P360, VideoPreset::P720, VideoPreset::P1080])]
    video_presets: Vec<VideoPreset>,
    /// Audio encoding preset.
    #[arg(long, default_value_t=AudioPreset::Hq)]
    audio_preset: AudioPreset,
    /// Disable camera capture.
    #[arg(long)]
    no_video: bool,
    /// Disable microphone capture.
    #[arg(long)]
    no_audio: bool,
}
/// Reads the secret key from the `IROH_SECRET` env var; when unset,
/// generates a fresh key and prints it so the run can be reproduced.
fn secret_key_from_env() -> n0_error::Result<SecretKey> {
    if let Ok(hex) = std::env::var("IROH_SECRET") {
        return Ok(hex.parse()?);
    }
    let key = SecretKey::generate(&mut rand::rng());
    println!(
        "Created new secret. Reuse with IROH_SECRET={}",
        data_encoding::HEXLOWER.encode(&key.to_bytes())
    );
    Ok(key)
}

View file

@ -0,0 +1,71 @@
use std::{path::PathBuf, pin::Pin};
use clap::Parser;
use iroh::EndpointId;
use iroh_live::LiveNode;
use moq_lite::BroadcastProducer;
use n0_error::Result;
use tokio::io::AsyncRead;
use tracing::warn;
mod common;
use self::common::import::{Import, ImportType, transcode};
/// CLI options for the hang-import publish example.
#[derive(Debug, Parser)]
struct Cli {
    /// Endpoint to connect and publish to.
    #[clap(short, long)]
    target: EndpointId,
    /// Broadcast path to publish under.
    #[clap(short, long, default_value = "anon/bbb")]
    path: String,
    /// The format of the input media.
    #[clap(long, value_enum, default_value_t = ImportType::Cmaf)]
    format: ImportType,
    /// Input file.
    #[clap(short, long)]
    file: Option<PathBuf>,
    /// Transcode the video with ffmpeg.
    #[clap(long)]
    transcode: bool,
}
/// Imports a media file (or stdin) and publishes it to a single remote
/// endpoint until the input ends or ctrl-c is pressed.
#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt::init();
    let cli = Cli::parse();
    let node = LiveNode::spawn_from_env().await?;
    let session = node.live.connect(cli.target).await?;
    // Pick the input source: file (optionally piped through ffmpeg) or stdin.
    let mut input: Pin<Box<dyn AsyncRead + Send + 'static>> = match (cli.file, cli.transcode) {
        (Some(path), true) => Box::pin(transcode(path.clone(), cli.format).await?),
        (Some(path), false) => Box::pin(tokio::fs::File::open(path).await?),
        (None, false) => Box::pin(tokio::io::stdin()),
        (None, true) => panic!("transcoding stdin is not supported"),
    };
    let broadcast = BroadcastProducer::default();
    session.publish(cli.path, broadcast.consume());
    // Decode the input and feed it into the broadcast.
    let import = async move {
        let mut import = Import::new(broadcast.into(), cli.format);
        import.init_from(&mut input).await?;
        import.read_from(&mut input).await?;
        n0_error::Ok(())
    };
    tokio::pin!(import);
    // Run until the import finishes or the user interrupts.
    tokio::select! {
        res = &mut import => {
            if let Err(err) = res {
                warn!("Import failed: {err:#}");
            }
        }
        _ = tokio::signal::ctrl_c() => {}
    };
    drop(import);
    node.shutdown().await?;
    Ok(())
}

View file

@ -0,0 +1,75 @@
use std::{path::PathBuf, pin::Pin};
use clap::Parser;
use iroh_live::{LiveNode, rooms::RoomTicket};
use moq_lite::BroadcastProducer;
use n0_error::Result;
use tokio::io::AsyncRead;
use tracing::warn;
mod common;
use self::common::import::{Import, ImportType, transcode};
/// CLI options for the room-publish example.
#[derive(Debug, Parser)]
struct Cli {
    /// Room to join. If empty a new room will be created.
    /// Will also be read from the IROH_LIVE_ROOM environment variable.
    #[clap(short, long)]
    room: Option<RoomTicket>,
    /// The format of the input media.
    #[clap(long, value_enum, default_value_t = ImportType::Cmaf)]
    format: ImportType,
    /// Input file. If empty reads from stdin.
    file: Option<PathBuf>,
    /// Transcode the video with ffmpeg.
    #[clap(long)]
    transcode: bool,
}
/// Imports a media file (or stdin) and publishes it into a room under the
/// broadcast name "file", until the input ends or ctrl-c is pressed.
#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt::init();
    let cli = Cli::parse();
    // Use the ticket from the CLI, falling back to the environment.
    let ticket = match cli.room {
        Some(ticket) => ticket,
        None => RoomTicket::new_from_env()?,
    };
    let node = LiveNode::spawn_from_env().await?;
    let room = node.join_room(ticket).await?;
    // Pick the input source: file (optionally piped through ffmpeg) or stdin.
    let mut input: Pin<Box<dyn AsyncRead + Send + 'static>> = match (cli.file, cli.transcode) {
        (Some(path), true) => Box::pin(transcode(path.clone(), cli.format).await?),
        (Some(path), false) => Box::pin(tokio::fs::File::open(path).await?),
        (None, false) => Box::pin(tokio::io::stdin()),
        (None, true) => panic!("transcoding stdin is not supported"),
    };
    let broadcast = BroadcastProducer::default();
    room.publish("file", broadcast.clone()).await?;
    // Decode the input and feed it into the broadcast.
    let import = async move {
        let mut import = Import::new(broadcast.into(), cli.format);
        import.init_from(&mut input).await?;
        import.read_from(&mut input).await?;
        n0_error::Ok(())
    };
    tokio::pin!(import);
    // Run until the import finishes or the user interrupts.
    tokio::select! {
        res = &mut import => {
            if let Err(err) = res {
                warn!("Import failed: {err:#}");
            }
        }
        _ = tokio::signal::ctrl_c() => {}
    };
    drop(import);
    drop(room);
    node.shutdown().await?;
    Ok(())
}

View file

@ -0,0 +1,428 @@
use std::time::Duration;
use clap::Parser;
use eframe::egui::{self, Color32, Id, Vec2};
use iroh::{Endpoint, protocol::Router};
use iroh_gossip::{Gossip, TopicId};
use iroh_live::{
Live,
media::{
audio::AudioBackend,
av::{AudioPreset, VideoPreset},
capture::{CameraCapturer, ScreenCapturer},
ffmpeg::{FfmpegDecoders, FfmpegVideoDecoder, H264Encoder, OpusEncoder, ffmpeg_log_init},
publish::{AudioRenditions, PublishBroadcast, VideoRenditions},
subscribe::{AudioTrack, AvRemoteTrack, SubscribeBroadcast, WatchTrack},
},
moq::MoqSession,
rooms::{Room, RoomEvent, RoomTicket},
util::StatsSmoother,
};
use n0_error::{Result, StdResultExt, anyerr};
use tracing::{info, warn};
const BROADCAST_NAME: &str = "cam";
/// CLI options for the room GUI example.
#[derive(Debug, Parser)]
struct Cli {
    /// Room ticket to join; a new room is created when omitted.
    join: Option<RoomTicket>,
    /// Capture the screen instead of the camera.
    #[clap(long)]
    screen: bool,
    /// Do not capture or publish audio.
    #[clap(long)]
    no_audio: bool,
}
/// Entry point: runs async setup on an explicit tokio runtime, then hands the
/// main thread over to the blocking eframe/egui event loop.
fn main() -> Result<()> {
    tracing_subscriber::fmt::init();
    ffmpeg_log_init();
    let cli = Cli::parse();
    // egui owns the main thread, so networking runs on this runtime.
    let rt = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .unwrap();
    let audio_ctx = AudioBackend::new();
    let (router, broadcast, room) = rt.block_on(setup(cli, audio_ctx.clone()))?;
    // Stay inside the runtime context so the GUI thread can spawn tasks.
    let _guard = rt.enter();
    eframe::run_native(
        "IrohLive",
        eframe::NativeOptions::default(),
        Box::new(|cc| {
            let app = App {
                rt,
                room,
                peers: vec![],
                // Local self-preview; usize::MAX keeps its texture name
                // distinct from the per-peer textures (which use indices).
                self_video: broadcast
                    .watch_local(Default::default())
                    .map(|track| VideoView::new(&cc.egui_ctx, track, usize::MAX)),
                router,
                _broadcast: broadcast,
                audio_ctx,
            };
            Ok(Box::new(app))
        }),
    )
    .map_err(|err| anyerr!("eframe failed: {err:#}"))
}
/// Async setup run before the GUI starts: binds the endpoint, spawns a router
/// accepting gossip + moq, builds the local publish broadcast, and joins
/// (or creates) the room.
async fn setup(cli: Cli, audio_ctx: AudioBackend) -> Result<(Router, PublishBroadcast, Room)> {
    let endpoint = Endpoint::builder()
        .secret_key(secret_key_from_env()?)
        .bind()
        .await?;
    info!(endpoint_id=%endpoint.id(), "endpoint bound");
    let gossip = Gossip::builder().spawn(endpoint.clone());
    let live = Live::new(endpoint.clone());
    let router = Router::builder(endpoint)
        .accept(iroh_gossip::ALPN, gossip.clone())
        .accept(iroh_moq::ALPN, live.protocol_handler())
        .spawn();
    // Publish ourselves.
    let broadcast = {
        let mut broadcast = PublishBroadcast::new();
        if !cli.no_audio {
            let mic = audio_ctx.default_input().await?;
            let audio = AudioRenditions::new::<OpusEncoder>(mic, [AudioPreset::Hq]);
            broadcast.set_audio(Some(audio))?;
        }
        // Screen or camera capture, encoded at all available presets.
        let video = if cli.screen {
            let screen = ScreenCapturer::new()?;
            VideoRenditions::new::<H264Encoder>(screen, VideoPreset::all())
        } else {
            let camera = CameraCapturer::new()?;
            VideoRenditions::new::<H264Encoder>(camera, VideoPreset::all())
        };
        broadcast.set_video(Some(video))?;
        broadcast
    };
    // Join the given room, or create a new one with a topic from the
    // environment (or a freshly generated topic).
    let ticket = match cli.join {
        None => RoomTicket::new(topic_id_from_env()?, vec![]),
        Some(ticket) => ticket,
    };
    let room = Room::new(router.endpoint(), gossip, live, ticket).await?;
    room.publish(BROADCAST_NAME, broadcast.producer()).await?;
    println!("room ticket: {}", room.ticket());
    Ok((router, broadcast, room))
}
/// egui application state for the room example.
struct App {
    room: Room,
    // One view per remote peer we are subscribed to.
    peers: Vec<RemoteTrackView>,
    // Local camera/screen preview, if available.
    self_video: Option<VideoView>,
    router: Router,
    // Held to keep the local capture/encode pipeline alive.
    _broadcast: PublishBroadcast,
    audio_ctx: AudioBackend,
    // Runtime for async work triggered from the GUI thread.
    rt: tokio::runtime::Runtime,
}
impl eframe::App for App {
    /// Per-frame update: drains room events, then paints the peer grid and
    /// the self-preview overlay.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        ctx.request_repaint_after(Duration::from_millis(30)); // min 30 fps
        // Remove closed peers.
        self.peers.retain(|track| !track.is_closed());
        // Add newly subscribed peers.
        while let Ok(event) = self.room.try_recv() {
            match event {
                RoomEvent::RemoteAnnounced { remote, broadcasts } => {
                    info!(
                        "peer announced: {} with broadcasts {broadcasts:?}",
                        remote.fmt_short(),
                    );
                }
                RoomEvent::RemoteConnected { session } => {
                    info!("peer connected: {}", session.conn().remote_id().fmt_short());
                }
                RoomEvent::BroadcastSubscribed { session, broadcast } => {
                    info!(
                        "subscribing to {}:{}",
                        session.remote_id(),
                        broadcast.broadcast_name()
                    );
                    // NOTE(review): block_on on the GUI thread stalls the
                    // frame until the audio output is resolved.
                    let track = match self.rt.block_on(async {
                        let audio_out = self.audio_ctx.default_output().await?;
                        broadcast.watch_and_listen::<FfmpegDecoders>(audio_out, Default::default())
                    }) {
                        Ok(track) => track,
                        Err(err) => {
                            warn!("failed to add track: {err}");
                            continue;
                        }
                    };
                    self.peers
                        .push(RemoteTrackView::new(ctx, session, track, self.peers.len()));
                }
            }
        }
        egui::CentralPanel::default()
            .frame(egui::Frame::new().inner_margin(0.0).outer_margin(0.0))
            .show(ctx, |ui| {
                ui.spacing_mut().item_spacing = egui::vec2(0.0, 0.0);
                show_video_grid(ctx, ui, &mut self.peers);
                // Render video preview of self
                if let Some(self_view) = self.self_video.as_mut() {
                    let size = (200., 200.);
                    egui::Area::new(Id::new("self-video"))
                        .anchor(egui::Align2::RIGHT_BOTTOM, [-10.0, -10.0]) // 10px from the bottom-right edge
                        .order(egui::Order::Foreground)
                        .show(ui.ctx(), |ui| {
                            egui::Frame::new()
                                .fill(egui::Color32::from_rgba_unmultiplied(0, 0, 0, 128))
                                .corner_radius(8.0)
                                .show(ui, |ui| {
                                    ui.set_width(size.0);
                                    ui.set_height(size.1);
                                    ui.add_sized(size, self_view.render_image(ctx, size.into()));
                                });
                        });
                }
            });
    }

    /// Shuts down the router when the window closes.
    fn on_exit(&mut self, _gl: Option<&eframe::glow::Context>) {
        let router = self.router.clone();
        self.rt.block_on(async move {
            if let Err(err) = router.shutdown().await {
                warn!("shutdown error: {err:?}");
            }
        });
    }
}
/// Video tile plus stats overlay for one remote peer.
struct RemoteTrackView {
    // Index into the peer list; used to salt egui ids and texture names.
    id: usize,
    video: Option<VideoView>,
    // Held to keep audio playback alive.
    _audio_track: Option<AudioTrack>,
    session: MoqSession,
    broadcast: SubscribeBroadcast,
    // Smooths connection statistics for the overlay.
    stats: StatsSmoother,
}

impl RemoteTrackView {
    /// Creates the view for a newly subscribed remote track.
    fn new(ctx: &egui::Context, session: MoqSession, track: AvRemoteTrack, id: usize) -> Self {
        Self {
            video: track.video.map(|video| VideoView::new(ctx, video, id)),
            stats: StatsSmoother::new(),
            broadcast: track.broadcast,
            id,
            _audio_track: track.audio,
            session,
        }
    }

    /// True once the peer's connection reports a close reason.
    fn is_closed(&self) -> bool {
        self.session.conn().close_reason().is_some()
    }

    /// Returns the video image widget, or `None` if the peer has no video.
    fn render_image(
        &mut self,
        ctx: &egui::Context,
        available_size: Vec2,
    ) -> Option<egui::Image<'_>> {
        self.video
            .as_mut()
            .map(|video| video.render_image(ctx, available_size))
    }

    /// Draws the stats/rendition overlay anchored to the tile's bottom-left corner.
    fn render_overlay_in_rect(&mut self, ui: &mut egui::Ui, rect: egui::Rect) {
        let pos = rect.left_bottom() + egui::vec2(8.0, -8.0);
        let overlay_id = egui::Id::new(("overlay", self.id));
        egui::Area::new(overlay_id)
            .order(egui::Order::Foreground)
            .fixed_pos(pos)
            .show(ui.ctx(), |ui| {
                egui::Frame::new()
                    .fill(egui::Color32::from_rgba_unmultiplied(0, 0, 0, 128))
                    .corner_radius(3.0)
                    .show(ui, |ui| {
                        ui.spacing_mut().item_spacing = egui::vec2(8.0, 8.0);
                        ui.set_min_width(100.);
                        self.render_overlay(ui);
                    });
            });
    }

    /// Overlay contents: rendition picker plus connection statistics.
    fn render_overlay(&mut self, ui: &mut egui::Ui) {
        ui.vertical(|ui| {
            // Currently watched rendition, if any video is playing.
            let selected = self.video.as_ref().map(|v| v.track.rendition().to_owned());
            egui::ComboBox::from_id_salt(format!("video{}", self.id))
                .selected_text(selected.clone().unwrap_or_default())
                .show_ui(ui, |ui| {
                    for name in self.broadcast.catalog().video_renditions() {
                        if ui
                            .selectable_label(selected.as_deref() == Some(name), name)
                            .clicked()
                        {
                            // Switch to the clicked rendition, reusing the
                            // existing view (and texture) when possible.
                            if let Ok(track) = self
                                .broadcast
                                .watch_rendition::<FfmpegVideoDecoder>(&Default::default(), name)
                            {
                                if let Some(video) = self.video.as_mut() {
                                    video.set_track(track);
                                } else {
                                    self.video = Some(VideoView::new(ui.ctx(), track, self.id))
                                }
                            }
                        }
                    }
                });
            let stats = self.stats.smoothed(|| self.session.conn().stats());
            ui.label(format!(
                "peer: {}",
                self.session.conn().remote_id().fmt_short()
            ));
            ui.label(format!("BW up: {}", stats.up.rate_str));
            ui.label(format!("BW down: {}", stats.down.rate_str));
            ui.label(format!("RTT: {}ms", stats.rtt.as_millis()));
        });
    }
}
/// Renders one video track into an egui texture.
struct VideoView {
    track: WatchTrack,
    // Last viewport size (in points) pushed to the track.
    size: egui::Vec2,
    texture: egui::TextureHandle,
}

impl VideoView {
    /// Creates a view with a black placeholder texture; `id` makes the
    /// texture name unique per peer.
    fn new(ctx: &egui::Context, track: WatchTrack, id: usize) -> Self {
        let texture_name = format!("video-texture-{}", id);
        let size = egui::vec2(100., 100.);
        let color_image =
            egui::ColorImage::filled([size.x as usize, size.y as usize], Color32::BLACK);
        let texture = ctx.load_texture(&texture_name, color_image, egui::TextureOptions::default());
        Self {
            size,
            texture,
            track,
        }
    }

    /// Swaps the underlying track (e.g. after a rendition change), keeping the texture.
    fn set_track(&mut self, track: WatchTrack) {
        self.track = track;
    }

    /// Uploads the newest decoded frame (if any) and returns an image widget.
    fn render_image(&mut self, ctx: &egui::Context, available_size: Vec2) -> egui::Image<'_> {
        // NOTE(review): this `.into()` is a no-op Vec2 → Vec2 conversion,
        // presumably left over from a refactor.
        let available_size = available_size.into();
        if available_size != self.size {
            // Push viewport changes to the track, in physical pixels.
            self.size = available_size;
            let ppp = ctx.pixels_per_point();
            let w = (available_size.x * ppp) as u32;
            let h = (available_size.y * ppp) as u32;
            self.track.set_viewport(w, h);
        }
        if let Some(frame) = self.track.current_frame() {
            let (w, h) = frame.img().dimensions();
            let image = egui::ColorImage::from_rgba_unmultiplied(
                [w as usize, h as usize],
                frame.img().as_raw(),
            );
            self.texture.set(image, Default::default());
        }
        egui::Image::from_texture(&self.texture).shrink_to_fit()
    }
}
/// Show `videos` as squares in a compact auto grid that fills the parent as
/// much as possible without breaking the square aspect ratio.
///
/// Layout: roughly `ceil(sqrt(n))` columns, square side sized to the limiting
/// axis, grid centered in the leftover space.
fn show_video_grid(ctx: &egui::Context, ui: &mut egui::Ui, videos: &mut [RemoteTrackView]) {
    let n = videos.len();
    if n == 0 {
        return;
    }
    // Parent size we're allowed to use.
    let avail = ui.available_size(); // egui docs recommend this for filling containers
    // Choose columns ≈ ceil(sqrt(n)), rows to fit the rest.
    let cols = (n as f32).sqrt().ceil() as usize;
    let rows = n.div_ceil(cols);
    // Side length of each square in points (fill the limiting axis).
    let cell = (avail.x / cols as f32).min(avail.y / rows as f32).floor();
    let cell_size = [cell, cell];
    // Compute the grid's actual footprint.
    let grid_w = cell * cols as f32;
    let grid_h = cell * rows as f32;
    // Center the grid in any leftover space.
    let pad_x = ((avail.x - grid_w) * 0.5).max(0.0);
    let pad_y = ((avail.y - grid_h) * 0.5).max(0.0);
    ui.add_space(pad_y);
    ui.horizontal(|ui| {
        ui.add_space(pad_x);
        egui::Grid::new("image_grid")
            .spacing(Vec2::ZERO) // no gaps; tiles butt together
            .show(ui, |ui| {
                let mut i = 0;
                for _r in 0..rows {
                    for _c in 0..cols {
                        if i < n {
                            // Force exact square size for each image.
                            if let Some(image) = videos[i].render_image(ctx, cell_size.into()) {
                                let response = ui.add_sized(cell_size, image);
                                let rect = response.rect;
                                videos[i].render_overlay_in_rect(ui, rect);
                            } else {
                                // Fixed: a peer without video previously added
                                // no widget at all, collapsing its grid cell.
                                // Reserve the square so the layout stays stable.
                                ui.allocate_exact_size(Vec2::splat(cell), egui::Sense::hover());
                            }
                            i += 1;
                        } else {
                            // Keep the grid rectangular when n isn't a multiple of cols.
                            ui.allocate_exact_size(Vec2::splat(cell), egui::Sense::hover());
                        }
                    }
                    ui.end_row();
                }
            });
    });
}
/// Loads the iroh secret key from the `IROH_SECRET` env var; when unset,
/// generates a fresh key and prints it for reuse in later runs.
fn secret_key_from_env() -> n0_error::Result<iroh::SecretKey> {
    match std::env::var("IROH_SECRET") {
        Ok(hex) => Ok(hex.parse()?),
        Err(_) => {
            let key = iroh::SecretKey::generate(&mut rand::rng());
            println!(
                "Created new secret. Reuse with IROH_SECRET={}",
                data_encoding::HEXLOWER.encode(&key.to_bytes())
            );
            Ok(key)
        }
    }
}
/// Reads the gossip topic from the `IROH_TOPIC` env var (hex-encoded 32-byte
/// id); when unset, generates a random topic and prints it for reuse.
fn topic_id_from_env() -> n0_error::Result<TopicId> {
    Ok(match std::env::var("IROH_TOPIC") {
        Ok(topic) => TopicId::from_bytes(
            data_encoding::HEXLOWER
                .decode(topic.as_bytes())
                .std_context("invalid hex")?
                .as_slice()
                .try_into()
                .std_context("invalid length")?,
        ),
        Err(_) => {
            let topic = TopicId::from_bytes(rand::random());
            println!(
                "Created new topic. Reuse with IROH_TOPIC={}",
                data_encoding::HEXLOWER.encode(topic.as_bytes())
            );
            topic
        }
    })
}

View file

@ -0,0 +1,225 @@
use std::time::Duration;
use clap::Parser;
use eframe::egui::{self, Color32, Id, Vec2};
use iroh::{Endpoint, EndpointId};
use iroh_live::{
Live,
media::{
audio::AudioBackend,
ffmpeg::{FfmpegDecoders, FfmpegVideoDecoder, ffmpeg_log_init},
subscribe::{AudioTrack, SubscribeBroadcast, WatchTrack},
},
moq::MoqSession,
ticket::LiveTicket,
util::StatsSmoother,
};
use n0_error::{Result, anyerr};
use tracing::info;
/// CLI options for the watch example: either a full ticket, or an endpoint id
/// plus a broadcast name.
#[derive(Debug, Parser)]
struct Cli {
    /// Ticket carrying the publisher's endpoint and the broadcast name.
    // Fixed: clap argument IDs default to the Rust field name
    // (`endpoint_id`); the original referenced the non-existent id
    // "endpoint-id", which trips clap's debug assertions at startup.
    #[clap(long, conflicts_with = "endpoint_id")]
    ticket: Option<LiveTicket>,
    /// Endpoint id of the publisher (requires --name).
    #[clap(long, conflicts_with = "ticket", requires = "name")]
    endpoint_id: Option<EndpointId>,
    /// Name of the broadcast to watch (requires --endpoint-id).
    #[clap(long, conflicts_with = "ticket", requires = "endpoint_id")]
    name: Option<String>,
}
/// Entry point: resolves the target, connects and subscribes on a tokio
/// runtime, then hands the main thread over to the eframe/egui event loop.
fn main() -> Result<()> {
    let cli = Cli::parse();
    // Resolve the target from either a full ticket or endpoint id + name.
    let ticket = match (cli.ticket, cli.endpoint_id, cli.name) {
        (Some(ticket), None, None) => ticket,
        (None, Some(endpoint_id), Some(name)) => LiveTicket::new(endpoint_id, name),
        _ => {
            eprintln!("Invalid arguments: Use either --ticket, or --endpoint and --name");
            std::process::exit(1);
        }
    };
    tracing_subscriber::fmt::init();
    ffmpeg_log_init();
    // egui owns the main thread, so networking runs on this runtime.
    let rt = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .unwrap();
    let audio_ctx = AudioBackend::new();
    println!("connecting to {ticket} ...");
    // Connect, subscribe, and start decoding before the window opens.
    let (endpoint, session, track) = rt.block_on({
        let audio_ctx = audio_ctx.clone();
        async move {
            let endpoint = Endpoint::bind().await?;
            let live = Live::new(endpoint.clone());
            let audio_out = audio_ctx.default_output().await?;
            let (session, track) = live
                .watch_and_listen::<FfmpegDecoders>(
                    ticket.endpoint,
                    &ticket.broadcast_name,
                    audio_out,
                    Default::default(),
                )
                .await?;
            println!("connected!");
            n0_error::Ok((endpoint, session, track))
        }
    })?;
    let _guard = rt.enter();
    eframe::run_native(
        "IrohLive",
        eframe::NativeOptions::default(),
        Box::new(|cc| {
            // Close the window on ctrl-c from the terminal.
            let egui_ctx = cc.egui_ctx.clone();
            rt.spawn(async move {
                let _ = tokio::signal::ctrl_c().await;
                egui_ctx.send_viewport_cmd(egui::ViewportCommand::Close);
                // TODO: When the app is not visible, this will not trigger `update` immediately.
                // See https://github.com/emilk/egui/issues/5112
                egui_ctx.request_repaint();
            });
            let app = App {
                video: track.video.map(|video| VideoView::new(&cc.egui_ctx, video)),
                _audio_ctx: audio_ctx,
                _audio: track.audio,
                broadcast: track.broadcast,
                session: session,
                stats: StatsSmoother::new(),
                endpoint,
                rt,
            };
            Ok(Box::new(app))
        }),
    )
    .map_err(|err| anyerr!("eframe failed: {err:#}"))
}
/// egui application state for the watch example.
struct App {
    // Decoded video view, if the broadcast carries video.
    video: Option<VideoView>,
    // Held to keep audio playback alive.
    _audio: Option<AudioTrack>,
    _audio_ctx: AudioBackend,
    endpoint: Endpoint,
    session: MoqSession,
    broadcast: SubscribeBroadcast,
    // Smooths connection statistics for the overlay.
    stats: StatsSmoother,
    // Runtime for async work triggered from the GUI thread.
    rt: tokio::runtime::Runtime,
}
impl eframe::App for App {
    /// Per-frame update: paints the video full-window plus a stats overlay.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        ctx.request_repaint_after(Duration::from_millis(30)); // min 30 fps
        egui::CentralPanel::default()
            .frame(egui::Frame::new().inner_margin(0.0).outer_margin(0.0))
            .show(ctx, |ui| {
                ui.spacing_mut().item_spacing = egui::vec2(0.0, 0.0);
                let avail = ui.available_size();
                if let Some(video) = self.video.as_mut() {
                    ui.add_sized(avail, video.render(ctx, avail));
                }
                // Semi-transparent overlay in the bottom-left corner.
                egui::Area::new(Id::new("overlay"))
                    .anchor(egui::Align2::LEFT_BOTTOM, [8.0, -8.0])
                    .show(ctx, |ui| {
                        egui::Frame::new()
                            .fill(egui::Color32::from_rgba_unmultiplied(0, 0, 0, 128))
                            .corner_radius(3.0)
                            .show(ui, |ui| {
                                ui.spacing_mut().item_spacing = egui::vec2(8.0, 8.0);
                                ui.set_min_width(100.);
                                self.render_overlay(ctx, ui);
                            })
                    })
            });
    }

    /// Tears down the subscription, session, and endpoint on window close.
    fn on_exit(&mut self, _gl: Option<&eframe::glow::Context>) {
        info!("exit");
        self.broadcast.shutdown();
        self.session.close(0, b"bye");
        let endpoint = self.endpoint.clone();
        self.rt.block_on(async move {
            endpoint.close().await;
            info!("endpoint closed");
        });
    }
}
impl App {
    /// Overlay contents: rendition picker plus connection statistics.
    fn render_overlay(&mut self, ctx: &egui::Context, ui: &mut egui::Ui) {
        ui.vertical(|ui| {
            // Currently watched rendition, if video is playing.
            let selected = self
                .video
                .as_ref()
                .map(|video| video.track.rendition().to_owned());
            egui::ComboBox::from_label("")
                .selected_text(selected.clone().unwrap_or_default())
                .show_ui(ui, |ui| {
                    for name in self.broadcast.catalog().video_renditions() {
                        if ui
                            .selectable_label(selected.as_deref() == Some(name), name)
                            .clicked()
                        {
                            // Switch to the clicked rendition by replacing the view.
                            if let Ok(track) = self
                                .broadcast
                                .watch_rendition::<FfmpegVideoDecoder>(&Default::default(), name)
                            {
                                self.video = Some(VideoView::new(ctx, track));
                            }
                        }
                    }
                });
            let stats = self.stats.smoothed(|| self.session.conn().stats());
            ui.label(format!(
                "peer: {}",
                self.session.conn().remote_id().fmt_short()
            ));
            ui.label(format!("BW up: {}", stats.up.rate_str));
            ui.label(format!("BW down: {}", stats.down.rate_str));
            ui.label(format!("RTT: {}ms", stats.rtt.as_millis()));
        });
    }
}
/// Renders the watched video track into an egui texture.
struct VideoView {
    track: WatchTrack,
    texture: egui::TextureHandle,
    // Last viewport size (in points) pushed to the track.
    size: egui::Vec2,
}

impl VideoView {
    /// Creates a view with a black placeholder texture.
    fn new(ctx: &egui::Context, track: WatchTrack) -> Self {
        let size = egui::vec2(100., 100.);
        let color_image =
            egui::ColorImage::filled([size.x as usize, size.y as usize], Color32::BLACK);
        let texture = ctx.load_texture("video", color_image, egui::TextureOptions::default());
        Self {
            size,
            texture,
            track,
        }
    }

    /// Uploads the newest decoded frame (if any) and returns an image widget.
    fn render(&mut self, ctx: &egui::Context, available_size: Vec2) -> egui::Image<'_> {
        // NOTE(review): this `.into()` is a no-op Vec2 → Vec2 conversion,
        // presumably left over from a refactor.
        let available_size = available_size.into();
        if available_size != self.size {
            // Push viewport changes to the track, in physical pixels.
            self.size = available_size;
            let ppp = ctx.pixels_per_point();
            let w = (available_size.x * ppp) as u32;
            let h = (available_size.y * ppp) as u32;
            self.track.set_viewport(w, h);
        }
        if let Some(frame) = self.track.current_frame() {
            let (w, h) = frame.img().dimensions();
            let image = egui::ColorImage::from_rgba_unmultiplied(
                [w as usize, h as usize],
                frame.img().as_raw(),
            );
            self.texture.set(image, Default::default());
        }
        egui::Image::from_texture(&self.texture).shrink_to_fit()
    }
}

View file

@ -0,0 +1,15 @@
//! iroh-live: audio and video live streaming over iroh.

// Connection and publish/subscribe layer on top of iroh-moq.
mod live;
// Convenience node bundling endpoint, router, gossip, and live.
mod node;
pub mod rooms;
pub mod ticket;
pub mod util;

pub use self::live::Live;
pub use self::node::LiveNode;
// Re-export the underlying moq transport and its ALPN.
pub use iroh_moq as moq;
pub use iroh_moq::ALPN;
// Re-export the media catalog and the media pipeline types.
pub use hang::catalog;
pub use moq_media as media;

View file

@ -0,0 +1,62 @@
use iroh::{Endpoint, EndpointAddr};
use iroh_moq::{Moq, MoqProtocolHandler, MoqSession};
use moq_lite::BroadcastProducer;
use moq_media::{
av::{AudioSink, Decoders, PlaybackConfig},
subscribe::{AvRemoteTrack, SubscribeBroadcast},
};
use n0_error::Result;
use tracing::info;
/// Thin, cloneable wrapper around [`Moq`] with publish/subscribe convenience helpers.
#[derive(Clone)]
pub struct Live {
    pub moq: Moq,
}

impl Live {
    /// Creates a new `Live` on top of `endpoint`.
    pub fn new(endpoint: Endpoint) -> Self {
        Self {
            moq: Moq::new(endpoint),
        }
    }

    /// Connects to a remote endpoint and returns the moq session.
    pub async fn connect(&self, remote: impl Into<EndpointAddr>) -> Result<MoqSession> {
        self.moq.connect(remote).await
    }

    /// Connects to `remote` and subscribes to the broadcast named `broadcast_name`.
    pub async fn connect_and_subscribe(
        &self,
        remote: impl Into<EndpointAddr>,
        broadcast_name: &str,
    ) -> Result<(MoqSession, SubscribeBroadcast)> {
        let mut session = self.connect(remote).await?;
        info!(id=%session.conn().remote_id(), "new peer connected");
        let broadcast = session.subscribe(broadcast_name).await?;
        let broadcast = SubscribeBroadcast::new(broadcast_name.to_string(), broadcast).await?;
        Ok((session, broadcast))
    }

    /// Connects, subscribes, and starts watching video and playing audio
    /// (into `audio_out`) for the remote broadcast, decoded with `D`.
    pub async fn watch_and_listen<D: Decoders>(
        &self,
        remote: impl Into<EndpointAddr>,
        broadcast_name: &str,
        audio_out: impl AudioSink,
        config: PlaybackConfig,
    ) -> Result<(MoqSession, AvRemoteTrack)> {
        let (session, broadcast) = self.connect_and_subscribe(remote, &broadcast_name).await?;
        let track = broadcast.watch_and_listen::<D>(audio_out, config)?;
        Ok((session, track))
    }

    /// Protocol handler to register with an iroh router to accept incoming sessions.
    pub fn protocol_handler(&self) -> MoqProtocolHandler {
        self.moq.protocol_handler()
    }

    /// Publishes `producer` under `name` for incoming subscribers.
    pub async fn publish(&self, name: impl ToString, producer: BroadcastProducer) -> Result<()> {
        self.moq.publish(name, producer).await
    }

    /// Shuts down the underlying moq instance.
    pub fn shutdown(&self) {
        self.moq.shutdown();
    }
}

View file

@ -0,0 +1,72 @@
use crate::{
live::Live,
rooms::{Room, RoomTicket},
};
use iroh::{Endpoint, protocol::Router};
use iroh_gossip::Gossip;
use n0_error::{Result, StdResultExt};
use tracing::info;
/// A ready-to-use node: an iroh router accepting gossip and moq, plus the
/// [`Live`] instance bound to the same endpoint.
#[derive(Clone)]
pub struct LiveNode {
    router: Router,
    pub live: Live,
    pub gossip: Gossip,
}

impl LiveNode {
    /// Binds an endpoint (secret key from `IROH_SECRET`, or freshly
    /// generated) and spawns a router accepting gossip and moq connections.
    pub async fn spawn_from_env() -> Result<Self> {
        let endpoint = Endpoint::builder()
            .secret_key(secret_key_from_env()?)
            .bind()
            .await?;
        info!(endpoint_id=%endpoint.id(), "endpoint bound");
        let gossip = Gossip::builder().spawn(endpoint.clone());
        let live = Live::new(endpoint.clone());
        let router = Router::builder(endpoint)
            .accept(iroh_gossip::ALPN, gossip.clone())
            .accept(iroh_moq::ALPN, live.protocol_handler())
            .spawn();
        Ok(Self {
            router,
            gossip,
            live,
        })
    }

    /// Shuts down live, then the router (and with it the endpoint).
    pub async fn shutdown(&self) -> Result<()> {
        self.live.shutdown();
        self.router.shutdown().await.anyerr()
    }

    /// The underlying iroh endpoint.
    pub fn endpoint(&self) -> &Endpoint {
        self.router.endpoint()
    }

    /// Joins the room described by `ticket`.
    pub async fn join_room(&self, ticket: RoomTicket) -> Result<Room> {
        Room::new(
            self.endpoint(),
            self.gossip.clone(),
            self.live.clone(),
            ticket,
        )
        .await
    }
}
/// Reads the secret key from the `IROH_SECRET` env var; when unset, generates
/// a fresh key and prints it so the identity can be reused across runs.
// NOTE(review): the same helper is duplicated verbatim in the examples —
// consider exposing a single shared version.
fn secret_key_from_env() -> n0_error::Result<iroh::SecretKey> {
    Ok(match std::env::var("IROH_SECRET") {
        Ok(key) => key.parse()?,
        Err(_) => {
            let key = iroh::SecretKey::generate(&mut rand::rng());
            println!(
                "Created new secret. Reuse with IROH_SECRET={}",
                data_encoding::HEXLOWER.encode(&key.to_bytes())
            );
            key
        }
    })
}

View file

@ -0,0 +1,391 @@
use std::collections::HashSet;
use std::pin::Pin;
use std::sync::Arc;
use std::time::Duration;
use bytes::Bytes;
use iroh::{Endpoint, EndpointId, SecretKey};
use iroh_gossip::Gossip;
use iroh_moq::MoqSession;
use iroh_smol_kv::{ExpiryConfig, Filter, SignedValue, Subscribe, SubscribeMode, WriteScope};
use moq_lite::BroadcastProducer;
use moq_media::subscribe::SubscribeBroadcast;
use n0_error::{Result, StdResultExt, anyerr};
use n0_future::FuturesUnordered;
use n0_future::{StreamExt, task::AbortOnDropHandle};
use serde::{Deserialize, Serialize};
use tokio::sync::mpsc::{self, error::TryRecvError};
use tracing::{Instrument, debug, error_span, warn};
use crate::Live;
pub use self::publisher::{PublishOpts, RoomPublisherSync, StreamKind};
pub use self::ticket::RoomTicket;
/// Boxed, type-erased future; used for the actor's sets of pending futures.
type BoxFuture<T> = Pin<Box<dyn Future<Output = T> + Send + Sync + 'static>>;
mod publisher;
/// A joined room: a handle for issuing commands plus the stream of room events.
pub struct Room {
handle: RoomHandle,
events: mpsc::Receiver<RoomEvent>,
}
/// Stream of events emitted by the room actor.
pub type RoomEvents = mpsc::Receiver<RoomEvent>;
/// Clonable handle to a room actor.
///
/// The actor task is aborted once the last clone of the handle is dropped.
#[derive(Clone)]
pub struct RoomHandle {
/// Our own endpoint id (inserted as bootstrap into shared tickets).
me: EndpointId,
ticket: RoomTicket,
/// Command channel to the room actor.
tx: mpsc::Sender<ApiMessage>,
_actor_handle: Arc<AbortOnDropHandle<()>>,
}
impl RoomHandle {
    /// Returns a shareable ticket for this room, with the local endpoint as
    /// the only bootstrap node.
    pub fn ticket(&self) -> RoomTicket {
        let mut ticket = self.ticket.clone();
        ticket.bootstrap = vec![self.me];
        ticket
    }
    /// Publishes a broadcast into the room under `name`.
    ///
    /// Errors if the room actor task has stopped.
    pub async fn publish(&self, name: impl ToString, producer: BroadcastProducer) -> Result<()> {
        self.tx
            .send(ApiMessage::Publish {
                name: name.to_string(),
                // Field-init shorthand (was the redundant `producer: producer`).
                producer,
            })
            .await
            .map_err(|_| anyerr!("room actor died"))
    }
}
impl Room {
/// Joins the room described by `ticket`, spawning the room actor task.
pub async fn new(
endpoint: &Endpoint,
gossip: Gossip,
live: Live,
ticket: RoomTicket,
) -> Result<Self> {
let endpoint_id = endpoint.id();
let (actor_tx, actor_rx) = mpsc::channel(16);
let (event_tx, event_rx) = mpsc::channel(16);
let actor = Actor::new(
endpoint.secret_key(),
live,
event_tx,
gossip,
ticket.clone(),
)
.await?;
let actor_task = tokio::task::spawn(
async move { actor.run(actor_rx).await }
.instrument(error_span!("RoomActor", id = ticket.topic_id.fmt_short())),
);
Ok(Self {
handle: RoomHandle {
ticket,
me: endpoint_id,
tx: actor_tx,
// Aborts the actor once the last handle clone is dropped.
_actor_handle: Arc::new(AbortOnDropHandle::new(actor_task)),
},
events: event_rx,
})
}
/// Receives the next room event; errors once the actor has stopped.
pub async fn recv(&mut self) -> Result<RoomEvent> {
self.events.recv().await.std_context("sender stopped")
}
/// Non-blocking variant of [`Self::recv`].
pub fn try_recv(&mut self) -> Result<RoomEvent, TryRecvError> {
self.events.try_recv()
}
/// Returns a shareable ticket for this room (with ourselves as bootstrap).
pub fn ticket(&self) -> RoomTicket {
self.handle.ticket()
}
/// Splits into the event stream and a clonable command handle.
pub fn split(self) -> (RoomEvents, RoomHandle) {
(self.events, self.handle)
}
/// Publishes a broadcast into the room under `name`.
pub async fn publish(&self, name: impl ToString, producer: BroadcastProducer) -> Result<()> {
self.handle.publish(name, producer).await
}
}
/// Commands sent from a [`RoomHandle`] to the room actor.
enum ApiMessage {
/// Publish a broadcast into the room and announce it via the shared kv state.
Publish {
name: String,
producer: BroadcastProducer,
},
}
/// Events emitted by the room actor to the application.
pub enum RoomEvent {
/// A remote peer announced its current list of broadcasts.
RemoteAnnounced {
remote: EndpointId,
broadcasts: Vec<String>,
},
/// A MoQ session to a remote peer was established.
// NOTE(review): this variant is not constructed in this module — confirm it
// is emitted elsewhere, otherwise it may be dead.
RemoteConnected {
session: MoqSession,
},
/// We successfully subscribed to a broadcast announced by a remote peer.
BroadcastSubscribed {
session: MoqSession,
broadcast: SubscribeBroadcast,
},
}
/// Key under which each peer stores its [`PeerState`] in the shared kv store.
const PEER_STATE_KEY: &[u8] = b"s";
/// Per-peer state replicated through the gossip kv store.
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PeerState {
/// Names of the broadcasts this peer currently publishes.
broadcasts: Vec<String>,
}
/// A kv update: author endpoint, entry key, and the signed value.
type KvEntry = (EndpointId, Bytes, SignedValue);
/// Identifies a broadcast within a room: the publishing peer plus the broadcast name.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, derive_more::Display)]
#[display("{}:{}", _0.fmt_short(), _1)]
struct BroadcastId(EndpointId, String);
/// Actor owning all room state; driven by [`Actor::run`].
struct Actor {
/// Our own endpoint id, used to skip our own kv entries.
me: EndpointId,
// Held but unused; presumably keeps the gossip instance alive — TODO confirm.
_gossip: Gossip,
live: Live,
/// Broadcasts we are subscribed to (or currently connecting to).
active_subscribe: HashSet<BroadcastId>,
/// Names of broadcasts we currently publish (announced via the kv store).
active_publish: HashSet<String>,
/// In-flight connect+subscribe attempts to remote broadcasts.
connecting:
FuturesUnordered<BoxFuture<(BroadcastId, Result<(MoqSession, SubscribeBroadcast)>)>>,
/// Each resolves with the id of a subscribed broadcast once it closes.
subscribe_closed: FuturesUnordered<BoxFuture<BroadcastId>>,
/// Each resolves with the name of a published broadcast once it closes.
publish_closed: FuturesUnordered<BoxFuture<String>>,
/// Events to the application.
event_tx: mpsc::Sender<RoomEvent>,
/// Replicated key-value store shared with the other room members.
kv: iroh_smol_kv::Client,
/// Scope for writing our own entries into the kv store.
kv_writer: WriteScope,
}
impl Actor {
/// Creates the actor: joins the gossip topic and sets up the replicated kv store.
async fn new(
me: &SecretKey,
live: Live,
event_tx: mpsc::Sender<RoomEvent>,
gossip: Gossip,
ticket: RoomTicket,
) -> Result<Self> {
let topic = gossip
.subscribe(ticket.topic_id, ticket.bootstrap.clone())
.await?;
// Replicated kv store over the gossip topic. Entries expire (see
// `ExpiryConfig`), so state from departed peers is eventually dropped.
let kv = iroh_smol_kv::Client::local(
topic,
iroh_smol_kv::Config {
anti_entropy_interval: Duration::from_secs(60),
fast_anti_entropy_interval: Duration::from_secs(1),
expiry: Some(ExpiryConfig {
check_interval: Duration::from_secs(10),
horizon: Duration::from_secs(60 * 2),
}),
},
);
// Write scope bound to our secret key for publishing our own entries.
let kv_writer = kv.write(me.clone());
Ok(Self {
me: me.public(),
live,
_gossip: gossip,
active_subscribe: Default::default(),
active_publish: Default::default(),
connecting: Default::default(),
subscribe_closed: Default::default(),
publish_closed: Default::default(),
event_tx,
kv,
kv_writer,
})
}
/// Main loop: reacts to kv updates, API messages, and completed futures.
///
/// Exits once all [`RoomHandle`]s (the senders on `inbox`) are dropped.
pub async fn run(mut self, mut inbox: mpsc::Receiver<ApiMessage>) {
// Observe both existing and future kv entries from all peers.
let updates = self
.kv
.subscribe_with_opts(Subscribe {
mode: SubscribeMode::Both,
filter: Filter::ALL,
})
.stream();
tokio::pin!(updates);
loop {
tokio::select! {
Some(update) = updates.next() => {
match update {
Err(err) => warn!("gossip kv update failed: {err:#}"),
Ok(update) => self.handle_gossip_update(update).await,
}
}
msg = inbox.recv() => {
match msg {
None => break,
Some(msg) => self.handle_api_message(msg).await
}
}
// The `is_empty` guards keep the select from polling the
// FuturesUnordered streams while drained (an empty stream
// would immediately yield `None`).
Some((id, res)) = self.connecting.next(), if !self.connecting.is_empty() => {
match res {
Ok((session, broadcast)) => {
let closed_fut = broadcast.closed();
self.event_tx.send(RoomEvent::BroadcastSubscribed { session, broadcast }).await.ok();
// Track closure so the id is freed and a later
// announcement can trigger a re-subscribe.
self.subscribe_closed.push(Box::pin(async move {
closed_fut.await;
id
}))
}
Err(err) => {
self.active_subscribe.remove(&id);
warn!("Subscribing to broadcast {id} failed: {err:#}");
}
}
}
Some(id) = self.subscribe_closed.next(), if !self.subscribe_closed.is_empty() => {
debug!("broadcast closed: {id}");
self.active_subscribe.remove(&id);
}
Some(name) = self.publish_closed.next(), if !self.publish_closed.is_empty() => {
self.active_publish.remove(&name);
self.update_kv().await;
}
}
}
}
/// Handles a command from a [`RoomHandle`].
async fn handle_api_message(&mut self, msg: ApiMessage) {
match msg {
ApiMessage::Publish { name, producer } => {
let closed = producer.consume().closed();
self.live.publish(name.clone(), producer).await.ok();
self.active_publish.insert(name.clone());
// Drop the broadcast from our announced state once it closes.
self.publish_closed.push(Box::pin(async move {
closed.await;
name
}));
self.update_kv().await;
}
}
}
/// Handles a kv entry from a peer: subscribes to newly announced broadcasts.
async fn handle_gossip_update(&mut self, entry: KvEntry) {
let (remote, key, value) = entry;
// Ignore our own entries and anything that is not peer state.
if remote == self.me || &key != PEER_STATE_KEY {
return;
}
// Silently skip state we cannot decode (e.g. incompatible versions).
let Ok(value) = postcard::from_bytes::<PeerState>(&value.value) else {
return;
};
let PeerState { broadcasts } = value;
for name in broadcasts.clone() {
let id = BroadcastId(remote, name.clone());
// `insert` returns false if we are already subscribed/connecting.
if !self.active_subscribe.insert(id.clone()) {
continue;
}
let live = self.live.clone();
self.connecting.push(Box::pin(async move {
let session = live.connect_and_subscribe(remote, &name).await;
(id, session)
}));
}
self.event_tx
.send(RoomEvent::RemoteAnnounced { remote, broadcasts })
.await
.ok();
}
/// Writes our currently published broadcast names into the shared kv store.
async fn update_kv(&self) {
let state = PeerState {
broadcasts: self.active_publish.iter().cloned().collect(),
};
if let Err(err) = self
.kv_writer
.put(PEER_STATE_KEY, postcard::to_stdvec(&state).unwrap())
.await
{
warn!("failed to update gossip kv: {err:#}");
}
}
}
mod ticket {
    use std::str::FromStr;
    use iroh::EndpointId;
    use iroh_gossip::TopicId;
    use n0_error::{Result, StdResultExt};
    use serde::{Deserialize, Serialize};
    /// Shareable ticket identifying a room: a gossip topic plus bootstrap endpoints.
    #[derive(Debug, Serialize, Deserialize, Clone, derive_more::Display)]
    #[display("{}", iroh_tickets::Ticket::serialize(self))]
    pub struct RoomTicket {
        /// Endpoints to connect to for joining the gossip swarm.
        pub bootstrap: Vec<EndpointId>,
        /// The gossip topic identifying this room.
        pub topic_id: TopicId,
    }
    impl RoomTicket {
        /// Creates a ticket for `topic_id` with the given bootstrap endpoints.
        pub fn new(topic_id: TopicId, bootstrap: impl IntoIterator<Item = EndpointId>) -> Self {
            Self {
                bootstrap: bootstrap.into_iter().collect(),
                topic_id,
            }
        }
        /// Creates a ticket with a fresh random topic and no bootstrap endpoints.
        pub fn generate() -> Self {
            Self {
                bootstrap: vec![],
                topic_id: TopicId::from_bytes(rand::random()),
            }
        }
        /// Builds a ticket from the environment.
        ///
        /// Precedence:
        /// 1. `IROH_LIVE_ROOM`: a full serialized ticket.
        /// 2. `IROH_LIVE_TOPIC`: a hex-encoded topic id (no bootstrap endpoints).
        /// 3. Otherwise a fresh random topic is generated and printed for reuse.
        pub fn new_from_env() -> Result<Self> {
            if let Ok(value) = std::env::var("IROH_LIVE_ROOM") {
                value
                    .parse()
                    .std_context("failed to parse ticket from IROH_LIVE_ROOM environment variable")
            } else {
                let topic_id = match std::env::var("IROH_LIVE_TOPIC") {
                    Ok(topic) => TopicId::from_bytes(
                        data_encoding::HEXLOWER
                            .decode(topic.as_bytes())
                            .std_context("invalid hex")?
                            .as_slice()
                            .try_into()
                            .std_context("invalid length")?,
                    ),
                    Err(_) => {
                        let topic = TopicId::from_bytes(rand::random());
                        // Bug fix: the hint previously said `IROH_TOPIC`, but the
                        // variable actually read above is `IROH_LIVE_TOPIC` — the
                        // printed command must be copy-pasteable.
                        println!(
                            "Created new topic. Reuse with IROH_LIVE_TOPIC={}",
                            data_encoding::HEXLOWER.encode(topic.as_bytes())
                        );
                        topic
                    }
                };
                Ok(Self::new(topic_id, vec![]))
            }
        }
    }
    impl FromStr for RoomTicket {
        type Err = iroh_tickets::ParseError;
        fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
            iroh_tickets::Ticket::deserialize(s)
        }
    }
    impl iroh_tickets::Ticket for RoomTicket {
        const KIND: &'static str = "room";
        fn to_bytes(&self) -> Vec<u8> {
            postcard::to_stdvec(self).unwrap()
        }
        fn from_bytes(bytes: &[u8]) -> Result<Self, iroh_tickets::ParseError> {
            let ticket = postcard::from_bytes(bytes)?;
            Ok(ticket)
        }
    }
}

View file

@ -0,0 +1,199 @@
use std::sync::{Arc, Mutex};
use moq_lite::BroadcastProducer;
use moq_media::{
audio::AudioBackend,
av::{AudioPreset, VideoPreset},
capture::{CameraCapturer, ScreenCapturer},
ffmpeg::{H264Encoder, OpusEncoder},
publish::{AudioRenditions, PublishBroadcast, VideoRenditions},
};
use n0_error::{AnyError, Result};
use tracing::{info, warn};
use crate::rooms::RoomHandle;
/// Well-known broadcast names used when publishing to a room.
///
/// Rendered lowercase via strum (`camera`, `screen`) when passed to `publish`.
#[derive(Debug, strum::Display, strum::EnumString)]
#[strum(serialize_all = "lowercase")]
enum Broadcasts {
Camera,
Screen,
}
/// The kind of local media stream; used to attribute errors in [`RoomPublisherSync::set_state`].
#[derive(Debug)]
pub enum StreamKind {
Camera,
Screen,
Microphone,
}
/// Which local sources should currently be published.
#[derive(Default, Clone, Debug)]
pub struct PublishOpts {
/// Publish the camera video stream.
pub camera: bool,
/// Publish the screen-capture stream.
pub screen: bool,
/// Publish microphone audio (attached to the camera broadcast).
pub audio: bool,
}
/// Manager for publish broadcasts in a room
///
/// Synchronous version which spawns all async ops on new tokio tasks. Panics if methods are
/// not called in the context of a tokio runtime.
///
/// Why does this have sync methods? In UI land it is so much easier for the operations to be sync,
/// so this just spawns all async ops on tokio threads. Not yet sure about where this should evolve to
/// but this kept me moving for now.
pub struct RoomPublisherSync {
/// Backend used to open audio inputs (microphone).
audio_ctx: AudioBackend,
/// Handle to the room the broadcasts are published into.
room: RoomHandle,
/// Camera broadcast (also carries microphone audio); created lazily.
camera: Option<Arc<Mutex<PublishBroadcast>>>,
/// Screen-capture broadcast; created lazily, dropped when disabled.
screen: Option<Arc<Mutex<PublishBroadcast>>>,
/// Currently applied publish state.
state: PublishOpts,
}
impl RoomPublisherSync {
/// Creates a publisher for `room`, using `audio_ctx` to open audio inputs.
pub fn new(room: RoomHandle, audio_ctx: AudioBackend) -> Self {
Self {
room,
audio_ctx,
camera: None,
screen: None,
state: Default::default(),
}
}
/// Applies `state`, toggling each stream as needed.
///
/// Collects per-stream errors instead of aborting on the first failure,
/// so one broken source does not prevent the others from toggling.
pub fn set_state(&mut self, state: &PublishOpts) -> Result<(), Vec<(StreamKind, AnyError)>> {
info!(new=?state, old=?self.state, "set publish state");
let errors = [
self.set_audio(state.audio)
.err()
.map(|e| (StreamKind::Microphone, e)),
self.set_camera(state.camera)
.err()
.map(|e| (StreamKind::Camera, e)),
self.set_screen(state.screen)
.err()
.map(|e| (StreamKind::Screen, e)),
]
.into_iter()
.flatten()
.collect::<Vec<_>>();
if errors.is_empty() {
Ok(())
} else {
Err(errors)
}
}
/// The currently applied publish state.
pub fn state(&self) -> &PublishOpts {
&self.state
}
/// Whether the camera stream is enabled.
pub fn camera(&self) -> bool {
self.state.camera
}
/// The camera broadcast, if it has been created.
pub fn camera_broadcast(&self) -> Option<Arc<Mutex<PublishBroadcast>>> {
self.camera.clone()
}
/// The screen broadcast, if it currently exists.
pub fn screen_broadcast(&self) -> Option<Arc<Mutex<PublishBroadcast>>> {
self.screen.clone()
}
/// Enables or disables the camera video stream.
///
/// Disabling only clears the video renditions; the broadcast stays alive
/// (it may still carry microphone audio, see [`Self::set_audio`]).
pub fn set_camera(&mut self, enable: bool) -> Result<()> {
if self.state.camera != enable {
if enable {
// Open the capturer first so a failure leaves state unchanged.
let camera = CameraCapturer::new()?;
let renditions = VideoRenditions::new::<H264Encoder>(camera, VideoPreset::all());
self.ensure_camera();
self.camera
.as_ref()
.unwrap()
.lock()
.unwrap()
.set_video(Some(renditions))?;
} else if let Some(camera) = self.camera.as_ref() {
camera.lock().unwrap().set_video(None)?;
}
self.state.camera = enable;
}
Ok(())
}
/// Whether the screen stream is enabled.
pub fn screen(&self) -> bool {
self.state.screen
}
/// Creates and publishes the camera broadcast if it does not exist yet.
fn ensure_camera(&mut self) {
if self.camera.is_none() {
let broadcast = PublishBroadcast::new();
self.publish(Broadcasts::Camera, broadcast.producer());
self.camera = Some(Arc::new(Mutex::new(broadcast)));
};
}
/// Publishes `producer` to the room on a background task (fire-and-forget;
/// failures are only logged).
fn publish(&self, name: Broadcasts, producer: BroadcastProducer) {
let room = self.room.clone();
tokio::spawn(async move {
if let Err(err) = room.publish(name, producer).await {
warn!("publish to room failed: {err:#}");
}
});
}
/// Enables or disables screen capture.
///
/// NOTE(review): disabling drops the screen broadcast entirely, unlike
/// `set_camera` which only clears the video renditions — confirm this
/// asymmetry is intended.
pub fn set_screen(&mut self, enable: bool) -> Result<()> {
if self.state.screen != enable {
if enable {
if self.screen.is_none() {
let broadcast = PublishBroadcast::new();
self.publish(Broadcasts::Screen, broadcast.producer());
self.screen = Some(Arc::new(Mutex::new(broadcast)));
};
let screen = ScreenCapturer::new()?;
let renditions = VideoRenditions::new::<H264Encoder>(screen, VideoPreset::all());
self.screen
.as_mut()
.unwrap()
.lock()
.unwrap()
.set_video(Some(renditions))?;
} else {
let _ = self.screen.take();
}
self.state.screen = enable;
}
Ok(())
}
/// Whether microphone audio is enabled.
pub fn audio(&self) -> bool {
self.state.audio
}
/// Enables or disables microphone audio on the camera broadcast.
///
/// Opening the input device is async, so it runs on a spawned task; failures
/// there are only logged, while `state.audio` is flipped immediately
/// (best-effort semantics).
pub fn set_audio(&mut self, enable: bool) -> Result<()> {
if self.state.audio != enable {
if enable {
// Audio rides on the camera broadcast, so make sure it exists.
self.ensure_camera();
let camera = self.camera.as_ref().unwrap().clone();
let audio_ctx = self.audio_ctx.clone();
tokio::spawn(async move {
let mic = match audio_ctx.default_input().await {
Err(err) => {
warn!("failed to open audio input: {err:#}");
return;
}
Ok(mic) => mic,
};
let renditions = AudioRenditions::new::<OpusEncoder>(mic, [AudioPreset::Hq]);
if let Err(err) = camera.lock().unwrap().set_audio(Some(renditions)) {
warn!("failed to set audio: {err:#}");
}
});
} else if let Some(camera) = self.camera.as_mut() {
camera.lock().unwrap().set_audio(None)?;
}
self.state.audio = enable;
}
Ok(())
}
}

View file

@ -0,0 +1,61 @@
use iroh::EndpointAddr;
use n0_error::{Result, StdResultExt};
use serde::{Deserialize, Serialize};
/// Ticket for watching a single broadcast directly from a publisher endpoint.
#[derive(Debug, Clone, PartialEq, Eq, derive_more::Display, Serialize, Deserialize)]
#[display("{}", self.serialize())]
pub struct LiveTicket {
/// Address of the publishing endpoint.
pub endpoint: EndpointAddr,
/// Name of the broadcast at that endpoint.
pub broadcast_name: String,
}
impl LiveTicket {
    /// Creates a ticket for `broadcast_name` published at `endpoint`.
    pub fn new(endpoint: impl Into<EndpointAddr>, broadcast_name: impl ToString) -> Self {
        Self {
            endpoint: endpoint.into(),
            broadcast_name: broadcast_name.to_string(),
        }
    }
    /// Serializes the ticket with postcard.
    pub fn to_bytes(&self) -> Vec<u8> {
        postcard::to_stdvec(self).unwrap()
    }
    /// Deserializes a ticket from postcard bytes.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
        let ticket = postcard::from_bytes(bytes).std_context("failed to deserialize")?;
        Ok(ticket)
    }
    /// Serialize to string as `<broadcast_name>@<base32(endpoint)>`.
    ///
    /// NOTE(review): the whole output is lowercased, so an uppercase
    /// `broadcast_name` does not round-trip through [`Self::deserialize`] —
    /// confirm broadcast names are always lowercase.
    pub fn serialize(&self) -> String {
        let mut out = self.broadcast_name.clone();
        // char pattern instead of 1-char &str (clippy::single_char_add_str).
        out.push('@');
        data_encoding::BASE32_NOPAD
            .encode_append(&postcard::to_stdvec(&self.endpoint).unwrap(), &mut out);
        out.to_ascii_lowercase()
    }
    /// Deserialize from a string produced by [`Self::serialize`].
    pub fn deserialize(str: &str) -> Result<Self> {
        let (broadcast_name, encoded_addr) = str
            .split_once('@')
            .std_context("invalid ticket: missing @")?;
        let endpoint_addr: EndpointAddr = postcard::from_bytes(
            &(data_encoding::BASE32_NOPAD_NOCASE
                .decode(encoded_addr.as_bytes())
                .std_context("invalid base32")?),
        )
        .std_context("failed to parse")?;
        Ok(Self {
            broadcast_name: broadcast_name.to_string(),
            endpoint: endpoint_addr,
        })
    }
}
impl std::str::FromStr for LiveTicket {
type Err = n0_error::AnyError;
// Parses the `<name>@<base32-addr>` format produced by `LiveTicket::serialize`.
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
LiveTicket::deserialize(s)
}
}

View file

@ -0,0 +1,84 @@
use std::time::{Duration, Instant};
use byte_unit::{Bit, UnitType};
use iroh::endpoint::ConnectionStats;
/// Spawn a named OS thread and panic if spawning fails.
///
/// `name` is attached to the thread (visible in panic messages and debuggers).
///
/// # Panics
///
/// Panics (including the OS error) if the thread cannot be spawned.
pub fn spawn_thread<F, T>(name: impl ToString, f: F) -> std::thread::JoinHandle<T>
where
    F: FnOnce() -> T + Send + 'static,
    T: Send + 'static,
{
    let name_str = name.to_string();
    std::thread::Builder::new()
        .name(name_str.clone())
        .spawn(f)
        // `unwrap_or_else` builds the message only on failure (the old
        // `.expect(&format!(..))` allocated it on every call — clippy::expect_fun_call)
        // and now includes the underlying OS error.
        .unwrap_or_else(|err| panic!("failed to spawn thread {name_str}: {err}"))
}
/// Turns cumulative [`ConnectionStats`] counters into rates, refreshed at most once per second.
pub struct StatsSmoother {
rate_up: Rate,
rate_down: Rate,
/// When the rates were last recomputed.
last_update: Instant,
/// Last observed round-trip time.
rtt: Duration,
}
impl StatsSmoother {
pub fn new() -> Self {
Self {
rate_up: Default::default(),
rate_down: Default::default(),
last_update: Instant::now(),
rtt: Duration::from_secs(0),
}
}
pub fn smoothed(&mut self, total: impl FnOnce() -> ConnectionStats) -> SmoothedStats<'_> {
let now = Instant::now();
let elapsed = now.duration_since(self.last_update);
if elapsed >= Duration::from_secs(1) {
let stats = (total)();
self.rate_down.update(elapsed, stats.udp_rx.bytes);
self.rate_up.update(elapsed, stats.udp_tx.bytes);
self.last_update = now;
self.rtt = stats.path.rtt;
}
SmoothedStats {
down: &self.rate_down,
up: &self.rate_up,
rtt: self.rtt,
}
}
}
#[derive(Debug, Clone, Default)]
pub struct Rate {
/// Total bytes
pub total: u64,
/// Rate in bits per second (`update` multiplies the byte delta by 8)
pub rate: f32,
/// Rate rendered as a string
pub rate_str: String,
}
impl Rate {
    /// Folds a new cumulative byte count into the rate, given the time since
    /// the previous update. The rate is expressed in bits per second.
    fn update(&mut self, delta_time: Duration, new_total: u64) {
        let grown = new_total.saturating_sub(self.total);
        let secs = delta_time.as_secs_f32();
        // Rate is zero unless both time passed and bytes flowed.
        let rate = match (grown > 0, secs > 0.0) {
            (true, true) => grown as f32 * 8.0 / secs,
            _ => 0.0,
        };
        let adjusted = Bit::from_f32(rate)
            .unwrap()
            .get_appropriate_unit(UnitType::Decimal);
        self.total = new_total;
        self.rate = rate;
        self.rate_str = format!("{adjusted:.2}/s");
    }
}
/// Borrowed view of smoothed connection statistics.
pub struct SmoothedStats<'a> {
/// Last observed round-trip time.
pub rtt: Duration,
/// Download rate.
pub down: &'a Rate,
/// Upload rate.
pub up: &'a Rate,
}