Advance forge rollout, Ethereum rails, and NBC sources

This commit is contained in:
every.channel 2026-04-01 15:58:49 -07:00
parent be26313225
commit 7d84510eac
No known key found for this signature in database
88 changed files with 11230 additions and 302 deletions

View file

@ -8,3 +8,5 @@ license.workspace = true
serde.workspace = true
blake3.workspace = true
serde_json.workspace = true
hex = "0.4"
sha3 = "0.10"

View file

@ -1,8 +1,14 @@
//! Core types shared across every.channel.
use serde::{Deserialize, Serialize};
use sha3::{Digest, Keccak256};
use std::fmt;
// Algorithm labels carried in wire messages so peers can tell which hash
// family produced a manifest id or Merkle proof.
pub const MANIFEST_ID_ALG_BLAKE3: &str = "blake3";
pub const MANIFEST_ID_ALG_KECCAK256: &str = "keccak256";
pub const MERKLE_PROOF_ALG_BLAKE3: &str = "merkle+blake3";
pub const MERKLE_PROOF_ALG_KECCAK256: &str = "merkle+keccak256";
/// Opaque identifier for a tuner channel (newtype over its string form).
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ChannelId(pub String);
@ -12,6 +18,13 @@ pub struct DeviceId(pub String);
/// Opaque identifier for a published stream (see `StreamKey::to_stream_id`).
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct StreamId(pub String);
/// An on-chain commitment to some every.channel value.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ChainCommitment {
    // Target chain label, e.g. "ethereum".
    pub chain: String,
    // Commitment scheme identifier, e.g. "manifest-body-abi-keccak256-v1".
    pub scheme: String,
    // Hex-encoded digest the scheme produced.
    pub digest: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamDescriptor {
pub id: StreamId,
@ -19,6 +32,8 @@ pub struct StreamDescriptor {
pub number: Option<String>,
pub source: String,
pub metadata: Vec<StreamMetadata>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -32,6 +47,7 @@ pub struct BroadcastId {
pub standard: String,
pub transport_stream_id: Option<u16>,
pub program_number: Option<u16>,
pub virtual_channel: Option<String>,
pub callsign: Option<String>,
pub region: Option<String>,
pub frequency: Option<String>,
@ -54,6 +70,30 @@ pub struct StreamKey {
}
impl StreamKey {
/// Build a key scoped to a broadcast identity when the channel can supply
/// one for `standard`; otherwise scope the key to the raw source id.
pub fn for_channel_or_source(
    channel: Option<&Channel>,
    standard: Option<&str>,
    source: SourceId,
    profile: Option<String>,
    variant: Option<String>,
) -> Self {
    // Only attempt broadcast resolution when both a channel and a
    // transmission standard are available.
    let broadcast = match (channel, standard) {
        (Some(channel), Some(standard)) => channel.broadcast_id(standard),
        _ => None,
    };
    // The source id is kept only as a fallback scope.
    let source = broadcast.is_none().then_some(source);
    Self {
        version: 1,
        broadcast,
        source,
        profile,
        variant,
    }
}
pub fn to_stream_id(&self) -> StreamId {
let mut parts = vec![
"ec".to_string(),
@ -70,6 +110,9 @@ impl StreamKey {
if let Some(program) = broadcast.program_number {
parts.push(format!("program-{program}"));
}
if let Some(channel) = &broadcast.virtual_channel {
parts.push(format!("channel-{}", sanitize(channel)));
}
if let Some(callsign) = &broadcast.callsign {
parts.push(format!("callsign-{}", sanitize(callsign)));
}
@ -132,6 +175,90 @@ pub enum ChannelMetadata {
Extra(String, String),
}
impl BroadcastId {
    /// True when at least one identifying field is populated: a transport
    /// stream id, a program number, or a non-blank virtual channel/callsign.
    pub fn is_usable(&self) -> bool {
        let non_blank =
            |value: &Option<String>| value.as_ref().is_some_and(|text| !text.trim().is_empty());
        self.transport_stream_id.is_some()
            || self.program_number.is_some()
            || non_blank(&self.virtual_channel)
            || non_blank(&self.callsign)
    }
}
impl Channel {
    /// Derive a broadcast-scoped identity for this channel under the given
    /// transmission `standard` (lowercased, e.g. "atsc"), or `None` when no
    /// usable identifying field can be assembled.
    pub fn broadcast_id(&self, standard: &str) -> Option<BroadcastId> {
        let standard = standard.trim().to_ascii_lowercase();
        if standard.is_empty() {
            return None;
        }
        // Callsign: explicit metadata wins; otherwise fall back to the
        // channel's non-blank display name.
        // NOTE(review): a typed Callsign("") yields Some("") from
        // channel_metadata_value and would suppress this fallback — confirm
        // that is intended.
        let callsign = channel_metadata_value(&self.metadata, "callsign").or_else(|| {
            let name = self.name.trim();
            (!name.is_empty()).then(|| name.to_string())
        });
        let region = channel_metadata_value(&self.metadata, "region");
        let frequency = channel_metadata_value(&self.metadata, "frequency");
        // Accept both the long key and the common "tsid" abbreviation.
        let transport_stream_id = channel_metadata_u16(&self.metadata, "transport_stream_id")
            .or_else(|| channel_metadata_u16(&self.metadata, "tsid"));
        // Program number: typed field first, then metadata aliases.
        let program_number = self
            .program_id
            .or_else(|| channel_metadata_u16(&self.metadata, "program_number"))
            .or_else(|| channel_metadata_u16(&self.metadata, "program_id"));
        // Virtual channel is the human-facing number, e.g. "2.1".
        let virtual_channel = self.number.as_ref().and_then(|value| {
            let trimmed = value.trim();
            (!trimmed.is_empty()).then(|| trimmed.to_string())
        });
        let broadcast = BroadcastId {
            standard,
            transport_stream_id,
            program_number,
            virtual_channel,
            callsign,
            region,
            frequency,
        };
        // Only return an id that can actually distinguish a broadcast.
        broadcast.is_usable().then_some(broadcast)
    }
}
/// Look up a metadata value by logical key: typed variants are matched by
/// their fixed key names; `Extra` entries match case-insensitively.
///
/// Typed matches return the trimmed value as-is (possibly empty); `Extra`
/// matches additionally strip surrounding quotes and are skipped when blank.
fn channel_metadata_value(metadata: &[ChannelMetadata], key: &str) -> Option<String> {
    for item in metadata {
        match item {
            ChannelMetadata::Callsign(value) if key == "callsign" => {
                return Some(value.trim().to_string())
            }
            ChannelMetadata::Region(value) if key == "region" => {
                return Some(value.trim().to_string())
            }
            ChannelMetadata::Frequency(value) if key == "frequency" => {
                return Some(value.trim().to_string())
            }
            ChannelMetadata::Network(value) if key == "network" => {
                return Some(value.trim().to_string())
            }
            ChannelMetadata::Extra(extra_key, value) if extra_key.eq_ignore_ascii_case(key) => {
                // Extra values may arrive quoted (e.g. from JSON round-trips).
                let trimmed = value.trim_matches('"').trim().to_string();
                if !trimmed.is_empty() {
                    return Some(trimmed);
                }
            }
            _ => {}
        }
    }
    None
}
/// Metadata lookup parsed as `u16`; unparsable values silently become `None`.
fn channel_metadata_u16(metadata: &[ChannelMetadata], key: &str) -> Option<u16> {
    channel_metadata_value(metadata, key).and_then(|value| value.parse().ok())
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PacketDigest {
pub algorithm: String,
@ -207,6 +334,8 @@ pub struct StreamControlAnnouncement {
pub updated_unix_ms: u64,
/// Suggested freshness window for this announcement.
pub ttl_ms: u64,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -218,6 +347,8 @@ pub struct ManifestSummary {
pub chunk_start_index: u64,
pub encoder_profile_id: String,
pub signed_by: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -268,6 +399,8 @@ pub struct Manifest {
pub body: ManifestBody,
pub manifest_id: String,
pub signatures: Vec<ManifestSignature>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
impl Manifest {
@ -284,6 +417,7 @@ impl Manifest {
.iter()
.map(|sig| sig.signer_id.clone())
.collect(),
commitments: self.commitments.clone(),
}
}
}
@ -307,44 +441,96 @@ impl std::error::Error for ManifestError {}
impl ManifestBody {
    /// Canonical manifest id — delegates to the BLAKE3 variant so existing
    /// ids stay stable.
    pub fn manifest_id(&self) -> Result<String, serde_json::Error> {
        self.manifest_id_blake3()
    }

    /// BLAKE3 hex digest of the serde_json encoding of this body.
    pub fn manifest_id_blake3(&self) -> Result<String, serde_json::Error> {
        let bytes = serde_json::to_vec(self)?;
        Ok(blake3::hash(&bytes).to_hex().to_string())
    }

    /// Keccak-256 hex digest of the serde_json encoding of this body.
    pub fn manifest_id_keccak256(&self) -> Result<String, serde_json::Error> {
        let bytes = serde_json::to_vec(self)?;
        Ok(hex::encode(keccak256(&bytes)))
    }
}
pub fn merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
/// Parse a 32-byte hash from a hex string, tolerating surrounding
/// whitespace and an optional `0x` prefix.
///
/// Returns `ManifestError::InvalidHash` when the input is not valid hex or
/// does not decode to exactly 32 bytes.
fn parse_hash32(value: &str) -> Result<[u8; 32], ManifestError> {
    // Trim once and reuse; the original trimmed twice.
    let trimmed = value.trim();
    let trimmed = trimmed.strip_prefix("0x").unwrap_or(trimmed);
    let bytes = hex::decode(trimmed).map_err(|_| ManifestError::InvalidHash(value.to_string()))?;
    // try_from performs the length check and the copy in one step.
    <[u8; 32]>::try_from(bytes.as_slice())
        .map_err(|_| ManifestError::InvalidHash(value.to_string()))
}
/// Compute the Keccak-256 digest of `bytes` as a fixed 32-byte array.
fn keccak256(bytes: &[u8]) -> [u8; 32] {
    let mut out = [0u8; 32];
    out.copy_from_slice(&Keccak256::digest(bytes));
    out
}
/// BLAKE3 of the concatenation `left ‖ right` — the parent-node rule for
/// the BLAKE3 Merkle trees in this module.
fn blake3_pair_hash(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
    let mut merged = [0u8; 64];
    merged[..32].copy_from_slice(left);
    merged[32..].copy_from_slice(right);
    *blake3::hash(&merged).as_bytes()
}
fn keccak_pair_hash(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left);
merged[32..].copy_from_slice(right);
keccak256(&merged)
}
fn merkle_root_from_hashes_with(
hashes: &[String],
pair_hash: fn(&[u8; 32], &[u8; 32]) -> [u8; 32],
) -> Result<String, ManifestError> {
if hashes.is_empty() {
return Err(ManifestError::Empty);
}
let mut nodes: Vec<blake3::Hash> = Vec::with_capacity(hashes.len());
let mut nodes: Vec<[u8; 32]> = Vec::with_capacity(hashes.len());
for hash in hashes {
let parsed = blake3::Hash::from_hex(hash.as_bytes())
.map_err(|_| ManifestError::InvalidHash(hash.clone()))?;
nodes.push(parsed);
nodes.push(parse_hash32(hash)?);
}
while nodes.len() > 1 {
if nodes.len() % 2 == 1 {
if let Some(last) = nodes.last().cloned() {
if let Some(last) = nodes.last().copied() {
nodes.push(last);
}
}
let mut parents = Vec::with_capacity(nodes.len() / 2);
for pair in nodes.chunks(2) {
let left = pair[0].as_bytes();
let right = pair[1].as_bytes();
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left);
merged[32..].copy_from_slice(right);
parents.push(blake3::hash(&merged));
parents.push(pair_hash(&pair[0], &pair[1]));
}
nodes = parents;
}
Ok(nodes[0].to_hex().to_string())
Ok(hex::encode(nodes[0]))
}
pub fn merkle_proof_for_index(
/// Backwards-compatible default: the BLAKE3 Merkle root.
pub fn merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
    blake3_merkle_root_from_hashes(hashes)
}
/// Merkle root over hex leaves using BLAKE3 pairing.
pub fn blake3_merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
    merkle_root_from_hashes_with(hashes, blake3_pair_hash)
}
/// Merkle root over hex leaves using Keccak-256 pairing (Ethereum-compatible).
pub fn keccak_merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
    merkle_root_from_hashes_with(hashes, keccak_pair_hash)
}
fn merkle_proof_for_index_with(
hashes: &[String],
index: usize,
pair_hash: fn(&[u8; 32], &[u8; 32]) -> [u8; 32],
) -> Result<Vec<String>, ManifestError> {
if hashes.is_empty() {
return Err(ManifestError::Empty);
@ -355,18 +541,16 @@ pub fn merkle_proof_for_index(
)));
}
let mut nodes: Vec<blake3::Hash> = Vec::with_capacity(hashes.len());
let mut nodes: Vec<[u8; 32]> = Vec::with_capacity(hashes.len());
for hash in hashes {
let parsed = blake3::Hash::from_hex(hash.as_bytes())
.map_err(|_| ManifestError::InvalidHash(hash.clone()))?;
nodes.push(parsed);
nodes.push(parse_hash32(hash)?);
}
let mut proof = Vec::new();
let mut pos = index;
while nodes.len() > 1 {
if nodes.len() % 2 == 1 {
if let Some(last) = nodes.last().cloned() {
if let Some(last) = nodes.last().copied() {
nodes.push(last);
}
}
@ -375,16 +559,11 @@ pub fn merkle_proof_for_index(
let sibling = nodes
.get(sibling_index)
.ok_or_else(|| ManifestError::InvalidHash("missing sibling".to_string()))?;
proof.push(sibling.to_hex().to_string());
proof.push(hex::encode(sibling));
let mut parents = Vec::with_capacity(nodes.len() / 2);
for pair in nodes.chunks(2) {
let left = pair[0].as_bytes();
let right = pair[1].as_bytes();
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left);
merged[32..].copy_from_slice(right);
parents.push(blake3::hash(&merged));
parents.push(pair_hash(&pair[0], &pair[1]));
}
nodes = parents;
pos /= 2;
@ -393,17 +572,39 @@ pub fn merkle_proof_for_index(
Ok(proof)
}
pub fn verify_merkle_proof(
/// Backwards-compatible default: the BLAKE3 Merkle proof.
pub fn merkle_proof_for_index(
    hashes: &[String],
    index: usize,
) -> Result<Vec<String>, ManifestError> {
    blake3_merkle_proof_for_index(hashes, index)
}
/// Sibling-hash proof for leaf `index` under BLAKE3 pairing.
pub fn blake3_merkle_proof_for_index(
    hashes: &[String],
    index: usize,
) -> Result<Vec<String>, ManifestError> {
    merkle_proof_for_index_with(hashes, index, blake3_pair_hash)
}
/// Sibling-hash proof for leaf `index` under Keccak-256 pairing.
pub fn keccak_merkle_proof_for_index(
    hashes: &[String],
    index: usize,
) -> Result<Vec<String>, ManifestError> {
    merkle_proof_for_index_with(hashes, index, keccak_pair_hash)
}
fn verify_merkle_proof_with(
leaf_hash: &str,
mut index: usize,
branch: &[String],
expected_root: &str,
pair_hash: fn(&[u8; 32], &[u8; 32]) -> [u8; 32],
) -> bool {
let Ok(mut acc) = blake3::Hash::from_hex(leaf_hash.as_bytes()) else {
let Ok(mut acc) = parse_hash32(leaf_hash) else {
return false;
};
for sibling_hex in branch {
let Ok(sibling) = blake3::Hash::from_hex(sibling_hex.as_bytes()) else {
let Ok(sibling) = parse_hash32(sibling_hex) else {
return false;
};
let (left, right) = if index % 2 == 0 {
@ -411,13 +612,40 @@ pub fn verify_merkle_proof(
} else {
(sibling, acc)
};
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left.as_bytes());
merged[32..].copy_from_slice(right.as_bytes());
acc = blake3::hash(&merged);
acc = pair_hash(&left, &right);
index /= 2;
}
acc.to_hex().to_string() == expected_root
match parse_hash32(expected_root) {
Ok(root) => acc == root,
Err(_) => false,
}
}
/// Backwards-compatible default: verify against the BLAKE3 tree rules.
pub fn verify_merkle_proof(
    leaf_hash: &str,
    index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    verify_blake3_merkle_proof(leaf_hash, index, branch, expected_root)
}
/// Verify a BLAKE3 Merkle proof for `leaf_hash` at `index` against `expected_root`.
pub fn verify_blake3_merkle_proof(
    leaf_hash: &str,
    index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    verify_merkle_proof_with(leaf_hash, index, branch, expected_root, blake3_pair_hash)
}
/// Verify a Keccak-256 Merkle proof for `leaf_hash` at `index` against `expected_root`.
pub fn verify_keccak_merkle_proof(
    leaf_hash: &str,
    index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    verify_merkle_proof_with(leaf_hash, index, branch, expected_root, keccak_pair_hash)
}
#[cfg(test)]
@ -446,11 +674,39 @@ mod tests {
assert_ne!(id1, id2);
}
#[test]
// `manifest_id()` must stay BLAKE3-over-JSON for compatibility, and the
// keccak256 variant must produce a different digest.
fn manifest_id_defaults_to_blake3() {
    let body = ManifestBody {
        stream_id: StreamId("s".to_string()),
        epoch_id: "e".to_string(),
        chunk_duration_ms: 2000,
        total_chunks: 1,
        chunk_start_index: 0,
        encoder_profile_id: "p".to_string(),
        merkle_root: "00".repeat(32),
        created_unix_ms: 1,
        metadata: Vec::new(),
        chunk_hashes: vec!["11".repeat(32)],
        variants: None,
    };
    let bytes = serde_json::to_vec(&body).unwrap();
    assert_eq!(
        body.manifest_id().unwrap(),
        blake3::hash(&bytes).to_hex().to_string()
    );
    assert_ne!(
        body.manifest_id().unwrap(),
        body.manifest_id_keccak256().unwrap()
    );
}
#[test]
// A single-leaf tree's root is the leaf itself, for both hash families.
fn merkle_root_single_is_leaf() {
    let leaf = blake3::hash(b"leaf").to_hex().to_string();
    let root = merkle_root_from_hashes(&[leaf.clone()]).unwrap();
    assert_eq!(root, leaf);
    let keccak_root = keccak_merkle_root_from_hashes(&[leaf.clone()]).unwrap();
    assert_eq!(keccak_root, leaf);
}
#[test]
@ -476,6 +732,23 @@ mod tests {
}
}
#[test]
// Every proof for every leaf must verify, across tree sizes 1..=9 (covers
// odd levels where the tail node is duplicated).
fn keccak_merkle_proof_roundtrip_small_sets() {
    for size in 1..=9usize {
        let leaves = (0..size)
            .map(|i| blake3::hash(&[i as u8]).to_hex().to_string())
            .collect::<Vec<_>>();
        let root = keccak_merkle_root_from_hashes(&leaves).unwrap();
        for idx in 0..size {
            let proof = keccak_merkle_proof_for_index(&leaves, idx).unwrap();
            assert!(
                verify_keccak_merkle_proof(&leaves[idx], idx, &proof, &root),
                "size {size} idx {idx} failed"
            );
        }
    }
}
#[test]
fn merkle_proof_detects_tampering() {
let leaves = (0..4usize)
@ -486,4 +759,90 @@ mod tests {
proof[0] = blake3::hash(b"evil").to_hex().to_string();
assert!(!verify_merkle_proof(&leaves[2], 2, &proof, &root));
}
#[test]
// broadcast_id() must read typed metadata variants, "tsid"/"frequency"
// Extra entries, the typed program_id, and the channel number.
fn channel_broadcast_id_uses_typed_and_extra_metadata() {
    let channel = Channel {
        id: ChannelId("kcbs".to_string()),
        name: "KCBS-HD".to_string(),
        number: Some("2.1".to_string()),
        program_id: Some(3),
        metadata: vec![
            ChannelMetadata::Callsign("KCBS".to_string()),
            ChannelMetadata::Region("los-angeles".to_string()),
            ChannelMetadata::Extra("tsid".to_string(), "42".to_string()),
            ChannelMetadata::Extra("frequency".to_string(), "573000000".to_string()),
        ],
    };
    let broadcast = channel.broadcast_id("ATSC").unwrap();
    assert_eq!(broadcast.standard, "atsc");
    assert_eq!(broadcast.transport_stream_id, Some(42));
    assert_eq!(broadcast.program_number, Some(3));
    assert_eq!(broadcast.virtual_channel.as_deref(), Some("2.1"));
    assert_eq!(broadcast.callsign.as_deref(), Some("KCBS"));
    assert_eq!(broadcast.region.as_deref(), Some("los-angeles"));
    assert_eq!(broadcast.frequency.as_deref(), Some("573000000"));
}
#[test]
// With a usable channel identity, the key is broadcast-scoped and the
// source id is dropped from the derived stream id.
fn stream_key_prefers_broadcast_scope_when_channel_identity_exists() {
    let channel = Channel {
        id: ChannelId("kcbs".to_string()),
        name: "KCBS-HD".to_string(),
        number: Some("2.1".to_string()),
        program_id: None,
        metadata: vec![ChannelMetadata::Callsign("KCBS".to_string())],
    };
    let source = SourceId {
        kind: "hdhr".to_string(),
        device_id: Some("ABCDEF01".to_string()),
        channel: Some("2.1".to_string()),
    };
    let key = StreamKey::for_channel_or_source(
        Some(&channel),
        Some("atsc"),
        source,
        Some("chunk-2000ms".to_string()),
        None,
    );
    assert!(key.broadcast.is_some());
    assert!(key.source.is_none());
    assert_eq!(
        key.to_stream_id().0,
        "ec/stream/v1/broadcast/atsc/channel-2_1/callsign-kcbs/profile-chunk-2000ms"
    );
}
#[test]
// Without any usable channel identity, the key falls back to the raw
// source scope in the derived stream id.
fn stream_key_falls_back_to_source_scope_without_channel_identity() {
    let channel = Channel {
        id: ChannelId("unknown".to_string()),
        name: "".to_string(),
        number: None,
        program_id: None,
        metadata: Vec::new(),
    };
    let source = SourceId {
        kind: "ts".to_string(),
        device_id: None,
        channel: Some("file.ts".to_string()),
    };
    let key = StreamKey::for_channel_or_source(
        Some(&channel),
        Some("atsc"),
        source,
        Some("chunk-2000ms".to_string()),
        None,
    );
    assert!(key.broadcast.is_none());
    assert_eq!(
        key.to_stream_id().0,
        "ec/stream/v1/source/ts/channel-file_ts/profile-chunk-2000ms"
    );
}
}

View file

@ -8,5 +8,8 @@ license.workspace = true
blake3 = "1"
chacha20poly1305 = "0.10"
ed25519-dalek = { version = "2", features = ["pkcs8"] }
ec-eth = { path = "../ec-eth" }
hex = "0.4"
ec-core = { path = "../ec-core" }
k256 = { version = "0.13", features = ["ecdsa"] }
sha3 = "0.10"

View file

@ -1,12 +1,19 @@
//! Cryptographic helpers for every.channel.
use chacha20poly1305::{aead::Aead, KeyInit, XChaCha20Poly1305, XNonce};
use ec_core::ManifestSignature;
use ec_core::{ManifestBody, ManifestSignature};
use ec_eth::{manifest_body_eip712_signing_hash, ETH_MANIFEST_SIG_ALG};
use ed25519_dalek::{Signature, Signer, SigningKey, Verifier, VerifyingKey};
use k256::ecdsa::{
RecoveryId as SecpRecoveryId, Signature as SecpSignature, SigningKey as SecpSigningKey,
VerifyingKey as SecpVerifyingKey,
};
use sha3::{Digest, Keccak256};
use std::env;
use std::fs;
// Signature algorithm label for ed25519 manifest signatures.
pub const MANIFEST_SIG_ALG: &str = "ed25519";
// Env var holding the secp256k1 manifest signing key (inline hex or a path
// to a hex file).
pub const ETH_MANIFEST_SIGNING_KEY_ENV: &str = "EVERY_CHANNEL_ETH_MANIFEST_SIGNING_KEY";
// AEAD algorithm label.
pub const ENCRYPTION_ALG: &str = "xchacha20poly1305";
@ -83,19 +90,29 @@ pub struct ManifestKeypair {
pub verifying_key: VerifyingKey,
}
/// secp256k1 keypair used to sign manifest bodies via EIP-712.
#[derive(Debug, Clone)]
pub struct EthereumManifestKeypair {
    pub signing_key: SecpSigningKey,
    pub verifying_key: SecpVerifyingKey,
}
fn decode_env_hex_or_file(value: &str) -> Result<Vec<u8>, String> {
let trimmed = value.trim();
if std::path::Path::new(trimmed).exists() {
let text = fs::read_to_string(trimmed).map_err(|err| err.to_string())?;
hex::decode(text.trim().trim_start_matches("0x")).map_err(|err| err.to_string())
} else {
hex::decode(trimmed.trim_start_matches("0x")).map_err(|err| err.to_string())
}
}
pub fn load_manifest_keypair_from_env() -> Result<Option<ManifestKeypair>, String> {
let value = match env::var("EVERY_CHANNEL_MANIFEST_SIGNING_KEY") {
Ok(value) => value,
Err(env::VarError::NotPresent) => return Ok(None),
Err(err) => return Err(err.to_string()),
};
let trimmed = value.trim();
let key_bytes = if std::path::Path::new(trimmed).exists() {
let text = fs::read_to_string(trimmed).map_err(|err| err.to_string())?;
hex::decode(text.trim()).map_err(|err| err.to_string())?
} else {
hex::decode(trimmed).map_err(|err| err.to_string())?
};
let key_bytes = decode_env_hex_or_file(&value)?;
let bytes = if key_bytes.len() == 32 {
key_bytes
} else if key_bytes.len() == 64 {
@ -113,10 +130,36 @@ pub fn load_manifest_keypair_from_env() -> Result<Option<ManifestKeypair>, Strin
}))
}
/// Load the EIP-712 manifest signing keypair from the environment.
///
/// Returns `Ok(None)` when the env var is unset; errors on malformed env
/// values, non-32-byte keys, or invalid scalar bytes.
pub fn load_ethereum_manifest_keypair_from_env() -> Result<Option<EthereumManifestKeypair>, String>
{
    let value = match env::var(ETH_MANIFEST_SIGNING_KEY_ENV) {
        Ok(value) => value,
        // Unset is not an error: Ethereum signing is optional.
        Err(env::VarError::NotPresent) => return Ok(None),
        Err(err) => return Err(err.to_string()),
    };
    let key_bytes = decode_env_hex_or_file(&value)?;
    if key_bytes.len() != 32 {
        return Err("ethereum manifest signing key must be exactly 32 hex bytes".to_string());
    }
    let signing_key =
        SecpSigningKey::from_bytes((&key_bytes[..]).into()).map_err(|err| err.to_string())?;
    let verifying_key = *signing_key.verifying_key();
    Ok(Some(EthereumManifestKeypair {
        signing_key,
        verifying_key,
    }))
}
/// Signer id for an ed25519 key: `ed25519:<hex public key>`.
pub fn signer_id_from_key(key: &VerifyingKey) -> String {
    format!("ed25519:{}", hex::encode(key.to_bytes()))
}
/// Signer id for a secp256k1 key: `eth:0x<address>`, where the address is
/// the last 20 bytes of Keccak-256 over the uncompressed public key with
/// its leading 0x04 tag byte removed.
pub fn ethereum_signer_id_from_key(key: &SecpVerifyingKey) -> String {
    let encoded = key.to_encoded_point(false);
    let digest = Keccak256::digest(&encoded.as_bytes()[1..]);
    format!("eth:0x{}", hex::encode(&digest[12..]))
}
pub fn sign_manifest_id(manifest_id: &str, keypair: &ManifestKeypair) -> ManifestSignature {
let signature: Signature = keypair.signing_key.sign(manifest_id.as_bytes());
ManifestSignature {
@ -126,6 +169,25 @@ pub fn sign_manifest_id(manifest_id: &str, keypair: &ManifestKeypair) -> Manifes
}
}
/// Sign the EIP-712 signing hash of `body`, producing a 65-byte recoverable
/// signature (r ‖ s ‖ v) hex-encoded into a `ManifestSignature` whose
/// signer id is the key's derived Ethereum address.
pub fn sign_manifest_body_eip712(
    body: &ManifestBody,
    keypair: &EthereumManifestKeypair,
) -> Result<ManifestSignature, String> {
    let digest = manifest_body_eip712_signing_hash(body).map_err(|err| err.to_string())?;
    let (signature, recovery_id) = keypair
        .signing_key
        .sign_prehash_recoverable(digest.as_slice())
        .map_err(|err| err.to_string())?;
    // 64 signature bytes followed by the 1-byte recovery id.
    let mut bytes = Vec::with_capacity(65);
    bytes.extend_from_slice(&signature.to_bytes());
    bytes.push(recovery_id.to_byte());
    Ok(ManifestSignature {
        signer_id: ethereum_signer_id_from_key(&keypair.verifying_key),
        alg: ETH_MANIFEST_SIG_ALG.to_string(),
        signature: hex::encode(bytes),
    })
}
pub fn verify_manifest_signature(manifest_id: &str, sig: &ManifestSignature) -> bool {
if sig.alg != MANIFEST_SIG_ALG {
return false;
@ -156,6 +218,57 @@ pub fn verify_manifest_signature(manifest_id: &str, sig: &ManifestSignature) ->
.is_ok()
}
/// Canonicalize an Ethereum signer id to `eth:0x<lowercase 40-hex>`.
///
/// Accepts an optional `eth:`/`ETH:` prefix and an optional `0x` prefix;
/// returns `None` when the remainder is not exactly 40 hex digits.
fn normalize_eth_signer_id(value: &str) -> Option<String> {
    let address = value
        .strip_prefix("eth:")
        .or_else(|| value.strip_prefix("ETH:"))
        .unwrap_or(value)
        .trim()
        .trim_start_matches("0x");
    let looks_valid = address.len() == 40 && address.bytes().all(|b| b.is_ascii_hexdigit());
    looks_valid.then(|| format!("eth:0x{}", address.to_ascii_lowercase()))
}
/// Verify a 65-byte recoverable secp256k1 signature over the EIP-712
/// signing hash of `body`.
///
/// The signature passes only when the public key recovered from the
/// prehash maps to the same Ethereum address as the signature's signer id.
/// Any malformed component (alg, signer id, hex, length, recovery id)
/// simply fails verification rather than erroring.
pub fn verify_manifest_body_eip712_signature(body: &ManifestBody, sig: &ManifestSignature) -> bool {
    if sig.alg != ETH_MANIFEST_SIG_ALG {
        return false;
    }
    let Some(expected_signer_id) = normalize_eth_signer_id(&sig.signer_id) else {
        return false;
    };
    let Ok(sig_bytes) = hex::decode(sig.signature.trim().trim_start_matches("0x")) else {
        return false;
    };
    // 64 signature bytes + 1 recovery-id byte.
    if sig_bytes.len() != 65 {
        return false;
    }
    let Ok(signature) = SecpSignature::from_slice(&sig_bytes[..64]) else {
        return false;
    };
    let Ok(recovery_id) = SecpRecoveryId::try_from(sig_bytes[64]) else {
        return false;
    };
    let Ok(digest) = manifest_body_eip712_signing_hash(body) else {
        return false;
    };
    let Ok(verifying_key) =
        SecpVerifyingKey::recover_from_prehash(digest.as_slice(), &signature, recovery_id)
    else {
        return false;
    };
    // Compare derived addresses, both in canonical form.
    ethereum_signer_id_from_key(&verifying_key) == expected_signer_id
}
/// Accept either signature family: ed25519 over the manifest id, or
/// secp256k1 EIP-712 over the manifest body.
pub fn verify_manifest_signature_with_body(
    manifest_id: &str,
    body: &ManifestBody,
    sig: &ManifestSignature,
) -> bool {
    verify_manifest_signature(manifest_id, sig) || verify_manifest_body_eip712_signature(body, sig)
}
#[cfg(test)]
mod tests {
use super::*;
@ -212,6 +325,35 @@ mod tests {
assert!(!verify_manifest_signature("evil", &sig));
}
#[test]
// End-to-end EIP-712 sign/verify over a fixed body; mutating the body must
// invalidate the signature.
fn ethereum_manifest_sign_verify_roundtrip() {
    let body = ManifestBody {
        stream_id: ec_core::StreamId("stream".to_string()),
        epoch_id: "epoch-1".to_string(),
        chunk_duration_ms: 2000,
        total_chunks: 1,
        chunk_start_index: 0,
        encoder_profile_id: "p".to_string(),
        merkle_root: "11".repeat(32),
        created_unix_ms: 1,
        metadata: Vec::new(),
        chunk_hashes: vec!["22".repeat(32)],
        variants: None,
    };
    let secret = [7u8; 32];
    let signing_key = SecpSigningKey::from_bytes((&secret).into()).unwrap();
    let verifying_key = *signing_key.verifying_key();
    let keypair = EthereumManifestKeypair {
        signing_key,
        verifying_key,
    };
    let sig = sign_manifest_body_eip712(&body, &keypair).unwrap();
    assert!(verify_manifest_body_eip712_signature(&body, &sig));
    let mut tampered = body.clone();
    tampered.created_unix_ms = 2;
    assert!(!verify_manifest_body_eip712_signature(&tampered, &sig));
}
#[test]
fn load_keypair_from_env_hex() {
let prev = env::var("EVERY_CHANNEL_MANIFEST_SIGNING_KEY").ok();
@ -224,4 +366,17 @@ mod tests {
None => env::remove_var("EVERY_CHANNEL_MANIFEST_SIGNING_KEY"),
}
}
#[test]
// Loading an inline-hex key from the env var must succeed and yield an
// `eth:0x...` signer id; the previous env value is restored afterwards.
fn load_ethereum_keypair_from_env_hex() {
    let prev = env::var(ETH_MANIFEST_SIGNING_KEY_ENV).ok();
    env::set_var(ETH_MANIFEST_SIGNING_KEY_ENV, "01".repeat(32));
    let loaded = load_ethereum_manifest_keypair_from_env().unwrap().unwrap();
    let id = ethereum_signer_id_from_key(&loaded.verifying_key);
    assert!(id.starts_with("eth:0x"));
    match prev {
        Some(value) => env::set_var(ETH_MANIFEST_SIGNING_KEY_ENV, value),
        None => env::remove_var(ETH_MANIFEST_SIGNING_KEY_ENV),
    }
}
}

12
crates/ec-eth/Cargo.toml Normal file
View file

@ -0,0 +1,12 @@
[package]
name = "ec-eth"
version = "0.0.0"
edition.workspace = true
license.workspace = true
[dependencies]
alloy-primitives = "1.5.7"
alloy-sol-types = "1.5.7"
blake3 = "1"
ec-core = { path = "../ec-core" }
hex = "0.4"

642
crates/ec-eth/src/lib.rs Normal file
View file

@ -0,0 +1,642 @@
//! Ethereum-compatible representations and commitments for every.channel core types.
use alloy_primitives::{keccak256, B256};
use alloy_sol_types::{eip712_domain, sol, Eip712Domain, SolStruct, SolValue};
use ec_core::{
BroadcastId, ChainCommitment, ChunkId, Manifest, ManifestBody, ManifestSignature,
ManifestVariant, SourceId, StreamControlAnnouncement, StreamDescriptor, StreamKey,
StreamMetadata, StreamTransportDescriptor,
};
use std::fmt;
// Chain label used in every ChainCommitment produced by this crate.
pub const ETHEREUM_CHAIN: &str = "ethereum";
// Versioned commitment-scheme identifiers, one per committed structure.
pub const SCHEME_STREAM_ID_KECCAK: &str = "stream-id-keccak256-v1";
pub const SCHEME_STREAM_DESCRIPTOR_ABI: &str = "stream-descriptor-abi-keccak256-v1";
pub const SCHEME_CONTROL_ANNOUNCEMENT_ABI: &str = "control-announcement-abi-keccak256-v1";
pub const SCHEME_MANIFEST_DATA_ROOT: &str = "manifest-data-merkle-keccak256-v1";
pub const SCHEME_MANIFEST_BODY_ABI: &str = "manifest-body-abi-keccak256-v1";
pub const SCHEME_MANIFEST_ENVELOPE_ABI: &str = "manifest-envelope-abi-keccak256-v1";
// Algorithm label for recoverable secp256k1 EIP-712 manifest signatures.
pub const ETH_MANIFEST_SIG_ALG: &str = "secp256k1-eip712-manifest-body-v1";
// ABI-compatible mirrors of the ec-core types, declared via alloy's `sol!`
// macro so they can be `abi_encode`d and EIP-712 hashed identically on- and
// off-chain. Solidity structs have no `Option`, so optional scalars are
// flattened to `has*` flag + value pairs and optional strings to empty
// strings (see the `eth_*` converter functions below).
sol! {
    struct EthStreamMetadata {
        string key;
        string value;
    }
    struct EthBroadcastId {
        string standard;
        bool hasTransportStreamId;
        uint16 transportStreamId;
        bool hasProgramNumber;
        uint16 programNumber;
        string virtualChannel;
        string callsign;
        string region;
        string frequency;
    }
    struct EthSourceId {
        string kind;
        string deviceId;
        string channel;
    }
    struct EthStreamKey {
        uint16 version;
        bool hasBroadcast;
        EthBroadcastId broadcast;
        bool hasSource;
        EthSourceId source;
        string profile;
        string variant;
    }
    struct EthStreamDescriptor {
        string id;
        string title;
        string number;
        string source;
        EthStreamMetadata[] metadata;
    }
    struct EthStreamTransportDescriptor {
        uint8 kind;
        string url;
        string endpoint;
        string broadcastName;
        string trackName;
    }
    struct EthStreamControlAnnouncement {
        EthStreamDescriptor stream;
        EthStreamTransportDescriptor[] transports;
        uint64 updatedUnixMs;
        uint64 ttlMs;
    }
    struct EthChunkId {
        string streamId;
        string epochId;
        uint64 chunkIndex;
        bytes32 chunkHash;
    }
    struct EthManifestVariant {
        string variantId;
        string streamId;
        uint64 chunkStartIndex;
        uint64 totalChunks;
        bytes32 merkleRoot;
        bytes32[] chunkHashes;
        EthStreamMetadata[] metadata;
    }
    struct EthManifestBody {
        string streamId;
        string epochId;
        uint64 chunkDurationMs;
        uint64 totalChunks;
        uint64 chunkStartIndex;
        string encoderProfileId;
        bytes32 merkleRoot;
        uint64 createdUnixMs;
        EthStreamMetadata[] metadata;
        bytes32[] chunkHashes;
        EthManifestVariant[] variants;
    }
    struct EthManifestSignature {
        string signerId;
        string alg;
        bytes signature;
    }
    struct EthManifest {
        EthManifestBody body;
        bytes32 manifestId;
        EthManifestSignature[] signatures;
    }
}
/// Errors produced while building Ethereum commitments.
#[derive(Debug, Clone)]
pub enum EthCommitmentError {
    /// No leaf hashes were supplied for a Merkle computation.
    Empty,
    /// A value was not valid 32-byte hex; carries the offending input.
    InvalidHex(String),
}
impl fmt::Display for EthCommitmentError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
EthCommitmentError::Empty => write!(f, "no hashes supplied"),
EthCommitmentError::InvalidHex(value) => {
write!(f, "invalid 32-byte hex value: {value}")
}
}
}
}
impl std::error::Error for EthCommitmentError {}
/// Wrap a keccak digest in a `ChainCommitment` tagged for Ethereum, with
/// the digest rendered as `0x`-prefixed hex.
fn commitment(scheme: &str, digest: B256) -> ChainCommitment {
    ChainCommitment {
        chain: ETHEREUM_CHAIN.to_string(),
        scheme: scheme.to_string(),
        digest: format!("0x{}", hex::encode(digest)),
    }
}
/// Commit to any ABI-encodable value: keccak256 over its `abi_encode` bytes.
fn abi_commitment<T: SolValue>(scheme: &str, value: &T) -> ChainCommitment {
    commitment(scheme, keccak256(value.abi_encode()))
}
/// Parse a `B256` from a hex string, tolerating surrounding whitespace and
/// an optional `0x` prefix.
///
/// Returns `EthCommitmentError::InvalidHex` when the input is not valid hex
/// or does not decode to exactly 32 bytes.
fn parse_b256(value: &str) -> Result<B256, EthCommitmentError> {
    // Trim once and reuse; the original trimmed twice.
    let trimmed = value.trim();
    let trimmed = trimmed.strip_prefix("0x").unwrap_or(trimmed);
    let bytes =
        hex::decode(trimmed).map_err(|_| EthCommitmentError::InvalidHex(value.to_string()))?;
    // try_from performs the length check and the copy in one step.
    let out = <[u8; 32]>::try_from(bytes.as_slice())
        .map_err(|_| EthCommitmentError::InvalidHex(value.to_string()))?;
    Ok(B256::from(out))
}
/// Best-effort hex decode: strips whitespace and an optional `0x` prefix,
/// and deliberately returns an empty vector for invalid hex.
fn parse_bytes(value: &str) -> Vec<u8> {
    let trimmed = value.trim().strip_prefix("0x").unwrap_or(value.trim());
    hex::decode(trimmed).unwrap_or_default()
}
/// The fixed EIP-712 domain for manifest signing ("every.channel", v1);
/// no chainId/verifyingContract fields are bound.
pub fn manifest_eip712_domain() -> Eip712Domain {
    eip712_domain! {
        name: "every.channel",
        version: "1",
    }
}
/// Clone metadata key/value pairs into their ABI struct form.
fn eth_metadata(items: &[StreamMetadata]) -> Vec<EthStreamMetadata> {
    let mut out = Vec::with_capacity(items.len());
    for item in items {
        out.push(EthStreamMetadata {
            key: item.key.clone(),
            value: item.value.clone(),
        });
    }
    out
}
/// Mirror a `BroadcastId` into its ABI struct. `Option` scalars become a
/// `has*` flag plus the default value; `Option<String>`s become empty strings.
pub fn eth_broadcast_id(value: &BroadcastId) -> EthBroadcastId {
    EthBroadcastId {
        standard: value.standard.clone(),
        hasTransportStreamId: value.transport_stream_id.is_some(),
        transportStreamId: value.transport_stream_id.unwrap_or_default(),
        hasProgramNumber: value.program_number.is_some(),
        programNumber: value.program_number.unwrap_or_default(),
        virtualChannel: value.virtual_channel.clone().unwrap_or_default(),
        callsign: value.callsign.clone().unwrap_or_default(),
        region: value.region.clone().unwrap_or_default(),
        frequency: value.frequency.clone().unwrap_or_default(),
    }
}
/// Mirror a `SourceId` into its ABI struct; optional strings become empty.
pub fn eth_source_id(value: &SourceId) -> EthSourceId {
    EthSourceId {
        kind: value.kind.clone(),
        deviceId: value.device_id.clone().unwrap_or_default(),
        channel: value.channel.clone().unwrap_or_default(),
    }
}
/// Mirror a `StreamKey` into its ABI struct. Absent broadcast/source sides
/// are encoded as `has* = false` with an all-default placeholder struct so
/// the ABI encoding stays fixed-shape.
pub fn eth_stream_key(value: &StreamKey) -> EthStreamKey {
    EthStreamKey {
        version: value.version,
        hasBroadcast: value.broadcast.is_some(),
        broadcast: value
            .broadcast
            .as_ref()
            .map(eth_broadcast_id)
            .unwrap_or_else(|| EthBroadcastId {
                standard: String::new(),
                hasTransportStreamId: false,
                transportStreamId: 0,
                hasProgramNumber: false,
                programNumber: 0,
                virtualChannel: String::new(),
                callsign: String::new(),
                region: String::new(),
                frequency: String::new(),
            }),
        hasSource: value.source.is_some(),
        source: value
            .source
            .as_ref()
            .map(eth_source_id)
            .unwrap_or_else(|| EthSourceId {
                kind: String::new(),
                deviceId: String::new(),
                channel: String::new(),
            }),
        profile: value.profile.clone().unwrap_or_default(),
        variant: value.variant.clone().unwrap_or_default(),
    }
}
/// Mirror a `StreamDescriptor` into its ABI struct.
pub fn eth_stream_descriptor(value: &StreamDescriptor) -> EthStreamDescriptor {
    EthStreamDescriptor {
        id: value.id.0.clone(),
        title: value.title.clone(),
        number: value.number.clone().unwrap_or_default(),
        source: value.source.clone(),
        metadata: eth_metadata(&value.metadata),
    }
}
/// Mirror a transport descriptor into its ABI struct. The enum variant is
/// encoded as `kind` (0 = RelayMoq, 1 = IrohDirect) with the fields that do
/// not apply left as empty strings.
pub fn eth_stream_transport_descriptor(
    value: &StreamTransportDescriptor,
) -> EthStreamTransportDescriptor {
    match value {
        StreamTransportDescriptor::RelayMoq {
            url,
            broadcast_name,
            track_name,
        } => EthStreamTransportDescriptor {
            kind: 0,
            url: url.clone(),
            endpoint: String::new(),
            broadcastName: broadcast_name.clone(),
            trackName: track_name.clone(),
        },
        StreamTransportDescriptor::IrohDirect {
            endpoint,
            broadcast_name,
            track_name,
        } => EthStreamTransportDescriptor {
            kind: 1,
            url: String::new(),
            endpoint: endpoint.clone(),
            broadcastName: broadcast_name.clone(),
            trackName: track_name.clone(),
        },
    }
}
/// Mirror a `StreamControlAnnouncement` into its ABI struct.
pub fn eth_stream_control_announcement(
    value: &StreamControlAnnouncement,
) -> EthStreamControlAnnouncement {
    EthStreamControlAnnouncement {
        stream: eth_stream_descriptor(&value.stream),
        transports: value
            .transports
            .iter()
            .map(eth_stream_transport_descriptor)
            .collect(),
        updatedUnixMs: value.updated_unix_ms,
        ttlMs: value.ttl_ms,
    }
}
/// Mirror a `ChunkId` into its ABI struct; the chunk hash must be valid
/// 32-byte hex or `InvalidHex` is returned.
pub fn eth_chunk_id(value: &ChunkId) -> Result<EthChunkId, EthCommitmentError> {
    Ok(EthChunkId {
        streamId: value.stream_id.0.clone(),
        epochId: value.epoch_id.clone(),
        chunkIndex: value.chunk_index,
        chunkHash: parse_b256(&value.chunk_hash)?,
    })
}
/// Mirror a `ManifestVariant` into its ABI struct, parsing the Merkle root
/// and every chunk hash as 32-byte hex.
pub fn eth_manifest_variant(
    value: &ManifestVariant,
) -> Result<EthManifestVariant, EthCommitmentError> {
    Ok(EthManifestVariant {
        variantId: value.variant_id.clone(),
        streamId: value.stream_id.0.clone(),
        chunkStartIndex: value.chunk_start_index,
        totalChunks: value.total_chunks,
        merkleRoot: parse_b256(&value.merkle_root)?,
        chunkHashes: value
            .chunk_hashes
            .iter()
            .map(|hash| parse_b256(hash))
            .collect::<Result<Vec<_>, _>>()?,
        metadata: eth_metadata(&value.metadata),
    })
}
/// Mirror a `ManifestBody` into its ABI struct.
///
/// An absent `variants` list is encoded as an empty array; the Merkle root
/// and all chunk hashes must be valid 32-byte hex.
pub fn eth_manifest_body(value: &ManifestBody) -> Result<EthManifestBody, EthCommitmentError> {
    // Convert Option<Vec<_>> fallibly, defaulting None to an empty vec.
    let variants = value
        .variants
        .as_ref()
        .map(|variants| {
            variants
                .iter()
                .map(eth_manifest_variant)
                .collect::<Result<Vec<_>, _>>()
        })
        .transpose()?
        .unwrap_or_default();
    Ok(EthManifestBody {
        streamId: value.stream_id.0.clone(),
        epochId: value.epoch_id.clone(),
        chunkDurationMs: value.chunk_duration_ms,
        totalChunks: value.total_chunks,
        chunkStartIndex: value.chunk_start_index,
        encoderProfileId: value.encoder_profile_id.clone(),
        merkleRoot: parse_b256(&value.merkle_root)?,
        createdUnixMs: value.created_unix_ms,
        metadata: eth_metadata(&value.metadata),
        chunkHashes: value
            .chunk_hashes
            .iter()
            .map(|hash| parse_b256(hash))
            .collect::<Result<Vec<_>, _>>()?,
        variants,
    })
}
/// Compute the EIP-712 signing hash for a manifest body under the
/// manifest signing domain.
///
/// # Errors
/// Fails if the body cannot be converted to its ABI form.
pub fn manifest_body_eip712_signing_hash(value: &ManifestBody) -> Result<B256, EthCommitmentError> {
    let body = eth_manifest_body(value)?;
    Ok(body.eip712_signing_hash(&manifest_eip712_domain()))
}
/// Convert a [`ManifestSignature`] into its ABI form.
///
/// Signer id and algorithm strings are carried through unchanged; the
/// signature text is decoded via `parse_bytes`.
pub fn eth_manifest_signature(value: &ManifestSignature) -> EthManifestSignature {
    let signature = parse_bytes(&value.signature).into();
    EthManifestSignature {
        signerId: value.signer_id.clone(),
        alg: value.alg.clone(),
        signature,
    }
}
/// Convert a full [`Manifest`] (body, id, signatures) into its ABI form.
///
/// # Errors
/// Fails if the manifest id or any hash inside the body is invalid hex.
pub fn eth_manifest(value: &Manifest) -> Result<EthManifest, EthCommitmentError> {
    let signatures = value
        .signatures
        .iter()
        .map(eth_manifest_signature)
        .collect();
    Ok(EthManifest {
        body: eth_manifest_body(&value.body)?,
        manifestId: parse_b256(&value.manifest_id)?,
        signatures,
    })
}
/// Fold a list of 32-byte leaves into a keccak256 merkle root.
///
/// Odd-length levels duplicate their last node so every parent has two
/// children; each parent is keccak256(left || right).
///
/// # Errors
/// Returns [`EthCommitmentError::Empty`] when there are no leaves.
fn keccak_merkle_root(leaves: &[B256]) -> Result<B256, EthCommitmentError> {
    if leaves.is_empty() {
        return Err(EthCommitmentError::Empty);
    }
    let mut level = leaves.to_vec();
    while level.len() > 1 {
        // Pad odd levels by repeating the final node.
        if level.len() % 2 != 0 {
            let last = level[level.len() - 1];
            level.push(last);
        }
        level = level
            .chunks(2)
            .map(|pair| {
                let mut merged = [0u8; 64];
                merged[..32].copy_from_slice(pair[0].as_slice());
                merged[32..].copy_from_slice(pair[1].as_slice());
                keccak256(merged)
            })
            .collect();
    }
    Ok(level[0])
}
/// Parse hex chunk hashes and return their keccak merkle root as a
/// 0x-prefixed hex string.
///
/// # Errors
/// Fails on an empty list or any malformed hash string.
pub fn ethereum_merkle_root_from_hashes(hashes: &[String]) -> Result<String, EthCommitmentError> {
    let mut leaves = Vec::with_capacity(hashes.len());
    for hash in hashes {
        leaves.push(parse_b256(hash)?);
    }
    let root = keccak_merkle_root(&leaves)?;
    Ok(format!("0x{}", hex::encode(root)))
}
/// Build a keccak merkle inclusion proof (bottom-up sibling list) for the
/// leaf at `index`.
///
/// Odd-length levels duplicate their last node, mirroring
/// `keccak_merkle_root`, so the resulting branch verifies against the
/// same root.
///
/// # Errors
/// Fails on an empty hash list, an out-of-range index, or malformed hex.
pub fn ethereum_merkle_proof_for_index(
    hashes: &[String],
    index: usize,
) -> Result<Vec<String>, EthCommitmentError> {
    if hashes.is_empty() {
        return Err(EthCommitmentError::Empty);
    }
    if index >= hashes.len() {
        return Err(EthCommitmentError::InvalidHex(format!(
            "index {index} out of bounds"
        )));
    }
    let mut level = hashes
        .iter()
        .map(|hash| parse_b256(hash))
        .collect::<Result<Vec<_>, _>>()?;
    let mut branch = Vec::new();
    let mut pos = index;
    while level.len() > 1 {
        // Pad odd levels so every node has a sibling to pair with.
        if level.len() % 2 != 0 {
            let last = level[level.len() - 1];
            level.push(last);
        }
        let sibling = if pos % 2 == 0 {
            level[pos + 1]
        } else {
            level[pos - 1]
        };
        branch.push(format!("0x{}", hex::encode(sibling)));
        level = level
            .chunks(2)
            .map(|pair| {
                let mut merged = [0u8; 64];
                merged[..32].copy_from_slice(pair[0].as_slice());
                merged[32..].copy_from_slice(pair[1].as_slice());
                keccak256(merged)
            })
            .collect();
        pos /= 2;
    }
    Ok(branch)
}
/// Verify a keccak merkle inclusion proof produced by
/// `ethereum_merkle_proof_for_index`.
///
/// Walks the branch bottom-up, ordering each pair by the current
/// position's parity, and compares the final accumulator with
/// `expected_root`. Any unparsable hex yields `false` rather than an
/// error.
pub fn verify_ethereum_merkle_proof(
    leaf_hash: &str,
    mut index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    let mut acc = match parse_b256(leaf_hash) {
        Ok(value) => value,
        Err(_) => return false,
    };
    for sibling_hex in branch {
        let sibling = match parse_b256(sibling_hex) {
            Ok(value) => value,
            Err(_) => return false,
        };
        // Even positions sit on the left of their pair, odd on the right.
        let (left, right) = if index % 2 == 0 {
            (acc, sibling)
        } else {
            (sibling, acc)
        };
        let mut merged = [0u8; 64];
        merged[..32].copy_from_slice(left.as_slice());
        merged[32..].copy_from_slice(right.as_slice());
        acc = keccak256(merged);
        index /= 2;
    }
    match parse_b256(expected_root) {
        Ok(root) => root == acc,
        Err(_) => false,
    }
}
/// Commit to a raw stream id string via keccak256 of its UTF-8 bytes.
pub fn stream_id_commitment(stream_id: &str) -> ChainCommitment {
    let digest = keccak256(stream_id.as_bytes());
    commitment(SCHEME_STREAM_ID_KECCAK, digest)
}
/// Commit to a broadcast identity via its ABI encoding under the
/// versioned `broadcast-id-abi-keccak256-v1` scheme.
pub fn broadcast_id_commitment(value: &BroadcastId) -> ChainCommitment {
    let encoded = eth_broadcast_id(value);
    abi_commitment("broadcast-id-abi-keccak256-v1", &encoded)
}
/// Commit to a stream key via its ABI encoding under the versioned
/// `stream-key-abi-keccak256-v1` scheme.
pub fn stream_key_commitment(value: &StreamKey) -> ChainCommitment {
    let encoded = eth_stream_key(value);
    abi_commitment("stream-key-abi-keccak256-v1", &encoded)
}
/// Produce both commitments for a stream descriptor: a keccak of the raw
/// stream id first, then an ABI hash of the full descriptor.
pub fn stream_descriptor_commitments(value: &StreamDescriptor) -> Vec<ChainCommitment> {
    let id_commitment = stream_id_commitment(&value.id.0);
    let descriptor_commitment =
        abi_commitment(SCHEME_STREAM_DESCRIPTOR_ABI, &eth_stream_descriptor(value));
    vec![id_commitment, descriptor_commitment]
}
/// Produce the single ABI commitment for a control announcement.
pub fn control_announcement_commitments(value: &StreamControlAnnouncement) -> Vec<ChainCommitment> {
    let encoded = eth_stream_control_announcement(value);
    vec![abi_commitment(SCHEME_CONTROL_ANNOUNCEMENT_ABI, &encoded)]
}
/// Build the data-root commitment for a manifest body.
///
/// With non-empty variants the root is the keccak merkle root over the
/// per-variant merkle roots; otherwise it is the root over the body's own
/// chunk hashes.
fn manifest_data_root_commitment(
    body: &ManifestBody,
) -> Result<ChainCommitment, EthCommitmentError> {
    let variant_roots = body
        .variants
        .as_ref()
        .filter(|variants| !variants.is_empty())
        .map(|variants| {
            variants
                .iter()
                .map(|variant| variant.merkle_root.clone())
                .collect::<Vec<_>>()
        });
    let root = match variant_roots {
        Some(roots) => ethereum_merkle_root_from_hashes(&roots)?,
        None => ethereum_merkle_root_from_hashes(&body.chunk_hashes)?,
    };
    Ok(ChainCommitment {
        chain: ETHEREUM_CHAIN.to_string(),
        scheme: SCHEME_MANIFEST_DATA_ROOT.to_string(),
        digest: root,
    })
}
/// Compute the full set of Ethereum commitments for a manifest, in a
/// fixed order: data root, ABI hash of the body, ABI hash of the signed
/// envelope.
///
/// # Errors
/// Fails if any hash inside the manifest is invalid hex.
pub fn manifest_commitments(value: &Manifest) -> Result<Vec<ChainCommitment>, EthCommitmentError> {
    let body_commitment =
        abi_commitment(SCHEME_MANIFEST_BODY_ABI, &eth_manifest_body(&value.body)?);
    let envelope_commitment = abi_commitment(SCHEME_MANIFEST_ENVELOPE_ABI, &eth_manifest(value)?);
    let data_root = manifest_data_root_commitment(&value.body)?;
    Ok(vec![data_root, body_commitment, envelope_commitment])
}
/// Check that every Ethereum commitment carried by the manifest matches a
/// freshly recomputed one.
///
/// A manifest with no Ethereum commitments vacuously matches (and skips
/// the recompute entirely); commitments for other chains are ignored.
pub fn manifest_commitments_match(value: &Manifest) -> Result<bool, EthCommitmentError> {
    let mut ethereum_commitments = value
        .commitments
        .iter()
        .filter(|item| item.chain == ETHEREUM_CHAIN)
        .peekable();
    if ethereum_commitments.peek().is_none() {
        return Ok(true);
    }
    let expected = manifest_commitments(value)?;
    Ok(ethereum_commitments.all(|actual| expected.contains(actual)))
}
// Unit tests for keccak merkle proofs, commitment stability, and EIP-712
// hash determinism.
#[cfg(test)]
mod tests {
    use super::*;
    use ec_core::{ManifestBody, StreamId};

    /// Build a small two-chunk manifest body whose blake3 merkle root is
    /// computed the same way production code does.
    fn sample_body() -> ManifestBody {
        let chunk_hashes = vec![
            blake3::hash(b"chunk0").to_hex().to_string(),
            blake3::hash(b"chunk1").to_hex().to_string(),
        ];
        let merkle_root = ec_core::merkle_root_from_hashes(&chunk_hashes).unwrap();
        ManifestBody {
            stream_id: StreamId("ec/stream/v1/broadcast/atsc/tsid-42/program-3".to_string()),
            epoch_id: "epoch-1".to_string(),
            chunk_duration_ms: 2000,
            total_chunks: 2,
            chunk_start_index: 10,
            encoder_profile_id: "deterministic-h264-aac".to_string(),
            merkle_root,
            created_unix_ms: 1234,
            metadata: vec![StreamMetadata {
                key: "callsign".to_string(),
                value: "KCBS".to_string(),
            }],
            chunk_hashes,
            variants: None,
        }
    }

    /// A proof generated for a leaf must verify against the recomputed root.
    #[test]
    fn keccak_merkle_root_and_proof_roundtrip() {
        let body = sample_body();
        let root = ethereum_merkle_root_from_hashes(&body.chunk_hashes).unwrap();
        let proof = ethereum_merkle_proof_for_index(&body.chunk_hashes, 1).unwrap();
        assert!(verify_ethereum_merkle_proof(
            &body.chunk_hashes[1],
            1,
            &proof,
            &root
        ));
    }

    /// Recomputed commitments must equal the ones stored on the manifest,
    /// and tampering with any digest must be detected.
    #[test]
    fn manifest_commitments_are_stable_and_match_present_values() {
        let body = sample_body();
        let manifest_id = body.manifest_id().unwrap();
        let mut manifest = Manifest {
            body,
            manifest_id,
            signatures: Vec::new(),
            commitments: Vec::new(),
        };
        let commitments = manifest_commitments(&manifest).unwrap();
        // Data root + body ABI hash + envelope ABI hash.
        assert_eq!(commitments.len(), 3);
        manifest.commitments = commitments.clone();
        assert!(manifest_commitments_match(&manifest).unwrap());
        manifest.commitments[0].digest = "0xdeadbeef".to_string();
        assert!(!manifest_commitments_match(&manifest).unwrap());
    }

    /// The EIP-712 signing hash must be deterministic for identical bodies.
    #[test]
    fn manifest_body_eip712_hash_is_stable() {
        let body = sample_body();
        let h1 = manifest_body_eip712_signing_hash(&body).unwrap();
        let h2 = manifest_body_eip712_signing_hash(&body).unwrap();
        assert_eq!(h1, h2);
    }

    /// Descriptor commitments come in a fixed order: stream-id keccak
    /// first, full-descriptor ABI hash second.
    #[test]
    fn stream_descriptor_commitments_include_stream_id_and_descriptor_hashes() {
        let descriptor = StreamDescriptor {
            id: StreamId("ec/stream/v1/source/test/device-a/channel-b".to_string()),
            title: "Test".to_string(),
            number: Some("2.1".to_string()),
            source: "control".to_string(),
            metadata: vec![StreamMetadata {
                key: "broadcast".to_string(),
                value: "la-nbc".to_string(),
            }],
            commitments: Vec::new(),
        };
        let commitments = stream_descriptor_commitments(&descriptor);
        assert_eq!(commitments.len(), 2);
        assert_eq!(commitments[0].scheme, SCHEME_STREAM_ID_KECCAK);
        assert_eq!(commitments[1].scheme, SCHEME_STREAM_DESCRIPTOR_ABI);
    }
}

View file

@ -555,9 +555,39 @@ fn lineup_from_json_value(json: &Value, device_id: Option<&DeviceId>) -> Result<
));
}
if let Some(callsign) = json_string_field(entry, &["CallSign", "Callsign", "CallSignRaw"]) {
metadata.push(ChannelMetadata::Callsign(callsign));
}
if let Some(network) = json_string_field(entry, &["Network"]) {
metadata.push(ChannelMetadata::Network(network));
}
if let Some(region) = json_string_field(entry, &["Region", "Market"]) {
metadata.push(ChannelMetadata::Region(region));
}
if let Some(frequency) = json_string_field(entry, &["Frequency", "FrequencyHz"]) {
metadata.push(ChannelMetadata::Frequency(frequency));
}
let program_id = json_u16_field(entry, &["ProgramNumber", "ProgramID", "Program"]);
if let Some(obj) = entry.as_object() {
for (key, value) in obj.iter() {
if key == "GuideNumber" || key == "GuideName" || key == "Tags" || key == "URL" {
if key == "GuideNumber"
|| key == "GuideName"
|| key == "Tags"
|| key == "URL"
|| key == "CallSign"
|| key == "Callsign"
|| key == "CallSignRaw"
|| key == "Network"
|| key == "Region"
|| key == "Market"
|| key == "Frequency"
|| key == "FrequencyHz"
|| key == "ProgramNumber"
|| key == "ProgramID"
|| key == "Program"
{
continue;
}
metadata.push(ChannelMetadata::Extra(key.clone(), value.to_string()));
@ -568,7 +598,7 @@ fn lineup_from_json_value(json: &Value, device_id: Option<&DeviceId>) -> Result<
id,
name: guide_name,
number: parsed.guide_number,
program_id: None,
program_id,
metadata,
};
@ -583,6 +613,44 @@ fn lineup_from_json_value(json: &Value, device_id: Option<&DeviceId>) -> Result<
Ok(output)
}
fn json_string_field(value: &Value, keys: &[&str]) -> Option<String> {
let obj = value.as_object()?;
for key in keys {
let Some(value) = obj.get(*key) else {
continue;
};
let text = match value {
Value::String(text) => text.trim().to_string(),
Value::Number(number) => number.to_string(),
_ => continue,
};
if !text.is_empty() {
return Some(text);
}
}
None
}
/// Fetch the first value among `keys` that parses as a `u16`.
///
/// JSON numbers must fit in `u16`; string values are trimmed and parsed.
/// A key whose value exists but does not parse is skipped rather than
/// treated as an error.
fn json_u16_field(value: &Value, keys: &[&str]) -> Option<u16> {
    let obj = value.as_object()?;
    keys.iter()
        .filter_map(|key| obj.get(*key))
        .find_map(|field| match field {
            Value::Number(number) => number.as_u64().and_then(|raw| u16::try_from(raw).ok()),
            Value::String(text) => text.trim().parse::<u16>().ok(),
            _ => None,
        })
}
#[cfg(test)]
mod tests {
use super::*;
@ -648,6 +716,9 @@ mod tests {
"GuideName": "KCBS-HD",
"Tags": "drm,encrypted,",
"URL": "http://hdhr/auto/v2.1",
"CallSign": "KCBS",
"ProgramNumber": "3",
"Frequency": "573000000",
"Foo": "Bar"
},
{
@ -662,8 +733,17 @@ mod tests {
assert_eq!(entries[0].channel.id.0, "hdhr:ABCDEF01:2.1");
assert_eq!(entries[0].channel.name, "KCBS-HD");
assert_eq!(entries[0].channel.number.as_deref(), Some("2.1"));
assert_eq!(entries[0].channel.program_id, Some(3));
assert_eq!(entries[0].stream_url, "http://hdhr/auto/v2.1");
assert!(entries[0].tags.iter().any(|t| t == "drm"));
assert!(entries[0].channel.metadata.iter().any(|m| match m {
ChannelMetadata::Callsign(value) => value == "KCBS",
_ => false,
}));
assert!(entries[0].channel.metadata.iter().any(|m| match m {
ChannelMetadata::Frequency(value) => value == "573000000",
_ => false,
}));
assert!(entries[0].channel.metadata.iter().any(|m| match m {
ChannelMetadata::Extra(key, value) => key == "guide_number" && value == "2.1",
_ => false,

View file

@ -730,6 +730,7 @@ mod tests {
},
manifest_id: "m".to_string(),
signatures: Vec::new(),
commitments: Vec::new(),
};
let bytes = encode_manifest_frame(&manifest).unwrap();
let decoded = decode_manifest_frame(&bytes).unwrap();
@ -768,6 +769,7 @@ mod tests {
body,
manifest_id: manifest_id.clone(),
signatures: vec![sig],
commitments: Vec::new(),
};
let bytes = encode_manifest_frame(&manifest).unwrap();
let decoded = decode_manifest_frame(&bytes).unwrap();

View file

@ -13,9 +13,11 @@ ec-crypto = { path = "../ec-crypto" }
ec-direct = { path = "../ec-direct" }
ec-moq = { path = "../ec-moq" }
ec-chopper = { path = "../ec-chopper" }
ec-eth = { path = "../ec-eth" }
ec-hdhomerun = { path = "../ec-hdhomerun" }
ec-iroh = { path = "../ec-iroh" }
ec-linux-iptv = { path = "../ec-linux-iptv" }
ec-ts = { path = "../ec-ts" }
hex = "0.4"
iroh = "0.96"
just-webrtc = "0.2"

View file

@ -8,15 +8,21 @@ use clap::ValueEnum;
use clap::{Parser, Subcommand};
use ec_chopper::{build_manifest_body_for_chunks, TsChunk};
use ec_core::{
merkle_proof_for_index, verify_merkle_proof, Manifest, ManifestSummary, ManifestVariant,
MoqStreamDescriptor, StreamCatalogEntry, StreamControlAnnouncement, StreamDescriptor,
StreamEncryptionInfo, StreamId, StreamKey, StreamMetadata, StreamTransportDescriptor,
merkle_proof_for_index, verify_merkle_proof, Manifest, ManifestBody, ManifestSummary,
ManifestVariant, MoqStreamDescriptor, StreamCatalogEntry, StreamControlAnnouncement,
StreamDescriptor, StreamEncryptionInfo, StreamId, StreamKey, StreamMetadata,
StreamTransportDescriptor, MERKLE_PROOF_ALG_BLAKE3,
};
use ec_crypto::{
decrypt_stream_data, encrypt_stream_data, load_manifest_keypair_from_env, sign_manifest_id,
verify_manifest_signature, ENCRYPTION_ALG,
decrypt_stream_data, encrypt_stream_data, load_ethereum_manifest_keypair_from_env,
load_manifest_keypair_from_env, sign_manifest_body_eip712, sign_manifest_id,
verify_manifest_signature_with_body, ENCRYPTION_ALG,
};
use ec_direct::{decode_direct_link, encode_direct_link, DirectCodeV1};
use ec_eth::{
control_announcement_commitments, manifest_commitments, manifest_commitments_match,
stream_descriptor_commitments,
};
use ec_iroh::DiscoveryConfig;
use ec_moq::{
chunk_duration_secs, decode_object_frame, encode_object_frame, FileRelay, GroupId, HlsWriter,
@ -779,7 +785,6 @@ fn ingest(args: IngestArgs) -> Result<()> {
};
let source_id = source.source_id();
let source_id_for_stream = source_id.clone();
let reader = source.open_stream()?;
let encoder_profile_id = if deterministic {
"deterministic-h264-aac".to_string()
@ -815,17 +820,12 @@ fn ingest(args: IngestArgs) -> Result<()> {
let relay = FileRelay::new(args.relay_dir);
let track = TrackName {
namespace: "every.channel".to_string(),
name: args.stream_id.unwrap_or_else(|| {
StreamKey {
version: 1,
broadcast: None,
source: Some(source_id_for_stream),
profile: Some(format!("chunk-{}ms", args.chunk_ms)),
variant: None,
name: match args.stream_id {
Some(stream_id) => stream_id,
None => {
default_stream_id_for_source(source.as_ref(), format!("chunk-{}ms", args.chunk_ms))?
}
.to_stream_id()
.0
}),
},
};
let stream_id = StreamId(track.name.clone());
let manifest_payload = build_manifest(
@ -1072,12 +1072,52 @@ fn deterministic_enabled(flag: bool) -> bool {
.unwrap_or(false)
}
/// Derive the default stream id for a source at the given encoding
/// profile.
///
/// Prefers the source's broadcast identity when one can be determined and
/// falls back to the device-local source identity otherwise.
///
/// # Errors
/// Propagates failures from probing the source's broadcast identity.
fn default_stream_id_for_source(
    source: &dyn StreamSource,
    profile: impl Into<String>,
) -> Result<String> {
    let source_id = source.source_id();
    // Wrap once up front; only one match arm ever runs, so the Option can
    // be moved into either key without cloning the profile string.
    let profile = Some(profile.into());
    let stream_key = match source.broadcast_id()? {
        Some(broadcast) => StreamKey {
            version: 1,
            broadcast: Some(broadcast),
            source: None,
            profile,
            variant: None,
        },
        None => StreamKey {
            version: 1,
            broadcast: None,
            source: Some(source_id),
            profile,
            variant: None,
        },
    };
    Ok(stream_key.to_stream_id().0)
}
/// Read a chunk file fully and return its bytes together with the blake3
/// hex digest of those bytes.
///
/// # Errors
/// Wraps I/O failures with the offending path for context.
fn read_chunk_bytes_and_hash(path: &std::path::Path) -> Result<(Vec<u8>, String)> {
    let data =
        fs::read(path).with_context(|| format!("failed to read {}", path.display()))?;
    let digest = blake3::hash(&data).to_hex().to_string();
    Ok((data, digest))
}
/// Gather every manifest signature available from the environment.
///
/// Produces a signature over the manifest id when the standard signing
/// key is configured, and an EIP-712 signature over the body when the
/// Ethereum signing key is configured; either, both, or neither may be
/// present.
///
/// # Errors
/// Fails if a configured key cannot be loaded or a signature cannot be
/// produced.
fn collect_manifest_signatures(
    manifest_id: &str,
    body: &ManifestBody,
) -> Result<Vec<ec_core::ManifestSignature>> {
    let mut signatures = Vec::with_capacity(2);
    let core_keypair = load_manifest_keypair_from_env().map_err(|err| anyhow!(err))?;
    if let Some(keypair) = core_keypair {
        signatures.push(sign_manifest_id(manifest_id, &keypair));
    }
    let eth_keypair = load_ethereum_manifest_keypair_from_env().map_err(|err| anyhow!(err))?;
    if let Some(keypair) = eth_keypair {
        signatures.push(sign_manifest_body_eip712(body, &keypair).map_err(|err| anyhow!(err))?);
    }
    Ok(signatures)
}
fn build_manifest(
stream_id: StreamId,
epoch_id: impl Into<String>,
@ -1099,15 +1139,15 @@ fn build_manifest(
&chunk_hashes,
)?;
let manifest_id = body.manifest_id()?;
let mut signatures = Vec::new();
if let Some(keypair) = load_manifest_keypair_from_env().map_err(|err| anyhow!(err))? {
signatures.push(sign_manifest_id(&manifest_id, &keypair));
}
Ok(Manifest {
let signatures = collect_manifest_signatures(&manifest_id, &body)?;
let mut manifest = Manifest {
body,
manifest_id,
signatures,
})
commitments: Vec::new(),
};
manifest.commitments = manifest_commitments(&manifest).map_err(|err| anyhow!("{err}"))?;
Ok(manifest)
}
#[derive(Debug, Clone)]
@ -1224,15 +1264,15 @@ fn build_multi_variant_manifest(
variants: Some(entries),
};
let manifest_id = body.manifest_id()?;
let mut signatures = Vec::new();
if let Some(keypair) = load_manifest_keypair_from_env().map_err(|err| anyhow!(err))? {
signatures.push(sign_manifest_id(&manifest_id, &keypair));
}
Ok(Manifest {
let signatures = collect_manifest_signatures(&manifest_id, &body)?;
let mut manifest = Manifest {
body,
manifest_id,
signatures,
})
commitments: Vec::new(),
};
manifest.commitments = manifest_commitments(&manifest).map_err(|err| anyhow!("{err}"))?;
Ok(manifest)
}
struct EpochBuffer {
@ -1343,10 +1383,13 @@ fn validate_manifest(manifest: &Manifest, allowlist: Option<&HashSet<String>>) -
_ => return false,
}
}
if !matches!(manifest_commitments_match(manifest), Ok(true)) {
return false;
}
if let Some(allowlist) = allowlist {
return manifest.signatures.iter().any(|sig| {
verify_manifest_signature(&manifest.manifest_id, sig)
verify_manifest_signature_with_body(&manifest.manifest_id, &manifest.body, sig)
&& allowlist.contains(&sig.signer_id)
});
}
@ -1363,7 +1406,7 @@ fn validate_manifest(manifest: &Manifest, allowlist: Option<&HashSet<String>>) -
manifest
.signatures
.iter()
.any(|sig| verify_manifest_signature(&manifest.manifest_id, sig))
.any(|sig| verify_manifest_signature_with_body(&manifest.manifest_id, &manifest.body, sig))
}
fn strip_init_suffix(key_id: &str) -> &str {
@ -1482,7 +1525,7 @@ fn build_object(
chunk_hash: Some(chunk_hash),
chunk_hash_alg: Some("blake3".to_string()),
chunk_proof,
chunk_proof_alg: Some("merkle+blake3".to_string()),
chunk_proof_alg: Some(MERKLE_PROOF_ALG_BLAKE3.to_string()),
manifest_id: manifest_id.map(|value| value.to_string()),
};
@ -1592,6 +1635,8 @@ fn flush_epoch_publish(
#[cfg(test)]
mod tests {
use super::*;
use ec_core::BroadcastId;
use std::io::Cursor;
#[test]
fn parse_manifest_allowlist_splits_and_trims() {
@ -1653,11 +1698,14 @@ mod tests {
vec![sig]
};
Manifest {
let mut manifest = Manifest {
body,
manifest_id,
signatures,
}
commitments: Vec::new(),
};
manifest.commitments = manifest_commitments(&manifest).unwrap();
manifest
}
#[test]
@ -1684,6 +1732,65 @@ mod tests {
assert!(!validate_manifest(&manifest, Some(&deny)));
}
#[test]
fn validate_manifest_accepts_ethereum_signature() {
let chunk_hashes = vec![blake3::hash(b"c0").to_hex().to_string()];
let body = build_manifest_body_for_chunks(
StreamId("s".to_string()),
"epoch-eth",
2000,
0,
"p",
1,
Vec::new(),
&chunk_hashes,
)
.unwrap();
let manifest_id = body.manifest_id().unwrap();
let prev = std::env::var(ec_crypto::ETH_MANIFEST_SIGNING_KEY_ENV).ok();
std::env::set_var(ec_crypto::ETH_MANIFEST_SIGNING_KEY_ENV, "22".repeat(32));
let keypair = load_ethereum_manifest_keypair_from_env().unwrap().unwrap();
let sig = sign_manifest_body_eip712(&body, &keypair).unwrap();
match prev {
Some(value) => std::env::set_var(ec_crypto::ETH_MANIFEST_SIGNING_KEY_ENV, value),
None => std::env::remove_var(ec_crypto::ETH_MANIFEST_SIGNING_KEY_ENV),
}
let mut manifest = Manifest {
body,
manifest_id,
signatures: vec![sig],
commitments: Vec::new(),
};
manifest.commitments = manifest_commitments(&manifest).unwrap();
assert!(validate_manifest(&manifest, None));
let allow = HashSet::from([manifest.signatures[0].signer_id.clone()]);
assert!(validate_manifest(&manifest, Some(&allow)));
}
#[test]
fn validate_manifest_rejects_bad_ethereum_commitment() {
let mut manifest = build_valid_manifest(true);
manifest.commitments[0].digest = "0xdeadbeef".to_string();
assert!(!validate_manifest(&manifest, None));
}
#[test]
fn control_announcement_carries_stream_and_announcement_commitments() {
let announcement = build_control_announcement(
"ec/stream/v1/broadcast/atsc/tsid-42/program-3".to_string(),
"KCBS".to_string(),
vec![StreamTransportDescriptor::IrohDirect {
endpoint: "ed25519:node".to_string(),
broadcast_name: "kcbs".to_string(),
track_name: "video0.m4s".to_string(),
}],
5_000,
);
assert!(!announcement.stream.commitments.is_empty());
assert!(!announcement.commitments.is_empty());
}
#[test]
fn manifest_hash_for_chunk_indexes_into_hash_list() {
let manifest = build_valid_manifest(true);
@ -1699,6 +1806,70 @@ mod tests {
);
assert!(manifest_hash_for_chunk(&manifest, sid, 12).is_none());
}
#[derive(Clone)]
struct DummySource {
source_id: ec_core::SourceId,
broadcast_id: Option<BroadcastId>,
}
impl StreamSource for DummySource {
fn open_stream(&self) -> Result<Box<dyn Read + Send>> {
Ok(Box::new(Cursor::new(Vec::<u8>::new())))
}
fn source_id(&self) -> ec_core::SourceId {
self.source_id.clone()
}
fn broadcast_id(&self) -> Result<Option<BroadcastId>> {
Ok(self.broadcast_id.clone())
}
}
#[test]
fn default_stream_id_for_source_uses_broadcast_identity_when_present() {
let source = DummySource {
source_id: ec_core::SourceId {
kind: "hdhr".to_string(),
device_id: Some("ABCDEF01".to_string()),
channel: Some("2.1".to_string()),
},
broadcast_id: Some(BroadcastId {
standard: "atsc".to_string(),
transport_stream_id: None,
program_number: Some(3),
virtual_channel: Some("2.1".to_string()),
callsign: Some("KCBS".to_string()),
region: None,
frequency: None,
}),
};
let stream_id = default_stream_id_for_source(&source, "chunk-2000ms").unwrap();
assert_eq!(
stream_id,
"ec/stream/v1/broadcast/atsc/program-3/channel-2_1/callsign-kcbs/profile-chunk-2000ms"
);
}
#[test]
fn default_stream_id_for_source_falls_back_to_source_identity() {
let source = DummySource {
source_id: ec_core::SourceId {
kind: "ts".to_string(),
device_id: None,
channel: Some("capture.ts".to_string()),
},
broadcast_id: None,
};
let stream_id = default_stream_id_for_source(&source, "chunk-2000ms").unwrap();
assert_eq!(
stream_id,
"ec/stream/v1/source/ts/channel-capture_ts/profile-chunk-2000ms"
);
}
}
async fn moq_publish(args: MoqPublishArgs) -> Result<()> {
@ -1747,19 +1918,13 @@ async fn moq_publish(args: MoqPublishArgs) -> Result<()> {
};
let source_id = source.source_id();
let source_id_for_stream = source_id.clone();
let stream_id = args.stream_id.unwrap_or_else(|| {
StreamKey {
version: 1,
broadcast: None,
source: Some(source_id_for_stream),
profile: Some(format!("chunk-{}ms", args.chunk_ms)),
variant: None,
let stream_id = match args.stream_id {
Some(stream_id) => stream_id,
None => {
default_stream_id_for_source(source.as_ref(), format!("chunk-{}ms", args.chunk_ms))?
}
.to_stream_id()
.0
});
};
let broadcast_name = args.broadcast_name.unwrap_or_else(|| stream_id.clone());
let track_name = args.track_name.clone();
@ -4409,7 +4574,10 @@ fn build_catalog_entry(
key: "broadcast".to_string(),
value: broadcast_name.to_string(),
}],
commitments: Vec::new(),
};
let mut stream = stream;
stream.commitments = stream_descriptor_commitments(&stream);
let encryption = StreamEncryptionInfo {
alg: ENCRYPTION_ALG.to_string(),
@ -4448,7 +4616,7 @@ fn build_control_announcement(
transports: Vec<StreamTransportDescriptor>,
ttl_ms: u64,
) -> StreamControlAnnouncement {
let stream = StreamDescriptor {
let mut stream = StreamDescriptor {
id: StreamId(stream_id.clone()),
title,
number: None,
@ -4457,14 +4625,19 @@ fn build_control_announcement(
key: "stream_id".to_string(),
value: stream_id,
}],
commitments: Vec::new(),
};
stream.commitments = stream_descriptor_commitments(&stream);
StreamControlAnnouncement {
let mut announcement = StreamControlAnnouncement {
stream,
transports,
updated_unix_ms: now_unix_ms(),
ttl_ms,
}
commitments: Vec::new(),
};
announcement.commitments = control_announcement_commitments(&announcement);
announcement
}
async fn spawn_control_announcer_task(

View file

@ -1,9 +1,10 @@
use anyhow::{anyhow, Result};
use clap::ValueEnum;
use ec_chopper::{deterministic_h264_profile, ffmpeg_profile_args};
use ec_core::SourceId;
use ec_hdhomerun::{find_lineup_entry_by_name, find_lineup_entry_by_number};
use ec_core::{BroadcastId, SourceId};
use ec_hdhomerun::{find_lineup_entry_by_name, find_lineup_entry_by_number, LineupEntry};
use ec_linux_iptv::LinuxDvbConfig;
use ec_ts::probe_transport_stream_identity;
use std::io::Read;
use std::process::{Child, Command, Stdio};
use std::thread;
@ -11,6 +12,9 @@ use std::thread;
/// A byte-stream source (tuner, file, URL, ...) that ingestion can open.
pub trait StreamSource: Send {
    /// Open the underlying byte stream for reading.
    fn open_stream(&self) -> Result<Box<dyn Read + Send>>;
    /// Device-local identity of this source.
    fn source_id(&self) -> SourceId;
    /// Over-the-air broadcast identity, when one can be determined.
    /// Defaults to `None` for sources with no broadcast semantics.
    fn broadcast_id(&self) -> Result<Option<BroadcastId>> {
        Ok(None)
    }
}
#[derive(Debug, Clone)]
@ -24,20 +28,8 @@ pub struct HdhrSource {
impl StreamSource for HdhrSource {
fn open_stream(&self) -> Result<Box<dyn Read + Send>> {
let device = resolve_hdhr_device(self)?;
let lineup = ec_hdhomerun::fetch_lineup(&device)?;
let entry = if let Some(channel) = &self.channel {
find_lineup_entry_by_number(&lineup, channel)
.or_else(|| find_lineup_entry_by_name(&lineup, channel))
.ok_or_else(|| anyhow!("channel not found: {channel}"))?
} else if let Some(name) = &self.name {
find_lineup_entry_by_name(&lineup, name)
.ok_or_else(|| anyhow!("channel not found: {name}"))?
} else {
return Err(anyhow!("--channel or --name required for hdhr"));
};
Ok(Box::new(ec_hdhomerun::open_stream_entry(entry, None)?))
let entry = resolve_hdhr_lineup_entry(self)?;
Ok(Box::new(ec_hdhomerun::open_stream_entry(&entry, None)?))
}
fn source_id(&self) -> SourceId {
@ -48,6 +40,21 @@ impl StreamSource for HdhrSource {
channel: self.channel.clone().or_else(|| self.name.clone()),
}
}
fn broadcast_id(&self) -> Result<Option<BroadcastId>> {
let entry = resolve_hdhr_lineup_entry(self)?;
let mut broadcast = entry.channel.broadcast_id("atsc");
if broadcast.as_ref().is_none_or(|identity| {
identity.transport_stream_id.is_none() || identity.program_number.is_none()
}) {
let probe = ec_hdhomerun::open_stream_entry(&entry, Some(2))?;
broadcast = merge_broadcast_identity(
broadcast,
probe_transport_stream_broadcast(Box::new(probe), Some("atsc"))?,
);
}
Ok(broadcast)
}
}
fn resolve_hdhr_device(source: &HdhrSource) -> Result<ec_hdhomerun::HdhomerunDevice> {
@ -72,6 +79,23 @@ fn resolve_hdhr_device(source: &HdhrSource) -> Result<ec_hdhomerun::HdhomerunDev
.ok_or_else(|| anyhow!("no HDHomeRun devices found"))
}
/// Resolve the HDHomeRun lineup entry selected by `--channel` or
/// `--name`.
///
/// `--channel` matches by guide number first, then by name as a
/// fallback; `--name` matches by name only.
///
/// # Errors
/// Fails when neither selector is given, the device cannot be resolved,
/// the lineup cannot be fetched, or no entry matches.
fn resolve_hdhr_lineup_entry(source: &HdhrSource) -> Result<LineupEntry> {
    let device = resolve_hdhr_device(source)?;
    let lineup = ec_hdhomerun::fetch_lineup(&device)?;
    if let Some(channel) = &source.channel {
        let entry = find_lineup_entry_by_number(&lineup, channel)
            .or_else(|| find_lineup_entry_by_name(&lineup, channel))
            .ok_or_else(|| anyhow!("channel not found: {channel}"))?;
        return Ok(entry.clone());
    }
    if let Some(name) = &source.name {
        let entry = find_lineup_entry_by_name(&lineup, name)
            .ok_or_else(|| anyhow!("channel not found: {name}"))?;
        return Ok(entry.clone());
    }
    Err(anyhow!("--channel or --name required for hdhr"))
}
#[derive(Debug, Clone)]
pub struct LinuxDvbSource {
pub adapter: u32,
@ -126,6 +150,16 @@ impl StreamSource for TsSource {
channel: None,
}
}
fn broadcast_id(&self) -> Result<Option<BroadcastId>> {
if self.input.starts_with("http://") || self.input.starts_with("https://") {
let reader = ec_hdhomerun::open_stream_url(&self.input, Some(2))?;
probe_transport_stream_broadcast(Box::new(reader), None)
} else {
let reader = std::fs::File::open(&self.input)?;
probe_transport_stream_broadcast(Box::new(reader), None)
}
}
}
#[derive(Debug, Clone, Copy, ValueEnum)]
@ -197,6 +231,67 @@ impl StreamSource for HlsSource {
}
}
/// Build a [`BroadcastId`] by probing a transport stream reader.
///
/// Returns `Ok(None)` when no identity could be extracted. The standard
/// falls back to `fallback_standard`, then to `"mpegts"`. A virtual
/// channel is formed as `"major.minor"` only when both numbers are
/// present.
fn probe_transport_stream_broadcast(
    reader: Box<dyn Read + Send>,
    fallback_standard: Option<&str>,
) -> Result<Option<BroadcastId>> {
    // NOTE(review): `256` presumably bounds how much of the stream is
    // scanned — confirm against probe_transport_stream_identity.
    let identity = match probe_transport_stream_identity(reader, 256)? {
        Some(identity) => identity,
        None => return Ok(None),
    };
    let virtual_channel = identity
        .major_channel_number
        .zip(identity.minor_channel_number)
        .map(|(major, minor)| format!("{major}.{minor}"));
    let standard = identity
        .standard
        .or_else(|| fallback_standard.map(|value| value.to_string()))
        .unwrap_or_else(|| "mpegts".to_string());
    Ok(Some(BroadcastId {
        standard,
        transport_stream_id: Some(identity.transport_stream_id),
        program_number: identity.program_number,
        virtual_channel,
        callsign: identity.short_name,
        region: None,
        frequency: None,
    }))
}
/// Overlay a probed broadcast identity onto a baseline one.
///
/// Fields already set on `base` win; only missing fields (or an
/// empty/whitespace `standard`) are filled in from `probed`. Either side
/// may be absent, in which case the other is returned unchanged.
fn merge_broadcast_identity(
    base: Option<BroadcastId>,
    probed: Option<BroadcastId>,
) -> Option<BroadcastId> {
    let (mut merged, probed) = match (base, probed) {
        (None, other) => return other,
        (some, None) => return some,
        (Some(base), Some(probed)) => (base, probed),
    };
    if merged.standard.trim().is_empty() {
        merged.standard = probed.standard;
    }
    merged.transport_stream_id = merged.transport_stream_id.or(probed.transport_stream_id);
    merged.program_number = merged.program_number.or(probed.program_number);
    merged.virtual_channel = merged.virtual_channel.or(probed.virtual_channel);
    merged.callsign = merged.callsign.or(probed.callsign);
    merged.region = merged.region.or(probed.region);
    merged.frequency = merged.frequency.or(probed.frequency);
    Some(merged)
}
struct FfmpegChildStream {
child: Child,
stdout: std::process::ChildStdout,

File diff suppressed because it is too large Load diff