Advance forge rollout, Ethereum rails, and NBC sources

This commit is contained in:
every.channel 2026-04-01 15:58:49 -07:00
parent be26313225
commit 7d84510eac
No known key found for this signature in database
88 changed files with 11230 additions and 302 deletions

View file

@ -8,3 +8,5 @@ license.workspace = true
serde.workspace = true
blake3.workspace = true
serde_json.workspace = true
hex = "0.4"
sha3 = "0.10"

View file

@ -1,8 +1,14 @@
//! Core types shared across every.channel.
use serde::{Deserialize, Serialize};
use sha3::{Digest, Keccak256};
use std::fmt;
/// Algorithm label for manifest ids computed with blake3 (the default).
pub const MANIFEST_ID_ALG_BLAKE3: &str = "blake3";
/// Algorithm label for manifest ids computed with keccak256.
pub const MANIFEST_ID_ALG_KECCAK256: &str = "keccak256";
/// Algorithm label for merkle proofs built with blake3 pair hashing.
pub const MERKLE_PROOF_ALG_BLAKE3: &str = "merkle+blake3";
/// Algorithm label for merkle proofs built with keccak256 pair hashing.
pub const MERKLE_PROOF_ALG_KECCAK256: &str = "merkle+keccak256";
/// Newtype wrapper for a channel identifier string.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ChannelId(pub String);
@ -12,6 +18,13 @@ pub struct DeviceId(pub String);
/// Newtype wrapper for a stream identifier string (see `StreamKey::to_stream_id`).
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct StreamId(pub String);
/// A digest anchored on an external chain, attached to manifests and
/// stream announcements.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct ChainCommitment {
    // Chain name — NOTE(review): presumably e.g. "ethereum"; confirm against writers.
    pub chain: String,
    // Commitment scheme — NOTE(review): presumably one of the MANIFEST_ID_ALG_* /
    // MERKLE_PROOF_ALG_* labels above; confirm at the call sites.
    pub scheme: String,
    // Hex-encoded digest being committed.
    pub digest: String,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamDescriptor {
pub id: StreamId,
@ -19,6 +32,8 @@ pub struct StreamDescriptor {
pub number: Option<String>,
pub source: String,
pub metadata: Vec<StreamMetadata>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -32,6 +47,7 @@ pub struct BroadcastId {
pub standard: String,
pub transport_stream_id: Option<u16>,
pub program_number: Option<u16>,
pub virtual_channel: Option<String>,
pub callsign: Option<String>,
pub region: Option<String>,
pub frequency: Option<String>,
@ -54,6 +70,30 @@ pub struct StreamKey {
}
impl StreamKey {
/// Builds a stream key scoped to the broadcast identity when the channel
/// yields a usable one for `standard`, otherwise scoped to the capture
/// `source`. Exactly one of `broadcast`/`source` is populated.
pub fn for_channel_or_source(
    channel: Option<&Channel>,
    standard: Option<&str>,
    source: SourceId,
    profile: Option<String>,
    variant: Option<String>,
) -> Self {
    // Broadcast identity requires both a channel and a standard.
    let broadcast = match (channel, standard) {
        (Some(channel), Some(standard)) => channel.broadcast_id(standard),
        _ => None,
    };
    // Keep the source only as a fallback when no broadcast identity exists.
    let source = match broadcast {
        Some(_) => None,
        None => Some(source),
    };
    Self {
        version: 1,
        broadcast,
        source,
        profile,
        variant,
    }
}
pub fn to_stream_id(&self) -> StreamId {
let mut parts = vec![
"ec".to_string(),
@ -70,6 +110,9 @@ impl StreamKey {
if let Some(program) = broadcast.program_number {
parts.push(format!("program-{program}"));
}
if let Some(channel) = &broadcast.virtual_channel {
parts.push(format!("channel-{}", sanitize(channel)));
}
if let Some(callsign) = &broadcast.callsign {
parts.push(format!("callsign-{}", sanitize(callsign)));
}
@ -132,6 +175,90 @@ pub enum ChannelMetadata {
Extra(String, String),
}
impl BroadcastId {
    /// True when at least one identifying field is present: a transport
    /// stream id, a program number, or a non-blank virtual channel or
    /// callsign. Blank/whitespace-only strings do not count.
    pub fn is_usable(&self) -> bool {
        fn non_blank(field: &Option<String>) -> bool {
            field.as_deref().map_or(false, |value| !value.trim().is_empty())
        }
        self.transport_stream_id.is_some()
            || self.program_number.is_some()
            || non_blank(&self.virtual_channel)
            || non_blank(&self.callsign)
    }
}
impl Channel {
    /// Derives the broadcast-scoped identity of this channel under the given
    /// transmission `standard`, or `None` when the standard is blank or no
    /// usable identifying field can be found.
    pub fn broadcast_id(&self, standard: &str) -> Option<BroadcastId> {
        let standard = standard.trim().to_ascii_lowercase();
        if standard.is_empty() {
            return None;
        }
        // Prefer explicit callsign metadata; fall back to a non-blank name.
        let name_fallback = {
            let name = self.name.trim();
            if name.is_empty() {
                None
            } else {
                Some(name.to_string())
            }
        };
        let callsign = channel_metadata_value(&self.metadata, "callsign").or(name_fallback);
        let candidate = BroadcastId {
            standard,
            // Accept either the long key or the common "tsid" abbreviation.
            transport_stream_id: channel_metadata_u16(&self.metadata, "transport_stream_id")
                .or_else(|| channel_metadata_u16(&self.metadata, "tsid")),
            // Typed program id wins; metadata keys are fallbacks.
            program_number: self
                .program_id
                .or_else(|| channel_metadata_u16(&self.metadata, "program_number"))
                .or_else(|| channel_metadata_u16(&self.metadata, "program_id")),
            // Channel number doubles as the virtual channel when non-blank.
            virtual_channel: self
                .number
                .as_deref()
                .map(str::trim)
                .filter(|number| !number.is_empty())
                .map(str::to_string),
            callsign,
            region: channel_metadata_value(&self.metadata, "region"),
            frequency: channel_metadata_value(&self.metadata, "frequency"),
        };
        candidate.is_usable().then_some(candidate)
    }
}
/// Looks up a trimmed, non-empty metadata value for `key`.
///
/// Typed variants (`Callsign`, `Region`, `Frequency`, `Network`) match their
/// fixed key names; `Extra` entries match `key` case-insensitively and have
/// surrounding quotes stripped before trimming.
///
/// Fix: the typed variants previously returned `Some("")` for blank values,
/// which both defeated downstream `or_else` fallbacks (e.g. the callsign →
/// channel-name fallback in `Channel::broadcast_id`) and was inconsistent
/// with the `Extra` branch, which already skipped empties and kept scanning.
/// All branches now skip blank values and continue to later entries.
fn channel_metadata_value(metadata: &[ChannelMetadata], key: &str) -> Option<String> {
    for item in metadata {
        let candidate = match item {
            ChannelMetadata::Callsign(value) if key == "callsign" => value.trim(),
            ChannelMetadata::Region(value) if key == "region" => value.trim(),
            ChannelMetadata::Frequency(value) if key == "frequency" => value.trim(),
            ChannelMetadata::Network(value) if key == "network" => value.trim(),
            ChannelMetadata::Extra(extra_key, value) if extra_key.eq_ignore_ascii_case(key) => {
                value.trim_matches('"').trim()
            }
            _ => continue,
        };
        if !candidate.is_empty() {
            return Some(candidate.to_string());
        }
    }
    None
}
/// Looks up a metadata value for `key` and parses it as a `u16`;
/// unparseable or missing values yield `None`.
fn channel_metadata_u16(metadata: &[ChannelMetadata], key: &str) -> Option<u16> {
    let raw = channel_metadata_value(metadata, key)?;
    raw.parse::<u16>().ok()
}
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PacketDigest {
pub algorithm: String,
@ -207,6 +334,8 @@ pub struct StreamControlAnnouncement {
pub updated_unix_ms: u64,
/// Suggested freshness window for this announcement.
pub ttl_ms: u64,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -218,6 +347,8 @@ pub struct ManifestSummary {
pub chunk_start_index: u64,
pub encoder_profile_id: String,
pub signed_by: Vec<String>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
#[derive(Debug, Clone, Serialize, Deserialize)]
@ -268,6 +399,8 @@ pub struct Manifest {
pub body: ManifestBody,
pub manifest_id: String,
pub signatures: Vec<ManifestSignature>,
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub commitments: Vec<ChainCommitment>,
}
impl Manifest {
@ -284,6 +417,7 @@ impl Manifest {
.iter()
.map(|sig| sig.signer_id.clone())
.collect(),
commitments: self.commitments.clone(),
}
}
}
@ -307,44 +441,96 @@ impl std::error::Error for ManifestError {}
impl ManifestBody {
    /// Canonical manifest id — an alias for the blake3 digest, which is the
    /// default algorithm (`MANIFEST_ID_ALG_BLAKE3`).
    pub fn manifest_id(&self) -> Result<String, serde_json::Error> {
        self.manifest_id_blake3()
    }
    /// Hex-encoded blake3 digest of this body's JSON encoding.
    pub fn manifest_id_blake3(&self) -> Result<String, serde_json::Error> {
        serde_json::to_vec(self).map(|bytes| blake3::hash(&bytes).to_hex().to_string())
    }
    /// Hex-encoded keccak256 digest of this body's JSON encoding.
    pub fn manifest_id_keccak256(&self) -> Result<String, serde_json::Error> {
        serde_json::to_vec(self).map(|bytes| hex::encode(keccak256(&bytes)))
    }
}
pub fn merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
fn parse_hash32(value: &str) -> Result<[u8; 32], ManifestError> {
let trimmed = value.trim().strip_prefix("0x").unwrap_or(value.trim());
let bytes = hex::decode(trimmed).map_err(|_| ManifestError::InvalidHash(value.to_string()))?;
if bytes.len() != 32 {
return Err(ManifestError::InvalidHash(value.to_string()));
}
let mut out = [0u8; 32];
out.copy_from_slice(&bytes);
Ok(out)
}
/// Computes the Keccak-256 digest of `bytes` as a fixed 32-byte array.
fn keccak256(bytes: &[u8]) -> [u8; 32] {
    // One-shot digest; the output array conversion is provided by the
    // digest/generic-array traits already in scope via `Digest`.
    Keccak256::digest(bytes).into()
}
/// Hashes the concatenation of two 32-byte merkle nodes with blake3.
fn blake3_pair_hash(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
    // One-shot hash over the concatenated pair — identical digest to
    // incrementally feeding `left` then `right`.
    let mut merged = [0u8; 64];
    merged[..32].copy_from_slice(left);
    merged[32..].copy_from_slice(right);
    *blake3::hash(&merged).as_bytes()
}
fn keccak_pair_hash(left: &[u8; 32], right: &[u8; 32]) -> [u8; 32] {
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left);
merged[32..].copy_from_slice(right);
keccak256(&merged)
}
fn merkle_root_from_hashes_with(
hashes: &[String],
pair_hash: fn(&[u8; 32], &[u8; 32]) -> [u8; 32],
) -> Result<String, ManifestError> {
if hashes.is_empty() {
return Err(ManifestError::Empty);
}
let mut nodes: Vec<blake3::Hash> = Vec::with_capacity(hashes.len());
let mut nodes: Vec<[u8; 32]> = Vec::with_capacity(hashes.len());
for hash in hashes {
let parsed = blake3::Hash::from_hex(hash.as_bytes())
.map_err(|_| ManifestError::InvalidHash(hash.clone()))?;
nodes.push(parsed);
nodes.push(parse_hash32(hash)?);
}
while nodes.len() > 1 {
if nodes.len() % 2 == 1 {
if let Some(last) = nodes.last().cloned() {
if let Some(last) = nodes.last().copied() {
nodes.push(last);
}
}
let mut parents = Vec::with_capacity(nodes.len() / 2);
for pair in nodes.chunks(2) {
let left = pair[0].as_bytes();
let right = pair[1].as_bytes();
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left);
merged[32..].copy_from_slice(right);
parents.push(blake3::hash(&merged));
parents.push(pair_hash(&pair[0], &pair[1]));
}
nodes = parents;
}
Ok(nodes[0].to_hex().to_string())
Ok(hex::encode(nodes[0]))
}
pub fn merkle_proof_for_index(
pub fn merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
blake3_merkle_root_from_hashes(hashes)
}
pub fn blake3_merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
merkle_root_from_hashes_with(hashes, blake3_pair_hash)
}
pub fn keccak_merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
merkle_root_from_hashes_with(hashes, keccak_pair_hash)
}
fn merkle_proof_for_index_with(
hashes: &[String],
index: usize,
pair_hash: fn(&[u8; 32], &[u8; 32]) -> [u8; 32],
) -> Result<Vec<String>, ManifestError> {
if hashes.is_empty() {
return Err(ManifestError::Empty);
@ -355,18 +541,16 @@ pub fn merkle_proof_for_index(
)));
}
let mut nodes: Vec<blake3::Hash> = Vec::with_capacity(hashes.len());
let mut nodes: Vec<[u8; 32]> = Vec::with_capacity(hashes.len());
for hash in hashes {
let parsed = blake3::Hash::from_hex(hash.as_bytes())
.map_err(|_| ManifestError::InvalidHash(hash.clone()))?;
nodes.push(parsed);
nodes.push(parse_hash32(hash)?);
}
let mut proof = Vec::new();
let mut pos = index;
while nodes.len() > 1 {
if nodes.len() % 2 == 1 {
if let Some(last) = nodes.last().cloned() {
if let Some(last) = nodes.last().copied() {
nodes.push(last);
}
}
@ -375,16 +559,11 @@ pub fn merkle_proof_for_index(
let sibling = nodes
.get(sibling_index)
.ok_or_else(|| ManifestError::InvalidHash("missing sibling".to_string()))?;
proof.push(sibling.to_hex().to_string());
proof.push(hex::encode(sibling));
let mut parents = Vec::with_capacity(nodes.len() / 2);
for pair in nodes.chunks(2) {
let left = pair[0].as_bytes();
let right = pair[1].as_bytes();
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left);
merged[32..].copy_from_slice(right);
parents.push(blake3::hash(&merged));
parents.push(pair_hash(&pair[0], &pair[1]));
}
nodes = parents;
pos /= 2;
@ -393,17 +572,39 @@ pub fn merkle_proof_for_index(
Ok(proof)
}
pub fn verify_merkle_proof(
pub fn merkle_proof_for_index(
hashes: &[String],
index: usize,
) -> Result<Vec<String>, ManifestError> {
blake3_merkle_proof_for_index(hashes, index)
}
pub fn blake3_merkle_proof_for_index(
hashes: &[String],
index: usize,
) -> Result<Vec<String>, ManifestError> {
merkle_proof_for_index_with(hashes, index, blake3_pair_hash)
}
pub fn keccak_merkle_proof_for_index(
hashes: &[String],
index: usize,
) -> Result<Vec<String>, ManifestError> {
merkle_proof_for_index_with(hashes, index, keccak_pair_hash)
}
fn verify_merkle_proof_with(
leaf_hash: &str,
mut index: usize,
branch: &[String],
expected_root: &str,
pair_hash: fn(&[u8; 32], &[u8; 32]) -> [u8; 32],
) -> bool {
let Ok(mut acc) = blake3::Hash::from_hex(leaf_hash.as_bytes()) else {
let Ok(mut acc) = parse_hash32(leaf_hash) else {
return false;
};
for sibling_hex in branch {
let Ok(sibling) = blake3::Hash::from_hex(sibling_hex.as_bytes()) else {
let Ok(sibling) = parse_hash32(sibling_hex) else {
return false;
};
let (left, right) = if index % 2 == 0 {
@ -411,13 +612,40 @@ pub fn verify_merkle_proof(
} else {
(sibling, acc)
};
let mut merged = [0u8; 64];
merged[..32].copy_from_slice(left.as_bytes());
merged[32..].copy_from_slice(right.as_bytes());
acc = blake3::hash(&merged);
acc = pair_hash(&left, &right);
index /= 2;
}
acc.to_hex().to_string() == expected_root
match parse_hash32(expected_root) {
Ok(root) => acc == root,
Err(_) => false,
}
}
/// Verifies a merkle inclusion proof with the default (blake3) pair hash.
/// Kept as an alias of [`verify_blake3_merkle_proof`] so callers predating
/// the keccak variant keep working.
pub fn verify_merkle_proof(
    leaf_hash: &str,
    index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    verify_blake3_merkle_proof(leaf_hash, index, branch, expected_root)
}
/// Verifies a merkle inclusion proof built with blake3 pair hashing.
pub fn verify_blake3_merkle_proof(
    leaf_hash: &str,
    index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    verify_merkle_proof_with(leaf_hash, index, branch, expected_root, blake3_pair_hash)
}
/// Verifies a merkle inclusion proof built with keccak256 pair hashing.
pub fn verify_keccak_merkle_proof(
    leaf_hash: &str,
    index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    verify_merkle_proof_with(leaf_hash, index, branch, expected_root, keccak_pair_hash)
}
#[cfg(test)]
@ -446,11 +674,39 @@ mod tests {
assert_ne!(id1, id2);
}
#[test]
fn manifest_id_defaults_to_blake3() {
    // Minimal deterministic body: fixed strings and repeated hex bytes.
    let manifest_body = ManifestBody {
        stream_id: StreamId("s".to_string()),
        epoch_id: "e".to_string(),
        chunk_duration_ms: 2000,
        total_chunks: 1,
        chunk_start_index: 0,
        encoder_profile_id: "p".to_string(),
        merkle_root: "00".repeat(32),
        created_unix_ms: 1,
        metadata: Vec::new(),
        chunk_hashes: vec!["11".repeat(32)],
        variants: None,
    };
    let encoded = serde_json::to_vec(&manifest_body).unwrap();
    let expected_blake3 = blake3::hash(&encoded).to_hex().to_string();
    // The unqualified id must equal the blake3 digest of the JSON encoding…
    assert_eq!(manifest_body.manifest_id().unwrap(), expected_blake3);
    // …and must differ from the keccak256 variant.
    assert_ne!(
        manifest_body.manifest_id().unwrap(),
        manifest_body.manifest_id_keccak256().unwrap()
    );
}
#[test]
fn merkle_root_single_is_leaf() {
    // A single-leaf tree has no pairing step, so the root equals the leaf
    // under both pair-hash algorithms.
    let leaf = blake3::hash(b"leaf").to_hex().to_string();
    assert_eq!(merkle_root_from_hashes(&[leaf.clone()]).unwrap(), leaf);
    assert_eq!(keccak_merkle_root_from_hashes(&[leaf.clone()]).unwrap(), leaf);
}
#[test]
@ -476,6 +732,23 @@ mod tests {
}
}
#[test]
fn keccak_merkle_proof_roundtrip_small_sets() {
    // Cover balanced and unbalanced trees; odd sizes exercise last-node
    // duplication.
    for size in 1..=9usize {
        let leaves: Vec<String> = (0..size)
            .map(|i| blake3::hash(&[i as u8]).to_hex().to_string())
            .collect();
        let root = keccak_merkle_root_from_hashes(&leaves).unwrap();
        for (idx, leaf) in leaves.iter().enumerate() {
            let proof = keccak_merkle_proof_for_index(&leaves, idx).unwrap();
            assert!(
                verify_keccak_merkle_proof(leaf, idx, &proof, &root),
                "size {size} idx {idx} failed"
            );
        }
    }
}
#[test]
fn merkle_proof_detects_tampering() {
let leaves = (0..4usize)
@ -486,4 +759,90 @@ mod tests {
proof[0] = blake3::hash(b"evil").to_hex().to_string();
assert!(!verify_merkle_proof(&leaves[2], 2, &proof, &root));
}
#[test]
fn channel_broadcast_id_uses_typed_and_extra_metadata() {
    // Typed metadata (callsign/region) and Extra entries (tsid/frequency)
    // should all feed into the derived broadcast identity.
    let chan = Channel {
        id: ChannelId("kcbs".to_string()),
        name: "KCBS-HD".to_string(),
        number: Some("2.1".to_string()),
        program_id: Some(3),
        metadata: vec![
            ChannelMetadata::Callsign("KCBS".to_string()),
            ChannelMetadata::Region("los-angeles".to_string()),
            ChannelMetadata::Extra("tsid".to_string(), "42".to_string()),
            ChannelMetadata::Extra("frequency".to_string(), "573000000".to_string()),
        ],
    };
    let id = chan.broadcast_id("ATSC").expect("broadcast id should derive");
    assert_eq!(id.standard, "atsc"); // standard is lowercased
    assert_eq!(id.transport_stream_id, Some(42)); // from Extra "tsid"
    assert_eq!(id.program_number, Some(3)); // typed program_id wins
    assert_eq!(id.virtual_channel.as_deref(), Some("2.1")); // channel number
    assert_eq!(id.callsign.as_deref(), Some("KCBS"));
    assert_eq!(id.region.as_deref(), Some("los-angeles"));
    assert_eq!(id.frequency.as_deref(), Some("573000000"));
}
#[test]
fn stream_key_prefers_broadcast_scope_when_channel_identity_exists() {
    // A channel with a callsign yields a broadcast identity, so the source
    // scope must be dropped from the key.
    let chan = Channel {
        id: ChannelId("kcbs".to_string()),
        name: "KCBS-HD".to_string(),
        number: Some("2.1".to_string()),
        program_id: None,
        metadata: vec![ChannelMetadata::Callsign("KCBS".to_string())],
    };
    let src = SourceId {
        kind: "hdhr".to_string(),
        device_id: Some("ABCDEF01".to_string()),
        channel: Some("2.1".to_string()),
    };
    let key = StreamKey::for_channel_or_source(
        Some(&chan),
        Some("atsc"),
        src,
        Some("chunk-2000ms".to_string()),
        None,
    );
    assert!(key.broadcast.is_some(), "expected broadcast scope");
    assert!(key.source.is_none(), "source scope should be dropped");
    let stream_id = key.to_stream_id();
    assert_eq!(
        stream_id.0,
        "ec/stream/v1/broadcast/atsc/channel-2_1/callsign-kcbs/profile-chunk-2000ms"
    );
}
#[test]
fn stream_key_falls_back_to_source_scope_without_channel_identity() {
    // A channel with no name, number, program id, or metadata cannot yield a
    // usable broadcast identity, so the key must stay source-scoped.
    let chan = Channel {
        id: ChannelId("unknown".to_string()),
        name: "".to_string(),
        number: None,
        program_id: None,
        metadata: Vec::new(),
    };
    let src = SourceId {
        kind: "ts".to_string(),
        device_id: None,
        channel: Some("file.ts".to_string()),
    };
    let key = StreamKey::for_channel_or_source(
        Some(&chan),
        Some("atsc"),
        src,
        Some("chunk-2000ms".to_string()),
        None,
    );
    assert!(key.broadcast.is_none(), "no broadcast identity expected");
    let stream_id = key.to_stream_id();
    assert_eq!(
        stream_id.0,
        "ec/stream/v1/source/ts/channel-file_ts/profile-chunk-2000ms"
    );
}
}