every.channel: sanitized baseline

This commit is contained in:
every.channel 2026-02-15 16:17:27 -05:00
commit 897e556bea
No known key found for this signature in database
258 changed files with 74298 additions and 0 deletions

10
crates/ec-core/Cargo.toml Normal file
View file

@ -0,0 +1,10 @@
# Package manifest for the ec-core crate: shared types used across
# every.channel (ids, stream descriptors, manifests, Merkle helpers).
[package]
name = "ec-core"
version = "0.0.0"
edition.workspace = true
license.workspace = true
# serde/serde_json provide the canonical JSON encoding of manifest
# bodies; blake3 is the content-addressing hash used throughout.
[dependencies]
serde.workspace = true
blake3.workspace = true
serde_json.workspace = true

463
crates/ec-core/src/lib.rs Normal file
View file

@ -0,0 +1,463 @@
//! Core types shared across every.channel.
use serde::{Deserialize, Serialize};
use std::fmt;
/// Opaque identifier for a logical channel.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct ChannelId(pub String);
/// Opaque identifier for a capture/playback device.
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct DeviceId(pub String);
/// Opaque identifier for a stream; produced by [`StreamKey::to_stream_id`].
#[derive(Debug, Clone, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct StreamId(pub String);
/// Describes a single stream: identity, display fields, and free-form metadata.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamDescriptor {
pub id: StreamId,
pub title: String,
// Display channel number, kept as a string (e.g. subchannel forms like "7.2").
pub number: Option<String>,
// Originating source description (free-form).
pub source: String,
pub metadata: Vec<StreamMetadata>,
}
/// A single key/value metadata pair attached to a stream.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamMetadata {
pub key: String,
pub value: String,
}
/// Broadcast-derived identity components for a stream.
/// All fields except `standard` are optional; unset fields are simply
/// omitted from the derived stream id.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct BroadcastId {
// Broadcast standard name (e.g. presumably "atsc"/"dvb" — TODO confirm).
pub standard: String,
pub transport_stream_id: Option<u16>,
pub program_number: Option<u16>,
pub callsign: Option<String>,
pub region: Option<String>,
// Frequency kept as a string; units are not fixed by this type.
pub frequency: Option<String>,
}
/// Non-broadcast source identity (e.g. a local device input).
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SourceId {
pub kind: String,
pub device_id: Option<String>,
pub channel: Option<String>,
}
/// Versioned identity key for a stream. Either `broadcast` or `source`
/// (broadcast takes precedence when both are set) plus optional
/// profile/variant qualifiers; rendered into a [`StreamId`] by
/// [`StreamKey::to_stream_id`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamKey {
pub version: u16,
pub broadcast: Option<BroadcastId>,
pub source: Option<SourceId>,
pub profile: Option<String>,
pub variant: Option<String>,
}
impl StreamKey {
    /// Render this key as a stable, slash-delimited [`StreamId`].
    ///
    /// Segment order: the fixed `ec/stream/v{version}` prefix, then
    /// broadcast OR source details (broadcast wins when both are set,
    /// `unknown` when neither is), then optional `profile-*` and
    /// `variant-*` suffixes. Every free-form field passes through
    /// `sanitize` so the id stays path-safe.
    pub fn to_stream_id(&self) -> StreamId {
        let mut segments: Vec<String> = vec![
            "ec".to_string(),
            "stream".to_string(),
            format!("v{}", self.version),
        ];
        if let Some(b) = &self.broadcast {
            segments.push("broadcast".to_string());
            segments.push(sanitize(&b.standard));
            if let Some(tsid) = b.transport_stream_id {
                segments.push(format!("tsid-{tsid}"));
            }
            if let Some(program) = b.program_number {
                segments.push(format!("program-{program}"));
            }
            // Optional string fields share one labelled-push pattern.
            for (label, value) in [
                ("callsign", b.callsign.as_ref()),
                ("region", b.region.as_ref()),
                ("freq", b.frequency.as_ref()),
            ] {
                if let Some(v) = value {
                    segments.push(format!("{label}-{}", sanitize(v)));
                }
            }
        } else if let Some(s) = &self.source {
            segments.push("source".to_string());
            segments.push(sanitize(&s.kind));
            for (label, value) in [
                ("device", s.device_id.as_ref()),
                ("channel", s.channel.as_ref()),
            ] {
                if let Some(v) = value {
                    segments.push(format!("{label}-{}", sanitize(v)));
                }
            }
        } else {
            segments.push("unknown".to_string());
        }
        for (label, value) in [
            ("profile", self.profile.as_ref()),
            ("variant", self.variant.as_ref()),
        ] {
            if let Some(v) = value {
                segments.push(format!("{label}-{}", sanitize(v)));
            }
        }
        StreamId(segments.join("/"))
    }
}
/// A user-visible channel entry: identity, display fields, and
/// structured metadata variants.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Channel {
pub id: ChannelId,
pub name: String,
// Display channel number as a string (supports subchannel forms).
pub number: Option<String>,
// MPEG program number, when sourced from a transport stream.
pub program_id: Option<u16>,
pub metadata: Vec<ChannelMetadata>,
}
/// Lower-case `value` and replace every character outside `[a-z0-9_-]`
/// with `_`, producing a path-safe id segment. Works per `char`, so a
/// multi-byte character collapses to a single underscore.
fn sanitize(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for c in value.chars() {
        if c.is_ascii_lowercase() || c.is_ascii_digit() || c == '-' || c == '_' {
            out.push(c);
        } else if c.is_ascii_uppercase() {
            out.push(c.to_ascii_lowercase());
        } else {
            out.push('_');
        }
    }
    out
}
/// Structured channel metadata; `Extra` carries arbitrary key/value pairs.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub enum ChannelMetadata {
Callsign(String),
Network(String),
Region(String),
Frequency(String),
Extra(String, String),
}
/// A digest over a packet: algorithm name plus hex-encoded value.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct PacketDigest {
pub algorithm: String,
pub hex: String,
}
/// Encoding settings that make output reproducible across nodes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DeterminismProfile {
pub name: String,
pub description: String,
pub encoder: String,
pub encoder_args: Vec<String>,
pub chunk_duration_ms: u64,
}
/// Self-description of a node: identity, human label, and capabilities.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct NodeDescriptor {
pub node_id: String,
pub human_name: String,
pub location_hint: Option<String>,
pub capabilities: Vec<String>,
}
/// How a stream's payload is encrypted: algorithm, key id, nonce scheme.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamEncryptionInfo {
pub alg: String,
pub key_id: String,
pub nonce_scheme: String,
}
/// Where to fetch a stream over MoQ (Media over QUIC), plus optional
/// encryption parameters.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct MoqStreamDescriptor {
pub endpoint: String,
pub broadcast_name: String,
pub track_name: String,
pub encryption: Option<StreamEncryptionInfo>,
}
/// One catalog row: the stream itself plus optional transport and
/// manifest views, stamped with the last-update time.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamCatalogEntry {
pub stream: StreamDescriptor,
pub moq: Option<MoqStreamDescriptor>,
pub manifest: Option<ManifestSummary>,
pub updated_unix_ms: u64,
}
/// A flat collection of catalog entries.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct StreamCatalog {
pub entries: Vec<StreamCatalogEntry>,
}
/// Lightweight view of a [`Manifest`] for catalog listings; produced by
/// [`Manifest::summary`].
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestSummary {
pub manifest_id: String,
pub merkle_root: String,
pub epoch_id: String,
pub total_chunks: u64,
pub chunk_start_index: u64,
pub encoder_profile_id: String,
// Signer ids copied from the manifest's signature list.
pub signed_by: Vec<String>,
}
/// Fully-qualified address of a single chunk within a stream epoch.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ChunkId {
pub stream_id: StreamId,
pub epoch_id: String,
pub chunk_index: u64,
pub chunk_hash: String,
}
/// An alternate rendition of a manifest's chunk range with its own
/// Merkle tree over its own chunk hashes.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestVariant {
pub variant_id: String,
pub stream_id: StreamId,
pub chunk_start_index: u64,
pub total_chunks: u64,
pub merkle_root: String,
pub chunk_hashes: Vec<String>,
#[serde(default)]
pub metadata: Vec<StreamMetadata>,
}
/// The signable payload of a manifest. Its canonical JSON encoding is
/// hashed by [`ManifestBody::manifest_id`], so field order and contents
/// are identity-bearing.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestBody {
pub stream_id: StreamId,
pub epoch_id: String,
pub chunk_duration_ms: u64,
pub total_chunks: u64,
pub chunk_start_index: u64,
pub encoder_profile_id: String,
pub merkle_root: String,
pub created_unix_ms: u64,
pub metadata: Vec<StreamMetadata>,
pub chunk_hashes: Vec<String>,
// Optional alternate renditions; defaults to None on older payloads.
#[serde(default)]
pub variants: Option<Vec<ManifestVariant>>,
}
/// One detached signature over a manifest body.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct ManifestSignature {
pub signer_id: String,
pub alg: String,
pub signature: String,
}
/// A manifest: signable body, its derived id, and collected signatures.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct Manifest {
pub body: ManifestBody,
pub manifest_id: String,
pub signatures: Vec<ManifestSignature>,
}
impl Manifest {
    /// Build the lightweight catalog view of this manifest: ids, Merkle
    /// root, chunk-range counters, and the list of signer ids.
    pub fn summary(&self) -> ManifestSummary {
        let body = &self.body;
        let signed_by: Vec<String> = self
            .signatures
            .iter()
            .map(|sig| sig.signer_id.clone())
            .collect();
        ManifestSummary {
            manifest_id: self.manifest_id.clone(),
            merkle_root: body.merkle_root.clone(),
            epoch_id: body.epoch_id.clone(),
            total_chunks: body.total_chunks,
            chunk_start_index: body.chunk_start_index,
            encoder_profile_id: body.encoder_profile_id.clone(),
            signed_by,
        }
    }
}
/// Errors from the Merkle helpers below.
#[derive(Debug, Clone)]
pub enum ManifestError {
// No chunk hashes were supplied.
Empty,
// A hash string was not valid hex of the expected length (also used
// for out-of-bounds indexes and missing siblings, carrying a message).
InvalidHash(String),
}
impl fmt::Display for ManifestError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ManifestError::Empty => write!(f, "no chunk hashes supplied"),
ManifestError::InvalidHash(value) => write!(f, "invalid chunk hash: {value}"),
}
}
}
impl std::error::Error for ManifestError {}
impl ManifestBody {
    /// Content-address this body: blake3 over its canonical JSON
    /// serialization, returned as lowercase hex. Errors only if JSON
    /// serialization itself fails.
    pub fn manifest_id(&self) -> Result<String, serde_json::Error> {
        serde_json::to_vec(self).map(|bytes| blake3::hash(&bytes).to_hex().to_string())
    }
}
/// Fold hex-encoded blake3 leaf hashes into a Merkle root (lowercase hex).
///
/// Levels with an odd node count duplicate their final node so every
/// parent has two children; a single leaf is its own root.
///
/// # Errors
/// [`ManifestError::Empty`] when `hashes` is empty;
/// [`ManifestError::InvalidHash`] when any leaf is not valid hex.
pub fn merkle_root_from_hashes(hashes: &[String]) -> Result<String, ManifestError> {
    if hashes.is_empty() {
        return Err(ManifestError::Empty);
    }
    let mut level = hashes
        .iter()
        .map(|h| {
            blake3::Hash::from_hex(h.as_bytes()).map_err(|_| ManifestError::InvalidHash(h.clone()))
        })
        .collect::<Result<Vec<_>, _>>()?;
    while level.len() > 1 {
        if level.len() % 2 == 1 {
            // Pad with a copy of the trailing node.
            let tail = level[level.len() - 1];
            level.push(tail);
        }
        level = level
            .chunks(2)
            .map(|pair| {
                // Parent = blake3(left || right) over the raw 32-byte hashes.
                let mut merged = [0u8; 64];
                merged[..32].copy_from_slice(pair[0].as_bytes());
                merged[32..].copy_from_slice(pair[1].as_bytes());
                blake3::hash(&merged)
            })
            .collect();
    }
    Ok(level[0].to_hex().to_string())
}
/// Produce the bottom-up sibling path (hex-encoded) proving that the
/// leaf at `index` belongs to the tree over `hashes`. Uses the same
/// odd-level duplication rule as `merkle_root_from_hashes`, so its
/// output verifies against that root via `verify_merkle_proof`.
///
/// # Errors
/// [`ManifestError::Empty`] for an empty slice; [`ManifestError::InvalidHash`]
/// for a malformed leaf or an out-of-bounds `index`.
pub fn merkle_proof_for_index(
    hashes: &[String],
    index: usize,
) -> Result<Vec<String>, ManifestError> {
    if hashes.is_empty() {
        return Err(ManifestError::Empty);
    }
    if index >= hashes.len() {
        return Err(ManifestError::InvalidHash(format!(
            "index {index} out of bounds"
        )));
    }
    let mut level = hashes
        .iter()
        .map(|h| {
            blake3::Hash::from_hex(h.as_bytes()).map_err(|_| ManifestError::InvalidHash(h.clone()))
        })
        .collect::<Result<Vec<_>, _>>()?;
    let mut proof = Vec::new();
    let mut pos = index;
    while level.len() > 1 {
        if level.len() % 2 == 1 {
            // Pad with a copy of the trailing node.
            let tail = level[level.len() - 1];
            level.push(tail);
        }
        // The sibling of an even position is pos+1, of an odd one pos-1.
        let sibling = level
            .get(pos ^ 1)
            .ok_or_else(|| ManifestError::InvalidHash("missing sibling".to_string()))?;
        proof.push(sibling.to_hex().to_string());
        level = level
            .chunks(2)
            .map(|pair| {
                let mut merged = [0u8; 64];
                merged[..32].copy_from_slice(pair[0].as_bytes());
                merged[32..].copy_from_slice(pair[1].as_bytes());
                blake3::hash(&merged)
            })
            .collect();
        pos >>= 1;
    }
    Ok(proof)
}
/// Recompute the root from `leaf_hash` and the bottom-up sibling
/// `branch`, then compare it against `expected_root`. Malformed hex in
/// the leaf or any branch step fails closed (returns `false`).
pub fn verify_merkle_proof(
    leaf_hash: &str,
    mut index: usize,
    branch: &[String],
    expected_root: &str,
) -> bool {
    let mut current = match blake3::Hash::from_hex(leaf_hash.as_bytes()) {
        Ok(hash) => hash,
        Err(_) => return false,
    };
    for step in branch {
        let sibling = match blake3::Hash::from_hex(step.as_bytes()) {
            Ok(hash) => hash,
            Err(_) => return false,
        };
        // Even positions sit on the left of their parent.
        let (left, right) = if index % 2 == 0 {
            (current, sibling)
        } else {
            (sibling, current)
        };
        let mut merged = [0u8; 64];
        merged[..32].copy_from_slice(left.as_bytes());
        merged[32..].copy_from_slice(right.as_bytes());
        current = blake3::hash(&merged);
        index /= 2;
    }
    current.to_hex().to_string() == expected_root
}
#[cfg(test)]
mod tests {
    use super::*;

    /// Changing any body field must change the derived manifest id.
    #[test]
    fn manifest_id_changes_with_body() {
        let original = ManifestBody {
            stream_id: StreamId("s".to_string()),
            epoch_id: "e".to_string(),
            chunk_duration_ms: 2000,
            total_chunks: 1,
            chunk_start_index: 0,
            encoder_profile_id: "p".to_string(),
            merkle_root: "00".repeat(32),
            created_unix_ms: 1,
            metadata: Vec::new(),
            chunk_hashes: vec!["11".repeat(32)],
            variants: None,
        };
        let mut edited = original.clone();
        edited.created_unix_ms = 2;
        assert_ne!(
            original.manifest_id().unwrap(),
            edited.manifest_id().unwrap()
        );
    }

    /// A one-leaf tree's root is the leaf itself.
    #[test]
    fn merkle_root_single_is_leaf() {
        let only = blake3::hash(b"leaf").to_hex().to_string();
        assert_eq!(merkle_root_from_hashes(&[only.clone()]).unwrap(), only);
    }

    /// Non-hex input is reported as InvalidHash, not a panic.
    #[test]
    fn merkle_root_rejects_invalid_hash() {
        let outcome = merkle_root_from_hashes(&["not-hex".to_string()]);
        assert!(matches!(outcome, Err(ManifestError::InvalidHash(_))));
    }

    /// Every leaf in trees of size 1..=9 (covers odd-padded levels)
    /// verifies against the root via its generated proof.
    #[test]
    fn merkle_proof_roundtrip_small_sets() {
        for size in 1..=9usize {
            let leaves: Vec<String> = (0..size)
                .map(|i| blake3::hash(&[i as u8]).to_hex().to_string())
                .collect();
            let root = merkle_root_from_hashes(&leaves).unwrap();
            for (idx, leaf) in leaves.iter().enumerate() {
                let branch = merkle_proof_for_index(&leaves, idx).unwrap();
                assert!(
                    verify_merkle_proof(leaf, idx, &branch, &root),
                    "size {size} idx {idx} failed"
                );
            }
        }
    }

    /// A corrupted sibling hash must break verification.
    #[test]
    fn merkle_proof_detects_tampering() {
        let leaves: Vec<String> = (0..4usize)
            .map(|i| blake3::hash(&[i as u8]).to_hex().to_string())
            .collect();
        let root = merkle_root_from_hashes(&leaves).unwrap();
        let mut branch = merkle_proof_for_index(&leaves, 2).unwrap();
        branch[0] = blake3::hash(b"evil").to_hex().to_string();
        assert!(!verify_merkle_proof(&leaves[2], 2, &branch, &root));
    }
}