every.channel: sanitized baseline
This commit is contained in:
commit
897e556bea
258 changed files with 74298 additions and 0 deletions
10
scripts/build-web.sh
Executable file
10
scripts/build-web.sh
Executable file
|
|
@ -0,0 +1,10 @@
|
|||
#!/usr/bin/env bash
# Build the web UI with Trunk (release profile) inside the nix dev shell.
set -euo pipefail

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo_root}"

# Work from the Tauri UI crate. Trunk breaks if NO_COLOR is set to "1" in
# some environments, so strip it from the child environment with `env -u`.
cd apps/tauri/ui
exec nix develop --accept-flake-config -c env -u NO_COLOR trunk build --release --public-url /
|
||||
|
||||
23
scripts/coverage-summary.sh
Executable file
23
scripts/coverage-summary.sh
Executable file
|
|
@ -0,0 +1,23 @@
|
|||
#!/usr/bin/env bash
# Print a one-line "lines: hit/found (pct%)" coverage summary for an LCOV file.
set -euo pipefail

lcov="${1:-}"
if [[ -z "${lcov}" || ! -f "${lcov}" ]]; then
  echo "usage: $0 <path/to/file.lcov>" >&2
  exit 2
fi

# LCOV traces record per-file totals as "LF:<n>" (lines found) and "LH:<n>"
# (lines hit); summing them over the whole file yields overall line coverage.
awk '
  /^LF:/ { found += substr($0, 4) }
  /^LH:/ { hit += substr($0, 4) }
  END {
    if (found == 0) {
      printf("lines: 0/0 (0.00%%)\n")
      exit 0
    }
    printf("lines: %d/%d (%.2f%%)\n", hit, found, (hit * 100.0) / found)
  }
' "${lcov}"
|
||||
|
||||
17
scripts/coverage.sh
Executable file
17
scripts/coverage.sh
Executable file
|
|
@ -0,0 +1,17 @@
|
|||
#!/usr/bin/env bash
# Workspace coverage: LCOV trace, one-line summary, and an HTML report.
set -euo pipefail

repo="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo}"

cov_dir="${repo}/tmp/coverage"
mkdir -p "${cov_dir}"

# llvm-cov does not allow --summary-only together with --html, so produce an
# LCOV trace first (summarized via coverage-summary.sh) and then the HTML run.
exec nix develop --accept-flake-config -c bash -lc "
set -euo pipefail
cargo llvm-cov --workspace --lcov --output-path '${cov_dir}/coverage.lcov'
'${repo}/scripts/coverage-summary.sh' '${cov_dir}/coverage.lcov' | tee '${cov_dir}/summary.txt'
cargo llvm-cov --workspace --html --output-dir '${cov_dir}/html'
"
|
||||
30
scripts/deploy-workers.sh
Executable file
30
scripts/deploy-workers.sh
Executable file
|
|
@ -0,0 +1,30 @@
|
|||
#!/usr/bin/env bash
# Build the web UI, then deploy the Cloudflare worker with wrangler.
set -euo pipefail

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo_root}"

# Cloudflare deploy credentials are read from the founder's Vault by default;
# secrets are never committed. Point EVERY_CHANNEL_CF_TOKEN_FILE /
# EVERY_CHANNEL_CF_ACCOUNT_FILE at other files, or export the env vars directly.
token_file="${EVERY_CHANNEL_CF_TOKEN_FILE:-$HOME/Vault/Secrets/ecp-cf-token.txt}"
account_file="${EVERY_CHANNEL_CF_ACCOUNT_FILE:-$HOME/Vault/Secrets/ecp-cf-account.txt}"

# Only fall back to the file when the env var is not already set.
if [[ -z "${CLOUDFLARE_API_TOKEN:-}" && -f "${token_file}" ]]; then
  CLOUDFLARE_API_TOKEN="$(<"${token_file}")"
  export CLOUDFLARE_API_TOKEN
fi

if [[ -z "${CLOUDFLARE_ACCOUNT_ID:-}" && -f "${account_file}" ]]; then
  CLOUDFLARE_ACCOUNT_ID="$(<"${account_file}")"
  export CLOUDFLARE_ACCOUNT_ID
fi

if [[ -z "${CLOUDFLARE_API_TOKEN:-}" ]]; then
  echo "error: CLOUDFLARE_API_TOKEN is not set (set env var or provide ${token_file})" >&2
  exit 2
fi

./scripts/build-web.sh

cd deploy/cloudflare-worker
exec nix develop --accept-flake-config -c bash -lc 'npm ci && wrangler deploy'
|
||||
108
scripts/determinism-cmaf.sh
Executable file
108
scripts/determinism-cmaf.sh
Executable file
|
|
@ -0,0 +1,108 @@
|
|||
#!/usr/bin/env bash
# Verify that the CMAF (HLS fMP4) encode/segment pipeline is byte-for-byte
# deterministic: encode the same input twice with the deterministic x264
# profile and compare sha256 of the init segment and every media segment.
set -euo pipefail

root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${root}"

input=""
seconds="6"      # how much of a URL input to capture locally
chunk_ms="2000"  # HLS segment duration, in milliseconds

usage() {
  cat >&2 <<'EOF'
usage:
  scripts/determinism-cmaf.sh --input <TS_FILE_OR_URL> [--seconds N] [--chunk-ms MS]

what it does:
  - runs the CMAF (HLS fMP4) encoder/segmenter twice with the deterministic x264 profile
  - compares sha256 of init + segments to check byte-for-byte determinism

notes:
  - requires `ffmpeg` on PATH
EOF
}

while [[ $# -gt 0 ]]; do
  case "$1" in
    --input)
      input="${2:-}"
      shift 2
      ;;
    --seconds)
      seconds="${2:-}"
      shift 2
      ;;
    --chunk-ms)
      chunk_ms="${2:-}"
      shift 2
      ;;
    -h|--help)
      usage
      exit 0
      ;;
    *)
      echo "error: unknown arg: $1" >&2
      usage
      exit 2
      ;;
  esac
done

if [[ -z "${input}" ]]; then
  echo "error: --input is required" >&2
  usage
  exit 2
fi

# Fail early on a non-numeric --chunk-ms instead of feeding garbage to ffmpeg.
if [[ ! "${chunk_ms}" =~ ^[0-9]+$ ]]; then
  echo "error: --chunk-ms must be an integer (got: ${chunk_ms})" >&2
  exit 2
fi

tmp="$(mktemp -d)"
trap 'rm -rf "$tmp"' EXIT

# For URL inputs, capture a bounded slice to a local TS file first so both
# encode runs read exactly the same bytes.
ts="$input"
if [[ "${input}" == http://* || "${input}" == https://* ]]; then
  ts="$tmp/in.ts"
  ffmpeg -hide_banner -loglevel error -nostdin -y \
    -t "${seconds}" -i "${input}" -c copy -f mpegts "${ts}"
fi

# Convert milliseconds to fractional seconds for -hls_time (2000 -> "2.000").
# awk keeps this dependency-free (previously this shelled out to python3 with
# the value interpolated into the program text).
seg_secs="$(awk -v ms="${chunk_ms}" 'BEGIN { printf "%.3f", ms / 1000 }')"

# Encode $ts once into the directory given as $1, using the deterministic
# profile: single-threaded x264, fixed GOP, no scene-cut, bitexact flags.
run_one() {
  local out="$1"
  rm -rf "$out"
  mkdir -p "$out"
  (cd "$out" && ffmpeg -hide_banner -loglevel error -nostdin -y \
    -i "pipe:0" \
    -map 0:v:0 -map 0:a:0? -sn -dn -map_metadata -1 \
    -c:v libx264 \
    -c:a aac -b:a 128k -ac 2 -ar 48000 \
    -pix_fmt yuv420p -g 60 -keyint_min 60 -sc_threshold 0 -bf 0 \
    -threads 1 \
    -fflags +bitexact -flags:v +bitexact -flags:a +bitexact \
    -f hls \
    -hls_time "${seg_secs}" \
    -hls_list_size 0 \
    -hls_segment_type fmp4 \
    -hls_flags independent_segments \
    -hls_fmp4_init_filename init.mp4 \
    -hls_segment_filename segment_%06d.m4s \
    index.m3u8 < "$ts")
}

run_one "$tmp/a"
run_one "$tmp/b"

# Hash init + all segments from each run, sorted for a stable comparison.
# NOTE(review): `shasum` is used for macOS compatibility; Linux hosts need
# perl's shasum on PATH — confirm for CI images.
(cd "$tmp/a" && shasum -a 256 init.mp4 segment_*.m4s | sort) > "$tmp/a.sha"
(cd "$tmp/b" && shasum -a 256 init.mp4 segment_*.m4s | sort) > "$tmp/b.sha"

if diff -u "$tmp/a.sha" "$tmp/b.sha" >/dev/null; then
  echo "OK: deterministic (init + segments match)"
else
  echo "DIFF: non-deterministic output"
  diff -u "$tmp/a.sha" "$tmp/b.sha" || true
  exit 1
fi
|
||||
|
||||
53
scripts/e2e-hdhr.sh
Executable file
53
scripts/e2e-hdhr.sh
Executable file
|
|
@ -0,0 +1,53 @@
|
|||
#!/usr/bin/env bash
# Run the (normally ignored) HDHomeRun E2E test against a real tuner.
set -euo pipefail

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo_root}"

hdhr_host="${EVERY_CHANNEL_E2E_HDHR_HOST:-}"
hdhr_channel="${EVERY_CHANNEL_E2E_HDHR_CHANNEL:-}"

usage() {
  cat >&2 <<'EOF'
usage:
  scripts/e2e-hdhr.sh --host <HDHR_HOST> --channel <CHANNEL>

notes:
  - runs inside `nix develop` so FFmpeg headers are present for ac-ffmpeg
  - test is ignored by default; this script enables it
EOF
}

# CLI flags override the EVERY_CHANNEL_E2E_HDHR_* environment defaults above.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --host) hdhr_host="${2:-}"; shift 2 ;;
    --channel) hdhr_channel="${2:-}"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *)
      echo "error: unknown arg: $1" >&2
      usage
      exit 2
      ;;
  esac
done

if [[ -z "${hdhr_host}" || -z "${hdhr_channel}" ]]; then
  echo "error: --host and --channel are required" >&2
  usage
  exit 2
fi

export EVERY_CHANNEL_E2E_HDHR_HOST="${hdhr_host}"
export EVERY_CHANNEL_E2E_HDHR_CHANNEL="${hdhr_channel}"

nix develop --accept-flake-config -c \
  bash -lc 'cargo test -p ec-node --test e2e_hdhr -- --ignored --nocapture'
|
||||
57
scripts/e2e-mesh-split-cmaf.sh
Executable file
57
scripts/e2e-mesh-split-cmaf.sh
Executable file
|
|
@ -0,0 +1,57 @@
|
|||
#!/usr/bin/env bash
# Run the (normally ignored) split-mesh CMAF E2E test.
set -euo pipefail

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo_root}"

hdhr_host="${EVERY_CHANNEL_E2E_HDHR_HOST:-}"
hdhr_channel="${EVERY_CHANNEL_E2E_HDHR_CHANNEL:-}"

usage() {
  cat >&2 <<'EOF'
usage:
  scripts/e2e-mesh-split-cmaf.sh --host <HDHR_HOST> --channel <CHANNEL>

what it does:
  - runs an ignored E2E test that publishes CMAF init+segments as objects from one peer
  - publishes manifests from another peer
  - subscriber stitches the two streams and enforces --require-manifest

notes:
  - runs inside `nix develop`
EOF
}

# CLI flags override the EVERY_CHANNEL_E2E_HDHR_* environment defaults above.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --host) hdhr_host="${2:-}"; shift 2 ;;
    --channel) hdhr_channel="${2:-}"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *)
      echo "error: unknown arg: $1" >&2
      usage
      exit 2
      ;;
  esac
done

if [[ -z "${hdhr_host}" || -z "${hdhr_channel}" ]]; then
  echo "error: --host and --channel are required" >&2
  usage
  exit 2
fi

export EVERY_CHANNEL_E2E_HDHR_HOST="${hdhr_host}"
export EVERY_CHANNEL_E2E_HDHR_CHANNEL="${hdhr_channel}"

nix develop --accept-flake-config -c \
  bash -lc 'cargo test -p ec-node --test e2e_mesh_split_cmaf -- --ignored --nocapture'
|
||||
56
scripts/e2e-mesh-split.sh
Executable file
56
scripts/e2e-mesh-split.sh
Executable file
|
|
@ -0,0 +1,56 @@
|
|||
#!/usr/bin/env bash
# Run the (normally ignored) split-mesh E2E test.
set -euo pipefail

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo_root}"

hdhr_host="${EVERY_CHANNEL_E2E_HDHR_HOST:-}"
hdhr_channel="${EVERY_CHANNEL_E2E_HDHR_CHANNEL:-}"

usage() {
  cat >&2 <<'EOF'
usage:
  scripts/e2e-mesh-split.sh --host <HDHR_HOST> --channel <CHANNEL>

what it does:
  - runs an ignored E2E test that publishes manifests from one peer and objects from another
  - subscriber stitches the two streams and enforces --require-manifest

notes:
  - runs inside `nix develop` so FFmpeg headers are present for ac-ffmpeg
EOF
}

# CLI flags override the EVERY_CHANNEL_E2E_HDHR_* environment defaults above.
while [[ $# -gt 0 ]]; do
  case "$1" in
    --host) hdhr_host="${2:-}"; shift 2 ;;
    --channel) hdhr_channel="${2:-}"; shift 2 ;;
    -h|--help) usage; exit 0 ;;
    *)
      echo "error: unknown arg: $1" >&2
      usage
      exit 2
      ;;
  esac
done

if [[ -z "${hdhr_host}" || -z "${hdhr_channel}" ]]; then
  echo "error: --host and --channel are required" >&2
  usage
  exit 2
fi

export EVERY_CHANNEL_E2E_HDHR_HOST="${hdhr_host}"
export EVERY_CHANNEL_E2E_HDHR_CHANNEL="${hdhr_channel}"

nix develop --accept-flake-config -c \
  bash -lc 'cargo test -p ec-node --test e2e_mesh_split -- --ignored --nocapture'
|
||||
11
scripts/e2e-remote-website-direct.sh
Executable file
11
scripts/e2e-remote-website-direct.sh
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env bash
# Run the ignored Rust E2E that drives https://every.channel via headless
# Chrome and connects to a local `ec-node direct-publish` process.
set -euo pipefail

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo_root}"

exec nix develop --accept-flake-config -c \
  bash -lc 'cargo test -p ec-node --test e2e_remote_website_direct -- --ignored --nocapture'
|
||||
|
||||
11
scripts/e2e-remote-website-directory.sh
Executable file
11
scripts/e2e-remote-website-directory.sh
Executable file
|
|
@ -0,0 +1,11 @@
|
|||
#!/usr/bin/env bash
# Run the ignored Rust E2E that drives https://every.channel via headless
# Chrome and connects to a local `ec-node direct-publish --directory-url ...`
# process.
set -euo pipefail

repo_root="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"
cd "${repo_root}"

exec nix develop --accept-flake-config -c \
  bash -lc 'cargo test -p ec-node --test e2e_remote_website_directory -- --ignored --nocapture'
|
||||
|
||||
34
scripts/vendor-yt-dlp.sh
Executable file
34
scripts/vendor-yt-dlp.sh
Executable file
|
|
@ -0,0 +1,34 @@
|
|||
#!/usr/bin/env bash
# Create a per-platform virtualenv under apps/tauri/resources/yt-dlp and
# install the latest yt-dlp into it, so the app can bundle it as a resource.
set -euo pipefail

ROOT="$(cd "$(dirname "${BASH_SOURCE[0]}")/.." && pwd)"

case "$(uname -s)" in
  Darwin) platform="macos" ;;
  Linux) platform="linux" ;;
  MINGW*|MSYS*|CYGWIN*) platform="windows" ;;
  *) platform="unknown" ;;
esac

if [[ "${platform}" == "unknown" ]]; then
  echo "Unsupported platform for bundling yt-dlp" >&2
  exit 1
fi

DEST="${ROOT}/apps/tauri/resources/yt-dlp/${platform}/venv"

# Windows virtualenvs place executables under Scripts/, not bin/.
bin_dir="bin"
if [[ "${platform}" == "windows" ]]; then
  bin_dir="Scripts"
fi

if command -v uv >/dev/null 2>&1; then
  # --seed is required: by default `uv venv` creates the environment without
  # pip, which would break the `python -m pip` invocations below.
  uv venv "${DEST}" --python 3.12 --seed
else
  if ! command -v python3 >/dev/null 2>&1; then
    echo "python3 not found; install python or uv" >&2
    exit 1
  fi
  python3 -m venv "${DEST}"
fi

# Both creation paths converge here; install/upgrade once.
venv_python="${DEST}/${bin_dir}/python"
"${venv_python}" -m pip install --upgrade pip
"${venv_python}" -m pip install --upgrade yt-dlp

echo "yt-dlp bundled at ${DEST}"
|
||||
Loading…
Add table
Add a link
Reference in a new issue