every.channel: sanitized baseline
This commit is contained in:
commit
897e556bea
258 changed files with 74298 additions and 0 deletions
7
third_party/iroh-live/.cargo/config.toml
vendored
Normal file
7
third_party/iroh-live/.cargo/config.toml
vendored
Normal file
|
|
@ -0,0 +1,7 @@
|
|||
[env]
|
||||
SDKROOT = "/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk"
|
||||
LLVM_CONFIG_PATH = "/opt/homebrew/opt/llvm/bin/llvm-config"
|
||||
BINDGEN_EXTRA_CLANG_ARGS = "-I/Applications/Xcode.app/Contents/Developer/Platforms/MacOSX.platform/Developer/SDKs/MacOSX.sdk/usr/include"
|
||||
|
||||
[target.aarch64-apple-darwin]
|
||||
rustflags = []
|
||||
3
third_party/iroh-live/.gitignore
vendored
Normal file
3
third_party/iroh-live/.gitignore
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
/target
|
||||
SANDBOX
|
||||
target
|
||||
9665
third_party/iroh-live/Cargo.lock
generated
vendored
Normal file
9665
third_party/iroh-live/Cargo.lock
generated
vendored
Normal file
File diff suppressed because it is too large
Load diff
11
third_party/iroh-live/Cargo.toml
vendored
Normal file
11
third_party/iroh-live/Cargo.toml
vendored
Normal file
|
|
@ -0,0 +1,11 @@
|
|||
[workspace]
|
||||
members = [
|
||||
"iroh-live",
|
||||
"iroh-moq",
|
||||
"moq-media",
|
||||
"web-transport-iroh",
|
||||
]
|
||||
resolver = "2"
|
||||
|
||||
[profile.release]
|
||||
debug = true
|
||||
201
third_party/iroh-live/LICENSE-APACHE
vendored
Normal file
201
third_party/iroh-live/LICENSE-APACHE
vendored
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [2025] [N0, INC]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
201
third_party/iroh-live/LICENSE-MIT
vendored
Normal file
201
third_party/iroh-live/LICENSE-MIT
vendored
Normal file
|
|
@ -0,0 +1,201 @@
|
|||
Apache License
|
||||
Version 2.0, January 2004
|
||||
http://www.apache.org/licenses/
|
||||
|
||||
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
||||
|
||||
1. Definitions.
|
||||
|
||||
"License" shall mean the terms and conditions for use, reproduction,
|
||||
and distribution as defined by Sections 1 through 9 of this document.
|
||||
|
||||
"Licensor" shall mean the copyright owner or entity authorized by
|
||||
the copyright owner that is granting the License.
|
||||
|
||||
"Legal Entity" shall mean the union of the acting entity and all
|
||||
other entities that control, are controlled by, or are under common
|
||||
control with that entity. For the purposes of this definition,
|
||||
"control" means (i) the power, direct or indirect, to cause the
|
||||
direction or management of such entity, whether by contract or
|
||||
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
||||
outstanding shares, or (iii) beneficial ownership of such entity.
|
||||
|
||||
"You" (or "Your") shall mean an individual or Legal Entity
|
||||
exercising permissions granted by this License.
|
||||
|
||||
"Source" form shall mean the preferred form for making modifications,
|
||||
including but not limited to software source code, documentation
|
||||
source, and configuration files.
|
||||
|
||||
"Object" form shall mean any form resulting from mechanical
|
||||
transformation or translation of a Source form, including but
|
||||
not limited to compiled object code, generated documentation,
|
||||
and conversions to other media types.
|
||||
|
||||
"Work" shall mean the work of authorship, whether in Source or
|
||||
Object form, made available under the License, as indicated by a
|
||||
copyright notice that is included in or attached to the work
|
||||
(an example is provided in the Appendix below).
|
||||
|
||||
"Derivative Works" shall mean any work, whether in Source or Object
|
||||
form, that is based on (or derived from) the Work and for which the
|
||||
editorial revisions, annotations, elaborations, or other modifications
|
||||
represent, as a whole, an original work of authorship. For the purposes
|
||||
of this License, Derivative Works shall not include works that remain
|
||||
separable from, or merely link (or bind by name) to the interfaces of,
|
||||
the Work and Derivative Works thereof.
|
||||
|
||||
"Contribution" shall mean any work of authorship, including
|
||||
the original version of the Work and any modifications or additions
|
||||
to that Work or Derivative Works thereof, that is intentionally
|
||||
submitted to Licensor for inclusion in the Work by the copyright owner
|
||||
or by an individual or Legal Entity authorized to submit on behalf of
|
||||
the copyright owner. For the purposes of this definition, "submitted"
|
||||
means any form of electronic, verbal, or written communication sent
|
||||
to the Licensor or its representatives, including but not limited to
|
||||
communication on electronic mailing lists, source code control systems,
|
||||
and issue tracking systems that are managed by, or on behalf of, the
|
||||
Licensor for the purpose of discussing and improving the Work, but
|
||||
excluding communication that is conspicuously marked or otherwise
|
||||
designated in writing by the copyright owner as "Not a Contribution."
|
||||
|
||||
"Contributor" shall mean Licensor and any individual or Legal Entity
|
||||
on behalf of whom a Contribution has been received by Licensor and
|
||||
subsequently incorporated within the Work.
|
||||
|
||||
2. Grant of Copyright License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
copyright license to reproduce, prepare Derivative Works of,
|
||||
publicly display, publicly perform, sublicense, and distribute the
|
||||
Work and such Derivative Works in Source or Object form.
|
||||
|
||||
3. Grant of Patent License. Subject to the terms and conditions of
|
||||
this License, each Contributor hereby grants to You a perpetual,
|
||||
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
||||
(except as stated in this section) patent license to make, have made,
|
||||
use, offer to sell, sell, import, and otherwise transfer the Work,
|
||||
where such license applies only to those patent claims licensable
|
||||
by such Contributor that are necessarily infringed by their
|
||||
Contribution(s) alone or by combination of their Contribution(s)
|
||||
with the Work to which such Contribution(s) was submitted. If You
|
||||
institute patent litigation against any entity (including a
|
||||
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
||||
or a Contribution incorporated within the Work constitutes direct
|
||||
or contributory patent infringement, then any patent licenses
|
||||
granted to You under this License for that Work shall terminate
|
||||
as of the date such litigation is filed.
|
||||
|
||||
4. Redistribution. You may reproduce and distribute copies of the
|
||||
Work or Derivative Works thereof in any medium, with or without
|
||||
modifications, and in Source or Object form, provided that You
|
||||
meet the following conditions:
|
||||
|
||||
(a) You must give any other recipients of the Work or
|
||||
Derivative Works a copy of this License; and
|
||||
|
||||
(b) You must cause any modified files to carry prominent notices
|
||||
stating that You changed the files; and
|
||||
|
||||
(c) You must retain, in the Source form of any Derivative Works
|
||||
that You distribute, all copyright, patent, trademark, and
|
||||
attribution notices from the Source form of the Work,
|
||||
excluding those notices that do not pertain to any part of
|
||||
the Derivative Works; and
|
||||
|
||||
(d) If the Work includes a "NOTICE" text file as part of its
|
||||
distribution, then any Derivative Works that You distribute must
|
||||
include a readable copy of the attribution notices contained
|
||||
within such NOTICE file, excluding those notices that do not
|
||||
pertain to any part of the Derivative Works, in at least one
|
||||
of the following places: within a NOTICE text file distributed
|
||||
as part of the Derivative Works; within the Source form or
|
||||
documentation, if provided along with the Derivative Works; or,
|
||||
within a display generated by the Derivative Works, if and
|
||||
wherever such third-party notices normally appear. The contents
|
||||
of the NOTICE file are for informational purposes only and
|
||||
do not modify the License. You may add Your own attribution
|
||||
notices within Derivative Works that You distribute, alongside
|
||||
or as an addendum to the NOTICE text from the Work, provided
|
||||
that such additional attribution notices cannot be construed
|
||||
as modifying the License.
|
||||
|
||||
You may add Your own copyright statement to Your modifications and
|
||||
may provide additional or different license terms and conditions
|
||||
for use, reproduction, or distribution of Your modifications, or
|
||||
for any such Derivative Works as a whole, provided Your use,
|
||||
reproduction, and distribution of the Work otherwise complies with
|
||||
the conditions stated in this License.
|
||||
|
||||
5. Submission of Contributions. Unless You explicitly state otherwise,
|
||||
any Contribution intentionally submitted for inclusion in the Work
|
||||
by You to the Licensor shall be under the terms and conditions of
|
||||
this License, without any additional terms or conditions.
|
||||
Notwithstanding the above, nothing herein shall supersede or modify
|
||||
the terms of any separate license agreement you may have executed
|
||||
with Licensor regarding such Contributions.
|
||||
|
||||
6. Trademarks. This License does not grant permission to use the trade
|
||||
names, trademarks, service marks, or product names of the Licensor,
|
||||
except as required for reasonable and customary use in describing the
|
||||
origin of the Work and reproducing the content of the NOTICE file.
|
||||
|
||||
7. Disclaimer of Warranty. Unless required by applicable law or
|
||||
agreed to in writing, Licensor provides the Work (and each
|
||||
Contributor provides its Contributions) on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
||||
implied, including, without limitation, any warranties or conditions
|
||||
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
||||
PARTICULAR PURPOSE. You are solely responsible for determining the
|
||||
appropriateness of using or redistributing the Work and assume any
|
||||
risks associated with Your exercise of permissions under this License.
|
||||
|
||||
8. Limitation of Liability. In no event and under no legal theory,
|
||||
whether in tort (including negligence), contract, or otherwise,
|
||||
unless required by applicable law (such as deliberate and grossly
|
||||
negligent acts) or agreed to in writing, shall any Contributor be
|
||||
liable to You for damages, including any direct, indirect, special,
|
||||
incidental, or consequential damages of any character arising as a
|
||||
result of this License or out of the use or inability to use the
|
||||
Work (including but not limited to damages for loss of goodwill,
|
||||
work stoppage, computer failure or malfunction, or any and all
|
||||
other commercial damages or losses), even if such Contributor
|
||||
has been advised of the possibility of such damages.
|
||||
|
||||
9. Accepting Warranty or Additional Liability. While redistributing
|
||||
the Work or Derivative Works thereof, You may choose to offer,
|
||||
and charge a fee for, acceptance of support, warranty, indemnity,
|
||||
or other liability obligations and/or rights consistent with this
|
||||
License. However, in accepting such obligations, You may act only
|
||||
on Your own behalf and on Your sole responsibility, not on behalf
|
||||
of any other Contributor, and only if You agree to indemnify,
|
||||
defend, and hold each Contributor harmless for any liability
|
||||
incurred by, or claims asserted against, such Contributor by reason
|
||||
of your accepting any such warranty or additional liability.
|
||||
|
||||
END OF TERMS AND CONDITIONS
|
||||
|
||||
APPENDIX: How to apply the Apache License to your work.
|
||||
|
||||
To apply the Apache License to your work, attach the following
|
||||
boilerplate notice, with the fields enclosed by brackets "[]"
|
||||
replaced with your own identifying information. (Don't include
|
||||
the brackets!) The text should be enclosed in the appropriate
|
||||
comment syntax for the file format. We also recommend that a
|
||||
file or class name and description of purpose be included on the
|
||||
same "printed page" as the copyright notice for easier
|
||||
identification within third-party archives.
|
||||
|
||||
Copyright [2025] [N0, INC]
|
||||
|
||||
Licensed under the Apache License, Version 2.0 (the "License");
|
||||
you may not use this file except in compliance with the License.
|
||||
You may obtain a copy of the License at
|
||||
|
||||
http://www.apache.org/licenses/LICENSE-2.0
|
||||
|
||||
Unless required by applicable law or agreed to in writing, software
|
||||
distributed under the License is distributed on an "AS IS" BASIS,
|
||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
See the License for the specific language governing permissions and
|
||||
limitations under the License.
|
||||
28
third_party/iroh-live/Makefile.toml
vendored
Normal file
28
third_party/iroh-live/Makefile.toml
vendored
Normal file
|
|
@ -0,0 +1,28 @@
|
|||
# Use cargo-make to run tasks here: https://crates.io/crates/cargo-make
|
||||
|
||||
[tasks.format]
|
||||
workspace = false
|
||||
command = "cargo"
|
||||
args = [
|
||||
"fmt",
|
||||
"--all",
|
||||
"--",
|
||||
"--config",
|
||||
"unstable_features=true",
|
||||
"--config",
|
||||
"imports_granularity=Crate,group_imports=StdExternalCrate,reorder_imports=true,format_code_in_doc_comments=true",
|
||||
]
|
||||
|
||||
[tasks.format-check]
|
||||
workspace = false
|
||||
command = "cargo"
|
||||
args = [
|
||||
"fmt",
|
||||
"--all",
|
||||
"--check",
|
||||
"--",
|
||||
"--config",
|
||||
"unstable_features=true",
|
||||
"--config",
|
||||
"imports_granularity=Crate,group_imports=StdExternalCrate,reorder_imports=true,format_code_in_doc_comments=true",
|
||||
]
|
||||
72
third_party/iroh-live/README.md
vendored
Normal file
72
third_party/iroh-live/README.md
vendored
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
# iroh-live
|
||||
|
||||
Livestreaming video and audio over iroh
|
||||
|
||||
**Status: experimental / work in progress**
|
||||
|
||||
This repository implements a live streaming protocol over iroh with [Media over Quic](https://moq.dev/).
|
||||
It uses [moq-rs](https://github.com/kixelated/moq) to transfer audio and video streams over iroh connections.
|
||||
|
||||
## Structure of the repository
|
||||
|
||||
* [**`web-transport-iroh`**](web-transport-iroh): Implements the [web-transport](https://github.com/kixelated/web-transport) traits for iroh connections
|
||||
* [**`iroh-moq`**](iroh-moq): Adapters to create and accept [moq-lite](https://github.com/kixelated/moq/tree/main/rs/moq) sessions over iroh
|
||||
* [**`iroh-live`**](iroh-live): Native capture, encoding and decoding of audio and video. This is an early preview of a high-level live streaming toolkit for iroh. Currently, it has these features, all subject to change:
|
||||
* Support for [hang](https://github.com/kixelated/moq/blob/main/rs/hang/) catalogs in MoQ sessions
|
||||
* Capture and playout audio (with [firewheel](https://github.com/BillyDM/Firewheel/))
|
||||
* Capture camera (with [nokwha](https://github.com/l1npengtul/nokhwa/))
|
||||
* Capture screens (with [xcap](https://github.com/nashaofu/xcap/))
|
||||
* Encode and decode video (h264) and audio (Opus) using [ffmpeg](https://docs.rs/ffmpeg-next/latest/ffmpeg_next/). Video encoding is hardware accelerated on supported platforms.
|
||||
* Support multiple renditions and on-demand switching of the encoding
|
||||
|
||||
There's still bugs and a lot of missing optimizations. This is an early, work-in-progress preview!
|
||||
|
||||
## Building
|
||||
|
||||
By default `ffmpeg` is dynamically linked. Enable the `static` feature to build ffmpeg from source and statically link it.
|
||||
|
||||
#### Build dependencies
|
||||
|
||||
*incomplete list, please file issues or PRs to expand this*
|
||||
|
||||
##### Linux
|
||||
|
||||
* For building with `static` feature: `apt install nasm pkg-config`
|
||||
|
||||
## Demo and examples
|
||||
|
||||
Check out the [`rooms`](iroh-live/examples/rooms.rs) example:
|
||||
|
||||
```
|
||||
cargo run --release --example rooms
|
||||
```
|
||||
|
||||
This will print a *room ticket*. Copy this to another device, and run:
|
||||
```
|
||||
cargo run --release --example rooms -- <TICKET>
|
||||
```
|
||||
|
||||
Now you're chatting! With video and audio! Over iroh!
|
||||
|
||||
Use the `room-publish-file` example to publish a video form a file into a room.
|
||||
|
||||
There's also a [`publish`](iroh-live/examples/publish.rs) example (publish only, no GUI), and a [`watch`](iroh-live/examples/watch.rs) example (watch a stream from the publish example).
|
||||
|
||||
The examples use [`egui`](https://github.com/emilk/egui), however `iroh-live` is not coupled to any GUI framework and should work with anything that can render raw images to the screen.
|
||||
|
||||
## License
|
||||
|
||||
Copyright 2025 N0, INC.
|
||||
|
||||
This project is licensed under either of
|
||||
|
||||
* Apache License, Version 2.0, ([LICENSE-APACHE](LICENSE-APACHE) or
|
||||
http://www.apache.org/licenses/LICENSE-2.0)
|
||||
* MIT license ([LICENSE-MIT](LICENSE-MIT) or
|
||||
http://opensource.org/licenses/MIT)
|
||||
|
||||
at your option.
|
||||
|
||||
## Contribution
|
||||
|
||||
Unless you explicitly state otherwise, any contribution intentionally submitted for inclusion in this project by you, as defined in the Apache-2.0 license, shall be dual licensed as above, without any additional terms or conditions.
|
||||
94
third_party/iroh-live/iroh-live/Cargo.toml
vendored
Normal file
94
third_party/iroh-live/iroh-live/Cargo.toml
vendored
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
[package]
|
||||
name = "iroh-live"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
description = "audio and video live streaming over iroh"
|
||||
authors = ["Franz Heinzmann <frando@n0.computer>"]
|
||||
repository = "https://github.com/n0-computer/iroh-live"
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[dependencies]
|
||||
anyhow = "1.0.100"
|
||||
bytemuck = "1.24.0"
byte-unit = { version = "5.1", features = ["bit"] }
data-encoding = "2.9.0"
derive_more = { version = "2.0.1", features = ["display", "debug", "eq"] }
ffmpeg-next = { version = "8.0.0", default-features = false, features = ["device", "format", "filter", "software-resampling", "software-scaling"] }
ffmpeg-sys-next = { version = "8.0.1", optional = true }
firewheel = { version = "0.9.1", features = ["cpal", "peak_meter_node", "std", "stream_nodes", "cpal_resample_inputs"] }
hang = "0.9.0"
image = { version = "0.25.8", default-features = false }
iroh = "0.95.1"
iroh-gossip = "0.95.0"
iroh-moq = { path = "../iroh-moq" }
iroh-tickets = "0.2.0"
moq-lite = "0.10.1"
moq-media = { version = "0.1.0", path = "../moq-media" }
n0-error = { version = "0.1.2", features = ["anyhow"] }
n0-future = "0.3.1"
n0-watcher = "0.6.0"
nokhwa = { version = "0.10", features = [
    "input-native",
    "input-v4l",
    "output-threaded",
] }
postcard = "1.1.3"
rand = "0.9.2"
serde = { version = "1.0.228", features = ["derive"] }
strum = { version = "0.27", features = ["derive"] }
tokio = { version = "1.48.0", features = ["sync"] }
tokio-util = "0.7.17"
tracing = "0.1.41"
xcap = "0.8"
webrtc-audio-processing = { version = "0.5.0", features = ["bundled"] }
bytes = "1.11.0"
# Git dependency pinned to the iroh-0.95-compatible branch.
iroh-smol-kv = { git = "https://github.com/Frando/iroh-smol-kv", branch = "iroh-095", version = "0.3.1", default-features = false }
buf-list = "1.1.2"

# Only used by the examples.
[dev-dependencies]
clap = { version = "4.5", features = ["derive"] }
eframe = "0.33.0"
postcard = "1.1.3"
tokio = { version = "1.48.0", features = ["full"] }
tracing-subscriber = "0.3.20"

[features]
default = []

# Enable static build of ffmpeg
static = [
    "ffmpeg-next/static",
    "ffmpeg-next/build-lib-openssl",
    "ffmpeg-next/build-license-version3",
    "ffmpeg-next/build-lib-opus",
    "ffmpeg-next/build-lib-x264",
    "ffmpeg-next/build-license-gpl",
    "dep:ffmpeg-sys-next",
]

# Platform-specific hardware codec support for the static ffmpeg build.
[target.'cfg(target_os = "macos")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-videotoolbox",
    "build-audiotoolbox",
] }

[target.'cfg(target_os = "linux")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-vaapi",
    # "build-vulkan",
    # "build-lib-libmfx",
] }

[target.'cfg(target_os = "windows")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-lib-d3d11va",
    "build-lib-dxva2",
    # "build-nvidia",
    # "build-amf",
] }

[target.'cfg(target_os = "android")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    # "build-mediacodec",
] }
|
||||
3
third_party/iroh-live/iroh-live/README.md
vendored
Normal file
3
third_party/iroh-live/iroh-live/README.md
vendored
Normal file
|
|
@ -0,0 +1,3 @@
|
|||
# iroh-live
|
||||
|
||||
See [../README.md](../README.md)
|
||||
143
third_party/iroh-live/iroh-live/examples/common/import.rs
vendored
Normal file
143
third_party/iroh-live/iroh-live/examples/common/import.rs
vendored
Normal file
|
|
@ -0,0 +1,143 @@
|
|||
use std::{
|
||||
path::{Path, PathBuf},
|
||||
process::Stdio,
|
||||
};
|
||||
|
||||
use bytes::BytesMut;
|
||||
use clap::ValueEnum;
|
||||
use moq_lite::BroadcastProducer;
|
||||
use n0_error::Result;
|
||||
use tokio::{
|
||||
io::{AsyncRead, AsyncReadExt},
|
||||
process::Command,
|
||||
};
|
||||
use tracing::info;
|
||||
|
||||
#[derive(ValueEnum, Debug, Clone, Default, Copy)]
|
||||
pub enum ImportType {
|
||||
#[default]
|
||||
Cmaf,
|
||||
AnnexB,
|
||||
}
|
||||
|
||||
impl ImportType {
|
||||
pub fn as_str(&self) -> &'static str {
|
||||
match self {
|
||||
ImportType::AnnexB => "annex-b",
|
||||
ImportType::Cmaf => "cmaf",
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Taken from
|
||||
// https://github.com/moq-dev/moq/blob/30c28b8c3b6bd941fe1279c0fd8855139a1d4f6a/rs/hang-cli/src/import.rs
|
||||
// License: Apache-2.0
|
||||
pub struct Import {
|
||||
decoder: hang::import::Decoder,
|
||||
buffer: BytesMut,
|
||||
}
|
||||
|
||||
impl Import {
|
||||
pub fn new(broadcast: BroadcastProducer, format: ImportType) -> Self {
|
||||
let decoder = hang::import::Decoder::new(broadcast.into(), format.as_str())
|
||||
.expect("supported format");
|
||||
Self {
|
||||
decoder,
|
||||
buffer: BytesMut::new(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Import {
|
||||
pub async fn init_from<T: AsyncRead + Unpin>(&mut self, input: &mut T) -> anyhow::Result<()> {
|
||||
while !self.decoder.is_initialized() && input.read_buf(&mut self.buffer).await? > 0 {
|
||||
self.decoder.decode_stream(&mut self.buffer)?;
|
||||
}
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn read_from<T: AsyncRead + Unpin>(&mut self, input: &mut T) -> anyhow::Result<()> {
|
||||
while input.read_buf(&mut self.buffer).await? > 0 {
|
||||
self.decoder.decode_stream(&mut self.buffer)?;
|
||||
}
|
||||
|
||||
// Flush the final frame.
|
||||
self.decoder.decode_frame(&mut self.buffer, None)
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn transcode(input: PathBuf, format: ImportType) -> Result<impl AsyncRead> {
|
||||
let copy_video = is_h264(&input).await?;
|
||||
|
||||
let mut cmd = Command::new("ffmpeg");
|
||||
cmd.args([
|
||||
"-hide_banner",
|
||||
"-loglevel",
|
||||
"error",
|
||||
"-stream_loop",
|
||||
"-1",
|
||||
"-re",
|
||||
"-i",
|
||||
]);
|
||||
cmd.arg(input.as_os_str());
|
||||
|
||||
if copy_video {
|
||||
info!("input is h264, copy video");
|
||||
cmd.args(["-c:v", "copy"]);
|
||||
} else {
|
||||
info!("input is not h264, transcode");
|
||||
cmd.args(["-c:v", "libx264", "-pix_fmt", "yuv420p"]);
|
||||
}
|
||||
|
||||
match format {
|
||||
ImportType::Cmaf => {
|
||||
cmd.args(["-c:a", "libopus", "-b:a", "128k"]);
|
||||
cmd.args([
|
||||
"-movflags",
|
||||
"cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame",
|
||||
"-f",
|
||||
"mp4",
|
||||
]);
|
||||
}
|
||||
ImportType::AnnexB => {
|
||||
cmd.args([
|
||||
"-a",
|
||||
"n",
|
||||
"-bsf:v",
|
||||
"h264_mp4toannexb",
|
||||
"-f",
|
||||
"h264",
|
||||
"-movflags",
|
||||
"cmaf+separate_moof+delay_moov+skip_trailer+frag_every_frame",
|
||||
"-f",
|
||||
"mp4",
|
||||
]);
|
||||
}
|
||||
}
|
||||
cmd.arg("-");
|
||||
|
||||
let mut child = cmd.stdout(Stdio::piped()).spawn()?;
|
||||
let stdout = child.stdout.take().unwrap();
|
||||
|
||||
Ok(stdout)
|
||||
}
|
||||
|
||||
pub async fn is_h264(input: &Path) -> Result<bool> {
|
||||
let out = Command::new("ffprobe")
|
||||
.args([
|
||||
"-v",
|
||||
"error",
|
||||
"-select_streams",
|
||||
"v:0",
|
||||
"-show_entries",
|
||||
"stream=codec_name",
|
||||
"-of",
|
||||
"default=nokey=1:noprint_wrappers=1",
|
||||
])
|
||||
.arg(input.as_os_str())
|
||||
.output()
|
||||
.await?;
|
||||
|
||||
Ok(String::from_utf8_lossy(&out.stdout).trim() == "h264")
|
||||
}
|
||||
1
third_party/iroh-live/iroh-live/examples/common/mod.rs
vendored
Normal file
1
third_party/iroh-live/iroh-live/examples/common/mod.rs
vendored
Normal file
|
|
@ -0,0 +1 @@
|
|||
pub mod import;
|
||||
95
third_party/iroh-live/iroh-live/examples/publish.rs
vendored
Normal file
95
third_party/iroh-live/iroh-live/examples/publish.rs
vendored
Normal file
|
|
@ -0,0 +1,95 @@
|
|||
use clap::Parser;
|
||||
use iroh::{Endpoint, SecretKey, protocol::Router};
|
||||
use iroh_live::{
|
||||
Live,
|
||||
media::{
|
||||
audio::AudioBackend,
|
||||
av::{AudioPreset, VideoCodec, VideoPreset},
|
||||
capture::CameraCapturer,
|
||||
ffmpeg::{H264Encoder, OpusEncoder},
|
||||
publish::{AudioRenditions, PublishBroadcast, VideoRenditions},
|
||||
},
|
||||
ticket::LiveTicket,
|
||||
};
|
||||
use n0_error::StdResultExt;
|
||||
|
||||
/// Publish example: captures camera/microphone, encodes them as multiple
/// renditions, and publishes the broadcast under the name "hello" until
/// ctrl-c is received.
#[tokio::main]
async fn main() -> n0_error::Result {
    tracing_subscriber::fmt::init();
    let cli = Cli::parse();

    // Setup audio backend.
    let audio_ctx = AudioBackend::new();

    // Setup iroh and iroh-live.
    let endpoint = Endpoint::builder()
        .secret_key(secret_key_from_env()?)
        .bind()
        .await?;
    let live = Live::new(endpoint.clone());
    let router = Router::builder(endpoint)
        .accept(iroh_live::ALPN, live.moq.protocol_handler())
        .spawn();

    // Create a publish broadcast.
    let mut broadcast = PublishBroadcast::new();

    // Capture audio, and encode with the cli-provided preset.
    if !cli.no_audio {
        let mic = audio_ctx.default_input().await?;
        let audio = AudioRenditions::new::<OpusEncoder>(mic, [cli.audio_preset]);
        broadcast.set_audio(Some(audio))?;
    }

    // Capture camera, and encode with the cli-provided presets.
    // NOTE(review): `cli.codec` is parsed but unused here — video is always
    // encoded with `H264Encoder`; confirm whether other codecs are intended.
    if !cli.no_video {
        let camera = CameraCapturer::new()?;
        let video = VideoRenditions::new::<H264Encoder>(camera, cli.video_presets);
        broadcast.set_video(Some(video))?;
    }

    // Publish under the name "hello".
    let name = "hello";
    live.publish(name, broadcast.producer()).await?;

    // Create a ticket string and print
    let ticket = LiveTicket::new(router.endpoint().id(), name);
    println!("publishing at {ticket}");
    let long_ticket = LiveTicket::new(router.endpoint().addr(), name);
    println!("\nticket with addrs: {long_ticket}");

    // Wait for ctrl-c and then shutdown.
    tokio::signal::ctrl_c().await?;
    live.shutdown();
    router.shutdown().await.std_context("router shutdown")?;

    Ok(())
}
|
||||
|
||||
// CLI options for the publish example. Plain `//` comments are used on
// purpose: `///` doc comments would become clap help text and change the
// program's `--help` output.
#[derive(Parser, Debug)]
struct Cli {
    // Video codec selection.
    // NOTE(review): parsed but currently unused by `main` (always H264) —
    // confirm intent.
    #[arg(long, default_value_t=VideoCodec::H264)]
    codec: VideoCodec,
    // Comma-separated list of video renditions to encode.
    #[arg(long, value_delimiter=',', default_values_t=[VideoPreset::P180, VideoPreset::P360, VideoPreset::P720, VideoPreset::P1080])]
    video_presets: Vec<VideoPreset>,
    // Audio quality preset.
    #[arg(long, default_value_t=AudioPreset::Hq)]
    audio_preset: AudioPreset,
    // Disable video capture/publishing.
    #[arg(long)]
    no_video: bool,
    // Disable audio capture/publishing.
    #[arg(long)]
    no_audio: bool,
}
|
||||
|
||||
fn secret_key_from_env() -> n0_error::Result<SecretKey> {
|
||||
Ok(match std::env::var("IROH_SECRET") {
|
||||
Ok(key) => key.parse()?,
|
||||
Err(_) => {
|
||||
let key = SecretKey::generate(&mut rand::rng());
|
||||
println!(
|
||||
"Created new secret. Reuse with IROH_SECRET={}",
|
||||
data_encoding::HEXLOWER.encode(&key.to_bytes())
|
||||
);
|
||||
key
|
||||
}
|
||||
})
|
||||
}
|
||||
71
third_party/iroh-live/iroh-live/examples/push.rs
vendored
Normal file
71
third_party/iroh-live/iroh-live/examples/push.rs
vendored
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
use std::{path::PathBuf, pin::Pin};
|
||||
|
||||
use clap::Parser;
|
||||
use iroh::EndpointId;
|
||||
use iroh_live::LiveNode;
|
||||
use moq_lite::BroadcastProducer;
|
||||
use n0_error::Result;
|
||||
use tokio::io::AsyncRead;
|
||||
use tracing::warn;
|
||||
|
||||
mod common;
|
||||
use self::common::import::{Import, ImportType, transcode};
|
||||
|
||||
// CLI for the push example: stream a media file (or stdin) to a remote
// peer. Existing `///` comments are clap help text and are kept verbatim;
// new notes use `//` so `--help` output is unchanged.
#[derive(Debug, Parser)]
struct Cli {
    // Endpoint id of the remote peer to push to.
    #[clap(short, long)]
    target: EndpointId,
    // Broadcast path to publish under on the remote.
    #[clap(short, long, default_value = "anon/bbb")]
    path: String,

    /// The format of the input media.
    #[clap(long, value_enum, default_value_t = ImportType::Cmaf)]
    format: ImportType,

    /// Input file.
    // If omitted, input is read from stdin (see `main`).
    #[clap(short, long)]
    file: Option<PathBuf>,

    /// Transcode the video with ffmpeg.
    // Only valid together with `--file`; transcoding stdin panics.
    #[clap(long)]
    transcode: bool,
}
|
||||
|
||||
/// Push example entry point: opens the input (file, transcoded file, or
/// stdin), publishes a broadcast on a direct session to `--target`, and
/// feeds the input through [`Import`] until EOF or ctrl-c.
#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt::init();
    let cli = Cli::parse();

    // Spawn a live node (identity/config from env) and dial the target peer.
    let node = LiveNode::spawn_from_env().await?;
    let session = node.live.connect(cli.target).await?;

    // Select the input source. Transcoding requires a seekable file.
    let mut input: Pin<Box<dyn AsyncRead + Send + 'static>> = match (cli.file, cli.transcode) {
        (Some(path), true) => Box::pin(transcode(path.clone(), cli.format).await?),
        (Some(path), false) => Box::pin(tokio::fs::File::open(path).await?),
        (None, false) => Box::pin(tokio::io::stdin()),
        (None, true) => panic!("transcoding stdin is not supported"),
    };

    // Announce the broadcast on the session before producing data into it.
    let broadcast = BroadcastProducer::default();
    session.publish(cli.path, broadcast.consume());

    // Decode the input and forward it into the broadcast.
    let import = async move {
        let mut import = Import::new(broadcast.into(), cli.format);
        import.init_from(&mut input).await?;
        import.read_from(&mut input).await?;
        n0_error::Ok(())
    };
    tokio::pin!(import);

    // Run until the import completes/fails or the user interrupts.
    tokio::select! {
        res = &mut import => {
            if let Err(err) = res {
                warn!("Import failed: {err:#}");
            }
        }
        _ = tokio::signal::ctrl_c() => {}
    };
    drop(import);
    node.shutdown().await?;
    Ok(())
}
|
||||
75
third_party/iroh-live/iroh-live/examples/room-publish-file.rs
vendored
Normal file
75
third_party/iroh-live/iroh-live/examples/room-publish-file.rs
vendored
Normal file
|
|
@ -0,0 +1,75 @@
|
|||
use std::{path::PathBuf, pin::Pin};
|
||||
|
||||
use clap::Parser;
|
||||
use iroh_live::{LiveNode, rooms::RoomTicket};
|
||||
use moq_lite::BroadcastProducer;
|
||||
use n0_error::Result;
|
||||
use tokio::io::AsyncRead;
|
||||
use tracing::warn;
|
||||
|
||||
mod common;
|
||||
use self::common::import::{Import, ImportType, transcode};
|
||||
|
||||
// CLI for the room-publish-file example. The `///` comments below are clap
// help text and are kept verbatim.
#[derive(Debug, Parser)]
struct Cli {
    /// Room to join. If empty a new room will be created.
    /// Will also be read from the IROH_LIVE_ROOM environment variable.
    #[clap(short, long)]
    room: Option<RoomTicket>,

    /// The format of the input media.
    #[clap(long, value_enum, default_value_t = ImportType::Cmaf)]
    format: ImportType,

    /// Input file. If empty reads from stdin.
    file: Option<PathBuf>,

    /// Transcode the video with ffmpeg.
    // Only valid together with a file input; transcoding stdin panics.
    #[clap(long)]
    transcode: bool,
}
|
||||
|
||||
/// Room-publish-file entry point: joins (or creates) a room, publishes the
/// input media as the broadcast "file", and streams until EOF or ctrl-c.
#[tokio::main]
async fn main() -> Result<()> {
    tracing_subscriber::fmt::init();
    let cli = Cli::parse();

    // Use the CLI ticket if given, otherwise fall back to the environment.
    let ticket = match cli.room {
        Some(ticket) => ticket,
        None => RoomTicket::new_from_env()?,
    };

    let node = LiveNode::spawn_from_env().await?;
    let room = node.join_room(ticket).await?;

    // Select the input source. Transcoding requires a file.
    let mut input: Pin<Box<dyn AsyncRead + Send + 'static>> = match (cli.file, cli.transcode) {
        (Some(path), true) => Box::pin(transcode(path.clone(), cli.format).await?),
        (Some(path), false) => Box::pin(tokio::fs::File::open(path).await?),
        (None, false) => Box::pin(tokio::io::stdin()),
        (None, true) => panic!("transcoding stdin is not supported"),
    };

    // Publish the broadcast in the room before producing data into it.
    let broadcast = BroadcastProducer::default();
    room.publish("file", broadcast.clone()).await?;

    // Decode the input and forward it into the broadcast.
    let import = async move {
        let mut import = Import::new(broadcast.into(), cli.format);
        import.init_from(&mut input).await?;
        import.read_from(&mut input).await?;
        n0_error::Ok(())
    };
    tokio::pin!(import);

    // Run until the import completes/fails or the user interrupts.
    tokio::select! {
        res = &mut import => {
            if let Err(err) = res {
                warn!("Import failed: {err:#}");
            }
        }
        _ = tokio::signal::ctrl_c() => {}
    };
    drop(import);
    drop(room);
    node.shutdown().await?;
    Ok(())
}
|
||||
428
third_party/iroh-live/iroh-live/examples/rooms.rs
vendored
Normal file
428
third_party/iroh-live/iroh-live/examples/rooms.rs
vendored
Normal file
|
|
@ -0,0 +1,428 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use clap::Parser;
|
||||
use eframe::egui::{self, Color32, Id, Vec2};
|
||||
use iroh::{Endpoint, protocol::Router};
|
||||
use iroh_gossip::{Gossip, TopicId};
|
||||
use iroh_live::{
|
||||
Live,
|
||||
media::{
|
||||
audio::AudioBackend,
|
||||
av::{AudioPreset, VideoPreset},
|
||||
capture::{CameraCapturer, ScreenCapturer},
|
||||
ffmpeg::{FfmpegDecoders, FfmpegVideoDecoder, H264Encoder, OpusEncoder, ffmpeg_log_init},
|
||||
publish::{AudioRenditions, PublishBroadcast, VideoRenditions},
|
||||
subscribe::{AudioTrack, AvRemoteTrack, SubscribeBroadcast, WatchTrack},
|
||||
},
|
||||
moq::MoqSession,
|
||||
rooms::{Room, RoomEvent, RoomTicket},
|
||||
util::StatsSmoother,
|
||||
};
|
||||
use n0_error::{Result, StdResultExt, anyerr};
|
||||
use tracing::{info, warn};
|
||||
|
||||
/// Name under which the local broadcast is published in the room.
const BROADCAST_NAME: &str = "cam";

// CLI for the rooms GUI example (`//` comments keep clap help unchanged).
#[derive(Debug, Parser)]
struct Cli {
    // Room ticket to join; omit to create a new room.
    join: Option<RoomTicket>,
    // Capture the screen instead of the camera.
    #[clap(long)]
    screen: bool,
    // Disable audio capture.
    #[clap(long)]
    no_audio: bool,
}
|
||||
|
||||
/// Rooms GUI entry point. eframe owns the main thread, so async setup is
/// driven on a manually-built tokio runtime that is then kept entered for
/// the lifetime of the UI.
fn main() -> Result<()> {
    tracing_subscriber::fmt::init();
    ffmpeg_log_init();
    let cli = Cli::parse();

    let rt = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .unwrap();

    let audio_ctx = AudioBackend::new();
    // Bind the endpoint, join the room, and start publishing before the UI runs.
    let (router, broadcast, room) = rt.block_on(setup(cli, audio_ctx.clone()))?;

    // Keep the runtime context entered so spawned tasks keep running under eframe.
    let _guard = rt.enter();
    eframe::run_native(
        "IrohLive",
        eframe::NativeOptions::default(),
        Box::new(|cc| {
            let app = App {
                rt,
                room,
                peers: vec![],
                // Self-preview of the local video, if available.
                self_video: broadcast
                    .watch_local(Default::default())
                    .map(|track| VideoView::new(&cc.egui_ctx, track, usize::MAX)),
                router,
                _broadcast: broadcast,
                audio_ctx,
            };
            Ok(Box::new(app))
        }),
    )
    .map_err(|err| anyerr!("eframe failed: {err:#}"))
}
|
||||
|
||||
/// Async setup for the rooms example: binds the iroh endpoint, wires gossip
/// and moq protocol handlers into a router, builds the local publish
/// broadcast (mic + camera/screen), joins the room, and publishes.
async fn setup(cli: Cli, audio_ctx: AudioBackend) -> Result<(Router, PublishBroadcast, Room)> {
    let endpoint = Endpoint::builder()
        .secret_key(secret_key_from_env()?)
        .bind()
        .await?;
    info!(endpoint_id=%endpoint.id(), "endpoint bound");

    let gossip = Gossip::builder().spawn(endpoint.clone());
    let live = Live::new(endpoint.clone());

    let router = Router::builder(endpoint)
        .accept(iroh_gossip::ALPN, gossip.clone())
        .accept(iroh_moq::ALPN, live.protocol_handler())
        .spawn();

    // Publish ourselves.
    let broadcast = {
        let mut broadcast = PublishBroadcast::new();
        if !cli.no_audio {
            let mic = audio_ctx.default_input().await?;
            let audio = AudioRenditions::new::<OpusEncoder>(mic, [AudioPreset::Hq]);
            broadcast.set_audio(Some(audio))?;
        }
        // Screen or camera capture, encoded as all available renditions.
        let video = if cli.screen {
            let screen = ScreenCapturer::new()?;
            VideoRenditions::new::<H264Encoder>(screen, VideoPreset::all())
        } else {
            let camera = CameraCapturer::new()?;
            VideoRenditions::new::<H264Encoder>(camera, VideoPreset::all())
        };
        broadcast.set_video(Some(video))?;
        broadcast
    };
    // Either join the given room, or create a fresh ticket from env/random.
    let ticket = match cli.join {
        None => RoomTicket::new(topic_id_from_env()?, vec![]),
        Some(ticket) => ticket,
    };

    let room = Room::new(router.endpoint(), gossip, live, ticket).await?;
    room.publish(BROADCAST_NAME, broadcast.producer()).await?;

    println!("room ticket: {}", room.ticket());

    Ok((router, broadcast, room))
}
|
||||
|
||||
/// UI state for the rooms example.
struct App {
    // The joined room; polled for events every frame.
    room: Room,
    // One view per connected remote peer.
    peers: Vec<RemoteTrackView>,
    // Local camera/screen self-preview, if video is enabled.
    self_video: Option<VideoView>,
    // Kept alive so the protocol handlers stay registered; shut down on exit.
    router: Router,
    // Kept alive so the local broadcast keeps producing.
    _broadcast: PublishBroadcast,
    // Audio backend used to open outputs for newly subscribed peers.
    audio_ctx: AudioBackend,
    // Runtime used to block on async work from the UI thread.
    rt: tokio::runtime::Runtime,
}
|
||||
|
||||
impl eframe::App for App {
    /// Per-frame update: prunes closed peers, drains room events (adding
    /// views for newly subscribed broadcasts), and renders the video grid
    /// plus a self-preview overlay.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        ctx.request_repaint_after(Duration::from_millis(30)); // min 30 fps

        // Remove closed peers.
        self.peers.retain(|track| !track.is_closed());

        // Add newly subscribed peers.
        while let Ok(event) = self.room.try_recv() {
            match event {
                RoomEvent::RemoteAnnounced { remote, broadcasts } => {
                    info!(
                        "peer announced: {} with broadcasts {broadcasts:?}",
                        remote.fmt_short(),
                    );
                }
                RoomEvent::RemoteConnected { session } => {
                    info!("peer connected: {}", session.conn().remote_id().fmt_short());
                }
                RoomEvent::BroadcastSubscribed { session, broadcast } => {
                    info!(
                        "subscribing to {}:{}",
                        session.remote_id(),
                        broadcast.broadcast_name()
                    );
                    // Opening the audio output is async; block on it from the
                    // UI thread. A failed track is logged and skipped.
                    let track = match self.rt.block_on(async {
                        let audio_out = self.audio_ctx.default_output().await?;
                        broadcast.watch_and_listen::<FfmpegDecoders>(audio_out, Default::default())
                    }) {
                        Ok(track) => track,
                        Err(err) => {
                            warn!("failed to add track: {err}");
                            continue;
                        }
                    };

                    self.peers
                        .push(RemoteTrackView::new(ctx, session, track, self.peers.len()));
                }
            }
        }

        egui::CentralPanel::default()
            .frame(egui::Frame::new().inner_margin(0.0).outer_margin(0.0))
            .show(ctx, |ui| {
                ui.spacing_mut().item_spacing = egui::vec2(0.0, 0.0);
                show_video_grid(ctx, ui, &mut self.peers);

                // Render video preview of self
                if let Some(self_view) = self.self_video.as_mut() {
                    let size = (200., 200.);
                    egui::Area::new(Id::new("self-video"))
                        .anchor(egui::Align2::RIGHT_BOTTOM, [-10.0, -10.0]) // 10px from the bottom-right edge
                        .order(egui::Order::Foreground)
                        .show(ui.ctx(), |ui| {
                            egui::Frame::new()
                                .fill(egui::Color32::from_rgba_unmultiplied(0, 0, 0, 128))
                                .corner_radius(8.0)
                                .show(ui, |ui| {
                                    ui.set_width(size.0);
                                    ui.set_height(size.1);
                                    ui.add_sized(size, self_view.render_image(ctx, size.into()));
                                });
                        });
                }
            });
    }

    /// Shuts the router down cleanly when the window closes.
    fn on_exit(&mut self, _gl: Option<&eframe::glow::Context>) {
        let router = self.router.clone();
        self.rt.block_on(async move {
            if let Err(err) = router.shutdown().await {
                warn!("shutdown error: {err:?}");
            }
        });
    }
}
|
||||
|
||||
/// UI view of a single remote peer: its video, audio sink, session stats,
/// and rendition selector overlay.
struct RemoteTrackView {
    // Stable index used for egui ids and texture names.
    id: usize,
    // Video view, if the peer publishes video.
    video: Option<VideoView>,
    // Kept alive so remote audio keeps playing.
    _audio_track: Option<AudioTrack>,
    // Session to the remote peer; used for stats and liveness checks.
    session: MoqSession,
    // Subscription handle; used to switch renditions.
    broadcast: SubscribeBroadcast,
    // Smooths connection stats for display.
    stats: StatsSmoother,
}
|
||||
|
||||
impl RemoteTrackView {
    /// Builds a view for a newly subscribed remote track.
    fn new(ctx: &egui::Context, session: MoqSession, track: AvRemoteTrack, id: usize) -> Self {
        Self {
            video: track.video.map(|video| VideoView::new(ctx, video, id)),
            stats: StatsSmoother::new(),
            broadcast: track.broadcast,
            id,
            _audio_track: track.audio,
            session,
        }
    }

    /// True once the underlying connection has a close reason.
    fn is_closed(&self) -> bool {
        self.session.conn().close_reason().is_some()
    }

    /// Renders the current video frame as an egui image, or `None` if the
    /// peer has no video.
    fn render_image(
        &mut self,
        ctx: &egui::Context,
        available_size: Vec2,
    ) -> Option<egui::Image<'_>> {
        self.video
            .as_mut()
            .map(|video| video.render_image(ctx, available_size))
    }

    /// Draws the stats/rendition overlay anchored to the bottom-left of `rect`.
    fn render_overlay_in_rect(&mut self, ui: &mut egui::Ui, rect: egui::Rect) {
        let pos = rect.left_bottom() + egui::vec2(8.0, -8.0);
        let overlay_id = egui::Id::new(("overlay", self.id));

        egui::Area::new(overlay_id)
            .order(egui::Order::Foreground)
            .fixed_pos(pos)
            .show(ui.ctx(), |ui| {
                egui::Frame::new()
                    .fill(egui::Color32::from_rgba_unmultiplied(0, 0, 0, 128))
                    .corner_radius(3.0)
                    .show(ui, |ui| {
                        ui.spacing_mut().item_spacing = egui::vec2(8.0, 8.0);
                        ui.set_min_width(100.);
                        self.render_overlay(ui);
                    });
            });
    }

    /// Overlay contents: rendition picker plus smoothed connection stats.
    fn render_overlay(&mut self, ui: &mut egui::Ui) {
        ui.vertical(|ui| {
            let selected = self.video.as_ref().map(|v| v.track.rendition().to_owned());
            egui::ComboBox::from_id_salt(format!("video{}", self.id))
                .selected_text(selected.clone().unwrap_or_default())
                .show_ui(ui, |ui| {
                    for name in self.broadcast.catalog().video_renditions() {
                        if ui
                            .selectable_label(selected.as_deref() == Some(name), name)
                            .clicked()
                        {
                            // Switch to the clicked rendition; reuse the view
                            // if one exists, otherwise create it.
                            if let Ok(track) = self
                                .broadcast
                                .watch_rendition::<FfmpegVideoDecoder>(&Default::default(), name)
                            {
                                if let Some(video) = self.video.as_mut() {
                                    video.set_track(track);
                                } else {
                                    self.video = Some(VideoView::new(ui.ctx(), track, self.id))
                                }
                            }
                        }
                    }
                });

            let stats = self.stats.smoothed(|| self.session.conn().stats());
            ui.label(format!(
                "peer: {}",
                self.session.conn().remote_id().fmt_short()
            ));
            ui.label(format!("BW up: {}", stats.up.rate_str));
            ui.label(format!("BW down: {}", stats.down.rate_str));
            ui.label(format!("RTT: {}ms", stats.rtt.as_millis()));
        });
    }
}
|
||||
|
||||
/// Renders a single video track into a reusable egui texture.
struct VideoView {
    // The watched video track providing frames.
    track: WatchTrack,
    // Last viewport size, used to detect resizes.
    size: egui::Vec2,
    // Texture updated in place with each new frame.
    texture: egui::TextureHandle,
}
|
||||
|
||||
impl VideoView {
    /// Creates a view with a black placeholder texture; `id` keeps texture
    /// names unique per peer.
    fn new(ctx: &egui::Context, track: WatchTrack, id: usize) -> Self {
        let texture_name = format!("video-texture-{}", id);
        let size = egui::vec2(100., 100.);
        let color_image =
            egui::ColorImage::filled([size.x as usize, size.y as usize], Color32::BLACK);
        let texture = ctx.load_texture(&texture_name, color_image, egui::TextureOptions::default());
        Self {
            size,
            texture,
            track,
        }
    }

    /// Replaces the underlying track (e.g. after a rendition switch).
    fn set_track(&mut self, track: WatchTrack) {
        self.track = track;
    }

    /// Uploads the latest decoded frame (if any) to the texture and returns
    /// an image widget for it. Resizes propagate to the track's viewport in
    /// physical pixels.
    fn render_image(&mut self, ctx: &egui::Context, available_size: Vec2) -> egui::Image<'_> {
        let available_size = available_size.into();
        if available_size != self.size {
            self.size = available_size;
            // Convert logical points to physical pixels for the decoder viewport.
            let ppp = ctx.pixels_per_point();
            let w = (available_size.x * ppp) as u32;
            let h = (available_size.y * ppp) as u32;
            self.track.set_viewport(w, h);
        }
        if let Some(frame) = self.track.current_frame() {
            let (w, h) = frame.img().dimensions();
            let image = egui::ColorImage::from_rgba_unmultiplied(
                [w as usize, h as usize],
                frame.img().as_raw(),
            );
            self.texture.set(image, Default::default());
        }
        egui::Image::from_texture(&self.texture).shrink_to_fit()
    }
}
|
||||
|
||||
/// Show `textures` as squares in a compact auto grid that fills the parent as much as
|
||||
/// possible without breaking square aspect.
|
||||
fn show_video_grid(ctx: &egui::Context, ui: &mut egui::Ui, videos: &mut [RemoteTrackView]) {
|
||||
let n = videos.len();
|
||||
if n == 0 {
|
||||
return;
|
||||
}
|
||||
|
||||
// Parent size we’re allowed to use
|
||||
let avail = ui.available_size(); // egui docs recommend this for filling containers
|
||||
// Choose columns ≈ ceil(sqrt(n)), rows to fit the rest
|
||||
let cols = (n as f32).sqrt().ceil() as usize;
|
||||
let rows = (n + cols - 1) / cols;
|
||||
|
||||
// Side length of each square in points (fill the limiting axis)
|
||||
let cell = (avail.x / cols as f32).min(avail.y / rows as f32).floor();
|
||||
let cell_size = [cell, cell];
|
||||
|
||||
// Compute the grid’s actual pixel footprint
|
||||
let grid_w = cell * cols as f32;
|
||||
let grid_h = cell * rows as f32;
|
||||
|
||||
// Center the grid in any leftover space
|
||||
let pad_x = ((avail.x - grid_w) * 0.5).max(0.0);
|
||||
let pad_y = ((avail.y - grid_h) * 0.5).max(0.0);
|
||||
|
||||
ui.add_space(pad_y);
|
||||
ui.horizontal(|ui| {
|
||||
ui.add_space(pad_x);
|
||||
|
||||
egui::Grid::new("image_grid")
|
||||
.spacing(Vec2::ZERO) // no gaps; tiles butt together
|
||||
.show(ui, |ui| {
|
||||
let mut i = 0;
|
||||
for _r in 0..rows {
|
||||
for _c in 0..cols {
|
||||
if i < n {
|
||||
// Force exact square size for each image
|
||||
if let Some(image) = videos[i].render_image(ctx, cell_size.into()) {
|
||||
let response = ui.add_sized(cell_size, image);
|
||||
let rect = response.rect;
|
||||
videos[i].render_overlay_in_rect(ui, rect);
|
||||
}
|
||||
i += 1;
|
||||
} else {
|
||||
// Keep the grid rectangular when N isn’t a multiple of cols
|
||||
ui.allocate_exact_size(Vec2::splat(cell), egui::Sense::hover());
|
||||
}
|
||||
}
|
||||
ui.end_row();
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
fn secret_key_from_env() -> n0_error::Result<iroh::SecretKey> {
|
||||
Ok(match std::env::var("IROH_SECRET") {
|
||||
Ok(key) => key.parse()?,
|
||||
Err(_) => {
|
||||
let key = iroh::SecretKey::generate(&mut rand::rng());
|
||||
println!(
|
||||
"Created new secret. Reuse with IROH_SECRET={}",
|
||||
data_encoding::HEXLOWER.encode(&key.to_bytes())
|
||||
);
|
||||
key
|
||||
}
|
||||
})
|
||||
}
|
||||
|
||||
fn topic_id_from_env() -> n0_error::Result<TopicId> {
|
||||
Ok(match std::env::var("IROH_TOPIC") {
|
||||
Ok(topic) => TopicId::from_bytes(
|
||||
data_encoding::HEXLOWER
|
||||
.decode(topic.as_bytes())
|
||||
.std_context("invalid hex")?
|
||||
.as_slice()
|
||||
.try_into()
|
||||
.std_context("invalid length")?,
|
||||
),
|
||||
Err(_) => {
|
||||
let topic = TopicId::from_bytes(rand::random());
|
||||
println!(
|
||||
"Created new topic. Reuse with IROH_TOPIC={}",
|
||||
data_encoding::HEXLOWER.encode(topic.as_bytes())
|
||||
);
|
||||
topic
|
||||
}
|
||||
})
|
||||
}
|
||||
225
third_party/iroh-live/iroh-live/examples/watch.rs
vendored
Normal file
225
third_party/iroh-live/iroh-live/examples/watch.rs
vendored
Normal file
|
|
@ -0,0 +1,225 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use clap::Parser;
|
||||
use eframe::egui::{self, Color32, Id, Vec2};
|
||||
use iroh::{Endpoint, EndpointId};
|
||||
use iroh_live::{
|
||||
Live,
|
||||
media::{
|
||||
audio::AudioBackend,
|
||||
ffmpeg::{FfmpegDecoders, FfmpegVideoDecoder, ffmpeg_log_init},
|
||||
subscribe::{AudioTrack, SubscribeBroadcast, WatchTrack},
|
||||
},
|
||||
moq::MoqSession,
|
||||
ticket::LiveTicket,
|
||||
util::StatsSmoother,
|
||||
};
|
||||
use n0_error::{Result, anyerr};
|
||||
use tracing::info;
|
||||
|
||||
#[derive(Debug, Parser)]
|
||||
struct Cli {
|
||||
#[clap(long, conflicts_with = "endpoint-id")]
|
||||
ticket: Option<LiveTicket>,
|
||||
#[clap(long, conflicts_with = "ticket", requires = "name")]
|
||||
endpoint_id: Option<EndpointId>,
|
||||
#[clap(long, conflicts_with = "ticket", requires = "endpoint-id")]
|
||||
name: Option<String>,
|
||||
}
|
||||
|
||||
/// Watch example entry point: resolves a ticket from the CLI, connects and
/// subscribes on a tokio runtime, then hands the decoded tracks to an
/// eframe UI on the main thread.
fn main() -> Result<()> {
    let cli = Cli::parse();
    // Build the ticket from whichever argument combination was supplied.
    let ticket = match (cli.ticket, cli.endpoint_id, cli.name) {
        (Some(ticket), None, None) => ticket,
        (None, Some(endpoint_id), Some(name)) => LiveTicket::new(endpoint_id, name),
        _ => {
            eprintln!("Invalid arguments: Use either --ticket, or --endpoint and --name");
            std::process::exit(1);
        }
    };

    tracing_subscriber::fmt::init();
    ffmpeg_log_init();
    // eframe owns the main thread, so async work runs on a manual runtime.
    let rt = tokio::runtime::Builder::new_multi_thread()
        .enable_all()
        .build()
        .unwrap();
    let audio_ctx = AudioBackend::new();

    println!("connecting to {ticket} ...");
    let (endpoint, session, track) = rt.block_on({
        let audio_ctx = audio_ctx.clone();
        async move {
            let endpoint = Endpoint::bind().await?;
            let live = Live::new(endpoint.clone());
            let audio_out = audio_ctx.default_output().await?;
            let (session, track) = live
                .watch_and_listen::<FfmpegDecoders>(
                    ticket.endpoint,
                    &ticket.broadcast_name,
                    audio_out,
                    Default::default(),
                )
                .await?;
            println!("connected!");
            n0_error::Ok((endpoint, session, track))
        }
    })?;

    // Keep the runtime entered so spawned tasks keep running under eframe.
    let _guard = rt.enter();

    eframe::run_native(
        "IrohLive",
        eframe::NativeOptions::default(),
        Box::new(|cc| {
            // Translate ctrl-c into a window-close request.
            let egui_ctx = cc.egui_ctx.clone();
            rt.spawn(async move {
                let _ = tokio::signal::ctrl_c().await;
                egui_ctx.send_viewport_cmd(egui::ViewportCommand::Close);
                // TODO: When the app is not visible, this will not trigger `update` immediately.
                // See https://github.com/emilk/egui/issues/5112
                egui_ctx.request_repaint();
            });
            let app = App {
                video: track.video.map(|video| VideoView::new(&cc.egui_ctx, video)),
                _audio_ctx: audio_ctx,
                _audio: track.audio,
                broadcast: track.broadcast,
                session: session,
                stats: StatsSmoother::new(),
                endpoint,
                rt,
            };
            Ok(Box::new(app))
        }),
    )
    .map_err(|err| anyerr!("eframe failed: {err:#}"))
}
|
||||
|
||||
/// UI state for the watch example.
struct App {
    // Video view for the watched broadcast, if it has video.
    video: Option<VideoView>,
    // Kept alive so remote audio keeps playing.
    _audio: Option<AudioTrack>,
    // Kept alive for the lifetime of the audio track.
    _audio_ctx: AudioBackend,
    // Local endpoint; closed on exit.
    endpoint: Endpoint,
    // Session to the publisher; used for stats and graceful close.
    session: MoqSession,
    // Subscription handle; used to switch renditions and shut down.
    broadcast: SubscribeBroadcast,
    // Smooths connection stats for display.
    stats: StatsSmoother,
    // Runtime for blocking on async work from the UI thread.
    rt: tokio::runtime::Runtime,
}
|
||||
|
||||
impl eframe::App for App {
    /// Per-frame update: draws the video full-window plus a stats overlay
    /// anchored to the bottom-left corner.
    fn update(&mut self, ctx: &egui::Context, _frame: &mut eframe::Frame) {
        ctx.request_repaint_after(Duration::from_millis(30)); // min 30 fps
        egui::CentralPanel::default()
            .frame(egui::Frame::new().inner_margin(0.0).outer_margin(0.0))
            .show(ctx, |ui| {
                ui.spacing_mut().item_spacing = egui::vec2(0.0, 0.0);

                let avail = ui.available_size();
                if let Some(video) = self.video.as_mut() {
                    ui.add_sized(avail, video.render(ctx, avail));
                }

                egui::Area::new(Id::new("overlay"))
                    .anchor(egui::Align2::LEFT_BOTTOM, [8.0, -8.0])
                    .show(ctx, |ui| {
                        egui::Frame::new()
                            .fill(egui::Color32::from_rgba_unmultiplied(0, 0, 0, 128))
                            .corner_radius(3.0)
                            .show(ui, |ui| {
                                ui.spacing_mut().item_spacing = egui::vec2(8.0, 8.0);
                                ui.set_min_width(100.);
                                self.render_overlay(ctx, ui);
                            })
                    })
            });
    }

    /// Graceful teardown: stop the subscription, close the session, and
    /// wait for the endpoint to close.
    fn on_exit(&mut self, _gl: Option<&eframe::glow::Context>) {
        info!("exit");
        self.broadcast.shutdown();
        self.session.close(0, b"bye");
        let endpoint = self.endpoint.clone();
        self.rt.block_on(async move {
            endpoint.close().await;
            info!("endpoint closed");
        });
    }
}
|
||||
|
||||
impl App {
    /// Draw the overlay contents: a rendition selector plus connection stats.
    fn render_overlay(&mut self, ctx: &egui::Context, ui: &mut egui::Ui) {
        ui.vertical(|ui| {
            // Currently selected video rendition name, if a video is playing.
            let selected = self
                .video
                .as_ref()
                .map(|video| video.track.rendition().to_owned());
            egui::ComboBox::from_label("")
                .selected_text(selected.clone().unwrap_or_default())
                .show_ui(ui, |ui| {
                    // List all video renditions announced in the catalog.
                    for name in self.broadcast.catalog().video_renditions() {
                        if ui
                            .selectable_label(selected.as_deref() == Some(name), name)
                            .clicked()
                        {
                            // Switch renditions by creating a fresh watch track
                            // and replacing the video view.
                            if let Ok(track) = self
                                .broadcast
                                .watch_rendition::<FfmpegVideoDecoder>(&Default::default(), name)
                            {
                                self.video = Some(VideoView::new(ctx, track));
                            }
                        }
                    }
                });

            // Stats are sampled at most ~once per second by the smoother.
            let stats = self.stats.smoothed(|| self.session.conn().stats());
            ui.label(format!(
                "peer: {}",
                self.session.conn().remote_id().fmt_short()
            ));
            ui.label(format!("BW up: {}", stats.up.rate_str));
            ui.label(format!("BW down: {}", stats.down.rate_str));
            ui.label(format!("RTT: {}ms", stats.rtt.as_millis()));
        });
    }
}
|
||||
|
||||
/// Renders a remote video track into an egui texture.
struct VideoView {
    // The subscribed video track supplying decoded frames.
    track: WatchTrack,
    // GPU texture updated with the latest frame each render.
    texture: egui::TextureHandle,
    // Last known widget size in points; used to detect viewport changes.
    size: egui::Vec2,
}
|
||||
|
||||
impl VideoView {
    /// Create a view for `track`, starting from a small black placeholder
    /// texture until the first frame arrives.
    fn new(ctx: &egui::Context, track: WatchTrack) -> Self {
        let size = egui::vec2(100., 100.);
        let color_image =
            egui::ColorImage::filled([size.x as usize, size.y as usize], Color32::BLACK);
        let texture = ctx.load_texture("video", color_image, egui::TextureOptions::default());
        Self {
            size,
            texture,
            track,
        }
    }

    /// Upload the latest frame (if any) and return an image widget that
    /// shrinks to fit `available_size` while keeping aspect ratio.
    fn render(&mut self, ctx: &egui::Context, available_size: Vec2) -> egui::Image<'_> {
        let available_size = available_size.into();
        if available_size != self.size {
            self.size = available_size;
            // Tell the track the viewport size in physical pixels so it can
            // pick an appropriately sized rendition/scale.
            let ppp = ctx.pixels_per_point();
            let w = (available_size.x * ppp) as u32;
            let h = (available_size.y * ppp) as u32;
            self.track.set_viewport(w, h);
        }
        if let Some(frame) = self.track.current_frame() {
            // Frames are provided as raw RGBA bytes (see from_rgba_unmultiplied).
            let (w, h) = frame.img().dimensions();
            let image = egui::ColorImage::from_rgba_unmultiplied(
                [w as usize, h as usize],
                frame.img().as_raw(),
            );
            self.texture.set(image, Default::default());
        }
        egui::Image::from_texture(&self.texture).shrink_to_fit()
    }
}
|
||||
15
third_party/iroh-live/iroh-live/src/lib.rs
vendored
Normal file
15
third_party/iroh-live/iroh-live/src/lib.rs
vendored
Normal file
|
|
@ -0,0 +1,15 @@
|
|||
mod live;
|
||||
mod node;
|
||||
pub mod rooms;
|
||||
pub mod ticket;
|
||||
pub mod util;
|
||||
|
||||
pub use self::live::Live;
|
||||
pub use self::node::LiveNode;
|
||||
|
||||
pub use iroh_moq as moq;
|
||||
pub use iroh_moq::ALPN;
|
||||
|
||||
pub use hang::catalog;
|
||||
|
||||
pub use moq_media as media;
|
||||
62
third_party/iroh-live/iroh-live/src/live.rs
vendored
Normal file
62
third_party/iroh-live/iroh-live/src/live.rs
vendored
Normal file
|
|
@ -0,0 +1,62 @@
|
|||
use iroh::{Endpoint, EndpointAddr};
|
||||
use iroh_moq::{Moq, MoqProtocolHandler, MoqSession};
|
||||
use moq_lite::BroadcastProducer;
|
||||
use moq_media::{
|
||||
av::{AudioSink, Decoders, PlaybackConfig},
|
||||
subscribe::{AvRemoteTrack, SubscribeBroadcast},
|
||||
};
|
||||
use n0_error::Result;
|
||||
use tracing::info;
|
||||
|
||||
/// Thin convenience wrapper around a [`Moq`] instance for live A/V streaming.
#[derive(Clone)]
pub struct Live {
    // Underlying MoQ transport; cheap to clone.
    pub moq: Moq,
}
|
||||
|
||||
impl Live {
|
||||
pub fn new(endpoint: Endpoint) -> Self {
|
||||
Self {
|
||||
moq: Moq::new(endpoint),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn connect(&self, remote: impl Into<EndpointAddr>) -> Result<MoqSession> {
|
||||
self.moq.connect(remote).await
|
||||
}
|
||||
|
||||
pub async fn connect_and_subscribe(
|
||||
&self,
|
||||
remote: impl Into<EndpointAddr>,
|
||||
broadcast_name: &str,
|
||||
) -> Result<(MoqSession, SubscribeBroadcast)> {
|
||||
let mut session = self.connect(remote).await?;
|
||||
info!(id=%session.conn().remote_id(), "new peer connected");
|
||||
let broadcast = session.subscribe(broadcast_name).await?;
|
||||
let broadcast = SubscribeBroadcast::new(broadcast_name.to_string(), broadcast).await?;
|
||||
Ok((session, broadcast))
|
||||
}
|
||||
|
||||
pub async fn watch_and_listen<D: Decoders>(
|
||||
&self,
|
||||
remote: impl Into<EndpointAddr>,
|
||||
broadcast_name: &str,
|
||||
audio_out: impl AudioSink,
|
||||
config: PlaybackConfig,
|
||||
) -> Result<(MoqSession, AvRemoteTrack)> {
|
||||
let (session, broadcast) = self.connect_and_subscribe(remote, &broadcast_name).await?;
|
||||
let track = broadcast.watch_and_listen::<D>(audio_out, config)?;
|
||||
Ok((session, track))
|
||||
}
|
||||
|
||||
pub fn protocol_handler(&self) -> MoqProtocolHandler {
|
||||
self.moq.protocol_handler()
|
||||
}
|
||||
|
||||
pub async fn publish(&self, name: impl ToString, producer: BroadcastProducer) -> Result<()> {
|
||||
self.moq.publish(name, producer).await
|
||||
}
|
||||
|
||||
pub fn shutdown(&self) {
|
||||
self.moq.shutdown();
|
||||
}
|
||||
}
|
||||
72
third_party/iroh-live/iroh-live/src/node.rs
vendored
Normal file
72
third_party/iroh-live/iroh-live/src/node.rs
vendored
Normal file
|
|
@ -0,0 +1,72 @@
|
|||
use crate::{
|
||||
live::Live,
|
||||
rooms::{Room, RoomTicket},
|
||||
};
|
||||
use iroh::{Endpoint, protocol::Router};
|
||||
use iroh_gossip::Gossip;
|
||||
use n0_error::{Result, StdResultExt};
|
||||
use tracing::info;
|
||||
|
||||
/// A fully wired node: iroh router plus the MoQ and gossip protocols.
#[derive(Clone)]
pub struct LiveNode {
    // Router accepting gossip and MoQ ALPNs on the endpoint.
    router: Router,
    // MoQ streaming wrapper.
    pub live: Live,
    // Gossip instance used for room membership/state exchange.
    pub gossip: Gossip,
}
|
||||
|
||||
impl LiveNode {
    /// Bind an endpoint (secret key from `IROH_SECRET` or freshly generated),
    /// spawn gossip + MoQ, and accept both protocols on a router.
    pub async fn spawn_from_env() -> Result<Self> {
        let endpoint = Endpoint::builder()
            .secret_key(secret_key_from_env()?)
            .bind()
            .await?;
        info!(endpoint_id=%endpoint.id(), "endpoint bound");

        let gossip = Gossip::builder().spawn(endpoint.clone());
        let live = Live::new(endpoint.clone());

        let router = Router::builder(endpoint)
            .accept(iroh_gossip::ALPN, gossip.clone())
            .accept(iroh_moq::ALPN, live.protocol_handler())
            .spawn();

        Ok(Self {
            router,
            gossip,
            live,
        })
    }

    /// Shut down MoQ first, then the router (and with it the endpoint).
    pub async fn shutdown(&self) -> Result<()> {
        self.live.shutdown();
        self.router.shutdown().await.anyerr()
    }

    /// The underlying iroh endpoint.
    pub fn endpoint(&self) -> &Endpoint {
        self.router.endpoint()
    }

    /// Join the room described by `ticket`, spawning a room actor.
    pub async fn join_room(&self, ticket: RoomTicket) -> Result<Room> {
        Room::new(
            self.endpoint(),
            self.gossip.clone(),
            self.live.clone(),
            ticket,
        )
        .await
    }
}
|
||||
|
||||
fn secret_key_from_env() -> n0_error::Result<iroh::SecretKey> {
|
||||
Ok(match std::env::var("IROH_SECRET") {
|
||||
Ok(key) => key.parse()?,
|
||||
Err(_) => {
|
||||
let key = iroh::SecretKey::generate(&mut rand::rng());
|
||||
println!(
|
||||
"Created new secret. Reuse with IROH_SECRET={}",
|
||||
data_encoding::HEXLOWER.encode(&key.to_bytes())
|
||||
);
|
||||
key
|
||||
}
|
||||
})
|
||||
}
|
||||
391
third_party/iroh-live/iroh-live/src/rooms.rs
vendored
Normal file
391
third_party/iroh-live/iroh-live/src/rooms.rs
vendored
Normal file
|
|
@ -0,0 +1,391 @@
|
|||
use std::collections::HashSet;
|
||||
use std::pin::Pin;
|
||||
use std::sync::Arc;
|
||||
use std::time::Duration;
|
||||
|
||||
use bytes::Bytes;
|
||||
use iroh::{Endpoint, EndpointId, SecretKey};
|
||||
use iroh_gossip::Gossip;
|
||||
use iroh_moq::MoqSession;
|
||||
use iroh_smol_kv::{ExpiryConfig, Filter, SignedValue, Subscribe, SubscribeMode, WriteScope};
|
||||
use moq_lite::BroadcastProducer;
|
||||
use moq_media::subscribe::SubscribeBroadcast;
|
||||
use n0_error::{Result, StdResultExt, anyerr};
|
||||
use n0_future::FuturesUnordered;
|
||||
use n0_future::{StreamExt, task::AbortOnDropHandle};
|
||||
use serde::{Deserialize, Serialize};
|
||||
use tokio::sync::mpsc::{self, error::TryRecvError};
|
||||
use tracing::{Instrument, debug, error_span, warn};
|
||||
|
||||
use crate::Live;
|
||||
|
||||
pub use self::publisher::{PublishOpts, RoomPublisherSync, StreamKind};
|
||||
pub use self::ticket::RoomTicket;
|
||||
|
||||
type BoxFuture<T> = Pin<Box<dyn Future<Output = T> + Send + Sync + 'static>>;
|
||||
|
||||
mod publisher;
|
||||
|
||||
/// A joined room: an event stream plus a cloneable control handle.
pub struct Room {
    // Control handle; owns the actor task via its Arc'd abort handle.
    handle: RoomHandle,
    // Events emitted by the room actor.
    events: mpsc::Receiver<RoomEvent>,
}

/// Receiver half returned by [`Room::split`].
pub type RoomEvents = mpsc::Receiver<RoomEvent>;

/// Cloneable handle to a running room actor.
#[derive(Clone)]
pub struct RoomHandle {
    // Our own endpoint id (used as bootstrap in issued tickets).
    me: EndpointId,
    // The ticket this room was joined with.
    ticket: RoomTicket,
    // Command channel into the actor.
    tx: mpsc::Sender<ApiMessage>,
    // Keeps the actor alive; aborted when the last handle drops.
    _actor_handle: Arc<AbortOnDropHandle<()>>,
}
|
||||
|
||||
impl RoomHandle {
|
||||
pub fn ticket(&self) -> RoomTicket {
|
||||
let mut ticket = self.ticket.clone();
|
||||
ticket.bootstrap = vec![self.me];
|
||||
ticket
|
||||
}
|
||||
|
||||
pub async fn publish(&self, name: impl ToString, producer: BroadcastProducer) -> Result<()> {
|
||||
self.tx
|
||||
.send(ApiMessage::Publish {
|
||||
name: name.to_string(),
|
||||
producer: producer,
|
||||
})
|
||||
.await
|
||||
.map_err(|_| anyerr!("room actor died"))
|
||||
}
|
||||
}
|
||||
|
||||
impl Room {
    /// Join a room: spawn the actor that tracks peer state over gossip and
    /// manages publish/subscribe lifecycles.
    pub async fn new(
        endpoint: &Endpoint,
        gossip: Gossip,
        live: Live,
        ticket: RoomTicket,
    ) -> Result<Self> {
        let endpoint_id = endpoint.id();
        let (actor_tx, actor_rx) = mpsc::channel(16);
        let (event_tx, event_rx) = mpsc::channel(16);

        let actor = Actor::new(
            endpoint.secret_key(),
            live,
            event_tx,
            gossip,
            ticket.clone(),
        )
        .await?;
        let actor_task = tokio::task::spawn(
            async move { actor.run(actor_rx).await }
                .instrument(error_span!("RoomActor", id = ticket.topic_id.fmt_short())),
        );

        Ok(Self {
            handle: RoomHandle {
                ticket,
                me: endpoint_id,
                tx: actor_tx,
                // AbortOnDropHandle: actor stops when all handles are dropped.
                _actor_handle: Arc::new(AbortOnDropHandle::new(actor_task)),
            },
            events: event_rx,
        })
    }

    /// Await the next room event; errors once the actor has stopped.
    pub async fn recv(&mut self) -> Result<RoomEvent> {
        self.events.recv().await.std_context("sender stopped")
    }

    /// Non-blocking variant of [`Self::recv`].
    pub fn try_recv(&mut self) -> Result<RoomEvent, TryRecvError> {
        self.events.try_recv()
    }

    /// Ticket for this room (bootstrap set to our own endpoint id).
    pub fn ticket(&self) -> RoomTicket {
        self.handle.ticket()
    }

    /// Split into the event stream and a cloneable control handle.
    pub fn split(self) -> (RoomEvents, RoomHandle) {
        (self.events, self.handle)
    }

    /// Publish a broadcast into the room under `name`.
    pub async fn publish(&self, name: impl ToString, producer: BroadcastProducer) -> Result<()> {
        self.handle.publish(name, producer).await
    }
}
|
||||
|
||||
/// Commands sent from [`RoomHandle`] into the room actor.
enum ApiMessage {
    /// Publish `producer` under `name` and announce it via the shared kv state.
    Publish {
        name: String,
        producer: BroadcastProducer,
    },
}

/// Events emitted by the room actor.
pub enum RoomEvent {
    /// A remote peer announced (a new set of) broadcasts in the kv store.
    RemoteAnnounced {
        remote: EndpointId,
        broadcasts: Vec<String>,
    },
    /// A MoQ session to a remote was established.
    RemoteConnected {
        session: MoqSession,
    },
    /// A remote broadcast was subscribed successfully.
    BroadcastSubscribed {
        session: MoqSession,
        broadcast: SubscribeBroadcast,
    },
}

// Key under which each peer stores its serialized `PeerState`.
const PEER_STATE_KEY: &[u8] = b"s";

/// Per-peer state replicated via the gossip kv store (postcard-encoded).
#[derive(Debug, Clone, Serialize, Deserialize)]
struct PeerState {
    // Names of broadcasts the peer currently publishes.
    broadcasts: Vec<String>,
}

// One kv store update: (author, key, signed value).
type KvEntry = (EndpointId, Bytes, SignedValue);

/// Identifies a broadcast by publishing peer and broadcast name.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd, Hash, derive_more::Display)]
#[display("{}:{}", _0.fmt_short(), _1)]
struct BroadcastId(EndpointId, String);
|
||||
|
||||
/// Room actor state: tracks active publishes/subscribes and drives
/// connection attempts plus the replicated kv peer state.
struct Actor {
    // Our endpoint id; used to skip our own kv updates.
    me: EndpointId,
    // Kept alive for the lifetime of the room.
    _gossip: Gossip,
    live: Live,
    // Broadcasts we are subscribed to (or currently connecting to).
    active_subscribe: HashSet<BroadcastId>,
    // Names of broadcasts we currently publish.
    active_publish: HashSet<String>,
    // In-flight connect+subscribe attempts, resolving to (id, result).
    connecting:
        FuturesUnordered<BoxFuture<(BroadcastId, Result<(MoqSession, SubscribeBroadcast)>)>>,
    // Futures resolving when a subscribed broadcast closes.
    subscribe_closed: FuturesUnordered<BoxFuture<BroadcastId>>,
    // Futures resolving when one of our published broadcasts closes.
    publish_closed: FuturesUnordered<BoxFuture<String>>,
    event_tx: mpsc::Sender<RoomEvent>,
    // Gossip-backed kv store client and our signing write scope.
    kv: iroh_smol_kv::Client,
    kv_writer: WriteScope,
}
|
||||
|
||||
impl Actor {
    /// Set up the actor: subscribe to the gossip topic and create the
    /// replicated kv client used to exchange peer state.
    async fn new(
        me: &SecretKey,
        live: Live,
        event_tx: mpsc::Sender<RoomEvent>,
        gossip: Gossip,
        ticket: RoomTicket,
    ) -> Result<Self> {
        let topic = gossip
            .subscribe(ticket.topic_id, ticket.bootstrap.clone())
            .await?;
        let kv = iroh_smol_kv::Client::local(
            topic,
            iroh_smol_kv::Config {
                anti_entropy_interval: Duration::from_secs(60),
                fast_anti_entropy_interval: Duration::from_secs(1),
                // Entries expire so peers that vanish silently drop out.
                expiry: Some(ExpiryConfig {
                    check_interval: Duration::from_secs(10),
                    horizon: Duration::from_secs(60 * 2),
                }),
            },
        );
        let kv_writer = kv.write(me.clone());
        Ok(Self {
            me: me.public(),
            live,
            _gossip: gossip,
            active_subscribe: Default::default(),
            active_publish: Default::default(),
            connecting: Default::default(),
            subscribe_closed: Default::default(),
            publish_closed: Default::default(),
            event_tx,
            kv,
            kv_writer,
        })
    }

    /// Main loop: multiplex kv updates, API commands, connect results, and
    /// closed-broadcast notifications until the inbox closes.
    pub async fn run(mut self, mut inbox: mpsc::Receiver<ApiMessage>) {
        // Subscribe to the full kv store: existing entries plus live updates.
        let updates = self
            .kv
            .subscribe_with_opts(Subscribe {
                mode: SubscribeMode::Both,
                filter: Filter::ALL,
            })
            .stream();
        tokio::pin!(updates);

        loop {
            tokio::select! {
                Some(update) = updates.next() => {
                    match update {
                        Err(err) => warn!("gossip kv update failed: {err:#}"),
                        Ok(update) => self.handle_gossip_update(update).await,
                    }
                }
                msg = inbox.recv() => {
                    match msg {
                        // All handles dropped: shut down the actor.
                        None => break,
                        Some(msg) => self.handle_api_message(msg).await
                    }
                }
                // Guards keep select! from polling empty FuturesUnordered
                // (which would resolve to None and starve the loop).
                Some((id, res)) = self.connecting.next(), if !self.connecting.is_empty() => {
                    match res {
                        Ok((session, broadcast)) => {
                            let closed_fut = broadcast.closed();
                            self.event_tx.send(RoomEvent::BroadcastSubscribed { session, broadcast }).await.ok();
                            // Track closure so we can retry on a later announce.
                            self.subscribe_closed.push(Box::pin(async move {
                                closed_fut.await;
                                id
                            }))
                        }
                        Err(err) => {
                            self.active_subscribe.remove(&id);
                            warn!("Subscribing to broadcast {id} failed: {err:#}");
                        }
                    }
                }
                Some(id) = self.subscribe_closed.next(), if !self.subscribe_closed.is_empty() => {
                    debug!("broadcast closed: {id}");
                    self.active_subscribe.remove(&id);
                }
                Some(name) = self.publish_closed.next(), if !self.publish_closed.is_empty() => {
                    self.active_publish.remove(&name);
                    // Re-announce our (shrunken) publish set to peers.
                    self.update_kv().await;
                }
            }
        }
    }

    /// Handle a command from a [`RoomHandle`].
    async fn handle_api_message(&mut self, msg: ApiMessage) {
        match msg {
            ApiMessage::Publish { name, producer } => {
                // Capture the closed-future before handing the producer over.
                let closed = producer.consume().closed();
                self.live.publish(name.clone(), producer).await.ok();
                self.active_publish.insert(name.clone());
                self.publish_closed.push(Box::pin(async move {
                    closed.await;
                    name
                }));
                // Announce the new broadcast to peers via the kv store.
                self.update_kv().await;
            }
        }
    }

    /// React to a peer-state update: start subscribing to any broadcast we
    /// are not already tracking, and surface the announcement as an event.
    async fn handle_gossip_update(&mut self, entry: KvEntry) {
        let (remote, key, value) = entry;
        // Ignore our own writes and unrelated keys.
        if remote == self.me || &key != PEER_STATE_KEY {
            return;
        }
        // Silently skip undecodable state (e.g. from incompatible versions).
        let Ok(value) = postcard::from_bytes::<PeerState>(&value.value) else {
            return;
        };
        let PeerState { broadcasts } = value;
        for name in broadcasts.clone() {
            let id = BroadcastId(remote, name.clone());
            // `insert` returning false means we already track this broadcast.
            if !self.active_subscribe.insert(id.clone()) {
                continue;
            }
            let live = self.live.clone();
            self.connecting.push(Box::pin(async move {
                let session = live.connect_and_subscribe(remote, &name).await;
                (id, session)
            }));
        }
        self.event_tx
            .send(RoomEvent::RemoteAnnounced { remote, broadcasts })
            .await
            .ok();
    }

    /// Write our current publish set into the replicated kv store.
    async fn update_kv(&self) {
        let state = PeerState {
            broadcasts: self.active_publish.iter().cloned().collect(),
        };
        if let Err(err) = self
            .kv_writer
            .put(PEER_STATE_KEY, postcard::to_stdvec(&state).unwrap())
            .await
        {
            warn!("failed to update gossip kv: {err:#}");
        }
    }
}
|
||||
|
||||
mod ticket {
|
||||
use std::str::FromStr;
|
||||
|
||||
use iroh::EndpointId;
|
||||
use iroh_gossip::TopicId;
|
||||
use n0_error::{Result, StdResultExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
#[derive(Debug, Serialize, Deserialize, Clone, derive_more::Display)]
|
||||
#[display("{}", iroh_tickets::Ticket::serialize(self))]
|
||||
pub struct RoomTicket {
|
||||
pub bootstrap: Vec<EndpointId>,
|
||||
pub topic_id: TopicId,
|
||||
}
|
||||
|
||||
impl RoomTicket {
|
||||
pub fn new(topic_id: TopicId, bootstrap: impl IntoIterator<Item = EndpointId>) -> Self {
|
||||
Self {
|
||||
bootstrap: bootstrap.into_iter().collect(),
|
||||
topic_id,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn generate() -> Self {
|
||||
Self {
|
||||
bootstrap: vec![],
|
||||
topic_id: TopicId::from_bytes(rand::random()),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn new_from_env() -> Result<Self> {
|
||||
if let Ok(value) = std::env::var("IROH_LIVE_ROOM") {
|
||||
value
|
||||
.parse()
|
||||
.std_context("failed to parse ticket from IROH_LIVE_ROOM environment variable")
|
||||
} else {
|
||||
let topic_id = match std::env::var("IROH_LIVE_TOPIC") {
|
||||
Ok(topic) => TopicId::from_bytes(
|
||||
data_encoding::HEXLOWER
|
||||
.decode(topic.as_bytes())
|
||||
.std_context("invalid hex")?
|
||||
.as_slice()
|
||||
.try_into()
|
||||
.std_context("invalid length")?,
|
||||
),
|
||||
Err(_) => {
|
||||
let topic = TopicId::from_bytes(rand::random());
|
||||
println!(
|
||||
"Created new topic. Reuse with IROH_TOPIC={}",
|
||||
data_encoding::HEXLOWER.encode(topic.as_bytes())
|
||||
);
|
||||
topic
|
||||
}
|
||||
};
|
||||
Ok(Self::new(topic_id, vec![]))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl FromStr for RoomTicket {
|
||||
type Err = iroh_tickets::ParseError;
|
||||
|
||||
fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
|
||||
iroh_tickets::Ticket::deserialize(s)
|
||||
}
|
||||
}
|
||||
|
||||
impl iroh_tickets::Ticket for RoomTicket {
|
||||
const KIND: &'static str = "room";
|
||||
|
||||
fn to_bytes(&self) -> Vec<u8> {
|
||||
postcard::to_stdvec(self).unwrap()
|
||||
}
|
||||
|
||||
fn from_bytes(bytes: &[u8]) -> Result<Self, iroh_tickets::ParseError> {
|
||||
let ticket = postcard::from_bytes(bytes)?;
|
||||
Ok(ticket)
|
||||
}
|
||||
}
|
||||
}
|
||||
199
third_party/iroh-live/iroh-live/src/rooms/publisher.rs
vendored
Normal file
199
third_party/iroh-live/iroh-live/src/rooms/publisher.rs
vendored
Normal file
|
|
@ -0,0 +1,199 @@
|
|||
use std::sync::{Arc, Mutex};
|
||||
|
||||
use moq_lite::BroadcastProducer;
|
||||
use moq_media::{
|
||||
audio::AudioBackend,
|
||||
av::{AudioPreset, VideoPreset},
|
||||
capture::{CameraCapturer, ScreenCapturer},
|
||||
ffmpeg::{H264Encoder, OpusEncoder},
|
||||
publish::{AudioRenditions, PublishBroadcast, VideoRenditions},
|
||||
};
|
||||
use n0_error::{AnyError, Result};
|
||||
use tracing::{info, warn};
|
||||
|
||||
use crate::rooms::RoomHandle;
|
||||
|
||||
/// Names used for the broadcasts published into a room.
#[derive(Debug, strum::Display, strum::EnumString)]
#[strum(serialize_all = "lowercase")]
enum Broadcasts {
    Camera,
    Screen,
}

/// Kind of media stream, used to attribute errors in [`RoomPublisherSync::set_state`].
#[derive(Debug)]
pub enum StreamKind {
    Camera,
    Screen,
    Microphone,
}

/// Desired on/off state for each publishable stream.
#[derive(Default, Clone, Debug)]
pub struct PublishOpts {
    pub camera: bool,
    pub screen: bool,
    pub audio: bool,
}

/// Manager for publish broadcasts in a room
///
/// Synchronous version which spawns all async ops on new tokio tasks. Panics if methods are
/// not called in the context of a tokio runtime.
///
/// Why does this have sync methods? In UI land it is so much easier for the operations to be sync,
/// so this just spawns all async ops on tokio threads. Not yet sure about where this should evolve to
/// but this kept me moving for now.
pub struct RoomPublisherSync {
    // Audio backend used to open the default input device.
    audio_ctx: AudioBackend,
    // Handle to the room the broadcasts are published into.
    room: RoomHandle,
    // Camera broadcast (also carries the microphone audio track).
    camera: Option<Arc<Mutex<PublishBroadcast>>>,
    // Screen-share broadcast.
    screen: Option<Arc<Mutex<PublishBroadcast>>>,
    // Last applied publish state.
    state: PublishOpts,
}
|
||||
|
||||
impl RoomPublisherSync {
    /// Create a publisher for `room` with no streams enabled.
    pub fn new(room: RoomHandle, audio_ctx: AudioBackend) -> Self {
        Self {
            room,
            audio_ctx,
            camera: None,
            screen: None,
            state: Default::default(),
        }
    }

    /// Apply a full publish state, collecting per-stream errors instead of
    /// aborting on the first failure.
    pub fn set_state(&mut self, state: &PublishOpts) -> Result<(), Vec<(StreamKind, AnyError)>> {
        info!(new=?state, old=?self.state, "set publish state");
        let errors = [
            self.set_audio(state.audio)
                .err()
                .map(|e| (StreamKind::Microphone, e)),
            self.set_camera(state.camera)
                .err()
                .map(|e| (StreamKind::Camera, e)),
            self.set_screen(state.screen)
                .err()
                .map(|e| (StreamKind::Screen, e)),
        ]
        .into_iter()
        .flatten()
        .collect::<Vec<_>>();
        if errors.is_empty() {
            Ok(())
        } else {
            Err(errors)
        }
    }

    /// Current publish state.
    pub fn state(&self) -> &PublishOpts {
        &self.state
    }

    /// Whether the camera stream is enabled.
    pub fn camera(&self) -> bool {
        self.state.camera
    }

    /// The camera broadcast, if it has been created.
    pub fn camera_broadcast(&self) -> Option<Arc<Mutex<PublishBroadcast>>> {
        self.camera.clone()
    }

    /// The screen broadcast, if it has been created.
    pub fn screen_broadcast(&self) -> Option<Arc<Mutex<PublishBroadcast>>> {
        self.screen.clone()
    }

    /// Enable or disable camera video capture and encoding.
    pub fn set_camera(&mut self, enable: bool) -> Result<()> {
        if self.state.camera != enable {
            if enable {
                let camera = CameraCapturer::new()?;
                let renditions = VideoRenditions::new::<H264Encoder>(camera, VideoPreset::all());
                self.ensure_camera();
                // `ensure_camera` guarantees `self.camera` is Some here.
                self.camera
                    .as_ref()
                    .unwrap()
                    .lock()
                    .unwrap()
                    .set_video(Some(renditions))?;
            } else if let Some(camera) = self.camera.as_ref() {
                camera.lock().unwrap().set_video(None)?;
            }
            self.state.camera = enable;
        }
        Ok(())
    }

    /// Whether the screen-share stream is enabled.
    pub fn screen(&self) -> bool {
        self.state.screen
    }

    /// Create and publish the camera broadcast if it does not exist yet.
    fn ensure_camera(&mut self) {
        if self.camera.is_none() {
            let broadcast = PublishBroadcast::new();
            self.publish(Broadcasts::Camera, broadcast.producer());
            self.camera = Some(Arc::new(Mutex::new(broadcast)));
        };
    }

    /// Publish `producer` to the room on a background task, logging failures.
    fn publish(&self, name: Broadcasts, producer: BroadcastProducer) {
        let room = self.room.clone();
        tokio::spawn(async move {
            if let Err(err) = room.publish(name, producer).await {
                warn!("publish to room failed: {err:#}");
            }
        });
    }

    /// Enable or disable screen capture and encoding.
    pub fn set_screen(&mut self, enable: bool) -> Result<()> {
        if self.state.screen != enable {
            if enable {
                if self.screen.is_none() {
                    let broadcast = PublishBroadcast::new();
                    self.publish(Broadcasts::Screen, broadcast.producer());
                    self.screen = Some(Arc::new(Mutex::new(broadcast)));
                };

                let screen = ScreenCapturer::new()?;
                let renditions = VideoRenditions::new::<H264Encoder>(screen, VideoPreset::all());
                self.screen
                    .as_mut()
                    .unwrap()
                    .lock()
                    .unwrap()
                    .set_video(Some(renditions))?;
            } else {
                // NOTE(review): disabling drops the whole broadcast, unlike
                // `set_camera` which calls `set_video(None)` — confirm this
                // asymmetry is intentional.
                let _ = self.screen.take();
            }
            self.state.screen = enable;
        }
        Ok(())
    }

    /// Whether the microphone stream is enabled.
    pub fn audio(&self) -> bool {
        self.state.audio
    }

    /// Enable or disable microphone capture; audio is attached to the camera
    /// broadcast (created on demand).
    pub fn set_audio(&mut self, enable: bool) -> Result<()> {
        if self.state.audio != enable {
            if enable {
                self.ensure_camera();
                let camera = self.camera.as_ref().unwrap().clone();
                let audio_ctx = self.audio_ctx.clone();
                // Opening the input device is async; do it off the UI thread.
                tokio::spawn(async move {
                    let mic = match audio_ctx.default_input().await {
                        Err(err) => {
                            warn!("failed to open audio input: {err:#}");
                            return;
                        }
                        Ok(mic) => mic,
                    };
                    let renditions = AudioRenditions::new::<OpusEncoder>(mic, [AudioPreset::Hq]);
                    if let Err(err) = camera.lock().unwrap().set_audio(Some(renditions)) {
                        warn!("failed to set audio: {err:#}");
                    }
                });
            } else if let Some(camera) = self.camera.as_mut() {
                camera.lock().unwrap().set_audio(None)?;
            }
            self.state.audio = enable;
        }
        Ok(())
    }
}
|
||||
61
third_party/iroh-live/iroh-live/src/ticket.rs
vendored
Normal file
61
third_party/iroh-live/iroh-live/src/ticket.rs
vendored
Normal file
|
|
@ -0,0 +1,61 @@
|
|||
use iroh::EndpointAddr;
|
||||
use n0_error::{Result, StdResultExt};
|
||||
use serde::{Deserialize, Serialize};
|
||||
|
||||
/// Ticket identifying a single broadcast on a specific endpoint.
///
/// Displays as `<broadcast_name>@<base32 endpoint addr>` (see `serialize`).
#[derive(Debug, Clone, PartialEq, Eq, derive_more::Display, Serialize, Deserialize)]
#[display("{}", self.serialize())]
pub struct LiveTicket {
    pub endpoint: EndpointAddr,
    pub broadcast_name: String,
}
|
||||
|
||||
impl LiveTicket {
|
||||
pub fn new(endpoint: impl Into<EndpointAddr>, broadcast_name: impl ToString) -> Self {
|
||||
Self {
|
||||
endpoint: endpoint.into(),
|
||||
broadcast_name: broadcast_name.to_string(),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn to_bytes(&self) -> Vec<u8> {
|
||||
postcard::to_stdvec(self).unwrap()
|
||||
}
|
||||
|
||||
pub fn from_bytes(bytes: &[u8]) -> Result<Self> {
|
||||
let ticket = postcard::from_bytes(bytes).std_context("failed to deserialize")?;
|
||||
Ok(ticket)
|
||||
}
|
||||
|
||||
/// Serialize to string.
|
||||
pub fn serialize(&self) -> String {
|
||||
let mut out = self.broadcast_name.clone();
|
||||
out.push_str("@");
|
||||
data_encoding::BASE32_NOPAD
|
||||
.encode_append(&postcard::to_stdvec(&self.endpoint).unwrap(), &mut out);
|
||||
out.to_ascii_lowercase()
|
||||
}
|
||||
|
||||
/// Deserialize from a string.
|
||||
pub fn deserialize(str: &str) -> Result<Self> {
|
||||
let (broadcast_name, encoded_addr) = str
|
||||
.split_once("@")
|
||||
.std_context("invalid ticket: missing @")?;
|
||||
let endpoint_addr: EndpointAddr = postcard::from_bytes(
|
||||
&(data_encoding::BASE32_NOPAD_NOCASE
|
||||
.decode(encoded_addr.as_bytes())
|
||||
.std_context("invalid base32")?),
|
||||
)
|
||||
.std_context("failed to parse")?;
|
||||
Ok(Self {
|
||||
broadcast_name: broadcast_name.to_string(),
|
||||
endpoint: endpoint_addr,
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
impl std::str::FromStr for LiveTicket {
    type Err = n0_error::AnyError;
    /// Parse a string produced by `LiveTicket::serialize`.
    fn from_str(s: &str) -> std::result::Result<Self, Self::Err> {
        LiveTicket::deserialize(s)
    }
}
|
||||
84
third_party/iroh-live/iroh-live/src/util.rs
vendored
Normal file
84
third_party/iroh-live/iroh-live/src/util.rs
vendored
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
use std::time::{Duration, Instant};
|
||||
|
||||
use byte_unit::{Bit, UnitType};
|
||||
use iroh::endpoint::ConnectionStats;
|
||||
|
||||
/// Spawn a named OS thread and panic if spawning fails.
///
/// # Panics
/// Panics if the OS refuses to spawn the thread (the error is included).
pub fn spawn_thread<F, T>(name: impl ToString, f: F) -> std::thread::JoinHandle<T>
where
    F: FnOnce() -> T + Send + 'static,
    T: Send + 'static,
{
    let name_str = name.to_string();
    std::thread::Builder::new()
        .name(name_str.clone())
        .spawn(f)
        // Clippy `expect_fun_call`: `expect(&format!(..))` formatted the
        // message eagerly on every call; `unwrap_or_else` is lazy and also
        // lets us include the actual spawn error.
        .unwrap_or_else(|err| panic!("failed to spawn thread {name_str}: {err}"))
}
|
||||
|
||||
/// Samples connection stats at most once per second and exposes smoothed
/// up/down rates plus the latest RTT.
pub struct StatsSmoother {
    // Upload rate derived from cumulative udp_tx bytes.
    rate_up: Rate,
    // Download rate derived from cumulative udp_rx bytes.
    rate_down: Rate,
    // When the rates were last recomputed.
    last_update: Instant,
    // Most recently sampled round-trip time.
    rtt: Duration,
}
|
||||
|
||||
impl StatsSmoother {
    /// Create a smoother with zeroed rates and RTT.
    pub fn new() -> Self {
        Self {
            rate_up: Default::default(),
            rate_down: Default::default(),
            last_update: Instant::now(),
            rtt: Duration::from_secs(0),
        }
    }
    /// Return smoothed stats, re-sampling `total` only if at least one second
    /// has elapsed since the previous sample (so `total` is called cheaply
    /// from a per-frame render loop).
    pub fn smoothed(&mut self, total: impl FnOnce() -> ConnectionStats) -> SmoothedStats<'_> {
        let now = Instant::now();
        let elapsed = now.duration_since(self.last_update);
        if elapsed >= Duration::from_secs(1) {
            let stats = (total)();
            self.rate_down.update(elapsed, stats.udp_rx.bytes);
            self.rate_up.update(elapsed, stats.udp_tx.bytes);
            self.last_update = now;
            self.rtt = stats.path.rtt;
        }
        SmoothedStats {
            down: &self.rate_down,
            up: &self.rate_up,
            rtt: self.rtt,
        }
    }
}
|
||||
|
||||
/// Transfer rate tracker over a cumulative byte counter.
#[derive(Debug, Clone, Default)]
pub struct Rate {
    /// Total bytes
    pub total: u64,
    /// Rate in bits per second (note: `update` multiplies the byte delta by 8)
    pub rate: f32,
    /// Rate rendered as a string
    pub rate_str: String,
}
|
||||
|
||||
impl Rate {
    /// Recompute the rate from the new cumulative byte total and the elapsed
    /// time since the previous update.
    fn update(&mut self, delta_time: Duration, new_total: u64) {
        // saturating_sub guards against a counter reset going negative.
        let delta = new_total.saturating_sub(self.total);
        let delta_secs = delta_time.as_secs_f32();
        let rate = if delta_secs > 0.0 && delta > 0 {
            // bytes -> bits per second.
            (delta as f32 * 8.0) / delta_secs
        } else {
            0.0
        };
        // Pick a human-readable decimal unit (kbit, Mbit, ...).
        let bit = Bit::from_f32(rate).unwrap();
        let adjusted = bit.get_appropriate_unit(UnitType::Decimal);
        self.rate = rate;
        self.rate_str = format!("{adjusted:.2}/s");
        self.total = new_total;
    }
}
|
||||
|
||||
/// Borrowed view of the smoothed connection stats.
pub struct SmoothedStats<'a> {
    pub rtt: Duration,
    pub down: &'a Rate,
    pub up: &'a Rate,
}
|
||||
19
third_party/iroh-live/iroh-moq/Cargo.toml
vendored
Normal file
19
third_party/iroh-live/iroh-moq/Cargo.toml
vendored
Normal file
|
|
@ -0,0 +1,19 @@
|
|||
[package]
|
||||
name = "iroh-moq"
|
||||
version = "0.1.0"
|
||||
edition = "2024"
|
||||
description = "audio and video live streaming over iroh"
|
||||
authors = ["Franz Heinzmann <frando@n0.computer>"]
|
||||
repository = "https://github.com/n0-computer/iroh-live"
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
[dependencies]
|
||||
iroh = "0.96"
|
||||
moq-lite = "0.10.1"
|
||||
n0-error = { version = "0.1.2", features = ["anyhow"] }
|
||||
n0-future = "0.3.1"
|
||||
tokio = { version = "1.48.0", features = ["sync"] }
|
||||
tokio-util = "0.7.17"
|
||||
tracing = "0.1.41"
|
||||
url = "2.5.7"
|
||||
web-transport-iroh = { version = "0.1.0", path = "../web-transport-iroh" }
|
||||
446
third_party/iroh-live/iroh-moq/src/lib.rs
vendored
Normal file
446
third_party/iroh-live/iroh-moq/src/lib.rs
vendored
Normal file
|
|
@ -0,0 +1,446 @@
|
|||
//! Media-over-QUIC (moq) over iroh: session establishment, broadcast
//! publishing and subscription, driven by a background actor.

use std::{
    collections::{HashMap, hash_map},
    sync::Arc,
};

use iroh::{
    Endpoint, EndpointAddr, EndpointId,
    endpoint::{Connection, ConnectionError},
    protocol::ProtocolHandler,
};
use moq_lite::{BroadcastConsumer, BroadcastProducer, OriginConsumer, OriginProducer};
use n0_error::{AnyError, Result, StdResultExt, anyerr, e, stack_error};
use n0_future::{
    FuturesUnordered, StreamExt,
    boxed::BoxFuture,
    task::{AbortOnDropHandle, JoinSet},
};
use tokio::sync::{mpsc, oneshot};
use tokio_util::sync::CancellationToken;
use tracing::{Instrument, debug, error_span, info, instrument};
use web_transport_iroh::SessionError;

/// The ALPN protocol identifier used for moq connections over iroh.
pub const ALPN: &[u8] = moq_lite::lite::ALPN.as_bytes();
|
||||
|
||||
/// Errors produced by the moq service and its sessions.
#[stack_error(derive, add_meta, from_sources)]
#[allow(private_interfaces)]
pub enum Error {
    /// Dialing the remote endpoint failed.
    #[error(transparent)]
    Connect(iroh::endpoint::ConnectError),
    /// The moq protocol handshake or session failed.
    #[error(transparent)]
    Moq(#[error(source, std_err)] moq_lite::Error),
    /// The WebTransport server side failed.
    #[error(transparent)]
    Server(#[error(source, std_err)] web_transport_iroh::ServerError),
    /// The background actor died unexpectedly.
    #[error("internal consistency error")]
    InternalConsistencyError(#[error(source)] LiveActorDiedError),
    /// Writing a request to the peer failed.
    #[error("failed to perform request")]
    Request(#[error(source, std_err)] iroh::endpoint::WriteError),
}
|
||||
|
||||
/// Errors returned by [`MoqSession::subscribe`].
#[stack_error(derive, add_meta, from_sources)]
#[allow(private_interfaces)]
pub enum SubscribeError {
    /// The announcement stream ended without the requested track appearing.
    #[error("track was not announced")]
    NotAnnounced,
    /// The track was announced but its broadcast is already closed.
    #[error("track was closed")]
    Closed,
    /// The underlying session closed before the track was announced.
    #[error("session was closed")]
    SessionClosed(#[error(source, std_err)] SessionError),
}
|
||||
|
||||
/// Internal error: the background actor's inbox is gone, i.e. the actor task
/// has terminated.
#[stack_error(derive)]
#[error("live actor died")]
struct LiveActorDiedError;

// A failed send to the actor inbox can only mean the actor is dead.
impl From<mpsc::error::SendError<ActorMessage>> for LiveActorDiedError {
    fn from(_value: mpsc::error::SendError<ActorMessage>) -> Self {
        Self
    }
}
|
||||
|
||||
/// Cloneable handle to the moq service.
///
/// All clones talk to one shared background actor; the actor task is aborted
/// when the last clone is dropped.
#[derive(Debug, Clone)]
pub struct Moq {
    /// Inbox of the background actor.
    tx: mpsc::Sender<ActorMessage>,
    /// Cancelled by [`Moq::shutdown`] to close all sessions.
    shutdown_token: CancellationToken,
    /// Keeps the actor task alive; aborts it when the last clone drops.
    _actor_handle: Arc<AbortOnDropHandle<()>>,
}
|
||||
|
||||
impl Moq {
|
||||
pub fn new(endpoint: Endpoint) -> Self {
|
||||
let (tx, rx) = mpsc::channel(16);
|
||||
let actor = Actor::new(endpoint);
|
||||
let shutdown_token = actor.shutdown_token.clone();
|
||||
let actor_task = n0_future::task::spawn(async move {
|
||||
actor.run(rx).instrument(error_span!("LiveActor")).await
|
||||
});
|
||||
Self {
|
||||
shutdown_token,
|
||||
tx,
|
||||
_actor_handle: Arc::new(AbortOnDropHandle::new(actor_task)),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn protocol_handler(&self) -> MoqProtocolHandler {
|
||||
MoqProtocolHandler {
|
||||
tx: self.tx.clone(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn publish(&self, name: impl ToString, producer: BroadcastProducer) -> Result<()> {
|
||||
self.tx
|
||||
.send(ActorMessage::PublishBroadcast {
|
||||
broadcast_name: name.to_string(),
|
||||
producer,
|
||||
})
|
||||
.await
|
||||
.std_context("live actor died")?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
pub async fn published_broadcasts(&self) -> Vec<String> {
|
||||
let (reply, reply_rx) = oneshot::channel();
|
||||
if let Err(_) = self.tx.send(ActorMessage::GetPublished { reply }).await {
|
||||
return vec![];
|
||||
}
|
||||
reply_rx.await.unwrap_or_default()
|
||||
}
|
||||
|
||||
pub async fn connect(&self, remote: impl Into<EndpointAddr>) -> Result<MoqSession, AnyError> {
|
||||
// MoqSession::connect(&self.endpoint, addr).await
|
||||
let (reply, reply_rx) = oneshot::channel();
|
||||
self.tx
|
||||
.send(ActorMessage::Connect {
|
||||
remote: remote.into(),
|
||||
reply,
|
||||
})
|
||||
.await
|
||||
.map_err(|_| LiveActorDiedError)?;
|
||||
reply_rx
|
||||
.await
|
||||
.map_err(|_| LiveActorDiedError)?
|
||||
.map_err(|err| anyerr!(err))
|
||||
}
|
||||
|
||||
pub fn shutdown(&self) {
|
||||
self.shutdown_token.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
/// [`ProtocolHandler`] that accepts incoming moq connections and hands the
/// resulting sessions to the background actor.
#[derive(Debug, Clone)]
pub struct MoqProtocolHandler {
    /// Inbox of the background actor.
    tx: mpsc::Sender<ActorMessage>,
}
|
||||
|
||||
impl MoqProtocolHandler {
    /// Performs the moq handshake on an accepted `connection` and forwards
    /// the resulting session to the actor.
    ///
    /// # Errors
    /// Fails if the handshake fails or the background actor has died.
    async fn handle_connection(&self, connection: Connection) -> Result<(), Error> {
        info!(remote = %connection.remote_id().fmt_short(), "accepted");
        let session = web_transport_iroh::Session::raw(connection);
        let session = MoqSession::session_accept(session).await?;
        self.tx
            .send(ActorMessage::HandleSession { session })
            .await
            .map_err(LiveActorDiedError::from)?;
        Ok(())
    }
}
|
||||
|
||||
impl ProtocolHandler for MoqProtocolHandler {
    // Entry point invoked by the iroh router for each incoming connection
    // with the moq ALPN.
    async fn accept(&self, connection: Connection) -> Result<(), iroh::protocol::AcceptError> {
        self.handle_connection(connection)
            .await
            .map_err(AnyError::from)?;
        Ok(())
    }
}
|
||||
|
||||
// TODO: resubscribing session?
|
||||
// struct MoqSession2 {
|
||||
// session: MoqSession,
|
||||
// tx: mpsc::Sender<ActorMessage>,
|
||||
// remote: EndpointAddr,
|
||||
// }
|
||||
|
||||
// impl MoqSession2 {
|
||||
// pub async fn subscribe(&mut self, name: &str) -> Result<BroadcastConsumer> {
|
||||
// match self.session.subscribe(name).await {
|
||||
// Ok(consumer) => return Ok(consumer),
|
||||
// Err(err) => {
|
||||
// warn!("first attempt to subscribe failed, retrying. reason: {err:#}");
|
||||
// let (reply, reply_rx) = oneshot::channel();
|
||||
// self.tx
|
||||
// .send(ActorMessage::Connect {
|
||||
// remote: self.remote.clone(),
|
||||
// reply,
|
||||
// })
|
||||
// .await
|
||||
// .map_err(|_| LiveActorDiedError)?;
|
||||
// self.session = reply_rx
|
||||
// .await
|
||||
// .map_err(|_| LiveActorDiedError)?
|
||||
// .map_err(|err| anyerr!(err))?;
|
||||
// self.session.subscribe(name).await.map_err(Into::into)
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
|
||||
/// A single moq session with one remote endpoint.
///
/// Cheap to clone; all clones share the same underlying WebTransport session.
#[derive(Clone)]
pub struct MoqSession {
    wt_session: web_transport_iroh::Session,
    /// Local origin: broadcasts we publish to the peer.
    publish: OriginProducer,
    /// Remote origin: broadcasts the peer announces to us.
    subscribe: OriginConsumer,
}
|
||||
|
||||
impl MoqSession {
    /// Dials `remote_addr` with the moq [`ALPN`] and completes the moq
    /// handshake as the connecting side.
    #[instrument(skip_all, fields(remote=tracing::field::Empty))]
    pub async fn connect(
        endpoint: &Endpoint,
        remote_addr: impl Into<EndpointAddr>,
    ) -> Result<Self, Error> {
        let addr = remote_addr.into();
        tracing::Span::current().record("remote", tracing::field::display(addr.id.fmt_short()));
        let connection = endpoint.connect(addr, ALPN).await?;
        let wt_session = web_transport_iroh::Session::raw(connection);
        Self::session_connect(wt_session).await
    }

    /// Runs the moq handshake over an established WebTransport session,
    /// acting as the client side.
    pub async fn session_connect(wt_session: web_transport_iroh::Session) -> Result<Self, Error> {
        let publish = moq_lite::Origin::produce();
        let subscribe = moq_lite::Origin::produce();
        // We can drop the moq_lite::Session, it spawns its tasks in the background anyway.
        // If that changes and it becomes a guard, we should keep it around.
        let _moq_session =
            moq_lite::Session::connect(wt_session.clone(), publish.consumer, subscribe.producer)
                .await?;
        Ok(Self {
            publish: publish.producer,
            subscribe: subscribe.consumer,
            wt_session,
        })
    }

    /// Runs the moq handshake over an established WebTransport session,
    /// acting as the accepting side.
    pub async fn session_accept(wt_session: web_transport_iroh::Session) -> Result<Self, Error> {
        let publish = moq_lite::Origin::produce();
        let subscribe = moq_lite::Origin::produce();
        // We can drop the moq_lite::Session, it spawns its tasks in the background anyway.
        // If that changes and it becomes a guard, we should keep it around.
        let _moq_session =
            moq_lite::Session::accept(wt_session.clone(), publish.consumer, subscribe.producer)
                .await?;
        Ok(Self {
            publish: publish.producer,
            subscribe: subscribe.consumer,
            wt_session,
        })
    }

    /// The id of the remote endpoint of this session.
    pub fn remote_id(&self) -> EndpointId {
        self.wt_session.remote_id()
    }

    /// The underlying QUIC connection.
    pub fn conn(&self) -> &iroh::endpoint::Connection {
        self.wt_session.conn()
    }

    /// Waits until the peer announces the broadcast `name` and returns a
    /// consumer for it.
    ///
    /// # Errors
    /// Fails if the session is (or becomes) closed, if the announcement
    /// stream ends without `name` appearing, or if the broadcast was
    /// announced but already closed.
    pub async fn subscribe(&mut self, name: &str) -> Result<BroadcastConsumer, SubscribeError> {
        // Fail fast if the connection is already gone.
        if let Some(reason) = self.conn().close_reason() {
            return Err(SessionError::from(reason).into());
        }
        // Already announced: consume directly without waiting.
        if let Some(consumer) = self.subscribe.consume_broadcast(name) {
            return Ok(consumer);
        }
        loop {
            // Race the next announcement against session closure.
            let res = tokio::select! {
                res = self.subscribe.announced() => res,
                reason = self.wt_session.closed() => {
                    return Err(reason.into())
                }
            };
            let (path, consumer) = res.ok_or_else(|| e!(SubscribeError::NotAnnounced))?;
            debug!("peer announced broadcast: {path}");
            if path.as_str() == name {
                return consumer.ok_or_else(|| e!(SubscribeError::Closed));
            }
        }
    }

    /// Publishes `broadcast` to the peer under `name`.
    pub fn publish(&self, name: String, broadcast: BroadcastConsumer) {
        self.publish.publish_broadcast(name, broadcast);
    }

    /// Closes the underlying session with the given application error code
    /// and reason bytes.
    pub fn close(&self, error_code: u32, reason: &[u8]) {
        self.wt_session.close(error_code, reason);
    }

    /// Resolves once the session has closed, with the close reason.
    pub async fn closed(&self) -> web_transport_iroh::SessionError {
        self.wt_session.closed().await
    }
}
|
||||
|
||||
/// Messages handled by the background actor.
enum ActorMessage {
    /// Register a freshly accepted incoming session.
    HandleSession {
        session: MoqSession,
    },
    /// Publish a broadcast to all current and future sessions.
    PublishBroadcast {
        broadcast_name: BroadcastName,
        producer: BroadcastProducer,
    },
    /// Connect to a remote (or reuse an existing session) and reply with it.
    Connect {
        remote: EndpointAddr,
        reply: oneshot::Sender<Result<MoqSession, Arc<AnyError>>>,
    },
    /// Reply with the names of all currently published broadcasts.
    GetPublished {
        reply: oneshot::Sender<Vec<BroadcastName>>,
    },
}

/// Name under which a broadcast is published.
type BroadcastName = String;
|
||||
|
||||
#[derive()]
|
||||
struct Actor {
|
||||
endpoint: Endpoint,
|
||||
shutdown_token: CancellationToken,
|
||||
publishing: HashMap<BroadcastName, BroadcastProducer>,
|
||||
publishing_closed_futs: FuturesUnordered<BoxFuture<BroadcastName>>,
|
||||
sessions: HashMap<EndpointId, MoqSession>,
|
||||
session_tasks: JoinSet<(EndpointId, Result<(), web_transport_iroh::SessionError>)>,
|
||||
pending_connects: HashMap<EndpointId, Vec<oneshot::Sender<Result<MoqSession, Arc<AnyError>>>>>,
|
||||
pending_connect_tasks: JoinSet<(EndpointId, Result<MoqSession, AnyError>)>,
|
||||
}
|
||||
|
||||
impl Actor {
|
||||
pub fn new(endpoint: Endpoint) -> Self {
|
||||
Self {
|
||||
endpoint,
|
||||
shutdown_token: CancellationToken::new(),
|
||||
publishing: Default::default(),
|
||||
publishing_closed_futs: Default::default(),
|
||||
sessions: Default::default(),
|
||||
session_tasks: Default::default(),
|
||||
pending_connects: Default::default(),
|
||||
pending_connect_tasks: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
pub async fn run(mut self, mut inbox: mpsc::Receiver<ActorMessage>) {
|
||||
loop {
|
||||
tokio::select! {
|
||||
msg = inbox.recv() => {
|
||||
match msg {
|
||||
None => break,
|
||||
Some(msg) => self.handle_message(msg)
|
||||
}
|
||||
}
|
||||
Some(res) = self.session_tasks.join_next(), if !self.session_tasks.is_empty() => {
|
||||
let (endpoint_id, res) = res.expect("session task panicked");
|
||||
info!(remote=%endpoint_id.fmt_short(), "session closed: {res:?}");
|
||||
self.sessions.remove(&endpoint_id);
|
||||
}
|
||||
Some(name) = self.publishing_closed_futs.next(), if !self.publishing_closed_futs.is_empty() => {
|
||||
self.publishing.remove(&name);
|
||||
}
|
||||
Some(res) = self.pending_connect_tasks.join_next(), if !self.pending_connect_tasks.is_empty() => {
|
||||
let (endpoint_id, res) = res.expect("connect task panicked");
|
||||
match res {
|
||||
Ok(session) => {
|
||||
info!(remote=%endpoint_id.fmt_short(), "connected");
|
||||
self.handle_incoming_session(session);
|
||||
}
|
||||
Err(err) => {
|
||||
info!(remote=%endpoint_id.fmt_short(), "connect failed: {err:#}");
|
||||
let replies = self.pending_connects.remove(&endpoint_id).into_iter().flatten();
|
||||
let err = Arc::new(err);
|
||||
for reply in replies {
|
||||
reply.send(Err(err.clone())).ok();
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_message(&mut self, msg: ActorMessage) {
|
||||
match msg {
|
||||
ActorMessage::HandleSession { session: msg } => self.handle_incoming_session(msg),
|
||||
ActorMessage::PublishBroadcast {
|
||||
broadcast_name: name,
|
||||
producer,
|
||||
} => self.handle_publish_broadcast(name, producer),
|
||||
ActorMessage::Connect { remote, reply } => self.handle_connect(remote, reply),
|
||||
ActorMessage::GetPublished { reply } => {
|
||||
let names = self.publishing.keys().cloned().collect();
|
||||
reply.send(names).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_incoming_session(&mut self, session: MoqSession) {
|
||||
tracing::info!("handle new incoming session");
|
||||
let remote = session.remote_id();
|
||||
for (name, producer) in self.publishing.iter() {
|
||||
session.publish(name.to_string(), producer.consume());
|
||||
}
|
||||
self.sessions.insert(remote, session.clone());
|
||||
for reply in self.pending_connects.remove(&remote).into_iter().flatten() {
|
||||
reply.send(Ok(session.clone())).ok();
|
||||
}
|
||||
|
||||
let shutdown = self.shutdown_token.child_token();
|
||||
self.session_tasks.spawn(async move {
|
||||
let res = tokio::select! {
|
||||
_ = shutdown.cancelled() => {
|
||||
session.close(0u32.into(), b"cancelled");
|
||||
Ok(())
|
||||
}
|
||||
result = session.closed() => match result {
|
||||
SessionError::ConnectionError(ConnectionError::LocallyClosed) => Ok(()),
|
||||
err @ _ => Err(err)
|
||||
},
|
||||
};
|
||||
(remote, res)
|
||||
});
|
||||
}
|
||||
|
||||
fn handle_publish_broadcast(&mut self, name: BroadcastName, producer: BroadcastProducer) {
|
||||
for session in self.sessions.values_mut() {
|
||||
session
|
||||
.publish
|
||||
.publish_broadcast(name.clone(), producer.consume());
|
||||
}
|
||||
let closed = producer.consume().closed();
|
||||
self.publishing.insert(name.clone(), producer);
|
||||
self.publishing_closed_futs.push(Box::pin(async move {
|
||||
closed.await;
|
||||
name
|
||||
}));
|
||||
}
|
||||
|
||||
fn handle_connect(
|
||||
&mut self,
|
||||
remote: EndpointAddr,
|
||||
reply: oneshot::Sender<Result<MoqSession, Arc<AnyError>>>,
|
||||
) {
|
||||
let remote_id = remote.id;
|
||||
if let Some(session) = self.sessions.get(&remote_id) {
|
||||
reply.send(Ok(session.clone())).ok();
|
||||
return;
|
||||
}
|
||||
match self.pending_connects.entry(remote_id) {
|
||||
hash_map::Entry::Occupied(mut entry) => {
|
||||
entry.get_mut().push(reply);
|
||||
}
|
||||
hash_map::Entry::Vacant(entry) => {
|
||||
let endpoint = self.endpoint.clone();
|
||||
self.pending_connect_tasks.spawn(async move {
|
||||
let res = MoqSession::connect(&endpoint, remote)
|
||||
.await
|
||||
.map_err(Into::into);
|
||||
(remote_id, res)
|
||||
});
|
||||
entry.insert(Default::default()).push(reply);
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
88
third_party/iroh-live/moq-media/Cargo.toml
vendored
Normal file
88
third_party/iroh-live/moq-media/Cargo.toml
vendored
Normal file
|
|
@ -0,0 +1,88 @@
|
|||
# Crate manifest for moq-media: native capture/playback, encode/decode.
[package]
name = "moq-media"
version = "0.1.0"
edition = "2024"
description = "native audio and video capturing, playback, encoding, decoding"
authors = ["Franz Heinzmann <frando@n0.computer>"]
repository = "https://github.com/n0-computer/iroh-live"
license = "MIT OR Apache-2.0"

[dependencies]
anyhow = "1.0.100"
bytemuck = "1.24.0"
byte-unit = { version = "5.1", features = ["bit"] }
data-encoding = "2.9.0"
derive_more = { version = "2.0.1", features = ["display", "debug", "eq"] }
# Codec / muxing / filtering backend.
ffmpeg-next = { version = "8.0.0", default-features = false, features = ["device", "format", "filter", "software-resampling", "software-scaling"] }
# Only needed for the static-build feature and per-OS hwaccel features below.
ffmpeg-sys-next = { version = "8.0.1", optional = true }
# Audio graph (mixing, metering, resampling) over cpal.
firewheel = { version = "0.9.1", features = ["cpal", "peak_meter_node", "std", "stream_nodes", "cpal_resample_inputs"] }
hang = "0.9.0"
image = { version = "0.25.8", default-features = false }
moq-lite = "0.10.1"
n0-error = { version = "0.1.2", features = ["anyhow"] }
n0-future = "0.3.1"
n0-watcher = "0.6.0"
# Camera capture.
nokhwa = { version = "0.10", features = [
    "input-native",
    "input-v4l",
    "output-threaded",
] }
postcard = "1.1.3"
rand = "0.9.2"
serde = { version = "1.0.228", features = ["derive"] }
strum = { version = "0.27", features = ["derive"] }
tokio = { version = "1.48.0", features = ["sync"] }
tokio-util = "0.7.17"
tracing = "0.1.41"
# Screen capture.
xcap = "0.8"
# Acoustic echo cancellation.
webrtc-audio-processing = { version = "0.5.0", features = ["bundled"] }
bytes = "1.11.0"
buf-list = "1.1.2"


[dev-dependencies]
clap = { version = "4.5", features = ["derive"] }
eframe = "0.33.0"
postcard = "1.1.3"
tokio = { version = "1.48.0", features = ["full"] }
tracing-subscriber = "0.3.20"

[features]
default = []

# Enable static build of ffmpeg
static = [
    "ffmpeg-next/static",
    "ffmpeg-next/build-lib-openssl",
    "ffmpeg-next/build-license-version3",
    "ffmpeg-next/build-lib-opus",
    "ffmpeg-next/build-lib-x264",
    "ffmpeg-next/build-license-gpl",
    "dep:ffmpeg-sys-next",
]

# Per-OS hardware acceleration backends for the static ffmpeg build.
[target.'cfg(target_os = "macos")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-videotoolbox",
    "build-audiotoolbox",
] }

[target.'cfg(target_os = "linux")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-vaapi",
    # "build-vulkan",
    # "build-lib-libmfx",
] }

[target.'cfg(target_os = "windows")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    "build-lib-d3d11va",
    "build-lib-dxva2",
    # "build-nvidia",
    # "build-amf",
] }

[target.'cfg(target_os = "android")'.dependencies]
ffmpeg-sys-next = { version = "8.0.1", optional = true, features = [
    # "build-mediacodec",
] }
|
||||
527
third_party/iroh-live/moq-media/src/audio.rs
vendored
Normal file
527
third_party/iroh-live/moq-media/src/audio.rs
vendored
Normal file
|
|
@ -0,0 +1,527 @@
|
|||
//! Audio capture and playback built on the firewheel audio graph, with
//! acoustic echo cancellation (AEC) wired between the capture and render
//! sides of the graph.

use std::{
    collections::HashMap,
    sync::{
        Arc, Mutex,
        atomic::{AtomicBool, Ordering},
    },
    time::{Duration, Instant},
};

use anyhow::{Context, Result};
use firewheel::{
    CpalConfig, CpalInputConfig, CpalOutputConfig, FirewheelConfig, FirewheelContext,
    channel_config::{ChannelConfig, ChannelCount, NonZeroChannelCount},
    dsp::volume::{DEFAULT_DB_EPSILON, DbMeterNormalizer},
    graph::PortIdx,
    node::NodeID,
    nodes::{
        peak_meter::{PeakMeterNode, PeakMeterSmoother, PeakMeterState},
        stream::{
            ResamplingChannelConfig,
            reader::{StreamReaderConfig, StreamReaderNode, StreamReaderState},
            writer::{StreamWriterConfig, StreamWriterNode, StreamWriterState},
        },
    },
};
use tokio::sync::{mpsc, mpsc::error::TryRecvError, oneshot};
use tracing::{debug, error, info, trace, warn};

use self::aec::{AecCaptureNode, AecProcessor, AecProcessorConfig, AecRenderNode};
use crate::{
    av::{AudioFormat, AudioSink, AudioSinkHandle, AudioSource},
    util::spawn_thread,
};

mod aec;

// Shared handles to stream node state, safe to move across threads.
type StreamWriterHandle = Arc<Mutex<StreamWriterState>>;
type StreamReaderHandle = Arc<Mutex<StreamReaderState>>;
|
||||
|
||||
/// Cloneable handle to the dedicated audio driver thread.
#[derive(Debug, Clone)]
pub struct AudioBackend {
    /// Inbox of the driver thread.
    tx: mpsc::Sender<DriverMessage>,
}
|
||||
|
||||
impl AudioBackend {
    /// Spawns the audio driver thread and returns a handle to it.
    ///
    /// The driver thread exits once all clones of this handle are dropped.
    pub fn new() -> Self {
        let (tx, rx) = mpsc::channel(32);
        let _handle = spawn_thread("audiodriver", move || AudioDriver::new(rx).run());
        Self { tx }
    }

    /// Opens a capture stream with the default format (mono, 48 kHz).
    pub async fn default_input(&self) -> Result<InputStream> {
        self.input(AudioFormat::mono_48k()).await
    }

    /// Opens a capture stream with the given format.
    ///
    /// # Errors
    /// Fails if the driver thread has exited or stream creation fails.
    pub async fn input(&self, format: AudioFormat) -> Result<InputStream> {
        let (reply, reply_rx) = oneshot::channel();
        self.tx
            .send(DriverMessage::InputStream { format, reply })
            .await?;
        let handle = reply_rx.await??;
        Ok(InputStream { handle, format })
    }

    /// Opens a playback stream with the default format (stereo, 48 kHz).
    pub async fn default_output(&self) -> Result<OutputStream> {
        self.output(AudioFormat::stereo_48k()).await
    }

    /// Opens a playback stream with the given format.
    ///
    /// # Errors
    /// Fails if the driver thread has exited or stream creation fails.
    pub async fn output(&self, format: AudioFormat) -> Result<OutputStream> {
        let (reply, reply_rx) = oneshot::channel();
        self.tx
            .send(DriverMessage::OutputStream { format, reply })
            .await?;
        let handle = reply_rx.await??;
        Ok(handle)
    }
}
|
||||
|
||||
/// A playback stream feeding the output device through the audio graph.
#[derive(Clone)]
pub struct OutputStream {
    /// Shared state of the stream writer node.
    handle: StreamWriterHandle,
    /// Mirrors the paused state for lock-free queries.
    paused: Arc<AtomicBool>,
    /// Smoothed peak levels from this stream's peak meter node.
    peaks: Arc<Mutex<PeakMeterSmoother<2>>>,
    /// Maps dB peaks into a normalized display range.
    normalizer: DbMeterNormalizer,
}
|
||||
|
||||
impl AudioSinkHandle for OutputStream {
    /// Whether the stream is currently paused.
    fn is_paused(&self) -> bool {
        self.paused.load(Ordering::Relaxed)
    }
    /// Pauses playback.
    fn pause(&self) {
        self.paused.store(true, Ordering::Relaxed);
        self.handle.lock().expect("poisoned").pause_stream();
    }

    /// Resumes playback.
    fn resume(&self) {
        self.paused.store(false, Ordering::Relaxed);
        self.handle.lock().expect("poisoned").resume();
    }

    /// Atomically flips the paused flag and applies the matching action.
    fn toggle_pause(&self) {
        // fetch_xor(true) flips the flag and returns the previous value.
        let was_paused = self.paused.fetch_xor(true, Ordering::Relaxed);
        if was_paused {
            self.handle.lock().expect("poisoned").resume();
        } else {
            self.handle.lock().expect("poisoned").pause_stream();
        }
    }

    /// Current smoothed peak level, normalized for display.
    fn smoothed_peak_normalized(&self) -> Option<f32> {
        Some(
            self.peaks
                .lock()
                .expect("poisoned")
                .smoothed_peaks_normalized_mono(&self.normalizer),
        )
    }
}
|
||||
|
||||
impl AudioSink for OutputStream {
|
||||
fn handle(&self) -> Box<dyn AudioSinkHandle> {
|
||||
Box::new(self.clone())
|
||||
}
|
||||
|
||||
fn format(&self) -> Result<AudioFormat> {
|
||||
let info = self.handle.lock().expect("poisoned");
|
||||
let sample_rate = info
|
||||
.sample_rate()
|
||||
.context("output stream misses sample rate")?
|
||||
.get();
|
||||
let channel_count = info.num_channels().get().get();
|
||||
Ok(AudioFormat {
|
||||
sample_rate,
|
||||
channel_count,
|
||||
})
|
||||
}
|
||||
|
||||
fn push_samples(&mut self, samples: &[f32]) -> Result<()> {
|
||||
let mut handle = self.handle.lock().unwrap();
|
||||
|
||||
// If this happens excessively in Release mode, you may want to consider
|
||||
// increasing [`StreamWriterConfig::channel_config.latency_seconds`].
|
||||
if handle.underflow_occurred() {
|
||||
warn!("Underflow occured in stream writer node!");
|
||||
}
|
||||
|
||||
// If this happens excessively in Release mode, you may want to consider
|
||||
// increasing [`StreamWriterConfig::channel_config.capacity_seconds`]. For
|
||||
// example, if you are streaming data from a network, you may want to
|
||||
// increase the capacity to several seconds.
|
||||
if handle.overflow_occurred() {
|
||||
warn!("Overflow occured in stream writer node!");
|
||||
}
|
||||
|
||||
// Wait until the node's processor is ready to receive data.
|
||||
if handle.is_ready() {
|
||||
// let expected_bytes =
|
||||
// frame.samples() * frame.channels() as usize * core::mem::size_of::<f32>();
|
||||
// let cpal_sample_data: &[f32] = bytemuck::cast_slice(&frame.data(0)[..expected_bytes]);
|
||||
handle.push_interleaved(samples);
|
||||
trace!("pushed samples {}", samples.len());
|
||||
} else {
|
||||
warn!("output handle is inactive")
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
|
||||
impl OutputStream {
    /// Whether the underlying stream writer is currently active.
    #[allow(unused)]
    pub fn is_active(&self) -> bool {
        self.handle.lock().expect("poisoned").is_active()
    }
}
|
||||
|
||||
/// A simple AudioSource that reads from the default microphone via Firewheel.
#[derive(Clone)]
pub struct InputStream {
    /// Shared state of the stream reader node.
    handle: StreamReaderHandle,
    /// Format this stream was opened with.
    format: AudioFormat,
}
|
||||
|
||||
impl AudioSource for InputStream {
    /// Clones this source behind a trait object.
    fn cloned_boxed(&self) -> Box<dyn AudioSource> {
        Box::new(self.clone())
    }

    /// The format this stream was opened with.
    fn format(&self) -> AudioFormat {
        self.format
    }

    /// Reads interleaved samples into `buf`.
    ///
    /// Returns `Ok(None)` when the stream is inactive. Not-ready, underflow
    /// and overflow conditions are logged but still report a full buffer to
    /// keep the caller's pacing stable.
    fn pop_samples(&mut self, buf: &mut [f32]) -> Result<Option<usize>> {
        use firewheel::nodes::stream::ReadStatus;
        let mut handle = self.handle.lock().expect("poisoned");
        match handle.read_interleaved(buf) {
            Some(ReadStatus::Ok) => Ok(Some(buf.len())),
            Some(ReadStatus::InputNotReady) => {
                tracing::warn!("audio input not ready");
                // Maintain pacing; still return a frame-sized buffer
                Ok(Some(buf.len()))
            }
            Some(ReadStatus::UnderflowOccurred { num_frames_read }) => {
                tracing::warn!(
                    "audio input underflow: {} frames missing",
                    buf.len() - num_frames_read
                );
                Ok(Some(buf.len()))
            }
            Some(ReadStatus::OverflowCorrected {
                num_frames_discarded,
            }) => {
                tracing::warn!("audio input overflow: {num_frames_discarded} frames discarded");
                Ok(Some(buf.len()))
            }
            None => {
                tracing::warn!("audio input stream is inactive");
                Ok(None)
            }
        }
    }
}
|
||||
|
||||
/// Requests handled by the audio driver thread.
#[derive(derive_more::Debug)]
enum DriverMessage {
    /// Create a playback stream and reply with its handle.
    OutputStream {
        format: AudioFormat,
        #[debug("Sender")]
        reply: oneshot::Sender<Result<OutputStream>>,
    },
    /// Create a capture stream and reply with its reader handle.
    InputStream {
        format: AudioFormat,
        #[debug("Sender")]
        reply: oneshot::Sender<Result<StreamReaderHandle>>,
    },
}
|
||||
|
||||
/// State of the dedicated audio thread driving the firewheel graph.
struct AudioDriver {
    /// The firewheel audio graph context.
    cx: FirewheelContext,
    /// Inbox for stream-creation requests.
    rx: mpsc::Receiver<DriverMessage>,
    /// Echo-cancellation processor shared by the render and capture nodes.
    aec_processor: AecProcessor,
    /// AEC node on the render (output) side of the graph.
    aec_render_node: NodeID,
    /// AEC node on the capture (input) side of the graph.
    aec_capture_node: NodeID,
    /// Peak meter smoothers updated periodically by the driver loop.
    peak_meters: HashMap<NodeID, Arc<Mutex<PeakMeterSmoother<2>>>>,
}
|
||||
|
||||
impl AudioDriver {
|
||||
fn new(rx: mpsc::Receiver<DriverMessage>) -> Self {
|
||||
let config = FirewheelConfig {
|
||||
num_graph_inputs: ChannelCount::new(1).unwrap(),
|
||||
..Default::default()
|
||||
};
|
||||
let mut cx = FirewheelContext::new(config);
|
||||
info!("inputs: {:?}", cx.available_input_devices());
|
||||
info!("outputs: {:?}", cx.available_output_devices());
|
||||
let config = CpalConfig {
|
||||
output: CpalOutputConfig {
|
||||
#[cfg(target_os = "linux")]
|
||||
device_name: Some("pipewire".to_string()),
|
||||
|
||||
..Default::default()
|
||||
},
|
||||
input: Some(CpalInputConfig {
|
||||
#[cfg(target_os = "linux")]
|
||||
device_name: Some("pipewire".to_string()),
|
||||
fail_on_no_input: true,
|
||||
..Default::default()
|
||||
}),
|
||||
};
|
||||
cx.start_stream(config).unwrap();
|
||||
info!(
|
||||
"audio graph in: {:?}",
|
||||
cx.node_info(cx.graph_in_node_id()).map(|x| &x.info)
|
||||
);
|
||||
info!(
|
||||
"audio graph out: {:?}",
|
||||
cx.node_info(cx.graph_out_node_id()).map(|x| &x.info)
|
||||
);
|
||||
|
||||
cx.set_graph_channel_config(ChannelConfig {
|
||||
num_inputs: ChannelCount::new(2).unwrap(),
|
||||
num_outputs: ChannelCount::new(2).unwrap(),
|
||||
});
|
||||
|
||||
let aec_processor = AecProcessor::new(AecProcessorConfig::stereo_in_out(), true)
|
||||
.expect("failed to initialize AEC processor");
|
||||
let aec_render_node = cx.add_node(AecRenderNode::default(), Some(aec_processor.clone()));
|
||||
let aec_capture_node = cx.add_node(AecCaptureNode::default(), Some(aec_processor.clone()));
|
||||
|
||||
let layout = &[(0, 0), (1, 1)];
|
||||
|
||||
cx.connect(cx.graph_in_node_id(), aec_capture_node, layout, true)
|
||||
.unwrap();
|
||||
cx.connect(aec_render_node, cx.graph_out_node_id(), layout, true)
|
||||
.unwrap();
|
||||
|
||||
Self {
|
||||
cx,
|
||||
rx,
|
||||
aec_processor,
|
||||
aec_render_node,
|
||||
aec_capture_node,
|
||||
peak_meters: Default::default(),
|
||||
}
|
||||
}
|
||||
|
||||
fn run(&mut self) {
|
||||
const INTERVAL: Duration = Duration::from_millis(10);
|
||||
const PEAK_UPDATE_INTERVAL: Duration = Duration::from_millis(40);
|
||||
let mut last_delay: f64 = 0.;
|
||||
let mut last_peak_update = Instant::now();
|
||||
|
||||
loop {
|
||||
let tick = Instant::now();
|
||||
if self.drain_messages().is_err() {
|
||||
info!("closing audio driver: message channel closed");
|
||||
break;
|
||||
}
|
||||
|
||||
if let Err(e) = self.cx.update() {
|
||||
error!("audio backend error: {:?}", &e);
|
||||
|
||||
// if let UpdateError::StreamStoppedUnexpectedly(_) = e {
|
||||
// // Notify the stream node handles that the output stream has stopped.
|
||||
// // This will automatically stop any active streams on the nodes.
|
||||
// cx.node_state_mut::<StreamWriterState>(stream_writer_id)
|
||||
// .unwrap()
|
||||
// .stop_stream();
|
||||
// cx.node_state_mut::<StreamReaderState>(stream_reader_id)
|
||||
// .unwrap()
|
||||
// .stop_stream();
|
||||
|
||||
// // The stream has stopped unexpectedly (i.e the user has
|
||||
// // unplugged their headphones.)
|
||||
// //
|
||||
// // Typically you should start a new stream as soon as
|
||||
// // possible to resume processing (event if it's a dummy
|
||||
// // output device).
|
||||
// //
|
||||
// // In this example we just quit the application.
|
||||
// break;
|
||||
// }
|
||||
}
|
||||
|
||||
if let Some(info) = self.cx.stream_info() {
|
||||
let delay = info.input_to_output_latency_seconds;
|
||||
if (last_delay - delay).abs() > (1. / 1000.) {
|
||||
let delay_ms = (delay * 1000.) as u32;
|
||||
info!("update processor delay to {delay_ms}ms");
|
||||
self.aec_processor.set_stream_delay(delay_ms);
|
||||
last_delay = delay;
|
||||
}
|
||||
}
|
||||
|
||||
// Update peak meters
|
||||
let delta = last_peak_update.elapsed();
|
||||
if delta > PEAK_UPDATE_INTERVAL {
|
||||
for (id, smoother) in self.peak_meters.iter_mut() {
|
||||
smoother.lock().expect("poisoned").update(
|
||||
self.cx
|
||||
.node_state::<PeakMeterState<2>>(*id)
|
||||
.unwrap()
|
||||
.peak_gain_db(DEFAULT_DB_EPSILON),
|
||||
delta.as_secs_f32(),
|
||||
);
|
||||
}
|
||||
last_peak_update = Instant::now();
|
||||
}
|
||||
|
||||
std::thread::sleep(INTERVAL.saturating_sub(tick.elapsed()));
|
||||
}
|
||||
}
|
||||
|
||||
fn drain_messages(&mut self) -> Result<(), ()> {
|
||||
loop {
|
||||
match self.rx.try_recv() {
|
||||
Err(TryRecvError::Disconnected) => {
|
||||
info!("stopping audio thread: backend handle dropped");
|
||||
break Err(());
|
||||
}
|
||||
Err(TryRecvError::Empty) => {
|
||||
break Ok(());
|
||||
}
|
||||
Ok(message) => self.handle_message(message),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
fn handle_message(&mut self, message: DriverMessage) {
|
||||
debug!("handle {message:?}");
|
||||
match message {
|
||||
DriverMessage::OutputStream { format, reply } => {
|
||||
let res = self
|
||||
.output_stream(format)
|
||||
.inspect_err(|err| warn!("failed to create audio output stream: {err:#}"));
|
||||
reply.send(res).ok();
|
||||
}
|
||||
DriverMessage::InputStream { format, reply } => {
|
||||
let res = self
|
||||
.input_stream(format)
|
||||
.inspect_err(|err| warn!("failed to create audio input stream: {err:#}"));
|
||||
reply.send(res).ok();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Creates a playback (output) stream routed through the shared audio graph.
///
/// Wiring: `StreamWriterNode -> PeakMeterNode -> AEC render node`, so that
/// played-back audio both drives the level meter and is fed to echo
/// cancellation as the render reference.
///
/// # Errors
/// Fails if `format.channel_count` is zero.
fn output_stream(&mut self, format: AudioFormat) -> Result<OutputStream> {
    let channel_count = format.channel_count;
    let sample_rate = format.sample_rate;
    // setup stream
    let stream_writer_id = self.cx.add_node(
        StreamWriterNode,
        Some(StreamWriterConfig {
            channels: NonZeroChannelCount::new(channel_count)
                .context("channel count may not be zero")?,
            ..Default::default()
        }),
    );
    let graph_out = self.aec_render_node;
    // let graph_out_info = self
    //     .cx
    //     .node_info(graph_out)
    //     .context("missing audio output node")?;

    // Per-stream peak meter; the smoother is shared with the UI side via
    // `self.peak_meters` (read by the peak-update loop in `run`).
    let peak_meter_node = PeakMeterNode::<2> { enabled: true };
    let peak_meter_id = self.cx.add_node(peak_meter_node.clone(), None);
    let peak_meter_smoother =
        Arc::new(Mutex::new(PeakMeterSmoother::<2>::new(Default::default())));
    self.peak_meters
        .insert(peak_meter_id, peak_meter_smoother.clone());
    self.cx
        .connect(peak_meter_id, graph_out, &[(0, 0), (1, 1)], true)
        .unwrap();

    // Mono sources are duplicated to both stereo channels; anything else is
    // wired channel-for-channel. NOTE(review): the `0` arm is unreachable —
    // a zero channel count was already rejected by NonZeroChannelCount above.
    let layout: &[(PortIdx, PortIdx)] = match channel_count {
        0 => anyhow::bail!("audio stream has no channels"),
        1 => &[(0, 0), (0, 1)],
        _ => &[(0, 0), (1, 1)],
    };
    self.cx
        .connect(stream_writer_id, peak_meter_id, layout, false)
        .unwrap();
    let output_stream_sample_rate = self.cx.stream_info().unwrap().sample_rate;
    let event = self
        .cx
        .node_state_mut::<StreamWriterState>(stream_writer_id)
        .unwrap()
        .start_stream(
            sample_rate.try_into().unwrap(),
            output_stream_sample_rate,
            ResamplingChannelConfig {
                // Generous buffer to absorb producer/consumer jitter.
                capacity_seconds: 3.,
                ..Default::default()
            },
        )
        .unwrap();
    info!("started output stream");
    self.cx.queue_event_for(stream_writer_id, event.into());
    // Wrap the handles in an `Arc<Mutex<T>>>` so that we can send them to other threads.
    let handle = self
        .cx
        .node_state::<StreamWriterState>(stream_writer_id)
        .unwrap()
        .handle();
    Ok(OutputStream {
        handle: Arc::new(handle),
        paused: Arc::new(AtomicBool::new(false)),
        peaks: peak_meter_smoother,
        // NOTE(review): args look like (floor dB, ceiling dB, knee/center) —
        // confirm against DbMeterNormalizer's definition.
        normalizer: DbMeterNormalizer::new(-60., 0., -20.),
    })
}
|
||||
|
||||
/// Creates a capture (input) stream fed from the AEC capture node.
///
/// Wiring: `AEC capture node -> StreamReaderNode`; the returned handle pulls
/// echo-cancelled, resampled samples off the graph.
///
/// # Errors
/// Fails if `format.channel_count` is zero or the capture node is missing.
fn input_stream(&mut self, format: AudioFormat) -> Result<StreamReaderHandle> {
    let sample_rate = format.sample_rate;
    let channel_count = format.channel_count;
    // Setup stream reader node
    let stream_reader_id = self.cx.add_node(
        StreamReaderNode,
        Some(StreamReaderConfig {
            channels: NonZeroChannelCount::new(channel_count)
                .context("channel count may not be zero")?,
            ..Default::default()
        }),
    );
    let graph_in_node_id = self.aec_capture_node;
    let graph_in_info = self
        .cx
        .node_info(graph_in_node_id)
        .context("missing audio input node")?;

    // Map (graph output channels, requested stream channels) to port pairs:
    // mono graph -> duplicate to stereo; stereo -> passthrough; mono request
    // takes only the left channel.
    let layout: &[(PortIdx, PortIdx)] = match (
        graph_in_info.info.channel_config.num_outputs.get(),
        channel_count,
    ) {
        (0, _) => anyhow::bail!("audio input has no channels"),
        (1, 2) => &[(0, 0), (0, 1)],
        (2, 2) => &[(0, 0), (1, 1)],
        (_, 1) => &[(0, 0)],
        _ => &[(0, 0), (1, 1)],
    };
    self.cx
        .connect(graph_in_node_id, stream_reader_id, layout, false)
        .unwrap();

    let input_stream_sample_rate = self.cx.stream_info().unwrap().sample_rate;
    let event = self
        .cx
        .node_state_mut::<StreamReaderState>(stream_reader_id)
        .unwrap()
        .start_stream(
            sample_rate.try_into().unwrap(),
            input_stream_sample_rate,
            ResamplingChannelConfig {
                // Generous buffer to absorb producer/consumer jitter.
                capacity_seconds: 3.0,
                ..Default::default()
            },
        )
        .unwrap();
    self.cx.queue_event_for(stream_reader_id, event.into());

    let handle = self
        .cx
        .node_state::<StreamReaderState>(stream_reader_id)
        .unwrap()
        .handle();
    Ok(Arc::new(handle))
}
|
||||
}
|
||||
452
third_party/iroh-live/moq-media/src/audio/aec.rs
vendored
Normal file
452
third_party/iroh-live/moq-media/src/audio/aec.rs
vendored
Normal file
|
|
@ -0,0 +1,452 @@
|
|||
pub use self::{
|
||||
firewheel_nodes::{AecCaptureNode, AecRenderNode},
|
||||
processor::{AecProcessor, AecProcessorConfig},
|
||||
};
|
||||
|
||||
mod processor {
|
||||
use std::{
|
||||
num::NonZeroU32,
|
||||
sync::{
|
||||
Arc, Mutex,
|
||||
atomic::{AtomicBool, Ordering},
|
||||
},
|
||||
};
|
||||
|
||||
use anyhow::Result;
|
||||
use tracing::{debug, info};
|
||||
use webrtc_audio_processing::{
|
||||
Config, EchoCancellation, EchoCancellationSuppressionLevel, InitializationConfig,
|
||||
};
|
||||
|
||||
/// Channel-count configuration for [`AecProcessor`].
#[derive(Debug, Clone)]
pub struct AecProcessorConfig {
    /// Number of capture (microphone) channels.
    pub num_input_channels: NonZeroU32,
    /// Number of render (playback) channels.
    pub num_output_channels: NonZeroU32,
}
|
||||
|
||||
impl Default for AecProcessorConfig {
|
||||
fn default() -> Self {
|
||||
Self {
|
||||
num_input_channels: 2.try_into().unwrap(),
|
||||
num_output_channels: 2.try_into().unwrap(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl AecProcessorConfig {
|
||||
pub fn stereo_in_out() -> Self {
|
||||
Self::default()
|
||||
}
|
||||
}
|
||||
|
||||
/// Cheaply clonable handle to a shared WebRTC audio processor; clones share
/// the same underlying state via `Arc`.
#[derive(Clone, Debug)]
pub struct AecProcessor(Arc<Inner>);
|
||||
|
||||
/// Shared state behind [`AecProcessor`].
#[derive(derive_more::Debug)]
struct Inner {
    #[debug("Processor")]
    // The actual WebRTC processor; locked per 10 ms frame.
    processor: Mutex<webrtc_audio_processing::Processor>,
    // Last config applied to `processor`, kept so `set_stream_delay` can
    // patch one field and re-apply the whole config.
    config: Mutex<Config>,
    // capture_delay: AtomicU64,
    // playback_delay: AtomicU64,
    // Runtime on/off switch; when false the process_* methods are no-ops.
    enabled: AtomicBool,
    // capture_channels: AtomicUsize,
    // playback_channels: AtomicUsize,
}
|
||||
|
||||
impl Default for AecProcessor {
|
||||
fn default() -> Self {
|
||||
Self::new(Default::default(), true).expect("failed to initialize AecProcessor")
|
||||
}
|
||||
}
|
||||
|
||||
impl AecProcessor {
    /// Creates an echo-cancellation processor.
    ///
    /// `config` fixes the capture/render channel counts; `enabled` sets the
    /// initial state of the runtime on/off switch (see [`Self::set_enabled`]).
    ///
    /// # Errors
    /// Propagates initialization failures from `webrtc_audio_processing`.
    pub fn new(config: AecProcessorConfig, enabled: bool) -> anyhow::Result<Self> {
        let suppression_level = EchoCancellationSuppressionLevel::High;
        // High pass filter is a prerequisite to running echo cancellation.
        let processor_config = Config {
            echo_cancellation: Some(EchoCancellation {
                suppression_level,
                // No fixed delay up front; delay-agnostic mode estimates it.
                // `set_stream_delay` can supply one later.
                stream_delay_ms: None,
                enable_delay_agnostic: true,
                enable_extended_filter: true,
            }),
            enable_high_pass_filter: true,
            ..Config::default()
        };

        let mut processor = webrtc_audio_processing::Processor::new(&InitializationConfig {
            num_capture_channels: config.num_input_channels.get() as i32,
            num_render_channels: config.num_output_channels.get() as i32,
            enable_experimental_agc: true,
            enable_intelligibility_enhancer: true, // ..InitializationConfig::default()
        })?;
        processor.set_config(processor_config.clone());

        // processor.set_config(config.clone());
        info!("init audio processor (config={config:?})");
        Ok(Self(Arc::new(Inner {
            processor: Mutex::new(processor),
            config: Mutex::new(processor_config),
            enabled: AtomicBool::new(enabled),
        })))
    }

    /// Whether AEC processing is currently active.
    pub fn is_enabled(&self) -> bool {
        self.0.enabled.load(Ordering::SeqCst)
    }

    /// Turns processing on or off at runtime. When off, the `process_*`
    /// methods are no-ops and frames pass through unmodified.
    #[allow(unused)]
    pub fn set_enabled(&self, enabled: bool) {
        let _prev = self.0.enabled.swap(enabled, Ordering::SeqCst);
    }

    /// Processes and modifies the audio frame from a capture device by applying
    /// signal processing as specified in the config. `frame` should hold an
    /// interleaved f32 audio frame, with [`NUM_SAMPLES_PER_FRAME`] samples.
    // webrtc-audio-processing expects a 10ms chunk for each process call.
    pub fn process_capture_frame(
        &self,
        frame: &mut [f32],
    ) -> Result<(), webrtc_audio_processing::Error> {
        if !self.is_enabled() {
            // Disabled: leave the frame untouched.
            return Ok(());
        }
        self.0
            .processor
            .lock()
            .expect("poisoned")
            .process_capture_frame(frame)
    }

    /// Processes and optionally modifies the audio frame from a playback device.
    /// `frame` should hold an interleaved `f32` audio frame, with
    /// [`NUM_SAMPLES_PER_FRAME`] samples.
    pub fn process_render_frame(
        &self,
        frame: &mut [f32],
    ) -> Result<(), webrtc_audio_processing::Error> {
        if !self.is_enabled() {
            // Disabled: leave the frame untouched.
            return Ok(());
        }
        self.0
            .processor
            .lock()
            .expect("poisoned")
            .process_render_frame(frame)
    }

    /// Updates the render-to-capture delay hint used by the echo canceller.
    // NOTE(review): takes `config` lock then `processor` lock; this is the
    // only place both are held, so the ordering cannot deadlock today.
    pub fn set_stream_delay(&self, delay_ms: u32) {
        debug!("updating stream delay to {delay_ms}ms");
        // let playback = self.0.playback_delay.load(Ordering::Relaxed);
        // let capture = self.0.capture_delay.load(Ordering::Relaxed);
        // let total = playback + capture;
        let mut config = self.0.config.lock().expect("poisoned");
        config.echo_cancellation.as_mut().unwrap().stream_delay_ms = Some(delay_ms as i32);
        self.0
            .processor
            .lock()
            .expect("poisoned")
            .set_config(config.clone());
    }
}
|
||||
}
|
||||
|
||||
mod firewheel_nodes {
|
||||
use std::collections::VecDeque;
|
||||
|
||||
use firewheel::{
|
||||
StreamInfo,
|
||||
channel_config::{ChannelConfig, ChannelCount},
|
||||
diff::{Diff, Patch},
|
||||
event::ProcEvents,
|
||||
node::{
|
||||
AudioNode, AudioNodeInfo, AudioNodeProcessor, ConstructProcessorContext, ProcBuffers,
|
||||
ProcExtra, ProcInfo, ProcStreamCtx, ProcessStatus,
|
||||
},
|
||||
};
|
||||
use webrtc_audio_processing::NUM_SAMPLES_PER_FRAME;
|
||||
|
||||
use super::AecProcessor;
|
||||
|
||||
// Both AEC graph nodes are hard-wired for stereo I/O.
const CHANNELS: usize = 2;
// Interleaved samples per 10 ms WebRTC processing chunk (per-channel frame
// count times channel count).
const FRAME_SAMPLES: usize = (NUM_SAMPLES_PER_FRAME as usize) * CHANNELS;
|
||||
|
||||
/// Simple render-side node: feeds output audio into WebRTC's render stream.
|
||||
#[derive(Diff, Patch, Debug, Clone, Copy, PartialEq)]
|
||||
pub struct AecRenderNode {
|
||||
pub enabled: bool,
|
||||
}
|
||||
|
||||
impl Default for AecRenderNode {
|
||||
fn default() -> Self {
|
||||
Self { enabled: true }
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNode for AecRenderNode {
    /// We use the wrapped WebRTC processor as our configuration object.
    ///
    /// Note: `WebrtcAudioProcessor` already internally wraps an `Arc<Inner>`,
    /// so cloning this config shares the underlying processor between nodes.
    type Configuration = AecProcessor;

    fn info(&self, _config: &Self::Configuration) -> AudioNodeInfo {
        // Fixed stereo in/out, matching `CHANNELS`.
        AudioNodeInfo::new()
            .debug_name("webrtc_render")
            .channel_config(ChannelConfig {
                num_inputs: ChannelCount::STEREO,
                num_outputs: ChannelCount::STEREO,
            })
    }

    fn construct_processor(
        &self,
        config: &Self::Configuration,
        _cx: ConstructProcessorContext,
    ) -> impl AudioNodeProcessor {
        // Clone = share the same underlying Arc<Inner>.
        let webrtc = config.clone();

        // Inform the processor how many playback channels we have.
        // (You can handle errors here instead of unwrap() in real code.)
        // webrtc.init_playback(CHANNELS).ok();

        RenderProcessor {
            enabled: self.enabled,
            processor: webrtc,
            // Initial capacity for four 10 ms frames of backlog.
            in_ring: VecDeque::with_capacity(FRAME_SAMPLES * 4),
            out_ring: VecDeque::with_capacity(FRAME_SAMPLES * 4),
            tmp_chunk: vec![0.0; FRAME_SAMPLES],
        }
    }
}
|
||||
|
||||
/// Realtime processor for [`AecRenderNode`]: batches graph-sized audio blocks
/// into WebRTC's fixed 10 ms frames and back.
struct RenderProcessor {
    // Mirrors AecRenderNode::enabled, updated via patches.
    enabled: bool,
    processor: AecProcessor,
    // Interleaved input samples to be fed into WebRTC in 10ms chunks.
    in_ring: VecDeque<f32>,
    // Interleaved processed samples coming back from WebRTC.
    out_ring: VecDeque<f32>,
    // Scratch buffer for one NUM_SAMPLES_PER_FRAME chunk (interleaved).
    tmp_chunk: Vec<f32>,
}
|
||||
|
||||
impl AudioNodeProcessor for RenderProcessor {
    // Re-chunks arbitrary-size blocks into 10 ms frames for the AEC render
    // stream. Until a full frame has accumulated, silence is emitted, which
    // introduces up to one frame of startup latency that persists for the
    // stream's lifetime.
    fn process(
        &mut self,
        info: &ProcInfo,
        buffers: ProcBuffers,
        events: &mut ProcEvents,
        _extra: &mut ProcExtra,
    ) -> ProcessStatus {
        // Handle parameter patches.
        for patch in events.drain_patches::<AecRenderNode>() {
            match patch {
                AecRenderNodePatch::Enabled(enabled) => {
                    self.enabled = enabled;
                    if !self.enabled {
                        // Clear any buffered state when disabling to avoid stale audio.
                        self.in_ring.clear();
                        self.out_ring.clear();
                    }
                }
            }
        }

        let num_frames = info.frames as usize;
        // println!("num_frames: {num_frames}");

        // Get input/output slices like in the FilterNode example.
        let in_l = &buffers.inputs[0][..num_frames];
        let in_r = &buffers.inputs[1][..num_frames];

        // Split so we can borrow both output channels mutably at once.
        let (out_l, out_rest) = buffers.outputs.split_first_mut().unwrap();
        let out_l = &mut out_l[..num_frames];
        let out_r = &mut out_rest[0][..num_frames];

        // If disabled, just pass through.
        if !self.enabled {
            out_l.copy_from_slice(in_l);
            out_r.copy_from_slice(in_r);
            return ProcessStatus::OutputsModified;
        }

        // 1. Push current block into the interleaved input ring buffer.
        for i in 0..num_frames {
            self.in_ring.push_back(in_l[i]);
            self.in_ring.push_back(in_r[i]);
        }

        // 2. While we have at least one full 10ms frame, process it.
        while self.in_ring.len() >= FRAME_SAMPLES {
            // Fill tmp_chunk with a full frame of interleaved samples.
            for s in &mut self.tmp_chunk[..FRAME_SAMPLES] {
                *s = self.in_ring.pop_front().unwrap();
            }

            // Feed into processor render stream.
            // NOTE(review): processing errors are silently dropped here; the
            // (possibly unprocessed) chunk is forwarded regardless.
            let _ = self.processor.process_render_frame(&mut self.tmp_chunk);

            // Store processed samples into the output ring.
            for &s in &self.tmp_chunk[..FRAME_SAMPLES] {
                self.out_ring.push_back(s);
            }
        }

        // 3. Produce outputs for this audio block.
        //
        // We always need `num_frames * CHANNELS` samples. If we don't have
        // enough processed samples yet, we output silence for the missing part.
        for i in 0..num_frames {
            if self.out_ring.len() >= CHANNELS {
                out_l[i] = self.out_ring.pop_front().unwrap();
                out_r[i] = self.out_ring.pop_front().unwrap();
            } else {
                // Not enough processed data yet -> output silence.
                out_l[i] = 0.0;
                out_r[i] = 0.0;
            }
        }

        ProcessStatus::OutputsModified
    }

    fn new_stream(&mut self, _stream_info: &StreamInfo, _ctx: &mut ProcStreamCtx) {
        // Reset buffers for new stream.
        self.in_ring.clear();
        self.out_ring.clear();
    }
}
|
||||
|
||||
/// Capture-side node: feeds mic audio into [`AecProcessor`]'s capture stream.
#[derive(Diff, Patch, Debug, Clone, Copy, PartialEq)]
pub struct AecCaptureNode {
    /// When false, audio passes through unprocessed.
    pub enabled: bool,
}
|
||||
|
||||
impl Default for AecCaptureNode {
|
||||
fn default() -> Self {
|
||||
Self { enabled: true }
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioNode for AecCaptureNode {
    // Cloning the configuration shares the same underlying processor with
    // the render node (AecProcessor wraps an Arc internally).
    type Configuration = AecProcessor;

    fn info(&self, _config: &Self::Configuration) -> AudioNodeInfo {
        // Fixed stereo in/out, matching `CHANNELS`.
        AudioNodeInfo::new()
            .debug_name("webrtc_capture")
            .channel_config(ChannelConfig {
                num_inputs: ChannelCount::STEREO,
                num_outputs: ChannelCount::STEREO,
            })
    }

    fn construct_processor(
        &self,
        config: &Self::Configuration,
        _cx: ConstructProcessorContext,
    ) -> impl AudioNodeProcessor {
        CaptureProcessor {
            enabled: self.enabled,
            processor: config.clone(),
            // Initial capacity for four 10 ms frames of backlog.
            in_ring: VecDeque::with_capacity(FRAME_SAMPLES * 4),
            out_ring: VecDeque::with_capacity(FRAME_SAMPLES * 4),
            tmp_chunk: vec![0.0; FRAME_SAMPLES],
        }
    }
}
|
||||
|
||||
/// Realtime processor for [`AecCaptureNode`]: batches graph-sized audio
/// blocks into WebRTC's fixed 10 ms frames and back.
struct CaptureProcessor {
    // Mirrors AecCaptureNode::enabled, updated via patches.
    enabled: bool,
    processor: AecProcessor,
    // Interleaved input samples to be fed into WebRTC in 10ms chunks.
    in_ring: VecDeque<f32>,
    // Interleaved processed samples coming back from WebRTC.
    out_ring: VecDeque<f32>,
    // Scratch buffer for one NUM_SAMPLES_PER_FRAME chunk (interleaved).
    tmp_chunk: Vec<f32>,
}
|
||||
|
||||
impl AudioNodeProcessor for CaptureProcessor {
    // Mirror image of RenderProcessor::process, but runs the mic signal
    // through the capture side of the AEC (where echo removal happens).
    fn process(
        &mut self,
        info: &ProcInfo,
        buffers: ProcBuffers,
        events: &mut ProcEvents,
        _extra: &mut ProcExtra,
    ) -> ProcessStatus {
        // Handle parameter patches.
        for patch in events.drain_patches::<AecCaptureNode>() {
            match patch {
                AecCaptureNodePatch::Enabled(enabled) => {
                    self.enabled = enabled;
                    if !self.enabled {
                        // Drop buffered state when disabling to avoid stale audio.
                        self.in_ring.clear();
                        self.out_ring.clear();
                    }
                }
            }
        }

        let frames = info.frames;
        let num_frames = frames as usize;

        let in_l = &buffers.inputs[0][..num_frames];
        let in_r = &buffers.inputs[1][..num_frames];

        // Split so we can borrow both output channels mutably at once.
        let (out_l, out_rest) = buffers.outputs.split_first_mut().unwrap();
        let out_l = &mut out_l[..num_frames];
        let out_r = &mut out_rest[0][..num_frames];

        if !self.enabled {
            // Bypass if disabled.
            out_l.copy_from_slice(in_l);
            out_r.copy_from_slice(in_r);
            return ProcessStatus::OutputsModified;
        }

        // 1. Push current block into the interleaved input ring buffer.
        for i in 0..num_frames {
            self.in_ring.push_back(in_l[i]);
            self.in_ring.push_back(in_r[i]);
        }

        // 2. While we have at least one full 10ms frame, process it.
        while self.in_ring.len() >= FRAME_SAMPLES {
            for s in &mut self.tmp_chunk[..FRAME_SAMPLES] {
                *s = self.in_ring.pop_front().unwrap();
            }

            // NOTE(review): processing errors are silently dropped here; the
            // (possibly unprocessed) chunk is forwarded regardless.
            let _ = self.processor.process_capture_frame(&mut self.tmp_chunk);

            for &s in &self.tmp_chunk[..FRAME_SAMPLES] {
                self.out_ring.push_back(s);
            }
        }

        // 3. Produce outputs for this audio block.
        //
        // If we don't have enough processed samples to cover the whole block,
        // we output silence for the missing frames.
        for i in 0..num_frames {
            if self.out_ring.len() >= CHANNELS {
                out_l[i] = self.out_ring.pop_front().unwrap();
                out_r[i] = self.out_ring.pop_front().unwrap();
            } else {
                out_l[i] = 0.0;
                out_r[i] = 0.0;
            }
        }

        ProcessStatus::OutputsModified
    }

    fn new_stream(&mut self, _stream_info: &StreamInfo, _ctx: &mut ProcStreamCtx) {
        // Reset state for new stream.
        self.in_ring.clear();
        self.out_ring.clear();
    }
}
|
||||
}
|
||||
265
third_party/iroh-live/moq-media/src/av.rs
vendored
Normal file
265
third_party/iroh-live/moq-media/src/av.rs
vendored
Normal file
|
|
@ -0,0 +1,265 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use anyhow::Result;
|
||||
use image::RgbaImage;
|
||||
use strum::{Display, EnumString, VariantNames};
|
||||
|
||||
/// Sample rate and channel count of a PCM audio stream.
#[derive(Copy, Clone, Debug)]
pub struct AudioFormat {
    /// Samples per second per channel (e.g. 48_000).
    pub sample_rate: u32,
    /// Number of interleaved channels (1 = mono, 2 = stereo).
    pub channel_count: u32,
}
|
||||
|
||||
impl AudioFormat {
|
||||
pub fn mono_48k() -> Self {
|
||||
Self {
|
||||
sample_rate: 48_000,
|
||||
channel_count: 1,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn stereo_48k() -> Self {
|
||||
Self {
|
||||
sample_rate: 48_000,
|
||||
channel_count: 2,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_hang_config(config: &hang::catalog::AudioConfig) -> Self {
|
||||
Self {
|
||||
channel_count: config.channel_count,
|
||||
sample_rate: config.sample_rate,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Bundles the audio and video decoder implementations a backend provides.
pub trait Decoders {
    /// Audio decoder type for this backend.
    type Audio: AudioDecoder;
    /// Video decoder type for this backend.
    type Video: VideoDecoder;
}
|
||||
|
||||
/// A pull-based producer of interleaved `f32` audio samples (e.g. a microphone).
pub trait AudioSource: Send + 'static {
    /// Returns a boxed clone of this source so another owner can pull from it.
    fn cloned_boxed(&self) -> Box<dyn AudioSource>;
    /// Format of the samples written by [`Self::pop_samples`].
    fn format(&self) -> AudioFormat;
    /// Fills `buf` with interleaved samples.
    // NOTE(review): the meaning of the `Option<usize>` (samples written vs.
    // end-of-stream) is not shown here — confirm with the implementations.
    fn pop_samples(&mut self, buf: &mut [f32]) -> Result<Option<usize>>;
}
|
||||
|
||||
/// A push-based consumer of interleaved `f32` audio samples (e.g. speakers).
pub trait AudioSink: AudioSinkHandle {
    /// Format the sink expects for [`Self::push_samples`].
    fn format(&self) -> Result<AudioFormat>;
    /// Queues interleaved samples for playback.
    fn push_samples(&mut self, buf: &[f32]) -> Result<()>;
    /// Returns a control-only handle (pause/resume/peaks) for other threads.
    fn handle(&self) -> Box<dyn AudioSinkHandle>;
}
|
||||
|
||||
/// Thread-safe control surface for an audio sink: pause state and metering.
pub trait AudioSinkHandle: Send + 'static {
    /// Suspends playback.
    fn pause(&self);
    /// Resumes playback.
    fn resume(&self);
    /// Whether playback is currently paused.
    fn is_paused(&self) -> bool;
    /// Flips the pause state.
    fn toggle_pause(&self);
    /// Smoothed peak, normalized to 0..1.
    // TODO: document how smoothing and normalization are expected
    fn smoothed_peak_normalized(&self) -> Option<f32> {
        // Default: metering unsupported.
        None
    }
}
|
||||
|
||||
/// Constructible audio encoder; the working interface lives in
/// [`AudioEncoderInner`] so `Box<dyn AudioEncoder>` stays object-safe.
pub trait AudioEncoder: AudioEncoderInner {
    /// Creates an encoder for `format` tuned by the given quality preset.
    fn with_preset(format: AudioFormat, preset: AudioPreset) -> Result<Self>
    where
        Self: Sized;
}
|
||||
/// Object-safe encoding interface: raw samples in, encoded frames out.
pub trait AudioEncoderInner: Send + 'static {
    /// Human-readable codec/encoder name.
    fn name(&self) -> &str;
    /// Catalog entry describing the encoded stream.
    fn config(&self) -> hang::catalog::AudioConfig;
    /// Feeds interleaved `f32` samples into the encoder.
    fn push_samples(&mut self, samples: &[f32]) -> Result<()>;
    /// Pops the next encoded frame, or `None` if none is ready yet.
    fn pop_packet(&mut self) -> Result<Option<hang::Frame>>;
}
|
||||
|
||||
impl AudioEncoderInner for Box<dyn AudioEncoder> {
|
||||
fn name(&self) -> &str {
|
||||
(&**self).name()
|
||||
}
|
||||
|
||||
fn config(&self) -> hang::catalog::AudioConfig {
|
||||
(&**self).config()
|
||||
}
|
||||
|
||||
fn push_samples(&mut self, samples: &[f32]) -> Result<()> {
|
||||
(&mut **self).push_samples(samples)
|
||||
}
|
||||
|
||||
fn pop_packet(&mut self) -> Result<Option<hang::Frame>> {
|
||||
(&mut **self).pop_packet()
|
||||
}
|
||||
}
|
||||
|
||||
/// Decodes encoded audio frames back into interleaved `f32` samples.
pub trait AudioDecoder: Send + 'static {
    /// Creates a decoder for the stream described by `config`, resampling /
    /// remixing to `target_format` as needed.
    fn new(config: &hang::catalog::AudioConfig, target_format: AudioFormat) -> Result<Self>
    where
        Self: Sized;
    /// Feeds one encoded frame into the decoder.
    fn push_packet(&mut self, packet: hang::Frame) -> Result<()>;
    /// Borrows the next block of decoded samples, or `None` if none is ready.
    fn pop_samples(&mut self) -> Result<Option<&[f32]>>;
}
|
||||
|
||||
/// Byte order of 32-bit-per-pixel raw video data.
#[derive(Copy, Clone, Debug, PartialEq)]
pub enum PixelFormat {
    /// Red, green, blue, alpha byte order.
    Rgba,
    /// Blue, green, red, alpha byte order.
    Bgra,
}
|
||||
|
||||
impl Default for PixelFormat {
|
||||
fn default() -> Self {
|
||||
PixelFormat::Rgba
|
||||
}
|
||||
}
|
||||
|
||||
/// Pixel layout and size of a raw video frame.
#[derive(Clone, Debug)]
pub struct VideoFormat {
    /// Byte order of each 32-bit pixel.
    pub pixel_format: PixelFormat,
    /// `[width, height]` in pixels.
    pub dimensions: [u32; 2],
}
|
||||
|
||||
/// One raw (unencoded) video frame; cloning is cheap because the pixel data
/// is reference-counted `Bytes`.
#[derive(Clone, Debug)]
pub struct VideoFrame {
    /// Pixel layout and dimensions of `raw`.
    pub format: VideoFormat,
    /// Raw pixel bytes in the layout described by `format`.
    pub raw: bytes::Bytes,
}
|
||||
|
||||
/// A startable producer of raw video frames (camera, screen capture, ...).
pub trait VideoSource: Send + 'static {
    /// Short identifier for logging/catalog purposes (e.g. "cam", "screen").
    fn name(&self) -> &str;
    /// Format of frames returned by [`Self::pop_frame`].
    fn format(&self) -> VideoFormat;
    /// Fetches the next frame, or `None` when no frame is available.
    fn pop_frame(&mut self) -> Result<Option<VideoFrame>>;
    /// Begins capturing.
    fn start(&mut self) -> Result<()>;
    /// Stops capturing.
    fn stop(&mut self) -> Result<()>;
}
|
||||
|
||||
/// Constructible video encoder; the working interface lives in
/// [`VideoEncoderInner`] so `Box<dyn VideoEncoder>` stays object-safe.
pub trait VideoEncoder: VideoEncoderInner {
    /// Creates an encoder targeting the given resolution preset.
    fn with_preset(preset: VideoPreset) -> Result<Self>
    where
        Self: Sized;
}
|
||||
|
||||
/// Object-safe encoding interface: raw frames in, encoded frames out.
pub trait VideoEncoderInner: Send + 'static {
    /// Human-readable codec/encoder name.
    fn name(&self) -> &str;
    /// Catalog entry describing the encoded stream.
    fn config(&self) -> hang::catalog::VideoConfig;
    /// Feeds one raw frame into the encoder.
    fn push_frame(&mut self, frame: VideoFrame) -> Result<()>;
    /// Pops the next encoded frame, or `None` if none is ready yet.
    fn pop_packet(&mut self) -> Result<Option<hang::Frame>>;
}
|
||||
|
||||
impl VideoEncoderInner for Box<dyn VideoEncoder> {
|
||||
fn name(&self) -> &str {
|
||||
(&**self).name()
|
||||
}
|
||||
|
||||
fn config(&self) -> hang::catalog::VideoConfig {
|
||||
(&**self).config()
|
||||
}
|
||||
|
||||
fn push_frame(&mut self, frame: VideoFrame) -> Result<()> {
|
||||
(&mut **self).push_frame(frame)
|
||||
}
|
||||
|
||||
fn pop_packet(&mut self) -> Result<Option<hang::Frame>> {
|
||||
(&mut **self).pop_packet()
|
||||
}
|
||||
}
|
||||
|
||||
/// Decodes encoded video frames back into raw images.
pub trait VideoDecoder: Send + 'static {
    /// Creates a decoder for the stream described by `config`, producing
    /// frames per `playback_config` (e.g. pixel format).
    fn new(config: &hang::catalog::VideoConfig, playback_config: &DecodeConfig) -> Result<Self>
    where
        Self: Sized;
    /// Human-readable codec/decoder name.
    fn name(&self) -> &str;
    /// Fetches the next decoded frame, or `None` if none is ready.
    fn pop_frame(&mut self) -> Result<Option<DecodedFrame>>;
    /// Feeds one encoded frame into the decoder.
    fn push_packet(&mut self, packet: hang::Frame) -> Result<()>;
    /// Informs the decoder of the current display size.
    // NOTE(review): whether this scales output or merely hints is not visible
    // here — confirm with the implementations.
    fn set_viewport(&mut self, w: u32, h: u32);
}
|
||||
|
||||
/// A decoded video frame together with its presentation timestamp.
pub struct DecodedFrame {
    /// Decoded RGBA image data.
    pub frame: image::Frame,
    /// Presentation timestamp relative to the start of the stream.
    pub timestamp: Duration,
}
|
||||
|
||||
impl DecodedFrame {
|
||||
pub fn img(&self) -> &RgbaImage {
|
||||
self.frame.buffer()
|
||||
}
|
||||
}
|
||||
|
||||
/// Supported audio codecs; strum gives lowercase string forms (e.g. "opus").
#[derive(Debug, Clone, Copy, Display, EnumString, VariantNames)]
#[strum(serialize_all = "lowercase")]
pub enum AudioCodec {
    Opus,
}
|
||||
|
||||
/// Supported video codecs; strum gives lowercase string forms
/// (e.g. "h264", "av1").
#[derive(Debug, Clone, Copy, Display, EnumString, VariantNames)]
#[strum(serialize_all = "lowercase")]
pub enum VideoCodec {
    H264,
    Av1,
}
|
||||
|
||||
/// Standard video resolution presets.
///
/// Variant order matters: the `Ord` derive makes presets compare by
/// resolution, lowest (`P180`) to highest (`P1080`).
#[derive(Debug, Clone, Copy, Display, EnumString, VariantNames, Eq, PartialEq, Ord, PartialOrd)]
pub enum VideoPreset {
    /// 320x180.
    #[strum(serialize = "180p")]
    P180,
    /// 640x360.
    #[strum(serialize = "360p")]
    P360,
    /// 1280x720.
    #[strum(serialize = "720p")]
    P720,
    /// 1920x1080.
    #[strum(serialize = "1080p")]
    P1080,
}
|
||||
|
||||
impl VideoPreset {
|
||||
pub fn all() -> [VideoPreset; 4] {
|
||||
[Self::P180, Self::P360, Self::P720, Self::P1080]
|
||||
}
|
||||
|
||||
pub fn dimensions(&self) -> (u32, u32) {
|
||||
match self {
|
||||
Self::P180 => (320, 180),
|
||||
Self::P360 => (640, 360),
|
||||
Self::P720 => (1280, 720),
|
||||
Self::P1080 => (1920, 1080),
|
||||
}
|
||||
}
|
||||
|
||||
pub fn width(&self) -> u32 {
|
||||
self.dimensions().0
|
||||
}
|
||||
|
||||
pub fn height(&self) -> u32 {
|
||||
self.dimensions().1
|
||||
}
|
||||
|
||||
pub fn fps(&self) -> u32 {
|
||||
30
|
||||
}
|
||||
}
|
||||
|
||||
/// Audio quality presets; strum gives lowercase string forms ("hq", "lq").
#[derive(Debug, Clone, Copy, Display, EnumString, VariantNames, Eq, PartialEq)]
#[strum(serialize_all = "lowercase")]
pub enum AudioPreset {
    /// High quality.
    Hq,
    /// Low quality.
    Lq,
}
|
||||
|
||||
/// Playback quality selector; defaults to [`Quality::High`].
#[derive(Debug, Clone, Copy, Display, EnumString, VariantNames, Eq, PartialEq, Default)]
#[strum(serialize_all = "lowercase")]
pub enum Quality {
    Highest,
    #[default]
    High,
    Mid,
    Low,
}
|
||||
|
||||
/// Options passed to [`VideoDecoder::new`].
#[derive(Clone, Default)]
pub struct DecodeConfig {
    /// Desired pixel layout of decoded frames (defaults to RGBA).
    pub pixel_format: PixelFormat,
}
|
||||
|
||||
/// Top-level playback options: how to decode and which quality tier to pick.
#[derive(Clone, Default)]
pub struct PlaybackConfig {
    /// Decoder output options.
    pub decode_config: DecodeConfig,
    /// Quality tier to select (defaults to `High`).
    pub quality: Quality,
}
|
||||
233
third_party/iroh-live/moq-media/src/capture.rs
vendored
Normal file
233
third_party/iroh-live/moq-media/src/capture.rs
vendored
Normal file
|
|
@ -0,0 +1,233 @@
|
|||
use std::str::FromStr;
|
||||
|
||||
use anyhow::{Context, Result};
|
||||
use nokhwa::{
|
||||
nokhwa_initialize,
|
||||
pixel_format::RgbFormat,
|
||||
utils::{
|
||||
CameraFormat, CameraIndex, FrameFormat, RequestedFormat, RequestedFormatType, Resolution,
|
||||
},
|
||||
};
|
||||
use tracing::{debug, info, trace, warn};
|
||||
use xcap::{Monitor, VideoRecorder};
|
||||
|
||||
use crate::{
|
||||
av::{PixelFormat, VideoFormat, VideoFrame, VideoSource},
|
||||
ffmpeg::util::MjpgDecoder,
|
||||
};
|
||||
|
||||
/// Captures the contents of one monitor via `xcap`, delivering frames
/// through an internal channel.
pub struct ScreenCapturer {
    // Kept alive only so the recorder's monitor handle stays valid.
    pub(crate) _monitor: Monitor,
    // Monitor resolution in pixels at construction time.
    pub(crate) width: u32,
    pub(crate) height: u32,
    // Recorder that pushes frames into `rx` once started.
    pub(crate) video_recorder: VideoRecorder,
    // Receives captured frames from the recorder.
    pub(crate) rx: std::sync::mpsc::Receiver<xcap::Frame>,
}
|
||||
|
||||
// TODO: Review if sound.
// SAFETY: NOT verified. `xcap::VideoRecorder` / `Monitor` may wrap platform
// handles that are not safe to move across threads on every OS; this only
// asserts the capturer can be *moved* to another thread, not shared.
// NOTE(review): confirm against xcap's documentation before relying on this.
unsafe impl Send for ScreenCapturer {}
|
||||
|
||||
impl Drop for ScreenCapturer {
|
||||
fn drop(&mut self) {
|
||||
self.video_recorder.stop().ok();
|
||||
}
|
||||
}
|
||||
|
||||
impl ScreenCapturer {
    /// Opens a recorder for the first monitor reported by `xcap`.
    ///
    /// # Errors
    /// Fails when no monitor is available, monitor metadata cannot be read,
    /// or the recorder cannot be created.
    pub fn new() -> Result<Self> {
        info!("Initializing screen capturer (xcap)");

        let monitors = Monitor::all().context("Failed to get monitors")?;
        if monitors.is_empty() {
            return Err(anyhow::anyhow!("No monitors available"));
        }
        info!("Available monitors: {monitors:?}");

        // First monitor in xcap's list. NOTE(review): xcap's ordering decides
        // which screen this is — confirm it is the primary display.
        let monitor = monitors.into_iter().next().unwrap();
        let width = monitor.width()?;
        let height = monitor.height()?;
        let name = monitor
            .name()
            .unwrap_or_else(|_| "Unknown Monitor".to_string());

        info!("Using monitor: {} ({}x{})", name, width, height);

        let (video_recorder, rx) = monitor.video_recorder()?;

        Ok(Self {
            _monitor: monitor,
            video_recorder,
            rx,
            width,
            height,
        })
    }
}
|
||||
|
||||
impl VideoSource for ScreenCapturer {
    fn name(&self) -> &str {
        "screen"
    }

    fn format(&self) -> VideoFormat {
        VideoFormat {
            pixel_format: PixelFormat::Rgba,
            dimensions: [self.width, self.height],
        }
    }

    /// Starts the underlying recorder; frames begin arriving on `self.rx`.
    fn start(&mut self) -> Result<()> {
        self.video_recorder.start()?;
        Ok(())
    }

    fn stop(&mut self) -> Result<()> {
        self.video_recorder.stop()?;
        Ok(())
    }

    /// Returns the most recent captured frame.
    ///
    /// NOTE(review): despite the `Option` return type, this never yields
    /// `Ok(None)` — when no frame is buffered it *blocks* on `rx.recv()`
    /// until one arrives or errors if the recorder hung up.
    fn pop_frame(&mut self) -> anyhow::Result<Option<VideoFrame>> {
        let mut raw_frame = None;
        // We are only interested in the latest frame.
        // Drain the channel to not build up memory.
        while let Ok(next) = self.rx.try_recv() {
            raw_frame = Some(next)
        }
        let raw_frame = match raw_frame {
            Some(frame) => frame,
            None => self
                .rx
                .recv()
                .context("Screen recorder did not produce new frame")?,
        };
        Ok(Some(VideoFrame {
            format: VideoFormat {
                pixel_format: PixelFormat::Rgba,
                // Use the frame's own size; it may differ from the cached
                // monitor size (e.g. after a resolution change).
                dimensions: [raw_frame.width, raw_frame.height],
            },
            raw: raw_frame.raw.into(),
        }))
    }
}
|
||||
|
||||
/// Captures frames from a camera via `nokhwa`.
pub struct CameraCapturer {
    // Open camera handle; stream started/stopped via VideoSource.
    pub(crate) camera: nokhwa::Camera,
    // Optional ffmpeg-based MJPEG path (see pop_frame).
    pub(crate) mjpg_decoder: MjpgDecoder,
    // Negotiated capture resolution in pixels.
    pub(crate) width: u32,
    pub(crate) height: u32,
}
|
||||
|
||||
impl CameraCapturer {
|
||||
pub fn new() -> Result<Self> {
|
||||
info!("Initializing camera capturer (nokhwa)");
|
||||
nokhwa_initialize(|granted| {
|
||||
debug!("User selected camera access: {}", granted);
|
||||
});
|
||||
|
||||
let cameras = nokhwa::query(nokhwa::utils::ApiBackend::Auto)?;
|
||||
if cameras.is_empty() {
|
||||
return Err(anyhow::anyhow!("No cameras available"));
|
||||
}
|
||||
info!("Available cameras: {cameras:?}");
|
||||
|
||||
let camera_index = match std::env::var("IROH_LIVE_CAMERA").ok() {
|
||||
None => {
|
||||
// Order of cameras in nokhwa is reversed from usual order (primary camera is last).
|
||||
let first_camera = cameras.last().unwrap();
|
||||
info!(": {}", first_camera.human_name());
|
||||
first_camera.index().clone()
|
||||
}
|
||||
Some(camera_name) => match u32::from_str(&camera_name).ok() {
|
||||
Some(num) => CameraIndex::Index(num),
|
||||
None => CameraIndex::String(camera_name),
|
||||
},
|
||||
};
|
||||
let mut camera = nokhwa::Camera::new(
|
||||
camera_index,
|
||||
RequestedFormat::new::<RgbFormat>(RequestedFormatType::AbsoluteHighestResolution),
|
||||
)?;
|
||||
info!("Using camera: {}", camera.info().human_name());
|
||||
let available_formats = camera.compatible_camera_formats()?;
|
||||
debug!("Available formats: {available_formats:?}",);
|
||||
if let Some(format) = Self::select_format(available_formats, Resolution::new(1920, 1080)) {
|
||||
if let Err(err) = camera.set_camera_requset(RequestedFormat::new::<RgbFormat>(
|
||||
RequestedFormatType::Exact(format),
|
||||
)) {
|
||||
warn!(?format, "Failed to change camera format: {err:#}");
|
||||
}
|
||||
}
|
||||
info!("Using format: {}", camera.camera_format());
|
||||
let resolution = camera.resolution();
|
||||
Ok(Self {
|
||||
camera,
|
||||
mjpg_decoder: MjpgDecoder::new()?,
|
||||
width: resolution.width(),
|
||||
height: resolution.height(),
|
||||
})
|
||||
}
|
||||
|
||||
fn select_format(
|
||||
mut formats: Vec<CameraFormat>,
|
||||
desired_resolution: Resolution,
|
||||
) -> Option<CameraFormat> {
|
||||
formats.sort_by(|a, b| {
|
||||
a.resolution()
|
||||
.cmp(&b.resolution())
|
||||
.then(a.frame_rate().cmp(&b.frame_rate()))
|
||||
});
|
||||
formats
|
||||
.iter()
|
||||
.find(|format| format.resolution() >= desired_resolution)
|
||||
.or_else(|| formats.last())
|
||||
.cloned()
|
||||
}
|
||||
}
|
||||
|
||||
impl VideoSource for CameraCapturer {
    fn name(&self) -> &str {
        "cam"
    }
    fn format(&self) -> VideoFormat {
        VideoFormat {
            pixel_format: PixelFormat::Rgba,
            dimensions: [self.width, self.height],
        }
    }

    /// Opens the camera stream; frames become available to `pop_frame`.
    fn start(&mut self) -> Result<()> {
        self.camera.open_stream()?;
        Ok(())
    }

    fn stop(&mut self) -> Result<()> {
        self.camera.stop_stream()?;
        Ok(())
    }

    /// Captures and decodes one frame.
    ///
    /// MJPEG frames are decoded via ffmpeg when `IROH_LIVE_MJPEG_FFMPEG` is
    /// set; everything else goes through nokhwa's RGBA decoder.
    /// NOTE(review): whether `Camera::frame` blocks until a frame is ready
    /// depends on the nokhwa backend — confirm before calling on a
    /// latency-sensitive thread.
    fn pop_frame(&mut self) -> anyhow::Result<Option<VideoFrame>> {
        let start = std::time::Instant::now();
        let frame = self
            .camera
            .frame()
            .context("Failed to capture camera frame")?;
        trace!("pop frame: capture took {:?}", start.elapsed());
        let start = std::time::Instant::now();
        let frame = match frame.source_frame_format() {
            FrameFormat::MJPEG if std::env::var("IROH_LIVE_MJPEG_FFMPEG").is_ok() => {
                trace!("decode ffmpeg");
                self.mjpg_decoder.decode_frame(frame.buffer())?
            }
            _ => {
                let image = frame
                    .decode_image::<nokhwa::pixel_format::RgbAFormat>()
                    .context("Failed to decode camera frame")?;
                VideoFrame {
                    format: self.format(),
                    raw: image.into_raw().into(),
                }
            }
        };
        trace!("pop frame: decode took {:?}", start.elapsed());
        Ok(Some(frame))
    }
}
|
||||
93
third_party/iroh-live/moq-media/src/ffmpeg/audio/decoder.rs
vendored
Normal file
93
third_party/iroh-live/moq-media/src/ffmpeg/audio/decoder.rs
vendored
Normal file
|
|
@ -0,0 +1,93 @@
|
|||
use anyhow::Result;
|
||||
use ffmpeg_next::{self as ffmpeg, util::channel_layout::ChannelLayout};
|
||||
use hang::catalog::AudioConfig;
|
||||
|
||||
use crate::{
|
||||
av::{AudioDecoder, AudioFormat},
|
||||
ffmpeg::ext::{CodecContextExt, PacketExt},
|
||||
};
|
||||
|
||||
/// Audio decoder backed by ffmpeg's Opus decoder, followed by a resampler
/// that converts the decoded audio into the caller's target format.
pub struct FfmpegAudioDecoder {
    // Opened ffmpeg Opus decoder.
    codec: ffmpeg::decoder::Audio,
    // Converts decoded audio to the target sample format/rate/layout.
    resampler: ffmpeg::software::resampling::Context,
    // Scratch frame reused for every decoded frame (avoids re-allocation).
    decoded_frame: ffmpeg::util::frame::Audio,
    // Scratch frame reused for every resampler output.
    resampled_frame: ffmpeg::util::frame::Audio,
}
|
||||
|
||||
impl AudioDecoder for FfmpegAudioDecoder {
|
||||
fn new(config: &AudioConfig, target_format: AudioFormat) -> Result<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
let codec = match config.codec {
|
||||
hang::catalog::AudioCodec::Opus => {
|
||||
let codec_id = ffmpeg::codec::Id::OPUS;
|
||||
let codec = ffmpeg::decoder::find(codec_id).unwrap();
|
||||
let mut ctx = ffmpeg::codec::Context::new_with_codec(codec)
|
||||
.decoder()
|
||||
.audio()?;
|
||||
if let Some(extradata) = &config.description {
|
||||
ctx.set_extradata(&extradata)?;
|
||||
}
|
||||
ctx.set_channel_layout(if config.channel_count == 1 {
|
||||
ChannelLayout::MONO
|
||||
} else {
|
||||
ChannelLayout::STEREO
|
||||
});
|
||||
unsafe {
|
||||
let ctx_mut = ctx.as_mut_ptr();
|
||||
(*ctx_mut).sample_rate = config.sample_rate as i32;
|
||||
}
|
||||
ctx
|
||||
}
|
||||
_ => anyhow::bail!(
|
||||
"Unsupported codec {} (only opus is supported)",
|
||||
config.codec
|
||||
),
|
||||
};
|
||||
let target_channel_layout = match target_format.channel_count {
|
||||
1 => ChannelLayout::MONO,
|
||||
2 => ChannelLayout::STEREO,
|
||||
_ => anyhow::bail!("unsupported target channel count"),
|
||||
};
|
||||
let target_sample_format = ffmpeg_next::util::format::sample::Sample::F32(
|
||||
ffmpeg_next::util::format::sample::Type::Packed,
|
||||
);
|
||||
let resampler = ffmpeg::software::resampling::Context::get(
|
||||
codec.format(),
|
||||
codec.channel_layout(),
|
||||
codec.rate(),
|
||||
target_sample_format,
|
||||
target_channel_layout,
|
||||
target_format.sample_rate,
|
||||
)?;
|
||||
Ok(Self {
|
||||
codec,
|
||||
resampler,
|
||||
decoded_frame: ffmpeg::util::frame::Audio::empty(),
|
||||
resampled_frame: ffmpeg::util::frame::Audio::empty(),
|
||||
})
|
||||
}
|
||||
|
||||
fn push_packet(&mut self, packet: hang::Frame) -> Result<()> {
|
||||
let packet = packet.payload.to_ffmpeg_packet();
|
||||
self.codec.send_packet(&packet)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn pop_samples(&mut self) -> Result<Option<&[f32]>> {
|
||||
match self.codec.receive_frame(&mut self.decoded_frame) {
|
||||
Err(err) => Err(err.into()),
|
||||
Ok(()) => {
|
||||
// Create an empty frame to hold the resampled audio data.
|
||||
self.resampler
|
||||
.run(&self.decoded_frame, &mut self.resampled_frame)
|
||||
.unwrap();
|
||||
let frame = &self.resampled_frame;
|
||||
let expected_bytes =
|
||||
frame.samples() * frame.channels() as usize * core::mem::size_of::<f32>();
|
||||
Ok(Some(bytemuck::cast_slice(&frame.data(0)[..expected_bytes])))
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
153
third_party/iroh-live/moq-media/src/ffmpeg/audio/encoder.rs
vendored
Normal file
153
third_party/iroh-live/moq-media/src/ffmpeg/audio/encoder.rs
vendored
Normal file
|
|
@ -0,0 +1,153 @@
|
|||
use anyhow::{Context, Result};
|
||||
use ffmpeg_next::{self as ffmpeg, Rational};
|
||||
use hang::{Timestamp, catalog::AudioConfig};
|
||||
use tracing::trace;
|
||||
|
||||
use crate::{
|
||||
av::{AudioEncoder, AudioEncoderInner, AudioFormat, AudioPreset},
|
||||
ffmpeg::ext::CodecContextExt,
|
||||
};
|
||||
|
||||
// Sample rate all Opus encoders are opened with.
const SAMPLE_RATE: u32 = 48_000;
// Default (high-quality) target bitrate.
const BITRATE: u64 = 128_000; // 128 kbps
|
||||
|
||||
/// Opus audio encoder built on ffmpeg.
pub struct OpusEncoder {
    // Opened ffmpeg Opus encoder.
    encoder: ffmpeg::encoder::Audio,
    // Total samples-per-channel pushed so far; used for PTS and timestamps.
    frame_count: u64,
    // Sample rate the encoder was opened with.
    sample_rate: u32,
    // Target bitrate in bits per second (also reported in the catalog config).
    bitrate: u64,
    // Number of audio channels (1 = mono, anything else = stereo layout).
    channel_count: u32,
    // Codec private data captured from the opened encoder (may be empty).
    extradata: Vec<u8>,
}
|
||||
|
||||
impl OpusEncoder {
|
||||
pub fn stereo() -> Result<Self> {
|
||||
Self::new(SAMPLE_RATE, 2, BITRATE)
|
||||
}
|
||||
|
||||
pub fn mono() -> Result<Self> {
|
||||
Self::new(SAMPLE_RATE, 1, BITRATE)
|
||||
}
|
||||
|
||||
pub fn new(sample_rate: u32, channel_count: u32, bitrate: u64) -> Result<Self> {
|
||||
tracing::info!(
|
||||
"Initializing Opus encoder: {}Hz, {} channels",
|
||||
sample_rate,
|
||||
channel_count
|
||||
);
|
||||
ffmpeg::init()?;
|
||||
|
||||
let codec =
|
||||
ffmpeg::encoder::find(ffmpeg::codec::Id::OPUS).context("Opus encoder not found")?;
|
||||
tracing::debug!("Found Opus codec: {:?}", codec.name());
|
||||
let mut ctx = ffmpeg::codec::context::Context::new_with_codec(codec)
|
||||
.encoder()
|
||||
.audio()?;
|
||||
|
||||
let sample_rate = sample_rate as i32;
|
||||
ctx.set_rate(sample_rate);
|
||||
ctx.set_bit_rate(bitrate as usize);
|
||||
ctx.set_format(ffmpeg::format::Sample::F32(
|
||||
ffmpeg_next::format::sample::Type::Packed,
|
||||
));
|
||||
ctx.set_time_base(Rational::new(1, sample_rate));
|
||||
ctx.set_channel_layout(if channel_count == 1 {
|
||||
ffmpeg::util::channel_layout::ChannelLayout::MONO
|
||||
} else {
|
||||
ffmpeg::util::channel_layout::ChannelLayout::STEREO
|
||||
});
|
||||
|
||||
let encoder = ctx.open()?;
|
||||
|
||||
let extradata = encoder.extradata().unwrap_or(&[]).to_vec();
|
||||
|
||||
tracing::info!("Opus encoder initialized successfully");
|
||||
Ok(Self {
|
||||
encoder,
|
||||
frame_count: 0,
|
||||
sample_rate: sample_rate as u32,
|
||||
channel_count,
|
||||
extradata,
|
||||
bitrate,
|
||||
})
|
||||
}
|
||||
}
|
||||
impl AudioEncoder for OpusEncoder {
|
||||
fn with_preset(format: AudioFormat, preset: AudioPreset) -> Result<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
let channel_count = format.channel_count;
|
||||
let bitrate = match preset {
|
||||
AudioPreset::Hq => BITRATE,
|
||||
AudioPreset::Lq => 32_000,
|
||||
};
|
||||
Self::new(SAMPLE_RATE, channel_count, bitrate)
|
||||
}
|
||||
}
|
||||
|
||||
impl AudioEncoderInner for OpusEncoder {
    /// Name of the underlying ffmpeg codec.
    fn name(&self) -> &str {
        self.encoder.id().name()
    }

    /// Catalog description of the encoded Opus stream.
    fn config(&self) -> AudioConfig {
        hang::catalog::AudioConfig {
            codec: hang::catalog::AudioCodec::Opus,
            sample_rate: self.sample_rate,
            channel_count: self.channel_count,
            bitrate: Some(self.bitrate),
            description: Some(self.extradata.clone().into()),
        }
    }

    /// Submits one buffer of interleaved f32 samples to the encoder.
    ///
    /// The caller is expected to deliver exactly one encoder frame's worth of
    /// samples per call (checked via debug_assert against `frame_size()`).
    fn push_samples(&mut self, samples: &[f32]) -> Result<()> {
        if samples.is_empty() {
            return Ok(());
        }

        let samples_per_channel = samples.len() / self.channel_count as usize;
        debug_assert_eq!(samples_per_channel as u32, self.encoder.frame_size());

        // NOTE(review): a fresh frame is allocated per call; could be reused
        // across calls if this shows up in profiles.
        let mut audio_frame = ffmpeg::util::frame::Audio::new(
            ffmpeg::util::format::sample::Sample::F32(ffmpeg::util::format::sample::Type::Packed),
            samples_per_channel,
            ffmpeg::util::channel_layout::ChannelLayout::default(self.channel_count as i32),
        );

        // Copy interleaved samples directly since we're using packed format.
        // The frame buffer may be larger than the sample data (padding), so
        // copy at most the smaller of the two lengths.
        let frame_data = audio_frame.data_mut(0);
        let frame_samples: &mut [f32] = bytemuck::cast_slice_mut(frame_data);

        let copy_len = samples.len().min(frame_samples.len());
        frame_samples[..copy_len].copy_from_slice(&samples[..copy_len]);

        // PTS counts samples-per-channel, matching the 1/sample_rate time base.
        audio_frame.set_pts(Some(self.frame_count as i64));
        self.frame_count += samples_per_channel as u64;

        trace!("push samples {}", audio_frame.samples());
        self.encoder.send_frame(&audio_frame)?;
        Ok(())
    }

    /// Drains one encoded packet, or `None` when the encoder needs more
    /// input (EAGAIN) or is fully flushed (EOF).
    ///
    /// NOTE(review): the timestamp is derived from the total samples pushed
    /// so far, not the packet's own pts — with encoder delay these can
    /// diverge slightly; confirm downstream tolerates this.
    fn pop_packet(&mut self) -> Result<Option<hang::Frame>> {
        let mut packet = ffmpeg::packet::Packet::empty();
        match self.encoder.receive_packet(&mut packet) {
            Ok(()) => {
                let payload = packet.data().unwrap_or(&[]).to_vec();
                let hang_frame = hang::Frame {
                    payload: payload.into(),
                    timestamp: Timestamp::from_micros(
                        (self.frame_count * 1_000_000) / self.sample_rate as u64,
                    )?,
                    keyframe: true, // Audio frames are generally independent
                };
                trace!("poll frame {}", hang_frame.payload.num_bytes());
                Ok(Some(hang_frame))
            }
            Err(ffmpeg::Error::Eof) => Ok(None),
            Err(ffmpeg::Error::Other { errno }) if errno == ffmpeg::util::error::EAGAIN => Ok(None),
            Err(e) => Err(e.into()),
        }
    }
}
|
||||
108
third_party/iroh-live/moq-media/src/ffmpeg/mod.rs
vendored
Normal file
108
third_party/iroh-live/moq-media/src/ffmpeg/mod.rs
vendored
Normal file
|
|
@ -0,0 +1,108 @@
|
|||
use crate::av::Decoders;
|
||||
|
||||
pub use self::{audio::*, ext::ffmpeg_log_init, video::*};
|
||||
|
||||
/// Marker type selecting the ffmpeg-backed audio and video decoders.
#[derive(Debug, Clone, Copy)]
pub struct FfmpegDecoders;

impl Decoders for FfmpegDecoders {
    type Audio = FfmpegAudioDecoder;
    type Video = FfmpegVideoDecoder;
}
|
||||
|
||||
// Opus decoder/encoder implementations.
mod audio {
    mod decoder;
    mod encoder;
    pub use decoder::*;
    pub use encoder::*;
}
|
||||
|
||||
// Video decoder/encoder implementations plus shared rescaling/clock helpers.
pub mod video {
    mod decoder;
    mod encoder;
    pub(crate) mod util;
    pub use decoder::*;
    pub use encoder::*;
}
|
||||
|
||||
pub(crate) mod ext {
|
||||
use buf_list::BufList;
|
||||
use bytes::Buf;
|
||||
use ffmpeg_next as ffmpeg;
|
||||
pub fn ffmpeg_log_init() {
|
||||
use ffmpeg::util::log::Level::*;
|
||||
let level = if let Ok(val) = std::env::var("FFMPEG_LOG") {
|
||||
match val.as_str() {
|
||||
"quiet" => Quiet,
|
||||
"panic" => Panic,
|
||||
"fatal" => Fatal,
|
||||
"error" => Error,
|
||||
"warn" | "warning" => Warning,
|
||||
"info" => Info,
|
||||
"verbose" => Verbose,
|
||||
"debug" => Debug,
|
||||
"trace" => Trace,
|
||||
_ => Warning,
|
||||
}
|
||||
} else {
|
||||
Warning
|
||||
};
|
||||
ffmpeg::util::log::set_level(level);
|
||||
}
|
||||
|
||||
pub trait PacketExt {
|
||||
fn to_ffmpeg_packet(self) -> ffmpeg::Packet;
|
||||
}
|
||||
|
||||
impl PacketExt for BufList {
|
||||
fn to_ffmpeg_packet(mut self) -> ffmpeg_next::Packet {
|
||||
let mut packet = ffmpeg::Packet::new(self.num_bytes());
|
||||
let dst = packet.data_mut().unwrap();
|
||||
self.copy_to_slice(dst);
|
||||
packet
|
||||
}
|
||||
}
|
||||
|
||||
pub trait CodecContextExt {
|
||||
fn extradata(&self) -> Option<&[u8]>;
|
||||
fn set_extradata(&mut self, extradata: &[u8]) -> Result<(), ffmpeg::Error>;
|
||||
}
|
||||
|
||||
impl CodecContextExt for ffmpeg::codec::Context {
|
||||
// SAFETY: Written by ChatGPT, so, dunno.
|
||||
fn extradata(&self) -> Option<&[u8]> {
|
||||
unsafe {
|
||||
let ctx = self.as_ptr();
|
||||
if (*ctx).extradata.is_null() || (*ctx).extradata_size <= 0 {
|
||||
return None;
|
||||
}
|
||||
Some(std::slice::from_raw_parts(
|
||||
(*ctx).extradata as *const u8,
|
||||
(*ctx).extradata_size as usize,
|
||||
))
|
||||
}
|
||||
}
|
||||
|
||||
// SAFETY: Written by ChatGPT, so, dunno.
|
||||
fn set_extradata(&mut self, extradata: &[u8]) -> Result<(), ffmpeg::Error> {
|
||||
unsafe {
|
||||
let ctx = self.as_mut_ptr();
|
||||
// allocate extradata + padding
|
||||
let pad = ffmpeg::ffi::AV_INPUT_BUFFER_PADDING_SIZE as usize;
|
||||
let size = extradata.len() + pad;
|
||||
(*ctx).extradata = ffmpeg::ffi::av_mallocz(size).cast::<u8>();
|
||||
if (*ctx).extradata.is_null() {
|
||||
return Err(ffmpeg::Error::Bug.into());
|
||||
}
|
||||
// copy bytes and zero the padding
|
||||
std::ptr::copy_nonoverlapping(
|
||||
extradata.as_ptr(),
|
||||
(*ctx).extradata,
|
||||
extradata.len(),
|
||||
);
|
||||
(*ctx).extradata_size = extradata.len() as i32;
|
||||
}
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
}
|
||||
139
third_party/iroh-live/moq-media/src/ffmpeg/video/decoder.rs
vendored
Normal file
139
third_party/iroh-live/moq-media/src/ffmpeg/video/decoder.rs
vendored
Normal file
|
|
@ -0,0 +1,139 @@
|
|||
use anyhow::{Context, Result};
|
||||
use ffmpeg_next::{
|
||||
self as ffmpeg, codec, codec::Id as CodecId, util::frame::video::Video as FfmpegFrame,
|
||||
};
|
||||
|
||||
use crate::{
|
||||
av::{self, DecodeConfig, DecodedFrame, VideoDecoder},
|
||||
ffmpeg::{
|
||||
ext::{CodecContextExt, PacketExt},
|
||||
video::util::{Rescaler, StreamClock},
|
||||
},
|
||||
};
|
||||
|
||||
/// Video decoder backed by ffmpeg (H.264 or AV1), with a rescaler that
/// resizes decoded frames toward the current viewport.
pub struct FfmpegVideoDecoder {
    // Opened ffmpeg video decoder.
    codec: ffmpeg::decoder::Video,
    // Converts/resizes decoded frames to the playback pixel format.
    rescaler: Rescaler,
    // Used to compute the display delay for each decoded frame.
    clock: StreamClock,
    // Scratch frame reused across receive_frame calls.
    decoded: FfmpegFrame,
    // Pending viewport size (w, h); consumed by the next pop_frame.
    viewport_changed: Option<(u32, u32)>,
    // Timestamp of the most recently pushed packet.
    last_timestamp: Option<hang::Timestamp>,
}
|
||||
|
||||
impl VideoDecoder for FfmpegVideoDecoder {
|
||||
fn name(&self) -> &str {
|
||||
self.codec.id().name()
|
||||
}
|
||||
|
||||
fn new(config: &hang::catalog::VideoConfig, playback_config: &DecodeConfig) -> Result<Self>
|
||||
where
|
||||
Self: Sized,
|
||||
{
|
||||
ffmpeg::init()?;
|
||||
|
||||
// Build a decoder context for H.264 and attach extradata (e.g., avcC)
|
||||
let codec = match &config.codec {
|
||||
hang::catalog::VideoCodec::H264(_meta) => {
|
||||
let codec =
|
||||
codec::decoder::find(CodecId::H264).context("H.264 decoder not found")?;
|
||||
let mut ctx = codec::context::Context::new_with_codec(codec);
|
||||
if let Some(description) = &config.description {
|
||||
ctx.set_extradata(&description)?;
|
||||
}
|
||||
ctx.decoder().video().unwrap()
|
||||
}
|
||||
hang::catalog::VideoCodec::AV1(_meta) => {
|
||||
let codec = codec::decoder::find(CodecId::AV1).context("AV1 decoder not found")?;
|
||||
let mut ctx = codec::context::Context::new_with_codec(codec);
|
||||
if let Some(description) = &config.description {
|
||||
ctx.set_extradata(&description)?;
|
||||
}
|
||||
ctx.decoder().video().unwrap()
|
||||
}
|
||||
_ => anyhow::bail!(
|
||||
"Unsupported codec {} (only h264 and av1 are supported)",
|
||||
config.codec
|
||||
),
|
||||
};
|
||||
let rescaler = Rescaler::new(playback_config.pixel_format.to_ffmpeg(), None)?;
|
||||
let clock = StreamClock::default();
|
||||
let decoded = FfmpegFrame::empty();
|
||||
Ok(Self {
|
||||
codec,
|
||||
rescaler,
|
||||
clock,
|
||||
decoded,
|
||||
viewport_changed: None,
|
||||
last_timestamp: None,
|
||||
})
|
||||
}
|
||||
|
||||
fn set_viewport(&mut self, w: u32, h: u32) {
|
||||
self.viewport_changed = Some((w, h));
|
||||
}
|
||||
|
||||
fn push_packet(&mut self, packet: hang::Frame) -> Result<()> {
|
||||
let ffmpeg_packet = packet.payload.to_ffmpeg_packet();
|
||||
self.codec.send_packet(&ffmpeg_packet)?;
|
||||
self.last_timestamp = Some(packet.timestamp);
|
||||
Ok(())
|
||||
}
|
||||
|
||||
fn pop_frame(&mut self) -> Result<Option<av::DecodedFrame>> {
|
||||
// Pull all available decoded frames
|
||||
match self.codec.receive_frame(&mut self.decoded) {
|
||||
Ok(()) => {
|
||||
// Apply clamped target size.
|
||||
if let Some((max_width, max_height)) = self.viewport_changed.take() {
|
||||
let (width, height) =
|
||||
calculate_resized_size(&self.decoded, max_width, max_height);
|
||||
self.rescaler.set_target_dimensions(width, height);
|
||||
}
|
||||
|
||||
let frame = self.rescaler.process(&mut self.decoded)?;
|
||||
let last_timestamp = self
|
||||
.last_timestamp
|
||||
.as_ref()
|
||||
.context("missing last packet")?;
|
||||
let frame = DecodedFrame::from_ffmpeg(
|
||||
frame,
|
||||
self.clock.frame_delay(&last_timestamp),
|
||||
std::time::Duration::from(*last_timestamp),
|
||||
);
|
||||
Ok(Some(frame))
|
||||
}
|
||||
Err(ffmpeg::util::error::Error::BufferTooSmall) => Ok(None),
|
||||
Err(ffmpeg::Error::Other { errno }) if errno == ffmpeg::util::error::EAGAIN => Ok(None),
|
||||
Err(err) => {
|
||||
// tracing::warn!("decoder error: {err} {err:?} {err:#?}");
|
||||
// Ok(None)
|
||||
Err(err.into())
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Calculates the target frame size to fit into the requested bounds while preserving aspect ratio.
|
||||
fn calculate_resized_size(decoded: &FfmpegFrame, max_width: u32, max_height: u32) -> (u32, u32) {
|
||||
let src_w = decoded.width().max(1);
|
||||
let src_h = decoded.height().max(1);
|
||||
let max_w = max_width.max(1);
|
||||
let max_h = max_height.max(1);
|
||||
|
||||
// Fit within requested bounds, preserve aspect ratio, never upscale
|
||||
let scale_w = (max_w as f32) / (src_w as f32);
|
||||
let scale_h = (max_h as f32) / (src_h as f32);
|
||||
let scale = scale_w.min(scale_h).min(1.0).max(0.0);
|
||||
let target_width = ((src_w as f32) * scale).floor().max(1.0) as u32;
|
||||
let target_height = ((src_h as f32) * scale).floor().max(1.0) as u32;
|
||||
tracing::debug!(
|
||||
src_w,
|
||||
src_h,
|
||||
max_w,
|
||||
max_h,
|
||||
target_width,
|
||||
target_height,
|
||||
"scale"
|
||||
);
|
||||
(target_width, target_height)
|
||||
}
|
||||
568
third_party/iroh-live/moq-media/src/ffmpeg/video/encoder.rs
vendored
Normal file
568
third_party/iroh-live/moq-media/src/ffmpeg/video/encoder.rs
vendored
Normal file
|
|
@ -0,0 +1,568 @@
|
|||
use std::{
|
||||
ffi::{CString, c_int},
|
||||
ptr,
|
||||
task::Poll,
|
||||
};
|
||||
|
||||
use anyhow::{Context, Result, anyhow};
|
||||
use ffmpeg_next::{self as ffmpeg, codec, format::Pixel, frame::Video as VideoFrame};
|
||||
use hang::Timestamp;
|
||||
use tracing::{debug, info, trace};
|
||||
|
||||
use crate::{
|
||||
av,
|
||||
ffmpeg::{ext::CodecContextExt, util::Rescaler},
|
||||
};
|
||||
|
||||
/// Encoder implementation to use for H.264.
#[derive(Debug, Clone, Copy, Default)]
// Allow unused because usage is cfg-gated on platform.
#[allow(unused)]
enum HwBackend {
    /// Software encoding via libx264; always the last-resort fallback.
    #[default]
    Software,
    /// VAAPI hardware encoding (Linux).
    Vaapi,
    /// VideoToolbox hardware encoding (macOS).
    Videotoolbox,
    /// NVENC hardware encoding (Nvidia GPUs).
    Nvenc,
    /// Quick Sync hardware encoding (Intel GPUs).
    Qsv,
    /// AMF hardware encoding (AMD GPUs).
    Amf,
    // TODO:
    // Add DirectX (Windows)
    // Add MediaCodec (Android)
}
|
||||
|
||||
impl HwBackend {
|
||||
fn codec_name(&self) -> &'static str {
|
||||
match self {
|
||||
Self::Software => "libx264",
|
||||
Self::Vaapi => "h264_vaapi",
|
||||
Self::Videotoolbox => "h264_videotoolbox",
|
||||
Self::Nvenc => "h264_nvenc",
|
||||
Self::Qsv => "h264_qsv",
|
||||
Self::Amf => "h264_amf",
|
||||
}
|
||||
}
|
||||
|
||||
fn candidates() -> Vec<Self> {
|
||||
// vec![HwBackend::Software]
|
||||
let mut candidates = Vec::new();
|
||||
// Platform-preferred order
|
||||
#[cfg(target_os = "macos")]
|
||||
candidates.extend_from_slice(&[HwBackend::Videotoolbox]);
|
||||
#[cfg(target_os = "windows")]
|
||||
candidates.extend_from_slice(&[HwBackend::Nvenc, HwBackend::Qsv, HwBackend::Amf]);
|
||||
#[cfg(target_os = "linux")]
|
||||
candidates.extend_from_slice(&[HwBackend::Vaapi, HwBackend::Nvenc, HwBackend::Qsv]);
|
||||
|
||||
// Always end with software
|
||||
candidates.push(HwBackend::Software);
|
||||
candidates
|
||||
}
|
||||
|
||||
fn pixel_format(&self) -> Pixel {
|
||||
match self {
|
||||
HwBackend::Vaapi | HwBackend::Qsv => Pixel::NV12,
|
||||
// These rest accepts yuv420p SW frames:
|
||||
_ => Pixel::YUV420P,
|
||||
}
|
||||
}
|
||||
fn hardware_pixel_format(&self) -> Pixel {
|
||||
match self {
|
||||
HwBackend::Vaapi => Pixel::VAAPI,
|
||||
HwBackend::Qsv => Pixel::NV12,
|
||||
// These rest accepts yuv420p SW frames:
|
||||
_ => Pixel::YUV420P,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Parameters an encoder backend is opened with.
#[derive(Debug, Clone)]
struct EncoderOpts {
    // Frame width in pixels.
    width: u32,
    // Frame height in pixels.
    height: u32,
    // Frames per second; also used as the GOP size.
    framerate: u32,
    // Target bitrate passed to the codec context's bit_rate.
    bitrate: u64,
}
|
||||
|
||||
/// H.264 encoder that probes hardware backends and falls back to software.
pub struct H264Encoder {
    // The opened ffmpeg encoder (whichever backend succeeded).
    encoder: ffmpeg::encoder::video::Encoder,
    // Converts incoming frames to the pixel format/size the backend expects.
    rescaler: Rescaler,
    // Which backend was selected at construction time.
    backend: HwBackend,
    // VAAPI device/frame-pool state; `Some` only for the Vaapi backend.
    vaapi: Option<VaapiState>,
    // Dimensions, frame rate and bitrate the encoder was opened with.
    opts: EncoderOpts,
    // Number of frames pushed so far; used for PTS and timestamps.
    frame_count: u64,
}
|
||||
|
||||
impl H264Encoder {
    /// Creates an H.264 encoder, probing hardware backends in platform order
    /// and falling back to software (libx264) if none opens.
    ///
    /// The bitrate is derived heuristically from pixel count and frame rate;
    /// the chosen parameters are kept in `opts` for later config reporting.
    pub fn new(width: u32, height: u32, framerate: u32) -> Result<Self> {
        info!("Initializing H264 encoder: {width}x{height} @ {framerate}fps");
        ffmpeg::init()?;

        // Bitrate heuristic: ~0.07 bits per pixel per "factor" unit, where
        // the factor grows by half the frame-rate excess over 30 fps.
        let pixels = width * height;
        let framerate_factor = 30.0 + (framerate as f32 - 30.) / 2.;
        let bitrate = (pixels as f32 * 0.07 * framerate_factor).round() as u64;

        let opts = EncoderOpts {
            width,
            height,
            framerate,
            bitrate,
        };

        let candidates = HwBackend::candidates();

        // Try each backend; remember the last failure so it can be reported
        // when nothing works.
        let mut last_err: Option<anyhow::Error> = None;
        for backend in candidates {
            match Self::open_encoder(backend, &opts) {
                Ok((encoder, rescaler, vaapi)) => {
                    info!(
                        "Using encoder backend: {} ({backend:?})",
                        backend.codec_name()
                    );
                    return Ok(Self {
                        encoder,
                        rescaler,
                        vaapi,
                        backend,
                        opts,
                        frame_count: 0,
                    });
                }
                Err(e) => {
                    debug!(
                        "Backend {backend:?} ({}) not available: {e:#}",
                        backend.codec_name()
                    );
                    last_err = Some(e);
                }
            }
        }

        Err(last_err.unwrap_or_else(|| anyhow!("no H.264 encoder available")))
    }

    /// Opens one encoder backend: finds the codec, configures its context,
    /// sets up VAAPI state when needed, and builds the matching rescaler.
    fn open_encoder(
        backend: HwBackend,
        opts: &EncoderOpts,
    ) -> Result<(
        ffmpeg::encoder::video::Encoder,
        Rescaler,
        Option<VaapiState>,
    )> {
        // Find encoder
        let codec = ffmpeg::codec::encoder::find_by_name(backend.codec_name())
            .with_context(|| format!("encoder {} not found", backend.codec_name()))?;
        debug!("Found encoder: {}", codec.name());

        // Configure the raw codec context; ffmpeg-next does not expose all
        // of these fields, so they are written through the FFI struct.
        // SAFETY: `ctx_mut` is the context's own valid AVCodecContext
        // pointer, and only plain fields are written before the encoder is
        // opened.
        let mut ctx = codec::context::Context::new_with_codec(codec);
        unsafe {
            let ctx_mut = ctx.as_mut_ptr();
            (*ctx_mut).width = opts.width as i32;
            (*ctx_mut).height = opts.height as i32;
            (*ctx_mut).time_base.num = 1;
            (*ctx_mut).time_base.den = opts.framerate as i32;
            (*ctx_mut).framerate.num = opts.framerate as i32;
            (*ctx_mut).framerate.den = 1;
            // gop_size == framerate → one keyframe per second of video.
            (*ctx_mut).gop_size = opts.framerate as i32;
            (*ctx_mut).bit_rate = opts.bitrate as i64;
            // GLOBAL_HEADER puts codec headers into extradata (needed for
            // the avcC description) instead of the bitstream.
            (*ctx_mut).flags = (*ctx_mut).flags | codec::Flags::GLOBAL_HEADER.bits() as c_int;
            (*ctx_mut).pix_fmt = backend.hardware_pixel_format().into();
        }

        // Backend-specific prep
        let vaapi_state = if matches!(backend, HwBackend::Vaapi) {
            // single-GPU default; make configurable if needed
            let va = VaapiState::new(opts.width, opts.height, "/dev/dri/renderD128")?;
            va.bind_to_context(&mut ctx);
            Some(va)
        } else {
            None
        };

        // Setup encoder options
        let enc_opts = {
            let mut opts = vec![
                // Disable annexB so that we get an avcC header in extradata
                // annexb=0 → MP4/ISO BMFF style (length-prefixed NAL units + avcC extradata),
                // as opposed to Annex B start codes (00 00 00 01).
                ("annexB", "0"),
            ];
            if matches!(backend, HwBackend::Software) {
                // Low-latency x264 tuning; baseline profile for wide decoder support.
                opts.extend_from_slice(&[
                    ("preset", "ultrafast"),
                    ("tune", "zerolatency"),
                    ("profile", "baseline"),
                ]);
            }
            ffmpeg::Dictionary::from_iter(opts.into_iter())
        };
        // Open encoder
        let encoder = ctx.encoder().video()?.open_as_with(codec, enc_opts)?;

        // Build rescaler to SW input fmt expected per-backend
        let rescaler = Rescaler::new(backend.pixel_format(), Some((opts.width, opts.height)))?;

        Ok((encoder, rescaler, vaapi_state))
    }

    /// Catalog description of the encoded H.264 stream.
    ///
    /// NOTE(review): profile/constraints/level are hard-coded to Baseline 3.0
    /// rather than read back from the encoder — confirm these match what the
    /// selected backend actually produces.
    pub fn video_config(&self) -> Result<hang::catalog::VideoConfig> {
        Ok(hang::catalog::VideoConfig {
            codec: hang::catalog::VideoCodec::H264(hang::catalog::H264 {
                profile: 0x42, // Baseline
                constraints: 0xE0,
                level: 0x1E, // Level 3.0
                inline: false, // TODO: is this correct?
            }),
            description: Some(self.avcc_description()?.to_vec().into()),
            coded_width: Some(self.opts.width),
            coded_height: Some(self.opts.height),
            display_ratio_width: None,
            display_ratio_height: None,
            bitrate: Some(self.opts.bitrate),
            framerate: Some(self.opts.framerate as f64),
            optimize_for_latency: Some(true),
        })
    }

    /// The encoder's extradata (avcC), present because the context is opened
    /// with GLOBAL_HEADER and annexB disabled.
    pub fn avcc_description(&self) -> Result<&[u8]> {
        self.encoder.extradata().context("missing avcC extradata")
    }

    /// Drains one encoded packet if available.
    ///
    /// Returns `Pending` when the encoder needs more input (EAGAIN) and
    /// `Ready(None)` once fully flushed (EOF).
    ///
    /// NOTE(review): the timestamp is derived from `frame_count` (frames
    /// *pushed*), not the packet's own pts — with encoder delay these can
    /// diverge; confirm downstream tolerates this.
    pub fn receive_packet(&mut self) -> Result<Poll<Option<hang::Frame>>> {
        loop {
            let mut packet = ffmpeg::packet::Packet::empty();
            match self.encoder.receive_packet(&mut packet) {
                Ok(()) => {
                    let payload = packet.data().unwrap_or(&[]).to_vec();
                    let hang_frame = hang::Frame {
                        payload: payload.into(),
                        timestamp: Timestamp::from_micros(
                            self.frame_count * 1_000_000 / self.opts.framerate as u64,
                        )?,
                        keyframe: packet.is_key(),
                    };
                    return Ok(Poll::Ready(Some(hang_frame)));
                }
                Err(ffmpeg::Error::Eof) => return Ok(Poll::Ready(None)),
                Err(ffmpeg::Error::Other { errno }) if errno == ffmpeg::util::error::EAGAIN => {
                    return Ok(Poll::Pending);
                }
                Err(e) => return Err(e.into()),
            }
        }
    }

    /// Converts `frame` to the backend's input format, uploads it to a VAAPI
    /// surface when required, and submits it to the encoder.
    pub fn encode_frame(&mut self, mut frame: VideoFrame) -> Result<()> {
        // PTS is the index of this frame (pre-increment value).
        frame.set_pts(Some(self.frame_count as i64));
        self.frame_count += 1;

        // Log roughly once per second of video.
        if self.frame_count % self.opts.framerate as u64 == 0 {
            tracing::trace!(
                "Encoding {}: {}x{} fmt={:?} pts={:?} backend={:?}",
                self.frame_count,
                frame.width(),
                frame.height(),
                frame.format(),
                frame.pts(),
                self.backend
            );
        }

        // Convert incoming frame to the SW format the backend expects.
        let frame = self
            .rescaler
            .process(&frame)
            .context("failed to color-convert frame")?
            .clone();

        let frame = match self.backend {
            HwBackend::Vaapi => {
                let va = self
                    .vaapi
                    .as_ref()
                    .ok_or_else(|| anyhow!("no vaapi state"))?;
                let hw_frame = va.transfer_nv12_to_hw(&frame)?;
                hw_frame
            }
            // Other backends accept SW frames directly
            _ => frame,
        };

        self.encoder
            .send_frame(&frame)
            .map_err(|e| anyhow!("send_frame failed: {e:?}"))?;

        Ok(())
    }

    /// Signals end-of-stream so remaining packets can be drained via
    /// `receive_packet`.
    pub fn flush(&mut self) -> Result<()> {
        self.encoder.send_eof()?;
        Ok(())
    }
}
|
||||
|
||||
impl av::VideoEncoder for H264Encoder {
    /// Constructs an encoder sized from the preset's resolution and frame rate.
    fn with_preset(preset: av::VideoPreset) -> Result<Self>
    where
        Self: Sized,
    {
        let (width, height, fps) = (preset.width(), preset.height(), preset.fps());
        Self::new(width, height, fps)
    }
}
|
||||
|
||||
impl av::VideoEncoderInner for H264Encoder {
|
||||
fn name(&self) -> &str {
|
||||
self.encoder.id().name()
|
||||
}
|
||||
|
||||
fn config(&self) -> hang::catalog::VideoConfig {
|
||||
self.video_config().expect("video_config available")
|
||||
}
|
||||
|
||||
fn push_frame(&mut self, frame: av::VideoFrame) -> anyhow::Result<()> {
|
||||
trace!(len = frame.raw.len(), format=?frame.format, "push frame");
|
||||
let frame = frame.to_ffmpeg();
|
||||
self.encode_frame(frame)
|
||||
}
|
||||
|
||||
fn pop_packet(&mut self) -> anyhow::Result<Option<hang::Frame>> {
|
||||
match self.receive_packet()? {
|
||||
std::task::Poll::Ready(v) => Ok(v),
|
||||
std::task::Poll::Pending => Ok(None),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Owned references to the VAAPI hardware device and frame-pool contexts.
struct VaapiState {
    // AVBufferRef holding the VAAPI device context.
    device_ctx: *mut ffmpeg::sys::AVBufferRef,
    // AVBufferRef holding the frames context (pool of NV12-backed surfaces).
    frames_ctx: *mut ffmpeg::sys::AVBufferRef,
}

// SAFETY: both raw pointers are reference-counted ffmpeg buffer refs owned
// exclusively by this struct and only manipulated through ffmpeg's buffer
// APIs. NOTE(review): this assumes ffmpeg hwcontext buffer refs may be moved
// across threads — confirm against the libavutil threading guarantees.
unsafe impl Send for VaapiState {}
|
||||
|
||||
impl VaapiState {
    /// Create VAAPI device + frames pool (NV12→VAAPI surfaces) for given size.
    ///
    /// `device_path` is the DRM render node (e.g. `/dev/dri/renderD128`).
    /// On any failure, already-created buffer refs are unref'd before
    /// returning the error.
    fn new(width: u32, height: u32, device_path: &str) -> Result<Self> {
        // 1) Create VAAPI device
        let cpath = CString::new(device_path)?;
        let mut dev: *mut ffmpeg::sys::AVBufferRef = ptr::null_mut();
        // SAFETY: `cpath` outlives the call; `dev` is a valid out-pointer.
        let ret = unsafe {
            ffmpeg::sys::av_hwdevice_ctx_create(
                &mut dev,
                ffmpeg::sys::AVHWDeviceType::AV_HWDEVICE_TYPE_VAAPI,
                cpath.as_ptr(),
                ptr::null_mut(),
                0,
            )
        };
        if ret < 0 || dev.is_null() {
            // SAFETY: av_buffer_unref accepts a (possibly null) buffer ref
            // pointer and nulls it.
            unsafe { ffmpeg::sys::av_buffer_unref(&mut dev) };
            return Err(anyhow!("vaapi device create failed: {ret}"));
        }

        // 2) Create frames pool for VAAPI with SW format NV12
        // SAFETY: `dev` was verified non-null above.
        let frames = unsafe { ffmpeg::sys::av_hwframe_ctx_alloc(dev) };
        if frames.is_null() {
            // SAFETY: release the device ref created above.
            unsafe { ffmpeg::sys::av_buffer_unref(&mut dev) };
            return Err(anyhow!("av_hwframe_ctx_alloc failed"));
        }
        // SAFETY: `frames` is non-null and its `data` points at the
        // AVHWFramesContext allocated by av_hwframe_ctx_alloc; we hold the
        // only reference, so the mutable borrow is exclusive.
        let fc = unsafe { &mut *((*frames).data as *mut ffmpeg::sys::AVHWFramesContext) };
        fc.format = ffmpeg::sys::AVPixelFormat::AV_PIX_FMT_VAAPI;
        fc.sw_format = ffmpeg::sys::AVPixelFormat::AV_PIX_FMT_NV12;
        fc.width = width as i32;
        fc.height = height as i32;
        fc.initial_pool_size = 32;

        // SAFETY: `frames` is a valid, configured hwframe context ref.
        let ret = unsafe { ffmpeg::sys::av_hwframe_ctx_init(frames) };
        if ret < 0 {
            // SAFETY: release both refs created above. Note the unref goes
            // through a temporary copy of `frames`; the local stays dangling
            // but is not used afterwards.
            unsafe {
                ffmpeg::sys::av_buffer_unref(&mut (frames as *mut _));
                ffmpeg::sys::av_buffer_unref(&mut dev);
            }
            return Err(anyhow!("av_hwframe_ctx_init failed: {ret}"));
        }

        Ok(Self {
            device_ctx: dev,
            frames_ctx: frames,
        })
    }

    /// Attach the frames context so the codec context expects VAAPI frames.
    fn bind_to_context(&self, ctx: &mut codec::context::Context) {
        // SAFETY: `ctx` is a valid codec context; av_buffer_ref creates a
        // new reference, so the encoder and `self` each own one.
        unsafe {
            let ctx = ctx.as_mut_ptr();
            (*ctx).hw_frames_ctx = ffmpeg::sys::av_buffer_ref(self.frames_ctx);
            (*ctx).pix_fmt = Pixel::VAAPI.into();
        }
    }

    /// Transfer a SW NV12 frame into a VAAPI HW frame, preserving PTS.
    /// Returns a new `VideoFrame` backed by a VAAPI surface.
    fn transfer_nv12_to_hw(&self, sw_frame: &VideoFrame) -> Result<VideoFrame> {
        // SAFETY: `self.frames_ctx` is the initialized pool from `new`;
        // `hw` is a freshly allocated frame filled by av_hwframe_get_buffer
        // before being written through its raw pointer.
        unsafe {
            // Allocate an empty HW frame from the pool
            let mut hw = ffmpeg::frame::Video::empty();
            let ret = ffmpeg::sys::av_hwframe_get_buffer(self.frames_ctx, hw.as_mut_ptr(), 0);
            if ret < 0 {
                return Err(anyhow!("av_hwframe_get_buffer failed: {ret}"));
            }
            // Keep PTS
            (*hw.as_mut_ptr()).pts = sw_frame.pts().unwrap_or(0);

            // Transfer SW NV12 → HW VAAPI surface
            let ret = ffmpeg::sys::av_hwframe_transfer_data(hw.as_mut_ptr(), sw_frame.as_ptr(), 0);
            if ret < 0 {
                return Err(anyhow!("av_hwframe_transfer_data failed: {ret}"));
            }

            Ok(hw)
        }
    }
}
|
||||
|
||||
impl Drop for VaapiState {
    /// Release our references to the VAAPI frames and device contexts.
    fn drop(&mut self) {
        // SAFETY: both pointers are either null or were obtained from FFmpeg's
        // allocation APIs and are still owned by us here.
        // Note: `&mut (ptr as *mut _)` unrefs through a temporary copy, so the
        // struct fields themselves are not nulled — acceptable only because the
        // value is being dropped and the fields are never read again.
        unsafe {
            if !self.frames_ctx.is_null() {
                ffmpeg::sys::av_buffer_unref(&mut (self.frames_ctx as *mut _));
            }
            if !self.device_ctx.is_null() {
                ffmpeg::sys::av_buffer_unref(&mut (self.device_ctx as *mut _));
            }
        }
    }
}
|
||||
|
||||
// pub struct Av1FfmpegEncoder {
|
||||
// encoder: ffmpeg::encoder::video::Encoder,
|
||||
// rescaler: Rescaler,
|
||||
// opts: EncoderOpts,
|
||||
// frame_count: u64,
|
||||
// }
|
||||
|
||||
// impl Av1FfmpegEncoder {
|
||||
// pub fn new(width: u32, height: u32, framerate: u32) -> Result<Self> {
|
||||
// info!("Initializing AV1 (FFmpeg) encoder: {width}x{height} @ {framerate}fps");
|
||||
// ffmpeg::init()?;
|
||||
|
||||
// let pixels = width * height;
|
||||
// let framerate_factor = 30.0 + (framerate as f32 - 30.) / 2.;
|
||||
// let bitrate = (pixels as f32 * 0.05 * framerate_factor).round() as u64;
|
||||
// let opts = EncoderOpts {
|
||||
// width,
|
||||
// height,
|
||||
// framerate,
|
||||
// bitrate,
|
||||
// };
|
||||
|
||||
// let codec = ffmpeg::encoder::find(ffmpeg::codec::Id::AV1).context("AV1 codec not found")?;
|
||||
// let mut ctx = codec::context::Context::new_with_codec(codec);
|
||||
// unsafe {
|
||||
// let ctx_mut = ctx.as_mut_ptr();
|
||||
// (*ctx_mut).width = width as i32;
|
||||
// (*ctx_mut).height = height as i32;
|
||||
// (*ctx_mut).time_base.num = 1;
|
||||
// (*ctx_mut).time_base.den = framerate as i32;
|
||||
// (*ctx_mut).framerate.num = framerate as i32;
|
||||
// (*ctx_mut).framerate.den = 1;
|
||||
// (*ctx_mut).gop_size = framerate as i32;
|
||||
// (*ctx_mut).bit_rate = bitrate as i64;
|
||||
// (*ctx_mut).pix_fmt = Pixel::YUV420P.into();
|
||||
// }
|
||||
// // libaom options for realtime
|
||||
// let enc_opts =
|
||||
// ffmpeg::Dictionary::from_iter([("cpu-used", "8"), ("row-mt", "1"), ("tiles", "2x2")]);
|
||||
// let encoder = ctx.encoder().video()?.open_as_with(
|
||||
// ffmpeg::encoder::find(ffmpeg::codec::Id::AV1).unwrap(),
|
||||
// enc_opts,
|
||||
// )?;
|
||||
// let rescaler = Rescaler::new(Pixel::YUV420P, Some((width, height)))?;
|
||||
// Ok(Self {
|
||||
// encoder,
|
||||
// rescaler,
|
||||
// opts,
|
||||
// frame_count: 0,
|
||||
// })
|
||||
// }
|
||||
// }
|
||||
|
||||
// impl av::VideoEncoder for Av1FfmpegEncoder {
|
||||
// fn with_preset(preset: av::VideoPreset) -> Result<Self>
|
||||
// where
|
||||
// Self: Sized,
|
||||
// {
|
||||
// Self::new(preset.width(), preset.height(), preset.fps())
|
||||
// }
|
||||
// fn config(&self) -> hang::catalog::VideoConfig {
|
||||
// hang::catalog::VideoConfig {
|
||||
// codec: hang::catalog::VideoCodec::AV1(Default::default()),
|
||||
// description: None,
|
||||
// coded_width: Some(self.opts.width),
|
||||
// coded_height: Some(self.opts.height),
|
||||
// display_ratio_width: None,
|
||||
// display_ratio_height: None,
|
||||
// bitrate: Some(self.opts.bitrate),
|
||||
// framerate: Some(self.opts.framerate as f64),
|
||||
// optimize_for_latency: Some(true),
|
||||
// }
|
||||
// }
|
||||
// fn push_frame(
|
||||
// &mut self,
|
||||
// format: &av::VideoFormat,
|
||||
// frame: av::VideoFrame,
|
||||
// ) -> anyhow::Result<()> {
|
||||
// use ffmpeg_next::frame::Video as FfFrame;
|
||||
// let pixel = match format.pixel_format {
|
||||
// av::PixelFormat::Rgba => Pixel::RGBA,
|
||||
// av::PixelFormat::Bgra => Pixel::BGRA,
|
||||
// };
|
||||
// let [w, h] = format.dimensions;
|
||||
// let mut ff = FfFrame::new(pixel, w, h);
|
||||
// let stride = ff.stride(0) as usize;
|
||||
// let row_bytes = (w as usize) * 4;
|
||||
// for y in 0..(h as usize) {
|
||||
// let dst_off = y * stride;
|
||||
// let src_off = y * row_bytes;
|
||||
// ff.data_mut(0)[dst_off..dst_off + row_bytes]
|
||||
// .copy_from_slice(&frame.raw[src_off..src_off + row_bytes]);
|
||||
// }
|
||||
// let sw = self
|
||||
// .rescaler
|
||||
// .process(&ff)
|
||||
// .context("failed to color-convert frame")?
|
||||
// .clone();
|
||||
// let mut enc_frame = sw;
|
||||
// enc_frame.set_pts(Some(self.frame_count as i64));
|
||||
// self.frame_count += 1;
|
||||
// self.encoder.send_frame(&enc_frame)?;
|
||||
// Ok(())
|
||||
// }
|
||||
// fn pop_packet(&mut self) -> anyhow::Result<Option<hang::Frame>> {
|
||||
// let mut packet = ffmpeg::packet::Packet::empty();
|
||||
// match self.encoder.receive_packet(&mut packet) {
|
||||
// Ok(()) => {
|
||||
// let payload = packet.data().unwrap_or(&[]).to_vec();
|
||||
// let hang_frame = hang::Frame {
|
||||
// payload: payload.into(),
|
||||
// timestamp: std::time::Duration::from_nanos(
|
||||
// self.frame_count.saturating_sub(1) * 1_000_000_000
|
||||
// / self.opts.framerate as u64,
|
||||
// ),
|
||||
// keyframe: packet.is_key(),
|
||||
// };
|
||||
// Ok(Some(hang_frame))
|
||||
// }
|
||||
// Err(ffmpeg::Error::Eof) => Ok(None),
|
||||
// Err(ffmpeg::Error::Other { errno }) if errno == ffmpeg::util::error::EAGAIN => Ok(None),
|
||||
// Err(e) => Err(e.into()),
|
||||
// }
|
||||
// }
|
||||
// }
|
||||
138
third_party/iroh-live/moq-media/src/ffmpeg/video/util.rs
vendored
Normal file
138
third_party/iroh-live/moq-media/src/ffmpeg/video/util.rs
vendored
Normal file
|
|
@ -0,0 +1,138 @@
|
|||
use std::time::Duration;
|
||||
|
||||
use bytes::{BufMut, BytesMut};
|
||||
use ffmpeg_next::util::{format::pixel::Pixel, frame::video::Video as FfmpegFrame};
|
||||
use hang::Timestamp;
|
||||
use image::{Delay, RgbaImage};
|
||||
|
||||
pub(crate) use self::mjpg_decoder::MjpgDecoder;
|
||||
pub(crate) use self::rescaler::Rescaler;
|
||||
use crate::av::{self, DecodedFrame, PixelFormat, VideoFormat, VideoFrame};
|
||||
|
||||
mod mjpg_decoder;
|
||||
mod rescaler;
|
||||
|
||||
/// Tracks the previously seen media timestamp so inter-frame delays can be
/// derived from a stream of absolute timestamps.
#[derive(Default, Debug)]
pub(crate) struct StreamClock {
    // Timestamp of the last frame handed to `frame_delay`; `None` until the
    // first frame is observed.
    pub(crate) last_timestamp: Option<hang::Timestamp>,
}
|
||||
|
||||
impl StreamClock {
|
||||
pub(crate) fn frame_delay(&mut self, timestamp: &hang::Timestamp) -> Duration {
|
||||
// Compute interframe delay from provided timestamps
|
||||
let delay = match self.last_timestamp {
|
||||
None => Duration::ZERO,
|
||||
Some(last_timestamp) => timestamp
|
||||
.checked_sub(last_timestamp)
|
||||
.unwrap_or(Timestamp::ZERO)
|
||||
.into(),
|
||||
};
|
||||
self.last_timestamp = Some(*timestamp);
|
||||
delay
|
||||
}
|
||||
}
|
||||
|
||||
impl av::VideoFrame {
|
||||
pub fn to_ffmpeg(&self) -> FfmpegFrame {
|
||||
// Wrap raw RGBA/BGRA data into an ffmpeg frame and encode
|
||||
let pixel = match self.format.pixel_format {
|
||||
av::PixelFormat::Rgba => Pixel::RGBA,
|
||||
av::PixelFormat::Bgra => Pixel::BGRA,
|
||||
};
|
||||
let [w, h] = self.format.dimensions;
|
||||
let mut ff = FfmpegFrame::new(pixel, w, h);
|
||||
let stride = ff.stride(0) as usize;
|
||||
let row_bytes = (w as usize) * 4;
|
||||
for y in 0..(h as usize) {
|
||||
let dst_off = y * stride;
|
||||
let src_off = y * row_bytes;
|
||||
ff.data_mut(0)[dst_off..dst_off + row_bytes]
|
||||
.copy_from_slice(&self.raw[src_off..src_off + row_bytes]);
|
||||
}
|
||||
ff
|
||||
}
|
||||
}
|
||||
|
||||
impl av::DecodedFrame {
|
||||
pub fn from_ffmpeg(frame: &FfmpegFrame, delay: Duration, timestamp: Duration) -> Self {
|
||||
let image = ffmpeg_frame_to_image(frame);
|
||||
// Compute interframe delay from provided timestamps
|
||||
let delay = Delay::from_saturating_duration(delay);
|
||||
DecodedFrame {
|
||||
frame: image::Frame::from_parts(image, 0, 0, delay),
|
||||
timestamp,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert the ffmpeg frame into an [image] frame.
///
/// Note: This does not do any color conversion. Make sure the frame is in the correct color format before.
///
/// This allocates the full frame into a vec, which we need anyway to cross the thread boundary.
pub(crate) fn ffmpeg_frame_to_image(frame: &ffmpeg_next::util::frame::Video) -> image::RgbaImage {
    let width = frame.width();
    let height = frame.height();
    let bytes_per_pixel = 4usize; // 4 bytes per pixel; output is interpreted as RGBA
    let src = frame.data(0);
    // ffmpeg frames may have padding at end of each line; copy row-by-row.
    let stride = frame.stride(0) as usize;
    let row_bytes = (width as usize) * bytes_per_pixel;
    let mut out = vec![0u8; row_bytes * (height as usize)];
    for y in 0..(height as usize) {
        let src_off = y * stride;
        let dst_off = y * row_bytes;
        out[dst_off..dst_off + row_bytes].copy_from_slice(&src[src_off..src_off + row_bytes]);
    }
    // Buffer length is exactly width * height * 4 by construction, so this cannot fail.
    RgbaImage::from_raw(width, height, out).expect("valid image buffer")
}
|
||||
|
||||
impl PixelFormat {
|
||||
pub fn to_ffmpeg(&self) -> Pixel {
|
||||
match self {
|
||||
PixelFormat::Rgba => Pixel::RGBA,
|
||||
PixelFormat::Bgra => Pixel::BGRA,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn from_ffmpeg(value: Pixel) -> Option<Self> {
|
||||
match value {
|
||||
Pixel::RGBA => Some(PixelFormat::Rgba),
|
||||
Pixel::BGRA => Some(PixelFormat::Bgra),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// Convert the ffmpeg frame into a [`VideoFrame`]
|
||||
///
|
||||
/// Returns `None` if the frame has an unsupported pixel format.
|
||||
///
|
||||
/// This allocates the full frame into a vec, which we need anyway to cross the thread boundary.
|
||||
pub(crate) fn ffmpeg_frame_to_video_frame(
|
||||
frame: &ffmpeg_next::util::frame::Video,
|
||||
) -> Option<VideoFrame> {
|
||||
let pixel_format = PixelFormat::from_ffmpeg(frame.format())?;
|
||||
let width = frame.width();
|
||||
let height = frame.height();
|
||||
let bytes_per_pixel = 4usize; // RGBA/BGRA
|
||||
let src = frame.data(0);
|
||||
// ffmpeg frames may have padding at end of each line; copy row-by-row.
|
||||
let stride = frame.stride(0) as usize;
|
||||
let row_bytes = (width as usize) * bytes_per_pixel;
|
||||
let mut out = BytesMut::with_capacity(row_bytes * (height as usize));
|
||||
// let mut out = vec![0u8; row_bytes * (height as usize)];
|
||||
for y in 0..(height as usize) {
|
||||
let src_off = y * stride;
|
||||
// let dst_off = y * row_bytes;
|
||||
out.put(&src[src_off..src_off + row_bytes]);
|
||||
// out[dst_off..dst_off + row_bytes].copy_from_slice(&src[src_off..src_off + row_bytes]);
|
||||
}
|
||||
Some(VideoFrame {
|
||||
format: VideoFormat {
|
||||
dimensions: [width, height],
|
||||
pixel_format,
|
||||
},
|
||||
raw: out.freeze(),
|
||||
})
|
||||
}
|
||||
74
third_party/iroh-live/moq-media/src/ffmpeg/video/util/mjpg_decoder.rs
vendored
Normal file
74
third_party/iroh-live/moq-media/src/ffmpeg/video/util/mjpg_decoder.rs
vendored
Normal file
|
|
@ -0,0 +1,74 @@
|
|||
use std::time::Instant;
|
||||
|
||||
use ffmpeg_next::{
|
||||
self as ffmpeg, Error, Packet, codec::Id, format::Pixel, frame::Video as FfmpegVideoFrame,
|
||||
};
|
||||
use tracing::trace;
|
||||
|
||||
use crate::{
|
||||
av::VideoFrame,
|
||||
ffmpeg::util::{Rescaler, ffmpeg_frame_to_video_frame},
|
||||
};
|
||||
|
||||
/// MJPEG/JPEG decoder producing RGBA [`VideoFrame`]s.
pub struct MjpgDecoder {
    // FFmpeg video decoder bound to the MJPEG codec.
    dec: ffmpeg::decoder::Video,
    // Converts the decoder's YUV output to RGBA (no resizing configured).
    rescaler: Rescaler,
}
|
||||
|
||||
impl MjpgDecoder {
    /// Initialize FFmpeg and create a Video decoder for MJPEG.
    ///
    /// # Errors
    /// Fails if FFmpeg cannot be initialized, the MJPEG decoder is missing, or
    /// the decoder/rescaler cannot be constructed.
    pub fn new() -> anyhow::Result<Self> {
        ffmpeg::init()?;

        // Find the MJPEG decoder and create a context bound to it.
        let mjpeg = ffmpeg::decoder::find(Id::MJPEG).ok_or(Error::DecoderNotFound)?;

        // Create a codec::Context that's pre-bound to this decoder codec,
        // then get a video decoder out of it.
        let ctx = ffmpeg::codec::context::Context::new_with_codec(mjpeg);
        let dec = ctx.decoder().video()?; // has send_packet/receive_frame

        // Convert decoder output to RGBA; keep the source dimensions (None).
        let rescaler = Rescaler::new(Pixel::RGBA, None)?;

        Ok(Self { dec, rescaler })
    }

    /// Decode one complete MJPEG/JPEG frame from `mjpg_frame`.
    ///
    /// # Errors
    /// Propagates any FFmpeg error from the send/receive or rescale steps.
    pub fn decode_frame(&mut self, mjpg_frame: &[u8]) -> Result<VideoFrame, Error> {
        // `now` is only used for per-stage trace timing below.
        let now = Instant::now();
        // Make a packet that borrows/copies the data.
        let packet = Packet::borrow(mjpg_frame);
        // Feed & drain once — MJPEG is intra-only (one picture per packet).
        self.dec.send_packet(&packet)?;
        trace!(t=?now.elapsed(), "decode ffmpeg: send packet");
        let mut frame = FfmpegVideoFrame::empty();
        self.dec.receive_frame(&mut frame)?;
        trace!(t=?now.elapsed(), "decode ffmpeg: receive frame");

        // MJPEG may output deprecated YUVJ* formats. Replace them with
        // the non-deprecated equivalents and mark full range to keep semantics.
        // This avoids ffmpeg warning: "deprecated pixel format used, make sure you did set range correctly".
        use ffmpeg_next::util::color::Range;
        match frame.format() {
            Pixel::YUVJ420P => {
                frame.set_color_range(Range::JPEG);
                frame.set_format(Pixel::YUV420P);
            }
            Pixel::YUVJ422P => {
                frame.set_color_range(Range::JPEG);
                frame.set_format(Pixel::YUV422P);
            }
            Pixel::YUVJ444P => {
                frame.set_color_range(Range::JPEG);
                frame.set_format(Pixel::YUV444P);
            }
            _ => {}
        }
        trace!(t=?now.elapsed(), "decode ffmpeg: color");
        // Convert to RGBA; the rescaler was constructed with Pixel::RGBA, so the
        // expect below can only fire if that invariant is broken.
        let frame = self.rescaler.process(&frame)?;
        trace!(t=?now.elapsed(), "decode ffmpeg: rescale");
        let frame = ffmpeg_frame_to_video_frame(frame).expect("valid pixel format set in rescaler");
        trace!(t=?now.elapsed(), "decode ffmpeg: convert");
        Ok(frame)
    }
}
|
||||
77
third_party/iroh-live/moq-media/src/ffmpeg/video/util/rescaler.rs
vendored
Normal file
77
third_party/iroh-live/moq-media/src/ffmpeg/video/util/rescaler.rs
vendored
Normal file
|
|
@ -0,0 +1,77 @@
|
|||
use anyhow::Result;
|
||||
use ffmpeg_next::software::scaling::Flags;
|
||||
use ffmpeg_next::{
|
||||
self as ffmpeg,
|
||||
software::scaling::{self},
|
||||
util::{format::pixel::Pixel, frame::video::Video as FfmpegFrame},
|
||||
};
|
||||
|
||||
/// Lazily-initialized wrapper around FFmpeg's swscale context that converts
/// frames to a target pixel format and, optionally, target dimensions.
pub(crate) struct Rescaler {
    // Pixel format every processed frame is converted to.
    pub(crate) target_format: Pixel,
    // Output dimensions; `None` keeps the source frame's dimensions.
    pub(crate) target_width_height: Option<(u32, u32)>,
    // Created on first use; `cached` re-parameterizes it when inputs change.
    pub(crate) ctx: Option<scaling::Context>,
    // Reused output buffer, reallocated only when target format/size change.
    pub(crate) out_frame: FfmpegFrame,
}
|
||||
|
||||
// SAFETY (unverified): we assume the wrapped swscale context and frame carry no
// thread-affine state, so moving the whole Rescaler to another thread is sound.
// We only need Send (create on one thread, use on another), never Sync.
// NOTE(review): this is an assumption, not a guarantee from ffmpeg-next — confirm
// against the swscale docs before relying on it in new call sites.
unsafe impl Send for Rescaler {}
|
||||
|
||||
impl Rescaler {
|
||||
pub fn new(target_format: Pixel, target_width_height: Option<(u32, u32)>) -> Result<Self> {
|
||||
Ok(Self {
|
||||
target_format,
|
||||
ctx: None,
|
||||
target_width_height,
|
||||
out_frame: FfmpegFrame::empty(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn set_target_dimensions(&mut self, w: u32, h: u32) {
|
||||
self.target_width_height = Some((w, h));
|
||||
}
|
||||
|
||||
pub fn process<'a: 'b, 'b>(
|
||||
&'a mut self,
|
||||
frame: &'b FfmpegFrame,
|
||||
) -> Result<&'b FfmpegFrame, ffmpeg::Error> {
|
||||
// Short-circuit if possible.
|
||||
if self.target_width_height.is_none() && self.target_format == frame.format() {
|
||||
return Ok(frame);
|
||||
}
|
||||
let (target_width, target_height) = self
|
||||
.target_width_height
|
||||
.unwrap_or_else(|| (frame.width(), frame.height()));
|
||||
let out_frame_needs_reset = self.out_frame.width() != target_width
|
||||
|| self.out_frame.height() != target_height
|
||||
|| self.out_frame.format() != self.target_format;
|
||||
if out_frame_needs_reset {
|
||||
self.out_frame = FfmpegFrame::new(self.target_format, target_width, target_height);
|
||||
}
|
||||
let ctx = match self.ctx {
|
||||
None => self.ctx.insert(scaling::Context::get(
|
||||
frame.format(),
|
||||
frame.width(),
|
||||
frame.height(),
|
||||
self.out_frame.format(),
|
||||
self.out_frame.width(),
|
||||
self.out_frame.height(),
|
||||
Flags::BILINEAR,
|
||||
)?),
|
||||
Some(ref mut ctx) => ctx,
|
||||
};
|
||||
// This resets the context if any parameters changed.
|
||||
ctx.cached(
|
||||
frame.format(),
|
||||
frame.width(),
|
||||
frame.height(),
|
||||
self.out_frame.format(),
|
||||
self.out_frame.width(),
|
||||
self.out_frame.height(),
|
||||
Flags::BILINEAR,
|
||||
);
|
||||
|
||||
ctx.run(&frame, &mut self.out_frame)?;
|
||||
Ok(&self.out_frame)
|
||||
}
|
||||
}
|
||||
9
third_party/iroh-live/moq-media/src/lib.rs
vendored
Normal file
9
third_party/iroh-live/moq-media/src/lib.rs
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
pub mod audio;
|
||||
pub mod av;
|
||||
pub mod capture;
|
||||
pub mod ffmpeg;
|
||||
pub mod publish;
|
||||
pub mod subscribe;
|
||||
mod util;
|
||||
|
||||
pub use audio::AudioBackend;
|
||||
594
third_party/iroh-live/moq-media/src/publish.rs
vendored
Normal file
594
third_party/iroh-live/moq-media/src/publish.rs
vendored
Normal file
|
|
@ -0,0 +1,594 @@
|
|||
use std::{
|
||||
collections::{BTreeMap, HashMap},
|
||||
sync::{
|
||||
Arc, Mutex,
|
||||
atomic::{AtomicBool, AtomicU32, Ordering},
|
||||
},
|
||||
time::{Duration, Instant},
|
||||
};
|
||||
|
||||
use anyhow::Context;
|
||||
use hang::catalog::{AudioConfig, Catalog, CatalogProducer, VideoConfig};
|
||||
use moq_lite::BroadcastProducer;
|
||||
use n0_error::Result;
|
||||
use n0_future::task::AbortOnDropHandle;
|
||||
use tokio_util::sync::{CancellationToken, DropGuard};
|
||||
use tracing::{debug, error, info, info_span, trace, warn};
|
||||
|
||||
use crate::{
|
||||
av::{
|
||||
AudioEncoder, AudioEncoderInner, AudioPreset, AudioSource, DecodeConfig, VideoEncoder,
|
||||
VideoEncoderInner, VideoPreset, VideoSource,
|
||||
},
|
||||
subscribe::WatchTrack,
|
||||
util::spawn_thread,
|
||||
};
|
||||
|
||||
/// A published MoQ broadcast: owns the catalog, the set of available A/V
/// renditions, and a background task that starts encoders on demand when
/// subscribers request tracks.
pub struct PublishBroadcast {
    // The moq-lite broadcast all tracks are published into.
    producer: BroadcastProducer,
    // Producer side of the catalog track (video/audio rendition listings).
    catalog: CatalogProducer,
    // Shared mutable state (available renditions, running encoder threads).
    state: Arc<Mutex<State>>,
    // Track-request loop; aborted when the last clone of this handle drops.
    _task: Arc<AbortOnDropHandle<()>>,
}
|
||||
|
||||
impl PublishBroadcast {
    /// Create an empty broadcast with a default catalog track and spawn the
    /// background loop that services incoming track requests.
    pub fn new() -> Self {
        let mut producer = BroadcastProducer::default();
        let catalog = Catalog::default().produce();
        // Publish the catalog's consumer track so subscribers can discover renditions.
        producer.insert_track(catalog.consumer.track);
        let catalog = catalog.producer;

        let state = Arc::new(Mutex::new(State::default()));
        let task_handle = tokio::spawn(Self::run(state.clone(), producer.clone()));

        Self {
            producer,
            catalog,
            state,
            _task: Arc::new(AbortOnDropHandle::new(task_handle)),
        }
    }

    /// A clone of the underlying broadcast producer (cheap handle clone).
    pub fn producer(&self) -> BroadcastProducer {
        self.producer.clone()
    }

    /// Background loop: for each requested track, try to start the matching
    /// encoder; on success, spawn a watcher that stops the encoder once the
    /// track becomes unused.
    async fn run(state: Arc<Mutex<State>>, mut producer: BroadcastProducer) {
        while let Some(track) = producer.requested_track().await {
            let name = track.info.name.clone();
            // Lock is held only for the synchronous start_track call, never across await.
            if state
                .lock()
                .expect("poisoned")
                .start_track(track.clone())
                .inspect_err(|err| warn!(%name, "failed to start requested track: {err:#}"))
                .is_ok()
            {
                info!("started track: {name}");
                tokio::spawn({
                    let state = state.clone();
                    async move {
                        // Resolves when the last consumer of the track goes away.
                        track.unused().await;
                        info!("stopping track: {name}");
                        state.lock().expect("poisoned").stop_track(&name);
                    }
                });
            }
        }
    }

    /// Create a local WatchTrack from the current video source, if present.
    pub fn watch_local(&self, decode_config: DecodeConfig) -> Option<WatchTrack> {
        // The inner `?` exits this function (not just the block) when no video
        // source is configured; the lock is released before building the track.
        let (source, shutdown) = {
            let state = self.state.lock().expect("poisoned");
            let source = state
                .available_video
                .as_ref()
                .map(|video| video.source.clone())?;
            Some((source, state.shutdown_token.child_token()))
        }?;
        Some(WatchTrack::from_video_source(
            "local".to_string(),
            shutdown,
            source,
            decode_config,
        ))
    }

    /// Replace (or clear, with `None`) the set of video renditions and publish
    /// the corresponding catalog update.
    ///
    /// # Errors
    /// Fails if a rendition's encoder cannot be constructed to read its config.
    pub fn set_video(&mut self, renditions: Option<VideoRenditions>) -> Result<()> {
        match renditions {
            Some(renditions) => {
                let priority = 1u8;
                let configs = renditions.available_renditions()?;
                let video = hang::catalog::Video {
                    renditions: configs,
                    priority,
                    display: None,
                    rotation: None,
                    flip: None,
                };
                // Scoped so the catalog lock is dropped before taking the state lock.
                {
                    let mut catalog = self.catalog.lock();
                    catalog.video = Some(video);
                }
                self.state.lock().expect("poisoned").available_video = Some(renditions);
                // TODO: Drop active encodings if their rendition is no longer available?
            }
            None => {
                // Clear catalog and stop any active video encoders
                self.state.lock().expect("poisoned").remove_video();
                {
                    let mut catalog = self.catalog.lock();
                    catalog.video = None;
                }
            }
        }
        Ok(())
    }

    /// Replace (or clear, with `None`) the set of audio renditions and publish
    /// the corresponding catalog update.
    ///
    /// # Errors
    /// Fails if a rendition's encoder cannot be constructed to read its config.
    pub fn set_audio(&mut self, renditions: Option<AudioRenditions>) -> Result<()> {
        match renditions {
            Some(renditions) => {
                let priority = 2u8;
                let configs = renditions.available_renditions()?;
                let audio = hang::catalog::Audio {
                    renditions: configs,
                    priority,
                };
                // Scoped so the catalog lock is dropped before taking the state lock.
                {
                    let mut catalog = self.catalog.lock();
                    catalog.audio = Some(audio);
                }
                self.state.lock().expect("poisoned").available_audio = Some(renditions);
            }
            None => {
                // Clear catalog and stop any active audio encoders
                self.state.lock().expect("poisoned").remove_audio();
                {
                    let mut catalog = self.catalog.lock();
                    catalog.audio = None;
                }
            }
        }
        Ok(())
    }
}
|
||||
|
||||
impl Drop for PublishBroadcast {
    /// Cancel every encoder thread (they watch child tokens of `shutdown_token`)
    /// and close the broadcast so subscribers see end-of-stream.
    fn drop(&mut self) {
        self.state.lock().expect("poisoned").shutdown_token.cancel();
        self.producer.close();
    }
}
|
||||
|
||||
/// Mutable broadcast state shared between the public API and the background
/// track-request loop; always accessed under `PublishBroadcast::state`'s mutex.
#[derive(Default)]
struct State {
    // Root token; cancelled on drop of the broadcast, parents all child tokens.
    shutdown_token: CancellationToken,
    // Video renditions currently offered in the catalog, if any.
    available_video: Option<VideoRenditions>,
    // Audio renditions currently offered in the catalog, if any.
    available_audio: Option<AudioRenditions>,
    // Running video encoder threads, keyed by rendition/track name.
    active_video: HashMap<String, EncoderThread>,
    // Running audio encoder threads, keyed by rendition/track name.
    active_audio: HashMap<String, EncoderThread>,
}
|
||||
|
||||
impl State {
    /// Signal the encoder thread behind track `name` (video or audio) to stop.
    fn stop_track(&mut self, name: &str) {
        let thread = self
            .active_video
            .remove(name)
            .or_else(|| self.active_audio.remove(name));
        if let Some(thread) = thread {
            thread.shutdown.cancel();
        }
    }

    /// Stop all audio encoder threads and forget the audio renditions.
    fn remove_audio(&mut self) {
        for (_name, thread) in self.active_audio.drain() {
            thread.shutdown.cancel();
        }
        self.available_audio = None;
    }

    /// Stop all video encoder threads and forget the video renditions.
    fn remove_video(&mut self) {
        for (_name, thread) in self.active_video.drain() {
            thread.shutdown.cancel();
        }
        self.available_video = None;
    }

    /// Start an encoder for the requested track, routing it to video or audio
    /// by matching the track name against the available rendition names.
    ///
    /// # Errors
    /// Fails when no rendition with this name exists or the encoder cannot start.
    fn start_track(&mut self, track: moq_lite::TrackProducer) -> Result<()> {
        let name = track.info.name.clone();
        let track = hang::TrackProducer::new(track);
        // Child token so one track can be stopped without affecting the rest.
        let shutdown_token = self.shutdown_token.child_token();
        // The let-chains keep the two branches mutually exclusive, which is what
        // lets `track` be moved into exactly one of them.
        if let Some(video) = self.available_video.as_mut()
            && video.contains_rendition(&name)
        {
            let thread = video.start_encoder(&name, track, shutdown_token)?;
            self.active_video.insert(name, thread);
            Ok(())
        } else if let Some(audio) = self.available_audio.as_mut()
            && audio.contains_rendition(&name)
        {
            let thread = audio.start_encoder(&name, track, shutdown_token)?;
            self.active_audio.insert(name, thread);
            Ok(())
        } else {
            info!("ignoring track request {name}: rendition not available");
            Err(n0_error::anyerr!("rendition not available"))
        }
    }
}
|
||||
|
||||
/// An audio source plus the set of named encoder presets ("renditions")
/// that can be offered for it in the catalog.
pub struct AudioRenditions {
    // Factory that builds a fresh encoder for a preset; boxed to erase the
    // concrete encoder type chosen in `new`.
    make_encoder: Box<dyn Fn(AudioPreset) -> Result<Box<dyn AudioEncoder>> + Send>,
    // The capture source feeding every encoder started from this set.
    source: Box<dyn AudioSource>,
    // Rendition name (e.g. "audio-<preset>") -> preset.
    renditions: HashMap<String, AudioPreset>,
}
|
||||
|
||||
impl AudioRenditions {
    /// Build the rendition set for `source`, one named rendition per preset.
    ///
    /// Rendition names follow the `audio-{preset}` pattern. Encoders of type
    /// `E` are created lazily, per requested track, via the stored factory.
    pub fn new<E: AudioEncoder>(
        source: impl AudioSource,
        presets: impl IntoIterator<Item = AudioPreset>,
    ) -> Self {
        let renditions = presets
            .into_iter()
            .map(|preset| (format!("audio-{preset}"), preset))
            .collect();
        // Captured by the factory closure so every encoder sees the source format.
        let format = source.format();
        Self {
            make_encoder: Box::new(move |preset| Ok(Box::new(E::with_preset(format, preset)?))),
            renditions,
            source: Box::new(source),
        }
    }

    /// Catalog configs for every rendition, keyed by rendition name.
    ///
    /// # Errors
    /// Fails if any encoder cannot be constructed.
    pub fn available_renditions(&self) -> Result<BTreeMap<String, AudioConfig>> {
        let mut renditions = BTreeMap::new();
        for (name, preset) in self.renditions.iter() {
            // We need to create the encoder to get the config, even though we drop it
            // again (it will be created on demand). Not ideal, but works for now.
            let config = (self.make_encoder)(*preset)?.config();
            renditions.insert(name.clone(), config);
        }
        Ok(renditions)
    }

    /// Build an encoder for rendition `name`, or `None` if the name is unknown.
    pub fn encoder(&mut self, name: &str) -> Option<Result<Box<dyn AudioEncoder>>> {
        let preset = self.renditions.get(name)?;
        Some((self.make_encoder)(*preset))
    }

    /// Whether a rendition with this name exists.
    pub fn contains_rendition(&self, name: &str) -> bool {
        self.renditions.contains_key(name)
    }

    /// Spawn an encoder thread for rendition `name`, writing frames to `producer`
    /// until `shutdown_token` is cancelled.
    ///
    /// # Errors
    /// Fails when the rendition is unknown or the encoder cannot be constructed.
    pub fn start_encoder(
        &mut self,
        name: &str,
        producer: hang::TrackProducer,
        shutdown_token: CancellationToken,
    ) -> Result<EncoderThread> {
        let preset = self
            .renditions
            .get(name)
            .context("rendition not available")?;
        let encoder = (self.make_encoder)(*preset)?;
        let thread = EncoderThread::spawn_audio(
            self.source.cloned_boxed(),
            encoder,
            producer,
            shutdown_token,
        );
        Ok(thread)
    }
}
|
||||
|
||||
/// A shared video source plus the set of named encoder presets ("renditions")
/// that can be offered for it in the catalog.
pub struct VideoRenditions {
    // Factory that builds a fresh encoder for a preset; boxed to erase the
    // concrete encoder type chosen in `new`.
    make_encoder: Box<dyn Fn(VideoPreset) -> Result<Box<dyn VideoEncoder>> + Send>,
    // Fan-out wrapper around the capture source; cloned per started encoder.
    source: SharedVideoSource,
    // Rendition name (e.g. "video-<preset>") -> preset.
    renditions: HashMap<String, VideoPreset>,
    // Cancels the shared capture thread when this set is dropped.
    _shared_source_cancel_guard: DropGuard,
}
|
||||
|
||||
impl VideoRenditions {
    /// Build the rendition set for `source`, one named rendition per preset.
    ///
    /// The source is wrapped in a [`SharedVideoSource`] so multiple encoders can
    /// consume the same capture thread; that thread is cancelled when this value
    /// drops (via the stored drop guard).
    pub fn new<E: VideoEncoder>(
        source: impl VideoSource,
        presets: impl IntoIterator<Item = VideoPreset>,
    ) -> Self {
        let shutdown_token = CancellationToken::new();
        let source = SharedVideoSource::new(source, shutdown_token.clone());
        let renditions = presets
            .into_iter()
            .map(|preset| (format!("video-{preset}"), preset))
            .collect();
        Self {
            make_encoder: Box::new(|preset| Ok(Box::new(E::with_preset(preset)?))),
            renditions,
            source,
            _shared_source_cancel_guard: shutdown_token.drop_guard(),
        }
    }

    /// Catalog configs for every rendition, keyed by rendition name.
    ///
    /// # Errors
    /// Fails if any encoder cannot be constructed.
    pub fn available_renditions(&self) -> Result<BTreeMap<String, VideoConfig>> {
        let mut renditions = BTreeMap::new();
        for (name, preset) in self.renditions.iter() {
            // We need to create the encoder to get the config, even though we drop it
            // again (it will be created on demand). Not ideal, but works for now.
            let config = (self.make_encoder)(*preset)?.config();
            renditions.insert(name.clone(), config);
        }
        Ok(renditions)
    }

    /// Whether a rendition with this name exists.
    pub fn contains_rendition(&self, name: &str) -> bool {
        self.renditions.contains_key(name)
    }

    /// Spawn an encoder thread for rendition `name`, writing frames to `producer`
    /// until `shutdown_token` is cancelled.
    ///
    /// # Errors
    /// Fails when the rendition is unknown or the encoder cannot be constructed.
    pub fn start_encoder(
        &mut self,
        name: &str,
        producer: hang::TrackProducer,
        shutdown_token: CancellationToken,
    ) -> Result<EncoderThread> {
        let preset = self
            .renditions
            .get(name)
            .context("rendition not available")?;
        let encoder = (self.make_encoder)(*preset)?;
        let thread =
            EncoderThread::spawn_video(self.source.clone(), encoder, producer, shutdown_token);
        Ok(thread)
    }
}
|
||||
|
||||
/// Fan-out wrapper around a [`VideoSource`]: one capture thread publishes frames
/// into a `watch` channel, and each clone of this handle reads the latest frame.
/// The underlying source runs only while at least one subscriber has `start`ed.
#[derive(Debug, Clone)]
pub(crate) struct SharedVideoSource {
    // Name copied from the wrapped source.
    name: String,
    // Latest captured frame; `None` until the first frame arrives.
    frames_rx: tokio::sync::watch::Receiver<Option<crate::av::VideoFrame>>,
    // Format copied from the wrapped source at construction.
    format: crate::av::VideoFormat,
    // True while at least one subscriber wants frames; read by the capture thread.
    running: Arc<AtomicBool>,
    // Capture thread handle, kept to `unpark` it when the first subscriber starts.
    thread: Arc<std::thread::JoinHandle<()>>,
    // Number of active subscribers (start/stop bumps this up/down).
    subscriber_count: Arc<AtomicU32>,
}
|
||||
|
||||
impl SharedVideoSource {
|
||||
fn new(mut source: impl VideoSource, shutdown: CancellationToken) -> Self {
|
||||
let name = source.name().to_string();
|
||||
let format = source.format();
|
||||
let (tx, rx) = tokio::sync::watch::channel(None);
|
||||
let running = Arc::new(AtomicBool::new(false));
|
||||
let thread = spawn_thread(format!("vshr-{}", source.name()), {
|
||||
let shutdown = shutdown.clone();
|
||||
let running = running.clone();
|
||||
move || {
|
||||
let frame_time = Duration::from_secs_f32(1. / 30.);
|
||||
let start = Instant::now();
|
||||
for i in 0.. {
|
||||
if shutdown.is_cancelled() {
|
||||
break;
|
||||
}
|
||||
|
||||
loop {
|
||||
if running.load(Ordering::Relaxed) {
|
||||
break;
|
||||
}
|
||||
if let Err(err) = source.stop() {
|
||||
warn!("Failed to stop video source: {err:#}");
|
||||
}
|
||||
std::thread::park();
|
||||
if let Err(err) = source.start() {
|
||||
warn!("Failed to stop video source: {err:#}");
|
||||
}
|
||||
}
|
||||
|
||||
match source.pop_frame() {
|
||||
Ok(Some(frame)) => {
|
||||
let _ = tx.send(Some(frame));
|
||||
}
|
||||
Ok(None) => {}
|
||||
Err(_) => break,
|
||||
}
|
||||
let expected = frame_time * i;
|
||||
let actual = start.elapsed();
|
||||
if actual < expected {
|
||||
std::thread::sleep(expected - actual);
|
||||
}
|
||||
}
|
||||
}
|
||||
});
|
||||
Self {
|
||||
name,
|
||||
format,
|
||||
frames_rx: rx,
|
||||
thread: Arc::new(thread),
|
||||
running,
|
||||
subscriber_count: Default::default(),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl VideoSource for SharedVideoSource {
    fn name(&self) -> &str {
        &self.name
    }

    fn format(&self) -> crate::av::VideoFormat {
        self.format.clone()
    }

    /// Register a subscriber; the first one wakes the shared capture thread.
    fn start(&mut self) -> anyhow::Result<()> {
        let prev_count = self.subscriber_count.fetch_add(1, Ordering::Relaxed);
        if prev_count == 0 {
            // First subscriber: mark running before unparking so the capture
            // thread observes the flag when it wakes.
            self.running.store(true, Ordering::Relaxed);
            self.thread.thread().unpark();
        }
        Ok(())
    }

    /// Deregister a subscriber; the last one lets the capture thread idle.
    fn stop(&mut self) -> anyhow::Result<()> {
        // saturating_sub keeps unmatched stop() calls from underflowing; only
        // the transition 1 -> 0 clears the running flag.
        if self
            .subscriber_count
            .fetch_update(Ordering::SeqCst, Ordering::SeqCst, |val| {
                Some(val.saturating_sub(1))
            })
            .expect("always returns Some")
            == 1
        {
            self.running.store(false, Ordering::Relaxed);
        }
        Ok(())
    }

    /// Latest frame from the shared capture thread, or `None` before the first
    /// frame. `borrow_and_update` marks the value seen for this receiver clone.
    fn pop_frame(&mut self) -> anyhow::Result<Option<crate::av::VideoFrame>> {
        let frame = self.frames_rx.borrow_and_update().clone();
        Ok(frame)
    }
}
|
||||
|
||||
/// Handle to a running encoder thread; cancel `shutdown` to stop it.
pub struct EncoderThread {
    // Kept so the OS thread isn't detached until this handle drops.
    _thread_handle: std::thread::JoinHandle<()>,
    // Cancelling this token makes the encoder loop exit on its next tick.
    shutdown: CancellationToken,
}
|
||||
|
||||
impl EncoderThread {
|
||||
    /// Spawn a thread that pulls frames from `source`, encodes them, and writes
    /// the resulting packets to `producer` until `shutdown` is cancelled or the
    /// source/encoder errors. The track is closed and the source stopped on exit.
    pub fn spawn_video(
        mut source: impl VideoSource,
        mut encoder: impl VideoEncoderInner,
        mut producer: hang::TrackProducer,
        shutdown: CancellationToken,
    ) -> Self {
        let thread_name = format!("venc-{:<4}-{:<4}", source.name(), encoder.name());
        let span = info_span!("videoenc", source = source.name(), encoder = encoder.name());
        let handle = spawn_thread(thread_name, {
            let shutdown = shutdown.clone();
            move || {
                let _guard = span.enter();
                if let Err(err) = source.start() {
                    warn!("video source failed to start: {err:#}");
                    return;
                }
                let format = source.format();
                tracing::debug!(
                    src_format = ?format,
                    dst_config = ?encoder.config(),
                    "video encoder thread start"
                );
                // Pace the loop at the encoder's configured framerate (30fps default).
                let framerate = encoder.config().framerate.unwrap_or(30.0);
                let interval = Duration::from_secs_f64(1. / framerate);
                loop {
                    let start = Instant::now();
                    if shutdown.is_cancelled() {
                        debug!("stop video encoder: cancelled");
                        break;
                    }
                    let frame = match source.pop_frame() {
                        Ok(frame) => frame,
                        Err(err) => {
                            warn!("video encoder failed: {err:#}");
                            break;
                        }
                    };
                    // `None` means no new frame this tick — just sleep and retry.
                    if let Some(frame) = frame {
                        if let Err(err) = encoder.push_frame(frame) {
                            warn!("video encoder failed: {err:#}");
                            break;
                        };
                        // Drain every packet the encoder has ready; write errors
                        // are logged but don't kill the loop.
                        while let Ok(Some(pkt)) = encoder.pop_packet() {
                            if let Err(err) = producer.write(pkt) {
                                warn!("failed to write frame to producer: {err:#}");
                            }
                        }
                    }
                    // Sleep off the remainder of this tick's time slot.
                    std::thread::sleep(interval.saturating_sub(start.elapsed()));
                }
                // Signal end-of-track to subscribers, then release the source.
                producer.inner.close();
                if let Err(err) = source.stop() {
                    warn!("video source failed to stop: {err:#}");
                }
                tracing::debug!("video encoder thread stop");
            }
        });
        Self {
            _thread_handle: handle,
            shutdown,
        }
    }
|
||||
|
||||
pub fn spawn_audio(
|
||||
mut source: Box<dyn AudioSource>,
|
||||
mut encoder: impl AudioEncoderInner,
|
||||
mut producer: hang::TrackProducer,
|
||||
shutdown: CancellationToken,
|
||||
) -> Self {
|
||||
let sd = shutdown.clone();
|
||||
let name = encoder.name();
|
||||
let thread_name = format!("aenc-{:<4}", name);
|
||||
let span = info_span!("audioenc", %name);
|
||||
let handle = spawn_thread(thread_name, move || {
|
||||
let _guard = span.enter();
|
||||
tracing::debug!(config=?encoder.config(), "audio encoder thread start");
|
||||
let shutdown = sd;
|
||||
// 20ms framing to align with typical Opus config (48kHz → 960 samples/ch)
|
||||
const INTERVAL: Duration = Duration::from_millis(20);
|
||||
let format = source.format();
|
||||
let samples_per_frame = (format.sample_rate / 1000) * INTERVAL.as_millis() as u32;
|
||||
let mut buf = vec![0.0f32; samples_per_frame as usize * format.channel_count as usize];
|
||||
let start = Instant::now();
|
||||
for tick in 0.. {
|
||||
trace!("tick");
|
||||
if shutdown.is_cancelled() {
|
||||
break;
|
||||
}
|
||||
match source.pop_samples(&mut buf) {
|
||||
Ok(Some(_n)) => {
|
||||
// Expect a full frame; if shorter, zero-pad via slice len
|
||||
if let Err(err) = encoder.push_samples(&buf) {
|
||||
error!(buf_len = buf.len(), "audio push_samples failed: {err:#}");
|
||||
break;
|
||||
}
|
||||
while let Ok(Some(pkt)) = encoder
|
||||
.pop_packet()
|
||||
.inspect_err(|err| warn!("encoder error: {err:#}"))
|
||||
{
|
||||
if let Err(err) = producer.write(pkt) {
|
||||
warn!("failed to write frame to producer: {err:#}");
|
||||
}
|
||||
}
|
||||
}
|
||||
Ok(None) => {
|
||||
// keep pacing
|
||||
}
|
||||
Err(err) => {
|
||||
error!("audio source failed: {err:#}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
let expected_time = (tick + 1) * INTERVAL;
|
||||
let actual_time = start.elapsed();
|
||||
if actual_time > expected_time {
|
||||
warn!("audio thread too slow by {:?}", actual_time - expected_time);
|
||||
}
|
||||
let sleep = expected_time.saturating_sub(start.elapsed());
|
||||
if sleep > Duration::ZERO {
|
||||
std::thread::sleep(sleep);
|
||||
}
|
||||
}
|
||||
// drain
|
||||
while let Ok(Some(pkt)) = encoder.pop_packet() {
|
||||
if let Err(err) = producer.write(pkt) {
|
||||
warn!("failed to write frame to producer: {err:#}");
|
||||
}
|
||||
}
|
||||
producer.inner.close();
|
||||
tracing::debug!("audio encoder thread stop");
|
||||
});
|
||||
Self {
|
||||
_thread_handle: handle,
|
||||
shutdown,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl Drop for EncoderThread {
    fn drop(&mut self) {
        // Signal the encoder thread to exit; we do not join it here.
        self.shutdown.cancel();
    }
}
|
||||
712
third_party/iroh-live/moq-media/src/subscribe.rs
vendored
Normal file
712
third_party/iroh-live/moq-media/src/subscribe.rs
vendored
Normal file
|
|
@ -0,0 +1,712 @@
|
|||
use std::{collections::BTreeMap, sync::Arc, time::Duration};
|
||||
|
||||
use hang::{
|
||||
Timestamp, TrackConsumer,
|
||||
catalog::{AudioConfig, Catalog, CatalogConsumer, VideoConfig},
|
||||
};
|
||||
use moq_lite::{BroadcastConsumer, Track};
|
||||
use n0_error::{Result, StackResultExt, StdResultExt};
|
||||
use n0_future::task::AbortOnDropHandle;
|
||||
use n0_watcher::{Watchable, Watcher};
|
||||
use tokio::{
|
||||
sync::mpsc::{self, error::TryRecvError},
|
||||
time::Instant,
|
||||
};
|
||||
use tokio_util::sync::{CancellationToken, DropGuard};
|
||||
use tracing::{Span, debug, error, info, info_span, trace, warn};
|
||||
|
||||
use crate::{
|
||||
av::{
|
||||
AudioDecoder, AudioSink, AudioSinkHandle, DecodeConfig, DecodedFrame, Decoders,
|
||||
PlaybackConfig, Quality, VideoDecoder, VideoSource,
|
||||
},
|
||||
ffmpeg::util::Rescaler,
|
||||
util::spawn_thread,
|
||||
};
|
||||
|
||||
/// Default latency budget handed to every subscribed `TrackConsumer`.
const DEFAULT_MAX_LATENCY: Duration = Duration::from_millis(150);
|
||||
|
||||
/// A subscription to a remote broadcast, keeping its catalog up to date.
///
/// Cloning is cheap; all clones share the catalog task and shutdown token.
#[derive(derive_more::Debug, Clone)]
pub struct SubscribeBroadcast {
    /// Name under which the broadcast was announced.
    broadcast_name: String,
    #[debug("BroadcastConsumer")]
    broadcast: BroadcastConsumer,
    // catalog_watcher: n0_watcher::Direct<CatalogWrapper>,
    /// Latest catalog; updated by the background catalog task.
    catalog_watchable: Watchable<CatalogWrapper>,
    /// Parent token for all tracks spawned from this broadcast.
    shutdown: CancellationToken,
    /// Background task reading catalog updates; aborted once all clones drop.
    _catalog_task: Arc<AbortOnDropHandle<()>>,
}
|
||||
|
||||
/// Cheaply clonable `Catalog` wrapper with change detection.
///
/// Equality deliberately skips the catalog itself (`#[eq(skip)]`) and
/// compares only `seq`, so watchers detect updates without deep comparison.
#[derive(Debug, derive_more::PartialEq, derive_more::Eq, Default, Clone, derive_more::Deref)]
pub struct CatalogWrapper {
    #[eq(skip)]
    #[deref]
    inner: Arc<Catalog>,
    // Monotonic update counter; 0 is the initial catalog.
    seq: usize,
}
|
||||
|
||||
impl CatalogWrapper {
|
||||
fn new(inner: Catalog, seq: usize) -> Self {
|
||||
Self {
|
||||
inner: Arc::new(inner),
|
||||
seq,
|
||||
}
|
||||
}
|
||||
|
||||
pub fn video_renditions(&self) -> impl Iterator<Item = &str> {
|
||||
let mut renditions: Vec<_> = self
|
||||
.inner
|
||||
.video
|
||||
.as_ref()
|
||||
.iter()
|
||||
.map(|v| v.renditions.iter())
|
||||
.flatten()
|
||||
.map(|(name, config)| (name.as_str(), config.coded_width))
|
||||
.collect();
|
||||
renditions.sort_by(|a, b| a.1.cmp(&b.1));
|
||||
renditions.into_iter().map(|(name, _w)| name)
|
||||
}
|
||||
|
||||
pub fn audio_renditions(&self) -> impl Iterator<Item = &str> + '_ {
|
||||
self.inner
|
||||
.audio
|
||||
.as_ref()
|
||||
.into_iter()
|
||||
.map(|v| v.renditions.iter())
|
||||
.flatten()
|
||||
.map(|(name, _config)| name.as_str())
|
||||
}
|
||||
|
||||
pub fn select_video_rendition(&self, quality: Quality) -> Result<String> {
|
||||
let video = self.inner.video.as_ref().context("no video published")?;
|
||||
let track_name =
|
||||
select_video_rendition(&video.renditions, quality).context("no video renditions")?;
|
||||
Ok(track_name)
|
||||
}
|
||||
|
||||
pub fn select_audio_rendition(&self, quality: Quality) -> Result<String> {
|
||||
let audio = self.inner.audio.as_ref().context("no video published")?;
|
||||
let track_name =
|
||||
select_audio_rendition(&audio.renditions, quality).context("no video renditions")?;
|
||||
Ok(track_name)
|
||||
}
|
||||
}
|
||||
|
||||
impl CatalogWrapper {
    /// Consume the wrapper, returning the shared catalog.
    pub fn into_inner(self) -> Arc<Catalog> {
        self.inner
    }
}
|
||||
|
||||
impl SubscribeBroadcast {
|
||||
pub async fn new(broadcast_name: String, broadcast: BroadcastConsumer) -> Result<Self> {
|
||||
let shutdown = CancellationToken::new();
|
||||
|
||||
let (catalog_watchable, catalog_task) = {
|
||||
let track = broadcast.subscribe_track(&Catalog::default_track());
|
||||
let mut consumer = CatalogConsumer::new(track);
|
||||
let initial_catalog = consumer
|
||||
.next()
|
||||
.await
|
||||
.std_context("Broadcast closed before receiving catalog")?
|
||||
.context("Catalog track closed before receiving catalog")?;
|
||||
let watchable = Watchable::new(CatalogWrapper::new(initial_catalog, 0));
|
||||
|
||||
let task = tokio::spawn({
|
||||
let shutdown = shutdown.clone();
|
||||
let watchable = watchable.clone();
|
||||
async move {
|
||||
for seq in 1.. {
|
||||
match consumer.next().await {
|
||||
Ok(Some(catalog)) => {
|
||||
watchable.set(CatalogWrapper::new(catalog, seq)).ok();
|
||||
}
|
||||
Ok(None) => {
|
||||
debug!("subscribed broadcast catalog track ended");
|
||||
break;
|
||||
}
|
||||
Err(err) => {
|
||||
debug!("subscribed broadcast closed: {err:#}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
shutdown.cancel();
|
||||
}
|
||||
});
|
||||
(watchable, task)
|
||||
};
|
||||
Ok(Self {
|
||||
broadcast_name,
|
||||
broadcast,
|
||||
catalog_watchable,
|
||||
_catalog_task: Arc::new(AbortOnDropHandle::new(catalog_task)),
|
||||
shutdown: CancellationToken::new(),
|
||||
})
|
||||
}
|
||||
|
||||
pub fn broadcast_name(&self) -> &str {
|
||||
&self.broadcast_name
|
||||
}
|
||||
|
||||
pub fn catalog_watcher(&mut self) -> n0_watcher::Direct<CatalogWrapper> {
|
||||
self.catalog_watchable.watch()
|
||||
}
|
||||
|
||||
pub fn catalog(&self) -> CatalogWrapper {
|
||||
self.catalog_watchable.get()
|
||||
}
|
||||
|
||||
pub fn watch_and_listen<D: Decoders>(
|
||||
self,
|
||||
audio_out: impl AudioSink,
|
||||
playback_config: PlaybackConfig,
|
||||
) -> Result<AvRemoteTrack> {
|
||||
AvRemoteTrack::new::<D>(self, audio_out, playback_config)
|
||||
}
|
||||
|
||||
pub fn watch<D: VideoDecoder>(&self) -> Result<WatchTrack> {
|
||||
self.watch_with::<D>(&Default::default(), Quality::Highest)
|
||||
}
|
||||
|
||||
pub fn watch_with<D: VideoDecoder>(
|
||||
&self,
|
||||
playback_config: &DecodeConfig,
|
||||
quality: Quality,
|
||||
) -> Result<WatchTrack> {
|
||||
let track_name = self.catalog().select_video_rendition(quality)?;
|
||||
self.watch_rendition::<D>(playback_config, &track_name)
|
||||
}
|
||||
|
||||
pub fn watch_rendition<D: VideoDecoder>(
|
||||
&self,
|
||||
playback_config: &DecodeConfig,
|
||||
track_name: &str,
|
||||
) -> Result<WatchTrack> {
|
||||
let catalog = self.catalog();
|
||||
let video = catalog.video.as_ref().context("no video published")?;
|
||||
let config = video
|
||||
.renditions
|
||||
.get(track_name)
|
||||
.context("rendition not found")?;
|
||||
let consumer = TrackConsumer::new(
|
||||
self.broadcast.subscribe_track(&Track {
|
||||
name: track_name.to_string(),
|
||||
priority: video.priority,
|
||||
}),
|
||||
DEFAULT_MAX_LATENCY,
|
||||
);
|
||||
let span = info_span!("videodec", %track_name);
|
||||
WatchTrack::from_consumer::<D>(
|
||||
track_name.to_string(),
|
||||
consumer,
|
||||
&config,
|
||||
playback_config,
|
||||
self.shutdown.child_token(),
|
||||
span,
|
||||
)
|
||||
}
|
||||
pub fn listen<D: AudioDecoder>(&self, output: impl AudioSink) -> Result<AudioTrack> {
|
||||
self.listen_with::<D>(Quality::Highest, output)
|
||||
}
|
||||
|
||||
pub fn listen_with<D: AudioDecoder>(
|
||||
&self,
|
||||
quality: Quality,
|
||||
output: impl AudioSink,
|
||||
) -> Result<AudioTrack> {
|
||||
let track_name = self.catalog().select_audio_rendition(quality)?;
|
||||
self.listen_rendition::<D>(&track_name, output)
|
||||
}
|
||||
|
||||
pub fn listen_rendition<D: AudioDecoder>(
|
||||
&self,
|
||||
name: &str,
|
||||
output: impl AudioSink,
|
||||
) -> Result<AudioTrack> {
|
||||
let catalog = self.catalog();
|
||||
let audio = catalog.audio.as_ref().context("no audio published")?;
|
||||
let config = audio.renditions.get(name).context("rendition not found")?;
|
||||
let consumer = TrackConsumer::new(
|
||||
self.broadcast.subscribe_track(&Track {
|
||||
name: name.to_string(),
|
||||
priority: audio.priority,
|
||||
}),
|
||||
DEFAULT_MAX_LATENCY,
|
||||
);
|
||||
let span = info_span!("audiodec", %name);
|
||||
AudioTrack::spawn::<D>(
|
||||
name.to_string(),
|
||||
consumer,
|
||||
config.clone(),
|
||||
output,
|
||||
self.shutdown.child_token(),
|
||||
span,
|
||||
)
|
||||
}
|
||||
|
||||
pub fn closed(&self) -> impl Future<Output = ()> + 'static {
|
||||
self.broadcast.closed()
|
||||
}
|
||||
|
||||
pub fn shutdown(&self) {
|
||||
self.shutdown.cancel();
|
||||
}
|
||||
}
|
||||
|
||||
/// Return the first name from `order` that exists in `renditions`;
/// otherwise fall back to the lexicographically first rendition
/// (`BTreeMap` key order). `None` only when `renditions` is empty.
fn select_rendition<T, P: ToString>(
    renditions: &BTreeMap<String, T>,
    order: &[P],
) -> Option<String> {
    for preferred in order {
        let key = preferred.to_string();
        if renditions.contains_key(key.as_str()) {
            return Some(key);
        }
    }
    renditions.keys().next().cloned()
}
|
||||
|
||||
fn select_video_rendition<'a, T>(
|
||||
renditions: &'a BTreeMap<String, T>,
|
||||
q: Quality,
|
||||
) -> Option<String> {
|
||||
use crate::av::VideoPreset::*;
|
||||
let order = match q {
|
||||
Quality::Highest => [P1080, P720, P360, P180],
|
||||
Quality::High => [P720, P360, P180, P1080],
|
||||
Quality::Mid => [P360, P180, P720, P1080],
|
||||
Quality::Low => [P180, P360, P720, P1080],
|
||||
};
|
||||
|
||||
select_rendition(renditions, &order)
|
||||
}
|
||||
|
||||
fn select_audio_rendition<'a, T>(
|
||||
renditions: &'a BTreeMap<String, T>,
|
||||
q: Quality,
|
||||
) -> Option<String> {
|
||||
use crate::av::AudioPreset::*;
|
||||
let order = match q {
|
||||
Quality::Highest | Quality::High => [Hq, Lq],
|
||||
Quality::Mid | Quality::Low => [Lq, Hq],
|
||||
};
|
||||
select_rendition(renditions, &order)
|
||||
}
|
||||
|
||||
/// A playing audio track: a decode thread plus a frame-forwarding task.
///
/// Dropping the track cancels the shutdown token, stopping both.
pub struct AudioTrack {
    /// Rendition (track) name this was subscribed as.
    name: String,
    /// Control handle for the audio sink (volume/pause etc. live behind it).
    handle: Box<dyn AudioSinkHandle>,
    /// Cancelled on drop, or by the decode loop when it exits.
    shutdown_token: CancellationToken,
    // Forwarding task (network -> channel); aborted on drop.
    _task_handle: AbortOnDropHandle<()>,
    // Decode thread; owned but never joined explicitly.
    _thread_handle: std::thread::JoinHandle<()>,
}
|
||||
|
||||
impl AudioTrack {
    /// Start playback of `consumer`: spawns a forwarding task (async frames
    /// -> mpsc channel) and a decode thread that feeds `output`.
    ///
    /// # Errors
    /// Fails if the sink cannot report its format or the decoder cannot be
    /// constructed for `config`.
    pub(crate) fn spawn<D: AudioDecoder>(
        name: String,
        consumer: TrackConsumer,
        config: AudioConfig,
        output: impl AudioSink,
        shutdown: CancellationToken,
        span: Span,
    ) -> Result<Self> {
        let _guard = span.enter();
        let (packet_tx, packet_rx) = mpsc::channel(32);
        let output_format = output.format()?;
        info!(?config, "audio thread start");
        let decoder = D::new(&config, output_format)?;
        let handle = output.handle();
        let thread_name = format!("adec-{}", name);
        let thread = spawn_thread(thread_name, {
            let shutdown = shutdown.clone();
            let span = span.clone();
            move || {
                let _guard = span.enter();
                if let Err(err) = Self::run_loop(decoder, packet_rx, output, &shutdown) {
                    error!("audio decoder failed: {err:#}");
                }
                info!("audio decoder thread stop");
            }
        });
        let task = tokio::spawn(forward_frames(consumer, packet_tx));
        Ok(Self {
            name,
            handle,
            shutdown_token: shutdown,
            _task_handle: AbortOnDropHandle::new(task),
            _thread_handle: thread,
        })
    }

    /// Resolves once the track stops (cancelled or decode loop exited).
    pub fn stopped(&self) -> impl Future<Output = ()> + 'static {
        let shutdown_token = self.shutdown_token.clone();
        async move { shutdown_token.cancelled().await }
    }

    /// Name of the rendition being played.
    pub fn rendition(&self) -> &str {
        &self.name
    }

    /// Control handle for the underlying audio sink.
    pub fn handle(&self) -> &dyn AudioSinkHandle {
        self.handle.as_ref()
    }

    /// Blocking decode loop: drain packets, decode, push samples to `sink`,
    /// paced on a 10 ms tick measured against absolute elapsed time.
    ///
    /// Exits when `shutdown` is cancelled or the packet channel disconnects;
    /// always cancels `shutdown` on the way out so `stopped()` resolves.
    pub(crate) fn run_loop(
        mut decoder: impl AudioDecoder,
        mut packet_rx: mpsc::Receiver<hang::Frame>,
        mut sink: impl AudioSink,
        shutdown: &CancellationToken,
    ) -> Result<()> {
        const INTERVAL: Duration = Duration::from_millis(10);
        // Timestamp of the first packet; used only for trace-level drift logs.
        let mut remote_start = None;
        let loop_start = Instant::now();

        'main: for i in 0.. {
            let tick = Instant::now();

            if shutdown.is_cancelled() {
                debug!("stop audio thread: cancelled");
                break;
            }

            // Drain everything currently queued without blocking.
            loop {
                match packet_rx.try_recv() {
                    Ok(packet) => {
                        let remote_start = *remote_start.get_or_insert_with(|| packet.timestamp);

                        if tracing::enabled!(tracing::Level::TRACE) {
                            // Compare local wall-clock progress to the
                            // remote timestamp progress to log drift.
                            let loop_elapsed = tick.duration_since(loop_start);
                            let remote_elapsed: Duration = packet
                                .timestamp
                                .checked_sub(remote_start)
                                .unwrap_or(Timestamp::ZERO)
                                .into();
                            let diff_ms =
                                (loop_elapsed.as_secs_f32() - remote_elapsed.as_secs_f32()) * 1000.;
                            trace!(len = packet.payload.num_bytes(), ts=?packet.timestamp, ?loop_elapsed, ?remote_elapsed, ?diff_ms, "recv packet");
                        }

                        // TODO: Skip outdated packets?

                        // While paused, packets are received and dropped.
                        if !sink.is_paused() {
                            decoder.push_packet(packet)?;
                            if let Some(samples) = decoder.pop_samples()? {
                                sink.push_samples(samples)?;
                            }
                        }
                    }
                    Err(TryRecvError::Disconnected) => {
                        debug!("stop audio thread: packet_rx disconnected");
                        break 'main;
                    }
                    Err(TryRecvError::Empty) => {
                        trace!("no packet to recv");
                        break;
                    }
                }
            }

            // Absolute pacing: sleep to the i-th tick, so jitter does not
            // accumulate across iterations.
            let target_time = i * INTERVAL;
            let real_time = Instant::now().duration_since(loop_start);
            let sleep = target_time.saturating_sub(real_time);
            if !sleep.is_zero() {
                std::thread::sleep(sleep);
            }
        }
        shutdown.cancel();
        Ok(())
    }
}
|
||||
|
||||
impl Drop for AudioTrack {
    fn drop(&mut self) {
        // Stop the decode thread and let `stopped()` futures resolve.
        self.shutdown_token.cancel();
    }
}
|
||||
|
||||
/// A playing video track: a frame stream plus its control handle.
/// Split into both halves with [`WatchTrack::split`].
pub struct WatchTrack {
    /// Receiving side: decoded frames ready for display.
    video_frames: WatchTrackFrames,
    /// Control side: rendition name, viewport, lifetime guard.
    handle: WatchTrackHandle,
}
|
||||
|
||||
/// Control half of a [`WatchTrack`]; dropping it tears the track down.
pub struct WatchTrackHandle {
    /// Rendition (track) name being watched.
    rendition: String,
    /// Target render size (w, h); watched by the decode/rescale loop.
    viewport: Watchable<(u32, u32)>,
    // Owns the task/thread handles and the shutdown drop-guard.
    _guard: WatchTrackGuard,
}
|
||||
|
||||
impl WatchTrackHandle {
|
||||
pub fn set_viewport(&self, w: u32, h: u32) {
|
||||
self.viewport.set((w, h)).ok();
|
||||
}
|
||||
|
||||
pub fn rendition(&self) -> &str {
|
||||
&self.rendition
|
||||
}
|
||||
}
|
||||
|
||||
/// Frame half of a [`WatchTrack`]: a stream of decoded frames.
pub struct WatchTrackFrames {
    // Fed by the decode thread; closed when the track shuts down.
    rx: mpsc::Receiver<DecodedFrame>,
}
|
||||
|
||||
impl WatchTrackFrames {
|
||||
pub fn current_frame(&mut self) -> Option<DecodedFrame> {
|
||||
let mut out = None;
|
||||
while let Ok(item) = self.rx.try_recv() {
|
||||
out = Some(item);
|
||||
}
|
||||
out
|
||||
}
|
||||
|
||||
pub async fn next_frame(&mut self) -> Option<DecodedFrame> {
|
||||
if let Some(frame) = self.current_frame() {
|
||||
Some(frame)
|
||||
} else {
|
||||
self.rx.recv().await
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// RAII bundle keeping a watch track's resources alive.
///
/// Dropping it cancels the shutdown token (via the drop-guard), aborts the
/// forwarding task, and releases the decode thread handle.
struct WatchTrackGuard {
    _shutdown_token_guard: DropGuard,
    // Present for consumer-backed tracks; `None` for source-backed ones.
    _task_handle: Option<AbortOnDropHandle<()>>,
    // Present for tracks with a dedicated decode/rescale thread.
    _thread_handle: Option<std::thread::JoinHandle<()>>,
}
|
||||
|
||||
impl WatchTrack {
    /// A track that never yields frames; useful as a placeholder.
    pub fn empty(rendition: impl ToString) -> Self {
        let (tx, rx) = mpsc::channel(1);
        // Park a task holding `tx` forever so `rx` never reports closed.
        let task = tokio::task::spawn(async move {
            std::future::pending::<()>().await;
            let _ = tx;
        });
        let guard = WatchTrackGuard {
            _shutdown_token_guard: CancellationToken::new().drop_guard(),
            _task_handle: Some(AbortOnDropHandle::new(task)),
            _thread_handle: None,
        };
        Self {
            video_frames: WatchTrackFrames { rx },
            handle: WatchTrackHandle {
                rendition: rendition.to_string(),
                viewport: Default::default(),
                _guard: guard,
            },
        }
    }

    /// Build a watch track from a *local* video source (e.g. a camera
    /// preview), rescaling frames to the current viewport on a thread.
    pub fn from_video_source(
        rendition: String,
        shutdown: CancellationToken,
        mut source: impl VideoSource,
        decode_config: DecodeConfig,
    ) -> Self {
        let viewport = Watchable::new((1u32, 1u32));
        let (frame_tx, frame_rx) = tokio::sync::mpsc::channel::<DecodedFrame>(2);
        let thread_name = format!("vpr-{:>4}-{:>4}", source.name(), rendition);
        let thread = spawn_thread(thread_name, {
            let mut viewport = viewport.watch();
            let shutdown = shutdown.clone();
            move || {
                // TODO: Make configurable.
                let fps = 30;
                let mut rescaler = Rescaler::new(decode_config.pixel_format.to_ffmpeg(), None)
                    .expect("failed to create rescaler");
                let frame_duration = Duration::from_secs_f32(1. / fps as f32);
                if let Err(err) = source.start() {
                    warn!("Video source failed to start: {err:?}");
                    return;
                }
                let start = Instant::now();
                for i in 1.. {
                    // let t = Instant::now();
                    if shutdown.is_cancelled() {
                        break;
                    }
                    // Apply viewport changes before rescaling this frame.
                    if viewport.update() {
                        let (w, h) = viewport.peek();
                        rescaler.set_target_dimensions(*w, *h);
                    }
                    match source.pop_frame() {
                        Ok(Some(frame)) => {
                            // trace!(t=?t.elapsed(), "pop");
                            let frame = frame.to_ffmpeg();
                            let frame = rescaler.process(&frame).expect("rescaler failed");
                            let frame =
                                DecodedFrame::from_ffmpeg(frame, frame_duration, start.elapsed());
                            // trace!(t=?t.elapsed(), "convert");
                            // Receiver gone is not an error; just keep pacing.
                            let _ = frame_tx.blocking_send(frame);
                            // trace!(t=?t.elapsed(), "send");
                        }
                        Ok(None) => {}
                        Err(_) => break,
                    }
                    // Absolute pacing against the loop start time.
                    let expected_time = i * frame_duration;
                    let actual_time = start.elapsed();
                    if expected_time > actual_time {
                        std::thread::sleep(expected_time - actual_time);
                        // trace!(t=?t.elapsed(), slept=?(actual_time - expected_time), ?expected_time, ?actual_time, "done");
                    }
                }
                if let Err(err) = source.stop() {
                    warn!("Video source failed to stop: {err:?}");
                    return;
                }
            }
        });
        let guard = WatchTrackGuard {
            _shutdown_token_guard: shutdown.drop_guard(),
            _task_handle: None,
            _thread_handle: Some(thread),
        };
        WatchTrack {
            video_frames: WatchTrackFrames { rx: frame_rx },
            handle: WatchTrackHandle {
                rendition,
                viewport,
                _guard: guard,
            },
        }
    }

    /// Build a watch track from a *remote* track consumer: an async task
    /// forwards packets into a channel, a thread decodes them.
    ///
    /// # Errors
    /// Fails if the decoder cannot be constructed for `config`.
    pub(crate) fn from_consumer<D: VideoDecoder>(
        rendition: String,
        consumer: TrackConsumer,
        config: &VideoConfig,
        playback_config: &DecodeConfig,
        shutdown: CancellationToken,
        span: Span,
    ) -> Result<Self> {
        let (packet_tx, packet_rx) = mpsc::channel(32);
        let (frame_tx, frame_rx) = mpsc::channel(32);
        let viewport = Watchable::new((1u32, 1u32));
        let viewport_watcher = viewport.watch();

        let _guard = span.enter();
        debug!(?config, "video decoder start");
        let decoder = D::new(config, playback_config)?;
        let thread_name = format!("vdec-{}", rendition);
        let thread = spawn_thread(thread_name, {
            let shutdown = shutdown.clone();
            let span = span.clone();
            move || {
                let _guard = span.enter();
                if let Err(err) =
                    Self::run_loop(&shutdown, packet_rx, frame_tx, viewport_watcher, decoder)
                {
                    error!("video decoder failed: {err:#}");
                }
                // Ensure consumers observe shutdown however the loop ended.
                shutdown.cancel();
            }
        });
        let task = tokio::task::spawn(forward_frames(consumer, packet_tx));
        let guard = WatchTrackGuard {
            _shutdown_token_guard: shutdown.drop_guard(),
            _task_handle: Some(AbortOnDropHandle::new(task)),
            _thread_handle: Some(thread),
        };
        Ok(WatchTrack {
            video_frames: WatchTrackFrames { rx: frame_rx },
            handle: WatchTrackHandle {
                rendition,
                viewport,
                _guard: guard,
            },
        })
    }

    /// Split into the frame stream and the control handle.
    pub fn split(self) -> (WatchTrackFrames, WatchTrackHandle) {
        (self.video_frames, self.handle)
    }

    /// Update the target render size.
    pub fn set_viewport(&self, w: u32, h: u32) {
        self.handle.set_viewport(w, h);
    }

    /// Name of the rendition being watched.
    pub fn rendition(&self) -> &str {
        self.handle.rendition()
    }

    /// Newest pending frame, discarding older queued frames.
    pub fn current_frame(&mut self) -> Option<DecodedFrame> {
        self.video_frames.current_frame()
    }

    /// Blocking decode loop: receive packets, decode, send frames out.
    /// Exits on cancellation or when either channel closes.
    pub(crate) fn run_loop(
        shutdown: &CancellationToken,
        mut input_rx: mpsc::Receiver<hang::Frame>,
        output_tx: mpsc::Sender<DecodedFrame>,
        mut viewport_watcher: n0_watcher::Direct<(u32, u32)>,
        mut decoder: impl VideoDecoder,
    ) -> Result<(), anyhow::Error> {
        loop {
            if shutdown.is_cancelled() {
                break;
            }
            let Some(packet) = input_rx.blocking_recv() else {
                break;
            };
            // Apply viewport changes before decoding this packet.
            if viewport_watcher.update() {
                let (w, h) = viewport_watcher.peek();
                decoder.set_viewport(*w, *h);
            }
            let t = Instant::now();
            decoder
                .push_packet(packet)
                .context("failed to push packet")?;
            trace!(t=?t.elapsed(), "videodec: push_packet");
            while let Some(frame) = decoder.pop_frame().context("failed to pop frame")? {
                trace!(t=?t.elapsed(), "videodec: pop frame");
                if output_tx.blocking_send(frame).is_err() {
                    break;
                }
                trace!(t=?t.elapsed(), "videodec: tx");
            }
        }
        Ok(())
    }
}
|
||||
|
||||
async fn forward_frames(mut track: hang::TrackConsumer, sender: mpsc::Sender<hang::Frame>) {
|
||||
loop {
|
||||
let frame = track.read_frame().await;
|
||||
match frame {
|
||||
Ok(Some(frame)) => {
|
||||
if sender.send(frame).await.is_err() {
|
||||
break;
|
||||
}
|
||||
}
|
||||
Ok(None) => break,
|
||||
Err(err) => {
|
||||
warn!("failed to read frame: {err:?}");
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// A remote broadcast with its (optionally present) audio and video tracks.
pub struct AvRemoteTrack {
    pub broadcast: SubscribeBroadcast,
    /// `None` when the broadcast publishes no video (see `new`).
    pub video: Option<WatchTrack>,
    /// `None` when the broadcast publishes no audio (see `new`).
    pub audio: Option<AudioTrack>,
}
|
||||
|
||||
impl AvRemoteTrack {
    /// Subscribe to both audio and video with the decoder bundle `D`.
    ///
    /// A missing or failing track is logged and tolerated (stored as
    /// `None`), so a broadcast with only one media kind still plays.
    pub fn new<D: Decoders>(
        broadcast: SubscribeBroadcast,
        audio_out: impl AudioSink,
        playback_config: PlaybackConfig,
    ) -> Result<Self> {
        let audio = broadcast
            .listen_with::<D::Audio>(playback_config.quality, audio_out)
            .inspect_err(|err| tracing::warn!("no audio track: {err}"))
            .ok();
        let video = broadcast
            .watch_with::<D::Video>(&playback_config.decode_config, playback_config.quality)
            .inspect_err(|err| tracing::warn!("no video track: {err}"))
            .ok();
        Ok(Self {
            broadcast,
            audio,
            video,
        })
    }
}
|
||||
12
third_party/iroh-live/moq-media/src/util.rs
vendored
Normal file
12
third_party/iroh-live/moq-media/src/util.rs
vendored
Normal file
|
|
@ -0,0 +1,12 @@
|
|||
/// Spawn a named OS thread and panic if spawning fails.
///
/// Spawn failure (an OS-level error) is treated as a fatal bug, matching the
/// original contract; the panic message includes the thread name and error.
pub fn spawn_thread<F, T>(name: impl ToString, f: F) -> std::thread::JoinHandle<T>
where
    F: FnOnce() -> T + Send + 'static,
    T: Send + 'static,
{
    let name_str = name.to_string();
    std::thread::Builder::new()
        .name(name_str.clone())
        .spawn(f)
        // unwrap_or_else avoids building the panic message on the happy
        // path (clippy::expect_fun_call with the former `.expect(&format!)`).
        .unwrap_or_else(|err| panic!("failed to spawn thread {name_str}: {err}"))
}
|
||||
35
third_party/iroh-live/web-transport-iroh/Cargo.toml
vendored
Normal file
35
third_party/iroh-live/web-transport-iroh/Cargo.toml
vendored
Normal file
|
|
@ -0,0 +1,35 @@
|
|||
[package]
|
||||
name = "web-transport-iroh"
|
||||
description = "WebTransport library for Iroh"
|
||||
authors = ["Franz Heinzmann <frando@n0.computer>"]
|
||||
repository = "https://github.com/n0-computer/iroh-live"
|
||||
license = "MIT OR Apache-2.0"
|
||||
|
||||
version = "0.1.1"
|
||||
edition = "2024"
|
||||
|
||||
keywords = ["quic", "http3", "webtransport", "iroh"]
|
||||
categories = ["network-programming", "web-programming"]
|
||||
|
||||
[dependencies]
|
||||
bytes = "1"
|
||||
http = "1"
|
||||
iroh = "0.96"
|
||||
n0-error = "0.1.2"
|
||||
n0-future = "0.3.1"
|
||||
quinn = { package = "iroh-quinn", version = "0.16" }
|
||||
thiserror = "2"
|
||||
tokio = { version = "1", default-features = false, features = [
|
||||
"io-util",
|
||||
"macros",
|
||||
] }
|
||||
tracing = "0.1.41"
|
||||
url = "2"
|
||||
web-transport-proto = "0.3.0"
|
||||
web-transport-trait = "0.3.0"
|
||||
|
||||
[dev-dependencies]
|
||||
anyhow = "1"
|
||||
clap = { version = "4", features = ["derive"] }
|
||||
n0-tracing-test = "0.3.0"
|
||||
tokio = { version = "1", features = ["full"] }
|
||||
9
third_party/iroh-live/web-transport-iroh/README.md
vendored
Normal file
9
third_party/iroh-live/web-transport-iroh/README.md
vendored
Normal file
|
|
@ -0,0 +1,9 @@
|
|||
[](https://crates.io/crates/web-transport-quinn)
|
||||
[](https://docs.rs/web-transport-quinn)
|
||||
[](https://discord.gg/FCYF3p99mr)
|
||||
|
||||
# web-transport-iroh
|
||||
|
||||
A wrapper around the Iroh API, implementing the [`web-transport-trait`](https://github.com/kixelated/web-transport/tree/main/web-transport-trait) traits.
|
||||
|
||||
Note that this does *not* actually implement WebTransport for iroh. Instead, it implements the WebTransport traits on a raw iroh QUIC connection. Thus, you can use an iroh connection wherever the `web-transport-trait` traits are expected (i.e. in hang). But there is no actual HTTP/3 WebTransport session being established; we just use the raw iroh QUIC connection directly.
|
||||
71
third_party/iroh-live/web-transport-iroh/src/client.rs
vendored
Normal file
71
third_party/iroh-live/web-transport-iroh/src/client.rs
vendored
Normal file
|
|
@ -0,0 +1,71 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use iroh::{EndpointAddr, endpoint::{ConnectOptions, QuicTransportConfig}};
|
||||
use url::Url;
|
||||
|
||||
use crate::{ALPN_H3, ClientError, Session};
|
||||
|
||||
/// A client for connecting to a WebTransport server.
|
||||
pub struct Client {
|
||||
endpoint: iroh::Endpoint,
|
||||
config: QuicTransportConfig,
|
||||
}
|
||||
|
||||
impl Client {
    /// Create a client with the default QUIC transport config.
    pub fn new(endpoint: iroh::Endpoint) -> Self {
        Self::with_transport_config(endpoint, Default::default())
    }

    /// Creates a client from an endpoint and a transport config.
    pub fn with_transport_config(
        endpoint: iroh::Endpoint,
        config: QuicTransportConfig,
    ) -> Self {
        Self { endpoint, config }
    }

    /// Connect to a server over QUIC without HTTP/3.
    ///
    /// # Errors
    /// Returns [`ClientError::Connect`] if dialing or the handshake fails.
    pub async fn connect_quic(
        &self,
        addr: impl Into<EndpointAddr>,
        alpn: &[u8],
    ) -> Result<Session, ClientError> {
        let conn = self.connect(addr, alpn).await?;
        Ok(Session::raw(conn))
    }

    /// Connect with a full HTTP/3 handshake and WebTransport semantics.
    ///
    /// Note that the url needs to have a `https:` scheme, otherwise the accepting side will
    /// fail to accept the connection.
    pub async fn connect_h3(
        &self,
        addr: impl Into<EndpointAddr>,
        url: Url,
    ) -> Result<Session, ClientError> {
        let conn = self.connect(addr, ALPN_H3.as_bytes()).await?;
        // Connect with the connection we established.
        Session::connect_h3(conn, url).await
    }

    /// Dial `addr` with `alpn`, applying this client's transport config.
    async fn connect(
        &self,
        addr: impl Into<EndpointAddr>,
        alpn: &[u8],
    ) -> Result<iroh::endpoint::Connection, ClientError> {
        let opts = ConnectOptions::new().with_transport_config(self.config.clone());
        // First await starts the dial; errors here are setup/address errors.
        let conn = self
            .endpoint
            .connect_with_opts(addr, alpn, opts)
            .await
            .map_err(|err| ClientError::Connect(Arc::new(err.into())))?;
        // Second await completes the connection handshake itself.
        let conn = conn
            .await
            .map_err(|err| ClientError::Connect(Arc::new(err.into())))?;
        Ok(conn)
    }

    /// Close the underlying endpoint, terminating all its connections.
    pub async fn close(&self) {
        self.endpoint.close().await;
    }
}
|
||||
125
third_party/iroh-live/web-transport-iroh/src/connect.rs
vendored
Normal file
125
third_party/iroh-live/web-transport-iroh/src/connect.rs
vendored
Normal file
|
|
@ -0,0 +1,125 @@
|
|||
use web_transport_proto::{ConnectRequest, ConnectResponse, VarInt};
|
||||
|
||||
use thiserror::Error;
|
||||
use url::Url;
|
||||
|
||||
/// Errors that can occur during the WebTransport CONNECT exchange.
#[derive(Error, Debug, Clone)]
pub enum ConnectError {
    /// The peer closed the CONNECT stream before the exchange completed.
    #[error("quic stream was closed early")]
    UnexpectedEnd,

    /// The CONNECT request/response could not be encoded or decoded.
    #[error("protocol error: {0}")]
    ProtoError(#[from] web_transport_proto::ConnectError),

    /// The underlying QUIC connection failed.
    #[error("connection error")]
    ConnectionError(#[from] iroh::endpoint::ConnectionError),

    /// Reading from the CONNECT stream failed.
    #[error("read error")]
    ReadError(#[from] quinn::ReadError),

    /// Writing to the CONNECT stream failed.
    #[error("write error")]
    WriteError(#[from] quinn::WriteError),

    /// The server answered the CONNECT request with a status other than 200 OK.
    #[error("http error status: {0}")]
    ErrorStatus(http::StatusCode),
}
|
||||
|
||||
/// An accepted or opened WebTransport CONNECT exchange.
///
/// Keeping this value alive keeps the CONNECT bidi stream open; dropping it
/// closes the stream.
pub struct Connect {
    // The request that was sent by the client.
    request: ConnectRequest,

    // A reference to the send/recv stream, so we don't close it until dropped.
    send: quinn::SendStream,

    #[allow(dead_code)]
    recv: quinn::RecvStream,
}
|
||||
|
||||
impl Connect {
    /// Accept the HTTP/3 CONNECT request from a client (server side).
    ///
    /// No response is sent yet; call [`Connect::respond`] to accept or reject.
    pub async fn accept(conn: &iroh::endpoint::Connection) -> Result<Self, ConnectError> {
        // Accept the stream that will be used to send the HTTP CONNECT request.
        // If they try to send any other type of HTTP request, we will error out.
        let (send, mut recv) = conn.accept_bi().await?;

        let request = web_transport_proto::ConnectRequest::read(&mut recv).await?;
        tracing::debug!("received CONNECT request: {request:?}");

        // The request was successfully decoded, so we can send a response.
        Ok(Self {
            request,
            send,
            recv,
        })
    }

    /// Called by the server to send a response to the client.
    pub async fn respond(&mut self, status: http::StatusCode) -> Result<(), ConnectError> {
        let resp = ConnectResponse { status };

        tracing::debug!("sending CONNECT response: {resp:?}");
        resp.write(&mut self.send).await?;

        Ok(())
    }

    /// Send a CONNECT request for `url` and wait for the response (client side).
    ///
    /// Returns an error unless the server answers with 200 OK.
    pub async fn open(conn: &iroh::endpoint::Connection, url: Url) -> Result<Self, ConnectError> {
        // Create a new stream that will be used to send the CONNECT frame.
        let (mut send, mut recv) = conn.open_bi().await?;

        // Create a new CONNECT request that we'll send using HTTP/3
        let request = ConnectRequest { url };

        tracing::debug!("sending CONNECT request: {request:?}");
        request.write(&mut send).await?;

        let response = web_transport_proto::ConnectResponse::read(&mut recv).await?;
        tracing::debug!("received CONNECT response: {response:?}");

        // Throw an error if we didn't get a 200 OK.
        if response.status != http::StatusCode::OK {
            return Err(ConnectError::ErrorStatus(response.status));
        }

        Ok(Self {
            request,
            send,
            recv,
        })
    }

    /// The session ID is the stream ID of the CONNECT request.
    pub fn session_id(&self) -> VarInt {
        // We gotta convert from the Quinn VarInt to the (forked) WebTransport VarInt.
        // We don't use the quinn::VarInt because that would mean a quinn dependency in web-transport-proto
        let stream_id = quinn::VarInt::from(self.send.id());
        VarInt::try_from(stream_id.into_inner()).unwrap()
    }

    /// The URL in the CONNECT request.
    pub fn url(&self) -> &Url {
        &self.request.url
    }

    // Consume the handshake, handing back the raw stream halves.
    pub(super) fn into_inner(self) -> (quinn::SendStream, quinn::RecvStream) {
        (self.send, self.recv)
    }

    // Keep reading from the control stream until it's closed.
    // Returns the (code, reason) from the CLOSE_WEBTRANSPORT_SESSION capsule,
    // or (1, "capsule error") if the stream failed before one arrived.
    pub(crate) async fn run_closed(self) -> (u32, String) {
        let (_send, mut recv) = self.into_inner();

        loop {
            match web_transport_proto::Capsule::read(&mut recv).await {
                Ok(web_transport_proto::Capsule::CloseWebTransportSession { code, reason }) => {
                    return (code, reason);
                }
                // Unknown capsule types are ignored per spec; keep reading.
                Ok(web_transport_proto::Capsule::Unknown { typ, payload }) => {
                    tracing::warn!("unknown capsule: type={typ} size={}", payload.len());
                }
                Err(_) => {
                    return (1, "capsule error".to_string());
                }
            }
        }
    }
}
|
||||
256
third_party/iroh-live/web-transport-iroh/src/error.rs
vendored
Normal file
256
third_party/iroh-live/web-transport-iroh/src/error.rs
vendored
Normal file
|
|
@ -0,0 +1,256 @@
|
|||
use std::sync::Arc;
|
||||
|
||||
use n0_error::stack_error;
|
||||
use thiserror::Error;
|
||||
|
||||
use crate::{ConnectError, SettingsError};
|
||||
|
||||
/// An error returned when connecting to a WebTransport endpoint.
#[stack_error(derive, from_sources)]
#[derive(Clone)]
pub enum ClientError {
    /// The peer closed a stream before a full message was read.
    #[error("unexpected end of stream")]
    UnexpectedEnd,

    // Arc-wrapped because the source error is not Clone.
    #[error("failed to connect")]
    Connect(#[error(source)] Arc<iroh::endpoint::ConnectError>),

    #[error("connection failed")]
    Connection(#[error(source, std_err)] iroh::endpoint::ConnectionError),

    #[error("failed to write")]
    WriteError(#[error(source, std_err)] quinn::WriteError),

    #[error("failed to read")]
    ReadError(#[error(source, std_err)] quinn::ReadError),

    /// The HTTP/3 SETTINGS handshake failed.
    #[error("failed to exchange h3 settings")]
    SettingsError(#[error(from, source, std_err)] SettingsError),

    /// The HTTP/3 CONNECT handshake failed.
    #[error("failed to exchange h3 connect")]
    HttpError(#[error(from, source, std_err)] ConnectError),

    #[error("invalid URL")]
    InvalidUrl,

    #[error("endpoint failed to bind")]
    Bind(#[error(source)] Arc<iroh::endpoint::BindError>),
}
|
||||
|
||||
/// An error returned by [`crate::Session`], split based on if they are underlying QUIC errors or WebTransport errors.
#[derive(Clone, Error, Debug)]
pub enum SessionError {
    /// The QUIC connection itself failed or was closed.
    #[error("connection error: {0}")]
    ConnectionError(#[from] iroh::endpoint::ConnectionError),

    /// A WebTransport-layer failure (session close, framing, unknown session).
    #[error("webtransport error: {0}")]
    WebTransportError(#[from] WebTransportError),

    #[error("send datagram error: {0}")]
    SendDatagramError(#[from] quinn::SendDatagramError),
}
|
||||
|
||||
/// An error that can occur when reading/writing the WebTransport stream header.
#[derive(Clone, Error, Debug)]
pub enum WebTransportError {
    /// The session was closed with an application code and reason.
    #[error("closed: code={0} reason={1}")]
    Closed(u32, String),

    /// A stream or datagram referenced a session ID we don't know about.
    #[error("unknown session")]
    UnknownSession,

    #[error("read error: {0}")]
    ReadError(#[from] quinn::ReadExactError),

    #[error("write error: {0}")]
    WriteError(#[from] quinn::WriteError),
}
|
||||
|
||||
/// An error when writing to [`crate::SendStream`]. Similar to [`quinn::WriteError`].
#[derive(Clone, Error, Debug)]
pub enum WriteError {
    /// The peer sent STOP_SENDING with a valid WebTransport error code.
    #[error("STOP_SENDING: {0}")]
    Stopped(u32),

    /// The peer's STOP_SENDING code was outside the WebTransport error space.
    #[error("invalid STOP_SENDING: {0}")]
    InvalidStopped(quinn::VarInt),

    #[error("session error: {0}")]
    SessionError(#[from] SessionError),

    #[error("stream closed")]
    ClosedStream,
}
|
||||
|
||||
impl From<quinn::WriteError> for WriteError {
|
||||
fn from(e: quinn::WriteError) -> Self {
|
||||
match e {
|
||||
quinn::WriteError::Stopped(code) => {
|
||||
match web_transport_proto::error_from_http3(code.into_inner()) {
|
||||
Some(code) => WriteError::Stopped(code),
|
||||
None => WriteError::InvalidStopped(code),
|
||||
}
|
||||
}
|
||||
quinn::WriteError::ClosedStream => WriteError::ClosedStream,
|
||||
quinn::WriteError::ConnectionLost(e) => WriteError::SessionError(e.into()),
|
||||
quinn::WriteError::ZeroRttRejected => unreachable!("0-RTT not supported"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An error when reading from [`crate::RecvStream`]. Similar to [`quinn::ReadError`].
#[derive(Clone, Error, Debug)]
pub enum ReadError {
    #[error("session error: {0}")]
    SessionError(#[from] SessionError),

    /// The peer reset the stream with a valid WebTransport error code.
    #[error("RESET_STREAM: {0}")]
    Reset(u32),

    /// The peer's RESET_STREAM code was outside the WebTransport error space.
    #[error("invalid RESET_STREAM: {0}")]
    InvalidReset(quinn::VarInt),

    #[error("stream already closed")]
    ClosedStream,
}
|
||||
|
||||
impl From<quinn::ReadError> for ReadError {
|
||||
fn from(value: quinn::ReadError) -> Self {
|
||||
match value {
|
||||
quinn::ReadError::Reset(code) => {
|
||||
match web_transport_proto::error_from_http3(code.into_inner()) {
|
||||
Some(code) => ReadError::Reset(code),
|
||||
None => ReadError::InvalidReset(code),
|
||||
}
|
||||
}
|
||||
quinn::ReadError::ConnectionLost(e) => ReadError::SessionError(e.into()),
|
||||
quinn::ReadError::ClosedStream => ReadError::ClosedStream,
|
||||
quinn::ReadError::ZeroRttRejected => unreachable!("0-RTT not supported"),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An error returned by [`crate::RecvStream::read_exact`]. Similar to [`quinn::ReadExactError`].
#[derive(Clone, Error, Debug)]
pub enum ReadExactError {
    /// The stream finished before the buffer was filled; holds the bytes read.
    #[error("finished early")]
    FinishedEarly(usize),

    #[error("read error: {0}")]
    ReadError(#[from] ReadError),
}
|
||||
|
||||
impl From<quinn::ReadExactError> for ReadExactError {
|
||||
fn from(e: quinn::ReadExactError) -> Self {
|
||||
match e {
|
||||
quinn::ReadExactError::FinishedEarly(size) => ReadExactError::FinishedEarly(size),
|
||||
quinn::ReadExactError::ReadError(e) => ReadExactError::ReadError(e.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An error returned by [`crate::RecvStream::read_to_end`]. Similar to [`quinn::ReadToEndError`].
#[derive(Clone, Error, Debug)]
pub enum ReadToEndError {
    /// The stream exceeded the caller-supplied size limit.
    #[error("too long")]
    TooLong,

    #[error("read error: {0}")]
    ReadError(#[from] ReadError),
}
|
||||
|
||||
impl From<quinn::ReadToEndError> for ReadToEndError {
|
||||
fn from(e: quinn::ReadToEndError) -> Self {
|
||||
match e {
|
||||
quinn::ReadToEndError::TooLong => ReadToEndError::TooLong,
|
||||
quinn::ReadToEndError::Read(e) => ReadToEndError::ReadError(e.into()),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
/// An error indicating the stream was already closed.
// Unit struct mirroring [`quinn::ClosedStream`] so callers don't depend on quinn.
#[derive(Clone, Error, Debug)]
#[error("stream closed")]
pub struct ClosedStream;
|
||||
|
||||
impl From<quinn::ClosedStream> for ClosedStream {
|
||||
fn from(_: quinn::ClosedStream) -> Self {
|
||||
ClosedStream
|
||||
}
|
||||
}
|
||||
|
||||
/// An error returned when receiving a new WebTransport session.
#[stack_error(derive, from_sources)]
#[derive(Clone)]
pub enum ServerError {
    /// The peer closed a stream before a full message was read.
    #[error("unexpected end of stream")]
    UnexpectedEnd,

    #[error("connection failed")]
    Connection(#[error(source, std_err)] iroh::endpoint::ConnectionError),

    // Arc-wrapped because the source error is not Clone.
    #[error("connection failed during handshake")]
    Connecting(#[error(source)] Arc<iroh::endpoint::ConnectingError>),

    #[error("failed to write")]
    WriteError(#[error(source, std_err)] quinn::WriteError),

    #[error("failed to read")]
    ReadError(#[error(source, std_err)] quinn::ReadError),

    #[error("io error")]
    IoError(#[error(source)] Arc<std::io::Error>),

    #[error("failed to bind endpoint")]
    Bind(#[error(source)] Arc<iroh::endpoint::BindError>),

    /// The HTTP/3 CONNECT handshake failed.
    #[error("failed to exchange h3 connect")]
    HttpError(#[error(source, from, std_err)] ConnectError),

    /// The HTTP/3 SETTINGS handshake failed.
    #[error("failed to exchange h3 settings")]
    SettingsError(#[error(source, from, std_err)] SettingsError),
}
|
||||
|
||||
impl web_transport_trait::Error for SessionError {
|
||||
fn session_error(&self) -> Option<(u32, String)> {
|
||||
if let SessionError::WebTransportError(WebTransportError::Closed(code, reason)) = self {
|
||||
return Some((*code, reason.to_string()));
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
}
|
||||
|
||||
impl web_transport_trait::Error for WriteError {
|
||||
fn session_error(&self) -> Option<(u32, String)> {
|
||||
if let WriteError::SessionError(e) = self {
|
||||
return e.session_error();
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn stream_error(&self) -> Option<u32> {
|
||||
match self {
|
||||
WriteError::Stopped(code) => Some(*code),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
impl web_transport_trait::Error for ReadError {
|
||||
fn session_error(&self) -> Option<(u32, String)> {
|
||||
if let ReadError::SessionError(e) = self {
|
||||
return e.session_error();
|
||||
}
|
||||
|
||||
None
|
||||
}
|
||||
|
||||
fn stream_error(&self) -> Option<u32> {
|
||||
match self {
|
||||
ReadError::Reset(code) => Some(*code),
|
||||
_ => None,
|
||||
}
|
||||
}
|
||||
}
|
||||
56
third_party/iroh-live/web-transport-iroh/src/lib.rs
vendored
Normal file
56
third_party/iroh-live/web-transport-iroh/src/lib.rs
vendored
Normal file
|
|
@ -0,0 +1,56 @@
|
|||
//! WebTransport is a protocol for client-server communication over QUIC.
|
||||
//! It's [available in the browser](https://caniuse.com/webtransport) as an alternative to HTTP and WebSockets.
|
||||
//!
|
||||
//! WebTransport is layered on top of HTTP/3 which is then layered on top of QUIC.
|
||||
//! This library hides that detail and tries to expose only the QUIC API, delegating as much as possible to the underlying implementation.
|
||||
//! See the [Quinn documentation](https://docs.rs/quinn/latest/quinn/) for more documentation.
|
||||
//!
|
||||
//! QUIC provides two primary APIs:
|
||||
//!
|
||||
//! # Streams
|
||||
//! QUIC streams are ordered, reliable, flow-controlled, and optionally bidirectional.
|
||||
//! Both endpoints can create and close streams (including an error code) with no overhead.
|
||||
//! You can think of them as TCP connections, but shared over a single QUIC connection.
|
||||
//!
|
||||
//! # Datagrams
|
||||
//! QUIC datagrams are unordered, unreliable, and not flow-controlled.
|
||||
//! Both endpoints can send datagrams below the MTU size (~1.2kb minimum) and they might arrive out of order or not at all.
|
||||
//! They are basically UDP packets, except they are encrypted and congestion controlled.
|
||||
//!
|
||||
//! # Limitations
|
||||
//! WebTransport is able to be pooled with HTTP/3 and multiple WebTransport sessions.
|
||||
//! This crate avoids that complexity, doing the bare minimum to support a single WebTransport session that owns the entire QUIC connection.
|
||||
//! If you want to support HTTP/3 on the same host/port, you should use another crate (ex. `h3-webtransport`).
|
||||
//! If you want to support multiple WebTransport sessions over the same QUIC connection... you should just dial a new QUIC connection instead.
|
||||
|
||||
// External
|
||||
mod client;
|
||||
mod connect;
|
||||
mod error;
|
||||
mod recv;
|
||||
mod send;
|
||||
mod server;
|
||||
mod session;
|
||||
mod settings;
|
||||
#[cfg(test)]
|
||||
mod tests;
|
||||
|
||||
pub use client::*;
|
||||
pub use connect::*;
|
||||
pub use error::*;
|
||||
pub use recv::*;
|
||||
pub use send::*;
|
||||
pub use server::*;
|
||||
pub use session::*;
|
||||
pub use settings::*;
|
||||
|
||||
/// The HTTP/3 ALPN is required when negotiating a QUIC connection.
// WebTransport rides on HTTP/3, so the connection must negotiate "h3".
pub const ALPN_H3: &str = "h3";
|
||||
|
||||
/// Re-export the http crate because it's in the public API.
|
||||
pub use http;
|
||||
pub use iroh;
|
||||
/// Re-export the underlying QUIC implementation.
|
||||
pub use quinn;
|
||||
/// Re-export the generic WebTransport implementation.
|
||||
pub use web_transport_trait as generic;
|
||||
111
third_party/iroh-live/web-transport-iroh/src/recv.rs
vendored
Normal file
111
third_party/iroh-live/web-transport-iroh/src/recv.rs
vendored
Normal file
|
|
@ -0,0 +1,111 @@
|
|||
use std::{
|
||||
io,
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use bytes::Bytes;
|
||||
|
||||
use crate::{ReadError, ReadExactError, ReadToEndError, SessionError};
|
||||
|
||||
/// A stream that can be used to receive bytes. See [`quinn::RecvStream`].
#[derive(Debug)]
pub struct RecvStream {
    // The wrapped QUIC receive stream; the WebTransport header has already
    // been consumed by the time this wrapper is constructed.
    inner: quinn::RecvStream,
}
|
||||
|
||||
impl RecvStream {
    // Wrap a raw QUIC receive stream (header already stripped by the session).
    pub(crate) fn new(stream: quinn::RecvStream) -> Self {
        Self { inner: stream }
    }

    /// Tell the other end to stop sending data with the given error code. See [`quinn::RecvStream::stop`].
    /// This is a u32 with WebTransport since it shares the error space with HTTP/3.
    pub fn stop(&mut self, code: u32) -> Result<(), quinn::ClosedStream> {
        // Map the 32-bit WebTransport code into the reserved HTTP/3 range.
        let code = web_transport_proto::error_to_http3(code);
        let code = quinn::VarInt::try_from(code).unwrap();
        self.inner.stop(code)
    }

    // Unfortunately, we have to wrap ReadError for a bunch of functions.

    /// Read some data into the buffer and return the amount read. See [`quinn::RecvStream::read`].
    pub async fn read(&mut self, buf: &mut [u8]) -> Result<Option<usize>, ReadError> {
        self.inner.read(buf).await.map_err(Into::into)
    }

    /// Fill the entire buffer with data. See [`quinn::RecvStream::read_exact`].
    pub async fn read_exact(&mut self, buf: &mut [u8]) -> Result<(), ReadExactError> {
        self.inner.read_exact(buf).await.map_err(Into::into)
    }

    /// Read a chunk of data from the stream. See [`quinn::RecvStream::read_chunk`].
    pub async fn read_chunk(
        &mut self,
        max_length: usize,
    ) -> Result<Option<quinn::Chunk>, ReadError> {
        self.inner
            .read_chunk(max_length)
            .await
            .map_err(Into::into)
    }

    /// Read chunks of data from the stream. See [`quinn::RecvStream::read_chunks`].
    pub async fn read_chunks(&mut self, bufs: &mut [Bytes]) -> Result<Option<usize>, ReadError> {
        self.inner.read_chunks(bufs).await.map_err(Into::into)
    }

    /// Read until the end of the stream or the limit is hit. See [`quinn::RecvStream::read_to_end`].
    pub async fn read_to_end(&mut self, size_limit: usize) -> Result<Vec<u8>, ReadToEndError> {
        self.inner.read_to_end(size_limit).await.map_err(Into::into)
    }

    /// Block until the stream has been reset and return the error code. See [`quinn::RecvStream::received_reset`].
    ///
    /// Unlike Quinn, this returns a SessionError, not a ResetError, because 0-RTT is not supported.
    pub async fn received_reset(&mut self) -> Result<Option<u32>, SessionError> {
        match self.inner.received_reset().await {
            Ok(None) => Ok(None),
            // NOTE: unwrap assumes the peer used a code in the WebTransport range.
            Ok(Some(code)) => Ok(Some(
                web_transport_proto::error_from_http3(code.into_inner()).unwrap(),
            )),
            Err(quinn::ResetError::ConnectionLost(e)) => Err(e.into()),
            Err(quinn::ResetError::ZeroRttRejected) => unreachable!("0-RTT not supported"),
        }
    }

    // We purposely don't expose the stream ID or 0RTT because it's not valid with WebTransport
}
|
||||
|
||||
// Plain delegation so RecvStream can be used with tokio I/O utilities.
impl tokio::io::AsyncRead for RecvStream {
    fn poll_read(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &mut tokio::io::ReadBuf,
    ) -> Poll<io::Result<()>> {
        Pin::new(&mut self.inner).poll_read(cx, buf)
    }
}
|
||||
|
||||
impl web_transport_trait::RecvStream for RecvStream {
|
||||
type Error = ReadError;
|
||||
|
||||
fn stop(&mut self, code: u32) {
|
||||
Self::stop(self, code).ok();
|
||||
}
|
||||
|
||||
async fn read(&mut self, dst: &mut [u8]) -> Result<Option<usize>, Self::Error> {
|
||||
self.read(dst).await
|
||||
}
|
||||
|
||||
async fn read_chunk(&mut self, max: usize) -> Result<Option<Bytes>, Self::Error> {
|
||||
self.read_chunk(max)
|
||||
.await
|
||||
.map(|r| r.map(|chunk| chunk.bytes))
|
||||
}
|
||||
|
||||
async fn closed(&mut self) -> Result<(), Self::Error> {
|
||||
self.received_reset().await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
142
third_party/iroh-live/web-transport-iroh/src/send.rs
vendored
Normal file
142
third_party/iroh-live/web-transport-iroh/src/send.rs
vendored
Normal file
|
|
@ -0,0 +1,142 @@
|
|||
use std::{
|
||||
io,
|
||||
pin::Pin,
|
||||
task::{Context, Poll},
|
||||
};
|
||||
|
||||
use bytes::{Buf, Bytes};
|
||||
|
||||
use crate::{ClosedStream, SessionError, WriteError};
|
||||
|
||||
/// A stream that can be used to send bytes. See [`quinn::SendStream`].
///
/// This wrapper is mainly needed for error codes, which is unfortunate.
/// WebTransport uses u32 error codes and they're mapped in a reserved HTTP/3 error space.
#[derive(Debug)]
pub struct SendStream {
    // The wrapped QUIC send stream; the WebTransport header has already been
    // written by the time this wrapper is constructed.
    stream: quinn::SendStream,
}
|
||||
|
||||
impl SendStream {
    // Wrap a raw QUIC send stream (header already written by the session).
    pub(crate) fn new(stream: quinn::SendStream) -> Self {
        Self { stream }
    }

    /// Abruptly reset the stream with the provided error code. See [`quinn::SendStream::reset`].
    /// This is a u32 with WebTransport because we share the error space with HTTP/3.
    pub fn reset(&mut self, code: u32) -> Result<(), ClosedStream> {
        // Map the 32-bit WebTransport code into the reserved HTTP/3 range.
        let code = web_transport_proto::error_to_http3(code);
        let code = quinn::VarInt::try_from(code).unwrap();
        self.stream.reset(code).map_err(Into::into)
    }

    /// Wait until the stream has been stopped and return the error code. See [`quinn::SendStream::stopped`].
    ///
    /// Unlike Quinn, this returns None if the code is not a valid WebTransport error code.
    /// Also unlike Quinn, this returns a SessionError, not a StoppedError, because 0-RTT is not supported.
    pub async fn stopped(&mut self) -> Result<Option<u32>, SessionError> {
        match self.stream.stopped().await {
            Ok(Some(code)) => Ok(web_transport_proto::error_from_http3(code.into_inner())),
            Ok(None) => Ok(None),
            Err(quinn::StoppedError::ConnectionLost(e)) => Err(e.into()),
            Err(quinn::StoppedError::ZeroRttRejected) => unreachable!("0-RTT not supported"),
        }
    }

    // Unfortunately, we have to wrap WriteError for a bunch of functions.

    /// Write some data to the stream, returning the size written. See [`quinn::SendStream::write`].
    pub async fn write(&mut self, buf: &[u8]) -> Result<usize, WriteError> {
        self.stream.write(buf).await.map_err(Into::into)
    }

    /// Write all of the data to the stream. See [`quinn::SendStream::write_all`].
    pub async fn write_all(&mut self, buf: &[u8]) -> Result<(), WriteError> {
        self.stream.write_all(buf).await.map_err(Into::into)
    }

    /// Write chunks of data to the stream. See [`quinn::SendStream::write_chunks`].
    pub async fn write_chunks(&mut self, bufs: &mut [Bytes]) -> Result<quinn::Written, WriteError> {
        self.stream.write_chunks(bufs).await.map_err(Into::into)
    }

    /// Write a chunk of data to the stream. See [`quinn::SendStream::write_chunk`].
    pub async fn write_chunk(&mut self, buf: Bytes) -> Result<(), WriteError> {
        self.stream.write_chunk(buf).await.map_err(Into::into)
    }

    /// Write all of the chunks of data to the stream. See [`quinn::SendStream::write_all_chunks`].
    pub async fn write_all_chunks(&mut self, bufs: &mut [Bytes]) -> Result<(), WriteError> {
        self.stream.write_all_chunks(bufs).await.map_err(Into::into)
    }

    /// Mark the stream as finished, such that no more data can be written. See [`quinn::SendStream::finish`].
    pub fn finish(&mut self) -> Result<(), ClosedStream> {
        self.stream.finish().map_err(Into::into)
    }

    /// Set the relative priority of this stream. See [`quinn::SendStream::set_priority`].
    pub fn set_priority(&self, order: i32) -> Result<(), ClosedStream> {
        self.stream.set_priority(order).map_err(Into::into)
    }

    /// Get the relative priority of this stream. See [`quinn::SendStream::priority`].
    pub fn priority(&self) -> Result<i32, ClosedStream> {
        self.stream.priority().map_err(Into::into)
    }
}
|
||||
|
||||
// Plain delegation so SendStream can be used with tokio I/O utilities.
impl tokio::io::AsyncWrite for SendStream {
    fn poll_write(
        mut self: Pin<&mut Self>,
        cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<io::Result<usize>> {
        // We have to use this syntax because quinn added its own poll_write method.
        tokio::io::AsyncWrite::poll_write(Pin::new(&mut self.stream), cx, buf)
    }

    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
        Pin::new(&mut self.stream).poll_flush(cx)
    }

    fn poll_shutdown(mut self: Pin<&mut Self>, cx: &mut Context) -> Poll<io::Result<()>> {
        Pin::new(&mut self.stream).poll_shutdown(cx)
    }
}
|
||||
|
||||
impl web_transport_trait::SendStream for SendStream {
|
||||
type Error = WriteError;
|
||||
|
||||
fn set_priority(&mut self, order: u8) {
|
||||
self.stream.set_priority(order.into()).ok();
|
||||
}
|
||||
|
||||
fn reset(&mut self, code: u32) {
|
||||
Self::reset(self, code).ok();
|
||||
}
|
||||
|
||||
fn finish(&mut self) -> Result<(), Self::Error> {
|
||||
Self::finish(self).map_err(|_| WriteError::ClosedStream)?;
|
||||
Ok(())
|
||||
}
|
||||
|
||||
async fn write(&mut self, buf: &[u8]) -> Result<usize, Self::Error> {
|
||||
Self::write(self, buf).await
|
||||
}
|
||||
|
||||
async fn write_buf<B: Buf + Send>(&mut self, buf: &mut B) -> Result<usize, Self::Error> {
|
||||
// This can avoid making a copy when Buf is Bytes, as Quinn will allocate anyway.
|
||||
let size = buf.chunk().len();
|
||||
let chunk = buf.copy_to_bytes(size);
|
||||
self.write_chunk(chunk).await?;
|
||||
Ok(size)
|
||||
}
|
||||
|
||||
async fn write_chunk(&mut self, chunk: Bytes) -> Result<(), Self::Error> {
|
||||
self.write_chunk(chunk).await
|
||||
}
|
||||
|
||||
async fn closed(&mut self) -> Result<(), Self::Error> {
|
||||
self.stopped().await?;
|
||||
Ok(())
|
||||
}
|
||||
}
|
||||
76
third_party/iroh-live/web-transport-iroh/src/server.rs
vendored
Normal file
76
third_party/iroh-live/web-transport-iroh/src/server.rs
vendored
Normal file
|
|
@ -0,0 +1,76 @@
|
|||
use url::Url;
|
||||
|
||||
use crate::{Connect, ServerError, Session, Settings};
|
||||
|
||||
/// A QUIC-only WebTransport handshake, awaiting server decision.
pub struct QuicRequest {
    // The raw QUIC connection; no H3 handshake is performed in this mode.
    conn: iroh::endpoint::Connection,
}
|
||||
|
||||
/// An H3 WebTransport handshake, SETTINGS exchanged and CONNECT accepted,
/// awaiting server decision (respond OK / reject).
pub struct H3Request {
    conn: iroh::endpoint::Connection,
    // Negotiated H3 SETTINGS, needed to build the session on acceptance.
    settings: Settings,
    // The pending CONNECT control stream; no response has been sent yet.
    connect: Connect,
}
|
||||
|
||||
impl QuicRequest {
|
||||
/// Accept a new QUIC-only WebTransport session from a client.
|
||||
pub fn accept(conn: iroh::endpoint::Connection) -> Self {
|
||||
Self { conn }
|
||||
}
|
||||
|
||||
pub fn conn(&self) -> &iroh::endpoint::Connection {
|
||||
&self.conn
|
||||
}
|
||||
|
||||
/// Accept the session.
|
||||
pub fn ok(self) -> Session {
|
||||
Session::raw(self.conn)
|
||||
}
|
||||
|
||||
/// Reject the session.
|
||||
pub fn close(self, status: http::StatusCode) {
|
||||
self.conn
|
||||
.close(status.as_u16().into(), status.as_str().as_bytes());
|
||||
}
|
||||
}
|
||||
|
||||
impl H3Request {
    /// Accept a new H3 WebTransport session from a client.
    ///
    /// Completes the SETTINGS exchange and reads the CONNECT request, but
    /// defers the response until [`H3Request::ok`] or [`H3Request::close`].
    pub async fn accept(conn: iroh::endpoint::Connection) -> Result<Self, ServerError> {
        // Perform the H3 handshake by sending/receiving SETTINGS frames.
        let settings = Settings::connect(&conn).await?;

        // Accept the CONNECT request but don't send a response yet.
        let connect = Connect::accept(&conn).await?;

        Ok(Self {
            conn,
            settings,
            connect,
        })
    }

    /// Returns the URL provided by the client.
    pub fn url(&self) -> &Url {
        self.connect.url()
    }

    /// Borrow the underlying QUIC connection.
    pub fn conn(&self) -> &iroh::endpoint::Connection {
        &self.conn
    }

    /// Accept the session, returning a 200 OK.
    pub async fn ok(mut self) -> Result<Session, ServerError> {
        self.connect.respond(http::StatusCode::OK).await?;
        Ok(Session::new_h3(self.conn, self.settings, self.connect))
    }

    /// Reject the session, returning your favorite HTTP status code.
    pub async fn close(mut self, status: http::StatusCode) -> Result<(), ServerError> {
        self.connect.respond(status).await?;
        Ok(())
    }
}
|
||||
540
third_party/iroh-live/web-transport-iroh/src/session.rs
vendored
Normal file
540
third_party/iroh-live/web-transport-iroh/src/session.rs
vendored
Normal file
|
|
@ -0,0 +1,540 @@
|
|||
use std::{
|
||||
fmt,
|
||||
future::{Future, poll_fn},
|
||||
io::Cursor,
|
||||
ops::Deref,
|
||||
pin::Pin,
|
||||
sync::{Arc, Mutex},
|
||||
task::{Context, Poll, ready},
|
||||
};
|
||||
|
||||
use bytes::{Bytes, BytesMut};
|
||||
use iroh::endpoint::Connection;
|
||||
use n0_future::{
|
||||
FuturesUnordered,
|
||||
stream::{Stream, StreamExt},
|
||||
};
|
||||
use url::Url;
|
||||
|
||||
use crate::{
|
||||
ClientError, Connect, RecvStream, SendStream, SessionError, Settings, WebTransportError,
|
||||
};
|
||||
|
||||
use web_transport_proto::{Frame, StreamUni, VarInt};
|
||||
|
||||
/// An established WebTransport session, acting like a full QUIC connection. See [`iroh::endpoint::Connection`].
///
/// It is important to remember that WebTransport is layered on top of QUIC:
/// 1. Each stream starts with a few bytes identifying the stream type and session ID.
/// 2. Errors codes are encoded with the session ID, so they aren't full QUIC error codes.
/// 3. Stream IDs may have gaps in them, used by HTTP/3 transparent to the application.
///
/// Deref is used to expose non-overloaded methods on [`iroh::endpoint::Connection`].
/// These should be safe to use with WebTransport, but file a PR if you find one that isn't.
#[derive(Clone)]
pub struct Session {
    // The underlying QUIC connection; all streams and datagrams ride on it.
    conn: Connection,
    // Present only for true H3 WebTransport sessions; `None` for raw QUIC.
    h3: Option<H3SessionState>,
}
|
||||
|
||||
impl Session {
|
||||
    /// Create a new session from a raw QUIC connection and a URL.
    ///
    /// This is used to pretend like a QUIC connection is a WebTransport session.
    /// It's a hack, but it makes it much easier to support WebTransport and raw QUIC simultaneously.
    // With `h3: None`, every method falls through to the plain QUIC path.
    pub fn raw(conn: Connection) -> Self {
        Self { conn, h3: None }
    }
|
||||
|
||||
/// Connect using an established QUIC connection if you want to create the connection yourself.
|
||||
/// This will only work with a brand new QUIC connection using the HTTP/3 ALPN.
|
||||
pub async fn connect_h3(conn: Connection, url: Url) -> Result<Session, ClientError> {
|
||||
// Perform the H3 handshake by sending/reciving SETTINGS frames.
|
||||
let settings = Settings::connect(&conn).await?;
|
||||
|
||||
// Send the HTTP/3 CONNECT request.
|
||||
let connect = Connect::open(&conn, url).await?;
|
||||
|
||||
Ok(Self::new_h3(conn, settings, connect))
|
||||
}
|
||||
|
||||
    /// Assemble a [`Session`] from a connection plus a completed H3 handshake.
    ///
    /// Spawns a background task that watches the CONNECT control stream and
    /// closes the QUIC connection once the WebTransport session is closed.
    pub fn new_h3(conn: Connection, settings: Settings, connect: Connect) -> Self {
        let h3 = H3SessionState::connect(conn.clone(), settings, &connect);
        let this = Session { conn, h3: Some(h3) };
        // Run a background task to check if the connect stream is closed.
        let this2 = this.clone();
        tokio::spawn(async move {
            let (code, reason) = connect.run_closed().await;
            // Only close if nobody else closed the connection first.
            if this2.conn().close_reason().is_none() {
                // TODO We shouldn't be closing the QUIC connection with the same error.
                this2.close(code, reason.as_bytes());
            }
        });
        this
    }
|
||||
|
||||
    /// Borrow the underlying QUIC connection.
    pub fn conn(&self) -> &Connection {
        &self.conn
    }
|
||||
|
||||
pub fn url(&self) -> Option<&Url> {
|
||||
self.h3.as_ref().map(|s| &s.url)
|
||||
}
|
||||
|
||||
/// Accept a new unidirectional stream. See [`iroh::endpoint::Connection::accept_uni`].
|
||||
pub async fn accept_uni(&self) -> Result<RecvStream, SessionError> {
|
||||
if let Some(h3) = &self.h3 {
|
||||
poll_fn(|cx| h3.accept.lock().unwrap().poll_accept_uni(cx)).await
|
||||
} else {
|
||||
self.conn
|
||||
.accept_uni()
|
||||
.await
|
||||
.map(RecvStream::new)
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
/// Accept a new bidirectional stream. See [`iroh::endpoint::Connection::accept_bi`].
|
||||
pub async fn accept_bi(&self) -> Result<(SendStream, RecvStream), SessionError> {
|
||||
if let Some(h3) = &self.h3 {
|
||||
poll_fn(|cx| h3.accept.lock().unwrap().poll_accept_bi(cx)).await
|
||||
} else {
|
||||
self.conn
|
||||
.accept_bi()
|
||||
.await
|
||||
.map(|(send, recv)| (SendStream::new(send), RecvStream::new(recv)))
|
||||
.map_err(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
/// Open a new unidirectional stream. See [`iroh::endpoint::Connection::open_uni`].
|
||||
pub async fn open_uni(&self) -> Result<SendStream, SessionError> {
|
||||
let mut send = self.conn.open_uni().await?;
|
||||
|
||||
if let Some(h3) = self.h3.as_ref() {
|
||||
write_full_with_max_prio(&mut send, &h3.header_uni).await?;
|
||||
}
|
||||
|
||||
Ok(SendStream::new(send))
|
||||
}
|
||||
|
||||
/// Open a new bidirectional stream. See [`iroh::endpoint::Connection::open_bi`].
|
||||
pub async fn open_bi(&self) -> Result<(SendStream, RecvStream), SessionError> {
|
||||
let (mut send, recv) = self.conn.open_bi().await?;
|
||||
|
||||
if let Some(h3) = self.h3.as_ref() {
|
||||
write_full_with_max_prio(&mut send, &h3.header_bi).await?;
|
||||
}
|
||||
|
||||
Ok((SendStream::new(send), RecvStream::new(recv)))
|
||||
}
|
||||
|
||||
/// Asynchronously receives an application datagram from the remote peer.
|
||||
///
|
||||
/// This method is used to receive an application datagram sent by the remote
|
||||
/// peer over the connection.
|
||||
/// It waits for a datagram to become available and returns the received bytes.
|
||||
pub async fn read_datagram(&self) -> Result<Bytes, SessionError> {
|
||||
let mut datagram = self
|
||||
.conn
|
||||
.read_datagram()
|
||||
.await
|
||||
.map_err(SessionError::from)?;
|
||||
|
||||
let datagram = if let Some(h3) = self.h3.as_ref() {
|
||||
let mut cursor = Cursor::new(&datagram);
|
||||
|
||||
// We have to check and strip the session ID from the datagram.
|
||||
let actual_id =
|
||||
VarInt::decode(&mut cursor).map_err(|_| WebTransportError::UnknownSession)?;
|
||||
if actual_id != h3.session_id {
|
||||
return Err(WebTransportError::UnknownSession.into());
|
||||
}
|
||||
|
||||
// Return the datagram without the session ID.
|
||||
let datagram = datagram.split_off(cursor.position() as usize);
|
||||
datagram
|
||||
} else {
|
||||
datagram
|
||||
};
|
||||
|
||||
Ok(datagram)
|
||||
}
|
||||
|
||||
/// Sends an application datagram to the remote peer.
|
||||
///
|
||||
/// Datagrams are unreliable and may be dropped or delivered out of order.
|
||||
/// The data must be smaller than [`max_datagram_size`](Self::max_datagram_size).
|
||||
pub fn send_datagram(&self, data: Bytes) -> Result<(), SessionError> {
|
||||
let datagram = if let Some(h3) = self.h3.as_ref() {
|
||||
// Unfortunately, we need to allocate/copy each datagram because of the Quinn API.
|
||||
// Pls go +1 if you care: https://github.com/quinn-rs/quinn/issues/1724
|
||||
let mut buf = BytesMut::with_capacity(h3.header_datagram.len() + data.len());
|
||||
// Prepend the datagram with the header indicating the session ID.
|
||||
buf.extend_from_slice(&h3.header_datagram);
|
||||
buf.extend_from_slice(&data);
|
||||
buf.into()
|
||||
} else {
|
||||
data
|
||||
};
|
||||
|
||||
self.conn.send_datagram(datagram)?;
|
||||
|
||||
Ok(())
|
||||
}
|
||||
|
||||
/// Computes the maximum size of datagrams that may be passed to
|
||||
/// [`send_datagram`](Self::send_datagram).
|
||||
pub fn max_datagram_size(&self) -> usize {
|
||||
let mtu = self
|
||||
.conn
|
||||
.max_datagram_size()
|
||||
.expect("datagram support is required");
|
||||
if let Some(h3) = self.h3.as_ref() {
|
||||
mtu.saturating_sub(h3.header_datagram.len())
|
||||
} else {
|
||||
mtu
|
||||
}
|
||||
}
|
||||
|
||||
/// Immediately close the connection with an error code and reason. See [`iroh::endpoint::Connection::close`].
|
||||
pub fn close(&self, code: u32, reason: &[u8]) {
|
||||
let code = if self.h3.is_some() {
|
||||
web_transport_proto::error_to_http3(code)
|
||||
.try_into()
|
||||
.unwrap()
|
||||
} else {
|
||||
code.into()
|
||||
};
|
||||
|
||||
self.conn.close(code, reason)
|
||||
}
|
||||
|
||||
/// Wait until the session is closed, returning the error. See [`iroh::endpoint::Connection::closed`].
|
||||
pub async fn closed(&self) -> SessionError {
|
||||
self.conn.closed().await.into()
|
||||
}
|
||||
|
||||
/// Return why the session was closed, or None if it's not closed. See [`iroh::endpoint::Connection::close_reason`].
|
||||
pub fn close_reason(&self) -> Option<SessionError> {
|
||||
self.conn.close_reason().map(Into::into)
|
||||
}
|
||||
}
|
||||
|
||||
async fn write_full_with_max_prio(
|
||||
send: &mut quinn::SendStream,
|
||||
buf: &[u8],
|
||||
) -> Result<(), SessionError> {
|
||||
// Set the stream priority to max and then write the stream header.
|
||||
// Otherwise the application could write data with lower priority than the header, resulting in queuing.
|
||||
// Also the header is very important for determining the session ID without reliable reset.
|
||||
send.set_priority(i32::MAX).ok();
|
||||
let res = match send.write_all(buf).await {
|
||||
Ok(_) => Ok(()),
|
||||
Err(quinn::WriteError::ConnectionLost(err)) => Err(err.into()),
|
||||
Err(err) => Err(WebTransportError::WriteError(err).into()),
|
||||
};
|
||||
// Reset the stream priority back to the default of 0.
|
||||
send.set_priority(0).ok();
|
||||
res
|
||||
}
|
||||
|
||||
/// Expose the underlying [`Connection`]'s methods directly on [`Session`].
impl Deref for Session {
    type Target = Connection;

    fn deref(&self) -> &Self::Target {
        &self.conn
    }
}
|
||||
|
||||
impl fmt::Debug for Session {
|
||||
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
|
||||
self.conn.fmt(f)
|
||||
}
|
||||
}
|
||||
|
||||
impl PartialEq for Session {
|
||||
fn eq(&self, other: &Self) -> bool {
|
||||
self.conn.stable_id() == other.conn.stable_id()
|
||||
}
|
||||
}
|
||||
|
||||
// Equality via `stable_id` is a total equivalence, so `Eq` holds.
impl Eq for Session {}
|
||||
|
||||
/// Per-session state that exists only for WebTransport-over-HTTP/3
/// sessions (raw QUIC sessions carry no H3 framing).
#[derive(Clone)]
struct H3SessionState {
    /// The URL from the CONNECT request that established the session.
    url: Url,
    // The session ID, as determined by the stream ID of the connect request.
    session_id: VarInt,
    // Cache the headers in front of each stream we open.
    header_uni: Vec<u8>,
    header_bi: Vec<u8>,
    header_datagram: Vec<u8>,

    // Keep a reference to the SETTINGS streams so they aren't closed until
    // this state is dropped. (The CONNECT stream is kept alive by the
    // monitor task spawned in `Session::new_h3`.)
    #[allow(dead_code)]
    settings: Arc<Settings>,
    // The accept logic is stateful, so use an Arc<Mutex> to share it.
    accept: Arc<Mutex<H3SessionAccept>>,
}
|
||||
|
||||
impl H3SessionState {
|
||||
fn connect(conn: Connection, settings: Settings, connect: &Connect) -> Self {
|
||||
// The session ID is the stream ID of the CONNECT request.
|
||||
let session_id = connect.session_id();
|
||||
|
||||
// Cache the tiny header we write in front of each stream we open.
|
||||
let mut header_uni = Vec::new();
|
||||
StreamUni::WEBTRANSPORT.encode(&mut header_uni);
|
||||
session_id.encode(&mut header_uni);
|
||||
|
||||
let mut header_bi = Vec::new();
|
||||
Frame::WEBTRANSPORT.encode(&mut header_bi);
|
||||
session_id.encode(&mut header_bi);
|
||||
|
||||
let mut header_datagram = Vec::new();
|
||||
session_id.encode(&mut header_datagram);
|
||||
|
||||
// Accept logic is stateful, so use an Arc<Mutex> to share it.
|
||||
let accept = H3SessionAccept::new(conn, session_id);
|
||||
Self {
|
||||
url: connect.url().clone(),
|
||||
session_id,
|
||||
header_uni,
|
||||
header_bi,
|
||||
header_datagram,
|
||||
settings: Arc::new(settings),
|
||||
accept: Arc::new(Mutex::new(accept)),
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Type aliases just so clippy doesn't complain about the complexity.
/// Stream of incoming unidirectional QUIC streams.
type AcceptUni =
    dyn Stream<Item = Result<quinn::RecvStream, iroh::endpoint::ConnectionError>> + Send;
/// Stream of incoming bidirectional QUIC streams.
type AcceptBi = dyn Stream<Item = Result<(quinn::SendStream, quinn::RecvStream), iroh::endpoint::ConnectionError>>
    + Send;
/// In-flight decode of a unidirectional stream's header (stream type, then session ID).
type PendingUni = dyn Future<Output = Result<(StreamUni, quinn::RecvStream), SessionError>> + Send;
/// In-flight decode of a bidirectional stream's header; `None` means the
/// stream isn't a WebTransport stream and should be ignored.
type PendingBi = dyn Future<Output = Result<Option<(quinn::SendStream, quinn::RecvStream)>, SessionError>>
    + Send;
|
||||
|
||||
// Logic just for accepting streams, which is annoying because of the stream header.
pub struct H3SessionAccept {
    // The expected session ID; streams carrying a different ID are rejected.
    session_id: VarInt,

    // We also need to keep a reference to the qpack streams if the endpoint (incorrectly) creates them.
    // Again, this is just so they don't get closed until we drop the session.
    qpack_encoder: Option<quinn::RecvStream>,
    qpack_decoder: Option<quinn::RecvStream>,

    // Incoming raw QUIC streams, exposed as `Stream`s so they can be polled.
    accept_uni: Pin<Box<AcceptUni>>,
    accept_bi: Pin<Box<AcceptBi>>,

    // Keep track of work being done to read/write the WebTransport stream header.
    pending_uni: FuturesUnordered<Pin<Box<PendingUni>>>,
    pending_bi: FuturesUnordered<Pin<Box<PendingBi>>>,
}
|
||||
|
||||
impl H3SessionAccept {
    /// Create the accept state machine for a session with the given ID.
    pub(crate) fn new(conn: Connection, session_id: VarInt) -> Self {
        // Create a stream that just outputs new streams, so it's easy to call from poll.
        // Note: these unfold closures always yield Some, so the streams never end.
        let accept_uni = Box::pin(n0_future::stream::unfold(conn.clone(), |conn| async {
            Some((conn.accept_uni().await, conn))
        }));

        let accept_bi = Box::pin(n0_future::stream::unfold(conn, |conn| async {
            Some((conn.accept_bi().await, conn))
        }));

        Self {
            session_id,

            qpack_decoder: None,
            qpack_encoder: None,

            accept_uni,
            accept_bi,

            pending_uni: FuturesUnordered::new(),
            pending_bi: FuturesUnordered::new(),
        }
    }

    // This is poll-based because we accept and decode streams in parallel.
    // In async land I would use tokio::JoinSet, but that requires a runtime.
    // It's better to use FuturesUnordered instead because it's agnostic.
    pub fn poll_accept_uni(
        &mut self,
        cx: &mut Context<'_>,
    ) -> Poll<Result<RecvStream, SessionError>> {
        loop {
            // Accept any new streams.
            if let Poll::Ready(Some(res)) = self.accept_uni.poll_next(cx) {
                // Start decoding the header and add the future to the list of pending streams.
                let recv = res?;
                let pending = Self::decode_uni(recv, self.session_id);
                self.pending_uni.push(Box::pin(pending));

                continue;
            }

            // Poll the list of pending streams.
            let (typ, recv) = match ready!(self.pending_uni.poll_next(cx)) {
                Some(Ok(res)) => res,
                Some(Err(err)) => {
                    // Ignore the error, the stream was probably reset early.
                    tracing::warn!("failed to decode unidirectional stream: {err:?}");
                    continue;
                }
                // No decodes in flight. Parking here is fine: we only reach
                // this point after `accept_uni` returned Pending above and
                // registered the waker (the unfold stream never terminates).
                None => return Poll::Pending,
            };

            // Decide if we keep looping based on the type.
            match typ {
                StreamUni::WEBTRANSPORT => {
                    let recv = RecvStream::new(recv);
                    return Poll::Ready(Ok(recv));
                }
                // Hold on to qpack streams so they stay open (see struct docs).
                StreamUni::QPACK_DECODER => {
                    self.qpack_decoder = Some(recv);
                }
                StreamUni::QPACK_ENCODER => {
                    self.qpack_encoder = Some(recv);
                }
                _ => {
                    // ignore unknown streams
                    tracing::debug!("ignoring unknown unidirectional stream: {typ:?}");
                }
            }
        }
    }

    // Reads the stream header, returning the stream type.
    async fn decode_uni(
        mut recv: quinn::RecvStream,
        expected_session: VarInt,
    ) -> Result<(StreamUni, quinn::RecvStream), SessionError> {
        // Read the VarInt at the start of the stream.
        let typ = VarInt::read(&mut recv)
            .await
            .map_err(|_| WebTransportError::UnknownSession)?;
        let typ = StreamUni(typ);

        if typ == StreamUni::WEBTRANSPORT {
            // Read the session_id and validate it
            let session_id = VarInt::read(&mut recv)
                .await
                .map_err(|_| WebTransportError::UnknownSession)?;
            if session_id != expected_session {
                return Err(WebTransportError::UnknownSession.into());
            }
        }

        // We need to keep a reference to the qpack streams if the endpoint (incorrectly) creates them, so return everything.
        Ok((typ, recv))
    }

    /// Poll for the next accepted WebTransport bidirectional stream.
    /// Mirrors `poll_accept_uni`; non-WebTransport streams are ignored.
    pub fn poll_accept_bi(
        &mut self,
        cx: &mut Context<'_>,
    ) -> Poll<Result<(SendStream, RecvStream), SessionError>> {
        loop {
            // Accept any new streams.
            if let Poll::Ready(Some(res)) = self.accept_bi.poll_next(cx) {
                // Start decoding the header and add the future to the list of pending streams.
                let (send, recv) = res?;
                let pending = Self::decode_bi(send, recv, self.session_id);
                self.pending_bi.push(Box::pin(pending));

                continue;
            }

            // Poll the list of pending streams.
            let res = match ready!(self.pending_bi.poll_next(cx)) {
                Some(Ok(res)) => res,
                Some(Err(err)) => {
                    // Ignore the error, the stream was probably reset early.
                    tracing::warn!("failed to decode bidirectional stream: {err:?}");
                    continue;
                }
                // No decodes in flight; waker was registered by `accept_bi` above.
                None => return Poll::Pending,
            };

            if let Some((send, recv)) = res {
                // Wrap the streams in our own types for correct error codes.
                let send = SendStream::new(send);
                let recv = RecvStream::new(recv);
                return Poll::Ready(Ok((send, recv)));
            }

            // Keep looping if it's a stream we want to ignore.
        }
    }

    // Reads the stream header, returning Some if it's a WebTransport stream.
    async fn decode_bi(
        send: quinn::SendStream,
        mut recv: quinn::RecvStream,
        expected_session: VarInt,
    ) -> Result<Option<(quinn::SendStream, quinn::RecvStream)>, SessionError> {
        let typ = VarInt::read(&mut recv)
            .await
            .map_err(|_| WebTransportError::UnknownSession)?;
        if Frame(typ) != Frame::WEBTRANSPORT {
            tracing::debug!("ignoring unknown bidirectional stream: {typ:?}");
            return Ok(None);
        }

        // Read the session ID and validate it.
        let session_id = VarInt::read(&mut recv)
            .await
            .map_err(|_| WebTransportError::UnknownSession)?;
        if session_id != expected_session {
            return Err(WebTransportError::UnknownSession.into());
        }

        Ok(Some((send, recv)))
    }
}
|
||||
|
||||
/// Delegate the generic `web_transport_trait::Session` interface to the
/// inherent methods above.
///
/// The `Self::method(self)` call form resolves to the inherent impl
/// (inherent associated functions take precedence over trait ones), so
/// these delegations do not recurse.
impl web_transport_trait::Session for Session {
    type SendStream = SendStream;
    type RecvStream = RecvStream;
    type Error = SessionError;

    async fn accept_uni(&self) -> Result<Self::RecvStream, Self::Error> {
        Self::accept_uni(self).await
    }

    async fn accept_bi(&self) -> Result<(Self::SendStream, Self::RecvStream), Self::Error> {
        Self::accept_bi(self).await
    }

    async fn open_bi(&self) -> Result<(Self::SendStream, Self::RecvStream), Self::Error> {
        Self::open_bi(self).await
    }

    async fn open_uni(&self) -> Result<Self::SendStream, Self::Error> {
        Self::open_uni(self).await
    }

    // The trait takes the reason as a string; the inherent method takes bytes.
    fn close(&self, code: u32, reason: &str) {
        Self::close(self, code, reason.as_bytes());
    }

    async fn closed(&self) -> Self::Error {
        Self::closed(self).await
    }

    fn send_datagram(&self, data: Bytes) -> Result<(), Self::Error> {
        Self::send_datagram(self, data)
    }

    // Trait name differs: `recv_datagram` maps to our `read_datagram`.
    async fn recv_datagram(&self) -> Result<Bytes, Self::Error> {
        Self::read_datagram(self).await
    }

    fn max_datagram_size(&self) -> usize {
        Self::max_datagram_size(self)
    }
}
|
||||
69
third_party/iroh-live/web-transport-iroh/src/settings.rs
vendored
Normal file
69
third_party/iroh-live/web-transport-iroh/src/settings.rs
vendored
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
use thiserror::Error;
|
||||
use tokio::try_join;
|
||||
|
||||
/// Errors that can occur during the HTTP/3 SETTINGS exchange.
#[derive(Error, Debug, Clone)]
pub enum SettingsError {
    /// The peer closed its stream before a full SETTINGS frame arrived.
    #[error("quic stream was closed early")]
    UnexpectedEnd,

    /// The SETTINGS frame could not be parsed.
    #[error("protocol error: {0}")]
    ProtoError(#[from] web_transport_proto::SettingsError),

    /// The peer's SETTINGS did not advertise WebTransport support.
    #[error("WebTransport is not supported")]
    WebTransportUnsupported,

    #[error("connection error")]
    ConnectionError(#[from] iroh::endpoint::ConnectionError),

    #[error("read error")]
    ReadError(#[from] quinn::ReadError),

    #[error("write error")]
    WriteError(#[from] quinn::WriteError),
}
|
||||
|
||||
/// The result of a successful HTTP/3 SETTINGS exchange.
///
/// Holds both SETTINGS streams open for the lifetime of the session;
/// dropping this value releases (and thus closes) them.
pub struct Settings {
    // A reference to the send/recv stream, so we don't close it until dropped.
    #[allow(dead_code)]
    send: quinn::SendStream,

    #[allow(dead_code)]
    recv: quinn::RecvStream,
}
|
||||
|
||||
impl Settings {
|
||||
// Establish the H3 connection.
|
||||
pub async fn connect(conn: &iroh::endpoint::Connection) -> Result<Self, SettingsError> {
|
||||
let recv = Self::accept(conn);
|
||||
let send = Self::open(conn);
|
||||
|
||||
// Run both tasks concurrently until one errors or they both complete.
|
||||
let (send, recv) = try_join!(send, recv)?;
|
||||
Ok(Self { send, recv })
|
||||
}
|
||||
|
||||
async fn accept(conn: &iroh::endpoint::Connection) -> Result<quinn::RecvStream, SettingsError> {
|
||||
let mut recv = conn.accept_uni().await?;
|
||||
let settings = web_transport_proto::Settings::read(&mut recv).await?;
|
||||
|
||||
tracing::debug!("received SETTINGS frame: {settings:?}");
|
||||
|
||||
if settings.supports_webtransport() == 0 {
|
||||
return Err(SettingsError::WebTransportUnsupported);
|
||||
}
|
||||
|
||||
Ok(recv)
|
||||
}
|
||||
|
||||
async fn open(conn: &iroh::endpoint::Connection) -> Result<quinn::SendStream, SettingsError> {
|
||||
let mut settings = web_transport_proto::Settings::default();
|
||||
settings.enable_webtransport(1);
|
||||
|
||||
tracing::debug!("sending SETTINGS frame: {settings:?}");
|
||||
|
||||
let mut send = conn.open_uni().await?;
|
||||
settings.write(&mut send).await?;
|
||||
|
||||
Ok(send)
|
||||
}
|
||||
}
|
||||
128
third_party/iroh-live/web-transport-iroh/src/tests.rs
vendored
Normal file
128
third_party/iroh-live/web-transport-iroh/src/tests.rs
vendored
Normal file
|
|
@ -0,0 +1,128 @@
|
|||
use iroh::{Endpoint, endpoint::ConnectionError};
|
||||
use n0_tracing_test::traced_test;
|
||||
use tracing::Instrument;
|
||||
use url::Url;
|
||||
|
||||
use crate::{ALPN_H3, Client, H3Request, QuicRequest, SessionError};
|
||||
|
||||
/// End-to-end smoke test for the H3/WebTransport path: handshake, URL
/// propagation, a uni stream round-trip, and error-code remapping on close.
#[tokio::test]
#[traced_test]
async fn h3_smoke() -> n0_error::Result<()> {
    let client = Endpoint::bind()
        .instrument(tracing::error_span!("client-ep"))
        .await
        .unwrap();
    let client_id = client.id();
    let client = Client::new(client);

    // The server must advertise the H3 ALPN for connect_h3 to succeed.
    let server = Endpoint::builder()
        .alpns(vec![ALPN_H3.as_bytes().to_vec()])
        .bind()
        .instrument(tracing::error_span!("server-ep"))
        .await
        .unwrap();
    let server_id = server.id();
    let server_addr = server.addr();

    let url: Url = format!("https://{}/foo", server_id).parse().unwrap();

    let client_task = tokio::task::spawn({
        let url = url.clone();
        async move {
            let session = client.connect_h3(server_addr, url.clone()).await.inspect_err(|err| println!("{err:#?}")).unwrap();
            assert_eq!(session.remote_id(), server_id);
            assert_eq!(session.url(), Some(&url));

            let mut stream = session.open_uni().await.unwrap();
            stream.write_all(b"hi").await.unwrap();
            stream.finish().unwrap();
            // The server closes with app code 23; over H3 that code is
            // remapped into the HTTP/3 error space, so map it back here.
            let reason = session.closed().await;
            assert!(
                matches!(reason, SessionError::ConnectionError(ConnectionError::ApplicationClosed(frame)) if web_transport_proto::error_from_http3(frame.error_code.into_inner()) == Some(23))
            );

            drop(session);
            client.close().await;
        }.instrument(tracing::error_span!("client"))
    });

    let server_task = tokio::task::spawn(
        async move {
            let conn = server.accept().await.unwrap().await.unwrap();
            assert_eq!(conn.alpn(), ALPN_H3.as_bytes());
            let request = H3Request::accept(conn)
                .await
                .inspect_err(|err| tracing::error!("accept failed: {err:?}"))
                .unwrap();
            // The request exposes the URL before the session is accepted.
            assert_eq!(request.url(), &url);
            assert_eq!(request.conn().remote_id(), client_id);
            let session = request.ok().await.unwrap();
            assert_eq!(session.url(), Some(&url));
            assert_eq!(session.conn().remote_id(), client_id);
            let mut stream = session.accept_uni().await.unwrap();
            let buf = stream.read_to_end(2).await.unwrap();
            assert_eq!(buf, b"hi");
            session.close(23, b"bye");
            server.close().await;
        }
        .instrument(tracing::error_span!("server")),
    );

    client_task.await.unwrap();
    server_task.await.unwrap();

    Ok(())
}
|
||||
|
||||
/// Smoke test for the raw-QUIC (no H3) path: sessions have no URL and
/// close codes pass through without HTTP/3 remapping.
#[tokio::test]
#[traced_test]
async fn quic_smoke() -> n0_error::Result<()> {
    const ALPN: &str = "moql";

    let client = Endpoint::bind().await.unwrap();
    let client_id = client.id();
    let client = Client::new(client);

    let server = Endpoint::builder()
        .alpns(vec![ALPN.as_bytes().to_vec()])
        .bind()
        .await
        .unwrap();
    let server_id = server.id();
    let server_addr = server.addr();

    let client_task = tokio::task::spawn({
        async move {
            let session = client
                .connect_quic(server_addr, ALPN.as_bytes())
                .await
                .unwrap();
            println!("session established");
            assert_eq!(session.remote_id(), server_id);
            // Raw QUIC sessions have no CONNECT URL.
            assert_eq!(session.url(), None);
            // Unlike the H3 path, the close code arrives unmapped.
            let reason = session.closed().await;
            assert!(
                matches!(reason, SessionError::ConnectionError(ConnectionError::ApplicationClosed(frame)) if frame.error_code.into_inner() == 23)
            )
        }.instrument(tracing::error_span!("client"))
    });

    let server_task = tokio::task::spawn({
        async move {
            let conn = server.accept().await.unwrap().await.unwrap();
            assert_eq!(conn.alpn(), ALPN.as_bytes());
            // QuicRequest::accept is synchronous — no H3 handshake to run.
            let request = QuicRequest::accept(conn);
            assert_eq!(request.conn().remote_id(), client_id);
            let session = request.ok();
            assert_eq!(session.url(), None);
            assert_eq!(session.conn().remote_id(), client_id);
            session.close(23, b"bye");
        }
        .instrument(tracing::error_span!("server"))
    });

    client_task.await.unwrap();
    server_task.await.unwrap();

    Ok(())
}
|
||||
Loading…
Add table
Add a link
Reference in a new issue