
switched to JPEG from H.264 compression

10x larger over the wire, but no dependency on libstdc++
Andy Killorin 2025-01-02 19:42:05 -05:00
parent 2dab4515fb
commit f5bc322862
Signed by: ank
GPG key ID: 23F9463ECB67FE8C
6 changed files with 46 additions and 56 deletions
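
The "10x larger over the wire" figure is an estimate. A quick, hypothetical way to sanity-check it (not part of this commit) is to encode one 320x240 frame at the same quality setting the encoder uses below and print the resulting JPEG size:

use image::{codecs::jpeg::JpegEncoder, ImageBuffer, Rgb};

fn main() {
    // Blank 320x240 RGB test frame; a real camera frame compresses less well.
    let frame: ImageBuffer<Rgb<u8>, Vec<u8>> = ImageBuffer::new(320, 240);
    let mut jpeg = Vec::new();
    // Quality 30, matching camera_manager in the encoder diff below.
    JpegEncoder::new_with_quality(&mut jpeg, 30)
        .encode_image(&frame)
        .expect("jpeg encode");
    println!("one frame: {} bytes", jpeg.len());
}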

decoder/Cargo.lock generated

@@ -79,6 +79,7 @@ version = "0.1.0"
dependencies = [
"openh264",
"tokio",
"zune-jpeg",
]
[[package]]
@@ -453,3 +454,18 @@ name = "windows_x86_64_msvc"
version = "0.52.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "589f6da84c646204747d1270a2a5661ea66ed1cced2631d546fdfb155959f9ec"
[[package]]
name = "zune-core"
version = "0.4.12"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3f423a2c17029964870cfaabb1f13dfab7d092a62a29a89264f4d36990ca414a"
[[package]]
name = "zune-jpeg"
version = "0.4.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99a5bab8d7dedf81405c4bb1f2b83ea057643d9cb28778cea9eecddeedd2e028"
dependencies = [
"zune-core",
]

decoder/Cargo.toml

@@ -6,6 +6,7 @@ edition = "2021"
[dependencies]
openh264 = "0.6.6"
tokio = { version = "1.42.0", features = ["full"] }
zune-jpeg = "0.4.14"
[lib]
crate-type = ["cdylib","lib"]

decoder/src/lib.rs

@@ -1,53 +1,32 @@
use std::{mem::transmute, net::{self, SocketAddr}, slice, sync::{Mutex, OnceLock}};
use openh264::{decoder::Decoder, formats::YUVSource};
use zune_jpeg::{zune_core::{colorspace::ColorSpace, options::DecoderOptions}, JpegDecoder};
#[no_mangle]
pub extern fn add(a: u64, b: u64) -> u64 {
a + b
}
static DECODER: OnceLock<Mutex<Decoder>> = OnceLock::new();
#[no_mangle]
/// reset h264 decoder
///
/// call before opening new stream
pub extern fn reset_decoder() -> u64 {
// calls constructor twice in the reset case, not big deal
if let Err(_) = DECODER.set(Mutex::new(Decoder::new().unwrap())) {
*DECODER.get().unwrap().lock().unwrap() = Decoder::new().unwrap();
}
0
}
#[no_mangle]
/// decode h264 packet of given length
/// decode jpeg image of given length
///
/// image format is 0RGB
///
/// returns 1 if the packet contained a new frame, otherwise 0
pub extern fn decode_h264(image: &mut u32, packet: &u8, length: u32) -> u64 {
let decoder = DECODER.get_or_init(|| Mutex::new(Decoder::new().unwrap()));
let mut decoder = decoder.lock().unwrap();
pub extern fn decode(image: &mut u32, packet: &u8, length: u32) -> u64 {
let packet = unsafe {slice::from_raw_parts(packet, length as usize)};
let image: &mut [u8; 4*320*240] = unsafe{transmute(image)};
if let Ok(Some(frame)) = decoder.decode(packet) {
let mut buf = [0u8; 3*320*240];
frame.write_rgb8(&mut buf);
// 0RGB conversion, avoids need to modify openh264
for (buffer, image) in buf.chunks_exact_mut(3).zip(image.chunks_exact_mut(4)) {
let options = DecoderOptions::new_fast().jpeg_set_out_colorspace(ColorSpace::RGB);
let mut frame = JpegDecoder::new_with_options(packet, options).decode().unwrap();
// 0RGB conversion, easier than shifting and zeroing A
for (buffer, image) in frame.chunks_exact_mut(3).zip(image.chunks_exact_mut(4)) {
image[0] = 0;
image[1] = buffer[0];
image[2] = buffer[1];
image[3] = buffer[2];
}
return 1;
}
0
}
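
For context, the decoder is built as a cdylib, so decode is consumed over FFI. A minimal, hypothetical caller sketch (not part of this commit), assuming the same 320x240 0RGB output buffer the code above uses:

// Declares the exported symbol; link against the built cdylib.
extern "C" {
    fn decode(image: *mut u32, packet: *const u8, length: u32) -> u64;
}

fn main() {
    // "frame.jpg" is a placeholder test input, not a file in this repo.
    let jpeg = std::fs::read("frame.jpg").expect("test frame");
    let mut image = vec![0u32; 320 * 240]; // one 0RGB pixel per element
    let got_frame = unsafe { decode(image.as_mut_ptr(), jpeg.as_ptr(), jpeg.len() as u32) };
    println!("new frame: {}", got_frame == 1);
}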

encoder/Cargo.lock generated

@@ -423,6 +423,7 @@ dependencies = [
"nokhwa",
"openh264",
"tokio",
"zune-jpeg",
]
[[package]]

encoder/Cargo.toml

@@ -9,3 +9,4 @@ image = "0.25.5"
nokhwa = { version = "0.10.7", features = ["input-native"] }
openh264 = "0.6.6"
tokio = { version = "1.42.0", features = ["full"] }
zune-jpeg = "0.4.14"

encoder/src/main.rs

@@ -1,14 +1,13 @@
use std::{net::SocketAddr, result, sync::Arc, thread::{self, sleep}, time::Duration};
use std::{array::from_ref, net::SocketAddr, result, sync::Arc, thread::{self, sleep}, time::Duration};
use image::{ImageBuffer, Rgb};
use image::{codecs::jpeg::JpegEncoder, ImageBuffer, Rgb};
use nokhwa::{pixel_format::RgbFormat, utils::{ApiBackend, RequestedFormat, RequestedFormatType, Resolution}, Camera};
use anyhow::{Context, Ok, Result};
use openh264::{encoder::{Encoder, EncoderConfig, UsageType}, formats::{RgbSliceU8, YUVBuffer}, nal_units, OpenH264API};
use tokio::{io::AsyncWriteExt, net::{TcpListener, TcpStream}, runtime::Runtime, sync::{Notify, RwLock}, task::LocalSet};
fn main() -> Result<()>{
let await_frame = Arc::new(Notify::new());
let latest_frame = Arc::new(RwLock::new(YUVBuffer::new(0, 0)));
let latest_frame = Arc::new(RwLock::new(Vec::new()));
{
let runtime = Runtime::new()?;
@@ -34,7 +33,7 @@ fn main() -> Result<()>{
loop {}
}
async fn camera_manager(await_frame: Arc<Notify>, latest_frame: Arc<RwLock<YUVBuffer>>) -> Result<()>{
async fn camera_manager(await_frame: Arc<Notify>, latest_frame: Arc<RwLock<Vec<u8>>>) -> Result<()>{
let cameras = nokhwa::query(ApiBackend::Auto)?;
let camera = cameras.get(0).context("no cameras")?;
@@ -47,41 +46,34 @@ async fn camera_manager(await_frame: Arc<Notify>, latest_frame: Arc<RwLock<YUVBu
loop {
let frame = camera.frame()?;
let resolution = frame.resolution();
let frame: ImageBuffer<Rgb<u8>, Vec<u8>> = frame.decode_image::<RgbFormat>()?;
let buf = RgbSliceU8::new(frame.as_raw(), (resolution.width_x as usize, resolution.height_y as usize));
let yuv = YUVBuffer::from_rgb8_source(buf);
*latest_frame.write().await = yuv;
let mut output = Vec::new();
let mut encoder = JpegEncoder::new_with_quality(&mut output, 30);
encoder.encode_image(&frame).unwrap();
*latest_frame.write().await = output;
await_frame.notify_waiters();
}
}
async fn stream_video(await_frame: Arc<Notify>, latest_frame: Arc<RwLock<YUVBuffer>>, mut client: TcpStream) -> Result<()>{
let mut encoder = Encoder::with_api_config(
OpenH264API::from_source(),
EncoderConfig::new()
.usage_type(UsageType::CameraVideoRealTime
))?;
async fn stream_video(await_frame: Arc<Notify>, latest_frame: Arc<RwLock<Vec<u8>>>, mut client: TcpStream) -> Result<()>{
loop {
await_frame.notified().await;
let data = encoder.encode(&*latest_frame.read().await)?;
let data = data.to_vec();
let data = latest_frame.read().await;
println!("len: {}", data.len());
let len = data.len();
let data2 = data.clone();
drop(data);
client.write(&data2).await?;
// holding data across await, likely nonissue
client.write_u32(len as u32).await?;
client.write(&data).await?;
}
}
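
The framing stream_video now writes is a 4-byte length prefix followed by the JPEG payload; tokio's write_u32 is big-endian, so the matching read is read_u32. A minimal sketch of the receiving loop (hypothetical, not part of this commit):

use tokio::{io::AsyncReadExt, net::TcpStream};

async fn recv_frames(mut conn: TcpStream) -> anyhow::Result<()> {
    loop {
        // mirrors write_u32 above: big-endian u32 length, then that many JPEG bytes
        let len = conn.read_u32().await?;
        let mut jpeg = vec![0u8; len as usize];
        conn.read_exact(&mut jpeg).await?;
        // `jpeg` now holds one complete frame, ready to hand to the decoder's `decode`
    }
}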
async fn server(await_frame: Arc<Notify>, latest_frame: Arc<RwLock<YUVBuffer>>) -> Result<()> {
async fn server(await_frame: Arc<Notify>, latest_frame: Arc<RwLock<Vec<u8>>>) -> Result<()> {
let port = 2993;
let listener = TcpListener::bind(format!("0.0.0.0:{port}")).await?;
println!("listening on {port}");