lesavka/server/src/video.rs

// server/src/video.rs
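//! Per-eye video capture: pulls an MPEG-TS stream from a V4L2 device with
//! GStreamer, extracts Annex-B H.264 access units and hands them to the gRPC
//! layer as [`VideoPacket`]s.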
use anyhow::Context;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gst::prelude::*;
use gst::MessageView;
use lesavka_common::lesavka::VideoPacket;
use tokio_stream::wrappers::ReceiverStream;
use tonic::Status;
use tracing::{debug, enabled, trace, Level};
use futures_util::Stream;

const EYE_ID: [&str; 2] = ["l", "r"];
static START: std::sync::OnceLock<gst::ClockTime> = std::sync::OnceLock::new();
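/// Couples the GStreamer pipeline with the outgoing packet stream so the
/// pipeline lives exactly as long as the stream that consumes it; `Drop`
/// returns the pipeline to `Null`.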
pub struct VideoStream {
    _pipeline: gst::Pipeline,
    inner: ReceiverStream<Result<VideoPacket, Status>>,
}

impl Stream for VideoStream {
    type Item = Result<VideoPacket, Status>;

    fn poll_next(
        mut self: std::pin::Pin<&mut Self>,
        cx: &mut std::task::Context<'_>,
    ) -> std::task::Poll<Option<Self::Item>> {
        Stream::poll_next(std::pin::Pin::new(&mut self.inner), cx)
    }
}

impl Drop for VideoStream {
    fn drop(&mut self) {
        // shutting down cleanly avoids the “dispose element … READY/PLAYING …” spam
        let _ = self._pipeline.set_state(gst::State::Null);
    }
}

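/// Builds the capture pipeline for one eye (`"l"` or `"r"`, selected by `id`)
/// on the given V4L2 device and returns its H.264 packet stream.
/// `_max_bitrate_kbit` is currently unused.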
pub async fn eye_ball(
    dev: &str,
    id: u32,
    _max_bitrate_kbit: u32,
) -> anyhow::Result<VideoStream> {
    let eye = *EYE_ID.get(id as usize).context("eye id out of range")?;
    gst::init().context("gst init")?;

    // MPEG-TS from the V4L2 capture device → demux → Annex-B H.264 access units
    let desc = format!(
        "v4l2src device=\"{dev}\" io-mode=mmap ! \
         queue ! tsdemux name=d d. ! \
         h264parse disable-passthrough=true config-interval=-1 ! \
         video/x-h264,stream-format=byte-stream,alignment=au ! \
         appsink name=vsink emit-signals=true max-buffers=32 drop=true"
    );

    // earlier variant, kept for reference:
    // let desc = format!(
    //     "v4l2src device={dev} io-mode=mmap ! \
    //      queue max-size-buffers=0 max-size-bytes=0 max-size-time=0 ! tsdemux name=d ! \
    //      video/x-h264,stream-format=byte-stream,alignment=au,profile=high ! tsdemux name=d ! \
    //      d. ! h264parse config-interval=1 ! queue ! appsink name=vsink emit-signals=true \
    //      d. ! aacparse ! queue ! h264parse config-interval=1 ! appsink name=sink \
    //      emit-signals=true drop=false sync=false"
    // );

    let pipeline = gst::parse::launch(&desc)?
        .downcast::<gst::Pipeline>()
        .expect("not a pipeline");

    // the appsink is named "vsink" in the launch description above
    let sink = pipeline
        .by_name("vsink")
        .expect("appsink")
        .dynamic_cast::<gst_app::AppSink>()
        .expect("appsink down-cast");

    let (tx, rx) = tokio::sync::mpsc::channel(8192);

    // every parsed access unit is forwarded from GStreamer's streaming thread
    // to the gRPC side through the mpsc channel
    sink.set_callbacks(
        gst_app::AppSinkCallbacks::builder()
            .new_sample(move |sink| {
                /* -------- pull frame ---------- */
                let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
                let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;

                /* -------- map once, reuse ----- */
                let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;

                /* -------- basic counters ------ */
                // process-wide counter, shared by every eye stream
                static FRAME: std::sync::atomic::AtomicU64 =
                    std::sync::atomic::AtomicU64::new(0);
                let n = FRAME.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
                if n % 120 == 0 {
                    trace!(target: "lesavka_server::video", "eye-{eye}: delivered {n} frames");
                    if enabled!(Level::TRACE) {
                        let path = format!("/tmp/eye-{eye}-srv-{:05}.h264", n);
                        std::fs::write(&path, map.as_slice()).ok();
                    }
                }

                /* -------- detect SPS / IDR ---- */
                // assumes a 4-byte Annex-B start code, so byte 4 is the NAL header
                if enabled!(Level::DEBUG) {
                    if let Some(&nal) = map.as_slice().get(4) {
                        if (nal & 0x1F) == 0x05 /* IDR */ {
                            debug!("eye-{eye}: IDR");
                        }
                    }
                }

                /* -------- timestamps ---------- */
                // PTS in µs, relative to the first buffer seen by any stream
                let origin = *START.get_or_init(|| buffer.pts().unwrap_or(gst::ClockTime::ZERO));
                let pts_us = buffer
                    .pts()
                    .unwrap_or(gst::ClockTime::ZERO)
                    .saturating_sub(origin)
                    .nseconds()
                    / 1_000;

                /* -------- ship over gRPC ----- */
                let pkt = VideoPacket {
                    id,
                    pts: pts_us,
                    data: map.as_slice().to_vec(),
                };
                tracing::trace!("srv→grpc eye-{eye} {} bytes pts={}", pkt.data.len(), pkt.pts);
                // drop the frame rather than block the streaming thread when the
                // channel is full
                let _ = tx.try_send(Ok(pkt));

                Ok(gst::FlowSuccess::Ok)
            })
            .build(),
    );

    pipeline.set_state(gst::State::Playing)?;

    // wait (blocking) until the pipeline reports PLAYING; bail out on a bus
    // error instead of looping forever
    let bus = pipeline.bus().expect("pipeline has a bus");
    loop {
        match bus.timed_pop(gst::ClockTime::NONE) {
            Some(msg) => match msg.view() {
                MessageView::StateChanged(s) if s.current() == gst::State::Playing => break,
                MessageView::Error(e) => anyhow::bail!("eye-{eye}: pipeline error: {}", e.error()),
                _ => continue,
            },
            None => continue,
        }
    }

    Ok(VideoStream { _pipeline: pipeline, inner: ReceiverStream::new(rx) })
}