// client/src/output/video.rs
use anyhow::Context;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gst::prelude::*;
use lesavka_common::lesavka::VideoPacket;
use tracing::{error, info, warn, debug};

/* ---------- pipeline ----------------------------------------------------
 *  ┌────────┐  H.264/AU  ┌───────────┐  Decoded   ┌─────────────┐
 *  │ AppSrc │───────────►│ decodebin │───────────►│ glimagesink │
 *  └────────┘            └───────────┘            └─────────────┘
 *                          (autoplug)                (overlay)
 * ----------------------------------------------------------------------*/
const PIPELINE_DESC: &str = concat!(
    "appsrc name=src is-live=true format=time do-timestamp=true block=false ! ",
    "queue leaky=downstream ! ",
    "capsfilter caps=video/x-h264,stream-format=byte-stream,alignment=au ! ",
    "h264parse disable-passthrough=true ! decodebin ! videoconvert ! ",
    "glimagesink name=sink sync=false"
);

pub struct MonitorWindow {
    _pipeline: gst::Pipeline,
    src: gst_app::AppSrc,
}

impl MonitorWindow {
    pub fn new(id: u32) -> anyhow::Result<Self> {
        gst::init().context("initialising GStreamer")?;

        // --- Build pipeline ------------------------------------------------
        let pipeline: gst::Pipeline = gst::parse::launch(PIPELINE_DESC)?
            .downcast::<gst::Pipeline>()
            .expect("not a pipeline");

        // Optional: make the sink full-screen when LESAVKA_FULLSCREEN=1.
        let fullscreen = std::env::var("LESAVKA_FULLSCREEN").is_ok();
        if let Some(sink) = pipeline.by_name("sink") {
            // glimagesink: title / fullscreen / force-aspect-ratio
            let title = format!("Lesavka-eye-{id}");
            let _ = sink.set_property("title", &title);         // only if supported
            if fullscreen {
                let _ = sink.set_property("fullscreen", &true); // ditto
            }
            let _ = sink.set_property("force-aspect-ratio", &true);
        }

        /* ---------- AppSrc ------------------------------------------------- */
        let src: gst_app::AppSrc = pipeline
            .by_name("src")
            .unwrap()
            .downcast::<gst_app::AppSrc>()
            .unwrap();
        src.set_caps(Some(
            &gst::Caps::builder("video/x-h264")
                .field("stream-format", &"byte-stream")
                .field("alignment", &"au")
                .build(),
        ));
        src.set_format(gst::Format::Time);

        pipeline.set_state(gst::State::Playing)?;

        // Set the window title again now that the pipeline is running.
        if let Some(sink) = pipeline.by_name("sink") {
            let title = format!("Lesavka-eye-{id}");
            let _ = sink.set_property_from_str("window-title", &title);
            let _ = sink.set_property("force-aspect-ratio", &true);
        }

        Ok(Self { _pipeline: pipeline, src })
    }

    /// Feed one access unit to the decoder.
    pub fn push_packet(&self, pkt: VideoPacket) {
        static CNT: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
        let n = CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        if n % 150 == 0 || n < 10 {
            debug!(eye = pkt.id, bytes = pkt.data.len(), pts = pkt.pts, "⬇️ received video AU");
        }

        let mut buf = gst::Buffer::from_slice(pkt.data);
        buf.get_mut()
            .unwrap()
            .set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));

        let _ = self.src.push_buffer(buf); // ignore Eos/flushing
    }
}
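
// ---------------------------------------------------------------------------
// Sketch (not part of the original file): the pipeline is never shut down
// explicitly, so GL/window resources live until process exit. Whether eager
// teardown is wanted here is an assumption; if it is, a Drop impl along these
// lines would provide it.
// ---------------------------------------------------------------------------
impl Drop for MonitorWindow {
    fn drop(&mut self) {
        // Best effort: a pipeline that already errored may refuse the state
        // change, and there is nothing useful to do about that during drop.
        let _ = self._pipeline.set_state(gst::State::Null);
    }
}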
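
// ---------------------------------------------------------------------------
// Sketch (not part of the original file): nothing above reads the pipeline
// bus, so decoder or GL errors pass silently. A minimal poll-based drain like
// this could be called periodically, e.g. from the same task that calls
// `push_packet`; when and how often it runs is an assumption. Only stock
// gstreamer-rs calls (`bus()`, `pop()`, `MessageView`) are used.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn drain_bus_messages(pipeline: &gst::Pipeline) {
    if let Some(bus) = pipeline.bus() {
        // Pop whatever is currently queued without blocking.
        while let Some(msg) = bus.pop() {
            use gst::MessageView;
            match msg.view() {
                MessageView::Error(err) => {
                    error!(error = %err.error(), debug = ?err.debug(), "GStreamer pipeline error");
                }
                MessageView::Warning(w) => warn!(warning = %w.error(), "GStreamer pipeline warning"),
                MessageView::Eos(..) => info!("GStreamer pipeline reached EOS"),
                _ => {}
            }
        }
    }
}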
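
// ---------------------------------------------------------------------------
// Usage sketch (not part of the original file): one plausible way to drive a
// MonitorWindow from a stream of incoming packets. The tokio mpsc channel and
// the dedicated blocking loop are assumptions about how the client wires up
// the network side; the real call site is wherever VideoPackets are received.
// ---------------------------------------------------------------------------
#[allow(dead_code)]
fn run_monitor(id: u32, mut rx: tokio::sync::mpsc::Receiver<VideoPacket>) -> anyhow::Result<()> {
    let window = MonitorWindow::new(id)?;
    // `blocking_recv` keeps this loop off the async executor; the sketch
    // assumes it runs on its own thread (e.g. via tokio::task::spawn_blocking).
    while let Some(pkt) = rx.blocking_recv() {
        window.push_packet(pkt);
    }
    Ok(())
}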