use gstreamer as gst;
use gstreamer_app as gst_app;

use gst::prelude::*;
use lesavka_common::lesavka::VideoPacket;
use winit::{
    event_loop::EventLoop,
    window::{Window, WindowAttributes},
};

/// appsrc -> h264parse -> decodebin -> autovideosink, tuned for low latency:
/// leaky queues and no clock sync, one H.264 access unit per buffer.
const DESC: &str = concat!(
    "appsrc name=src is-live=true format=time do-timestamp=true block=false ! ",
    "queue max-size-buffers=0 max-size-bytes=0 max-size-time=0 leaky=downstream ! ",
    "capsfilter caps=video/x-h264,stream-format=byte-stream,alignment=au ! ",
    "h264parse disable-passthrough=true ! decodebin ! ",
    "queue ! videoconvert ! autovideosink sync=false"
);

pub struct MonitorWindow {
    id: u32,
    _window: Window,
    src: gst_app::AppSrc,
}

impl MonitorWindow {
    pub fn new(id: u32, el: &EventLoop<()>) -> anyhow::Result<Self> {
        gst::init()?; // idempotent

        /* ---------- Wayland / X11 window ------------- */
        let window = el.create_window(
            WindowAttributes::default()
                .with_title(format!("Lesavka-monitor-{id}"))
                .with_decorations(true),
        )?;

        /* ---------- GStreamer pipeline --------------- */
        let caps = gst::Caps::builder("video/x-h264")
            .field("stream-format", "byte-stream")
            .field("alignment", "au")
            .build();

        let pipeline = gst::parse::launch(DESC)?
            .downcast::<gst::Pipeline>()
            .expect("pipeline down-cast");

        let src = pipeline
            .by_name("src")
            .expect("appsrc element not found")
            .downcast::<gst_app::AppSrc>()
            .expect("appsrc down-cast");

        src.set_caps(Some(&caps));
        src.set_format(gst::Format::Time); // buffers carry time-format timestamps
        src.set_property("blocksize", 0u32); // one AU per buffer
        // NOTE: set_property() and friends return (), so no `?`
        src.set_property("do-timestamp", true);
        src.set_latency(gst::ClockTime::NONE, gst::ClockTime::NONE);

        pipeline.set_state(gst::State::Playing)?;

        Ok(Self { id, _window: window, src })
    }

    /// Feed one H.264 access unit into the pipeline.
    pub fn push_packet(&self, pkt: VideoPacket) {
        // The buffer must be mutable so we can stamp the PTS.
        let mut buf = gst::Buffer::from_slice(pkt.data);
        if let Some(buf_mut) = buf.get_mut() {
            buf_mut.set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));
        }
        let _ = self.src.push_buffer(buf); // ignore Eos / flushing
    }
}
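
// ---------------------------------------------------------------------------
// Usage sketch (illustrative only, not part of this module): roughly how a
// caller might wire a MonitorWindow to an incoming packet feed. The `rx`
// channel and where the receive loop lives are assumptions about the
// surrounding crate, not an API defined here.
//
//     let event_loop = EventLoop::new()?;
//     let monitor = MonitorWindow::new(0, &event_loop)?;
//     // Somewhere in the receive path, on the same thread as the window:
//     while let Ok(pkt) = rx.recv() {
//         monitor.push_packet(pkt); // one H.264 access unit per call
//     }
//     // ...and the winit event loop still has to be run so the window
//     // keeps processing its own events.
// ---------------------------------------------------------------------------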