// (extraction artifact — original file metadata: 225 lines, 8.5 KiB, Rust)
#[allow(clippy::all)]
|
|
impl Drop for MonitorWindow {
|
|
fn drop(&mut self) {
|
|
let _ = self._pipeline.set_state(gst::State::Null);
|
|
}
|
|
}
|
|
|
|
#[allow(clippy::all)]
impl UnifiedMonitorWindow {
    #[cfg(coverage)]
    /// Build the unified renderer in coverage mode with deterministic fakesinks.
    ///
    /// This variant constructs the pipeline programmatically (two `appsrc`
    /// elements each linked to a `fakesink`) so that coverage runs do not
    /// depend on a display server or hardware decoders.
    ///
    /// # Errors
    /// Returns an error if GStreamer fails to initialise, if any element
    /// cannot be built, or if the pipeline cannot reach `Playing`.
    pub fn new() -> anyhow::Result<Self> {
        gst::init().context("initialising GStreamer")?;
        let pipeline = gst::Pipeline::new();
        // Two appsrcs: one per eye. `downcast` cannot fail for an element
        // built from the "appsrc" factory, hence the `expect`.
        let left_src: gst_app::AppSrc = gst::ElementFactory::make("appsrc")
            .build()
            .context("make left appsrc")?
            .downcast::<gst_app::AppSrc>()
            .expect("left appsrc");
        let right_src: gst_app::AppSrc = gst::ElementFactory::make("appsrc")
            .build()
            .context("make right appsrc")?
            .downcast::<gst_app::AppSrc>()
            .expect("right appsrc");

        // Both sources accept Annex-B H.264 access units (byte-stream,
        // AU-aligned), matching what `push_packet` feeds in.
        left_src.set_caps(Some(
            &gst::Caps::builder("video/x-h264")
                .field("stream-format", &"byte-stream")
                .field("alignment", &"au")
                .build(),
        ));
        right_src.set_caps(Some(
            &gst::Caps::builder("video/x-h264")
                .field("stream-format", &"byte-stream")
                .field("alignment", &"au")
                .build(),
        ));
        // Time format so the PTS set in `push_packet` is interpreted as
        // nanosecond clock time.
        left_src.set_format(gst::Format::Time);
        right_src.set_format(gst::Format::Time);
        let left_sink = gst::ElementFactory::make("fakesink")
            .build()
            .context("make left fakesink")?;
        let right_sink = gst::ElementFactory::make("fakesink")
            .build()
            .context("make right fakesink")?;

        // Elements must be added to the pipeline before linking.
        pipeline.add(left_src.upcast_ref::<gst::Element>())?;
        pipeline.add(right_src.upcast_ref::<gst::Element>())?;
        pipeline.add(&left_sink)?;
        pipeline.add(&right_sink)?;
        gst::Element::link_many(&[left_src.upcast_ref(), &left_sink])?;
        gst::Element::link_many(&[right_src.upcast_ref(), &right_sink])?;
        pipeline.set_state(gst::State::Playing)?;
        Ok(Self {
            pipeline,
            left_src,
            right_src,
        })
    }

    #[cfg(not(coverage))]
    /// Build the unified renderer that composites both eyes in a single window.
    ///
    /// Pipeline shape (from the launch string below): two live `appsrc`
    /// branches, each `queue ! capsfilter ! h264parse ! <decoder> !
    /// videoconvert ! videoscale`, feeding a `compositor`, which renders
    /// through `videoconvert` into an X11 or GL image sink.
    ///
    /// # Errors
    /// Returns an error if GStreamer fails to initialise, no H.264 decoder
    /// is available, the launch string fails to parse, either appsrc is
    /// missing from the parsed pipeline, or the pipeline cannot start.
    pub fn new() -> anyhow::Result<Self> {
        gst::init().context("initialising GStreamer")?;

        let decoder_name = pick_h264_decoder()?;
        // Sink selection: prefer ximagesink when GDK_BACKEND mentions x11,
        // or — when GDK_BACKEND is unset/unreadable — when DISPLAY exists;
        // otherwise fall back to glimagesink. sync=false renders ASAP
        // instead of waiting on buffer timestamps.
        let sink = if std::env::var("GDK_BACKEND")
            .map(|v| v.contains("x11"))
            .unwrap_or_else(|_| std::env::var_os("DISPLAY").is_some())
        {
            "ximagesink name=sink sync=false"
        } else {
            "glimagesink name=sink sync=false"
        };

        // Distinct decoder instance names so the two branches can coexist.
        let decoder_fragment0 = h264_decoder_launch_fragment_named(&decoder_name, "decoder0");
        let decoder_fragment1 = h264_decoder_launch_fragment_named(&decoder_name, "decoder1");
        // leaky=downstream + max-size-buffers=2 keeps latency low by
        // dropping old AUs rather than back-pressuring the source.
        let desc = format!(
            "compositor name=mix background=black ! videoconvert ! {sink} \
             appsrc name=src0 is-live=true format=time do-timestamp=true block=false ! \
             queue max-size-buffers=2 max-size-time=0 max-size-bytes=0 leaky=downstream ! \
             capsfilter caps=video/x-h264,stream-format=byte-stream,alignment=au ! \
             h264parse disable-passthrough=true ! {decoder_fragment0} ! videoconvert ! videoscale ! mix. \
             appsrc name=src1 is-live=true format=time do-timestamp=true block=false ! \
             queue max-size-buffers=2 max-size-time=0 max-size-bytes=0 leaky=downstream ! \
             capsfilter caps=video/x-h264,stream-format=byte-stream,alignment=au ! \
             h264parse disable-passthrough=true ! {decoder_fragment1} ! videoconvert ! videoscale ! mix."
        );

        let pipeline: gst::Pipeline = gst::parse::launch(&desc)?
            .downcast::<gst::Pipeline>()
            .expect("not a pipeline");

        // Size the window from the first assigned monitor rectangle,
        // defaulting to 1920x1080 when layout assignment yields nothing.
        let monitors = display::enumerate_monitors();
        let root_rect = layout::assign_rectangles(&monitors, &[("unified", 1920, 1080)])
            .first()
            .copied()
            .unwrap_or(layout::Rect {
                x: 0,
                y: 0,
                w: 1920,
                h: 1080,
            });
        // Side-by-side panes: each eye gets half the width, full height,
        // clamped to a 320x240 minimum.
        let pane_w = (root_rect.w / 2).max(320);
        let pane_h = root_rect.h.max(240);

        // Position the two compositor input pads side by side. Pads are
        // looked up by the names the compositor auto-assigns (sink_0/sink_1).
        if let Some(mix) = pipeline.by_name("mix") {
            if let Some(left_pad) = mix.static_pad("sink_0") {
                left_pad.set_property("xpos", 0_i32);
                left_pad.set_property("ypos", 0_i32);
                left_pad.set_property("width", pane_w);
                left_pad.set_property("height", pane_h);
            }
            if let Some(right_pad) = mix.static_pad("sink_1") {
                right_pad.set_property("xpos", pane_w);
                right_pad.set_property("ypos", 0_i32);
                right_pad.set_property("width", pane_w);
                right_pad.set_property("height", pane_h);
            }
        }

        if let Some(sink_elem) = pipeline.by_name("sink") {
            // Not every sink exposes "window-title"; probe before setting
            // to avoid a property panic.
            if sink_elem.find_property("window-title").is_some() {
                let _ = sink_elem.set_property("window-title", &"Lesavka-unified");
            }
            // If the sink implements VideoOverlay, pre-size its render area
            // to the full two-pane extent.
            if let Ok(overlay) = sink_elem.dynamic_cast::<VideoOverlay>() {
                let _ = overlay.set_render_rectangle(0, 0, pane_w * 2, pane_h);
            }
        }

        // Pull the two appsrcs back out of the parsed pipeline by name.
        let left_src: gst_app::AppSrc = pipeline
            .by_name("src0")
            .context("missing src0")?
            .downcast::<gst_app::AppSrc>()
            .expect("src0 appsrc");
        let right_src: gst_app::AppSrc = pipeline
            .by_name("src1")
            .context("missing src1")?
            .downcast::<gst_app::AppSrc>()
            .expect("src1 appsrc");

        // Caps/format mirror the coverage constructor: Annex-B H.264 AUs
        // in time format, matching what `push_packet` delivers.
        left_src.set_caps(Some(
            &gst::Caps::builder("video/x-h264")
                .field("stream-format", &"byte-stream")
                .field("alignment", &"au")
                .build(),
        ));
        right_src.set_caps(Some(
            &gst::Caps::builder("video/x-h264")
                .field("stream-format", &"byte-stream")
                .field("alignment", &"au")
                .build(),
        ));
        left_src.set_format(gst::Format::Time);
        right_src.set_format(gst::Format::Time);

        // Detached bus-watch thread: logs pipeline state changes and
        // surfaces decoder errors/warnings. It lives for the life of the
        // process (iter_timed with ClockTime::NONE blocks forever).
        {
            let bus = pipeline.bus().expect("no bus");
            std::thread::spawn(move || {
                use gst::MessageView::*;
                for msg in bus.iter_timed(gst::ClockTime::NONE) {
                    match msg.view() {
                        StateChanged(s) if s.current() == gst::State::Playing => {
                            // Only announce the top-level pipeline's
                            // transition, not every element's.
                            if msg.src().map(|s| s.is::<gst::Pipeline>()).unwrap_or(false) {
                                info!("🎞️ unified video pipeline ▶️");
                                info!("🎞️ unified decoder → {decoder_name}");
                            }
                        }
                        Error(e) => error!(
                            "💥 gst unified-video: {} ({})",
                            e.error(),
                            e.debug().unwrap_or_default()
                        ),
                        Warning(w) => warn!(
                            "⚠️ gst unified-video: {} ({})",
                            w.error(),
                            w.debug().unwrap_or_default()
                        ),
                        _ => {}
                    }
                }
            });
        }

        pipeline.set_state(gst::State::Playing)?;

        Ok(Self {
            pipeline,
            left_src,
            right_src,
        })
    }

    /// Feed one access-unit into the unified decoder wall.
    ///
    /// Packets with `id == 0` go to the left-eye appsrc; every other id is
    /// routed to the right-eye appsrc. Push failures are deliberately
    /// ignored (frames are droppable).
    pub fn push_packet(&self, pkt: VideoPacket) {
        // Rate-limited debug logging: first 10 packets, then every 150th.
        // Counter is process-global, shared across both eyes.
        static CNT: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
        let n = CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        if n % 150 == 0 || n < 10 {
            debug!(
                eye = pkt.id,
                bytes = pkt.data.len(),
                pts = pkt.pts,
                "⬇️ received unified video AU"
            );
        }
        let src = if pkt.id == 0 {
            &self.left_src
        } else {
            &self.right_src
        };
        let mut buf = gst::Buffer::from_slice(pkt.data);
        // A freshly created buffer is uniquely owned, so get_mut() succeeds.
        // NOTE(review): `pkt.pts` is treated as microseconds here; the
        // non-coverage pipeline also sets do-timestamp=true on the appsrc,
        // which may re-stamp buffers on arrival — confirm which timestamp
        // actually wins downstream.
        buf.get_mut()
            .unwrap()
            .set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));
        let _ = src.push_buffer(buf);
    }
}
|
|
|
|
#[allow(clippy::all)]
|
|
impl Drop for UnifiedMonitorWindow {
|
|
fn drop(&mut self) {
|
|
let _ = self.pipeline.set_state(gst::State::Null);
|
|
}
|
|
}
|