//! UVC webcam sink: pushes H.264 or MJPEG frames into the USB gadget device.
use anyhow::Context;
|
|
use gstreamer as gst;
|
|
use gstreamer::prelude::*;
|
|
use gstreamer_app as gst_app;
|
|
use lesavka_common::lesavka::VideoPacket;
|
|
use std::fs;
|
|
use std::path::Path;
|
|
use std::path::PathBuf;
|
|
use std::sync::atomic::{AtomicBool, AtomicU64};
|
|
use tracing::warn;
|
|
|
|
use crate::camera::{CameraCodec, CameraConfig};
|
|
use crate::video_support::{contains_idr, dev_mode_enabled, pick_h264_decoder, reserve_local_pts};
|
|
|
|
/// Push H.264 or MJPEG frames into the USB UVC gadget.
///
/// Inputs: a UVC device node and the negotiated camera configuration.
/// Outputs: a live `WebcamSink` that accepts `VideoPacket`s.
/// Why: the UVC sink owns the GStreamer pipeline details for gadget output so
/// the relay logic can focus on session lifecycle instead of media plumbing.
pub struct WebcamSink {
    // Entry point for frames pushed into the GStreamer pipeline.
    appsrc: gst_app::AppSrc,
    // Owning handle for the pipeline; driven to `Null` in `Drop`.
    pipe: gst::Pipeline,
    // `false` until the first pushed frame aligns the pipeline to the session
    // clock; initialized to `true` when alignment is disabled via env so the
    // alignment step is skipped entirely.
    clock_aligned: AtomicBool,
    // Next presentation timestamp (microseconds) on the locally monotonic
    // timeline managed by `reserve_local_pts`.
    next_pts_us: AtomicU64,
    // Nominal per-frame duration in microseconds, derived from the configured
    // fps (clamped to at least 1).
    frame_step_us: u64,
    // When set, `push` writes frames to this spool file for the UVC helper
    // instead of pushing them through the pipeline (MJPEG spool mode).
    mjpeg_spool_path: Option<PathBuf>,
}
/// Report whether UVC sink pipelines should align to the shared session clock.
///
/// Reads `LESAVKA_UVC_SESSION_CLOCK_ALIGN`; alignment defaults to enabled.
/// The values "0", "false", "no", and "off" (case-insensitive, surrounding
/// whitespace ignored) disable it; anything else keeps it on.
fn uvc_sink_session_clock_align_enabled() -> bool {
    match std::env::var("LESAVKA_UVC_SESSION_CLOCK_ALIGN") {
        Ok(raw) => {
            let flag = raw.trim();
            !["0", "false", "no", "off"]
                .iter()
                .any(|disable| flag.eq_ignore_ascii_case(disable))
        }
        // Unset (or non-UTF-8) environment: keep the default of "enabled".
        Err(_) => true,
    }
}
fn uvc_mjpeg_v4l2sink_io_mode() -> String {
|
|
let value = std::env::var("LESAVKA_UVC_MJPEG_IO_MODE").unwrap_or_else(|_| "mmap".to_string());
|
|
let trimmed = value.trim().to_ascii_lowercase();
|
|
match trimmed.as_str() {
|
|
"auto" | "rw" | "mmap" | "userptr" | "dmabuf" | "dmabuf-import" => trimmed,
|
|
_ => {
|
|
warn!(
|
|
value,
|
|
"invalid LESAVKA_UVC_MJPEG_IO_MODE; falling back to mmap"
|
|
);
|
|
"mmap".to_string()
|
|
}
|
|
}
|
|
}
|
|
|
|
/// Report whether MJPEG frames should be spooled to disk for the UVC helper.
///
/// Reads `LESAVKA_UVC_MJPEG_SPOOL`; spooling defaults to enabled. The values
/// "0", "false", "no", and "off" (case-insensitive, surrounding whitespace
/// ignored) disable it.
fn mjpeg_spool_enabled() -> bool {
    // Unset (or non-UTF-8) environment: keep the default of "enabled".
    let Ok(raw) = std::env::var("LESAVKA_UVC_MJPEG_SPOOL") else {
        return true;
    };
    let flag = raw.trim();
    !["0", "false", "no", "off"]
        .iter()
        .any(|disable| flag.eq_ignore_ascii_case(disable))
}
/// Path where spooled MJPEG frames are published for the UVC helper.
///
/// Honors `LESAVKA_UVC_FRAME_PATH` when set; otherwise falls back to
/// `/run/lesavka-uvc-frame.mjpg`.
fn mjpeg_spool_path() -> PathBuf {
    match std::env::var("LESAVKA_UVC_FRAME_PATH") {
        Ok(custom) => PathBuf::from(custom),
        Err(_) => PathBuf::from("/run/lesavka-uvc-frame.mjpg"),
    }
}
impl WebcamSink {
    /// Build a new webcam sink pipeline.
    ///
    /// Inputs: the target UVC device plus the selected camera profile.
    /// Outputs: a sink ready to receive `VideoPacket`s.
    /// Why: UVC output has its own caps and decoder chain that differs from the
    /// HDMI sink, so it lives in a dedicated constructor.
    #[cfg(coverage)]
    pub fn new(_uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
        gst::init()?;

        // Coverage builds replace the real UVC chain with appsrc -> fakesink
        // so the constructor can run without gadget hardware.
        let pipeline = gst::Pipeline::new();
        let clock_align_enabled = uvc_sink_session_clock_align_enabled();
        let src = gst::ElementFactory::make("appsrc")
            .build()?
            .downcast::<gst_app::AppSrc>()
            .expect("appsrc");
        src.set_is_live(true);
        src.set_format(gst::Format::Time);
        // PTS values are stamped locally in push(), not by the appsrc itself.
        src.set_property("do-timestamp", &false);

        let sink = gst::ElementFactory::make("fakesink")
            .build()
            .context("building fakesink")?;
        if clock_align_enabled {
            crate::media_timing::prepare_pipeline_clock_sync(&pipeline);
            crate::media_timing::enable_sink_clock_sync(&sink);
        }
        pipeline.add_many(&[src.upcast_ref(), &sink])?;
        gst::Element::link_many(&[src.upcast_ref(), &sink])?;
        pipeline.set_state(gst::State::Playing)?;

        // Per-frame duration in microseconds; fps is clamped to at least 1 so
        // the division cannot panic and the step is never zero.
        let frame_step_us = (1_000_000u64 / u64::from(cfg.fps.max(1))).max(1);
        Ok(Self {
            appsrc: src,
            pipe: pipeline,
            // Start "already aligned" when alignment is disabled so push()
            // never attempts the one-shot alignment.
            clock_aligned: AtomicBool::new(!clock_align_enabled),
            next_pts_us: AtomicU64::new(0),
            frame_step_us,
            mjpeg_spool_path: None,
        })
    }

    /// Build a new webcam sink pipeline (production variant).
    ///
    /// Inputs: the target UVC device node path plus the selected camera
    /// profile.
    /// Outputs: a sink ready to receive `VideoPacket`s.
    /// Why: picks one of three pipelines depending on codec and spool mode —
    /// MJPEG spool (frames written to disk for the UVC helper), MJPEG direct
    /// to v4l2sink (emergency fallback), or H.264 decode-and-convert.
    #[cfg(not(coverage))]
    pub fn new(uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
        gst::init()?;

        let pipeline = gst::Pipeline::new();
        let clock_align_enabled = uvc_sink_session_clock_align_enabled();

        let width = cfg.width as i32;
        let height = cfg.height as i32;
        // Clamp fps so the framerate caps fraction is never 0/1.
        let fps = cfg.fps.max(1) as i32;
        let use_mjpeg = matches!(cfg.codec, CameraCodec::Mjpeg);

        let src = gst::ElementFactory::make("appsrc")
            .build()?
            .downcast::<gst_app::AppSrc>()
            .expect("appsrc");
        src.set_is_live(true);
        src.set_format(gst::Format::Time);
        // PTS values are stamped locally in push(), not by the appsrc itself.
        src.set_property("do-timestamp", false);
        // LESAVKA_UVC_APP_BLOCK: any value other than "0" makes push_buffer
        // block when the internal queue is full; default is non-blocking.
        let block = std::env::var("LESAVKA_UVC_APP_BLOCK")
            .ok()
            .map(|value| value != "0")
            .unwrap_or(false);
        src.set_property("block", block);
        if clock_align_enabled {
            crate::media_timing::prepare_pipeline_clock_sync(&pipeline);
        }

        let mut mjpeg_spool_file = None;

        if use_mjpeg && mjpeg_spool_enabled() {
            // MJPEG spool mode: push() writes frames to a file for the UVC
            // helper, so the appsrc -> fakesink pipeline below never receives
            // buffers and exists only to keep the sink object uniform.
            let caps_mjpeg = gst::Caps::builder("image/jpeg")
                .field("parsed", true)
                .field("width", width)
                .field("height", height)
                .field("framerate", gst::Fraction::new(fps, 1))
                .field("pixel-aspect-ratio", gst::Fraction::new(1, 1))
                .field("colorimetry", "2:4:7:1")
                .build();
            src.set_caps(Some(&caps_mjpeg));

            let sink = gst::ElementFactory::make("fakesink")
                .build()
                .context("building fakesink for MJPEG UVC spool")?;
            if clock_align_enabled {
                crate::media_timing::enable_sink_clock_sync(&sink);
            } else if sink.has_property("sync", None) {
                sink.set_property("sync", false);
            }
            pipeline.add_many([src.upcast_ref(), &sink])?;
            gst::Element::link_many([src.upcast_ref(), &sink])?;
            mjpeg_spool_file = Some(mjpeg_spool_path());
        } else if use_mjpeg {
            let caps_mjpeg = gst::Caps::builder("image/jpeg")
                .field("parsed", true)
                .field("width", width)
                .field("height", height)
                .field("framerate", gst::Fraction::new(fps, 1))
                .field("pixel-aspect-ratio", gst::Fraction::new(1, 1))
                .field("colorimetry", "2:4:7:1")
                .build();
            src.set_caps(Some(&caps_mjpeg));

            let queue = gst::ElementFactory::make("queue").build()?;
            let capsfilter = gst::ElementFactory::make("capsfilter")
                .property("caps", &caps_mjpeg)
                .build()?;
            let sink = gst::ElementFactory::make("v4l2sink")
                .property("device", uvc_dev)
                .build()?;
            // Kept as an emergency fallback; normal MJPEG output is brokered
            // through the UVC helper so only one process owns the gadget node.
            sink.set_property_from_str("io-mode", &uvc_mjpeg_v4l2sink_io_mode());
            if clock_align_enabled {
                crate::media_timing::enable_sink_clock_sync(&sink);
            } else if sink.has_property("sync", None) {
                sink.set_property("sync", false);
            }

            pipeline.add_many([src.upcast_ref(), &queue, &capsfilter, &sink])?;
            gst::Element::link_many([src.upcast_ref(), &queue, &capsfilter, &sink])?;
        } else {
            // H.264 path: decode incoming byte-stream AUs and convert/scale to
            // raw YUY2 at the negotiated geometry before the gadget sink.
            let caps_h264 = gst::Caps::builder("video/x-h264")
                .field("stream-format", "byte-stream")
                .field("alignment", "au")
                .build();
            let raw_caps = gst::Caps::builder("video/x-raw")
                .field("format", "YUY2")
                .field("width", width)
                .field("height", height)
                .field("framerate", gst::Fraction::new(fps, 1))
                .build();
            src.set_caps(Some(&caps_h264));

            let h264parse = gst::ElementFactory::make("h264parse").build()?;
            // Decoder element is chosen at runtime (hardware vs software).
            let decoder_name = pick_h264_decoder();
            let decoder = gst::ElementFactory::make(decoder_name)
                .build()
                .with_context(|| format!("building decoder element {decoder_name}"))?;
            let convert = gst::ElementFactory::make("videoconvert").build()?;
            let scale = gst::ElementFactory::make("videoscale").build()?;
            let caps = gst::ElementFactory::make("capsfilter")
                .property("caps", &raw_caps)
                .build()?;
            let sink = gst::ElementFactory::make("v4l2sink")
                .property("device", uvc_dev)
                .build()?;
            if clock_align_enabled {
                crate::media_timing::enable_sink_clock_sync(&sink);
            } else if sink.has_property("sync", None) {
                sink.set_property("sync", false);
            }

            pipeline.add_many([
                src.upcast_ref(),
                &h264parse,
                &decoder,
                &convert,
                &scale,
                &caps,
                &sink,
            ])?;
            gst::Element::link_many([
                src.upcast_ref(),
                &h264parse,
                &decoder,
                &convert,
                &scale,
                &caps,
                &sink,
            ])?;
        }
        pipeline.set_state(gst::State::Playing)?;

        // Per-frame duration in microseconds; fps is clamped to at least 1 so
        // the division cannot panic and the step is never zero.
        let frame_step_us = (1_000_000u64 / u64::from(cfg.fps.max(1))).max(1);
        Ok(Self {
            appsrc: src,
            pipe: pipeline,
            // Start "already aligned" when alignment is disabled so push()
            // never attempts the one-shot alignment.
            clock_aligned: AtomicBool::new(!clock_align_enabled),
            next_pts_us: AtomicU64::new(0),
            frame_step_us,
            // Some(path) only in MJPEG spool mode; push() checks this first.
            mjpeg_spool_path: mjpeg_spool_file,
        })
    }

    /// Push one client frame into the UVC pipeline.
    ///
    /// Inputs: the next `VideoPacket` from the gRPC camera stream.
    /// Outputs: none; the frame is forwarded to the appsrc when possible.
    /// Why: UVC sinks use a locally monotonic timeline so presentation remains
    /// stable even when WAN packet timestamps arrive out of order.
    #[cfg(coverage)]
    pub fn push(&self, pkt: VideoPacket) {
        // Coverage builds skip timestamping and ignore push errors.
        let buf = gst::Buffer::from_slice(pkt.data);
        let _ = self.appsrc.push_buffer(buf);
    }

    /// Push one client frame into the UVC pipeline (production variant).
    ///
    /// Inputs: the next `VideoPacket` from the gRPC camera stream.
    /// Outputs: none; the frame is spooled to disk (MJPEG spool mode) or
    /// timestamped and handed to the appsrc.
    /// Why: UVC sinks use a locally monotonic timeline so presentation remains
    /// stable even when WAN packet timestamps arrive out of order.
    #[cfg(not(coverage))]
    pub fn push(&self, pkt: VideoPacket) {
        // Spool mode: publish the frame for the UVC helper via the filesystem
        // and bypass the GStreamer pipeline entirely.
        if let Some(path) = &self.mjpeg_spool_path {
            if let Err(err) = spool_mjpeg_frame(path, &pkt.data) {
                warn!(target:"lesavka_server::video", %err, "📸⚠️ failed to spool MJPEG frame for UVC helper");
            }
            return;
        }

        let mut buf = gst::Buffer::from_slice(pkt.data);
        // get_mut() succeeds here because the buffer was just created and is
        // not shared yet.
        if let Some(meta) = buf.get_mut() {
            // Reserve the next slot on the locally monotonic PTS timeline,
            // feeding the packet's own pts through the shared helper.
            let pts_us = reserve_local_pts(&self.next_pts_us, pkt.pts, self.frame_step_us);
            // One-shot: the first frame through aligns the pipeline to the
            // session clock (swap returns the previous "aligned" flag).
            if !self
                .clock_aligned
                .swap(true, std::sync::atomic::Ordering::SeqCst)
            {
                crate::media_timing::align_pipeline_to_session_clock(&self.pipe, pts_us);
            }
            let ts = gst::ClockTime::from_useconds(pts_us);
            meta.set_pts(Some(ts));
            meta.set_dts(Some(ts));
            meta.set_duration(Some(gst::ClockTime::from_useconds(self.frame_step_us)));
        }
        // Push failures (e.g. pipeline flushing) are logged, not fatal: the
        // stream continues with the next frame.
        if let Err(err) = self.appsrc.push_buffer(buf) {
            tracing::warn!(target:"lesavka_server::video", %err, "📸⚠️ appsrc push failed");
        }
    }
}
#[cfg(not(coverage))]
|
|
fn spool_mjpeg_frame(path: &Path, data: &[u8]) -> anyhow::Result<()> {
|
|
if let Some(parent) = path.parent() {
|
|
fs::create_dir_all(parent)?;
|
|
}
|
|
let tmp = path.with_extension(format!("mjpg.{}.tmp", std::process::id()));
|
|
fs::write(&tmp, data)?;
|
|
fs::rename(&tmp, path)?;
|
|
Ok(())
|
|
}
|
|
|
|
impl Drop for WebcamSink {
    fn drop(&mut self) {
        // Tear the pipeline down to Null so GStreamer releases the device and
        // threads. The state-change result is deliberately ignored: there is
        // no useful recovery path inside a destructor.
        let _ = self.pipe.set_state(gst::State::Null);
    }
}
#[cfg(test)]
mod tests {
    /// Alignment is on by default, every falsy spelling turns it off, and an
    /// explicit "1" keeps it on.
    #[test]
    fn uvc_session_clock_alignment_defaults_on_and_accepts_disable_overrides() {
        temp_env::with_var_unset("LESAVKA_UVC_SESSION_CLOCK_ALIGN", || {
            assert!(super::uvc_sink_session_clock_align_enabled());
        });

        let disable_spellings = ["0", "false", "no", "off"];
        for spelling in disable_spellings {
            temp_env::with_var("LESAVKA_UVC_SESSION_CLOCK_ALIGN", Some(spelling), || {
                assert!(!super::uvc_sink_session_clock_align_enabled());
            });
        }

        temp_env::with_var("LESAVKA_UVC_SESSION_CLOCK_ALIGN", Some("1"), || {
            assert!(super::uvc_sink_session_clock_align_enabled());
        });
    }

    /// io-mode resolves to "mmap" when unset, when requested explicitly, and
    /// when the override is garbage.
    #[test]
    fn mjpeg_uvc_sink_defaults_to_mmap_io_mode_with_safe_override() {
        temp_env::with_var_unset("LESAVKA_UVC_MJPEG_IO_MODE", || {
            assert_eq!(super::uvc_mjpeg_v4l2sink_io_mode(), "mmap");
        });

        for requested in ["mmap", "not-real"] {
            temp_env::with_var("LESAVKA_UVC_MJPEG_IO_MODE", Some(requested), || {
                assert_eq!(super::uvc_mjpeg_v4l2sink_io_mode(), "mmap");
            });
        }
    }

    /// Spooling defaults on, "0" disables it, and the frame path honors the
    /// env override.
    #[test]
    fn mjpeg_spool_defaults_on_with_path_override() {
        temp_env::with_var_unset("LESAVKA_UVC_MJPEG_SPOOL", || {
            assert!(super::mjpeg_spool_enabled());
        });

        temp_env::with_var("LESAVKA_UVC_MJPEG_SPOOL", Some("0"), || {
            assert!(!super::mjpeg_spool_enabled());
        });

        temp_env::with_var("LESAVKA_UVC_FRAME_PATH", Some("/tmp/frame.mjpg"), || {
            let expected = std::path::PathBuf::from("/tmp/frame.mjpg");
            assert_eq!(super::mjpeg_spool_path(), expected);
        });
    }
}