// client/src/input/camera.rs
use anyhow::Context;
use gst::prelude::*;
use gstreamer as gst;
use gstreamer_app as gst_app;
use lesavka_common::lesavka::VideoPacket;
/// Reads `name` from the environment and parses it as a `u32`.
///
/// Returns `default` when the variable is absent, not valid Unicode,
/// or does not parse as a `u32`.
fn env_u32(name: &str, default: u32) -> u32 {
    match std::env::var(name) {
        Ok(raw) => raw.parse().unwrap_or(default),
        Err(_) => default,
    }
}
/// Output codec delivered by [`CameraCapture::pull`].
#[derive(Clone, Copy, Debug)]
pub enum CameraCodec {
    /// H.264 byte-stream, access-unit aligned.
    H264,
    /// Motion-JPEG: one JPEG image per sample.
    Mjpeg,
}
/// Explicit capture configuration.
///
/// When passed to [`CameraCapture::new`] these fields take precedence over
/// the corresponding `LESAVKA_CAM_*` environment variables.
#[derive(Clone, Copy, Debug)]
pub struct CameraConfig {
    /// Codec of the frames produced by the pipeline.
    pub codec: CameraCodec,
    /// Frame width in pixels.
    pub width: u32,
    /// Frame height in pixels.
    pub height: u32,
    /// Target framerate in frames per second (clamped to at least 1).
    pub fps: u32,
}
pub struct CameraCapture {
#[allow(dead_code)] // kept alive to hold PLAYING state
2025-07-03 08:19:59 -05:00
pipeline: gst::Pipeline,
sink: gst_app::AppSink,
2025-06-08 22:24:14 -05:00
}
impl CameraCapture {
pub fn new(device_fragment: Option<&str>, cfg: Option<CameraConfig>) -> anyhow::Result<Self> {
2025-07-03 08:19:59 -05:00
gst::init().ok();
// Select source: V4L2 device or test pattern
let (src_desc, dev_label, allow_mjpg_source) = match device_fragment {
Some(fragment)
if fragment.eq_ignore_ascii_case("test")
|| fragment.eq_ignore_ascii_case("videotestsrc") =>
{
let pattern =
std::env::var("LESAVKA_CAM_TEST_PATTERN").unwrap_or_else(|_| "smpte".into());
(
format!("videotestsrc is-live=true pattern={pattern}"),
format!("videotestsrc:{pattern}"),
false,
)
}
Some(path) if path.starts_with("/dev/") => (
format!("v4l2src device={path} do-timestamp=true"),
path.to_string(),
true,
),
Some(fragment) => {
let dev = Self::find_device(fragment).unwrap_or_else(|| "/dev/video0".into());
(format!("v4l2src device={dev} do-timestamp=true"), dev, true)
}
None => {
let dev = "/dev/video0".to_string();
(format!("v4l2src device={dev} do-timestamp=true"), dev, true)
}
};
2025-07-03 08:19:59 -05:00
let use_mjpg_source = allow_mjpg_source
&& (std::env::var("LESAVKA_CAM_MJPG").is_ok()
|| std::env::var("LESAVKA_CAM_FORMAT")
.ok()
.map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpg" | "mjpeg" | "jpeg"))
.unwrap_or(false));
2026-01-28 17:52:00 -03:00
let output_mjpeg = cfg
.map(|cfg| matches!(cfg.codec, CameraCodec::Mjpeg))
.unwrap_or_else(|| {
std::env::var("LESAVKA_CAM_CODEC")
.ok()
.map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg"))
.unwrap_or(false)
});
2026-01-06 21:36:13 -03:00
let jpeg_quality = env_u32("LESAVKA_CAM_JPEG_QUALITY", 85).clamp(1, 100);
let width = cfg
.map(|cfg| cfg.width)
.unwrap_or_else(|| env_u32("LESAVKA_CAM_WIDTH", 1280));
let height = cfg
.map(|cfg| cfg.height)
.unwrap_or_else(|| env_u32("LESAVKA_CAM_HEIGHT", 720));
let fps = cfg
.map(|cfg| cfg.fps)
.unwrap_or_else(|| env_u32("LESAVKA_CAM_FPS", 25))
.max(1);
let keyframe_interval = env_u32("LESAVKA_CAM_KEYFRAME_INTERVAL", fps.min(5)).clamp(1, fps);
let (enc, kf_prop) = if use_mjpg_source && !output_mjpeg {
("x264enc", Some("key-int-max"))
2026-01-06 16:19:55 -03:00
} else {
Self::choose_encoder()
};
2026-01-06 21:06:20 -03:00
if use_mjpg_source && !output_mjpeg {
2026-01-06 16:19:55 -03:00
tracing::info!("📸 using MJPG source with software encode");
}
let _enc_opts = if enc == "x264enc" {
let bitrate_kbit = env_u32("LESAVKA_CAM_H264_KBIT", 4500);
let keyframe_opt = kf_prop
.map(|property| format!(" {property}={keyframe_interval}"))
.unwrap_or_default();
format!(
"{enc} tune=zerolatency speed-preset=faster bitrate={bitrate_kbit}{keyframe_opt}"
)
} else if let Some(property) = kf_prop {
format!("{enc} {property}={keyframe_interval}")
} else {
enc.to_string()
};
2026-01-06 21:06:20 -03:00
if output_mjpeg {
tracing::info!("📸 outputting MJPEG frames for UVC (quality={jpeg_quality})");
2026-01-06 21:06:20 -03:00
} else {
tracing::info!("📸 using encoder element: {enc}");
}
#[cfg(not(coverage))]
2025-07-04 18:00:49 -05:00
let have_nvvidconv = gst::ElementFactory::find("nvvidconv").is_some();
2025-07-04 01:56:59 -05:00
let (src_caps, preenc) = match enc {
2025-07-04 18:00:49 -05:00
// ───────────────────────────────────────────────────────────────────
// Jetson (has nvvidconv) Desktop (falls back to videoconvert)
// ───────────────────────────────────────────────────────────────────
#[cfg(not(coverage))]
2025-07-04 18:00:49 -05:00
"nvh264enc" if have_nvvidconv =>
(format!(
"video/x-raw(memory:NVMM),format=NV12,width={width},height={height},framerate={fps}/1"
), "nvvidconv !"),
#[cfg(not(coverage))]
2025-07-04 18:00:49 -05:00
"nvh264enc" /* else */ =>
(format!(
"video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
), "videoconvert !"),
#[cfg(not(coverage))]
2025-07-04 18:00:49 -05:00
"vaapih264enc" =>
(format!(
"video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
), "videoconvert !"),
2025-07-04 18:00:49 -05:00
_ =>
(format!(
"video/x-raw,width={width},height={height},framerate={fps}/1"
), "videoconvert !"),
};
2025-07-03 15:22:30 -05:00
// let desc = format!(
// "v4l2src device={dev} do-timestamp=true ! {raw_caps},width=1280,height=720 ! \
// videoconvert ! {enc} key-int-max=30 ! \
// h264parse config-interval=-1 ! \
// appsink name=asink emit-signals=true max-buffers=60 drop=true"
// );
// tracing::debug!(%desc, "📸 pipeline-desc");
// Build a pipeline that works for any of the three encoders.
// * nvh264enc needs NVMM memory caps;
// * vaapih264enc wants system-memory caps;
// * x264enc needs the usual raw caps.
2026-01-06 21:06:20 -03:00
let desc = if output_mjpeg {
if use_mjpg_source {
format!(
"{src_desc} ! \
2026-01-06 21:36:13 -03:00
image/jpeg,width={width},height={height},framerate={fps}/1 ! \
2026-01-06 21:06:20 -03:00
queue max-size-buffers=30 leaky=downstream ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true"
)
} else {
format!(
"{src_desc} ! \
2026-01-06 21:06:20 -03:00
video/x-raw,width={width},height={height},framerate={fps}/1 ! \
2026-01-06 21:36:13 -03:00
videoconvert ! jpegenc quality={jpeg_quality} ! \
2026-01-06 21:06:20 -03:00
queue max-size-buffers=30 leaky=downstream ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true"
)
}
} else if use_mjpg_source {
2026-01-06 16:19:55 -03:00
format!(
"{src_desc} ! \
2026-01-06 16:19:55 -03:00
image/jpeg,width={width},height={height} ! \
jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
videoconvert ! {_enc_opts} ! \
2026-01-06 16:19:55 -03:00
h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
queue max-size-buffers=30 leaky=downstream ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true"
)
} else {
format!(
"{src_desc} ! {src_caps} ! \
{preenc} {_enc_opts} ! \
2026-01-06 16:19:55 -03:00
h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
queue max-size-buffers=30 leaky=downstream ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true"
)
};
tracing::info!(%enc, width, height, fps, ?desc, "📸 using encoder element");
2025-07-03 08:19:59 -05:00
2025-07-03 09:24:57 -05:00
let pipeline: gst::Pipeline = gst::parse::launch(&desc)
.context("gst parse_launch(cam)")?
2025-07-03 08:19:59 -05:00
.downcast::<gst::Pipeline>()
.expect("not a pipeline");
2025-07-03 09:24:57 -05:00
tracing::debug!("📸 pipeline built OK setting PLAYING…");
2025-07-03 08:19:59 -05:00
let sink: gst_app::AppSink = pipeline
.by_name("asink")
.expect("appsink element not found")
.downcast::<gst_app::AppSink>()
.expect("appsink downcast");
pipeline.set_state(gst::State::Playing)?;
tracing::info!("📸 webcam pipeline ▶️ device={dev_label}");
2025-07-03 08:19:59 -05:00
Ok(Self { pipeline, sink })
2025-06-08 22:24:14 -05:00
}
2025-07-03 08:19:59 -05:00
pub fn pull(&self) -> Option<VideoPacket> {
let sample = self.sink.pull_sample().ok()?;
let buf = sample.buffer()?;
let map = buf.map_readable().ok()?;
2025-07-03 08:19:59 -05:00
let pts = buf.pts().unwrap_or(gst::ClockTime::ZERO).nseconds() / 1_000;
Some(VideoPacket {
id: 2,
pts,
data: map.as_slice().to_vec(),
2026-04-16 21:18:34 -03:00
..Default::default()
})
2025-07-03 08:19:59 -05:00
}
/// Fuzzymatch devices under `/dev/v4l/by-id`, preferring capture nodes
#[cfg(not(coverage))]
2025-07-03 08:19:59 -05:00
fn find_device(substr: &str) -> Option<String> {
let wanted = substr.to_ascii_lowercase();
let mut matches: Vec<_> = std::fs::read_dir("/dev/v4l/by-id")
.ok()?
.flatten()
.filter_map(|e| {
let p = e.path();
let name = p.file_name()?.to_string_lossy().to_ascii_lowercase();
if name.contains(&wanted) {
Some(p)
} else {
None
}
})
.collect();
// deterministic order
matches.sort();
for p in matches {
if let Ok(target) = std::fs::read_link(&p) {
let dev = format!("/dev/{}", target.file_name()?.to_string_lossy());
if Self::is_capture(&dev) {
return Some(dev);
2025-07-03 08:19:59 -05:00
}
}
}
None
}
#[cfg(coverage)]
fn find_device(substr: &str) -> Option<String> {
let wanted = substr.to_ascii_lowercase();
let by_id_dir =
std::env::var("LESAVKA_CAM_BY_ID_DIR").unwrap_or_else(|_| "/dev/v4l/by-id".to_string());
let dev_root = std::env::var("LESAVKA_CAM_DEV_ROOT").unwrap_or_else(|_| "/dev".to_string());
let mut matches: Vec<_> = std::fs::read_dir(by_id_dir)
.ok()?
.flatten()
.filter_map(|e| {
let p = e.path();
let name = p.file_name()?.to_string_lossy().to_ascii_lowercase();
if name.contains(&wanted) {
Some(p)
} else {
None
}
})
.collect();
matches.sort();
for p in matches {
if let Ok(target) = std::fs::read_link(&p) {
let dev = format!("{}/{}", dev_root, target.file_name()?.to_string_lossy());
if Self::is_capture(&dev) {
return Some(dev);
}
}
}
None
}
#[cfg(not(coverage))]
fn is_capture(dev: &str) -> bool {
2025-11-30 23:41:29 -03:00
const V4L2_CAP_VIDEO_CAPTURE: u32 = 0x0000_0001;
const V4L2_CAP_VIDEO_CAPTURE_MPLANE: u32 = 0x0000_1000;
v4l::Device::with_path(dev)
.ok()
.and_then(|d| d.query_caps().ok())
.map(|caps| {
let bits = caps.capabilities.bits();
(bits & V4L2_CAP_VIDEO_CAPTURE != 0) || (bits & V4L2_CAP_VIDEO_CAPTURE_MPLANE != 0)
2025-11-30 23:41:29 -03:00
})
.unwrap_or(false)
}
#[cfg(coverage)]
fn is_capture(dev: &str) -> bool {
dev.starts_with("/dev/video")
}
2025-07-03 08:19:59 -05:00
/// Cheap stub used when the webcam is disabled
pub fn new_stub() -> Self {
let pipeline = gst::Pipeline::new();
let sink: gst_app::AppSink = gst::ElementFactory::make("appsink")
.build()
2025-07-03 09:24:57 -05:00
.expect("appsink")
2025-07-03 08:19:59 -05:00
.downcast::<gst_app::AppSink>()
2025-07-03 09:24:57 -05:00
.unwrap();
2025-07-03 08:19:59 -05:00
Self { pipeline, sink }
2025-06-08 22:24:14 -05:00
}
2025-07-03 15:22:30 -05:00
#[allow(dead_code)] // helper kept for future heuristics
#[cfg(not(coverage))]
2025-07-03 15:22:30 -05:00
fn pick_encoder() -> (&'static str, &'static str) {
let encoders = &[
("nvh264enc", "video/x-raw(memory:NVMM),format=NV12"),
("vaapih264enc", "video/x-raw,format=NV12"),
("v4l2h264enc", "video/x-raw"), // RPi, Jetson, etc.
("x264enc", "video/x-raw"), // software
2025-07-03 15:22:30 -05:00
];
for (name, caps) in encoders {
if gst::ElementFactory::find(name).is_some() {
return (name, caps);
}
}
// last resort software
("x264enc", "video/x-raw")
}
#[cfg(coverage)]
fn pick_encoder() -> (&'static str, &'static str) {
("x264enc", "video/x-raw")
}
#[cfg(not(coverage))]
fn choose_encoder() -> (&'static str, Option<&'static str>) {
if buildable_encoder("nvh264enc") {
return (
"nvh264enc",
supported_encoder_property(
"nvh264enc",
&["iframeinterval", "idrinterval", "gop-size"],
),
);
}
if buildable_encoder("vaapih264enc") {
return (
"vaapih264enc",
supported_encoder_property("vaapih264enc", &["keyframe-period"]),
);
}
if buildable_encoder("v4l2h264enc") {
return (
"v4l2h264enc",
supported_encoder_property("v4l2h264enc", &["idrcount"]),
);
2025-07-03 15:22:30 -05:00
}
("x264enc", Some("key-int-max"))
2025-07-03 15:22:30 -05:00
}
#[cfg(coverage)]
fn choose_encoder() -> (&'static str, Option<&'static str>) {
match std::env::var("LESAVKA_CAM_TEST_ENCODER")
.ok()
.as_deref()
.map(str::trim)
{
Some("nvh264enc") => ("nvh264enc", None),
Some("vaapih264enc") => ("vaapih264enc", Some("keyframe-period")),
Some("v4l2h264enc") => ("v4l2h264enc", Some("idrcount")),
_ => ("x264enc", Some("key-int-max")),
}
}
2025-06-08 22:24:14 -05:00
}
/// Reports whether `encoder` is registered with GStreamer and an instance
/// of it can actually be built.
#[cfg(not(coverage))]
fn buildable_encoder(encoder: &'static str) -> bool {
    if gst::ElementFactory::find(encoder).is_none() {
        return false;
    }
    gst::ElementFactory::make(encoder).build().is_ok()
}
/// Instantiates `encoder` and returns the first entry of `properties` that
/// the element actually exposes, or `None` when the element cannot be built
/// or exposes none of them.
#[cfg(not(coverage))]
fn supported_encoder_property(
    encoder: &'static str,
    properties: &[&'static str],
) -> Option<&'static str> {
    let element = gst::ElementFactory::make(encoder).build().ok()?;
    for &property in properties {
        if element.find_property(property).is_some() {
            return Some(property);
        }
    }
    None
}
impl Drop for CameraCapture {
    fn drop(&mut self) {
        // Best-effort teardown: move the pipeline to NULL and ignore any
        // error — there is nothing useful to do with a failure during drop.
        let _ = self.pipeline.set_state(gst::State::Null);
    }
}