304 lines
12 KiB
Rust
304 lines
12 KiB
Rust
// client/src/input/camera.rs
|
||
#![forbid(unsafe_code)]
|
||
|
||
use anyhow::Context;
|
||
use gst::prelude::*;
|
||
use gstreamer as gst;
|
||
use gstreamer_app as gst_app;
|
||
use lesavka_common::lesavka::VideoPacket;
|
||
|
||
/// Read `name` from the process environment and parse it as a `u32`,
/// falling back to `default` when the variable is unset or unparsable.
fn env_u32(name: &str, default: u32) -> u32 {
    match std::env::var(name) {
        Ok(raw) => raw.parse().unwrap_or(default),
        Err(_) => default,
    }
}
|
||
|
||
/// Encoded frame format produced by the capture pipeline.
#[derive(Clone, Copy, Debug)]
pub enum CameraCodec {
    /// H.264 byte-stream access units (`stream-format=byte-stream,alignment=au`).
    H264,
    /// Motion-JPEG — one JPEG image per pulled buffer.
    Mjpeg,
}
|
||
|
||
/// Explicit capture settings. When `None` is passed to `CameraCapture::new`,
/// each setting instead falls back to its `LESAVKA_CAM_*` environment variable.
#[derive(Clone, Copy, Debug)]
pub struct CameraConfig {
    /// Output codec (H.264 or MJPEG).
    pub codec: CameraCodec,
    /// Frame width in pixels (env fallback `LESAVKA_CAM_WIDTH`, default 1280).
    pub width: u32,
    /// Frame height in pixels (env fallback `LESAVKA_CAM_HEIGHT`, default 720).
    pub height: u32,
    /// Frame rate (env fallback `LESAVKA_CAM_FPS`, default 25; clamped to >= 1).
    pub fps: u32,
}
|
||
|
||
/// Live GStreamer webcam pipeline feeding encoded frames into an appsink.
pub struct CameraCapture {
    // Owning handle to the running pipeline; never read directly, but must
    // stay alive so the pipeline remains in PLAYING state (reset in `Drop`).
    #[allow(dead_code)] // kept alive to hold PLAYING state
    pipeline: gst::Pipeline,
    // The `appsink name=asink` element that `pull()` reads samples from.
    sink: gst_app::AppSink,
}
|
||
|
||
impl CameraCapture {
|
||
    /// Build and start a webcam capture pipeline.
    ///
    /// * `device_fragment` — `None` → `/dev/video0`; an explicit `/dev/...`
    ///   path is used verbatim; `"test"`/`"videotestsrc"` selects a synthetic
    ///   test pattern; anything else is fuzzy-matched under `/dev/v4l/by-id`.
    /// * `cfg` — explicit codec/size/rate; when `None`, each setting falls
    ///   back to its `LESAVKA_CAM_*` environment variable.
    ///
    /// Returns the handle with the pipeline already set to PLAYING, or an
    /// error if the description fails to parse or the state change fails.
    pub fn new(device_fragment: Option<&str>, cfg: Option<CameraConfig>) -> anyhow::Result<Self> {
        // Safe to call more than once; the error (already initialised) is ignored.
        gst::init().ok();

        // Select source: V4L2 device or test pattern.
        // The boolean records whether an MJPG *source* format may be requested
        // (never for videotestsrc, which only produces raw video).
        let (src_desc, dev_label, allow_mjpg_source) = match device_fragment {
            Some(fragment)
                if fragment.eq_ignore_ascii_case("test")
                    || fragment.eq_ignore_ascii_case("videotestsrc") =>
            {
                let pattern =
                    std::env::var("LESAVKA_CAM_TEST_PATTERN").unwrap_or_else(|_| "smpte".into());
                (
                    format!("videotestsrc is-live=true pattern={pattern}"),
                    format!("videotestsrc:{pattern}"),
                    false,
                )
            }
            Some(path) if path.starts_with("/dev/") => (
                format!("v4l2src device={path} do-timestamp=true"),
                path.to_string(),
                true,
            ),
            Some(fragment) => {
                // Fuzzy match against /dev/v4l/by-id; fall back to video0.
                let dev = Self::find_device(fragment).unwrap_or_else(|| "/dev/video0".into());
                (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
            }
            None => {
                let dev = "/dev/video0".to_string();
                (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
            }
        };

        // Request MJPG from the camera when asked via LESAVKA_CAM_MJPG (any
        // value) or LESAVKA_CAM_FORMAT=mjpg|mjpeg|jpeg.
        let use_mjpg_source = allow_mjpg_source
            && (std::env::var("LESAVKA_CAM_MJPG").is_ok()
                || std::env::var("LESAVKA_CAM_FORMAT")
                    .ok()
                    .map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpg" | "mjpeg" | "jpeg"))
                    .unwrap_or(false));
        // Output codec: explicit cfg wins, otherwise LESAVKA_CAM_CODEC decides.
        let output_mjpeg = cfg
            .map(|cfg| matches!(cfg.codec, CameraCodec::Mjpeg))
            .unwrap_or_else(|| {
                std::env::var("LESAVKA_CAM_CODEC")
                    .ok()
                    .map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg"))
                    .unwrap_or(false)
            });
        let jpeg_quality = env_u32("LESAVKA_CAM_JPEG_QUALITY", 85).clamp(1, 100);
        // MJPG source re-encoded to H.264 forces software x264enc; hardware
        // encoder selection is otherwise delegated to choose_encoder().
        let (enc, kf_prop, kf_val) = if use_mjpg_source && !output_mjpeg {
            ("x264enc", "key-int-max", "30")
        } else {
            Self::choose_encoder()
        };
        if use_mjpg_source && !output_mjpeg {
            tracing::info!("📸 using MJPG source with software encode");
        }
        if output_mjpeg {
            tracing::info!("📸 outputting MJPEG frames for UVC (quality={jpeg_quality})");
        } else {
            tracing::info!("📸 using encoder element: {enc}");
        }
        let width = cfg
            .map(|cfg| cfg.width)
            .unwrap_or_else(|| env_u32("LESAVKA_CAM_WIDTH", 1280));
        let height = cfg
            .map(|cfg| cfg.height)
            .unwrap_or_else(|| env_u32("LESAVKA_CAM_HEIGHT", 720));
        let fps = cfg
            .map(|cfg| cfg.fps)
            .unwrap_or_else(|| env_u32("LESAVKA_CAM_FPS", 25))
            .max(1); // a zero framerate would make the caps invalid

        let have_nvvidconv = gst::ElementFactory::find("nvvidconv").is_some();
        // Per-encoder raw caps + pre-encoder conversion element.
        let (src_caps, preenc) = match enc {
            // ───────────────────────────────────────────────────────────────────
            // Jetson (has nvvidconv)            Desktop (falls back to videoconvert)
            // ───────────────────────────────────────────────────────────────────
            "nvh264enc" if have_nvvidconv => (
                format!(
                    "video/x-raw(memory:NVMM),format=NV12,width={width},height={height},framerate={fps}/1"
                ),
                "nvvidconv !",
            ),
            "nvh264enc" /* else */ => (
                format!(
                    "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
                ),
                "videoconvert !",
            ),
            "vaapih264enc" => (
                format!(
                    "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
                ),
                "videoconvert !",
            ),
            _ => (
                format!(
                    "video/x-raw,width={width},height={height},framerate={fps}/1"
                ),
                "videoconvert !",
            ),
        };

        // let desc = format!(
        //     "v4l2src device={dev} do-timestamp=true ! {raw_caps},width=1280,height=720 ! \
        //      videoconvert ! {enc} key-int-max=30 ! \
        //      h264parse config-interval=-1 ! \
        //      appsink name=asink emit-signals=true max-buffers=60 drop=true"
        // );
        // tracing::debug!(%desc, "📸 pipeline-desc");
        // Build a pipeline that works for any of the three encoders.
        //   * nvh264enc needs NVMM memory caps;
        //   * vaapih264enc wants system-memory caps;
        //   * x264enc needs the usual raw caps.
        let desc = if output_mjpeg {
            if use_mjpg_source {
                // Camera already emits JPEG: pass frames straight through.
                format!(
                    "{src_desc} ! \
                     image/jpeg,width={width},height={height},framerate={fps}/1 ! \
                     queue max-size-buffers=30 leaky=downstream ! \
                     appsink name=asink emit-signals=true max-buffers=60 drop=true"
                )
            } else {
                // Raw source: JPEG-encode in software.
                format!(
                    "{src_desc} ! \
                     video/x-raw,width={width},height={height},framerate={fps}/1 ! \
                     videoconvert ! jpegenc quality={jpeg_quality} ! \
                     queue max-size-buffers=30 leaky=downstream ! \
                     appsink name=asink emit-signals=true max-buffers=60 drop=true"
                )
            }
        } else if use_mjpg_source {
            // JPEG source decoded, re-timed, then H.264 encoded.
            format!(
                "{src_desc} ! \
                 image/jpeg,width={width},height={height} ! \
                 jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
                 videoconvert ! {enc} {kf_prop}={kf_val} ! \
                 h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                 queue max-size-buffers=30 leaky=downstream ! \
                 appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        } else {
            // Raw source straight into the selected H.264 encoder.
            format!(
                "{src_desc} ! {src_caps} ! \
                 {preenc} {enc} {kf_prop}={kf_val} ! \
                 h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                 queue max-size-buffers=30 leaky=downstream ! \
                 appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        };
        tracing::info!(%enc, width, height, fps, ?desc, "📸 using encoder element");

        let pipeline: gst::Pipeline = gst::parse::launch(&desc)
            .context("gst parse_launch(cam)")?
            .downcast::<gst::Pipeline>()
            .expect("not a pipeline");

        tracing::debug!("📸 pipeline built OK – setting PLAYING…");
        // The appsink is named "asink" in every pipeline description above,
        // so both lookups below are invariants, not runtime conditions.
        let sink: gst_app::AppSink = pipeline
            .by_name("asink")
            .expect("appsink element not found")
            .downcast::<gst_app::AppSink>()
            .expect("appsink down‑cast");

        pipeline.set_state(gst::State::Playing)?;
        tracing::info!("📸 webcam pipeline ▶️ device={dev_label}");

        Ok(Self { pipeline, sink })
    }
|
||
|
||
pub fn pull(&self) -> Option<VideoPacket> {
|
||
let sample = self.sink.pull_sample().ok()?;
|
||
let buf = sample.buffer()?;
|
||
let map = buf.map_readable().ok()?;
|
||
let pts = buf.pts().unwrap_or(gst::ClockTime::ZERO).nseconds() / 1_000;
|
||
Some(VideoPacket {
|
||
id: 2,
|
||
pts,
|
||
data: map.as_slice().to_vec(),
|
||
})
|
||
}
|
||
|
||
/// Fuzzy‑match devices under `/dev/v4l/by-id`, preferring capture nodes
|
||
fn find_device(substr: &str) -> Option<String> {
|
||
let wanted = substr.to_ascii_lowercase();
|
||
let mut matches: Vec<_> = std::fs::read_dir("/dev/v4l/by-id")
|
||
.ok()?
|
||
.flatten()
|
||
.filter_map(|e| {
|
||
let p = e.path();
|
||
let name = p.file_name()?.to_string_lossy().to_ascii_lowercase();
|
||
if name.contains(&wanted) {
|
||
Some(p)
|
||
} else {
|
||
None
|
||
}
|
||
})
|
||
.collect();
|
||
|
||
// deterministic order
|
||
matches.sort();
|
||
|
||
for p in matches {
|
||
if let Ok(target) = std::fs::read_link(&p) {
|
||
let dev = format!("/dev/{}", target.file_name()?.to_string_lossy());
|
||
if Self::is_capture(&dev) {
|
||
return Some(dev);
|
||
}
|
||
}
|
||
}
|
||
None
|
||
}
|
||
|
||
fn is_capture(dev: &str) -> bool {
|
||
const V4L2_CAP_VIDEO_CAPTURE: u32 = 0x0000_0001;
|
||
const V4L2_CAP_VIDEO_CAPTURE_MPLANE: u32 = 0x0000_1000;
|
||
|
||
v4l::Device::with_path(dev)
|
||
.ok()
|
||
.and_then(|d| d.query_caps().ok())
|
||
.map(|caps| {
|
||
let bits = caps.capabilities.bits();
|
||
(bits & V4L2_CAP_VIDEO_CAPTURE != 0) || (bits & V4L2_CAP_VIDEO_CAPTURE_MPLANE != 0)
|
||
})
|
||
.unwrap_or(false)
|
||
}
|
||
|
||
/// Cheap stub used when the web‑cam is disabled
|
||
pub fn new_stub() -> Self {
|
||
let pipeline = gst::Pipeline::new();
|
||
let sink: gst_app::AppSink = gst::ElementFactory::make("appsink")
|
||
.build()
|
||
.expect("appsink")
|
||
.downcast::<gst_app::AppSink>()
|
||
.unwrap();
|
||
Self { pipeline, sink }
|
||
}
|
||
|
||
#[allow(dead_code)] // helper kept for future heuristics
|
||
fn pick_encoder() -> (&'static str, &'static str) {
|
||
let encoders = &[
|
||
("nvh264enc", "video/x-raw(memory:NVMM),format=NV12"),
|
||
("vaapih264enc", "video/x-raw,format=NV12"),
|
||
("v4l2h264enc", "video/x-raw"), // RPi, Jetson, etc.
|
||
("x264enc", "video/x-raw"), // software
|
||
];
|
||
for (name, caps) in encoders {
|
||
if gst::ElementFactory::find(name).is_some() {
|
||
return (name, caps);
|
||
}
|
||
}
|
||
// last resort – software
|
||
("x264enc", "video/x-raw")
|
||
}
|
||
|
||
fn choose_encoder() -> (&'static str, &'static str, &'static str) {
|
||
match () {
|
||
_ if gst::ElementFactory::find("nvh264enc").is_some() => {
|
||
("nvh264enc", "gop-size", "30")
|
||
}
|
||
_ if gst::ElementFactory::find("vaapih264enc").is_some() => {
|
||
("vaapih264enc", "keyframe-period", "30")
|
||
}
|
||
_ if gst::ElementFactory::find("v4l2h264enc").is_some() => {
|
||
("v4l2h264enc", "idrcount", "30")
|
||
}
|
||
_ => ("x264enc", "key-int-max", "30"),
|
||
}
|
||
}
|
||
}
|
||
|
||
impl Drop for CameraCapture {
    fn drop(&mut self) {
        // Tear the pipeline down to NULL so the V4L2 device is released
        // promptly; a failed state change is deliberately ignored here —
        // there is nothing useful to do about it during drop.
        let _ = self.pipeline.set_state(gst::State::Null);
    }
}
|