lesavka/client/src/input/camera.rs

716 lines
27 KiB
Rust
Raw Normal View History

2025-06-08 22:24:14 -05:00
// client/src/input/camera.rs
2025-07-03 09:24:57 -05:00
use anyhow::Context;
use gst::prelude::*;
2025-07-03 08:19:59 -05:00
use gstreamer as gst;
use gstreamer_app as gst_app;
use lesavka_common::lesavka::VideoPacket;
use std::{
io::Write,
path::{Path, PathBuf},
sync::{
Arc,
atomic::{AtomicBool, Ordering},
},
thread,
time::Duration,
};
const CAMERA_PREVIEW_TAP_ENV: &str = "LESAVKA_UPLINK_CAMERA_PREVIEW";
2025-06-08 22:24:14 -05:00
/// Pre-encoder format-negotiation strategy for the webcam source.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
enum CameraSourceProfile {
    /// Operator pinned raw frames (e.g. YUYV); no decode stage is inserted.
    Raw,
    /// Operator pinned MJPEG; frames are decoded with `jpegdec`.
    Mjpeg,
    /// Default: `decodebin` negotiates either raw frames or MJPEG.
    AutoDecode,
}
/// Read `name` from the environment as a `u32`, falling back to `default`
/// when the variable is unset or does not parse.
///
/// The value is trimmed before parsing so that stray whitespace
/// (e.g. `LESAVKA_CAM_FPS=" 30 "`) does not silently fall back to the
/// default — `u32::from_str` rejects surrounding whitespace.
fn env_u32(name: &str, default: u32) -> u32 {
    std::env::var(name)
        .ok()
        .and_then(|v| v.trim().parse::<u32>().ok())
        .unwrap_or(default)
}
2026-01-28 17:52:00 -03:00
/// Output codec for frames handed to the uplink via [`CameraCapture::pull`].
#[derive(Clone, Copy, Debug)]
pub enum CameraCodec {
    /// H.264 byte-stream (AU-aligned, produced by the selected encoder).
    H264,
    /// Motion-JPEG frames (one JPEG per buffer).
    Mjpeg,
}
/// Capture parameters for [`CameraCapture::new`]. Each field acts as a
/// default and can still be overridden by the matching `LESAVKA_CAM_*`
/// environment variable.
#[derive(Clone, Copy, Debug)]
pub struct CameraConfig {
    /// Desired output codec (H.264 or MJPEG).
    pub codec: CameraCodec,
    /// Frame width in pixels.
    pub width: u32,
    /// Frame height in pixels.
    pub height: u32,
    /// Frame rate in frames per second.
    pub fps: u32,
}
2025-06-08 22:24:14 -05:00
/// A live webcam capture pipeline that delivers encoded frames through an
/// appsink; see [`CameraCapture::pull`].
pub struct CameraCapture {
    #[allow(dead_code)] // kept alive to hold PLAYING state
    pipeline: gst::Pipeline,
    // The appsink named "asink" in the pipeline description; `pull()` reads it.
    sink: gst_app::AppSink,
    // Stop flag for the optional preview-tap writer thread (cleared in Drop).
    preview_tap_running: Option<Arc<AtomicBool>>,
}
impl CameraCapture {
    /// Build and start the webcam capture pipeline.
    ///
    /// * `device_fragment` — `"test"`/`"videotestsrc"` for a synthetic test
    ///   pattern, an absolute `/dev/...` path, a fuzzy `/dev/v4l/by-id`
    ///   substring, or `None` for `/dev/video0`.
    /// * `cfg` — codec/geometry defaults; each one can still be overridden by
    ///   the `LESAVKA_CAM_*` environment variables.
    ///
    /// # Errors
    /// Returns an error when the pipeline description fails to parse, the
    /// pipeline refuses to reach PLAYING, or a preview-tap path was
    /// configured but the tap appsink cannot be found.
    pub fn new(device_fragment: Option<&str>, cfg: Option<CameraConfig>) -> anyhow::Result<Self> {
        // Safe to call repeatedly; errors are ignored on purpose.
        gst::init().ok();
        // Select source: V4L2 device or test pattern.
        // `allow_mjpg_source` is false for videotestsrc (raw output only).
        let (src_desc, dev_label, allow_mjpg_source) = match device_fragment {
            Some(fragment)
                if fragment.eq_ignore_ascii_case("test")
                    || fragment.eq_ignore_ascii_case("videotestsrc") =>
            {
                let pattern =
                    std::env::var("LESAVKA_CAM_TEST_PATTERN").unwrap_or_else(|_| "smpte".into());
                (
                    format!("videotestsrc is-live=true pattern={pattern}"),
                    format!("videotestsrc:{pattern}"),
                    false,
                )
            }
            Some(path) if path.starts_with("/dev/") => (
                format!("v4l2src device={path} do-timestamp=true"),
                path.to_string(),
                true,
            ),
            Some(fragment) => {
                let dev = Self::find_device(fragment).unwrap_or_else(|| "/dev/video0".into());
                (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
            }
            None => {
                let dev = "/dev/video0".to_string();
                (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
            }
        };
        // Output codec: an explicit CameraConfig wins; otherwise the
        // LESAVKA_CAM_CODEC env var selects MJPEG. Default is H.264.
        let output_mjpeg = cfg.map_or_else(
            || {
                std::env::var("LESAVKA_CAM_CODEC").ok().is_some_and(|v| {
                    matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg")
                })
            },
            |cfg| matches!(cfg.codec, CameraCodec::Mjpeg),
        );
        // Geometry/rate/quality knobs; env vars override the CameraConfig.
        let jpeg_quality = env_u32("LESAVKA_CAM_JPEG_QUALITY", 85).clamp(1, 100);
        let width = env_u32("LESAVKA_CAM_WIDTH", cfg.map_or(1280, |cfg| cfg.width));
        let height = env_u32("LESAVKA_CAM_HEIGHT", cfg.map_or(720, |cfg| cfg.height));
        let fps = env_u32("LESAVKA_CAM_FPS", cfg.map_or(25, |cfg| cfg.fps)).max(1);
        // Keyframe cadence defaults to min(fps, 5) frames and never exceeds fps.
        let keyframe_interval = env_u32("LESAVKA_CAM_KEYFRAME_INTERVAL", fps.min(5)).clamp(1, fps);
        let source_profile = camera_source_profile(allow_mjpg_source);
        let use_mjpg_source = source_profile == CameraSourceProfile::Mjpeg;
        // A pinned-MJPEG source that must emit H.264 goes straight to x264enc.
        let (enc, kf_prop) = if use_mjpg_source && !output_mjpeg {
            ("x264enc", Some("key-int-max"))
        } else {
            Self::choose_encoder()
        };
        match source_profile {
            CameraSourceProfile::Mjpeg if !output_mjpeg => {
                tracing::info!("📸 using MJPG source with software encode");
            }
            CameraSourceProfile::AutoDecode => {
                tracing::info!("📸 using auto-decoded webcam source (raw/MJPEG accepted)");
            }
            _ => {}
        }
        let enc_opts = Self::encoder_options(enc, kf_prop, keyframe_interval);
        if output_mjpeg {
            tracing::info!("📸 outputting MJPEG frames for UVC (quality={jpeg_quality})");
        } else {
            tracing::info!("📸 using encoder element: {enc}");
        }
        #[cfg(not(coverage))]
        let have_nvvidconv = gst::ElementFactory::find("nvvidconv").is_some();
        let (src_caps, preenc) = match enc {
            // ───────────────────────────────────────────────────────────────────
            // Jetson (has nvvidconv)          Desktop (falls back to videoconvert)
            // ───────────────────────────────────────────────────────────────────
            #[cfg(not(coverage))]
            "nvh264enc" if have_nvvidconv =>
                (format!(
                    "video/x-raw(memory:NVMM),format=NV12,width={width},height={height},framerate={fps}/1"
                ), "nvvidconv !"),
            #[cfg(not(coverage))]
            "nvh264enc" /* else */ =>
                (format!(
                    "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
                ), "videoconvert !"),
            #[cfg(not(coverage))]
            "vaapih264enc" =>
                (format!(
                    "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
                ), "videoconvert !"),
            _ =>
                (format!(
                    "video/x-raw,width={width},height={height},framerate={fps}/1"
                ), "videoconvert !"),
        };
        // Build a pipeline that works for any of the three encoders.
        //   * nvh264enc needs NVMM memory caps;
        //   * vaapih264enc wants system-memory caps;
        //   * x264enc needs the usual raw caps.
        // Four textual shapes below, selected by (preview tap?, MJPEG output?,
        // MJPEG source?). The tap variants insert a tee with leaky queues so
        // the local preview can never stall the uplink branch.
        let preview_tap_path = camera_preview_tap_path();
        let preview_tap_branch = camera_preview_tap_branch(width, height, fps);
        let raw_source_chain =
            camera_raw_source_chain(&src_desc, &src_caps, width, height, fps, source_profile);
        let desc = if preview_tap_path.is_some() {
            if output_mjpeg {
                if use_mjpg_source {
                    // Camera already emits JPEG: pass-through uplink, decode for tap.
                    format!(
                        "{src_desc} ! \
                         image/jpeg,width={width},height={height},framerate={fps}/1 ! \
                         tee name=t \
                         t. ! queue max-size-buffers=30 leaky=downstream ! \
                         appsink name=asink emit-signals=true max-buffers=60 drop=true \
                         t. ! queue max-size-buffers=2 leaky=downstream ! jpegdec ! \
                         {preview_tap_branch}"
                    )
                } else {
                    // Raw source: JPEG-encode the uplink branch, tap stays raw.
                    format!(
                        "{raw_source_chain} ! \
                         tee name=t \
                         t. ! queue max-size-buffers=30 leaky=downstream ! \
                         videoconvert ! jpegenc quality={jpeg_quality} ! \
                         appsink name=asink emit-signals=true max-buffers=60 drop=true \
                         t. ! queue max-size-buffers=2 leaky=downstream ! \
                         {preview_tap_branch}"
                    )
                }
            } else if use_mjpg_source {
                // MJPEG source → decode → H.264 encode, with preview tap.
                format!(
                    "{src_desc} ! \
                     image/jpeg,width={width},height={height} ! \
                     jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
                     tee name=t \
                     t. ! queue max-size-buffers=30 leaky=downstream ! \
                     videoconvert ! {enc_opts} ! \
                     h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                     appsink name=asink emit-signals=true max-buffers=60 drop=true \
                     t. ! queue max-size-buffers=2 leaky=downstream ! \
                     {preview_tap_branch}"
                )
            } else {
                // Raw source → H.264 encode, with preview tap.
                format!(
                    "{raw_source_chain} ! \
                     tee name=t \
                     t. ! queue max-size-buffers=30 leaky=downstream ! \
                     {preenc} {enc_opts} ! \
                     h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                     appsink name=asink emit-signals=true max-buffers=60 drop=true \
                     t. ! queue max-size-buffers=2 leaky=downstream ! \
                     {preview_tap_branch}"
                )
            }
        } else if output_mjpeg {
            if use_mjpg_source {
                // JPEG pass-through, no tap.
                format!(
                    "{src_desc} ! \
                     image/jpeg,width={width},height={height},framerate={fps}/1 ! \
                     queue max-size-buffers=30 leaky=downstream ! \
                     appsink name=asink emit-signals=true max-buffers=60 drop=true"
                )
            } else {
                // Raw source, JPEG-encode, no tap.
                format!(
                    "{raw_source_chain} ! \
                     videoconvert ! jpegenc quality={jpeg_quality} ! \
                     queue max-size-buffers=30 leaky=downstream ! \
                     appsink name=asink emit-signals=true max-buffers=60 drop=true"
                )
            }
        } else if use_mjpg_source {
            // MJPEG source → decode → H.264 encode, no tap.
            format!(
                "{src_desc} ! \
                 image/jpeg,width={width},height={height} ! \
                 jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
                 videoconvert ! {enc_opts} ! \
                 h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                 queue max-size-buffers=30 leaky=downstream ! \
                 appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        } else {
            // Raw source → H.264 encode, no tap.
            format!(
                "{raw_source_chain} ! \
                 {preenc} {enc_opts} ! \
                 h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                 queue max-size-buffers=30 leaky=downstream ! \
                 appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        };
        tracing::info!(%enc, width, height, fps, ?desc, "📸 using encoder element");
        let pipeline: gst::Pipeline = gst::parse::launch(&desc)
            .context("gst parse_launch(cam)")?
            .downcast::<gst::Pipeline>()
            .expect("not a pipeline");
        tracing::debug!("📸 pipeline built OK setting PLAYING…");
        let sink: gst_app::AppSink = pipeline
            .by_name("asink")
            .expect("appsink element not found")
            .downcast::<gst_app::AppSink>()
            .expect("appsink downcast");
        // Surface bus warnings/errors in our logs, tagged with the device.
        spawn_camera_bus_logger(&pipeline, dev_label.clone());
        if let Err(err) = pipeline.set_state(gst::State::Playing) {
            // Roll back to NULL so the failed pipeline releases its resources.
            let _ = pipeline.set_state(gst::State::Null);
            return Err(err.into());
        }
        tracing::info!("📸 webcam pipeline ▶️ device={dev_label}");
        // Start the preview-tap writer thread if a tap path was configured.
        let preview_tap_running = if let Some(path) = preview_tap_path {
            let preview_sink = pipeline
                .by_name("preview_sink")
                .context("missing camera preview tap appsink")?
                .downcast::<gst_app::AppSink>()
                .expect("camera preview tap appsink");
            Some(spawn_camera_preview_tap(preview_sink, path))
        } else {
            None
        };
        Ok(Self {
            pipeline,
            sink,
            preview_tap_running,
        })
    }
2025-07-03 08:19:59 -05:00
    /// Pull the next encoded frame from the appsink, blocking until one is
    /// available. Returns `None` when the sink errors/ends or the buffer
    /// cannot be mapped.
    pub fn pull(&self) -> Option<VideoPacket> {
        let sample = self.sink.pull_sample().ok()?;
        let buf = sample.buffer()?;
        let map = buf.map_readable().ok()?;
        // Buffer PTS is nanoseconds; the packet carries microseconds.
        let pts = buf.pts().unwrap_or(gst::ClockTime::ZERO).nseconds() / 1_000;
        // Log exactly once, on the very first frame, as a liveness marker.
        static FIRST_CAMERA_PACKET: AtomicBool = AtomicBool::new(false);
        if !FIRST_CAMERA_PACKET.swap(true, Ordering::Relaxed) {
            tracing::info!(
                bytes = map.as_slice().len(),
                pts_us = pts,
                "📸 upstream webcam frames flowing"
            );
        }
        Some(VideoPacket {
            // NOTE(review): id 2 appears to tag the webcam stream — confirm
            // against the VideoPacket protocol definition.
            id: 2,
            pts,
            data: map.as_slice().to_vec(),
            ..Default::default()
        })
    }
    /// Fuzzymatch devices under `/dev/v4l/by-id`, preferring capture nodes
    ///
    /// Performs a case-insensitive substring match of `substr` against the
    /// by-id symlink names and returns the first (lexicographically smallest)
    /// match whose target is a V4L2 capture node, as a `/dev/videoN` path.
    #[cfg(not(coverage))]
    fn find_device(substr: &str) -> Option<String> {
        let wanted = substr.to_ascii_lowercase();
        let mut matches: Vec<_> = std::fs::read_dir("/dev/v4l/by-id")
            .ok()?
            .flatten()
            .filter_map(|e| {
                let p = e.path();
                let name = p.file_name()?.to_string_lossy().to_ascii_lowercase();
                if name.contains(&wanted) {
                    Some(p)
                } else {
                    None
                }
            })
            .collect();
        // deterministic order
        matches.sort();
        for p in matches {
            if let Ok(target) = std::fs::read_link(&p) {
                // by-id entries are symlinks like ../../video0 — keep the leaf
                // name and rebase it onto /dev.
                let dev = format!("/dev/{}", target.file_name()?.to_string_lossy());
                if Self::is_capture(&dev) {
                    return Some(dev);
                }
            }
        }
        None
    }
#[cfg(coverage)]
fn find_device(substr: &str) -> Option<String> {
let wanted = substr.to_ascii_lowercase();
let by_id_dir =
std::env::var("LESAVKA_CAM_BY_ID_DIR").unwrap_or_else(|_| "/dev/v4l/by-id".to_string());
let dev_root = std::env::var("LESAVKA_CAM_DEV_ROOT").unwrap_or_else(|_| "/dev".to_string());
let mut matches: Vec<_> = std::fs::read_dir(by_id_dir)
.ok()?
.flatten()
.filter_map(|e| {
let p = e.path();
let name = p.file_name()?.to_string_lossy().to_ascii_lowercase();
if name.contains(&wanted) {
Some(p)
} else {
None
}
})
.collect();
matches.sort();
for p in matches {
if let Ok(target) = std::fs::read_link(&p) {
let dev = format!("{}/{}", dev_root, target.file_name()?.to_string_lossy());
if Self::is_capture(&dev) {
return Some(dev);
}
}
}
None
}
    /// True when `dev` is a V4L2 node advertising single- or multi-planar
    /// video *capture* capability (i.e. a camera, not a metadata/output node).
    #[cfg(not(coverage))]
    fn is_capture(dev: &str) -> bool {
        // Capability bit values mirrored from the V4L2 ABI (videodev2.h).
        const V4L2_CAP_VIDEO_CAPTURE: u32 = 0x0000_0001;
        const V4L2_CAP_VIDEO_CAPTURE_MPLANE: u32 = 0x0000_1000;
        v4l::Device::with_path(dev)
            .ok()
            .and_then(|d| d.query_caps().ok())
            .map(|caps| {
                let bits = caps.capabilities.bits();
                (bits & V4L2_CAP_VIDEO_CAPTURE != 0) || (bits & V4L2_CAP_VIDEO_CAPTURE_MPLANE != 0)
            })
            // Unopenable device or failed ioctl ⇒ treat as non-capture.
            .unwrap_or(false)
    }
#[cfg(coverage)]
fn is_capture(dev: &str) -> bool {
dev.starts_with("/dev/video")
}
2025-07-03 08:19:59 -05:00
    /// Cheap stub used when the webcam is disabled
    ///
    /// Builds an empty pipeline and a detached appsink; the pipeline is never
    /// set to PLAYING, so callers are expected to see no frames from `pull()`.
    pub fn new_stub() -> Self {
        let pipeline = gst::Pipeline::new();
        let sink: gst_app::AppSink = gst::ElementFactory::make("appsink")
            .build()
            .expect("appsink")
            .downcast::<gst_app::AppSink>()
            .unwrap();
        Self {
            pipeline,
            sink,
            preview_tap_running: None,
        }
    }
2025-07-03 15:22:30 -05:00
    /// First available H.264 encoder from a best-to-worst candidate list,
    /// paired with the raw-caps prefix that encoder expects. Availability is
    /// judged by factory presence only (cf. `buildable_encoder`).
    #[allow(dead_code)] // helper kept for future heuristics
    #[cfg(not(coverage))]
    fn pick_encoder() -> (&'static str, &'static str) {
        let encoders = &[
            ("nvh264enc", "video/x-raw(memory:NVMM),format=NV12"),
            ("vaapih264enc", "video/x-raw,format=NV12"),
            ("v4l2h264enc", "video/x-raw"), // RPi, Jetson, etc.
            ("x264enc", "video/x-raw"),     // software
        ];
        for (name, caps) in encoders {
            if gst::ElementFactory::find(name).is_some() {
                return (name, caps);
            }
        }
        // last resort software
        ("x264enc", "video/x-raw")
    }
#[cfg(coverage)]
fn pick_encoder() -> (&'static str, &'static str) {
("x264enc", "video/x-raw")
}
    /// Pick the best H.264 encoder that can actually be *built* on this
    /// machine (plugin present and constructible), together with the name of
    /// its keyframe-interval property — `None` when the built element exposes
    /// none of the known property names.
    #[cfg(not(coverage))]
    fn choose_encoder() -> (&'static str, Option<&'static str>) {
        if buildable_encoder("nvh264enc") {
            return (
                "nvh264enc",
                supported_encoder_property(
                    "nvh264enc",
                    &["iframeinterval", "idrinterval", "gop-size"],
                ),
            );
        }
        if buildable_encoder("vaapih264enc") {
            return (
                "vaapih264enc",
                supported_encoder_property("vaapih264enc", &["keyframe-period"]),
            );
        }
        if buildable_encoder("v4l2h264enc") {
            return (
                "v4l2h264enc",
                supported_encoder_property("v4l2h264enc", &["idrcount"]),
            );
        }
        // Software fallback; x264enc's keyframe property is well known.
        ("x264enc", Some("key-int-max"))
    }
#[cfg(coverage)]
fn choose_encoder() -> (&'static str, Option<&'static str>) {
match std::env::var("LESAVKA_CAM_TEST_ENCODER")
.ok()
.as_deref()
.map(str::trim)
{
Some("nvh264enc") => ("nvh264enc", None),
Some("vaapih264enc") => ("vaapih264enc", Some("keyframe-period")),
Some("v4l2h264enc") => ("v4l2h264enc", Some("idrcount")),
_ => ("x264enc", Some("key-int-max")),
}
}
fn encoder_options(
enc: &'static str,
kf_prop: Option<&'static str>,
keyframe_interval: u32,
) -> String {
if enc == "x264enc" {
let bitrate_kbit = env_u32("LESAVKA_CAM_H264_KBIT", 4500);
let keyframe_opt = kf_prop
.map(|property| format!(" {property}={keyframe_interval}"))
.unwrap_or_default();
format!(
"{enc} tune=zerolatency speed-preset=faster bitrate={bitrate_kbit}{keyframe_opt}"
)
} else if let Some(property) = kf_prop {
format!("{enc} {property}={keyframe_interval}")
} else {
enc.to_string()
}
}
}
/// Choose the pre-encoder webcam format path.
///
/// V4L2 webcams often expose 720p/30 as MJPEG only, so the default accepts
/// either raw frames or MJPEG unless the operator explicitly pins a format.
fn camera_source_profile(allow_v4l2_auto_decode: bool) -> CameraSourceProfile {
    // Sources that can only emit raw frames (videotestsrc) are always Raw.
    if !allow_v4l2_auto_decode {
        return CameraSourceProfile::Raw;
    }
    // Legacy switch: the mere *presence* of LESAVKA_CAM_MJPG pins MJPEG.
    if std::env::var("LESAVKA_CAM_MJPG").is_ok() {
        return CameraSourceProfile::Mjpeg;
    }
    // Otherwise LESAVKA_CAM_FORMAT may pin a format; anything else (including
    // an unset variable) falls through to auto-decode.
    let format = std::env::var("LESAVKA_CAM_FORMAT")
        .map(|value| value.trim().to_ascii_lowercase())
        .unwrap_or_default();
    match format.as_str() {
        "mjpg" | "mjpeg" | "jpeg" => CameraSourceProfile::Mjpeg,
        "raw" | "yuyv" | "yuy2" => CameraSourceProfile::Raw,
        _ => CameraSourceProfile::AutoDecode,
    }
}
/// Build the source-to-raw-video chain consumed by the encoder and preview tap.
fn camera_raw_source_chain(
    src_desc: &str,
    src_caps: &str,
    width: u32,
    height: u32,
    fps: u32,
    profile: CameraSourceProfile,
) -> String {
    match profile {
        // Operator pinned raw: pass the encoder caps straight through.
        CameraSourceProfile::Raw => format!("{src_desc} ! {src_caps}"),
        // Operator pinned MJPEG: decode, then normalise size and rate.
        CameraSourceProfile::Mjpeg => format!(
            "{src_desc} ! \
             image/jpeg,width={width},height={height},framerate={fps}/1 ! \
             jpegdec ! videoconvert ! videoscale ! videorate ! \
             video/x-raw,width={width},height={height},framerate={fps}/1"
        ),
        // Default: let decodebin negotiate raw or MJPEG, then normalise.
        CameraSourceProfile::AutoDecode => {
            let accepted = camera_auto_decode_caps(width, height, fps);
            format!(
                "{src_desc} ! \
                 capsfilter caps=\"{accepted}\" ! \
                 decodebin ! videoconvert ! videoscale ! videorate ! \
                 video/x-raw,width={width},height={height},framerate={fps}/1,pixel-aspect-ratio=1/1"
            )
        }
    }
}
/// Caps string that lets decodebin negotiate either raw webcam frames or MJPEG.
fn camera_auto_decode_caps(width: u32, height: u32, fps: u32) -> String {
    // Both alternatives share the same geometry/rate constraints.
    let geometry =
        format!("width=(int){width},height=(int){height},framerate=(fraction){fps}/1");
    format!("video/x-raw,{geometry};image/jpeg,{geometry}")
}
/// Optional filesystem path where the local preview tap publishes frames,
/// taken from the LESAVKA_UPLINK_CAMERA_PREVIEW env var.
///
/// Returns `None` when the variable is unset or contains only whitespace.
fn camera_preview_tap_path() -> Option<PathBuf> {
    let raw = std::env::var(CAMERA_PREVIEW_TAP_ENV).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(PathBuf::from(trimmed))
    }
}
/// Preview-tap tail of the pipeline: convert to RGBA at the (sanitised)
/// capture geometry and hand frames to a non-blocking appsink that keeps
/// only the newest buffer.
fn camera_preview_tap_branch(width: u32, height: u32, fps: u32) -> String {
    // Keep dimensions positive and within i32 range; cap the tap at 60 fps.
    let w = width.clamp(1, i32::MAX as u32);
    let h = height.clamp(1, i32::MAX as u32);
    let rate = fps.clamp(1, 60);
    let caps = format!(
        "video/x-raw,format=RGBA,width={w},height={h},framerate={rate}/1,pixel-aspect-ratio=1/1"
    );
    format!(
        "videoconvert ! videoscale ! videorate ! \
         {caps} ! \
         appsink name=preview_sink emit-signals=false sync=false max-buffers=1 drop=true"
    )
}
/// Publish actual-size local preview frames so the launcher mirrors uplink quality.
///
/// Spawns a detached thread that polls `sink` and atomically rewrites `path`
/// with the newest RGBA frame. Returns the shared stop flag; storing `false`
/// in it (see `CameraCapture`'s `Drop`) makes the thread exit within one
/// poll cycle.
fn spawn_camera_preview_tap(sink: gst_app::AppSink, path: PathBuf) -> Arc<AtomicBool> {
    let running = Arc::new(AtomicBool::new(true));
    let thread_running = Arc::clone(&running);
    thread::spawn(move || {
        let mut wrote_first = false;
        let mut empty_polls = 0_u64;
        while thread_running.load(Ordering::Acquire) {
            // 250 ms poll timeout keeps shutdown latency bounded.
            if let Some(sample) = sink.try_pull_sample(gst::ClockTime::from_mseconds(250)) {
                empty_polls = 0;
                match write_camera_preview_tap(&path, &sample) {
                    Ok(info) => {
                        // Announce the tap once, on the first successful write.
                        if !wrote_first {
                            wrote_first = true;
                            tracing::info!(
                                path = %path.display(),
                                width = info.width,
                                height = info.height,
                                stride = info.stride,
                                "📸 local uplink preview tap publishing frames"
                            );
                        }
                    }
                    Err(err) => {
                        tracing::debug!("📸 local uplink preview tap write failed: {err:#}");
                        // Back off briefly so a broken path doesn't spin hot.
                        thread::sleep(Duration::from_millis(100));
                    }
                }
            } else if !wrote_first {
                // No frame yet: warn after ~5 s (20 × 250 ms), then roughly
                // every 30 s (multiples of 120 polls) until one arrives.
                empty_polls += 1;
                if empty_polls == 20 || empty_polls.is_multiple_of(120) {
                    tracing::warn!(
                        path = %path.display(),
                        "📸 local uplink preview tap is still waiting for webcam frames"
                    );
                }
            }
        }
    });
    running
}
/// Geometry of one published preview frame (matches the tap file's header).
struct CameraPreviewTapInfo {
    // Frame width in pixels, as reported by the sample caps.
    width: i32,
    // Frame height in pixels, as reported by the sample caps.
    height: i32,
    // Bytes per row, derived as buffer length / row count.
    stride: usize,
}
/// Atomically write one RGBA preview sample for the launcher status pane.
///
/// The file starts with a `LESAVKA_RGBA <width> <height> <stride>` header
/// line followed by the raw pixel bytes. It is written to a sibling `.tmp`
/// file and renamed into place so readers never observe a partial frame.
fn write_camera_preview_tap(
    path: &Path,
    sample: &gst::Sample,
) -> anyhow::Result<CameraPreviewTapInfo> {
    let caps = sample.caps().context("preview tap sample missing caps")?;
    let structure = caps
        .structure(0)
        .context("preview tap caps missing structure")?;
    let width = structure
        .get::<i32>("width")
        .context("preview tap caps missing width")?;
    let height = structure
        .get::<i32>("height")
        .context("preview tap caps missing height")?;
    let buffer = sample
        .buffer()
        .context("preview tap sample missing buffer")?;
    let map = buffer
        .map_readable()
        .context("preview tap buffer unreadable")?;
    // Derive the row stride from the mapped length; guard against a
    // non-positive height so the division can never divide by zero.
    let row_count = usize::try_from(height)
        .ok()
        .filter(|height| *height > 0)
        .unwrap_or(1);
    let stride = map.as_slice().len() / row_count;
    let tmp_path = path.with_extension("tmp");
    let mut file = std::fs::File::create(&tmp_path)
        .with_context(|| format!("creating {}", tmp_path.display()))?;
    writeln!(file, "LESAVKA_RGBA {width} {height} {stride}")?;
    file.write_all(map.as_slice())?;
    // Best-effort durability; preview frames are disposable.
    file.sync_all().ok();
    // Atomic publish: the rename replaces the previous frame in one step.
    std::fs::rename(&tmp_path, path).with_context(|| format!("publishing {}", path.display()))?;
    Ok(CameraPreviewTapInfo {
        width,
        height,
        stride,
    })
}
/// Forward camera bus warnings/errors into the relay log with the device label.
///
/// Also logs a one-line "▶️" marker when the top-level pipeline itself (not a
/// child element) reaches PLAYING. Does nothing when the pipeline has no bus.
fn spawn_camera_bus_logger(pipeline: &gst::Pipeline, device: String) {
    let Some(bus) = pipeline.bus() else {
        return;
    };
    std::thread::spawn(move || {
        use gst::MessageView::{Error, StateChanged, Warning};
        // Blocks for the next message; the loop ends when the bus is flushed
        // or destroyed along with the pipeline.
        for msg in bus.iter_timed(gst::ClockTime::NONE) {
            match msg.view() {
                // Filter state changes to the pipeline object only, so each
                // child element's transition doesn't spam the log.
                StateChanged(s)
                    if s.current() == gst::State::Playing
                        && msg.src().is_some_and(|source| {
                            source.type_() == gst::Pipeline::static_type()
                        }) =>
                {
                    tracing::info!(%device, "📸 camera pipeline ▶️");
                }
                Error(e) => tracing::error!(
                    %device,
                    "📸💥 camera: {} ({})",
                    e.error(),
                    e.debug().unwrap_or_default()
                ),
                Warning(w) => tracing::warn!(
                    %device,
                    "📸⚠️ camera: {} ({})",
                    w.error(),
                    w.debug().unwrap_or_default()
                ),
                _ => {}
            }
        }
    });
}
/// True when the encoder factory exists *and* an element can actually be
/// constructed from it (i.e. the driver/runtime backing is present, not just
/// the plugin file).
#[cfg(not(coverage))]
fn buildable_encoder(encoder: &'static str) -> bool {
    if gst::ElementFactory::find(encoder).is_none() {
        return false;
    }
    gst::ElementFactory::make(encoder).build().is_ok()
}
/// First name from `properties` that a freshly built `encoder` element
/// actually exposes; `None` when the element can't be built or none match.
#[cfg(not(coverage))]
fn supported_encoder_property(
    encoder: &'static str,
    properties: &[&'static str],
) -> Option<&'static str> {
    let element = gst::ElementFactory::make(encoder).build().ok()?;
    for &property in properties {
        if element.find_property(property).is_some() {
            return Some(property);
        }
    }
    None
}
impl Drop for CameraCapture {
    fn drop(&mut self) {
        // Ask the preview-tap writer thread (if any) to exit its poll loop.
        if let Some(running) = &self.preview_tap_running {
            running.store(false, Ordering::Release);
        }
        // Tear the pipeline down; failures during shutdown are ignored.
        let _ = self.pipeline.set_state(gst::State::Null);
    }
}