lesavka/client/src/input/camera/capture_pipeline.rs

273 lines
12 KiB
Rust
Raw Blame History

This file contains ambiguous Unicode characters

This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.

/// Test/coverage capture clock: microseconds elapsed since the first call,
/// measured against a process-wide, lazily-initialised origin. Saturates at
/// `u64::MAX` rather than wrapping on overflow.
#[cfg(any(coverage, test))]
fn shared_capture_pts_us() -> u64 {
    use std::sync::OnceLock;
    use std::time::Instant;

    // Every capture in the process shares one origin so PTS values from
    // different call sites are mutually comparable.
    static CAPTURE_ORIGIN: OnceLock<Instant> = OnceLock::new();
    let elapsed_us = CAPTURE_ORIGIN
        .get_or_init(Instant::now)
        .elapsed()
        .as_micros();
    u64::try_from(elapsed_us).unwrap_or(u64::MAX)
}
/// Production capture clock: delegates to the crate-wide live-capture clock
/// so camera PTS values share the same timebase as the rest of the live
/// capture path (see `crate::live_capture_clock`).
#[cfg(not(any(coverage, test)))]
fn shared_capture_pts_us() -> u64 {
    crate::live_capture_clock::capture_pts_us()
}
impl CameraCapture {
/// Builds and starts the webcam GStreamer pipeline.
///
/// Source selection (`device_fragment`):
/// * `"test"` / `"videotestsrc"` (case-insensitive) — synthetic test pattern,
///   name taken from `LESAVKA_CAM_TEST_PATTERN` (default `"smpte"`);
/// * a path starting with `/dev/` — used verbatim as a V4L2 device;
/// * any other fragment — resolved via `Self::find_device`, falling back to
///   `/dev/video0`;
/// * `None` — `/dev/video0`.
///
/// `cfg` supplies codec/width/height/fps defaults; each value can be
/// overridden through the corresponding `LESAVKA_CAM_*` environment variable
/// read below.
///
/// # Errors
/// Fails when the pipeline description cannot be parsed, when the pipeline
/// cannot reach `Playing` (it is reset to `Null` before the error is
/// returned), or when a preview tap was requested but its appsink is missing
/// from the built pipeline.
pub fn new(device_fragment: Option<&str>, cfg: Option<CameraConfig>) -> anyhow::Result<Self> {
    // Idempotent; a failed re-init is deliberately ignored — parse::launch
    // below will surface any real GStreamer problem.
    gst::init().ok();
    // Select source: V4L2 device or test pattern. `allow_mjpg_source` is
    // false for videotestsrc, which produces raw video only.
    let (src_desc, dev_label, allow_mjpg_source) = match device_fragment {
        Some(fragment)
            if fragment.eq_ignore_ascii_case("test")
                || fragment.eq_ignore_ascii_case("videotestsrc") =>
        {
            let pattern =
                std::env::var("LESAVKA_CAM_TEST_PATTERN").unwrap_or_else(|_| "smpte".into());
            (
                format!("videotestsrc is-live=true pattern={pattern}"),
                format!("videotestsrc:{pattern}"),
                false,
            )
        }
        // Explicit device path: trust the caller, no discovery.
        Some(path) if path.starts_with("/dev/") => (
            format!("v4l2src device={path} do-timestamp=true"),
            path.to_string(),
            true,
        ),
        // Fuzzy fragment: match against available devices, else default.
        Some(fragment) => {
            let dev = Self::find_device(fragment).unwrap_or_else(|| "/dev/video0".into());
            (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
        }
        None => {
            let dev = "/dev/video0".to_string();
            (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
        }
    };
    // Output codec: explicit config wins; otherwise LESAVKA_CAM_CODEC may
    // request MJPEG via "mjpeg"/"mjpg"/"jpeg" (case-insensitive).
    let output_mjpeg = cfg.map_or_else(
        || {
            std::env::var("LESAVKA_CAM_CODEC").ok().is_some_and(|v| {
                matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg")
            })
        },
        |cfg| matches!(cfg.codec, CameraCodec::Mjpeg),
    );
    // Geometry / rate, env-overridable. fps is floored at 1 so the
    // framerate caps and the keyframe clamp below stay well-formed.
    let jpeg_quality = env_u32("LESAVKA_CAM_JPEG_QUALITY", 85).clamp(1, 100);
    let width = env_u32("LESAVKA_CAM_WIDTH", cfg.map_or(1280, |cfg| cfg.width));
    let height = env_u32("LESAVKA_CAM_HEIGHT", cfg.map_or(720, |cfg| cfg.height));
    let fps = env_u32("LESAVKA_CAM_FPS", cfg.map_or(25, |cfg| cfg.fps)).max(1);
    // Default keyframe interval: every min(fps, 5) frames, clamped so there
    // is at least one keyframe per second and at most one per frame.
    let keyframe_interval = env_u32("LESAVKA_CAM_KEYFRAME_INTERVAL", fps.min(5)).clamp(1, fps);
    let source_profile = camera_source_profile(allow_mjpg_source);
    let use_mjpg_source = source_profile == CameraSourceProfile::Mjpeg;
    // JPEG in + H.264 out forces a software decode/re-encode via x264enc;
    // any other combination lets choose_encoder pick the best available.
    let (enc, kf_prop) = if use_mjpg_source && !output_mjpeg {
        ("x264enc", Some("key-int-max"))
    } else {
        Self::choose_encoder()
    };
    match source_profile {
        CameraSourceProfile::Mjpeg if !output_mjpeg => {
            tracing::info!("📸 using MJPG source with software encode");
        }
        CameraSourceProfile::AutoDecode => {
            tracing::info!("📸 using auto-decoded webcam source (raw/MJPEG accepted)");
        }
        _ => {}
    }
    let enc_opts = Self::encoder_options(enc, kf_prop, keyframe_interval);
    if output_mjpeg {
        tracing::info!("📸 outputting MJPEG frames for UVC (quality={jpeg_quality})");
    } else {
        tracing::info!("📸 using encoder element: {enc}");
    }
    // nvvidconv presence distinguishes a Jetson-style install (NVMM memory
    // path) from a desktop fallback; only meaningful for nvh264enc below.
    #[cfg(not(coverage))]
    let have_nvvidconv = gst::ElementFactory::find("nvvidconv").is_some();
    // Raw caps and pre-encoder conversion element, per encoder.
    let (src_caps, preenc) = match enc {
        // ───────────────────────────────────────────────────────────────────
        // Jetson (has nvvidconv)   Desktop (falls back to videoconvert)
        // ───────────────────────────────────────────────────────────────────
        #[cfg(not(coverage))]
        "nvh264enc" if have_nvvidconv =>
            (format!(
                "video/x-raw(memory:NVMM),format=NV12,width={width},height={height},framerate={fps}/1"
            ), "nvvidconv !"),
        #[cfg(not(coverage))]
        "nvh264enc" /* else */ =>
            (format!(
                "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
            ), "videoconvert !"),
        #[cfg(not(coverage))]
        "vaapih264enc" =>
            (format!(
                "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
            ), "videoconvert !"),
        _ =>
            (format!(
                "video/x-raw,width={width},height={height},framerate={fps}/1"
            ), "videoconvert !"),
    };
    // Build a pipeline that works for any of the three encoders.
    // * nvh264enc needs NVMM memory caps;
    // * vaapih264enc wants system-memory caps;
    // * x264enc needs the usual raw caps.
    let preview_tap_path = camera_preview_tap_path();
    let preview_tap_branch = camera_preview_tap_branch(width, height, fps);
    let raw_source_chain =
        camera_raw_source_chain(&src_desc, &src_caps, width, height, fps, source_profile);
    // Four topologies: {preview tap, no tap} × {MJPEG out, H.264 out}, each
    // with an MJPG-source or raw-source variant. Tap variants insert a `tee`
    // whose second branch feeds a small leaky queue into the preview sink.
    let desc = if preview_tap_path.is_some() {
        if output_mjpeg {
            if use_mjpg_source {
                // JPEG passthrough to the appsink; the tap branch decodes
                // for raw preview frames.
                format!(
                    "{src_desc} ! \
                    image/jpeg,width={width},height={height},framerate={fps}/1 ! \
                    tee name=t \
                    t. ! queue max-size-buffers=30 leaky=downstream ! \
                    appsink name=asink emit-signals=true max-buffers=60 drop=true \
                    t. ! queue max-size-buffers=2 leaky=downstream ! jpegdec ! \
                    {preview_tap_branch}"
                )
            } else {
                // Raw source; JPEG-encode on the appsink branch only.
                format!(
                    "{raw_source_chain} ! \
                    tee name=t \
                    t. ! queue max-size-buffers=30 leaky=downstream ! \
                    videoconvert ! jpegenc quality={jpeg_quality} ! \
                    appsink name=asink emit-signals=true max-buffers=60 drop=true \
                    t. ! queue max-size-buffers=2 leaky=downstream ! \
                    {preview_tap_branch}"
                )
            }
        } else if use_mjpg_source {
            // MJPG source decoded + rate-normalised, then H.264-encoded on
            // the appsink branch; the tap sees decoded raw frames.
            format!(
                "{src_desc} ! \
                image/jpeg,width={width},height={height} ! \
                jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
                tee name=t \
                t. ! queue max-size-buffers=30 leaky=downstream ! \
                videoconvert ! {enc_opts} ! \
                h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                appsink name=asink emit-signals=true max-buffers=60 drop=true \
                t. ! queue max-size-buffers=2 leaky=downstream ! \
                {preview_tap_branch}"
            )
        } else {
            // Raw source, H.264 out, with preview tap.
            format!(
                "{raw_source_chain} ! \
                tee name=t \
                t. ! queue max-size-buffers=30 leaky=downstream ! \
                {preenc} {enc_opts} ! \
                h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                appsink name=asink emit-signals=true max-buffers=60 drop=true \
                t. ! queue max-size-buffers=2 leaky=downstream ! \
                {preview_tap_branch}"
            )
        }
    } else if output_mjpeg {
        if use_mjpg_source {
            // JPEG passthrough straight to the appsink, no tap.
            format!(
                "{src_desc} ! \
                image/jpeg,width={width},height={height},framerate={fps}/1 ! \
                queue max-size-buffers=30 leaky=downstream ! \
                appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        } else {
            // Raw source JPEG-encoded, no tap.
            format!(
                "{raw_source_chain} ! \
                videoconvert ! jpegenc quality={jpeg_quality} ! \
                queue max-size-buffers=30 leaky=downstream ! \
                appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        }
    } else if use_mjpg_source {
        // MJPG source decoded and H.264-encoded, no tap.
        format!(
            "{src_desc} ! \
            image/jpeg,width={width},height={height} ! \
            jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
            videoconvert ! {enc_opts} ! \
            h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
            queue max-size-buffers=30 leaky=downstream ! \
            appsink name=asink emit-signals=true max-buffers=60 drop=true"
        )
    } else {
        // Raw source, H.264 out, no tap.
        format!(
            "{raw_source_chain} ! \
            {preenc} {enc_opts} ! \
            h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
            queue max-size-buffers=30 leaky=downstream ! \
            appsink name=asink emit-signals=true max-buffers=60 drop=true"
        )
    };
    tracing::info!(%enc, width, height, fps, ?desc, "📸 using encoder element");
    let pipeline: gst::Pipeline = gst::parse::launch(&desc)
        .context("gst parse_launch(cam)")?
        .downcast::<gst::Pipeline>()
        .expect("not a pipeline");
    tracing::debug!("📸 pipeline built OK setting PLAYING…");
    // Every description above names its main appsink "asink", so a miss
    // here is a programming error, not a runtime condition.
    let sink: gst_app::AppSink = pipeline
        .by_name("asink")
        .expect("appsink element not found")
        .downcast::<gst_app::AppSink>()
        .expect("appsink downcast");
    // Forward bus errors/warnings to tracing, tagged with the device label.
    spawn_camera_bus_logger(&pipeline, dev_label.clone());
    if let Err(err) = pipeline.set_state(gst::State::Playing) {
        // Tear the pipeline down before bailing so resources are released.
        let _ = pipeline.set_state(gst::State::Null);
        return Err(err.into());
    }
    tracing::info!("📸 webcam pipeline ▶️ device={dev_label}");
    // When a tap was requested, the tap branch is expected to contain an
    // appsink named "preview_sink"; hand it off to the tap worker.
    let preview_tap_running = if let Some(path) = preview_tap_path {
        let preview_sink = pipeline
            .by_name("preview_sink")
            .context("missing camera preview tap appsink")?
            .downcast::<gst_app::AppSink>()
            .expect("camera preview tap appsink");
        Some(spawn_camera_preview_tap(preview_sink, path))
    } else {
        None
    };
    Ok(Self {
        pipeline,
        sink,
        preview_tap_running,
    })
}
/// Blocks on the appsink for the next sample and wraps it in a `VideoPacket`.
///
/// Returns `None` when no sample can be pulled (e.g. the pipeline has
/// stopped) or when the buffer cannot be mapped for reading. The packet's
/// PTS comes from the shared capture clock, not from the GStreamer buffer.
pub fn pull(&self) -> Option<VideoPacket> {
    // One-shot latch so the "frames flowing" breadcrumb is logged exactly
    // once per process, no matter how many frames are pulled.
    static FIRST_CAMERA_PACKET: AtomicBool = AtomicBool::new(false);

    let sample = self.sink.pull_sample().ok()?;
    let mapped = sample.buffer()?.map_readable().ok()?;
    let pts = shared_capture_pts_us();

    let frame = mapped.as_slice();
    if !FIRST_CAMERA_PACKET.swap(true, Ordering::Relaxed) {
        tracing::info!(
            bytes = frame.len(),
            pts_us = pts,
            "📸 upstream webcam frames flowing"
        );
    }

    Some(VideoPacket {
        id: 2,
        pts,
        data: frame.to_vec(),
        ..Default::default()
    })
}
}