// NOTE(review): several generic parameter lists in this impl (e.g. `Option<…>`,
// `anyhow::Result<…>`, `.downcast::<…>()`, `-> Option<…>`) appear to have been
// stripped by whatever tool extracted this file — the code cannot compile as
// written. Restore the type arguments from version control before building.
impl CameraCapture {
    /// Build and start a GStreamer webcam capture pipeline.
    ///
    /// `device_fragment` selects the source:
    /// * `"test"` / `"videotestsrc"` — a synthetic test pattern
    ///   (pattern taken from `LESAVKA_CAM_TEST_PATTERN`, default `smpte`);
    /// * an absolute `/dev/…` path — used verbatim as a v4l2 device;
    /// * any other fragment — resolved via `Self::find_device`, falling back
    ///   to `/dev/video0`;
    /// * `None` — `/dev/video0`.
    ///
    /// `cfg` (generic parameter lost in extraction — presumably an optional
    /// negotiated camera profile with `width`/`height`/`fps`/`codec`; TODO
    /// confirm) takes precedence over the `LESAVKA_CAM_*` env overrides for
    /// codec selection and capture geometry.
    ///
    /// On success the pipeline is already in the PLAYING state; on a failed
    /// state change the pipeline is torn down (set to NULL) and the error is
    /// returned.
    pub fn new(device_fragment: Option<&str>, cfg: Option) -> anyhow::Result {
        // Best-effort init: repeated calls are harmless, and a real failure
        // will surface again at parse/launch below.
        // NOTE(review): consider propagating this error instead of `.ok()`.
        gst::init().ok();
        // Select source: V4L2 device or test pattern
        let (src_desc, dev_label, allow_mjpg_source) = match device_fragment {
            // Synthetic source — never offers MJPG caps, so disallow MJPG input.
            Some(fragment)
                if fragment.eq_ignore_ascii_case("test")
                    || fragment.eq_ignore_ascii_case("videotestsrc") =>
            {
                let pattern =
                    std::env::var("LESAVKA_CAM_TEST_PATTERN").unwrap_or_else(|_| "smpte".into());
                (
                    format!("videotestsrc is-live=true pattern={pattern}"),
                    format!("videotestsrc:{pattern}"),
                    false,
                )
            }
            // Explicit device node — trust the caller.
            Some(path) if path.starts_with("/dev/") => (
                format!("v4l2src device={path} do-timestamp=true"),
                path.to_string(),
                true,
            ),
            // Fuzzy fragment — resolve to a device, defaulting to video0.
            Some(fragment) => {
                let dev = Self::find_device(fragment).unwrap_or_else(|| "/dev/video0".into());
                (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
            }
            None => {
                let dev = "/dev/video0".to_string();
                (format!("v4l2src device={dev} do-timestamp=true"), dev, true)
            }
        };
        // Output codec: the negotiated profile wins; otherwise fall back to the
        // LESAVKA_CAM_CODEC env var (mjpeg/mjpg/jpeg → MJPEG, anything else → H.264).
        let output_mjpeg = cfg.map_or_else(
            || {
                std::env::var("LESAVKA_CAM_CODEC").ok().is_some_and(|v| {
                    matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg")
                })
            },
            |cfg| matches!(cfg.codec, CameraCodec::Mjpeg),
        );
        let jpeg_quality = env_u32("LESAVKA_CAM_JPEG_QUALITY", 85).clamp(1, 100);
        let (width, height, fps) = resolved_capture_profile(cfg);
        // Keyframe cadence defaults to min(fps, 5) and is clamped into [1, fps].
        let keyframe_interval = env_u32("LESAVKA_CAM_KEYFRAME_INTERVAL", fps.min(5)).clamp(1, fps);
        let source_profile = camera_source_profile(allow_mjpg_source);
        let use_mjpg_source = source_profile == CameraSourceProfile::Mjpeg;
        // MJPG source + H.264 output forces the software encoder path;
        // otherwise pick the best available encoder.
        let (enc, kf_prop) = if use_mjpg_source && !output_mjpeg {
            ("x264enc", Some("key-int-max"))
        } else {
            Self::choose_encoder()
        };
        match source_profile {
            CameraSourceProfile::Mjpeg if !output_mjpeg => {
                tracing::info!("πŸ“Έ using MJPG source with software encode");
            }
            CameraSourceProfile::AutoDecode => {
                tracing::info!("πŸ“Έ using auto-decoded webcam source (raw/MJPEG accepted)");
            }
            _ => {}
        }
        let enc_opts = Self::encoder_options(enc, kf_prop, keyframe_interval);
        if output_mjpeg {
            tracing::info!("πŸ“Έ outputting MJPEG frames for UVC (quality={jpeg_quality})");
        } else {
            tracing::info!("πŸ“Έ using encoder element: {enc}");
        }
        #[cfg(not(coverage))]
        let have_nvvidconv = gst::ElementFactory::find("nvvidconv").is_some();
        // Per-encoder raw caps + pre-encoder converter element.
        let (src_caps, preenc) = match enc {
            // ───────────────────────────────────────────────────────────────────
            // Jetson (has nvvidconv)            Desktop (falls back to videoconvert)
            // ───────────────────────────────────────────────────────────────────
            #[cfg(not(coverage))]
            "nvh264enc" if have_nvvidconv => (format!(
                "video/x-raw(memory:NVMM),format=NV12,width={width},height={height},framerate={fps}/1"
            ), "nvvidconv !"),
            #[cfg(not(coverage))]
            "nvh264enc" /* else */ => (format!(
                "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
            ), "videoconvert !"),
            #[cfg(not(coverage))]
            "vaapih264enc" => (format!(
                "video/x-raw,format=NV12,width={width},height={height},framerate={fps}/1"
            ), "videoconvert !"),
            _ => (format!(
                "video/x-raw,width={width},height={height},framerate={fps}/1"
            ), "videoconvert !"),
        };
        // Build a pipeline that works for any of the three encoders.
        // * nvh264enc needs NVMM memory caps;
        // * vaapih264enc wants system-memory caps;
        // * x264enc needs the usual raw caps.
        //
        // Eight pipeline variants: {preview tap, no tap} × {MJPEG out, H.264 out}
        // × {MJPG source passthrough/decode, raw source chain}. The tap variants
        // insert a `tee` with a small leaky preview queue feeding `preview_sink`.
        let preview_tap_path = camera_preview_tap_path();
        let preview_tap_branch = camera_preview_tap_branch(width, height, fps);
        let raw_source_chain =
            camera_raw_source_chain(&src_desc, &src_caps, width, height, fps, source_profile);
        let desc = if preview_tap_path.is_some() {
            if output_mjpeg {
                if use_mjpg_source {
                    // MJPG passthrough to the appsink; decoded copy to the preview tap.
                    format!(
                        "{src_desc} ! \
                        image/jpeg,width={width},height={height},framerate={fps}/1 ! \
                        tee name=t \
                        t. ! queue max-size-buffers=30 leaky=downstream ! \
                        appsink name=asink emit-signals=true max-buffers=60 drop=true \
                        t. ! queue max-size-buffers=2 leaky=downstream ! jpegdec ! \
                        {preview_tap_branch}"
                    )
                } else {
                    // Raw source re-encoded to JPEG for the appsink; raw copy to the tap.
                    format!(
                        "{raw_source_chain} ! \
                        tee name=t \
                        t. ! queue max-size-buffers=30 leaky=downstream ! \
                        videoconvert ! jpegenc quality={jpeg_quality} ! \
                        appsink name=asink emit-signals=true max-buffers=60 drop=true \
                        t. ! queue max-size-buffers=2 leaky=downstream ! \
                        {preview_tap_branch}"
                    )
                }
            } else if use_mjpg_source {
                // MJPG source decoded + rate-normalized, then H.264-encoded; tap gets raw.
                format!(
                    "{src_desc} ! \
                    image/jpeg,width={width},height={height} ! \
                    jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
                    tee name=t \
                    t. ! queue max-size-buffers=30 leaky=downstream ! \
                    videoconvert ! {enc_opts} ! \
                    h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                    appsink name=asink emit-signals=true max-buffers=60 drop=true \
                    t. ! queue max-size-buffers=2 leaky=downstream ! \
                    {preview_tap_branch}"
                )
            } else {
                // Raw source, H.264 encode; tap gets raw.
                format!(
                    "{raw_source_chain} ! \
                    tee name=t \
                    t. ! queue max-size-buffers=30 leaky=downstream ! \
                    {preenc} {enc_opts} ! \
                    h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                    appsink name=asink emit-signals=true max-buffers=60 drop=true \
                    t. ! queue max-size-buffers=2 leaky=downstream ! \
                    {preview_tap_branch}"
                )
            }
        } else if output_mjpeg {
            if use_mjpg_source {
                // MJPG passthrough, no tap.
                format!(
                    "{src_desc} ! \
                    image/jpeg,width={width},height={height},framerate={fps}/1 ! \
                    queue max-size-buffers=30 leaky=downstream ! \
                    appsink name=asink emit-signals=true max-buffers=60 drop=true"
                )
            } else {
                // Raw source re-encoded to JPEG, no tap.
                format!(
                    "{raw_source_chain} ! \
                    videoconvert ! jpegenc quality={jpeg_quality} ! \
                    queue max-size-buffers=30 leaky=downstream ! \
                    appsink name=asink emit-signals=true max-buffers=60 drop=true"
                )
            }
        } else if use_mjpg_source {
            // MJPG source decoded then H.264-encoded, no tap.
            format!(
                "{src_desc} ! \
                image/jpeg,width={width},height={height} ! \
                jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
                videoconvert ! {enc_opts} ! \
                h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                queue max-size-buffers=30 leaky=downstream ! \
                appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        } else {
            // Raw source, H.264 encode, no tap.
            format!(
                "{raw_source_chain} ! \
                {preenc} {enc_opts} ! \
                h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
                queue max-size-buffers=30 leaky=downstream ! \
                appsink name=asink emit-signals=true max-buffers=60 drop=true"
            )
        };
        tracing::info!(%enc, width, height, fps, ?desc, "πŸ“Έ using encoder element");
        let pipeline: gst::Pipeline = gst::parse::launch(&desc)
            .context("gst parse_launch(cam)")?
            .downcast::()
            .expect("not a pipeline");
        tracing::debug!("πŸ“Έ pipeline built OK – setting PLAYING…");
        // `asink` is declared in every pipeline variant above, so lookup failure
        // would be a bug in the descriptions — hence expect().
        let sink: gst_app::AppSink = pipeline
            .by_name("asink")
            .expect("appsink element not found")
            .downcast::()
            .expect("appsink down‑cast");
        spawn_camera_bus_logger(&pipeline, dev_label.clone());
        // Roll the pipeline back to NULL on a failed state change so GStreamer
        // resources are released before we bail out.
        if let Err(err) = pipeline.set_state(gst::State::Playing) {
            let _ = pipeline.set_state(gst::State::Null);
            return Err(err.into());
        }
        tracing::info!("πŸ“Έ webcam pipeline ▢️ device={dev_label}");
        // The preview tap sink only exists when a tap path was configured; its
        // absence in that case is a contract violation (context error, not panic).
        let preview_tap_running = if let Some(path) = preview_tap_path {
            let preview_sink = pipeline
                .by_name("preview_sink")
                .context("missing camera preview tap appsink")?
                .downcast::()
                .expect("camera preview tap appsink");
            Some(spawn_camera_preview_tap(preview_sink, path))
        } else {
            None
        };
        Ok(Self {
            pipeline,
            sink,
            preview_tap_running,
            pts_rebaser: crate::live_capture_clock::DurationPacedSourcePtsRebaser::default(),
            // Nominal per-frame duration in µs (floor 1) used when a buffer
            // carries no duration of its own.
            frame_duration_us: (1_000_000u64 / u64::from(fps.max(1))).max(1),
        })
    }

    /// Pull one encoded sample from the appsink and wrap it as a packet.
    ///
    /// Returns `None` when no sample is available (pull failed — e.g. EOS or
    /// pipeline error) or the sample carries no mappable buffer. The buffer PTS
    /// (converted to µs) is rebased through the shared `pts_rebaser`; buffers
    /// without a duration fall back to the nominal `frame_duration_us`.
    pub fn pull(&self) -> Option {
        let sample = self.sink.pull_sample().ok()?;
        let buf = sample.buffer()?;
        let map = buf.map_readable().ok()?;
        let source_pts_us = buf.pts().map(|ts| ts.nseconds() / 1_000);
        let packet_duration_us = buf
            .duration()
            .map(|ts| (ts.nseconds() / 1_000).max(1))
            .unwrap_or(self.frame_duration_us);
        let timing = self.pts_rebaser.rebase_with_packet_duration(
            source_pts_us,
            packet_duration_us,
            crate::live_capture_clock::upstream_source_lag_cap(),
        );
        let pts = timing.packet_pts_us;
        // NOTE(review): this counter is a function-local `static`, i.e.
        // process-wide — packet indices are shared across all CameraCapture
        // instances, not per pipeline. Confirm that is intentional.
        static CAMERA_PACKET_COUNT: std::sync::atomic::AtomicU64 =
            std::sync::atomic::AtomicU64::new(0);
        let packet_index = CAMERA_PACKET_COUNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        log_camera_first_packet(packet_index, map.as_slice().len(), pts);
        log_camera_timing_sample(packet_index, timing, map.as_slice().len());
        Some(VideoPacket {
            // id 2 — presumably the camera stream id; verify against the
            // VideoPacket contract used by consumers.
            id: 2,
            pts,
            data: map.as_slice().to_vec(),
            ..Default::default()
        })
    }
}

/// Resolve the exact profile the client sends, preferring the server UVC contract.
fn resolved_capture_profile(cfg: Option) -> (u32, u32, u32) {
    // The negotiated profile wins unless the operator explicitly opted into
    // environment overrides.
    if let Some(profile) = cfg {
        if !env_flag_enabled("LESAVKA_CAM_ALLOW_PROFILE_OVERRIDE") {
            return (profile.width, profile.height, profile.fps.max(1));
        }
    }
    // Environment overrides, defaulting to the negotiated values when a profile
    // exists, else to 1280x720 @ 25 fps; fps is floored at 1.
    let width = env_u32("LESAVKA_CAM_WIDTH", cfg.map_or(1280, |c| c.width));
    let height = env_u32("LESAVKA_CAM_HEIGHT", cfg.map_or(720, |c| c.height));
    let fps = env_u32("LESAVKA_CAM_FPS", cfg.map_or(25, |c| c.fps)).max(1);
    (width, height, fps)
}

/// True when the environment variable `name` holds anything other than an
/// empty string, "0", "false", "no" or "off" (ASCII case-insensitive,
/// surrounding whitespace ignored). Unset variables count as disabled.
fn env_flag_enabled(name: &str) -> bool {
    let Ok(raw) = std::env::var(name) else {
        return false;
    };
    let value = raw.trim().to_ascii_lowercase();
    !matches!(value.as_str(), "" | "0" | "false" | "no" | "off")
}

/// Log once — on the very first packet — that upstream frames are flowing.
fn log_camera_first_packet(packet_index: u64, bytes: usize, pts_us: u64) {
    if packet_index != 0 {
        return;
    }
    tracing::info!(bytes, pts_us, "πŸ“Έ upstream webcam frames flowing");
}

/// Sampling gate for timing traces: only when upstream timing tracing is
/// enabled, and then for the first 10 packets plus every 300th thereafter.
fn should_log_camera_timing_sample(packet_index: u64) -> bool {
    if !crate::live_capture_clock::upstream_timing_trace_enabled() {
        return false;
    }
    packet_index < 10 || packet_index % 300 == 0
}

/// Emit one structured timing sample for a pulled camera packet, subject to
/// the `should_log_camera_timing_sample` gate.
fn log_camera_timing_sample(
    packet_index: u64,
    timing: crate::live_capture_clock::RebasedSourcePts,
    bytes: usize,
) {
    if !should_log_camera_timing_sample(packet_index) {
        return;
    }
    tracing::info!(
        packet_index,
        source_pts_us = timing.source_pts_us.unwrap_or_default(),
        source_base_us = timing.source_base_us.unwrap_or_default(),
        capture_base_us = timing.capture_base_us.unwrap_or_default(),
        capture_now_us = timing.capture_now_us,
        packet_pts_us = timing.packet_pts_us,
        // Signed delta: how far behind "now" the rebased PTS landed.
        pull_path_delay_us = timing.capture_now_us as i128 - timing.packet_pts_us as i128,
        used_source_pts = timing.used_source_pts,
        lag_clamped = timing.lag_clamped,
        bytes,
        "πŸ“Έ upstream webcam timing sample"
    );
}