diff --git a/server/src/main.rs b/server/src/main.rs
index 89d3bcd..8247301 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -5,6 +5,7 @@
 use anyhow::Context as _;
 use futures_util::{Stream, StreamExt};
 use gstreamer as gst;
+use std::path::Path;
 use std::sync::atomic::{AtomicBool, Ordering};
 use std::time::{Duration, SystemTime, UNIX_EPOCH};
 use std::{backtrace::Backtrace, panic, pin::Pin, sync::Arc};
@@ -141,7 +142,16 @@ fn pick_uvc_device() -> anyhow::Result<String> {
         return Ok(path);
     }
 
+    let ctrl = UsbGadget::find_controller().ok();
+    if let Some(ctrl) = ctrl.as_deref() {
+        let by_path = format!("/dev/v4l/by-path/platform-{ctrl}-video-index0");
+        if Path::new(&by_path).exists() {
+            return Ok(by_path);
+        }
+    }
+
     // walk /dev/video* via udev and look for an output-capable node (gadget exposes one)
+    let mut fallback: Option<String> = None;
     if let Ok(mut en) = udev::Enumerator::new() {
         let _ = en.match_subsystem("video4linux");
         if let Ok(devs) = en.scan_devices() {
@@ -150,14 +160,33 @@ fn pick_uvc_device() -> anyhow::Result<String> {
                     .property_value("ID_V4L_CAPABILITIES")
                     .and_then(|v| v.to_str())
                     .unwrap_or_default();
-                if caps.contains(":video_output:") {
-                    if let Some(node) = dev.devnode() {
-                        return Ok(node.to_string_lossy().into_owned());
+                if !caps.contains(":video_output:") {
+                    continue;
+                }
+                let Some(node) = dev.devnode() else { continue };
+                let node = node.to_string_lossy().into_owned();
+                let product = dev
+                    .property_value("ID_V4L_PRODUCT")
+                    .and_then(|v| v.to_str())
+                    .unwrap_or_default();
+                let path = dev
+                    .property_value("ID_PATH")
+                    .and_then(|v| v.to_str())
+                    .unwrap_or_default();
+                if let Some(ctrl) = ctrl.as_deref() {
+                    if product == ctrl || path.contains(ctrl) {
+                        return Ok(node);
                     }
                 }
+                if fallback.is_none() {
+                    fallback = Some(node);
+                }
             }
         }
     }
+    if let Some(node) = fallback {
+        return Ok(node);
+    }
 
     Err(anyhow::anyhow!(
         "no video_output v4l2 node found; set LESAVKA_UVC_DEV"
diff --git a/server/src/video.rs b/server/src/video.rs
index 588a061..d3a351b 100644
--- a/server/src/video.rs
+++ b/server/src/video.rs
@@ -10,6 +10,8 @@ use gstreamer_app as gst_app;
 use lesavka_common::lesavka::VideoPacket;
 use tokio_stream::wrappers::ReceiverStream;
 use tonic::Status;
+use std::sync::atomic::{AtomicU64, Ordering};
+use std::sync::Arc;
 use tracing::{Level, debug, enabled, error, info, trace, warn};
 
 const EYE_ID: [&str; 2] = ["l", "r"];
@@ -81,6 +83,14 @@ pub async fn eye_ball(dev: &str, id: u32, _max_bitrate_kbit: u32) -> anyhow::Res
     let eye = EYE_ID[id as usize];
     gst::init().context("gst init")?;
 
+    let target_fps = env_u32("LESAVKA_EYE_FPS", 25);
+    let frame_interval_us = if target_fps == 0 {
+        0
+    } else {
+        (1_000_000 / target_fps) as u64
+    };
+    let last_sent = Arc::new(AtomicU64::new(0));
+
     let desc = format!(
         "v4l2src name=cam_{eye} device=\"{dev}\" io-mode=mmap do-timestamp=true ! \
          queue ! \
@@ -166,6 +176,7 @@ pub async fn eye_ball(dev: &str, id: u32, _max_bitrate_kbit: u32) -> anyhow::Res
         }
     });
 
+    let last_sent_cloned = last_sent.clone();
     sink.set_callbacks(
         gst_app::AppSinkCallbacks::builder()
             .new_sample(move |sink| {
@@ -211,6 +222,14 @@ pub async fn eye_ball(dev: &str, id: u32, _max_bitrate_kbit: u32) -> anyhow::Res
                     .nseconds()
                     / 1_000;
 
+                if frame_interval_us > 0 {
+                    let last = last_sent_cloned.load(Ordering::Relaxed);
+                    if last != 0 && pts_us.saturating_sub(last) < frame_interval_us {
+                        return Ok(gst::FlowSuccess::Ok);
+                    }
+                    last_sent_cloned.store(pts_us, Ordering::Relaxed);
+                }
+
                 /* -------- ship over gRPC ----- */
                 let data = map.as_slice().to_vec();
                 let size = data.len();