diff --git a/client/src/app.rs b/client/src/app.rs
index b6d5ca4..7d1c4ac 100644
--- a/client/src/app.rs
+++ b/client/src/app.rs
@@ -20,8 +20,12 @@ use lesavka_common::lesavka::{
 };
 
 use crate::{
-    handshake, input::camera::CameraCapture, input::inputs::InputAggregator,
-    input::microphone::MicrophoneCapture, output::audio::AudioOut, output::video::MonitorWindow,
+    handshake,
+    input::camera::{CameraCapture, CameraCodec, CameraConfig},
+    input::inputs::InputAggregator,
+    input::microphone::MicrophoneCapture,
+    output::audio::AudioOut,
+    output::video::MonitorWindow,
 };
 
 pub struct LesavkaClientApp {
@@ -59,6 +63,27 @@ impl LesavkaClientApp {
         info!(server = %self.server_addr, "🚦 starting handshake");
         let caps = handshake::negotiate(&self.server_addr).await;
         tracing::info!("🀝 server capabilities = {:?}", caps);
+        let camera_cfg = match (
+            caps.camera_codec.as_deref(),
+            caps.camera_width,
+            caps.camera_height,
+            caps.camera_fps,
+        ) {
+            (Some(codec), Some(width), Some(height), Some(fps)) => {
+                let codec = match codec.to_ascii_lowercase().as_str() {
+                    "mjpeg" | "mjpg" | "jpeg" => CameraCodec::Mjpeg,
+                    "h264" => CameraCodec::H264,
+                    _ => CameraCodec::H264,
+                };
+                Some(CameraConfig {
+                    codec,
+                    width,
+                    height,
+                    fps,
+                })
+            }
+            _ => None,
+        };
 
         /*────────── persistent gRPC channels ──────────*/
         let hid_ep = Channel::from_shared(self.server_addr.clone())?
@@ -150,8 +175,18 @@ impl LesavkaClientApp {
         tokio::spawn(Self::audio_loop(ep_audio, audio_out));
         /*────────── camera & mic tasks (gated by caps) ───────────*/
         if caps.camera && std::env::var("LESAVKA_CAM_DISABLE").is_err() {
+            if let Some(cfg) = camera_cfg {
+                info!(
+                    codec = ?cfg.codec,
+                    width = cfg.width,
+                    height = cfg.height,
+                    fps = cfg.fps,
+                    "πŸ“Έ using camera settings from server"
+                );
+            }
             let cam = Arc::new(CameraCapture::new(
                 std::env::var("LESAVKA_CAM_SOURCE").ok().as_deref(),
+                camera_cfg,
             )?);
             tokio::spawn(Self::cam_loop(vid_ep.clone(), cam));
         }
diff --git a/client/src/handshake.rs b/client/src/handshake.rs
index c1d0a61..83358e5 100644
--- a/client/src/handshake.rs
+++ b/client/src/handshake.rs
@@ -7,10 +7,15 @@ use tokio::time::timeout;
 use tonic::{Code, transport::Endpoint};
 use tracing::{info, warn};
 
-#[derive(Default, Clone, Copy, Debug)]
+#[derive(Default, Clone, Debug)]
 pub struct PeerCaps {
     pub camera: bool,
     pub microphone: bool,
+    pub camera_output: Option<String>,
+    pub camera_codec: Option<String>,
+    pub camera_width: Option<u32>,
+    pub camera_height: Option<u32>,
+    pub camera_fps: Option<u32>,
 }
 
 pub async fn negotiate(uri: &str) -> PeerCaps {
@@ -33,9 +38,35 @@
 
     match timeout(Duration::from_secs(5), cli.get_capabilities(pb::Empty {})).await {
         Ok(Ok(rsp)) => {
+            let rsp = rsp.get_ref();
             let caps = PeerCaps {
-                camera: rsp.get_ref().camera,
-                microphone: rsp.get_ref().microphone,
+                camera: rsp.camera,
+                microphone: rsp.microphone,
+                camera_output: if rsp.camera_output.is_empty() {
+                    None
+                } else {
+                    Some(rsp.camera_output.clone())
+                },
+                camera_codec: if rsp.camera_codec.is_empty() {
+                    None
+                } else {
+                    Some(rsp.camera_codec.clone())
+                },
+                camera_width: if rsp.camera_width == 0 {
+                    None
+                } else {
+                    Some(rsp.camera_width)
+                },
+                camera_height: if rsp.camera_height == 0 {
+                    None
+                } else {
+                    Some(rsp.camera_height)
+                },
+                camera_fps: if rsp.camera_fps == 0 {
+                    None
+                } else {
+                    Some(rsp.camera_fps)
+                },
             };
             info!(?caps, "🀝 handshake ok");
             caps
diff --git a/client/src/input/camera.rs b/client/src/input/camera.rs
index e779c31..3ac1381 100644
--- a/client/src/input/camera.rs
+++ b/client/src/input/camera.rs
@@ -14,6 +14,20 @@ fn env_u32(name: &str, default: u32) -> u32 {
         .unwrap_or(default)
 }
 
+#[derive(Clone, Copy, Debug)]
+pub enum CameraCodec {
+    H264,
+    Mjpeg,
+}
+
+#[derive(Clone, Copy, Debug)]
+pub struct CameraConfig {
+    pub codec: CameraCodec,
+    pub width: u32,
+    pub height: u32,
+    pub fps: u32,
+}
+
 pub struct CameraCapture {
     #[allow(dead_code)] // kept alive to hold PLAYING state
     pipeline: gst::Pipeline,
@@ -21,7 +35,10 @@ pub struct CameraCapture {
 }
 
 impl CameraCapture {
-    pub fn new(device_fragment: Option<&str>) -> anyhow::Result<Self> {
+    pub fn new(
+        device_fragment: Option<&str>,
+        cfg: Option<CameraConfig>,
+    ) -> anyhow::Result<Self> {
         gst::init().ok();
 
         // Pick device (prefers V4L2 nodes with capture capability)
@@ -36,10 +53,14 @@ impl CameraCapture {
             .ok()
             .map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpg" | "mjpeg" | "jpeg"))
             .unwrap_or(false);
-        let output_mjpeg = std::env::var("LESAVKA_CAM_CODEC")
-            .ok()
-            .map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg"))
-            .unwrap_or(false);
+        let output_mjpeg = cfg
+            .map(|cfg| matches!(cfg.codec, CameraCodec::Mjpeg))
+            .unwrap_or_else(|| {
+                std::env::var("LESAVKA_CAM_CODEC")
+                    .ok()
+                    .map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg"))
+                    .unwrap_or(false)
+            });
         let jpeg_quality = env_u32("LESAVKA_CAM_JPEG_QUALITY", 85).clamp(1, 100);
         let (enc, kf_prop, kf_val) = if use_mjpg_source && !output_mjpeg {
             ("x264enc", "key-int-max", "30")
@@ -56,9 +77,9 @@ impl CameraCapture {
         } else {
             tracing::info!("πŸ“Έ using encoder element: {enc}");
         }
-        let width = env_u32("LESAVKA_CAM_WIDTH", 1280);
-        let height = env_u32("LESAVKA_CAM_HEIGHT", 720);
-        let fps = env_u32("LESAVKA_CAM_FPS", 25).max(1);
+        let width = cfg.map(|cfg| cfg.width).unwrap_or_else(|| env_u32("LESAVKA_CAM_WIDTH", 1280));
+        let height = cfg.map(|cfg| cfg.height).unwrap_or_else(|| env_u32("LESAVKA_CAM_HEIGHT", 720));
+        let fps = cfg.map(|cfg| cfg.fps).unwrap_or_else(|| env_u32("LESAVKA_CAM_FPS", 25)).max(1);
         let have_nvvidconv = gst::ElementFactory::find("nvvidconv").is_some();
         let (src_caps, preenc) = match enc {
             // ───────────────────────────────────────────────────────────────────
diff --git a/common/proto/lesavka.proto b/common/proto/lesavka.proto
index 7d37566..b1f20a6 100644
--- a/common/proto/lesavka.proto
+++ b/common/proto/lesavka.proto
@@ -11,7 +11,15 @@ message AudioPacket { uint32 id = 1; uint64 pts = 2; bytes data = 3; }
 
 message ResetUsbReply { bool ok = 1; } // true = success
 
-message HandshakeSet { bool camera = 1; bool microphone = 2; }
+message HandshakeSet {
+  bool camera = 1;
+  bool microphone = 2;
+  string camera_output = 3;
+  string camera_codec = 4;
+  uint32 camera_width = 5;
+  uint32 camera_height = 6;
+  uint32 camera_fps = 7;
+}
 
 message Empty {}
 
diff --git a/server/src/camera.rs b/server/src/camera.rs
new file mode 100644
index 0000000..ca7da57
--- /dev/null
+++ b/server/src/camera.rs
@@ -0,0 +1,260 @@
+// server/src/camera.rs
+
+use gstreamer as gst;
+use std::collections::HashMap;
+use std::fs;
+use std::sync::{OnceLock, RwLock};
+use tracing::{info, warn};
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum CameraOutput {
+    Uvc,
+    Hdmi,
+}
+
+impl CameraOutput {
+    pub fn as_str(self) -> &'static str {
+        match self {
+            CameraOutput::Uvc => "uvc",
+            CameraOutput::Hdmi => "hdmi",
+        }
+    }
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq)]
+pub enum CameraCodec {
+    H264,
+    Mjpeg,
+}
+
+impl CameraCodec {
+    pub fn as_str(self) -> &'static str {
+        match self {
+            CameraCodec::H264 => "h264",
+            CameraCodec::Mjpeg => "mjpeg",
+        }
+    }
+}
+
+#[derive(Clone, Debug)]
+pub struct HdmiConnector {
+    pub name: String,
+    pub id: Option<u32>,
+}
+
+#[derive(Clone, Debug)]
+pub struct CameraConfig {
+    pub output: CameraOutput,
+    pub codec: CameraCodec,
+    pub width: u32,
+    pub height: u32,
+    pub fps: u32,
+    pub hdmi: Option<HdmiConnector>,
+}
+
+static LAST_CONFIG: OnceLock<RwLock<CameraConfig>> = OnceLock::new();
+
+pub fn update_camera_config() -> CameraConfig {
+    let cfg = select_camera_config();
+    let lock = LAST_CONFIG.get_or_init(|| RwLock::new(cfg.clone()));
+    *lock.write().unwrap() = cfg.clone();
+    cfg
+}
+
+pub fn current_camera_config() -> CameraConfig {
+    if let Some(lock) = LAST_CONFIG.get() {
+        return lock.read().unwrap().clone();
+    }
+    update_camera_config()
+}
+
+fn select_camera_config() -> CameraConfig {
+    let output_env = std::env::var("LESAVKA_CAM_OUTPUT").ok();
+    let output_override = output_env
+        .as_deref()
+        .and_then(parse_camera_output);
+
+    let require_connected = output_override != Some(CameraOutput::Hdmi);
+    let hdmi = detect_hdmi_connector(require_connected);
+    if output_override == Some(CameraOutput::Hdmi) && hdmi.is_none() {
+        warn!("πŸ“· HDMI output forced but no connector detected");
+    }
+    let output = match output_override {
+        Some(v) => v,
+        None => {
+            if hdmi.is_some() {
+                CameraOutput::Hdmi
+            } else {
+                CameraOutput::Uvc
+            }
+        }
+    };
+
+    let cfg = match output {
+        CameraOutput::Hdmi => select_hdmi_config(hdmi),
+        CameraOutput::Uvc => select_uvc_config(),
+    };
+
+    info!(
+        output = cfg.output.as_str(),
+        codec = cfg.codec.as_str(),
+        width = cfg.width,
+        height = cfg.height,
+        fps = cfg.fps,
+        hdmi = cfg.hdmi.as_ref().map(|h| h.name.as_str()).unwrap_or("none"),
+        "πŸ“· camera output selected"
+    );
+
+    cfg
+}
+
+fn parse_camera_output(raw: &str) -> Option<CameraOutput> {
+    match raw.trim().to_ascii_lowercase().as_str() {
+        "uvc" => Some(CameraOutput::Uvc),
+        "hdmi" => Some(CameraOutput::Hdmi),
+        "auto" | "" => None,
+        _ => None,
+    }
+}
+
+fn select_hdmi_config(hdmi: Option<HdmiConnector>) -> CameraConfig {
+    let hw_decode = has_hw_h264_decode();
+    let (width, height) = if hw_decode { (1920, 1080) } else { (1280, 720) };
+    let fps = 30;
+    if !hw_decode {
+        warn!("πŸ“· HDMI output: hardware H264 decoder not detected; using 720p30");
+    }
+    CameraConfig {
+        output: CameraOutput::Hdmi,
+        codec: CameraCodec::H264,
+        width,
+        height,
+        fps,
+        hdmi,
+    }
+}
+
+fn select_uvc_config() -> CameraConfig {
+    let mut uvc_env = HashMap::new();
+    if let Ok(text) = fs::read_to_string("/etc/lesavka/uvc.env") {
+        uvc_env = parse_env_file(&text);
+    }
+
+    let width = read_u32_from_env("LESAVKA_UVC_WIDTH")
+        .or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_WIDTH"))
+        .unwrap_or(1280);
+    let height = read_u32_from_env("LESAVKA_UVC_HEIGHT")
+        .or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_HEIGHT"))
+        .unwrap_or(720);
+    let fps = read_u32_from_env("LESAVKA_UVC_FPS")
+        .or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_FPS"))
+        .or_else(|| {
+            read_u32_from_env("LESAVKA_UVC_INTERVAL")
+                .or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_INTERVAL"))
+                .and_then(|interval| {
+                    if interval == 0 {
+                        None
+                    } else {
+                        Some(10_000_000 / interval)
+                    }
+                })
+        })
+        .unwrap_or(25);
+
+    CameraConfig {
+        output: CameraOutput::Uvc,
+        codec: CameraCodec::Mjpeg,
+        width,
+        height,
+        fps,
+        hdmi: None,
+    }
+}
+
+fn has_hw_h264_decode() -> bool {
+    if gst::init().is_err() {
+        return false;
+    }
+    for name in ["v4l2h264dec", "v4l2slh264dec", "omxh264dec"] {
+        if gst::ElementFactory::find(name).is_some() {
+            return true;
+        }
+    }
+    false
+}
+
+fn detect_hdmi_connector(require_connected: bool) -> Option<HdmiConnector> {
+    let preferred = std::env::var("LESAVKA_HDMI_CONNECTOR").ok();
+    let entries = fs::read_dir("/sys/class/drm").ok()?;
+    let mut connectors = Vec::new();
+
+    for entry in entries.flatten() {
+        let name = entry.file_name().to_string_lossy().into_owned();
+        if !name.contains("HDMI-A-") {
+            continue;
+        }
+        let status_path = entry.path().join("status");
+        let status = fs::read_to_string(&status_path)
+            .ok()
+            .map(|v| v.trim().to_string())
+            .unwrap_or_default();
+        let id = fs::read_to_string(entry.path().join("connector_id"))
+            .ok()
+            .and_then(|v| v.trim().parse::<u32>().ok());
+        connectors.push((name, status, id));
+    }
+
+    let matches_preferred = |name: &str, preferred: &str| {
+        name == preferred || name.ends_with(preferred)
+    };
+
+    if let Some(pref) = preferred.as_deref() {
+        for (name, status, id) in &connectors {
+            if matches_preferred(name, pref)
+                && (!require_connected || status == "connected")
+            {
+                return Some(HdmiConnector {
+                    name: name.clone(),
+                    id: *id,
+                });
+            }
+        }
+    }
+
+    for (name, status, id) in connectors {
+        if !require_connected || status == "connected" {
+            return Some(HdmiConnector { name, id });
+        }
+    }
+
+    None
+}
+
+fn parse_env_file(text: &str) -> HashMap<String, String> {
+    let mut out = HashMap::new();
+    for line in text.lines() {
+        let line = line.trim();
+        if line.is_empty() || line.starts_with('#') {
+            continue;
+        }
+        let mut parts = line.splitn(2, '=');
+        let key = match parts.next() {
+            Some(v) => v.trim(),
+            None => continue,
+        };
+        let val = match parts.next() {
+            Some(v) => v.trim(),
+            None => continue,
+        };
+        out.insert(key.to_string(), val.to_string());
+    }
+    out
+}
+
+fn read_u32_from_env(key: &str) -> Option<u32> {
+    std::env::var(key).ok().and_then(|v| v.parse::<u32>().ok())
+}
+
+fn read_u32_from_map(map: &HashMap<String, String>, key: &str) -> Option<u32> {
+    map.get(key).and_then(|v| v.parse::<u32>().ok())
+}
diff --git a/server/src/handshake.rs b/server/src/handshake.rs
index 903fce9..ce18e86 100644
--- a/server/src/handshake.rs
+++ b/server/src/handshake.rs
@@ -5,11 +5,9 @@ use lesavka_common::lesavka::{
     Empty, HandshakeSet,
     handshake_server::{Handshake, HandshakeServer},
 };
+use crate::camera;
 
-pub struct HandshakeSvc {
-    pub camera: bool,
-    pub microphone: bool,
-}
+pub struct HandshakeSvc;
 
 #[tonic::async_trait]
 impl Handshake for HandshakeSvc {
@@ -17,18 +15,26 @@ impl Handshake for HandshakeSvc {
         &self,
         _req: Request<Empty>,
     ) -> Result<Response<HandshakeSet>, Status> {
+        let cfg = camera::update_camera_config();
+        let camera_enabled = match cfg.output {
+            camera::CameraOutput::Uvc => std::env::var("LESAVKA_DISABLE_UVC").is_err(),
+            camera::CameraOutput::Hdmi => true,
+        };
+        let microphone = std::env::var("LESAVKA_DISABLE_UAC").is_err();
         Ok(Response::new(HandshakeSet {
-            camera: self.camera,
-            microphone: self.microphone,
+            camera: camera_enabled,
+            microphone,
+            camera_output: cfg.output.as_str().to_string(),
+            camera_codec: cfg.codec.as_str().to_string(),
+            camera_width: cfg.width,
+            camera_height: cfg.height,
+            camera_fps: cfg.fps,
         }))
     }
 }
 
 impl HandshakeSvc {
     pub fn server() -> HandshakeServer<Self> {
-        HandshakeServer::new(Self {
-            camera: true,
-            microphone: true,
-        })
+        HandshakeServer::new(Self)
     }
 }
diff --git a/server/src/lib.rs b/server/src/lib.rs
index db93be3..1e6a2f1 100644
--- a/server/src/lib.rs
+++ b/server/src/lib.rs
@@ -1,6 +1,7 @@
 // server/src/lib.rs
 
 pub mod audio;
+pub mod camera;
 pub mod gadget;
 pub mod handshake;
 pub mod video;
diff --git a/server/src/main.rs b/server/src/main.rs
index 4372c9e..964d067 100644
--- a/server/src/main.rs
+++ b/server/src/main.rs
@@ -23,7 +23,7 @@ use lesavka_common::lesavka::{
     relay_server::{Relay, RelayServer},
 };
 
-use lesavka_server::{audio, gadget::UsbGadget, handshake::HandshakeSvc, video};
+use lesavka_server::{audio, camera, gadget::UsbGadget, handshake::HandshakeSvc, video};
 
 /*──────────────── constants ────────────────*/
 const VERSION: &str = env!("CARGO_PKG_VERSION");
@@ -100,7 +100,10 @@ async fn recover_hid_if_needed(
     did_cycle: Arc<AtomicBool>,
 ) {
     let code = err.raw_os_error();
-    let should_recover = matches!(code, Some(libc::ENOTCONN) | Some(libc::ESHUTDOWN) | Some(libc::EPIPE));
+    let should_recover = matches!(
+        code,
+        Some(libc::ENOTCONN) | Some(libc::ESHUTDOWN) | Some(libc::EPIPE)
+    );
     if !should_recover {
         return;
     }
@@ -271,7 +274,9 @@ impl Handler {
             info!("πŸ› οΈ Initial USB reset…");
             let _ = gadget.cycle(); // ignore failure - may boot without host
         } else {
-            info!("πŸ”’ gadget cycle disabled at startup (set LESAVKA_ALLOW_GADGET_CYCLE=1 to enable)");
+            info!(
+                "πŸ”’ gadget cycle disabled at startup (set LESAVKA_ALLOW_GADGET_CYCLE=1 to enable)"
+            );
         }
 
         info!("πŸ› οΈ opening HID endpoints …");
@@ -322,8 +327,14 @@ impl Relay for Handler {
             while let Some(pkt) = s.next().await.transpose()? {
                 if let Err(e) = kb.lock().await.write_all(&pkt.data).await {
                     warn!("⌨️ write failed: {e} (dropped)");
-                    recover_hid_if_needed(&e, gadget.clone(), kb.clone(), ms.clone(), did_cycle.clone())
-                        .await;
+                    recover_hid_if_needed(
+                        &e,
+                        gadget.clone(),
+                        kb.clone(),
+                        ms.clone(),
+                        did_cycle.clone(),
+                    )
+                    .await;
                 }
                 tx.send(Ok(pkt)).await.ok();
             }
@@ -348,8 +359,14 @@ impl Relay for Handler {
             while let Some(pkt) = s.next().await.transpose()? {
                 if let Err(e) = ms.lock().await.write_all(&pkt.data).await {
                     warn!("πŸ–±οΈ write failed: {e} (dropped)");
-                    recover_hid_if_needed(&e, gadget.clone(), kb.clone(), ms.clone(), did_cycle.clone())
-                        .await;
+                    recover_hid_if_needed(
+                        &e,
+                        gadget.clone(),
+                        kb.clone(),
+                        ms.clone(),
+                        did_cycle.clone(),
+                    )
+                    .await;
                 }
                 tx.send(Ok(pkt)).await.ok();
             }
@@ -397,13 +414,32 @@ impl Relay for Handler {
         &self,
         req: Request<Streaming<VideoPacket>>,
     ) -> Result<Response<Self::StreamCameraStream>, Status> {
-        // map gRPC camera id β†’ UVC device
-        let uvc = pick_uvc_device().map_err(|e| Status::internal(format!("{e:#}")))?;
-        info!(%uvc, "πŸŽ₯ stream_camera using UVC sink");
+        let cfg = camera::current_camera_config();
+        info!(
+            output = cfg.output.as_str(),
+            codec = cfg.codec.as_str(),
+            width = cfg.width,
+            height = cfg.height,
+            fps = cfg.fps,
+            hdmi = cfg.hdmi.as_ref().map(|h| h.name.as_str()).unwrap_or("none"),
+            "πŸŽ₯ stream_camera output selected"
+        );
 
-        // build once
-        let relay =
-            video::CameraRelay::new(0, &uvc).map_err(|e| Status::internal(format!("{e:#}")))?;
+        let relay = match cfg.output {
+            camera::CameraOutput::Uvc => {
+                if std::env::var("LESAVKA_DISABLE_UVC").is_ok() {
+                    return Err(Status::failed_precondition(
+                        "UVC output disabled (LESAVKA_DISABLE_UVC set)",
+                    ));
+                }
+                let uvc = pick_uvc_device().map_err(|e| Status::internal(format!("{e:#}")))?;
+                info!(%uvc, "πŸŽ₯ stream_camera using UVC sink");
+                video::CameraRelay::new_uvc(0, &uvc, &cfg)
+                    .map_err(|e| Status::internal(format!("{e:#}")))?
+            }
+            camera::CameraOutput::Hdmi => video::CameraRelay::new_hdmi(0, &cfg)
+                .map_err(|e| Status::internal(format!("{e:#}")))?,
+        };
 
         // dummy outbound (same pattern as other streams)
         let (tx, rx) = tokio::sync::mpsc::channel(1);
diff --git a/server/src/video.rs b/server/src/video.rs
index 152a8bf..696eefd 100644
--- a/server/src/video.rs
+++ b/server/src/video.rs
@@ -2,18 +2,20 @@
 
 use anyhow::Context;
 use futures_util::Stream;
-use gst::prelude::*;
-use gst::MessageView::*;
 use gst::MessageView;
+use gst::MessageView::*;
+use gst::prelude::*;
 use gstreamer as gst;
 use gstreamer_app as gst_app;
 use lesavka_common::lesavka::VideoPacket;
+use std::sync::Arc;
+use std::sync::atomic::{AtomicU64, Ordering};
 use tokio_stream::wrappers::ReceiverStream;
 use tonic::Status;
-use std::sync::atomic::{AtomicU64, Ordering};
-use std::sync::Arc;
 use tracing::{Level, debug, enabled, error, info, trace, warn};
 
+use crate::camera::{CameraCodec, CameraConfig};
+
 const EYE_ID: [&str; 2] = ["l", "r"];
 static START: std::sync::OnceLock<std::time::Instant> = std::sync::OnceLock::new();
 static DEV_MODE: std::sync::OnceLock<bool> = std::sync::OnceLock::new();
@@ -26,8 +28,19 @@ fn env_u32(name: &str, default: u32) -> u32 {
         .unwrap_or(default)
 }
 fn dev_mode_enabled() -> bool {
-    *DEV_MODE
-        .get_or_init(|| std::env::var("LESAVKA_DEV_MODE").is_ok())
+    *DEV_MODE.get_or_init(|| std::env::var("LESAVKA_DEV_MODE").is_ok())
+}
+
+fn pick_h264_decoder() -> &'static str {
+    if gst::ElementFactory::find("v4l2h264dec").is_some() {
+        "v4l2h264dec"
+    } else if gst::ElementFactory::find("v4l2slh264dec").is_some() {
+        "v4l2slh264dec"
+    } else if gst::ElementFactory::find("omxh264dec").is_some() {
+        "omxh264dec"
+    } else {
+        "avdec_h264"
+    }
 }
 
 fn contains_idr(h264: &[u8]) -> bool {
@@ -292,19 +305,15 @@ pub struct WebcamSink {
 }
 
 impl WebcamSink {
-    pub fn new(uvc_dev: &str) -> anyhow::Result<Self> {
+    pub fn new(uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
         gst::init()?;
 
         let pipeline = gst::Pipeline::new();
 
-        let width = env_u32("LESAVKA_UVC_WIDTH", 1280) as i32;
-        let height = env_u32("LESAVKA_UVC_HEIGHT", 720) as i32;
-        let fps = env_u32("LESAVKA_UVC_FPS", 25).max(1) as i32;
-        let use_mjpeg = std::env::var("LESAVKA_UVC_MJPEG").is_ok()
-            || std::env::var("LESAVKA_UVC_CODEC")
-                .ok()
-                .map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg"))
-                .unwrap_or(false);
+        let width = cfg.width as i32;
+        let height = cfg.height as i32;
+        let fps = cfg.fps.max(1) as i32;
+        let use_mjpeg = matches!(cfg.codec, CameraCodec::Mjpeg);
 
         let src = gst::ElementFactory::make("appsrc")
             .build()?
@@ -354,7 +363,7 @@ impl WebcamSink {
 
             src.set_caps(Some(&caps_h264));
             let h264parse = gst::ElementFactory::make("h264parse").build()?;
-            let decoder_name = Self::pick_decoder();
+            let decoder_name = pick_h264_decoder();
             let decoder = gst::ElementFactory::make(decoder_name)
                 .build()
                 .with_context(|| format!("building decoder element {decoder_name}"))?;
@@ -405,33 +414,187 @@ impl WebcamSink {
         }
     }
 
-    fn pick_decoder() -> &'static str {
-        if gst::ElementFactory::find("v4l2h264dec").is_some() {
-            "v4l2h264dec"
-        } else if gst::ElementFactory::find("v4l2slh264dec").is_some() {
-            "v4l2slh264dec"
-        } else if gst::ElementFactory::find("omxh264dec").is_some() {
-            "omxh264dec"
-        } else {
-            "avdec_h264"
+}
+
+pub struct HdmiSink {
+    appsrc: gst_app::AppSrc,
+    _pipe: gst::Pipeline,
+}
+
+impl HdmiSink {
+    pub fn new(cfg: &CameraConfig) -> anyhow::Result<Self> {
+        gst::init()?;
+
+        let pipeline = gst::Pipeline::new();
+        let width = cfg.width as i32;
+        let height = cfg.height as i32;
+        let fps = cfg.fps.max(1) as i32;
+
+        let src = gst::ElementFactory::make("appsrc")
+            .build()?
+            .downcast::<gst_app::AppSrc>()
+            .expect("appsrc");
+        src.set_is_live(true);
+        src.set_format(gst::Format::Time);
+
+        let raw_caps = gst::Caps::builder("video/x-raw")
+            .field("width", width)
+            .field("height", height)
+            .field("framerate", gst::Fraction::new(fps, 1))
+            .build();
+        let capsfilter = gst::ElementFactory::make("capsfilter")
+            .property("caps", &raw_caps)
+            .build()?;
+
+        let queue = gst::ElementFactory::make("queue")
+            .property("max-size-buffers", 4u32)
+            .build()?;
+        let convert = gst::ElementFactory::make("videoconvert").build()?;
+        let scale = gst::ElementFactory::make("videoscale").build()?;
+        let sink = build_hdmi_sink(cfg)?;
+
+        match cfg.codec {
+            CameraCodec::H264 => {
+                let caps_h264 = gst::Caps::builder("video/x-h264")
+                    .field("stream-format", "byte-stream")
+                    .field("alignment", "au")
+                    .build();
+                src.set_caps(Some(&caps_h264));
+                let h264parse = gst::ElementFactory::make("h264parse").build()?;
+                let decoder_name = pick_h264_decoder();
+                let decoder = gst::ElementFactory::make(decoder_name)
+                    .build()
+                    .with_context(|| format!("building decoder element {decoder_name}"))?;
+
+                pipeline.add_many(&[
+                    src.upcast_ref(),
+                    &queue,
+                    &h264parse,
+                    &decoder,
+                    &convert,
+                    &scale,
+                    &capsfilter,
+                    &sink,
+                ])?;
+                gst::Element::link_many(&[
+                    src.upcast_ref(),
+                    &queue,
+                    &h264parse,
+                    &decoder,
+                    &convert,
+                    &scale,
+                    &capsfilter,
+                    &sink,
+                ])?;
+            }
+            CameraCodec::Mjpeg => {
+                let caps_mjpeg = gst::Caps::builder("image/jpeg")
+                    .field("parsed", true)
+                    .field("width", width)
+                    .field("height", height)
+                    .field("framerate", gst::Fraction::new(fps, 1))
+                    .build();
+                src.set_caps(Some(&caps_mjpeg));
+                let jpegdec = gst::ElementFactory::make("jpegdec").build()?;
+
+                pipeline.add_many(&[
+                    src.upcast_ref(),
+                    &queue,
+                    &jpegdec,
+                    &convert,
+                    &scale,
+                    &capsfilter,
+                    &sink,
+                ])?;
+                gst::Element::link_many(&[
+                    src.upcast_ref(),
+                    &queue,
+                    &jpegdec,
+                    &convert,
+                    &scale,
+                    &capsfilter,
+                    &sink,
+                ])?;
+            }
+        }
+
+        pipeline.set_state(gst::State::Playing)?;
+        Ok(Self {
+            appsrc: src,
+            _pipe: pipeline,
+        })
+    }
+
+    pub fn push(&self, pkt: VideoPacket) {
+        let mut buf = gst::Buffer::from_slice(pkt.data);
+        buf.get_mut()
+            .unwrap()
+            .set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));
+        if let Err(err) = self.appsrc.push_buffer(buf) {
+            tracing::warn!(target:"lesavka_server::video", %err, "πŸ“Ίβš οΈ HDMI appsrc push failed");
         }
     }
 }
 
+fn build_hdmi_sink(cfg: &CameraConfig) -> anyhow::Result<gst::Element> {
+    if let Ok(name) = std::env::var("LESAVKA_HDMI_SINK") {
std::env::var("LESAVKA_HDMI_SINK") { + return gst::ElementFactory::make(&name) + .build() + .context("building HDMI sink"); + } + + if gst::ElementFactory::find("kmssink").is_some() { + let sink = gst::ElementFactory::make("kmssink").build()?; + if let Some(connector) = cfg.hdmi.as_ref().and_then(|h| h.id) { + sink.set_property("connector-id", &connector); + } + sink.set_property("sync", &false); + return Ok(sink); + } + + let sink = gst::ElementFactory::make("autovideosink") + .build() + .context("building HDMI sink")?; + let _ = sink.set_property("sync", &false); + Ok(sink) +} + /*─────────────────────────────────*/ -/* gRPC β†’ WebcamSink relay */ +/* gRPC β†’ CameraSink relay */ /*─────────────────────────────────*/ +enum CameraSink { + Uvc(WebcamSink), + Hdmi(HdmiSink), +} + +impl CameraSink { + fn push(&self, pkt: VideoPacket) { + match self { + CameraSink::Uvc(sink) => sink.push(pkt), + CameraSink::Hdmi(sink) => sink.push(pkt), + } + } +} + pub struct CameraRelay { - sink: WebcamSink, // the v4l2sink pipeline (or stub) - id: u32, // gRPC β€œid” (for future multi‑cam) + sink: CameraSink, + id: u32, // gRPC β€œid” (for future multi‑cam) frames: std::sync::atomic::AtomicU64, } impl CameraRelay { - pub fn new(id: u32, uvc_dev: &str) -> anyhow::Result { + pub fn new_uvc(id: u32, uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result { Ok(Self { - sink: WebcamSink::new(uvc_dev)?, + sink: CameraSink::Uvc(WebcamSink::new(uvc_dev, cfg)?), + id, + frames: std::sync::atomic::AtomicU64::new(0), + }) + } + + pub fn new_hdmi(id: u32, cfg: &CameraConfig) -> anyhow::Result { + Ok(Self { + sink: CameraSink::Hdmi(HdmiSink::new(cfg)?), id, frames: std::sync::atomic::AtomicU64::new(0), })