camera: add HDMI output path

Brad Stein 2026-01-28 17:52:00 -03:00
parent dd69d7e378
commit 57adce2696
9 changed files with 628 additions and 67 deletions

View File

@@ -20,8 +20,12 @@ use lesavka_common::lesavka::{
};
use crate::{
handshake,
input::camera::{CameraCapture, CameraCodec, CameraConfig},
input::inputs::InputAggregator,
input::microphone::MicrophoneCapture,
output::audio::AudioOut,
output::video::MonitorWindow,
};
pub struct LesavkaClientApp {
@@ -59,6 +63,27 @@ impl LesavkaClientApp {
info!(server = %self.server_addr, "🚦 starting handshake");
let caps = handshake::negotiate(&self.server_addr).await;
tracing::info!("🤝 server capabilities = {:?}", caps);
let camera_cfg = match (
caps.camera_codec.as_deref(),
caps.camera_width,
caps.camera_height,
caps.camera_fps,
) {
(Some(codec), Some(width), Some(height), Some(fps)) => {
let codec = match codec.to_ascii_lowercase().as_str() {
"mjpeg" | "mjpg" | "jpeg" => CameraCodec::Mjpeg,
"h264" => CameraCodec::H264,
_ => CameraCodec::H264,
};
Some(CameraConfig {
codec,
width,
height,
fps,
})
}
_ => None,
};
/*────────── persistent gRPC channels ──────────*/
let hid_ep = Channel::from_shared(self.server_addr.clone())?
@@ -150,8 +175,18 @@ impl LesavkaClientApp {
tokio::spawn(Self::audio_loop(ep_audio, audio_out));
/*────────── camera & mic tasks (gated by caps) ───────────*/
if caps.camera && std::env::var("LESAVKA_CAM_DISABLE").is_err() {
if let Some(cfg) = camera_cfg {
info!(
codec = ?cfg.codec,
width = cfg.width,
height = cfg.height,
fps = cfg.fps,
"📸 using camera settings from server"
);
}
let cam = Arc::new(CameraCapture::new(
std::env::var("LESAVKA_CAM_SOURCE").ok().as_deref(),
camera_cfg,
)?);
tokio::spawn(Self::cam_loop(vid_ep.clone(), cam));
}
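Aside (not part of this diff): camera_cfg only becomes Some when the server advertises codec, width, height, and fps together; anything partial falls back to the LESAVKA_CAM_* defaults inside CameraCapture::new. A minimal, untested equivalent of the match above, written with the ? operator and using the PeerCaps/CameraCodec/CameraConfig types this commit introduces (the helper name is hypothetical):

fn negotiated_camera_cfg(caps: &PeerCaps) -> Option<CameraConfig> {
    // Any missing field aborts with None, matching the (Some, Some, Some, Some) arm above.
    let codec = match caps.camera_codec.as_deref()?.to_ascii_lowercase().as_str() {
        "mjpeg" | "mjpg" | "jpeg" => CameraCodec::Mjpeg,
        _ => CameraCodec::H264, // unknown strings fall back to H.264, as in the diff
    };
    Some(CameraConfig {
        codec,
        width: caps.camera_width?,
        height: caps.camera_height?,
        fps: caps.camera_fps?,
    })
}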

View File

@@ -7,10 +7,15 @@ use tokio::time::timeout
use tonic::{Code, transport::Endpoint};
use tracing::{info, warn};
#[derive(Default, Clone, Debug)]
pub struct PeerCaps {
pub camera: bool,
pub microphone: bool,
pub camera_output: Option<String>,
pub camera_codec: Option<String>,
pub camera_width: Option<u32>,
pub camera_height: Option<u32>,
pub camera_fps: Option<u32>,
};
pub async fn negotiate(uri: &str) -> PeerCaps {
@@ -33,9 +38,35 @@ pub async fn negotiate(uri: &str) -> PeerCaps {
match timeout(Duration::from_secs(5), cli.get_capabilities(pb::Empty {})).await {
Ok(Ok(rsp)) => {
let rsp = rsp.get_ref();
let caps = PeerCaps {
camera: rsp.camera,
microphone: rsp.microphone,
camera_output: if rsp.camera_output.is_empty() {
None
} else {
Some(rsp.camera_output.clone())
},
camera_codec: if rsp.camera_codec.is_empty() {
None
} else {
Some(rsp.camera_codec.clone())
},
camera_width: if rsp.camera_width == 0 {
None
} else {
Some(rsp.camera_width)
},
camera_height: if rsp.camera_height == 0 {
None
} else {
Some(rsp.camera_height)
},
camera_fps: if rsp.camera_fps == 0 {
None
} else {
Some(rsp.camera_fps)
},
};
info!(?caps, "🤝 handshake ok");
caps
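Aside (not part of this diff): the repeated empty-string / zero checks above implement proto3 "unset" semantics, since scalar fields default to "" and 0 on the wire. A possible refactor that factors the pattern out (hypothetical helpers, untested):

fn opt_string(s: &str) -> Option<String> {
    if s.is_empty() { None } else { Some(s.to_owned()) }
}

fn opt_u32(v: u32) -> Option<u32> {
    if v == 0 { None } else { Some(v) }
}

// e.g. camera_codec: opt_string(&rsp.camera_codec),
//      camera_width: opt_u32(rsp.camera_width),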

View File

@@ -14,6 +14,20 @@ fn env_u32(name: &str, default: u32) -> u32 {
.unwrap_or(default)
}
#[derive(Clone, Copy, Debug)]
pub enum CameraCodec {
H264,
Mjpeg,
}
#[derive(Clone, Copy, Debug)]
pub struct CameraConfig {
pub codec: CameraCodec,
pub width: u32,
pub height: u32,
pub fps: u32,
}
pub struct CameraCapture {
#[allow(dead_code)] // kept alive to hold PLAYING state
pipeline: gst::Pipeline,
@@ -21,7 +35,10 @@ pub struct CameraCapture {
}
impl CameraCapture {
pub fn new(
device_fragment: Option<&str>,
cfg: Option<CameraConfig>,
) -> anyhow::Result<Self> {
gst::init().ok();
// Pick device (prefers V4L2 nodes with capture capability)
@@ -36,10 +53,14 @@ impl CameraCapture {
.ok()
.map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpg" | "mjpeg" | "jpeg"))
.unwrap_or(false);
let output_mjpeg = cfg
.map(|cfg| matches!(cfg.codec, CameraCodec::Mjpeg))
.unwrap_or_else(|| {
std::env::var("LESAVKA_CAM_CODEC")
.ok()
.map(|v| matches!(v.to_ascii_lowercase().as_str(), "mjpeg" | "mjpg" | "jpeg"))
.unwrap_or(false)
});
let jpeg_quality = env_u32("LESAVKA_CAM_JPEG_QUALITY", 85).clamp(1, 100);
let (enc, kf_prop, kf_val) = if use_mjpg_source && !output_mjpeg {
("x264enc", "key-int-max", "30")
@@ -56,9 +77,9 @@ impl CameraCapture {
} else {
tracing::info!("📸 using encoder element: {enc}");
}
let width = cfg.map(|cfg| cfg.width).unwrap_or_else(|| env_u32("LESAVKA_CAM_WIDTH", 1280));
let height = cfg.map(|cfg| cfg.height).unwrap_or_else(|| env_u32("LESAVKA_CAM_HEIGHT", 720));
let fps = cfg.map(|cfg| cfg.fps).unwrap_or_else(|| env_u32("LESAVKA_CAM_FPS", 25)).max(1);
let have_nvvidconv = gst::ElementFactory::find("nvvidconv").is_some();
let (src_caps, preenc) = match enc {
// ───────────────────────────────────────────────────────────────────

View File

@@ -11,7 +11,15 @@ message AudioPacket { uint32 id = 1; uint64 pts = 2; bytes data = 3; }
message ResetUsbReply { bool ok = 1; } // true = success
message HandshakeSet {
bool camera = 1;
bool microphone = 2;
string camera_output = 3;
string camera_codec = 4;
uint32 camera_width = 5;
uint32 camera_height = 6;
uint32 camera_fps = 7;
}
message Empty {}
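Aside (not part of this diff): because these are proto3 scalars, fields left at their defaults ("" / 0) are indistinguishable from "unset", which is exactly what negotiate() on the client maps to None, so an older server that never sets them keeps working. A hypothetical reply from an HDMI-capable server (values are illustrative only):

let reply = HandshakeSet {
    camera: true,
    microphone: true,
    camera_output: "hdmi".into(),
    camera_codec: "h264".into(),
    camera_width: 1920,
    camera_height: 1080,
    camera_fps: 30,
};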

server/src/camera.rs (new file, 260 lines)
View File

@@ -0,0 +1,260 @@
// server/src/camera.rs
use gstreamer as gst;
use std::collections::HashMap;
use std::fs;
use std::sync::{OnceLock, RwLock};
use tracing::{info, warn};
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CameraOutput {
Uvc,
Hdmi,
}
impl CameraOutput {
pub fn as_str(self) -> &'static str {
match self {
CameraOutput::Uvc => "uvc",
CameraOutput::Hdmi => "hdmi",
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CameraCodec {
H264,
Mjpeg,
}
impl CameraCodec {
pub fn as_str(self) -> &'static str {
match self {
CameraCodec::H264 => "h264",
CameraCodec::Mjpeg => "mjpeg",
}
}
}
#[derive(Clone, Debug)]
pub struct HdmiConnector {
pub name: String,
pub id: Option<u32>,
}
#[derive(Clone, Debug)]
pub struct CameraConfig {
pub output: CameraOutput,
pub codec: CameraCodec,
pub width: u32,
pub height: u32,
pub fps: u32,
pub hdmi: Option<HdmiConnector>,
}
static LAST_CONFIG: OnceLock<RwLock<CameraConfig>> = OnceLock::new();
pub fn update_camera_config() -> CameraConfig {
let cfg = select_camera_config();
let lock = LAST_CONFIG.get_or_init(|| RwLock::new(cfg.clone()));
*lock.write().unwrap() = cfg.clone();
cfg
}
pub fn current_camera_config() -> CameraConfig {
if let Some(lock) = LAST_CONFIG.get() {
return lock.read().unwrap().clone();
}
update_camera_config()
}
fn select_camera_config() -> CameraConfig {
let output_env = std::env::var("LESAVKA_CAM_OUTPUT").ok();
let output_override = output_env
.as_deref()
.and_then(parse_camera_output);
let require_connected = output_override != Some(CameraOutput::Hdmi);
let hdmi = detect_hdmi_connector(require_connected);
if output_override == Some(CameraOutput::Hdmi) && hdmi.is_none() {
warn!("📷 HDMI output forced but no connector detected");
}
let output = match output_override {
Some(v) => v,
None => {
if hdmi.is_some() {
CameraOutput::Hdmi
} else {
CameraOutput::Uvc
}
}
};
let cfg = match output {
CameraOutput::Hdmi => select_hdmi_config(hdmi),
CameraOutput::Uvc => select_uvc_config(),
};
info!(
output = cfg.output.as_str(),
codec = cfg.codec.as_str(),
width = cfg.width,
height = cfg.height,
fps = cfg.fps,
hdmi = cfg.hdmi.as_ref().map(|h| h.name.as_str()).unwrap_or("none"),
"📷 camera output selected"
);
cfg
}
fn parse_camera_output(raw: &str) -> Option<CameraOutput> {
match raw.trim().to_ascii_lowercase().as_str() {
"uvc" => Some(CameraOutput::Uvc),
"hdmi" => Some(CameraOutput::Hdmi),
"auto" | "" => None,
_ => None,
}
}
fn select_hdmi_config(hdmi: Option<HdmiConnector>) -> CameraConfig {
let hw_decode = has_hw_h264_decode();
let (width, height) = if hw_decode { (1920, 1080) } else { (1280, 720) };
let fps = 30;
if !hw_decode {
warn!("📷 HDMI output: hardware H264 decoder not detected; using 720p30");
}
CameraConfig {
output: CameraOutput::Hdmi,
codec: CameraCodec::H264,
width,
height,
fps,
hdmi,
}
}
fn select_uvc_config() -> CameraConfig {
let mut uvc_env = HashMap::new();
if let Ok(text) = fs::read_to_string("/etc/lesavka/uvc.env") {
uvc_env = parse_env_file(&text);
}
let width = read_u32_from_env("LESAVKA_UVC_WIDTH")
.or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_WIDTH"))
.unwrap_or(1280);
let height = read_u32_from_env("LESAVKA_UVC_HEIGHT")
.or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_HEIGHT"))
.unwrap_or(720);
let fps = read_u32_from_env("LESAVKA_UVC_FPS")
.or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_FPS"))
.or_else(|| {
read_u32_from_env("LESAVKA_UVC_INTERVAL")
.or_else(|| read_u32_from_map(&uvc_env, "LESAVKA_UVC_INTERVAL"))
.and_then(|interval| {
if interval == 0 {
None
} else {
Some(10_000_000 / interval)
}
})
})
.unwrap_or(25);
CameraConfig {
output: CameraOutput::Uvc,
codec: CameraCodec::Mjpeg,
width,
height,
fps,
hdmi: None,
}
}
fn has_hw_h264_decode() -> bool {
if gst::init().is_err() {
return false;
}
for name in ["v4l2h264dec", "v4l2slh264dec", "omxh264dec"] {
if gst::ElementFactory::find(name).is_some() {
return true;
}
}
false
}
fn detect_hdmi_connector(require_connected: bool) -> Option<HdmiConnector> {
let preferred = std::env::var("LESAVKA_HDMI_CONNECTOR").ok();
let entries = fs::read_dir("/sys/class/drm").ok()?;
let mut connectors = Vec::new();
for entry in entries.flatten() {
let name = entry.file_name().to_string_lossy().into_owned();
if !name.contains("HDMI-A-") {
continue;
}
let status_path = entry.path().join("status");
let status = fs::read_to_string(&status_path)
.ok()
.map(|v| v.trim().to_string())
.unwrap_or_default();
let id = fs::read_to_string(entry.path().join("connector_id"))
.ok()
.and_then(|v| v.trim().parse::<u32>().ok());
connectors.push((name, status, id));
}
let matches_preferred = |name: &str, preferred: &str| {
name == preferred || name.ends_with(preferred)
};
if let Some(pref) = preferred.as_deref() {
for (name, status, id) in &connectors {
if matches_preferred(name, pref)
&& (!require_connected || status == "connected")
{
return Some(HdmiConnector {
name: name.clone(),
id: *id,
});
}
}
}
for (name, status, id) in connectors {
if !require_connected || status == "connected" {
return Some(HdmiConnector { name, id });
}
}
None
}
fn parse_env_file(text: &str) -> HashMap<String, String> {
let mut out = HashMap::new();
for line in text.lines() {
let line = line.trim();
if line.is_empty() || line.starts_with('#') {
continue;
}
let mut parts = line.splitn(2, '=');
let key = match parts.next() {
Some(v) => v.trim(),
None => continue,
};
let val = match parts.next() {
Some(v) => v.trim(),
None => continue,
};
out.insert(key.to_string(), val.to_string());
}
out
}
fn read_u32_from_env(key: &str) -> Option<u32> {
std::env::var(key).ok().and_then(|v| v.parse::<u32>().ok())
}
fn read_u32_from_map(map: &HashMap<String, String>, key: &str) -> Option<u32> {
map.get(key).and_then(|v| v.parse::<u32>().ok())
}

View File

@@ -5,11 +5,9 @@ use lesavka_common::lesavka::{
Empty, HandshakeSet,
handshake_server::{Handshake, HandshakeServer},
};
use crate::camera;
pub struct HandshakeSvc;
#[tonic::async_trait]
impl Handshake for HandshakeSvc {
@@ -17,18 +15,26 @@ impl Handshake for HandshakeSvc {
&self,
_req: Request<Empty>,
) -> Result<Response<HandshakeSet>, Status> {
let cfg = camera::update_camera_config();
let camera_enabled = match cfg.output {
camera::CameraOutput::Uvc => std::env::var("LESAVKA_DISABLE_UVC").is_err(),
camera::CameraOutput::Hdmi => true,
};
let microphone = std::env::var("LESAVKA_DISABLE_UAC").is_err();
Ok(Response::new(HandshakeSet {
camera: camera_enabled,
microphone,
camera_output: cfg.output.as_str().to_string(),
camera_codec: cfg.codec.as_str().to_string(),
camera_width: cfg.width,
camera_height: cfg.height,
camera_fps: cfg.fps,
}))
}
}
impl HandshakeSvc {
pub fn server() -> HandshakeServer<Self> {
HandshakeServer::new(Self)
}
}
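Aside (not part of this diff): with the per-instance flags gone, HandshakeSvc is a unit struct and its registration stays a one-liner. A hedged sketch of how the service is presumably wired into the tonic server in the binary (address and surrounding services are placeholders):

let addr: std::net::SocketAddr = "0.0.0.0:50051".parse()?;
tonic::transport::Server::builder()
    .add_service(HandshakeSvc::server()) // returns HandshakeServer<HandshakeSvc>
    .serve(addr)
    .await?;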

View File

@@ -1,6 +1,7 @@
// server/src/lib.rs
pub mod audio;
pub mod camera;
pub mod gadget;
pub mod handshake;
pub mod video;

View File

@@ -23,7 +23,7 @@ use lesavka_common::lesavka::{
relay_server::{Relay, RelayServer},
};
use lesavka_server::{audio, camera, gadget::UsbGadget, handshake::HandshakeSvc, video};
/*──────────────── constants ────────────────*/
const VERSION: &str = env!("CARGO_PKG_VERSION");
@@ -100,7 +100,10 @@ async fn recover_hid_if_needed(
did_cycle: Arc<AtomicBool>,
) {
let code = err.raw_os_error();
let should_recover = matches!(
code,
Some(libc::ENOTCONN) | Some(libc::ESHUTDOWN) | Some(libc::EPIPE)
);
if !should_recover {
return;
}
@@ -271,7 +274,9 @@ impl Handler {
info!("🛠️ Initial USB reset…");
let _ = gadget.cycle(); // ignore failure - may boot without host
} else {
info!(
"🔒 gadget cycle disabled at startup (set LESAVKA_ALLOW_GADGET_CYCLE=1 to enable)"
);
}
info!("🛠️ opening HID endpoints …");
@@ -322,7 +327,13 @@ impl Relay for Handler {
while let Some(pkt) = s.next().await.transpose()? {
if let Err(e) = kb.lock().await.write_all(&pkt.data).await {
warn!("⌨️ write failed: {e} (dropped)");
recover_hid_if_needed(
&e,
gadget.clone(),
kb.clone(),
ms.clone(),
did_cycle.clone(),
)
.await;
}
tx.send(Ok(pkt)).await.ok();
@@ -348,7 +359,13 @@ impl Relay for Handler {
while let Some(pkt) = s.next().await.transpose()? {
if let Err(e) = ms.lock().await.write_all(&pkt.data).await {
warn!("🖱️ write failed: {e} (dropped)");
recover_hid_if_needed(
&e,
gadget.clone(),
kb.clone(),
ms.clone(),
did_cycle.clone(),
)
.await;
}
tx.send(Ok(pkt)).await.ok();
@@ -397,13 +414,32 @@ impl Relay for Handler {
&self,
req: Request<tonic::Streaming<VideoPacket>>,
) -> Result<Response<Self::StreamCameraStream>, Status> {
let cfg = camera::current_camera_config();
info!(
output = cfg.output.as_str(),
codec = cfg.codec.as_str(),
width = cfg.width,
height = cfg.height,
fps = cfg.fps,
hdmi = cfg.hdmi.as_ref().map(|h| h.name.as_str()).unwrap_or("none"),
"🎥 stream_camera output selected"
);
let relay = match cfg.output {
camera::CameraOutput::Uvc => {
if std::env::var("LESAVKA_DISABLE_UVC").is_ok() {
return Err(Status::failed_precondition(
"UVC output disabled (LESAVKA_DISABLE_UVC set)",
));
}
let uvc = pick_uvc_device().map_err(|e| Status::internal(format!("{e:#}")))?;
info!(%uvc, "🎥 stream_camera using UVC sink");
video::CameraRelay::new_uvc(0, &uvc, &cfg)
.map_err(|e| Status::internal(format!("{e:#}")))?
}
camera::CameraOutput::Hdmi => video::CameraRelay::new_hdmi(0, &cfg)
.map_err(|e| Status::internal(format!("{e:#}")))?,
};
// dummy outbound (same pattern as other streams)
let (tx, rx) = tokio::sync::mpsc::channel(1);

View File

@@ -2,18 +2,20 @@
use anyhow::Context;
use futures_util::Stream;
use gst::MessageView;
use gst::MessageView::*;
use gst::prelude::*;
use gstreamer as gst;
use gstreamer_app as gst_app;
use lesavka_common::lesavka::VideoPacket;
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use tokio_stream::wrappers::ReceiverStream;
use tonic::Status;
use tracing::{Level, debug, enabled, error, info, trace, warn};
use crate::camera::{CameraCodec, CameraConfig};
const EYE_ID: [&str; 2] = ["l", "r"];
static START: std::sync::OnceLock<gst::ClockTime> = std::sync::OnceLock::new();
static DEV_MODE: std::sync::OnceLock<bool> = std::sync::OnceLock::new();
@@ -26,8 +28,19 @@ fn env_u32(name: &str, default: u32) -> u32 {
}
fn dev_mode_enabled() -> bool {
*DEV_MODE.get_or_init(|| std::env::var("LESAVKA_DEV_MODE").is_ok())
}
fn pick_h264_decoder() -> &'static str {
if gst::ElementFactory::find("v4l2h264dec").is_some() {
"v4l2h264dec"
} else if gst::ElementFactory::find("v4l2slh264dec").is_some() {
"v4l2slh264dec"
} else if gst::ElementFactory::find("omxh264dec").is_some() {
"omxh264dec"
} else {
"avdec_h264"
}
}
fn contains_idr(h264: &[u8]) -> bool {
@@ -292,19 +305,15 @@ pub struct WebcamSink {
}
impl WebcamSink {
pub fn new(uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
gst::init()?;
let pipeline = gst::Pipeline::new();
let width = cfg.width as i32;
let height = cfg.height as i32;
let fps = cfg.fps.max(1) as i32;
let use_mjpeg = matches!(cfg.codec, CameraCodec::Mjpeg);
let src = gst::ElementFactory::make("appsrc")
.build()?
@@ -354,7 +363,7 @@ impl WebcamSink {
src.set_caps(Some(&caps_h264));
let h264parse = gst::ElementFactory::make("h264parse").build()?;
let decoder_name = pick_h264_decoder();
let decoder = gst::ElementFactory::make(decoder_name)
.build()
.with_context(|| format!("building decoder element {decoder_name}"))?;
@@ -405,33 +414,187 @@ impl WebcamSink {
}
}
}
pub struct HdmiSink {
appsrc: gst_app::AppSrc,
_pipe: gst::Pipeline,
}
impl HdmiSink {
pub fn new(cfg: &CameraConfig) -> anyhow::Result<Self> {
gst::init()?;
let pipeline = gst::Pipeline::new();
let width = cfg.width as i32;
let height = cfg.height as i32;
let fps = cfg.fps.max(1) as i32;
let src = gst::ElementFactory::make("appsrc")
.build()?
.downcast::<gst_app::AppSrc>()
.expect("appsrc");
src.set_is_live(true);
src.set_format(gst::Format::Time);
let raw_caps = gst::Caps::builder("video/x-raw")
.field("width", width)
.field("height", height)
.field("framerate", gst::Fraction::new(fps, 1))
.build();
let capsfilter = gst::ElementFactory::make("capsfilter")
.property("caps", &raw_caps)
.build()?;
let queue = gst::ElementFactory::make("queue")
.property("max-size-buffers", 4u32)
.build()?;
let convert = gst::ElementFactory::make("videoconvert").build()?;
let scale = gst::ElementFactory::make("videoscale").build()?;
let sink = build_hdmi_sink(cfg)?;
match cfg.codec {
CameraCodec::H264 => {
let caps_h264 = gst::Caps::builder("video/x-h264")
.field("stream-format", "byte-stream")
.field("alignment", "au")
.build();
src.set_caps(Some(&caps_h264));
let h264parse = gst::ElementFactory::make("h264parse").build()?;
let decoder_name = pick_h264_decoder();
let decoder = gst::ElementFactory::make(decoder_name)
.build()
.with_context(|| format!("building decoder element {decoder_name}"))?;
pipeline.add_many(&[
src.upcast_ref(),
&queue,
&h264parse,
&decoder,
&convert,
&scale,
&capsfilter,
&sink,
])?;
gst::Element::link_many(&[
src.upcast_ref(),
&queue,
&h264parse,
&decoder,
&convert,
&scale,
&capsfilter,
&sink,
])?;
}
CameraCodec::Mjpeg => {
let caps_mjpeg = gst::Caps::builder("image/jpeg")
.field("parsed", true)
.field("width", width)
.field("height", height)
.field("framerate", gst::Fraction::new(fps, 1))
.build();
src.set_caps(Some(&caps_mjpeg));
let jpegdec = gst::ElementFactory::make("jpegdec").build()?;
pipeline.add_many(&[
src.upcast_ref(),
&queue,
&jpegdec,
&convert,
&scale,
&capsfilter,
&sink,
])?;
gst::Element::link_many(&[
src.upcast_ref(),
&queue,
&jpegdec,
&convert,
&scale,
&capsfilter,
&sink,
])?;
}
}
pipeline.set_state(gst::State::Playing)?;
Ok(Self {
appsrc: src,
_pipe: pipeline,
})
}
pub fn push(&self, pkt: VideoPacket) {
let mut buf = gst::Buffer::from_slice(pkt.data);
buf.get_mut()
.unwrap()
.set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));
if let Err(err) = self.appsrc.push_buffer(buf) {
tracing::warn!(target:"lesavka_server::video", %err, "📺⚠️ HDMI appsrc push failed");
}
}
}
fn build_hdmi_sink(cfg: &CameraConfig) -> anyhow::Result<gst::Element> {
if let Ok(name) = std::env::var("LESAVKA_HDMI_SINK") {
return gst::ElementFactory::make(&name)
.build()
.context("building HDMI sink");
}
if gst::ElementFactory::find("kmssink").is_some() {
let sink = gst::ElementFactory::make("kmssink").build()?;
if let Some(connector) = cfg.hdmi.as_ref().and_then(|h| h.id) {
sink.set_property("connector-id", &connector);
}
sink.set_property("sync", &false);
return Ok(sink);
}
let sink = gst::ElementFactory::make("autovideosink")
.build()
.context("building HDMI sink")?;
let _ = sink.set_property("sync", &false);
Ok(sink)
}
/*─────────────────────────────────*/
/* gRPC → CameraSink relay */
/*─────────────────────────────────*/
enum CameraSink {
Uvc(WebcamSink),
Hdmi(HdmiSink),
}
impl CameraSink {
fn push(&self, pkt: VideoPacket) {
match self {
CameraSink::Uvc(sink) => sink.push(pkt),
CameraSink::Hdmi(sink) => sink.push(pkt),
}
}
}
pub struct CameraRelay {
sink: CameraSink,
id: u32, // gRPC “id” (for future multicam)
frames: std::sync::atomic::AtomicU64,
}
impl CameraRelay {
pub fn new_uvc(id: u32, uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
Ok(Self {
sink: CameraSink::Uvc(WebcamSink::new(uvc_dev, cfg)?),
id,
frames: std::sync::atomic::AtomicU64::new(0),
})
}
pub fn new_hdmi(id: u32, cfg: &CameraConfig) -> anyhow::Result<Self> {
Ok(Self {
sink: CameraSink::Hdmi(HdmiSink::new(cfg)?),
id,
frames: std::sync::atomic::AtomicU64::new(0),
})