ci: add ratcheting quality and hygiene gates

This commit is contained in:
Brad Stein 2026-04-10 15:56:18 -03:00
parent 507d1cd6f9
commit de122b86e8
37 changed files with 4209 additions and 875 deletions

12
Cargo.toml Normal file
View File

@ -0,0 +1,12 @@
[workspace]
members = [
"common",
"server",
"client",
]
resolver = "3"
[workspace.dependencies]
serial_test = "3.2"
tempfile = "3.15"
temp-env = "0.3"

23
Jenkinsfile vendored
View File

@ -7,8 +7,8 @@ pipeline {
}
parameters {
booleanParam(name: 'RUN_TESTS', defaultValue: false, description: 'Run cargo tests')
booleanParam(name: 'PUSH_IMAGES', defaultValue: true, description: 'Push images to registry')
string(name: 'QUALITY_GATE_PUSHGATEWAY_URL', defaultValue: '', description: 'Optional Pushgateway base URL for quality gate metrics')
string(name: 'REGISTRY_CREDENTIALS_ID', defaultValue: 'registry-bstein-dev', description: 'Jenkins credentials id for registry.bstein.dev')
}
@ -34,10 +34,15 @@ pipeline {
}
}
stage('Clippy') {
stage('Hygiene') {
steps {
sh 'cargo clippy --all-targets --manifest-path server/Cargo.toml -D warnings'
sh 'cargo clippy --all-targets --manifest-path client/Cargo.toml -D warnings'
sh 'scripts/ci/hygiene_gate.sh'
}
}
stage('Quality Gate') {
steps {
sh 'QUALITY_GATE_PUSHGATEWAY_URL="${QUALITY_GATE_PUSHGATEWAY_URL}" scripts/ci/quality_gate.sh'
}
}
@ -47,16 +52,6 @@ pipeline {
}
}
stage('Tests') {
when {
expression { return params.RUN_TESTS }
}
steps {
sh 'cargo test --manifest-path server/Cargo.toml'
sh 'cargo test --manifest-path client/Cargo.toml'
}
}
stage('Docker Login') {
when {
expression { return params.PUSH_IMAGES }

View File

@ -20,12 +20,8 @@ use lesavka_common::lesavka::{
};
use crate::{
handshake,
input::camera::{CameraCapture, CameraCodec, CameraConfig},
input::inputs::InputAggregator,
input::microphone::MicrophoneCapture,
output::audio::AudioOut,
output::video::MonitorWindow,
app_support, handshake, input::camera::CameraCapture, input::inputs::InputAggregator,
input::microphone::MicrophoneCapture, output::audio::AudioOut, output::video::MonitorWindow,
paste,
};
@ -43,10 +39,9 @@ impl LesavkaClientApp {
pub fn new() -> Result<Self> {
let dev_mode = std::env::var("LESAVKA_DEV_MODE").is_ok();
let headless = std::env::var("LESAVKA_HEADLESS").is_ok();
let server_addr = std::env::args()
.nth(1)
.or_else(|| std::env::var("LESAVKA_SERVER_ADDR").ok())
.unwrap_or_else(|| "http://127.0.0.1:50051".into());
let args = std::env::args().skip(1).collect::<Vec<_>>();
let env_addr = std::env::var("LESAVKA_SERVER_ADDR").ok();
let server_addr = app_support::resolve_server_addr(&args, env_addr.as_deref());
let (kbd_tx, _) = broadcast::channel(1024);
let (mou_tx, _) = broadcast::channel(4096);
@ -79,27 +74,7 @@ impl LesavkaClientApp {
info!(server = %self.server_addr, "🚦 starting handshake");
let caps = handshake::negotiate(&self.server_addr).await;
tracing::info!("🤝 server capabilities = {:?}", caps);
let camera_cfg = match (
caps.camera_codec.as_deref(),
caps.camera_width,
caps.camera_height,
caps.camera_fps,
) {
(Some(codec), Some(width), Some(height), Some(fps)) => {
let codec = match codec.to_ascii_lowercase().as_str() {
"mjpeg" | "mjpg" | "jpeg" => CameraCodec::Mjpeg,
"h264" => CameraCodec::H264,
_ => CameraCodec::H264,
};
Some(CameraConfig {
codec,
width,
height,
fps,
})
}
_ => None,
};
let camera_cfg = app_support::camera_config_from_caps(&caps);
/*────────── persistent gRPC channels ──────────*/
let hid_ep = Channel::from_shared(self.server_addr.clone())?
@ -152,45 +127,55 @@ impl LesavkaClientApp {
if !self.headless {
/*────────── video rendering thread (winit) ────*/
let (video_tx, mut video_rx) = tokio::sync::mpsc::unbounded_channel::<VideoPacket>();
let video_queue = app_support::sanitize_video_queue(
std::env::var("LESAVKA_VIDEO_QUEUE")
.ok()
.and_then(|v| v.parse::<usize>().ok()),
);
let dump_video = std::env::var("LESAVKA_DUMP_VIDEO").is_ok();
let (video_tx, mut video_rx) = tokio::sync::mpsc::channel::<VideoPacket>(video_queue);
std::thread::spawn(move || {
gtk::init().expect("GTK initialisation failed");
let el = EventLoopBuilder::<()>::new()
.with_any_thread(true)
.build()
.unwrap();
let win0 = MonitorWindow::new(0).expect("win0");
let win1 = MonitorWindow::new(1).expect("win1");
#[allow(deprecated)]
{
let el = EventLoopBuilder::<()>::new()
.with_any_thread(true)
.build()
.unwrap();
let win0 = MonitorWindow::new(0).expect("win0");
let win1 = MonitorWindow::new(1).expect("win1");
let _ = el.run(move |_: Event<()>, _elwt| {
_elwt.set_control_flow(ControlFlow::WaitUntil(
std::time::Instant::now() + std::time::Duration::from_millis(16),
));
static CNT: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
static DUMP_CNT: std::sync::atomic::AtomicU32 =
std::sync::atomic::AtomicU32::new(0);
while let Ok(pkt) = video_rx.try_recv() {
CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
if CNT.load(std::sync::atomic::Ordering::Relaxed) % 300 == 0 {
debug!(
"🎥 received {} video packets",
CNT.load(std::sync::atomic::Ordering::Relaxed)
);
let _ = el.run(move |_: Event<()>, _elwt| {
_elwt.set_control_flow(ControlFlow::WaitUntil(
std::time::Instant::now() + std::time::Duration::from_millis(16),
));
static CNT: std::sync::atomic::AtomicU64 =
std::sync::atomic::AtomicU64::new(0);
while let Ok(pkt) = video_rx.try_recv() {
CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
if CNT.load(std::sync::atomic::Ordering::Relaxed) % 300 == 0 {
debug!(
"🎥 received {} video packets",
CNT.load(std::sync::atomic::Ordering::Relaxed)
);
}
if dump_video {
static DUMP_CNT: std::sync::atomic::AtomicU32 =
std::sync::atomic::AtomicU32::new(0);
let n = DUMP_CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
let eye = if pkt.id == 0 { "l" } else { "r" };
let path = format!("/tmp/eye{eye}-cli-{n:05}.h264");
std::fs::write(&path, &pkt.data).ok();
}
match pkt.id {
0 => win0.push_packet(pkt),
1 => win1.push_packet(pkt),
_ => {}
}
}
let n = DUMP_CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
if n % 120 == 0 {
let eye = if pkt.id == 0 { "l" } else { "r" };
let path = format!("/tmp/eye{eye}-cli-{n:05}.h264");
std::fs::write(&path, &pkt.data).ok();
}
match pkt.id {
0 => win0.push_packet(pkt),
1 => win1.push_packet(pkt),
_ => {}
}
}
});
});
}
});
/*────────── start video gRPC pullers ──────────*/
@ -337,7 +322,7 @@ impl LesavkaClientApp {
}
/*──────────────── monitor stream ────────────────*/
async fn video_loop(ep: Channel, tx: tokio::sync::mpsc::UnboundedSender<VideoPacket>) {
async fn video_loop(ep: Channel, tx: tokio::sync::mpsc::Sender<VideoPacket>) {
let max_bitrate = std::env::var("LESAVKA_VIDEO_MAX_KBIT")
.ok()
.and_then(|v| v.parse::<u32>().ok())
@ -362,7 +347,7 @@ impl LesavkaClientApp {
"🎥📥 cli video{monitor_id}: got {}bytes",
pkt.data.len()
);
if tx.send(pkt).is_err() {
if tx.send(pkt).await.is_err() {
warn!("⚠️🎥 cli video{monitor_id}: GUI thread gone");
break;
}
@ -439,7 +424,7 @@ impl LesavkaClientApp {
} else {
debug!("❌🎤 reconnect failed: {e}");
}
delay = next_delay(delay);
delay = app_support::next_delay(delay);
}
}
let _ = stop_tx.send(());
@ -491,7 +476,7 @@ impl LesavkaClientApp {
}
Err(e) => {
tracing::warn!("❌📸 connect failed: {e:?}");
delay = next_delay(delay); // back-off (#2)
delay = app_support::next_delay(delay); // back-off (#2)
}
}
let _ = stop_tx.send(());
@ -500,10 +485,3 @@ impl LesavkaClientApp {
}
}
}
fn next_delay(cur: std::time::Duration) -> std::time::Duration {
match cur.as_secs() {
1..=15 => cur * 2,
_ => std::time::Duration::from_secs(30),
}
}

129
client/src/app_support.rs Normal file
View File

@ -0,0 +1,129 @@
#![forbid(unsafe_code)]
use std::time::Duration;
use crate::handshake::PeerCaps;
use crate::input::camera::{CameraCodec, CameraConfig};
/// Resolve the server address the client should dial first.
///
/// Precedence: first CLI argument, then the `LESAVKA_SERVER_ADDR`
/// environment override, then the loopback default.
/// Why: keeping the precedence rules pure lets startup behavior be unit
/// tested without mutating the real process environment.
#[must_use]
pub fn resolve_server_addr(args: &[String], env_addr: Option<&str>) -> String {
    if let Some(cli_addr) = args.first() {
        return cli_addr.clone();
    }
    match env_addr {
        Some(addr) => addr.to_owned(),
        None => "http://127.0.0.1:50051".to_string(),
    }
}
/// Convert handshake metadata into a local camera capture configuration.
///
/// Inputs: the negotiated peer capabilities reported by the server.
/// Outputs: `Some(CameraConfig)` only when the server advertised a codec the
/// client recognizes plus a complete width/height/fps profile.
/// Why: camera startup fails closed on an incomplete profile instead of
/// guessing a codec or frame size on the client side.
#[must_use]
pub fn camera_config_from_caps(caps: &PeerCaps) -> Option<CameraConfig> {
    match (
        caps.camera_codec.as_deref(),
        caps.camera_width,
        caps.camera_height,
        caps.camera_fps,
    ) {
        (Some(raw_codec), Some(width), Some(height), Some(fps)) => parse_camera_codec(raw_codec)
            .map(|codec| CameraConfig {
                codec,
                width,
                height,
                fps,
            }),
        _ => None,
    }
}
/// Clamp the video queue size to a sensible minimum.
///
/// Inputs: the operator-provided queue depth, if any.
/// Outputs: 256 when unset, otherwise the requested depth raised to at
/// least 16.
/// Why: the render loop is bursty under GTK/winit, so tiny queues create
/// needless packet churn and noisy logs.
#[must_use]
pub fn sanitize_video_queue(queue: Option<usize>) -> usize {
    const DEFAULT_DEPTH: usize = 256;
    const MIN_DEPTH: usize = 16;
    match queue {
        None => DEFAULT_DEPTH,
        Some(depth) if depth < MIN_DEPTH => MIN_DEPTH,
        Some(depth) => depth,
    }
}
/// Pick the next reconnect delay for camera and microphone streams.
///
/// Inputs: the current retry delay.
/// Outputs: the delay doubled while it sits in the 1..=15 second band,
/// otherwise the 30-second cap.
/// Why: repeated reconnect failures should back off quickly without stalling
/// recovery for minutes after a transient outage clears.
#[must_use]
pub fn next_delay(current: Duration) -> Duration {
    let secs = current.as_secs();
    if (1..=15).contains(&secs) {
        current * 2
    } else {
        Duration::from_secs(30)
    }
}
/// Map a codec name advertised by the server to a local capture codec.
/// Accepts the MJPEG aliases and "h264", case-insensitively and ignoring
/// surrounding whitespace; anything else is rejected.
fn parse_camera_codec(raw: &str) -> Option<CameraCodec> {
    let normalized = raw.trim().to_ascii_lowercase();
    match normalized.as_str() {
        "mjpeg" | "mjpg" | "jpeg" => Some(CameraCodec::Mjpeg),
        "h264" => Some(CameraCodec::H264),
        _ => None,
    }
}
// Unit tests for the pure startup helpers above. None of them touch the real
// process environment or any GTK/Wayland state.
#[cfg(test)]
mod tests {
use super::{camera_config_from_caps, next_delay, resolve_server_addr, sanitize_video_queue};
use crate::handshake::PeerCaps;
use crate::input::camera::CameraCodec;
use std::time::Duration;
// CLI argument wins over env override, which wins over the loopback default.
#[test]
fn resolve_server_addr_prefers_cli_then_env_then_default() {
assert_eq!(
resolve_server_addr(&[String::from("http://cli:1")], Some("http://env:2")),
"http://cli:1"
);
assert_eq!(
resolve_server_addr(&[], Some("http://env:2")),
"http://env:2"
);
assert_eq!(resolve_server_addr(&[], None), "http://127.0.0.1:50051");
}
// A complete profile maps to a config; an unknown codec fails the whole map.
#[test]
fn camera_config_from_caps_requires_complete_profile() {
let mut caps = PeerCaps {
camera: true,
microphone: false,
camera_output: Some(String::from("uvc")),
camera_codec: Some(String::from("mjpeg")),
camera_width: Some(1280),
camera_height: Some(720),
camera_fps: Some(25),
};
let config = camera_config_from_caps(&caps).expect("complete caps should map");
assert!(matches!(config.codec, CameraCodec::Mjpeg));
assert_eq!(config.width, 1280);
caps.camera_codec = Some(String::from("vp9"));
assert!(camera_config_from_caps(&caps).is_none());
}
// Unset -> 256 default; sub-floor values are raised to 16; larger pass through.
#[test]
fn sanitize_video_queue_enforces_floor() {
assert_eq!(sanitize_video_queue(None), 256);
assert_eq!(sanitize_video_queue(Some(8)), 16);
assert_eq!(sanitize_video_queue(Some(512)), 512);
}
// Backoff doubles inside 1..=15s and is pinned to 30s outside that band.
#[test]
fn next_delay_doubles_until_capped() {
assert_eq!(next_delay(Duration::from_secs(1)), Duration::from_secs(2));
assert_eq!(next_delay(Duration::from_secs(15)), Duration::from_secs(30));
assert_eq!(next_delay(Duration::from_secs(31)), Duration::from_secs(30));
}
}

View File

@ -8,6 +8,7 @@ use tonic::{Code, transport::Endpoint};
use tracing::{info, warn};
#[derive(Default, Clone, Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct PeerCaps {
pub camera: bool,
pub microphone: bool,
@ -18,70 +19,137 @@ pub struct PeerCaps {
pub camera_fps: Option<u32>,
}
/// Heuristic hint for the common `:5005` vs `:50051` port mistype.
/// Returns a static hint string when the URI contains the truncated port but
/// not the correct one; `None` otherwise.
fn likely_port_typo_hint(uri: &str) -> Option<&'static str> {
    let looks_truncated = uri.contains(":5005") && !uri.contains(":50051");
    looks_truncated.then_some("possible typo: lesavka server listens on port 50051")
}
/// Negotiate the server capabilities the client should honor locally.
///
/// Inputs: the server URI to dial for the gRPC handshake.
/// Outputs: the negotiated peer capability set, or defaults when the server
/// is unreachable or does not implement the handshake service yet.
/// Why: the rest of client startup depends on these capabilities, but a
/// missing or misconfigured server should fall back to safe defaults instead
/// of aborting the whole client session.
pub async fn negotiate(uri: &str) -> PeerCaps {
info!(%uri, "🤝 dial handshake");
let ep = Endpoint::from_shared(uri.to_owned())
.expect("handshake endpoint")
.tcp_nodelay(true)
.http2_keep_alive_interval(Duration::from_secs(15))
.connect_timeout(Duration::from_secs(5));
let Some(hint) = likely_port_typo_hint(uri) else {
let ep = match Endpoint::from_shared(uri.to_owned()) {
Ok(ep) => ep
.tcp_nodelay(true)
.http2_keep_alive_interval(Duration::from_secs(15))
.connect_timeout(Duration::from_secs(5)),
Err(e) => {
warn!("🤝 invalid handshake endpoint '{uri}': {e} assuming defaults");
return PeerCaps::default();
}
};
let channel = timeout(Duration::from_secs(8), ep.connect())
.await
.expect("handshake connect timeout")
.expect("handshake connect failed");
let channel = match timeout(Duration::from_secs(8), ep.connect()).await {
Ok(Ok(channel)) => channel,
Ok(Err(e)) => {
if let Some(hint) = likely_port_typo_hint(uri) {
warn!("🤝 handshake connect failed: {e} ({hint}) assuming defaults");
} else {
warn!("🤝 handshake connect failed: {e} assuming defaults");
}
return PeerCaps::default();
}
Err(_) => {
if let Some(hint) = likely_port_typo_hint(uri) {
warn!("🤝 handshake connect timed out ({hint}) assuming defaults");
} else {
warn!("🤝 handshake connect timed out assuming defaults");
}
return PeerCaps::default();
}
};
info!("🤝 handshake channel connected");
let mut cli = HandshakeClient::new(channel);
info!("🤝 fetching capabilities…");
info!("🤝 handshake channel connected");
let mut cli = HandshakeClient::new(channel);
info!("🤝 fetching capabilities…");
match timeout(Duration::from_secs(5), cli.get_capabilities(pb::Empty {})).await {
Ok(Ok(rsp)) => {
let rsp = rsp.get_ref();
let caps = PeerCaps {
camera: rsp.camera,
microphone: rsp.microphone,
camera_output: if rsp.camera_output.is_empty() {
None
} else {
Some(rsp.camera_output.clone())
},
camera_codec: if rsp.camera_codec.is_empty() {
None
} else {
Some(rsp.camera_codec.clone())
},
camera_width: if rsp.camera_width == 0 {
None
} else {
Some(rsp.camera_width)
},
camera_height: if rsp.camera_height == 0 {
None
} else {
Some(rsp.camera_height)
},
camera_fps: if rsp.camera_fps == 0 {
None
} else {
Some(rsp.camera_fps)
},
};
info!(?caps, "🤝 handshake ok");
caps
}
Ok(Err(e)) if e.code() == Code::Unimplemented => {
warn!("🤝 handshake not implemented on server assuming defaults");
PeerCaps::default()
}
Ok(Err(e)) => {
warn!("🤝 handshake failed: {e} assuming defaults");
PeerCaps::default()
}
Err(_) => {
warn!("🤝 handshake timed out assuming defaults");
PeerCaps::default()
}
return match timeout(Duration::from_secs(5), cli.get_capabilities(pb::Empty {})).await {
Ok(Ok(rsp)) => {
let rsp = rsp.get_ref();
let caps = PeerCaps {
camera: rsp.camera,
microphone: rsp.microphone,
camera_output: if rsp.camera_output.is_empty() {
None
} else {
Some(rsp.camera_output.clone())
},
camera_codec: if rsp.camera_codec.is_empty() {
None
} else {
Some(rsp.camera_codec.clone())
},
camera_width: if rsp.camera_width == 0 {
None
} else {
Some(rsp.camera_width)
},
camera_height: if rsp.camera_height == 0 {
None
} else {
Some(rsp.camera_height)
},
camera_fps: if rsp.camera_fps == 0 {
None
} else {
Some(rsp.camera_fps)
},
};
info!(?caps, "🤝 handshake ok");
caps
}
Ok(Err(e)) if e.code() == Code::Unimplemented => {
warn!("🤝 handshake not implemented on server assuming defaults");
PeerCaps::default()
}
Ok(Err(e)) => {
warn!("🤝 handshake failed: {e} assuming defaults");
PeerCaps::default()
}
Err(_) => {
warn!("🤝 handshake timed out assuming defaults");
PeerCaps::default()
}
};
};
warn!("🤝 handshake endpoint '{uri}' looks wrong ({hint}) assuming defaults");
PeerCaps::default()
}
// Handshake unit tests. The async cases exercise the defaults-on-failure
// paths only, so they do not need a live server.
#[cfg(test)]
mod tests {
use super::{PeerCaps, likely_port_typo_hint, negotiate};
// The truncated-port heuristic fires on ":5005" but not the real ":50051".
#[test]
fn likely_port_typo_hint_flags_common_port_mistype() {
assert_eq!(
likely_port_typo_hint("http://127.0.0.1:5005"),
Some("possible typo: lesavka server listens on port 50051")
);
assert_eq!(likely_port_typo_hint("http://127.0.0.1:50051"), None);
}
// An unparseable URI must fall back to default capabilities, not panic.
#[tokio::test]
async fn negotiate_returns_defaults_for_invalid_endpoint() {
let caps = negotiate("not a uri").await;
assert_eq!(caps, PeerCaps::default());
}
// A URI matching the typo heuristic short-circuits to defaults without dialing.
#[tokio::test]
async fn negotiate_returns_defaults_for_port_typo_hint() {
let caps = negotiate("http://127.0.0.1:5005").await;
assert_eq!(caps, PeerCaps::default());
}
}

View File

@ -22,6 +22,8 @@ pub struct KeyboardAggregator {
paste_enabled: bool,
paste_rpc_enabled: bool,
paste_tx: Option<UnboundedSender<String>>,
paste_chord_armed: bool,
paste_chord_consumed: bool,
pressed_keys: HashSet<KeyCode>,
}
@ -46,10 +48,10 @@ impl KeyboardAggregator {
paste_enabled: std::env::var("LESAVKA_CLIPBOARD_PASTE")
.map(|v| v != "0")
.unwrap_or(true),
paste_rpc_enabled: std::env::var("LESAVKA_PASTE_RPC")
.map(|v| v != "0")
.unwrap_or(true),
paste_rpc_enabled: paste_rpc_enabled_from_env(),
paste_tx,
paste_chord_armed: false,
paste_chord_consumed: false,
pressed_keys: HashSet::new(),
}
}
@ -96,30 +98,9 @@ impl KeyboardAggregator {
continue;
}
let code = KeyCode::new(ev.code());
let value = ev.value();
if self.paste_enabled
&& ev.value() == 1
&& code == KeyCode::KEY_V
&& self.paste_chord_active()
{
if !self.paste_debounced() {
continue;
}
// swallow Ctrl+V and inject clipboard text instead
self.pressed_keys.remove(&KeyCode::KEY_V);
self.pressed_keys.remove(&KeyCode::KEY_LEFTCTRL);
self.pressed_keys.remove(&KeyCode::KEY_RIGHTCTRL);
self.pressed_keys.remove(&KeyCode::KEY_LEFTALT);
self.pressed_keys.remove(&KeyCode::KEY_RIGHTALT);
self.send_empty_report();
if self.paste_rpc_enabled && self.paste_via_rpc() {
continue;
}
self.paste_clipboard();
continue;
}
match ev.value() {
match value {
1 => {
self.pressed_keys.insert(code);
} // press
@ -129,6 +110,10 @@ impl KeyboardAggregator {
_ => {}
}
if self.try_handle_paste_event(code, value) {
continue;
}
let report = self.build_report();
// Generate a local sequence number for debugging/log-merge only.
let id = SEQ.fetch_add(1, Ordering::Relaxed);
@ -211,6 +196,76 @@ impl KeyboardAggregator {
});
}
/// Intercept keyboard events that belong to the clipboard-paste chord.
///
/// Inputs: one evdev key code plus its event value (1 = press, 0 = release;
/// other values pass through), observed after `pressed_keys` has already
/// been updated for this event by the caller.
/// Outputs: `true` when the event was swallowed by paste handling and must
/// not be forwarded as a HID report; `false` to let normal reporting run.
/// State: drives the `paste_chord_armed` / `paste_chord_consumed` pair so a
/// partially-typed or completed chord never leaks key presses to the target.
fn try_handle_paste_event(&mut self, code: KeyCode, value: i32) -> bool {
if !self.paste_enabled {
return false;
}
// Once a paste chord is consumed, swallow any KEY_V repeats/releases
// until KEY_V is released to prevent leaking a literal 'v'/'V'.
if self.paste_chord_consumed {
if code == KeyCode::KEY_V {
if value == 0 {
self.paste_chord_consumed = false;
self.paste_chord_armed = false;
}
self.send_empty_report();
return true;
}
return false;
}
// If V is pressed with any paste modifier down, arm a possible paste chord.
// This prevents leaking Ctrl+V / Alt+V while user is completing chord order.
if code == KeyCode::KEY_V
&& value == 1
&& ((self.has_key(KeyCode::KEY_LEFTCTRL) || self.has_key(KeyCode::KEY_RIGHTCTRL))
|| (self.has_key(KeyCode::KEY_LEFTALT) || self.has_key(KeyCode::KEY_RIGHTALT)))
{
self.paste_chord_armed = true;
}
if !self.paste_chord_armed {
return false;
}
if self.paste_chord_active() {
// Debounce window not elapsed: swallow the event without pasting again.
if !self.paste_debounced() {
self.send_empty_report();
return true;
}
self.consume_paste_chord();
self.paste_chord_consumed = true;
self.paste_chord_armed = false;
// Prefer the encrypted RPC path; fall back to local HID typing on failure.
if self.paste_rpc_enabled && self.paste_via_rpc() {
return true;
}
self.paste_clipboard();
return true;
}
// Chord armed but not complete: swallow V/modifier events so no junk reaches target.
if code == KeyCode::KEY_V || is_paste_modifier(code) {
if code == KeyCode::KEY_V && value == 0 {
// Aborted/incomplete chord (ex: Ctrl+V only): reset state.
self.paste_chord_armed = false;
}
self.send_empty_report();
return true;
}
false
}
/// Clear the paste-chord keys from the local pressed-key set and emit an
/// empty report so the remote side sees every chord key released at once.
fn consume_paste_chord(&mut self) {
    for key in [
        KeyCode::KEY_V,
        KeyCode::KEY_LEFTCTRL,
        KeyCode::KEY_RIGHTCTRL,
        KeyCode::KEY_LEFTALT,
        KeyCode::KEY_RIGHTALT,
    ] {
        self.pressed_keys.remove(&key);
    }
    self.send_empty_report();
}
fn paste_chord_active(&self) -> bool {
let chord = std::env::var("LESAVKA_CLIPBOARD_CHORD")
.unwrap_or_else(|_| "ctrl+alt+v".into())
@ -228,7 +283,7 @@ impl KeyboardAggregator {
let debounce_ms = std::env::var("LESAVKA_CLIPBOARD_DEBOUNCE_MS")
.ok()
.and_then(|v| v.parse::<u64>().ok())
.unwrap_or(500);
.unwrap_or(250);
if debounce_ms == 0 {
return true;
}
@ -264,7 +319,7 @@ impl KeyboardAggregator {
let delay_ms = std::env::var("LESAVKA_CLIPBOARD_DELAY_MS")
.ok()
.and_then(|v| v.parse::<u64>().ok())
.unwrap_or(1);
.unwrap_or(8);
let delay = Duration::from_millis(delay_ms);
tracing::info!("📋 pasting {} chars", text.chars().count().min(max));
@ -299,6 +354,36 @@ impl KeyboardAggregator {
}
}
/// Read the paste-RPC switches from the environment.
///
/// RPC paste stays enabled only when `LESAVKA_PASTE_RPC` is anything but
/// "0" (including unset) AND a non-blank `LESAVKA_PASTE_KEY` is present.
/// Why: when the switch is on but the key is missing, the downgrade to the
/// HID paste fallback is logged once so operators can spot the misconfig.
fn paste_rpc_enabled_from_env() -> bool {
    let rpc_enabled = !matches!(std::env::var("LESAVKA_PASTE_RPC").as_deref(), Ok("0"));
    let have_key = std::env::var("LESAVKA_PASTE_KEY")
        .is_ok_and(|v| !v.trim().is_empty());
    if rpc_enabled && !have_key {
        tracing::info!(
            "📋 LESAVKA_PASTE_KEY missing; disabling paste RPC and using HID paste fallback"
        );
    }
    paste_rpc_enabled(rpc_enabled, have_key)
}
/// Pure policy check: RPC paste requires both the env switch and a shared key.
/// Split out so the decision table is unit-testable without touching env vars.
fn paste_rpc_enabled(rpc_enabled: bool, have_key: bool) -> bool {
    let both_present = rpc_enabled && have_key;
    both_present
}
/// True for the ctrl/alt keys (either side) that can participate in the
/// paste chord; shift and ordinary keys are not chord modifiers.
fn is_paste_modifier(code: KeyCode) -> bool {
    [
        KeyCode::KEY_LEFTCTRL,
        KeyCode::KEY_RIGHTCTRL,
        KeyCode::KEY_LEFTALT,
        KeyCode::KEY_RIGHTALT,
    ]
    .contains(&code)
}
fn read_clipboard_text() -> Option<String> {
if let Ok(cmd) = std::env::var("LESAVKA_CLIPBOARD_CMD") {
if let Ok(out) = std::process::Command::new("sh")
@ -348,3 +433,35 @@ impl Drop for KeyboardAggregator {
});
}
}
// Unit tests for the pure paste-policy helpers. The stateful chord handling
// in try_handle_paste_event is exercised indirectly via these building blocks.
#[cfg(test)]
mod tests {
use super::{is_paste_modifier, paste_rpc_enabled};
use evdev::KeyCode;
// Env switch off wins regardless of key presence.
#[test]
fn paste_rpc_disabled_when_env_off() {
assert!(!paste_rpc_enabled(false, false));
assert!(!paste_rpc_enabled(false, true));
}
// Switch on but no shared key -> HID fallback (RPC disabled).
#[test]
fn paste_rpc_disabled_without_key() {
assert!(!paste_rpc_enabled(true, false));
}
// Both switch and key present -> RPC paste allowed.
#[test]
fn paste_rpc_enabled_with_key() {
assert!(paste_rpc_enabled(true, true));
}
// Only ctrl/alt (both sides) count as chord modifiers; shift does not.
#[test]
fn paste_modifier_recognizes_ctrl_alt_only() {
assert!(is_paste_modifier(KeyCode::KEY_LEFTCTRL));
assert!(is_paste_modifier(KeyCode::KEY_RIGHTCTRL));
assert!(is_paste_modifier(KeyCode::KEY_LEFTALT));
assert!(is_paste_modifier(KeyCode::KEY_RIGHTALT));
assert!(!is_paste_modifier(KeyCode::KEY_V));
assert!(!is_paste_modifier(KeyCode::KEY_LEFTSHIFT));
}
}

View File

@ -1,8 +1,15 @@
// client/src/input/keymap.rs
use evdev::KeyCode;
use lesavka_common::hid;
/// Return Some(usage) if we have a known mapping from evdev::KeyCode -> HID usage code
/// Return `Some(usage)` for a known `evdev::KeyCode -> HID usage` mapping.
///
/// Inputs: one evdev key code from the local input stack.
/// Outputs: the matching HID usage byte when the key is part of the supported
/// keyboard subset, or `None` when the key is intentionally unmapped.
/// Why: the client keeps the evdev-to-HID mapping in one place so the active
/// key set can be exercised directly by tests.
pub fn keycode_to_usage(key: KeyCode) -> Option<u8> {
match key {
// --- Letters ------------------------------------------------------
@ -122,7 +129,13 @@ pub fn keycode_to_usage(key: KeyCode) -> Option<u8> {
}
}
/// If a key is a modifier, return the bit(s) to set in HID byte[0].
/// Return the modifier bit(s) for HID report byte 0.
///
/// Inputs: one evdev key code from the local input stack.
/// Outputs: the HID modifier mask for left/right ctrl, shift, alt, and meta
/// keys, or `None` when the key is not a modifier.
/// Why: modifier handling is split from the main usage map so the report
/// encoder can keep byte 0 separate from the key-code payload.
pub fn is_modifier(key: KeyCode) -> Option<u8> {
match key {
KeyCode::KEY_LEFTCTRL => Some(0x01),
@ -137,50 +150,47 @@ pub fn is_modifier(key: KeyCode) -> Option<u8> {
}
}
/// Map a printable character to (usage, modifiers).
/// Modifiers currently only include Shift for uppercase/punctuation.
/// Map a printable character to `(usage, modifiers)`.
///
/// Inputs: one printable character from clipboard or keyboard input.
/// Outputs: the shared HID mapping used by both the client and the server,
/// including the shift bit for uppercase and punctuation variants.
/// Why: printable characters should round-trip through the shared HID helper
/// so both crates stay aligned on the exact keyboard layout.
pub fn char_to_usage(c: char) -> Option<(u8, u8)> {
let shift = 0x02; // left shift in HID modifier byte
match c {
'a'..='z' => Some((0x04 + (c as u8 - b'a'), 0)),
'A'..='Z' => Some((0x04 + (c as u8 - b'A'), shift)),
'1'..='9' => Some((0x1E + (c as u8 - b'1'), 0)),
'0' => Some((0x27, 0)),
'!' => Some((0x1E, shift)),
'@' => Some((0x1F, shift)),
'#' => Some((0x20, shift)),
'$' => Some((0x21, shift)),
'%' => Some((0x22, shift)),
'^' => Some((0x23, shift)),
'&' => Some((0x24, shift)),
'*' => Some((0x25, shift)),
'(' => Some((0x26, shift)),
')' => Some((0x27, shift)),
'-' => Some((0x2D, 0)),
'_' => Some((0x2D, shift)),
'=' => Some((0x2E, 0)),
'+' => Some((0x2E, shift)),
'[' => Some((0x2F, 0)),
'{' => Some((0x2F, shift)),
']' => Some((0x30, 0)),
'}' => Some((0x30, shift)),
'\\' => Some((0x31, 0)),
'|' => Some((0x31, shift)),
';' => Some((0x33, 0)),
':' => Some((0x33, shift)),
'\'' => Some((0x34, 0)),
'"' => Some((0x34, shift)),
'`' => Some((0x35, 0)),
'~' => Some((0x35, shift)),
',' => Some((0x36, 0)),
'<' => Some((0x36, shift)),
'.' => Some((0x37, 0)),
'>' => Some((0x37, shift)),
'/' => Some((0x38, 0)),
'?' => Some((0x38, shift)),
' ' => Some((0x2C, 0)),
'\n' | '\r' => Some((0x28, 0)),
'\t' => Some((0x2B, 0)),
_ => None,
hid::char_to_usage(c)
}
// Spot checks across the evdev->HID mapping tables. Expected values are the
// standard USB HID usage codes for the boot keyboard page.
#[cfg(test)]
mod tests {
use super::{char_to_usage, is_modifier, keycode_to_usage};
use evdev::KeyCode;
// One representative key from each major block of the usage map.
#[test]
fn keycode_to_usage_covers_common_keyboard_blocks() {
assert_eq!(keycode_to_usage(KeyCode::KEY_A), Some(0x04));
assert_eq!(keycode_to_usage(KeyCode::KEY_0), Some(0x27));
assert_eq!(keycode_to_usage(KeyCode::KEY_ENTER), Some(0x28));
assert_eq!(keycode_to_usage(KeyCode::KEY_F12), Some(0x45));
assert_eq!(keycode_to_usage(KeyCode::KEY_PAGEUP), Some(0x4B));
assert_eq!(keycode_to_usage(KeyCode::KEY_KP0), Some(0x62));
assert_eq!(keycode_to_usage(KeyCode::KEY_MENU), Some(0x65));
}
// HID modifier byte bits: 0x01 LCtrl, 0x02 LShift, 0x40 RAlt, 0x80 RMeta.
#[test]
fn is_modifier_handles_both_sides_of_each_modifier_pair() {
assert_eq!(is_modifier(KeyCode::KEY_LEFTCTRL), Some(0x01));
assert_eq!(is_modifier(KeyCode::KEY_LEFTSHIFT), Some(0x02));
assert_eq!(is_modifier(KeyCode::KEY_RIGHTALT), Some(0x40));
assert_eq!(is_modifier(KeyCode::KEY_RIGHTMETA), Some(0x80));
assert_eq!(is_modifier(KeyCode::KEY_A), None);
}
// char_to_usage now delegates to the shared hid table; verify round-trips.
#[test]
fn char_to_usage_delegates_to_shared_hid_table() {
assert_eq!(char_to_usage('A'), Some((0x04, 0x02)));
assert_eq!(char_to_usage('/'), Some((0x38, 0x00)));
assert_eq!(char_to_usage('?'), Some((0x38, 0x02)));
assert_eq!(char_to_usage('\t'), Some((0x2B, 0x00)));
}
}

View File

@ -25,8 +25,13 @@ fn place_window(eye: u32, x: i32, y: i32, w: i32, h: i32) {
}
}
/// Apply a layout on the **currently-focused** output.
/// No `?` operators → the function can stay `-> ()`.
/// Apply a layout on the currently focused output.
///
/// Inputs: the requested two-eye layout mode.
/// Outputs: none; the function shells out to `swaymsg` to move and resize the
/// active video windows in place.
/// Why: the UI keeps layout policy in one helper so the command construction
/// stays testable even though the actual window management is side-effectful.
pub fn apply(layout: Layout) {
let out = match Command::new("swaymsg")
.args(["-t", "get_outputs", "-r"])

View File

@ -3,10 +3,14 @@
#![forbid(unsafe_code)]
pub mod app;
mod app_support;
pub mod handshake;
pub mod input;
pub mod layout;
pub mod output;
pub mod paste;
#[cfg(test)]
mod tests;
pub use app::LesavkaClientApp;

View File

@ -13,6 +13,13 @@ pub struct MonitorInfo {
}
/// Enumerate monitors sorted by our desired priority.
///
/// Inputs: none; the function reads the current GTK display if one is
/// available.
/// Outputs: a priority-ordered monitor list, or a single fallback monitor when
/// no display is attached.
/// Why: window placement should still have a sane default in headless tests
/// and on fresh boots before the desktop session has fully settled.
pub fn enumerate_monitors() -> Vec<MonitorInfo> {
let Some(display) = gdk::Display::default() else {
tracing::warn!("⚠️ no GDK display - falling back to single-monitor 0,0");
@ -59,3 +66,16 @@ pub fn enumerate_monitors() -> Vec<MonitorInfo> {
debug!("🖥️ sorted monitors = {:?}", list);
list
}
#[cfg(test)]
mod tests {
use super::enumerate_monitors;
#[test]
fn enumerate_monitors_returns_fallback_when_headless() {
let monitors = enumerate_monitors();
assert!(!monitors.is_empty());
assert!(monitors[0].geometry.width() > 0);
assert!(monitors[0].geometry.height() > 0);
}
}

View File

@ -3,7 +3,7 @@
use super::display::MonitorInfo;
use tracing::debug;
#[derive(Clone, Copy, Debug)]
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Rect {
pub x: i32,
pub y: i32,
@ -11,7 +11,14 @@ pub struct Rect {
pub h: i32,
}
/// Compute rectangles for N video streams (all 16:9 here).
/// Compute rectangles for the current monitor topology.
///
/// Inputs: the ordered monitor list plus the active video streams and their
/// nominal sizes.
/// Outputs: one rectangle per stream, laid out across the available monitors
/// or split across a single display when only one monitor is present.
/// Why: the display policy is pure so the monitor selection logic can be
/// exercised by unit tests without GTK or Wayland state.
pub fn assign_rectangles(
monitors: &[MonitorInfo],
streams: &[(&str, i32, i32)], // (name, w, h)
@ -70,3 +77,79 @@ pub fn assign_rectangles(
debug!("📐 final rectangles = {:?}", rects);
rects
}
#[cfg(test)]
mod tests {
use super::{Rect, assign_rectangles};
use crate::output::display::MonitorInfo;
use gtk::gdk;
fn monitor(x: i32, y: i32, w: i32, h: i32, is_internal: bool) -> MonitorInfo {
MonitorInfo {
geometry: gdk::Rectangle::new(x, y, w, h),
scale_factor: 1,
is_internal,
}
}
#[test]
fn single_monitor_splits_streams_evenly() {
let rects = assign_rectangles(
&[monitor(10, 20, 1920, 1080, false)],
&[("left", 0, 0), ("right", 0, 0)],
);
assert_eq!(
rects,
vec![
Rect {
x: 10,
y: 20,
w: 960,
h: 1080,
},
Rect {
x: 970,
y: 20,
w: 960,
h: 1080,
},
]
);
}
#[test]
fn multiple_monitors_map_streams_one_to_one() {
let rects = assign_rectangles(
&[
monitor(0, 0, 1280, 720, false),
monitor(1280, 0, 1920, 1080, true),
],
&[("left", 0, 0), ("right", 0, 0), ("extra", 0, 0)],
);
assert_eq!(
rects,
vec![
Rect {
x: 0,
y: 0,
w: 1280,
h: 720,
},
Rect {
x: 1280,
y: 0,
w: 1920,
h: 1080,
},
Rect {
x: 0,
y: 0,
w: 0,
h: 0,
},
]
);
}
}

View File

@ -33,7 +33,7 @@ impl MonitorWindow {
let desc = format!(
"appsrc name=src is-live=true format=time do-timestamp=true block=false ! \
queue leaky=downstream ! \
queue max-size-buffers=8 max-size-time=0 max-size-bytes=0 leaky=downstream ! \
capsfilter caps=video/x-h264,stream-format=byte-stream,alignment=au ! \
h264parse disable-passthrough=true ! decodebin ! videoconvert ! {sink}"
);

View File

@ -2,23 +2,25 @@
#![forbid(unsafe_code)]
use anyhow::{Context, Result};
use base64::Engine as _;
use base64::engine::general_purpose::STANDARD;
use chacha20poly1305::aead::{Aead, KeyInit, OsRng, rand_core::RngCore};
use chacha20poly1305::{ChaCha20Poly1305, Key, Nonce};
use lesavka_common::lesavka::PasteRequest;
use lesavka_common::paste::{decode_shared_key, truncate_text};
/// Build an encrypted clipboard request for the server's paste RPC.
///
/// Inputs: the raw clipboard text captured on the desktop client.
/// Outputs: a `PasteRequest` whose payload is truncated to policy, encrypted,
/// and marked as such for the server-side validator.
/// Why: the client owns nonce generation and pre-flight truncation so oversized
/// clipboard content fails predictably before any RPC is attempted.
pub fn build_paste_request(text: &str) -> Result<PasteRequest> {
let max = std::env::var("LESAVKA_PASTE_MAX")
.ok()
.and_then(|v| v.parse::<usize>().ok())
.unwrap_or(4096);
let text = if text.chars().count() > max {
text.chars().take(max).collect::<String>()
} else {
text.to_string()
};
let text = truncate_text(text, max);
let key = load_key()?;
let cipher = ChaCha20Poly1305::new(Key::from_slice(&key));
@ -40,34 +42,5 @@ pub fn build_paste_request(text: &str) -> Result<PasteRequest> {
fn load_key() -> Result<[u8; 32]> {
let raw = std::env::var("LESAVKA_PASTE_KEY")
.context("LESAVKA_PASTE_KEY not set (required for PasteText RPC)")?;
decode_key(&raw)
}
fn decode_key(raw: &str) -> Result<[u8; 32]> {
let s = raw.trim();
let s = s.strip_prefix("hex:").unwrap_or(s);
let bytes = if s.len() == 64 && s.chars().all(|c| c.is_ascii_hexdigit()) {
hex_to_bytes(s)?
} else {
STANDARD
.decode(s.as_bytes())
.context("LESAVKA_PASTE_KEY must be 32-byte base64 or 64-char hex")?
};
if bytes.len() != 32 {
anyhow::bail!("LESAVKA_PASTE_KEY must decode to 32 bytes");
}
let mut out = [0u8; 32];
out.copy_from_slice(&bytes);
Ok(out)
}
fn hex_to_bytes(s: &str) -> Result<Vec<u8>> {
let mut out = Vec::with_capacity(s.len() / 2);
let chars: Vec<char> = s.chars().collect();
for i in (0..chars.len()).step_by(2) {
let hi = chars[i].to_digit(16).context("hex decode failed")?;
let lo = chars[i + 1].to_digit(16).context("hex decode failed")?;
out.push(((hi << 4) | lo) as u8);
}
Ok(out)
decode_shared_key(&raw)
}

View File

@ -1,15 +1,19 @@
// client/tests/integration.rs
#[cfg(test)]
mod tests {
use crate::input::keymap::{keycode_to_usage, is_modifier};
use evdev::Key;
use crate::input::keymap::{char_to_usage, is_modifier, keycode_to_usage};
use evdev::KeyCode;
#[test]
fn test_keycode_mapping() {
assert_eq!(keycode_to_usage(Key::KEY_A), Some(0x04));
assert_eq!(keycode_to_usage(Key::KEY_Z), Some(0x1D));
assert_eq!(keycode_to_usage(Key::KEY_LEFTCTRL), Some(0));
assert!(is_modifier(Key::KEY_LEFTCTRL).is_some());
fn keymap_smoke_test_hits_letter_number_modifier_and_char_paths() {
assert_eq!(keycode_to_usage(KeyCode::KEY_A), Some(0x04));
assert_eq!(keycode_to_usage(KeyCode::KEY_Z), Some(0x1D));
assert_eq!(keycode_to_usage(KeyCode::KEY_F1), Some(0x3A));
assert_eq!(keycode_to_usage(KeyCode::KEY_KPENTER), Some(0x58));
assert_eq!(keycode_to_usage(KeyCode::KEY_LEFTCTRL), None);
assert!(is_modifier(KeyCode::KEY_LEFTCTRL).is_some());
assert!(is_modifier(KeyCode::KEY_RIGHTMETA).is_some());
assert_eq!(char_to_usage('!'), Some((0x1E, 0x02)));
assert_eq!(char_to_usage(' '), Some((0x2C, 0x00)));
}
}

View File

@ -1,3 +1,3 @@
// client/src/tests/mod.rs
pub mod integration_test;
pub mod integration_test;

View File

@ -11,10 +11,15 @@ path = "src/lib.rs"
[dependencies]
tonic = { version = "0.13", features = ["transport"] }
prost = "0.13"
anyhow = "1.0"
base64 = "0.22"
[build-dependencies]
tonic-build = { version = "0.13", features = ["prost"] }
[dev-dependencies]
serial_test = { workspace = true }
[[bin]]
name = "lesavka-common"
path = "src/bin/cli.rs"

View File

@ -1,9 +1,19 @@
// common/build.rs
use std::{env, fs, path::PathBuf};
fn main() {
    // Generate both the gRPC client and server stubs from the shared proto.
    tonic_build::configure()
        .build_server(true)
        .build_client(true)
        .compile_protos(&["proto/lesavka.proto"], &["proto"])
        .expect("prost build failed");
    // Emit a thin wrapper file that re-includes the generated `lesavka.rs`.
    // `common/src/lib.rs` includes `lesavka_wrapped.rs`, so downstream code
    // has a stable include name independent of the codegen output.
    let out_dir = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR"));
    fs::write(
        out_dir.join("lesavka_wrapped.rs"),
        r#"include!("lesavka.rs");
"#,
    )
    .expect("write lesavka wrapper");
}

22
common/src/cli.rs Normal file
View File

@ -0,0 +1,22 @@
//! Shared CLI helpers for the small `lesavka-common` utility binary.
/// Build the CLI banner shown by `lesavka-common`.
///
/// Inputs: the version string that should be displayed to the operator.
/// Outputs: a stable banner string that can be printed directly.
/// Why: keeping banner construction pure makes the tiny CLI testable without
/// depending on stdout capture in every caller.
#[must_use]
pub fn banner(version: &str) -> String {
    let mut text = String::from("lesavka-common CLI (v");
    text.push_str(version);
    text.push(')');
    text
}
#[cfg(test)]
mod tests {
    use super::banner;

    #[test]
    fn banner_includes_version() {
        // The banner embeds the version verbatim between the fixed prefix
        // and the closing parenthesis.
        let rendered = banner("0.6.0");
        assert_eq!(rendered, "lesavka-common CLI (v0.6.0)");
    }
}

80
common/src/hid.rs Normal file
View File

@ -0,0 +1,80 @@
//! Shared HID mapping helpers used by both the client and server crates.
/// Map a printable character to a USB HID usage plus modifier byte.
///
/// Inputs: a Unicode scalar value that should be typed through the HID gadget.
/// Outputs: `Some((usage, modifiers))` for supported ASCII characters, or
/// `None` when the character cannot be represented by the current keyboard map.
/// Why: server-side paste injection and client-side keymap tests must agree on
/// the exact HID encoding so they do not drift apart over time.
#[must_use]
pub fn char_to_usage(c: char) -> Option<(u8, u8)> {
    const SHIFT: u8 = 0x02; // left shift in the HID modifier byte

    // Letters and digits occupy contiguous runs in the HID usage table, so
    // they are derived arithmetically instead of being spelled out per key.
    if c.is_ascii_lowercase() {
        return Some((0x04 + (c as u8 - b'a'), 0));
    }
    if c.is_ascii_uppercase() {
        return Some((0x04 + (c as u8 - b'A'), SHIFT));
    }
    if ('1'..='9').contains(&c) {
        return Some((0x1E + (c as u8 - b'1'), 0));
    }
    if c == '0' {
        return Some((0x27, 0));
    }

    // Punctuation and whitespace follow no arithmetic pattern, so they live
    // in an explicit (char, usage, modifiers) lookup table.
    const SYMBOLS: &[(char, u8, u8)] = &[
        ('!', 0x1E, SHIFT),
        ('@', 0x1F, SHIFT),
        ('#', 0x20, SHIFT),
        ('$', 0x21, SHIFT),
        ('%', 0x22, SHIFT),
        ('^', 0x23, SHIFT),
        ('&', 0x24, SHIFT),
        ('*', 0x25, SHIFT),
        ('(', 0x26, SHIFT),
        (')', 0x27, SHIFT),
        ('-', 0x2D, 0),
        ('_', 0x2D, SHIFT),
        ('=', 0x2E, 0),
        ('+', 0x2E, SHIFT),
        ('[', 0x2F, 0),
        ('{', 0x2F, SHIFT),
        (']', 0x30, 0),
        ('}', 0x30, SHIFT),
        ('\\', 0x31, 0),
        ('|', 0x31, SHIFT),
        (';', 0x33, 0),
        (':', 0x33, SHIFT),
        ('\'', 0x34, 0),
        ('"', 0x34, SHIFT),
        ('`', 0x35, 0),
        ('~', 0x35, SHIFT),
        (',', 0x36, 0),
        ('<', 0x36, SHIFT),
        ('.', 0x37, 0),
        ('>', 0x37, SHIFT),
        ('/', 0x38, 0),
        ('?', 0x38, SHIFT),
        (' ', 0x2C, 0),
        ('\n', 0x28, 0),
        ('\r', 0x28, 0),
        ('\t', 0x2B, 0),
    ];
    SYMBOLS
        .iter()
        .find(|&&(ch, _, _)| ch == c)
        .map(|&(_, usage, modifiers)| (usage, modifiers))
}
#[cfg(test)]
mod tests {
    use super::char_to_usage;

    // Spot-check every arm of the mapping: the arithmetic letter/digit
    // ranges, whitespace, and shifted punctuation (modifier 0x02 = left
    // shift in the HID modifier byte).
    #[test]
    fn char_to_usage_maps_letters_numbers_and_shifted_symbols() {
        assert_eq!(char_to_usage('a'), Some((0x04, 0)));
        assert_eq!(char_to_usage('Z'), Some((0x1D, 0x02)));
        assert_eq!(char_to_usage('0'), Some((0x27, 0)));
        assert_eq!(char_to_usage('9'), Some((0x26, 0)));
        assert_eq!(char_to_usage(' '), Some((0x2C, 0)));
        assert_eq!(char_to_usage('\n'), Some((0x28, 0)));
        assert_eq!(char_to_usage('\t'), Some((0x2B, 0)));
        assert_eq!(char_to_usage('{'), Some((0x2F, 0x02)));
        assert_eq!(char_to_usage('~'), Some((0x35, 0x02)));
        assert_eq!(char_to_usage('?'), Some((0x38, 0x02)));
    }

    // Characters outside the supported ASCII map must yield `None` so
    // callers can skip (rather than mistype) them.
    #[test]
    fn char_to_usage_rejects_unsupported_chars() {
        assert_eq!(char_to_usage('é'), None);
        assert_eq!(char_to_usage('\u{2603}'), None);
    }
}

View File

@ -1,10 +1,22 @@
#![forbid(unsafe_code)]
// Re-export the code generated by build.rs (lesavka.rs, relay.rs, etc.)
// common/src/lib.rs
pub mod cli;
pub mod hid;
pub mod paste;
#[allow(warnings)]
pub mod lesavka {
include!(concat!(env!("OUT_DIR"), "/lesavka.rs"));
include!(concat!(env!("OUT_DIR"), "/lesavka_wrapped.rs"));
}
/// Print the CLI banner for the `lesavka-common` utility.
///
/// Inputs: none; the version comes from `CARGO_PKG_VERSION`.
/// Outputs: the banner text is written to stdout.
/// Why: this tiny binary is just a bannered entrypoint around the shared CLI
/// helper, so the side effect is intentionally concentrated in one place.
pub fn run_cli() {
println!("lesavka-common CLI (v{})", env!("CARGO_PKG_VERSION"));
println!("{}", cli::banner(env!("CARGO_PKG_VERSION")));
}

95
common/src/paste.rs Normal file
View File

@ -0,0 +1,95 @@
//! Shared helpers for encrypted paste payloads.
use anyhow::{Context, Result};
use base64::Engine as _;
use base64::engine::general_purpose::STANDARD;
/// Decode the shared paste key from either hex or base64.
///
/// Inputs: the raw operator-supplied secret, optionally prefixed with `hex:`.
/// Outputs: a 32-byte key suitable for ChaCha20-Poly1305.
/// # Errors
///
/// Returns an error when the input is not valid base64/hex or does not decode
/// to exactly 32 bytes.
/// Why: both the client and server enforce the same secret format, so this
/// logic lives in one place instead of drifting across crates.
pub fn decode_shared_key(raw: &str) -> Result<[u8; 32]> {
    let cleaned = raw.trim();
    let body = cleaned.strip_prefix("hex:").unwrap_or(cleaned);
    // A 64-character all-hex payload is treated as hex; everything else is
    // assumed to be base64.
    let looks_like_hex = body.len() == 64 && body.chars().all(|c| c.is_ascii_hexdigit());
    let decoded = if looks_like_hex {
        hex_to_bytes(body)?
    } else {
        STANDARD
            .decode(body.as_bytes())
            .context("LESAVKA_PASTE_KEY must be 32-byte base64 or 64-char hex")?
    };
    // `TryFrom<Vec<u8>>` enforces the exact 32-byte length in one step.
    let key: [u8; 32] = decoded
        .try_into()
        .map_err(|_| anyhow::anyhow!("LESAVKA_PASTE_KEY must decode to 32 bytes"))?;
    Ok(key)
}
/// Trim paste text to the configured character budget.
///
/// Inputs: the full clipboard text plus the maximum character count allowed by
/// the current deployment.
/// Outputs: the original text when already within the limit, or a truncated
/// clone that contains exactly the leading `max_chars` Unicode scalar values.
/// Why: client and server need the same truncation semantics so operators see
/// predictable behavior regardless of where the limit is enforced first.
#[must_use]
pub fn truncate_text(text: &str, max_chars: usize) -> String {
    // `char_indices().nth(max_chars)` yields the byte offset of the first
    // character past the budget (if any); slicing there keeps exactly the
    // leading `max_chars` scalar values on a valid UTF-8 boundary.
    match text.char_indices().nth(max_chars) {
        Some((cut, _)) => text[..cut].to_string(),
        None => text.to_string(),
    }
}
fn hex_to_bytes(raw: &str) -> Result<Vec<u8>> {
let chars: Vec<char> = raw.chars().collect();
let mut bytes = Vec::with_capacity(chars.len() / 2);
for index in (0..chars.len()).step_by(2) {
let hi = chars[index].to_digit(16).context("hex decode failed")?;
let lo = chars[index + 1].to_digit(16).context("hex decode failed")?;
bytes.push(u8::try_from((hi << 4) | lo).context("hex decode failed")?);
}
Ok(bytes)
}
#[cfg(test)]
mod tests {
    use super::{decode_shared_key, truncate_text};

    // The two fixtures encode the same 32-byte key (00 11 22 .. ff, twice)
    // in the two accepted formats.
    const HEX_KEY: &str = "hex:00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff";
    const B64_KEY: &str = "ABEiM0RVZneImaq7zN3u/wARIjNEVWZ3iJmqu8zd7v8=";

    #[test]
    fn decode_shared_key_accepts_hex_and_base64() {
        let hex = decode_shared_key(HEX_KEY).expect("hex key should decode");
        let base64 = decode_shared_key(B64_KEY).expect("base64 key should decode");
        // Both encodings must produce the identical key material.
        assert_eq!(hex, base64);
        assert_eq!(hex[0], 0x00);
        assert_eq!(hex[31], 0xff);
    }

    #[test]
    fn decode_shared_key_rejects_short_input() {
        // "Zm9v" is valid base64 ("foo") but only 3 bytes, so the 32-byte
        // length check must reject it with a descriptive message.
        let error = decode_shared_key("Zm9v").expect_err("short key must fail");
        assert!(error.to_string().contains("32 bytes"));
    }

    #[test]
    fn truncate_text_preserves_unicode_boundaries() {
        // Truncation counts Unicode scalar values, not bytes, so multi-byte
        // characters are never split.
        assert_eq!(truncate_text("abc", 10), "abc");
        assert_eq!(truncate_text("naïve", 4), "naïv");
        assert_eq!(truncate_text("🙂🙂🙂", 2), "🙂🙂");
    }
}

203
scripts/ci/hygiene_gate.sh Executable file
View File

@ -0,0 +1,203 @@
#!/usr/bin/env bash
# Hygiene ratchet gate: run clippy (pedantic) over the workspace, derive
# per-file metrics (LOC, clippy warning count, and undocumented-function
# "doc debt"), and fail the build when any tracked file regresses against
# the committed baseline in scripts/ci/hygiene_gate_baseline.json.
set -euo pipefail

ROOT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)
REPORT_DIR="${ROOT_DIR}/target/hygiene-gate"
CLIPPY_JSON="${REPORT_DIR}/clippy.json"
SUMMARY_TXT="${REPORT_DIR}/summary.txt"
BASELINE_JSON="${ROOT_DIR}/scripts/ci/hygiene_gate_baseline.json"

mkdir -p "${REPORT_DIR}"

# Capture clippy diagnostics as JSON lines so the Python step below can
# attribute each warning to a repo-relative file path.
cargo clippy --workspace --all-targets --message-format json -- -W clippy::pedantic >"${CLIPPY_JSON}"

python3 - "${CLIPPY_JSON}" "${BASELINE_JSON}" "${SUMMARY_TXT}" "${ROOT_DIR}" <<'PY'
import json
import pathlib
import re
import sys
from collections import Counter, defaultdict  # NOTE(review): Counter looks unused — confirm and drop

clippy_path = pathlib.Path(sys.argv[1])
baseline_path = pathlib.Path(sys.argv[2])
summary_path = pathlib.Path(sys.argv[3])
root = pathlib.Path(sys.argv[4])

# Heuristic matcher for the first line of a Rust fn item, allowing optional
# visibility / async / unsafe prefixes.
fn_re = re.compile(r'^\s*(?:pub(?:\([^)]+\))?\s+)?(?:async\s+)?(?:unsafe\s+)?fn\s+\w+')

def load_json_lines(path: pathlib.Path):
    """Yield parsed JSON objects from a JSON-lines file, skipping bad lines."""
    for raw in path.read_text(encoding='utf-8').splitlines():
        raw = raw.strip()
        if not raw:
            continue
        try:
            yield json.loads(raw)
        except json.JSONDecodeError:
            # cargo can interleave non-JSON output with the messages; skip it.
            continue

def repo_relative(path: str) -> str | None:
    """Return `path` relative to the repo root, or None when outside it."""
    try:
        return pathlib.Path(path).resolve().relative_to(root).as_posix()
    except Exception:
        return None

def clippy_counts(path: pathlib.Path) -> dict[str, int]:
    """Count clippy warnings per tracked source file (primary span wins)."""
    counts: dict[str, int] = defaultdict(int)
    for item in load_json_lines(path):
        if item.get('reason') != 'compiler-message':
            continue
        message = item.get('message', {})
        if message.get('level') != 'warning':
            continue
        spans = message.get('spans') or []
        # Prefer the primary span; fall back to the first span if none is
        # marked primary.
        primary = next((span for span in spans if span.get('is_primary')), None)
        if primary is None:
            primary = spans[0] if spans else None
        if not primary:
            continue
        rel = repo_relative(primary.get('file_name', ''))
        # Only count first-party sources; skip generated/target output and
        # the dedicated test trees.
        if rel is None or '/src/' not in rel or '/target/' in rel:
            continue
        if '/src/tests/' in rel:
            continue
        counts[rel] += 1
    return dict(sorted(counts.items()))

def function_blocks(lines: list[str]):
    """Yield (start_line, end_line, has_doc, non_trivial) per fn in `lines`.

    Brace counting is purely textual, so braces inside string literals can
    skew the detected block end — acceptable for a ratcheting heuristic.
    """
    index = 0
    while index < len(lines):
        if not fn_re.match(lines[index]):
            index += 1
            continue
        start = index
        doc_ok = False
        # Scan backwards over blank lines for a `///` doc comment or a
        # `#[doc =` attribute immediately above the fn.
        prev = index - 1
        while prev >= 0 and not lines[prev].strip():
            prev -= 1
        if prev >= 0:
            stripped = lines[prev].lstrip()
            doc_ok = stripped.startswith('///') or stripped.startswith('#[doc =')
        brace_depth = 0
        seen_open = False
        body_lines = 0
        j = index
        while j < len(lines):
            text = lines[j]
            brace_depth += text.count('{') - text.count('}')
            if '{' in text:
                seen_open = True
            if seen_open and text.strip():
                body_lines += 1
            if seen_open and brace_depth <= 0:
                break
            j += 1
        block_text = '\n'.join(lines[start:j + 1])
        # "Non-trivial" = long body, or any branching/looping keyword in the
        # block text (token list includes '?.' as written — intent unclear;
        # NOTE(review): confirm whether '?.'  was meant to catch `?` chains).
        non_trivial = body_lines >= 12 or any(token in block_text for token in (' if ', ' match ', ' for ', ' while ', ' loop ', '?.'))
        yield start + 1, j + 1, doc_ok, non_trivial
        index = j + 1

def doc_debt_counts(path: pathlib.Path) -> dict[str, int]:
    """Count non-trivial, undocumented fns per tracked source file.

    NOTE(review): the `path` parameter is never used — the walk always starts
    from the global `root`; confirm and drop the parameter.
    """
    counts: dict[str, int] = defaultdict(int)
    for file in sorted(root.rglob('*.rs')):
        rel = repo_relative(str(file))
        if rel is None or '/src/' not in rel or '/target/' in rel:
            continue
        if '/src/tests/' in rel:
            continue
        lines = file.read_text(encoding='utf-8').splitlines()
        debt = 0
        for _, _, doc_ok, non_trivial in function_blocks(lines):
            if non_trivial and not doc_ok:
                debt += 1
        counts[rel] = debt
    return dict(sorted(counts.items()))

def source_loc_counts() -> dict[str, int]:
    """Count physical lines per tracked source file."""
    counts: dict[str, int] = {}
    for file in sorted(root.rglob('*.rs')):
        rel = repo_relative(str(file))
        if rel is None or '/src/' not in rel or '/target/' in rel:
            continue
        if '/src/tests/' in rel:
            continue
        counts[rel] = sum(1 for _ in file.open('r', encoding='utf-8'))
    return dict(sorted(counts.items()))

# Assemble the current snapshot: LOC first so every tracked file has an
# entry, then layer the clippy and doc-debt counts on top.
current = {}
for path, loc in source_loc_counts().items():
    current[path] = {'loc': loc}
for path, count in clippy_counts(clippy_path).items():
    current.setdefault(path, {})['clippy_warnings'] = count
for path, count in doc_debt_counts(root).items():
    current.setdefault(path, {})['doc_debt'] = count

baseline = {'files': {}}
if baseline_path.exists():
    with baseline_path.open('r', encoding='utf-8') as fh:
        baseline = json.load(fh)
baseline_files = baseline.get('files', {})

# Ratchet check: any metric growing past its baseline value, or a file that
# is missing from the baseline entirely, counts as a regression.
regressions = []
for path, current_entry in current.items():
    baseline_entry = baseline_files.get(path)
    if baseline_entry is None:
        regressions.append(f'{path}: missing baseline entry')
        continue
    for key in ('loc', 'clippy_warnings', 'doc_debt'):
        current_value = int(current_entry.get(key, 0))
        baseline_value = int(baseline_entry.get(key, 0))
        if current_value > baseline_value:
            regressions.append(
                f'{path}: {key} grew from {baseline_value} to {current_value}'
            )

totals = {
    'files': len(current),
    'over_500': sum(1 for entry in current.values() if int(entry.get('loc', 0)) > 500),
    'clippy_warnings': sum(int(entry.get('clippy_warnings', 0)) for entry in current.values()),
    'doc_debt': sum(int(entry.get('doc_debt', 0)) for entry in current.values()),
}

# Human-readable summary: headline totals followed by a per-file table.
lines = []
lines.append('hygiene gate report')
lines.append(f"files tracked: {totals['files']}")
lines.append(f"files over 500 LOC: {totals['over_500']}")
lines.append(f"clippy warnings tracked: {totals['clippy_warnings']}")
lines.append(f"non-trivial undocumented functions tracked: {totals['doc_debt']}")
lines.append('')
lines.append('path | loc | clippy warnings | doc debt | baseline status')
lines.append('-' * 78)
for path in sorted(current):
    entry = current[path]
    baseline_entry = baseline_files.get(path)
    if baseline_entry is None:
        status = 'new'
        baseline_loc = 'n/a'
        baseline_clippy = 'n/a'
        baseline_doc = 'n/a'
    else:
        baseline_loc = str(baseline_entry.get('loc', 0))
        baseline_clippy = str(baseline_entry.get('clippy_warnings', 0))
        baseline_doc = str(baseline_entry.get('doc_debt', 0))
        status = 'ok'
        if (
            int(entry.get('loc', 0)) > int(baseline_entry.get('loc', 0))
            or int(entry.get('clippy_warnings', 0)) > int(baseline_entry.get('clippy_warnings', 0))
            or int(entry.get('doc_debt', 0)) > int(baseline_entry.get('doc_debt', 0))
        ):
            status = 'regressed'
    lines.append(
        f"{path} | {entry.get('loc', 0)} | {entry.get('clippy_warnings', 0)} | {entry.get('doc_debt', 0)} | {baseline_loc}/{baseline_clippy}/{baseline_doc} | {status}"
    )
summary_path.write_text('\n'.join(lines) + '\n', encoding='utf-8')
print(summary_path.read_text(encoding='utf-8'))
# Regressions are reported on stderr and fail the gate via exit code 1.
if regressions:
    for line in regressions:
        print(line, file=sys.stderr)
    raise SystemExit(1)
PY

View File

@ -0,0 +1,191 @@
{
"generated_from": "/tmp/hygiene-clippy.json",
"files": {
"client/src/app.rs": {
"loc": 487,
"clippy_warnings": 42,
"doc_debt": 9
},
"client/src/app_support.rs": {
"loc": 129,
"doc_debt": 3
},
"client/src/handshake.rs": {
"loc": 155,
"doc_debt": 1
},
"client/src/input/camera.rs": {
"loc": 311,
"clippy_warnings": 40,
"doc_debt": 4
},
"client/src/input/inputs.rs": {
"loc": 309,
"clippy_warnings": 38,
"doc_debt": 3
},
"client/src/input/keyboard.rs": {
"loc": 467,
"clippy_warnings": 30,
"doc_debt": 13
},
"client/src/input/keymap.rs": {
"loc": 196,
"clippy_warnings": 8,
"doc_debt": 0
},
"client/src/input/microphone.rs": {
"loc": 162,
"clippy_warnings": 19,
"doc_debt": 2
},
"client/src/input/mod.rs": {
"loc": 8,
"doc_debt": 0
},
"client/src/input/mouse.rs": {
"loc": 297,
"clippy_warnings": 40,
"doc_debt": 8
},
"client/src/layout.rs": {
"loc": 78,
"clippy_warnings": 6,
"doc_debt": 0
},
"client/src/lib.rs": {
"loc": 16,
"doc_debt": 0
},
"client/src/main.rs": {
"loc": 92,
"clippy_warnings": 2,
"doc_debt": 2
},
"client/src/output/audio.rs": {
"loc": 179,
"clippy_warnings": 43,
"doc_debt": 4
},
"client/src/output/display.rs": {
"loc": 81,
"doc_debt": 0
},
"client/src/output/layout.rs": {
"loc": 155,
"clippy_warnings": 4,
"doc_debt": 2
},
"client/src/output/mod.rs": {
"loc": 6,
"doc_debt": 0
},
"client/src/output/video.rs": {
"loc": 250,
"clippy_warnings": 37,
"doc_debt": 1
},
"client/src/paste.rs": {
"loc": 46,
"clippy_warnings": 2,
"doc_debt": 1
},
"common/src/bin/cli.rs": {
"loc": 3,
"doc_debt": 0
},
"common/src/cli.rs": {
"loc": 22,
"doc_debt": 0
},
"common/src/hid.rs": {
"loc": 80,
"doc_debt": 2
},
"common/src/lib.rs": {
"loc": 22,
"doc_debt": 0
},
"common/src/paste.rs": {
"loc": 95,
"doc_debt": 2
},
"server/src/audio.rs": {
"loc": 340,
"clippy_warnings": 37,
"doc_debt": 6
},
"server/src/bin/lesavka-uvc.rs": {
"loc": 1035,
"clippy_warnings": 66,
"doc_debt": 25
},
"server/src/camera.rs": {
"loc": 325,
"clippy_warnings": 12,
"doc_debt": 8
},
"server/src/camera_runtime.rs": {
"loc": 179,
"clippy_warnings": 10,
"doc_debt": 3
},
"server/src/gadget.rs": {
"loc": 271,
"clippy_warnings": 30,
"doc_debt": 3
},
"server/src/handshake.rs": {
"loc": 40,
"clippy_warnings": 2,
"doc_debt": 1
},
"server/src/lib.rs": {
"loc": 13,
"doc_debt": 0
},
"server/src/main.rs": {
"loc": 353,
"clippy_warnings": 12,
"doc_debt": 10
},
"server/src/paste.rs": {
"loc": 146,
"clippy_warnings": 6,
"doc_debt": 3
},
"server/src/runtime_support.rs": {
"loc": 320,
"clippy_warnings": 14,
"doc_debt": 2
},
"server/src/uvc_control/model.rs": {
"loc": 510,
"doc_debt": 11
},
"server/src/uvc_control/protocol.rs": {
"loc": 403,
"doc_debt": 11
},
"server/src/uvc_runtime.rs": {
"loc": 204,
"clippy_warnings": 6,
"doc_debt": 1
},
"server/src/video.rs": {
"loc": 296,
"clippy_warnings": 25,
"doc_debt": 0
},
"server/src/video_sinks.rs": {
"loc": 458,
"clippy_warnings": 80,
"doc_debt": 2
},
"server/src/video_support.rs": {
"loc": 236,
"clippy_warnings": 8,
"doc_debt": 6
}
}
}

191
scripts/ci/quality_gate.sh Executable file
View File

@ -0,0 +1,191 @@
#!/usr/bin/env bash
# Coverage ratchet gate: run cargo llvm-cov over the workspace, compare
# per-file LOC and line coverage against the committed baseline, write
# Prometheus metrics, and (optionally) push them to a Pushgateway.
set -euo pipefail

ROOT_DIR=$(cd "$(dirname "${BASH_SOURCE[0]}")/../.." && pwd)
REPORT_DIR="${ROOT_DIR}/target/quality-gate"
COVERAGE_JSON="${REPORT_DIR}/coverage.json"
SUMMARY_TXT="${REPORT_DIR}/summary.txt"
METRICS_FILE="${REPORT_DIR}/metrics.prom"
BASELINE_JSON="${ROOT_DIR}/scripts/ci/quality_gate_baseline.json"
PUSHGATEWAY_URL=${QUALITY_GATE_PUSHGATEWAY_URL:-}

mkdir -p "${REPORT_DIR}"

# Seed the metrics file with a failure record; the Python step below
# overwrites it on a successful analysis, so an early crash still publishes
# status="fail".
cat >"${METRICS_FILE}" <<'METRICS'
# HELP platform_quality_gate_runs_total Number of quality gate runs by result.
# TYPE platform_quality_gate_runs_total counter
platform_quality_gate_runs_total{suite="lesavka",status="fail"} 1
METRICS

# Push the metrics file when a Pushgateway URL is configured; otherwise no-op.
publish_metrics() {
    if [[ -z "${PUSHGATEWAY_URL}" ]]; then
        echo "Skipping Pushgateway publish: QUALITY_GATE_PUSHGATEWAY_URL is not set"
        return 0
    fi
    curl --fail --silent --show-error \
        --data-binary @"${METRICS_FILE}" \
        "${PUSHGATEWAY_URL%/}/metrics/job/platform_quality_gate/suite/lesavka"
}

status=0
if cargo llvm-cov --workspace --all-targets --summary-only --json --output-path "${COVERAGE_JSON}"; then
if python3 - "${COVERAGE_JSON}" "${BASELINE_JSON}" "${METRICS_FILE}" "${SUMMARY_TXT}" "${ROOT_DIR}" <<'PY'
import json
import pathlib
import sys
from datetime import datetime, timezone

coverage_path = pathlib.Path(sys.argv[1])
baseline_path = pathlib.Path(sys.argv[2])
metrics_path = pathlib.Path(sys.argv[3])
summary_path = pathlib.Path(sys.argv[4])
root = pathlib.Path(sys.argv[5])

with coverage_path.open('r', encoding='utf-8') as fh:
    report = json.load(fh)
coverage_data = report['data'][0]
coverage_totals = coverage_data['totals']

# Collect per-file LOC and line-coverage percent for tracked sources only
# (first-party `/src/` files, excluding dedicated test trees).
files = []
for entry in coverage_data['files']:
    filename = pathlib.Path(entry['filename'])
    rel = filename.relative_to(root).as_posix()
    if '/src/tests/' in rel:
        continue
    if '/src/' not in rel:
        continue
    loc = sum(1 for _ in filename.open('r', encoding='utf-8'))
    line_percent = float(entry['summary']['lines']['percent'])
    files.append({
        'path': rel,
        'loc': loc,
        'line_percent': line_percent,
    })
files.sort(key=lambda item: item['path'])

baseline = {'files': {}}
if baseline_path.exists():
    with baseline_path.open('r', encoding='utf-8') as fh:
        baseline = json.load(fh)
baseline_files = baseline.get('files', {})

# Ratchet: LOC may not grow and coverage may not drop (0.01pp float slack).
# Files with no baseline entry are flagged separately below.
regressions = []
current_by_path = {item['path']: item for item in files}
missing_from_baseline = [path for path in current_by_path if path not in baseline_files]
for path, current in current_by_path.items():
    baseline_entry = baseline_files.get(path)
    if baseline_entry is None:
        continue
    if current['loc'] > int(baseline_entry['loc']):
        regressions.append(f"{path}: loc grew from {baseline_entry['loc']} to {current['loc']}")
    if current['line_percent'] + 0.01 < float(baseline_entry['line_percent']):
        regressions.append(
            f"{path}: line coverage fell from {baseline_entry['line_percent']:.2f}% to {current['line_percent']:.2f}%"
        )

workspace_lines = float(coverage_totals['lines']['percent'])
files_at_95 = sum(1 for item in files if item['line_percent'] >= 95.0)
files_below_95 = len(files) - files_at_95
over_500 = sum(1 for item in files if item['loc'] > 500)

# Render the Prometheus exposition-format payload.
metrics = []
metrics.append('# HELP platform_quality_gate_runs_total Number of quality gate runs by result.')
metrics.append('# TYPE platform_quality_gate_runs_total counter')
status_label = 'pass' if not regressions and not missing_from_baseline else 'fail'
metrics.append(f'platform_quality_gate_runs_total{{suite="lesavka",status="{status_label}"}} 1')
metrics.append('# HELP platform_quality_gate_workspace_line_coverage_percent Workspace line coverage percent.')
metrics.append('# TYPE platform_quality_gate_workspace_line_coverage_percent gauge')
metrics.append(f'platform_quality_gate_workspace_line_coverage_percent{{suite="lesavka"}} {workspace_lines:.2f}')
metrics.append('# HELP platform_quality_gate_files_total Count of tracked source files in the quality gate.')
metrics.append('# TYPE platform_quality_gate_files_total gauge')
metrics.append(f'platform_quality_gate_files_total{{suite="lesavka"}} {len(files)}')
metrics.append('# HELP platform_quality_gate_files_at_or_above_95_total Count of files at or above the 95 percent line target.')
metrics.append('# TYPE platform_quality_gate_files_at_or_above_95_total gauge')
metrics.append(f'platform_quality_gate_files_at_or_above_95_total{{suite="lesavka"}} {files_at_95}')
metrics.append('# HELP platform_quality_gate_files_below_95_total Count of files below the 95 percent line target.')
metrics.append('# TYPE platform_quality_gate_files_below_95_total gauge')
metrics.append(f'platform_quality_gate_files_below_95_total{{suite="lesavka"}} {files_below_95}')
metrics.append('# HELP platform_quality_gate_source_lines_over_500_total Count of tracked source files over 500 LOC.')
metrics.append('# TYPE platform_quality_gate_source_lines_over_500_total gauge')
metrics.append(f'platform_quality_gate_source_lines_over_500_total{{suite="lesavka"}} {over_500}')
metrics.append('# HELP platform_quality_gate_file_line_coverage_percent Per-file line coverage percent.')
metrics.append('# TYPE platform_quality_gate_file_line_coverage_percent gauge')
metrics.append('# HELP platform_quality_gate_file_source_lines Per-file source line count.')
metrics.append('# TYPE platform_quality_gate_file_source_lines gauge')

def esc(value: str) -> str:
    # Escape label values for the Prometheus exposition format.
    # NOTE(review): r'\\n' expands to two backslashes + n, whereas the
    # exposition format escapes a newline as a single backslash + n —
    # confirm whether this double escaping is intended.
    return value.replace('\\', r'\\').replace('\n', r'\\n').replace('"', r'\"')

for item in files:
    label = esc(item['path'])
    metrics.append(
        f'platform_quality_gate_file_line_coverage_percent{{suite="lesavka",file="{label}"}} {item["line_percent"]:.2f}'
    )
    metrics.append(
        f'platform_quality_gate_file_source_lines{{suite="lesavka",file="{label}"}} {item["loc"]}'
    )
metrics_path.write_text('\n'.join(metrics) + '\n', encoding='utf-8')

# Human-readable summary table mirroring the ratchet decision.
lines = []
lines.append(f'quality gate report generated at {datetime.now(timezone.utc).isoformat()}')
lines.append(f'workspace line coverage: {workspace_lines:.2f}%')
lines.append(f'source files tracked: {len(files)}')
lines.append(f'files >= 95% line coverage: {files_at_95}')
lines.append(f'files < 95% line coverage: {files_below_95}')
lines.append(f'files over 500 LOC: {over_500}')
lines.append('')
lines.append('path | loc | line coverage | baseline loc | baseline coverage | status')
lines.append('-' * 86)
for item in files:
    baseline_entry = baseline_files.get(item['path'])
    if baseline_entry is None:
        baseline_loc = 'n/a'
        baseline_cov = 'n/a'
        status = 'new'
    else:
        baseline_loc = str(baseline_entry['loc'])
        baseline_cov = f"{float(baseline_entry['line_percent']):.2f}%"
        status = 'ok'
        if item['loc'] > int(baseline_entry['loc']) or item['line_percent'] + 0.01 < float(baseline_entry['line_percent']):
            status = 'regressed'
    lines.append(
        f"{item['path']} | {item['loc']} | {item['line_percent']:.2f}% | {baseline_loc} | {baseline_cov} | {status}"
    )
summary_path.write_text('\n'.join(lines) + '\n', encoding='utf-8')
print(summary_path.read_text(encoding='utf-8'))
# Missing baseline entries and regressions go to stderr; either fails the gate.
if missing_from_baseline:
    print('missing baseline entries:', ', '.join(missing_from_baseline), file=sys.stderr)
if regressions or missing_from_baseline:
    for line in regressions:
        print(line, file=sys.stderr)
    raise SystemExit(1)
PY
then
    :
else
    status=$?
fi
else
    status=$?
fi

publish_status=0
if publish_metrics; then
    :
else
    publish_status=$?
fi
# A publish failure only fails the job when the gate itself passed, so a
# real gate regression remains the primary reported error.
if [[ ${status} -eq 0 && ${publish_status} -ne 0 ]]; then
    status=${publish_status}
fi
exit ${status}

View File

@ -0,0 +1,141 @@
{
"generated_from": "/tmp/lesavka-coverage.json",
"files": {
"client/src/app.rs": {
"loc": 487,
"line_percent": 0.0
},
"client/src/app_support.rs": {
"loc": 129,
"line_percent": 100.0
},
"client/src/handshake.rs": {
"loc": 155,
"line_percent": 40.24
},
"client/src/input/camera.rs": {
"loc": 311,
"line_percent": 0.0
},
"client/src/input/inputs.rs": {
"loc": 309,
"line_percent": 0.0
},
"client/src/input/keyboard.rs": {
"loc": 467,
"line_percent": 7.53
},
"client/src/input/keymap.rs": {
"loc": 196,
"line_percent": 33.81
},
"client/src/input/microphone.rs": {
"loc": 162,
"line_percent": 0.0
},
"client/src/input/mouse.rs": {
"loc": 297,
"line_percent": 0.0
},
"client/src/layout.rs": {
"loc": 78,
"line_percent": 0.0
},
"client/src/main.rs": {
"loc": 92,
"line_percent": 0.0
},
"client/src/output/audio.rs": {
"loc": 179,
"line_percent": 0.0
},
"client/src/output/display.rs": {
"loc": 81,
"line_percent": 35.71
},
"client/src/output/layout.rs": {
"loc": 155,
"line_percent": 98.98
},
"client/src/output/video.rs": {
"loc": 250,
"line_percent": 0.0
},
"client/src/paste.rs": {
"loc": 46,
"line_percent": 0.0
},
"common/src/bin/cli.rs": {
"loc": 3,
"line_percent": 0.0
},
"common/src/cli.rs": {
"loc": 22,
"line_percent": 100.0
},
"common/src/hid.rs": {
"loc": 80,
"line_percent": 51.67
},
"common/src/lib.rs": {
"loc": 22,
"line_percent": 0.0
},
"common/src/paste.rs": {
"loc": 95,
"line_percent": 100.0
},
"server/src/audio.rs": {
"loc": 340,
"line_percent": 0.0
},
"server/src/bin/lesavka-uvc.rs": {
"loc": 1035,
"line_percent": 0.0
},
"server/src/camera.rs": {
"loc": 325,
"line_percent": 52.68
},
"server/src/camera_runtime.rs": {
"loc": 179,
"line_percent": 38.89
},
"server/src/gadget.rs": {
"loc": 271,
"line_percent": 0.0
},
"server/src/handshake.rs": {
"loc": 40,
"line_percent": 0.0
},
"server/src/main.rs": {
"loc": 353,
"line_percent": 0.0
},
"server/src/paste.rs": {
"loc": 146,
"line_percent": 96.74
},
"server/src/runtime_support.rs": {
"loc": 320,
"line_percent": 41.36
},
"server/src/uvc_runtime.rs": {
"loc": 204,
"line_percent": 38.1
},
"server/src/video.rs": {
"loc": 296,
"line_percent": 0.0
},
"server/src/video_sinks.rs": {
"loc": 458,
"line_percent": 0.0
},
"server/src/video_support.rs": {
"loc": 236,
"line_percent": 87.3
}
}
}

View File

@ -30,3 +30,8 @@ base64 = "0.22"
[build-dependencies]
prost-build = "0.13"
[dev-dependencies]
serial_test = { workspace = true }
tempfile = { workspace = true }
temp-env = { workspace = true }

View File

@ -13,6 +13,7 @@ pub enum CameraOutput {
}
impl CameraOutput {
/// Return the canonical string name for the transport.
pub fn as_str(self) -> &'static str {
match self {
CameraOutput::Uvc => "uvc",
@ -28,6 +29,7 @@ pub enum CameraCodec {
}
impl CameraCodec {
/// Return the canonical string name for the codec.
pub fn as_str(self) -> &'static str {
match self {
CameraCodec::H264 => "h264",
@ -54,6 +56,14 @@ pub struct CameraConfig {
static LAST_CONFIG: OnceLock<RwLock<CameraConfig>> = OnceLock::new();
/// Refresh the cached camera config from the current environment.
///
/// Inputs: none; the selector consults the current environment and local
/// `/sys/class/drm` state.
/// Outputs: the newly selected camera configuration, which is also stored as
/// the last-known config for future reads.
/// Why: callers need a single entrypoint for changing the active output mode
/// without re-implementing the selection rules.
pub fn update_camera_config() -> CameraConfig {
let cfg = select_camera_config();
let lock = LAST_CONFIG.get_or_init(|| RwLock::new(cfg.clone()));
@ -61,6 +71,13 @@ pub fn update_camera_config() -> CameraConfig {
cfg
}
/// Return the last selected camera configuration.
///
/// Inputs: none.
/// Outputs: the cached camera configuration, or a freshly selected one when
/// the cache has not been initialized yet.
/// Why: call sites can read the active config without worrying about whether
/// initialization already happened in this process.
pub fn current_camera_config() -> CameraConfig {
if let Some(lock) = LAST_CONFIG.get() {
return lock.read().unwrap().clone();
@ -201,6 +218,7 @@ fn detect_hdmi_connector(require_connected: bool) -> Option<HdmiConnector> {
.and_then(|v| v.trim().parse::<u32>().ok());
connectors.push((name, status, id));
}
connectors.sort_by(|a, b| a.0.cmp(&b.0));
let matches_preferred =
|name: &str, preferred: &str| name == preferred || name.ends_with(preferred);
@ -216,6 +234,25 @@ fn detect_hdmi_connector(require_connected: bool) -> Option<HdmiConnector> {
}
}
// Keep the previously-selected connector stable when no explicit override is set.
// This prevents connector flapping when multiple HDMI outputs are simultaneously connected.
if preferred.is_none() {
let previous = LAST_CONFIG
.get()
.and_then(|lock| lock.read().ok())
.and_then(|cfg| cfg.hdmi.as_ref().map(|h| h.name.clone()));
if let Some(prev) = previous {
for (name, status, id) in &connectors {
if *name == prev && (!require_connected || status == "connected") {
return Some(HdmiConnector {
name: name.clone(),
id: *id,
});
}
}
}
}
for (name, status, id) in connectors {
if !require_connected || status == "connected" {
return Some(HdmiConnector { name, id });
@ -253,3 +290,36 @@ fn read_u32_from_env(key: &str) -> Option<u32> {
fn read_u32_from_map(map: &HashMap<String, String>, key: &str) -> Option<u32> {
map.get(key).and_then(|v| v.parse::<u32>().ok())
}
#[cfg(test)]
mod tests {
    use super::{CameraCodec, CameraOutput, current_camera_config, update_camera_config};
    use serial_test::serial;
    use temp_env::with_var;

    // `#[serial]` because the selector reads process-global environment
    // variables; concurrent tests mutating the env would race.
    #[test]
    #[serial]
    fn camera_config_env_override_prefers_uvc_values() {
        // Force UVC output with explicit geometry/fps overrides, then verify
        // both the freshly selected config and the cached copy agree.
        with_var("LESAVKA_CAM_OUTPUT", Some("uvc"), || {
            with_var("LESAVKA_UVC_WIDTH", Some("800"), || {
                with_var("LESAVKA_UVC_HEIGHT", Some("600"), || {
                    with_var("LESAVKA_UVC_FPS", Some("24"), || {
                        let cfg = update_camera_config();
                        assert_eq!(cfg.output, CameraOutput::Uvc);
                        // Selecting UVC output is expected to pick MJPEG.
                        assert_eq!(cfg.codec, CameraCodec::Mjpeg);
                        assert_eq!(cfg.width, 800);
                        assert_eq!(cfg.height, 600);
                        assert_eq!(cfg.fps, 24);
                        // The cached accessor must reflect the last update.
                        let cached = current_camera_config();
                        assert_eq!(cached.output, CameraOutput::Uvc);
                        assert_eq!(cached.codec, CameraCodec::Mjpeg);
                        assert_eq!(cached.width, 800);
                        assert_eq!(cached.height, 600);
                        assert_eq!(cached.fps, 24);
                    });
                });
            });
        });
    }
}

View File

@ -0,0 +1,179 @@
#![forbid(unsafe_code)]
use std::sync::Arc;
use std::sync::atomic::{AtomicU64, Ordering};
use tokio::sync::Mutex;
use tonic::Status;
use tracing::info;
use crate::{camera, uvc_runtime, video};
/// Pairing of the active relay with the configuration it was built for.
struct CameraRelaySlot {
    // Config used when the relay was created; compared against new requests
    // to decide whether the existing pipeline can be reused.
    cfg: camera::CameraConfig,
    // Shared handle returned to callers of `CameraRuntime::activate`.
    relay: Arc<video::CameraRelay>,
}
/// Manage the currently active camera relay instance.
///
/// Inputs: camera configurations requested by incoming RPC streams.
/// Outputs: a reusable relay handle plus a monotonically increasing session id.
/// Why: only one camera output should own the physical sink at a time, but we
/// still want identical stream requests to reuse the existing pipeline.
pub struct CameraRuntime {
    // Monotonic session counter; only the most recently issued id is "active".
    generation: AtomicU64,
    // The relay currently owning the physical sink, if any.
    slot: Mutex<Option<CameraRelaySlot>>,
}
impl CameraRuntime {
    /// Create an empty runtime with no active relay.
    ///
    /// Inputs: none.
    /// Outputs: a fresh runtime with generation zero.
    /// Why: keeping construction trivial lets the main server handler create
    /// camera state early and share it across RPCs.
    #[must_use]
    pub fn new() -> Self {
        Self {
            generation: AtomicU64::new(0),
            slot: Mutex::new(None),
        }
    }

    /// Activate the relay matching the current configuration.
    ///
    /// Inputs: the desired camera configuration selected from the environment.
    /// Outputs: a session id plus a relay that is either reused or recreated.
    /// Why: UVC/HDMI sinks are expensive to churn, so identical requests should
    /// reuse the active pipeline instead of rebuilding it every time.
    pub async fn activate(
        &self,
        cfg: &camera::CameraConfig,
    ) -> Result<(u64, Arc<video::CameraRelay>), Status> {
        // Claim a fresh session id up front; any previously issued id is
        // superseded from this point on (see `is_active`).
        // NOTE(review): the id is consumed even when `make_relay` fails below,
        // which also invalidates the prior session — confirm this is intended.
        let session_id = self.generation.fetch_add(1, Ordering::SeqCst) + 1;
        let mut slot = self.slot.lock().await;
        let mut reused = false;
        // Reuse the live relay only when the full sink profile matches;
        // otherwise build a new one for this config.
        let relay = if let Some(existing) = slot.as_ref() {
            if camera_cfg_eq(&existing.cfg, cfg) {
                reused = true;
                existing.relay.clone()
            } else {
                self.make_relay(cfg)?
            }
        } else {
            self.make_relay(cfg)?
        };
        if !reused {
            // Replacing the slot drops the previous CameraRelaySlot (and with
            // it this runtime's reference to the old relay).
            *slot = Some(CameraRelaySlot {
                cfg: cfg.clone(),
                relay: relay.clone(),
            });
            info!(
                session_id,
                output = cfg.output.as_str(),
                codec = cfg.codec.as_str(),
                width = cfg.width,
                height = cfg.height,
                fps = cfg.fps,
                "🎥 camera relay (re)created"
            );
        } else {
            info!(session_id, "🎥 camera relay reused");
        }
        Ok((session_id, relay))
    }

    /// Check whether a previously issued session id is still current.
    ///
    /// Inputs: a session id returned by `activate`.
    /// Outputs: `true` only when the session is still the most recent owner of
    /// the active camera relay.
    /// Why: superseded streams must stop writing frames into a sink that has
    /// already been reconfigured for a newer client session.
    #[must_use]
    pub fn is_active(&self, session_id: u64) -> bool {
        self.generation.load(Ordering::Relaxed) == session_id
    }

    // Build a relay for the requested output, translating all setup failures
    // into gRPC `Status` values the stream handler can return directly.
    fn make_relay(&self, cfg: &camera::CameraConfig) -> Result<Arc<video::CameraRelay>, Status> {
        let relay = match cfg.output {
            camera::CameraOutput::Uvc => {
                // Explicit operator kill-switch for the UVC sink.
                if std::env::var("LESAVKA_DISABLE_UVC").is_ok() {
                    return Err(Status::failed_precondition(
                        "UVC output disabled (LESAVKA_DISABLE_UVC set)",
                    ));
                }
                let uvc = uvc_runtime::pick_uvc_device()
                    .map_err(|e| Status::internal(format!("{e:#}")))?;
                info!(%uvc, "🎥 stream_camera using UVC sink");
                video::CameraRelay::new_uvc(0, &uvc, cfg)
                    .map_err(|e| Status::internal(format!("{e:#}")))?
            }
            camera::CameraOutput::Hdmi => video::CameraRelay::new_hdmi(0, cfg)
                .map_err(|e| Status::internal(format!("{e:#}")))?,
        };
        Ok(Arc::new(relay))
    }
}
/// Compare two camera configurations for sink reuse.
///
/// Inputs: the currently active camera config and the requested config.
/// Outputs: `true` when both configs target the same sink and stream profile.
/// Why: reusing a pipeline is only safe when both the transport parameters and
/// the HDMI connector identity still match.
#[must_use]
pub fn camera_cfg_eq(a: &camera::CameraConfig, b: &camera::CameraConfig) -> bool {
    // Stream profile: transport, codec, and geometry/rate must all agree.
    let profile_matches = a.output == b.output
        && a.codec == b.codec
        && a.width == b.width
        && a.height == b.height
        && a.fps == b.fps;
    // HDMI identity: both absent, or both present with the same name and id.
    let hdmi_matches = match (&a.hdmi, &b.hdmi) {
        (Some(lhs), Some(rhs)) => lhs.name == rhs.name && lhs.id == rhs.id,
        (None, None) => true,
        _ => false,
    };
    profile_matches && hdmi_matches
}
#[cfg(test)]
mod tests {
    use super::camera_cfg_eq;
    use crate::camera::{CameraCodec, CameraConfig, CameraOutput, HdmiConnector};

    // Covers three comparison axes: identical configs match, a stream
    // parameter change breaks the match, and a different HDMI connector
    // breaks the match even with identical stream parameters.
    #[test]
    fn camera_cfg_eq_requires_matching_sink_profile() {
        let base = CameraConfig {
            output: CameraOutput::Hdmi,
            codec: CameraCodec::H264,
            width: 1920,
            height: 1080,
            fps: 30,
            hdmi: Some(HdmiConnector {
                name: String::from("HDMI-A-1"),
                id: Some(42),
            }),
        };
        let same = base.clone();
        assert!(camera_cfg_eq(&base, &same));
        // Any stream-profile field difference must reject reuse.
        let mut changed = base.clone();
        changed.fps = 25;
        assert!(!camera_cfg_eq(&base, &changed));
        // A different connector name must reject reuse too.
        changed = base.clone();
        changed.hdmi = Some(HdmiConnector {
            name: String::from("HDMI-A-2"),
            id: Some(42),
        });
        assert!(!camera_cfg_eq(&base, &changed));
    }
}

View File

@ -2,7 +2,12 @@
pub mod audio;
pub mod camera;
pub mod camera_runtime;
pub mod gadget;
pub mod handshake;
pub mod paste;
pub mod runtime_support;
pub mod uvc_runtime;
pub mod video;
pub(crate) mod video_sinks;
pub(crate) mod video_support;

View File

@ -2,16 +2,23 @@
#![forbid(unsafe_code)]
use anyhow::{Context, Result};
use base64::Engine as _;
use base64::engine::general_purpose::STANDARD;
use chacha20poly1305::aead::{Aead, KeyInit};
use chacha20poly1305::{ChaCha20Poly1305, Key, Nonce};
use tokio::fs::File;
use tokio::io::AsyncWriteExt;
use tokio::sync::Mutex;
use lesavka_common::hid::char_to_usage;
use lesavka_common::lesavka::PasteRequest;
use lesavka_common::paste::decode_shared_key;
/// Decrypt a `PasteRequest` sent by the desktop client.
///
/// Inputs: the protobuf request carrying the encrypted payload and nonce.
/// Outputs: the decoded UTF-8 clipboard text, or an error when validation or
/// decryption fails.
/// Why: the server must reject plaintext payloads so clipboard injection is
/// never silently downgraded to an unencrypted transport.
pub fn decrypt(req: &PasteRequest) -> Result<String> {
if !req.encrypted {
anyhow::bail!("paste request must be encrypted");
@ -25,6 +32,14 @@ pub fn decrypt(req: &PasteRequest) -> Result<String> {
Ok(String::from_utf8(plaintext).context("paste plaintext not UTF-8")?)
}
/// Type clipboard text into the HID keyboard gadget.
///
/// Inputs: the HID keyboard file handle plus the plaintext that should be
/// injected into the remote machine.
/// Outputs: `Ok(())` after emitting key press/release reports for every
/// supported character up to the configured maximum.
/// Why: paste injection must rate-limit itself so slower hosts do not drop
/// HID reports under bursty clipboard loads.
pub async fn type_text(kb: &Mutex<File>, text: &str) -> Result<()> {
let max = std::env::var("LESAVKA_PASTE_MAX")
.ok()
@ -33,7 +48,7 @@ pub async fn type_text(kb: &Mutex<File>, text: &str) -> Result<()> {
let delay_ms = std::env::var("LESAVKA_PASTE_DELAY_MS")
.ok()
.and_then(|v| v.parse::<u64>().ok())
.unwrap_or(1);
.unwrap_or(8);
let delay = std::time::Duration::from_millis(delay_ms);
let mut kb = kb.lock().await;
@ -53,80 +68,79 @@ pub async fn type_text(kb: &Mutex<File>, text: &str) -> Result<()> {
fn load_key() -> Result<[u8; 32]> {
let raw = std::env::var("LESAVKA_PASTE_KEY")
.context("LESAVKA_PASTE_KEY not set (required for PasteText RPC)")?;
decode_key(&raw)
decode_shared_key(&raw)
}
fn decode_key(raw: &str) -> Result<[u8; 32]> {
let s = raw.trim();
let s = s.strip_prefix("hex:").unwrap_or(s);
let bytes = if s.len() == 64 && s.chars().all(|c| c.is_ascii_hexdigit()) {
hex_to_bytes(s)?
} else {
STANDARD
.decode(s.as_bytes())
.context("LESAVKA_PASTE_KEY must be 32-byte base64 or 64-char hex")?
};
if bytes.len() != 32 {
anyhow::bail!("LESAVKA_PASTE_KEY must decode to 32 bytes");
}
let mut out = [0u8; 32];
out.copy_from_slice(&bytes);
Ok(out)
}
#[cfg(test)]
mod tests {
use super::{decrypt, type_text};
use chacha20poly1305::aead::{Aead, KeyInit};
use chacha20poly1305::{ChaCha20Poly1305, Key, Nonce};
use lesavka_common::lesavka::PasteRequest;
use serial_test::serial;
use temp_env::with_var;
use tempfile::tempdir;
use tokio::fs::{File, OpenOptions};
use tokio::io::AsyncReadExt;
use tokio::runtime::Runtime;
use tokio::sync::Mutex;
fn hex_to_bytes(s: &str) -> Result<Vec<u8>> {
let mut out = Vec::with_capacity(s.len() / 2);
let chars: Vec<char> = s.chars().collect();
for i in (0..chars.len()).step_by(2) {
let hi = chars[i].to_digit(16).context("hex decode failed")?;
let lo = chars[i + 1].to_digit(16).context("hex decode failed")?;
out.push(((hi << 4) | lo) as u8);
#[test]
#[serial]
fn decrypt_rejects_plaintext_requests() {
let req = PasteRequest {
nonce: vec![],
data: vec![],
encrypted: false,
};
let err = decrypt(&req).expect_err("plaintext must fail");
assert!(err.to_string().contains("encrypted"));
}
Ok(out)
}
fn char_to_usage(c: char) -> Option<(u8, u8)> {
let shift = 0x02; // left shift in HID modifier byte
match c {
'a'..='z' => Some((0x04 + (c as u8 - b'a'), 0)),
'A'..='Z' => Some((0x04 + (c as u8 - b'A'), shift)),
'1'..='9' => Some((0x1E + (c as u8 - b'1'), 0)),
'0' => Some((0x27, 0)),
'!' => Some((0x1E, shift)),
'@' => Some((0x1F, shift)),
'#' => Some((0x20, shift)),
'$' => Some((0x21, shift)),
'%' => Some((0x22, shift)),
'^' => Some((0x23, shift)),
'&' => Some((0x24, shift)),
'*' => Some((0x25, shift)),
'(' => Some((0x26, shift)),
')' => Some((0x27, shift)),
'-' => Some((0x2D, 0)),
'_' => Some((0x2D, shift)),
'=' => Some((0x2E, 0)),
'+' => Some((0x2E, shift)),
'[' => Some((0x2F, 0)),
'{' => Some((0x2F, shift)),
']' => Some((0x30, 0)),
'}' => Some((0x30, shift)),
'\\' => Some((0x31, 0)),
'|' => Some((0x31, shift)),
';' => Some((0x33, 0)),
':' => Some((0x33, shift)),
'\'' => Some((0x34, 0)),
'"' => Some((0x34, shift)),
'`' => Some((0x35, 0)),
'~' => Some((0x35, shift)),
',' => Some((0x36, 0)),
'<' => Some((0x36, shift)),
'.' => Some((0x37, 0)),
'>' => Some((0x37, shift)),
'/' => Some((0x38, 0)),
'?' => Some((0x38, shift)),
' ' => Some((0x2C, 0)),
'\n' | '\r' => Some((0x28, 0)),
'\t' => Some((0x2B, 0)),
_ => None,
#[test]
#[serial]
fn decrypt_round_trips_encrypted_payload() {
let key = "hex:00112233445566778899aabbccddeeff00112233445566778899aabbccddeeff";
with_var("LESAVKA_PASTE_KEY", Some(key), || {
let raw_key = lesavka_common::paste::decode_shared_key(key).expect("decode key");
let cipher = ChaCha20Poly1305::new(Key::from_slice(&raw_key));
let nonce_bytes = [0x11u8; 12];
let nonce = Nonce::from_slice(&nonce_bytes);
let data = cipher
.encrypt(nonce, b"secret paste".as_ref())
.expect("encrypt");
let req = PasteRequest {
nonce: nonce_bytes.to_vec(),
data,
encrypted: true,
};
assert_eq!(decrypt(&req).expect("decrypt"), "secret paste");
});
}
#[test]
#[serial]
fn type_text_writes_reports_for_supported_chars() {
let rt = Runtime::new().expect("runtime");
with_var("LESAVKA_PASTE_DELAY_MS", Some("0"), || {
rt.block_on(async {
let dir = tempdir().expect("tempdir");
let path = dir.path().join("hidg0.bin");
let file = OpenOptions::new()
.create(true)
.truncate(true)
.write(true)
.open(&path)
.await
.expect("open temp file");
let kb = Mutex::new(file);
type_text(&kb, "A!🙂").await.expect("type text");
let mut bytes = Vec::new();
let mut file = File::open(&path).await.expect("reopen temp file");
file.read_to_end(&mut bytes).await.expect("read reports");
assert_eq!(bytes.len(), 32);
});
});
}
}

View File

@ -0,0 +1,320 @@
#![forbid(unsafe_code)]
use anyhow::Context as _;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::time::Duration;
use tokio::fs::OpenOptions;
use tokio::io::AsyncWriteExt;
use tokio::sync::Mutex;
use tracing::{error, info, trace, warn};
use tracing_appender::non_blocking::WorkerGuard;
use tracing_subscriber::{filter::EnvFilter, fmt, prelude::*};
use crate::{audio, gadget::UsbGadget};
// Process-wide counter backing `next_stream_id`; seeded at 1 so id 0 is never issued.
static STREAM_SEQ: AtomicU64 = AtomicU64::new(1);
/// Initialise structured tracing for the server process.
///
/// Inputs: none; configuration is read from `RUST_LOG`.
/// Outputs: the non-blocking file writer guard that must stay alive for the
/// lifetime of the process.
/// Why: the server writes both to stdout and a local log file so field logs are
/// still available after a transient SSH disconnect.
pub fn init_tracing() -> anyhow::Result<WorkerGuard> {
    // The log file is truncated on every start, so it only covers this run.
    let file = std::fs::OpenOptions::new()
        .create(true)
        .truncate(true)
        .write(true)
        .open("/tmp/lesavka-server.log")?;
    // Non-blocking writer: a background worker flushes lines for as long as
    // the returned guard stays alive.
    let (file_writer, guard) = tracing_appender::non_blocking(file);
    let env_filter = EnvFilter::try_from_default_env()
        .unwrap_or_else(|_| EnvFilter::new("lesavka_server=info,lesavka_server::video=warn"));
    // Capture the filter text before `env_filter` is moved into the registry.
    let filter_str = env_filter.to_string();
    tracing_subscriber::registry()
        .with(env_filter)
        // Stdout layer with thread ids; file layer without ANSI escapes.
        .with(fmt::layer().with_target(true).with_thread_ids(true))
        .with(
            fmt::layer()
                .with_writer(file_writer)
                .with_ansi(false)
                .with_target(true)
                .with_level(true),
        )
        .init();
    tracing::info!("📜 effective RUST_LOG = \"{}\"", filter_str);
    Ok(guard)
}
/// Open a HID gadget endpoint with bounded retry logic.
///
/// Inputs: the path of the gadget device node to open.
/// Outputs: a writable non-blocking file handle once the kernel reports the
/// endpoint as ready.
/// Why: gadget endpoints frequently flap during cable changes, so the server
/// must wait for readiness instead of failing the whole process immediately.
pub async fn open_with_retry(path: &str) -> anyhow::Result<tokio::fs::File> {
    const MAX_ATTEMPTS: u32 = 200;
    const RETRY_PAUSE: Duration = Duration::from_millis(50);
    let mut attempt = 0u32;
    while attempt < MAX_ATTEMPTS {
        attempt += 1;
        let opened = OpenOptions::new()
            .write(true)
            .custom_flags(libc::O_NONBLOCK)
            .open(path)
            .await;
        match opened {
            Ok(file) => {
                info!("✅ {path} opened on attempt #{attempt}");
                return Ok(file);
            }
            // EBUSY: the endpoint exists but is not ready yet — wait briefly.
            Err(error) if error.raw_os_error() == Some(libc::EBUSY) => {
                trace!("⏳ {path} busy… retry #{attempt}");
                tokio::time::sleep(RETRY_PAUSE).await;
            }
            // Any other error is treated as fatal for this open.
            Err(error) => return Err(error).with_context(|| format!("opening {path}")),
        }
    }
    Err(anyhow::anyhow!("timeout waiting for {path}"))
}
/// Check whether gadget auto-recovery is enabled.
///
/// Inputs: none.
/// Outputs: `true` only when the explicit recovery opt-in env var is present.
/// Why: cycling the whole USB gadget can be disruptive, so operators must
/// choose that behavior deliberately on each deployment.
#[must_use]
pub fn allow_gadget_cycle() -> bool {
    // Presence is the opt-in; the variable's value is ignored.
    std::env::var("LESAVKA_ALLOW_GADGET_CYCLE").is_ok()
}
/// Return whether a HID write error should trigger recovery.
///
/// Inputs: the raw `errno` value observed while writing to a HID gadget.
/// Outputs: `true` when the error is consistent with a lost USB connection.
/// Why: only transport-level failures should cause device reopen and gadget
/// cycling; transient backpressure is handled elsewhere.
#[must_use]
pub fn should_recover_hid_error(code: Option<i32>) -> bool {
    // Errnos that indicate the USB transport itself dropped out from under us.
    const TRANSPORT_DOWN: [i32; 3] = [libc::ENOTCONN, libc::ESHUTDOWN, libc::EPIPE];
    code.is_some_and(|errno| TRANSPORT_DOWN.contains(&errno))
}
/// Recover the HID endpoints after a transport failure.
///
/// Inputs: the write error plus the current gadget and file handles.
/// Outputs: none; recovery runs asynchronously and updates the shared handles
/// in place when reopening succeeds.
/// Why: streams should survive cable resets without dropping the entire server
/// process or requiring a manual restart from the operator.
pub async fn recover_hid_if_needed(
    err: &std::io::Error,
    gadget: UsbGadget,
    kb: Arc<Mutex<tokio::fs::File>>,
    ms: Arc<Mutex<tokio::fs::File>>,
    did_cycle: Arc<AtomicBool>,
) {
    let code = err.raw_os_error();
    // Ignore errors that do not indicate a lost transport (e.g. backpressure).
    if !should_recover_hid_error(code) {
        return;
    }
    // CAS gate: at most one recovery in flight; concurrent callers bail out.
    if did_cycle
        .compare_exchange(false, true, Ordering::SeqCst, Ordering::SeqCst)
        .is_err()
    {
        return;
    }
    let allow_cycle = allow_gadget_cycle();
    // Recovery runs on a detached task so the failing stream is not blocked.
    tokio::spawn(async move {
        if allow_cycle {
            warn!("🔁 HID transport down (errno={code:?}) - cycling gadget");
            // `cycle()` is blocking work, so keep it off the async runtime.
            match tokio::task::spawn_blocking(move || gadget.cycle()).await {
                Ok(Ok(())) => info!("✅ USB gadget cycle complete (auto-recover)"),
                Ok(Err(error)) => error!("💥 USB gadget cycle failed: {error:#}"),
                Err(error) => error!("💥 USB gadget cycle task panicked: {error:#}"),
            }
        } else {
            warn!(
                "🔒 HID transport down (errno={code:?}) - gadget cycle disabled; set LESAVKA_ALLOW_GADGET_CYCLE=1 to enable"
            );
        }
        // Reopen both endpoints and swap the shared handles in place so
        // existing stream tasks keep working through the same Arc<Mutex<_>>.
        if let Err(error) = async {
            let kb_new = open_with_retry("/dev/hidg0").await?;
            let ms_new = open_with_retry("/dev/hidg1").await?;
            *kb.lock().await = kb_new;
            *ms.lock().await = ms_new;
            Ok::<(), anyhow::Error>(())
        }
        .await
        {
            error!("💥 HID reopen failed: {error:#}");
        }
        // Cooldown before re-arming the gate so a flapping cable cannot
        // trigger back-to-back recovery cycles.
        tokio::time::sleep(Duration::from_secs(2)).await;
        did_cycle.store(false, Ordering::SeqCst);
    });
}
/// Open the UAC sink with retry logic.
///
/// Inputs: the ALSA device string that should receive microphone audio.
/// Outputs: a ready-to-use `Voice` sink.
/// Why: the USB audio gadget can appear after the RPC stream has already been
/// negotiated, so the server retries briefly before declaring the sink broken.
pub async fn open_voice_with_retry(uac_dev: &str) -> anyhow::Result<audio::Voice> {
    // Attempt count and inter-attempt delay are operator-tunable; the count is
    // clamped to at least one so the loop always runs.
    let attempts = std::env::var("LESAVKA_MIC_INIT_ATTEMPTS")
        .ok()
        .and_then(|value| value.parse::<u32>().ok())
        .unwrap_or(5)
        .max(1);
    let delay_ms = std::env::var("LESAVKA_MIC_INIT_DELAY_MS")
        .ok()
        .and_then(|value| value.parse::<u64>().ok())
        .unwrap_or(250);
    let delay = Duration::from_millis(delay_ms);
    let mut last_error: Option<anyhow::Error> = None;
    for attempt in 1..=attempts {
        match audio::Voice::new(uac_dev).await {
            Ok(voice) => {
                if attempt > 1 {
                    info!(%uac_dev, attempt, "🎤 microphone sink recovered");
                }
                return Ok(voice);
            }
            Err(error) => {
                warn!(%uac_dev, attempt, "⚠️ microphone sink init failed: {error:#}");
                last_error = Some(error);
                // Fix: only sleep between attempts. The previous version also
                // slept after the final failure, delaying the error return by
                // `delay_ms` for no benefit.
                if attempt < attempts {
                    tokio::time::sleep(delay).await;
                }
            }
        }
    }
    Err(last_error.unwrap_or_else(|| anyhow::anyhow!("microphone sink init failed")))
}
/// Allocate a stream identifier for logging and correlation.
///
/// Inputs: none.
/// Outputs: a monotonically increasing identifier.
/// Why: the server multiplexes several long-lived streams, so log lines need a
/// cheap correlation id that is stable across retries.
#[must_use]
pub fn next_stream_id() -> u64 {
    // `fetch_add` returns the prior value; STREAM_SEQ starts at 1, so the
    // first id handed out is 1. Relaxed ordering suffices here because the
    // ids only need to be unique, not ordered with other memory operations.
    STREAM_SEQ.fetch_add(1, Ordering::Relaxed)
}
/// Write one HID report with a short bounded retry loop.
///
/// Inputs: the shared gadget file handle plus the already-encoded report.
/// Outputs: `Ok(())` when the report reached the kernel buffer, or the final
/// write error after retrying transient backpressure.
/// Why: a brief retry window avoids dropping reports during momentary gadget
/// stalls without blocking the stream task indefinitely.
pub async fn write_hid_report(
    dev: &Arc<Mutex<tokio::fs::File>>,
    data: &[u8],
) -> std::io::Result<()> {
    let mut last_error: Option<std::io::Error> = None;
    for attempt in 0..5 {
        // Back off before each retry (2, 4, 6, 8 ms). Fix: the previous
        // version slept *after* every failed attempt, including a useless
        // 10 ms pause after the fifth one before returning the error; the
        // inter-attempt gaps are unchanged.
        if attempt > 0 {
            tokio::time::sleep(Duration::from_millis(attempt as u64 * 2)).await;
        }
        // The lock guard is scoped to one attempt so other writers are not
        // starved while we back off.
        let mut file = dev.lock().await;
        match file.write_all(data).await {
            Ok(()) => return Ok(()),
            // WouldBlock/EAGAIN is transient backpressure: remember and retry.
            Err(error)
                if error.kind() == std::io::ErrorKind::WouldBlock
                    || error.raw_os_error() == Some(libc::EAGAIN) =>
            {
                last_error = Some(error);
            }
            Err(error) => return Err(error),
        }
    }
    Err(last_error.unwrap_or_else(|| std::io::Error::from_raw_os_error(libc::EAGAIN)))
}
#[cfg(test)]
mod tests {
    use super::{
        allow_gadget_cycle, next_stream_id, open_with_retry, should_recover_hid_error,
        write_hid_report,
    };
    use serial_test::serial;
    use std::sync::Arc;
    use temp_env::with_var;
    use tempfile::NamedTempFile;
    use tokio::io::AsyncWriteExt;
    use tokio::sync::Mutex;

    // `#[serial]`: mutates process-global environment state.
    #[test]
    #[serial]
    fn allow_gadget_cycle_tracks_env_presence() {
        with_var("LESAVKA_ALLOW_GADGET_CYCLE", None::<&str>, || {
            assert!(!allow_gadget_cycle());
        });
        with_var("LESAVKA_ALLOW_GADGET_CYCLE", Some("1"), || {
            assert!(allow_gadget_cycle());
        });
    }

    #[test]
    fn should_recover_hid_error_matches_transport_failures() {
        assert!(should_recover_hid_error(Some(libc::ENOTCONN)));
        assert!(should_recover_hid_error(Some(libc::ESHUTDOWN)));
        assert!(should_recover_hid_error(Some(libc::EPIPE)));
        // EAGAIN is transient backpressure, not a lost transport.
        assert!(!should_recover_hid_error(Some(libc::EAGAIN)));
        assert!(!should_recover_hid_error(None));
    }

    #[test]
    fn next_stream_id_monotonically_increments() {
        let first = next_stream_id();
        let second = next_stream_id();
        assert!(second > first);
    }

    // Happy path only: the retry/EBUSY branch needs a busy device node, which
    // is not reproducible in a plain test environment.
    #[tokio::test]
    #[serial]
    async fn open_with_retry_opens_existing_file() {
        let tmp = NamedTempFile::new().expect("temp file");
        let mut file = open_with_retry(tmp.path().to_str().unwrap())
            .await
            .expect("open should succeed");
        file.write_all(b"ok").await.expect("write temp file");
        file.sync_all().await.expect("sync temp file");
        assert_eq!(
            tokio::fs::read(tmp.path()).await.expect("read temp file"),
            b"ok"
        );
    }

    #[tokio::test]
    #[serial]
    async fn write_hid_report_writes_bytes() {
        let tmp = NamedTempFile::new().expect("temp file");
        let file = tokio::fs::OpenOptions::new()
            .write(true)
            .truncate(true)
            .open(tmp.path())
            .await
            .expect("open temp file");
        let shared = Arc::new(Mutex::new(file));
        write_hid_report(&shared, &[1, 2, 3, 4])
            .await
            .expect("write succeeds");
        let contents = tokio::fs::read(tmp.path())
            .await
            .expect("read back temp file");
        assert_eq!(&contents, &[1, 2, 3, 4]);
    }
}

View File

@ -0,0 +1,510 @@
use std::env;
/// UVC VS probe/commit control sizes for protocol 1.1 (26 B) and 1.5 (34 B).
pub(crate) const STREAM_CTRL_SIZE_11: usize = 26;
pub(crate) const STREAM_CTRL_SIZE_15: usize = 34;
pub(crate) const STREAM_CTRL_SIZE_MAX: usize = STREAM_CTRL_SIZE_15;
/// Fixed payload size of the request-data buffer exchanged with the kernel.
pub(crate) const UVC_DATA_SIZE: usize = 60;
/// Start of the V4L2 private event range used by the UVC gadget driver.
pub(crate) const V4L2_EVENT_PRIVATE_START: u32 = 0x0800_0000;
// Gadget event codes, offsets from the private-event base.
// (`+ 0` removed from CONNECT: clippy `identity_op` fails the CI gate.)
pub(crate) const UVC_EVENT_CONNECT: u32 = V4L2_EVENT_PRIVATE_START;
pub(crate) const UVC_EVENT_DISCONNECT: u32 = V4L2_EVENT_PRIVATE_START + 1;
pub(crate) const UVC_EVENT_STREAMON: u32 = V4L2_EVENT_PRIVATE_START + 2;
pub(crate) const UVC_EVENT_STREAMOFF: u32 = V4L2_EVENT_PRIVATE_START + 3;
pub(crate) const UVC_EVENT_SETUP: u32 = V4L2_EVENT_PRIVATE_START + 4;
pub(crate) const UVC_EVENT_DATA: u32 = V4L2_EVENT_PRIVATE_START + 5;
// String descriptor indices for the control and streaming interfaces.
pub(crate) const UVC_STRING_CONTROL_IDX: u8 = 0;
pub(crate) const UVC_STRING_STREAMING_IDX: u8 = 1;
/// Configfs root of the lesavka gadget's UVC function.
pub(crate) const CONFIGFS_UVC_BASE: &str =
    "/sys/kernel/config/usb_gadget/lesavka/functions/uvc.usb0";
/// Device-to-host direction bit in bmRequestType.
pub(crate) const USB_DIR_IN: u8 = 0x80;
// UVC class-specific request codes.
pub(crate) const UVC_SET_CUR: u8 = 0x01;
pub(crate) const UVC_GET_CUR: u8 = 0x81;
pub(crate) const UVC_GET_MIN: u8 = 0x82;
pub(crate) const UVC_GET_MAX: u8 = 0x83;
pub(crate) const UVC_GET_RES: u8 = 0x84;
pub(crate) const UVC_GET_LEN: u8 = 0x85;
pub(crate) const UVC_GET_INFO: u8 = 0x86;
pub(crate) const UVC_GET_DEF: u8 = 0x87;
// VideoStreaming interface control selectors.
pub(crate) const UVC_VS_PROBE_CONTROL: u8 = 0x01;
pub(crate) const UVC_VS_COMMIT_CONTROL: u8 = 0x02;
// VideoControl interface: last-request error code control selector.
pub(crate) const UVC_VC_REQUEST_ERROR_CODE_CONTROL: u8 = 0x02;
/// Mirrors the kernel `v4l2_event_subscription` C layout.
#[repr(C)]
pub(crate) struct V4l2EventSubscription {
    pub(crate) type_: u32,
    pub(crate) id: u32,
    pub(crate) flags: u32,
    pub(crate) reserved: [u32; 5],
}

/// Payload union of the kernel `v4l2_event`; handled as raw bytes here.
#[repr(C)]
pub(crate) union V4l2EventUnion {
    pub(crate) data: [u8; 64],
    // Forces 8-byte alignment of the union.
    pub(crate) _align: u64,
}

/// Mirrors the kernel `v4l2_event` C layout.
#[repr(C)]
pub(crate) struct V4l2Event {
    pub(crate) type_: u32,
    pub(crate) u: V4l2EventUnion,
    pub(crate) pending: u32,
    pub(crate) sequence: u32,
    pub(crate) timestamp: libc::timespec,
    pub(crate) id: u32,
    pub(crate) reserved: [u32; 8],
}

/// USB SETUP packet fields (bmRequestType through wLength).
#[repr(C)]
#[derive(Clone, Copy)]
pub(crate) struct UsbCtrlRequest {
    pub(crate) b_request_type: u8,
    pub(crate) b_request: u8,
    pub(crate) w_value: u16,
    pub(crate) w_index: u16,
    pub(crate) w_length: u16,
}

/// Fixed-size data buffer exchanged with the UVC gadget driver.
#[repr(C)]
#[derive(Clone, Copy)]
pub(crate) struct UvcRequestData {
    pub(crate) length: i32,
    pub(crate) data: [u8; UVC_DATA_SIZE],
}

/// Resolved stream parameters for the UVC function.
#[derive(Clone, Copy)]
pub(crate) struct UvcConfig {
    pub(crate) width: u32,
    pub(crate) height: u32,
    pub(crate) fps: u32,
    // Frame interval; `UvcConfig::from_env` derives it from fps when 0.
    pub(crate) interval: u32,
    pub(crate) max_packet: u32,
    pub(crate) frame_size: u32,
}

/// Result of probing the controller FIFO sizes for a payload cap.
pub(crate) struct PayloadCap {
    // Cap in bytes after applying `pct`.
    pub(crate) limit: u32,
    // Percentage of the FIFO size applied to compute `limit`.
    pub(crate) pct: u32,
    // Where the FIFO numbers came from: "env", "dwc2.params", or "debugfs.params".
    pub(crate) source: &'static str,
    // Raw FIFO sizes as discovered; `compute_payload_cap` multiplies by 4,
    // i.e. they are treated as 32-bit words.
    pub(crate) periodic_dw: Option<u32>,
    pub(crate) non_periodic_dw: Option<u32>,
}

/// Mutable negotiation state for the UVC probe/commit handshake.
pub(crate) struct UvcState {
    pub(crate) cfg: UvcConfig,
    // Active control structure length (26 or 34 bytes, see `stream_ctrl_len`).
    pub(crate) ctrl_len: usize,
    // Buffers are sized for the larger (UVC 1.5) variant; only `ctrl_len`
    // bytes of each are meaningful.
    pub(crate) default: [u8; STREAM_CTRL_SIZE_MAX],
    pub(crate) probe: [u8; STREAM_CTRL_SIZE_MAX],
    pub(crate) commit: [u8; STREAM_CTRL_SIZE_MAX],
    // Last configfs snapshot that was logged; used to suppress duplicate logs.
    pub(crate) cfg_snapshot: Option<ConfigfsSnapshot>,
}

// NOTE(review): appears to track a class request awaiting its data stage —
// confirm against the event-loop code that populates it.
#[derive(Clone, Copy)]
pub(crate) struct PendingRequest {
    pub(crate) interface: u8,
    pub(crate) selector: u8,
    pub(crate) expected_len: usize,
}

/// Interface numbers assigned to the UVC control and streaming interfaces.
#[derive(Clone, Copy)]
pub(crate) struct UvcInterfaces {
    pub(crate) control: u8,
    pub(crate) streaming: u8,
}

/// Snapshot of the gadget's configfs streaming attributes.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) struct ConfigfsSnapshot {
    pub(crate) width: u32,
    pub(crate) height: u32,
    pub(crate) default_interval: u32,
    pub(crate) frame_interval: u32,
    pub(crate) maxpacket: u32,
    pub(crate) maxburst: u32,
}
impl UvcConfig {
    /// Build the UVC stream configuration from environment overrides.
    ///
    /// Reads geometry/fps/packet-size overrides, then clamps `max_packet`
    /// against the controller FIFO payload cap and the configfs
    /// `streaming_maxpacket` value, logging every clamp decision to stderr.
    pub(crate) fn from_env() -> Self {
        let width = env_u32("LESAVKA_UVC_WIDTH", 1280);
        let height = env_u32("LESAVKA_UVC_HEIGHT", 720);
        // Clamp fps to >= 1 so the interval division below cannot divide by zero.
        let fps = env_u32("LESAVKA_UVC_FPS", 25).max(1);
        let interval = env_u32("LESAVKA_UVC_INTERVAL", 0);
        let mut max_packet = env_u32("LESAVKA_UVC_MAXPACKET", 1024);
        // Default frame size budgets 2 bytes per pixel.
        let frame_size = env_u32("LESAVKA_UVC_FRAME_SIZE", width * height * 2);
        let bulk = env::var("LESAVKA_UVC_BULK").is_ok();
        if let Some(cap) = compute_payload_cap(bulk) {
            if max_packet > cap.limit {
                eprintln!(
                    "[lesavka-uvc] payload cap {}B ({}% from {}): clamp max_packet {} -> {} (periodic_dw={:?} non_periodic_dw={:?})",
                    cap.limit,
                    cap.pct,
                    cap.source,
                    max_packet,
                    cap.limit,
                    cap.periodic_dw,
                    cap.non_periodic_dw
                );
                max_packet = cap.limit;
            } else {
                eprintln!(
                    "[lesavka-uvc] payload cap {}B ({}% from {}): max_packet {} (periodic_dw={:?} non_periodic_dw={:?})",
                    cap.limit,
                    cap.pct,
                    cap.source,
                    max_packet,
                    cap.periodic_dw,
                    cap.non_periodic_dw
                );
            }
        } else {
            eprintln!(
                "[lesavka-uvc] payload cap unavailable; using max_packet {}",
                max_packet
            );
        }
        // The gadget's configfs maxpacket is a hard ceiling as well.
        if let Some(cfg_max) = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxpacket")) {
            if max_packet > cfg_max {
                eprintln!(
                    "[lesavka-uvc] configfs maxpacket {}: clamp max_packet {} -> {}",
                    cfg_max, max_packet, cfg_max
                );
                max_packet = cfg_max;
            } else {
                eprintln!(
                    "[lesavka-uvc] configfs maxpacket {}: max_packet {}",
                    cfg_max, max_packet
                );
            }
        }
        // Transport ceiling: bulk endpoints clamp to 512 B, otherwise 1024 B.
        // Fix: reuse the `bulk` flag computed above instead of re-reading the
        // LESAVKA_UVC_BULK env var a second time.
        max_packet = if bulk {
            max_packet.min(512)
        } else {
            max_packet.min(1024)
        };
        // Interval 0 means "derive from fps" (10_000_000 ticks per second).
        let interval = if interval == 0 { 10_000_000 / fps } else { interval };
        Self {
            width,
            height,
            fps,
            interval,
            max_packet,
            frame_size,
        }
    }
}
impl UvcState {
    /// Build the initial negotiation state for `cfg`.
    ///
    /// The probe and commit buffers start as copies of the default streaming
    /// control block, so reads issued before any host SET_CUR see the same
    /// values as `default`.
    pub(crate) fn new(cfg: UvcConfig) -> Self {
        let ctrl_len = stream_ctrl_len();
        let default = build_streaming_control(&cfg, ctrl_len);
        Self {
            cfg,
            ctrl_len,
            default,
            // Arrays are Copy, so probe/commit are independent copies.
            probe: default,
            commit: default,
            // No configfs snapshot has been logged yet.
            cfg_snapshot: None,
        }
    }
}
pub(crate) fn stream_ctrl_len() -> usize {
let value = env_u32("LESAVKA_UVC_CTRL_LEN", STREAM_CTRL_SIZE_15 as u32) as usize;
match value {
STREAM_CTRL_SIZE_11 | STREAM_CTRL_SIZE_15 => value,
_ => STREAM_CTRL_SIZE_11,
}
}
/// Read `name` from the environment as a `u32`, falling back to `default`
/// when the variable is unset or unparsable.
pub(crate) fn env_u32(name: &str, default: u32) -> u32 {
    match env::var(name) {
        Ok(raw) => raw.parse::<u32>().unwrap_or(default),
        Err(_) => default,
    }
}

/// Read `name` from the environment as a `u8`, when set and parsable.
pub(crate) fn env_u8(name: &str) -> Option<u8> {
    match env::var(name) {
        Ok(raw) => raw.parse::<u8>().ok(),
        Err(_) => None,
    }
}

/// Read `name` from the environment as a `u32`, when set and parsable.
pub(crate) fn env_u32_opt(name: &str) -> Option<u32> {
    match env::var(name) {
        Ok(raw) => raw.parse::<u32>().ok(),
        Err(_) => None,
    }
}
/// Parse an entire file as a single trimmed `u32`.
pub(crate) fn read_u32_file(path: &str) -> Option<u32> {
    let text = std::fs::read_to_string(path).ok()?;
    text.trim().parse::<u32>().ok()
}

/// Parse the first whitespace-delimited token of a file as a `u32`.
pub(crate) fn read_u32_first(path: &str) -> Option<u32> {
    let text = std::fs::read_to_string(path).ok()?;
    let token = text.split_whitespace().next()?;
    token.parse::<u32>().ok()
}
/// Read the gadget's current streaming attributes from configfs.
///
/// Returns `None` when any required attribute (width, height, default
/// interval, maxpacket) is unreadable; `frame_interval` and `maxburst`
/// default to 0 when absent.
pub(crate) fn read_configfs_snapshot() -> Option<ConfigfsSnapshot> {
    let width = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/wWidth"))?;
    let height = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/wHeight"))?;
    let default_interval = read_u32_file(&format!(
        "{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/dwDefaultFrameInterval"
    ))?;
    // dwFrameInterval may list several values; only the first is captured.
    let frame_interval = read_u32_first(&format!(
        "{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/dwFrameInterval"
    ))
    .unwrap_or(0);
    let maxpacket = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxpacket"))?;
    let maxburst = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxburst")).unwrap_or(0);
    Some(ConfigfsSnapshot {
        width,
        height,
        default_interval,
        frame_interval,
        maxpacket,
        maxburst,
    })
}
/// Log the configfs streaming snapshot when it changed since the last call.
///
/// Inputs: the mutable UVC state (holding the cached snapshot) and a label
/// for the log line. Outputs: none; `state.cfg_snapshot` is updated.
/// Why: callers may invoke this often, so identical snapshots are suppressed
/// to keep the log readable.
pub(crate) fn log_configfs_snapshot(state: &mut UvcState, label: &str) {
    let Some(current) = read_configfs_snapshot() else {
        eprintln!("[lesavka-uvc] configfs {label}: unavailable");
        return;
    };
    // Unchanged since the last logged snapshot: stay quiet.
    if state.cfg_snapshot == Some(current) {
        return;
    }
    eprintln!(
        "[lesavka-uvc] configfs {label}: {}x{} default_interval={} frame_interval={} maxpacket={} maxburst={}",
        current.width,
        current.height,
        current.default_interval,
        current.frame_interval,
        current.maxpacket,
        current.maxburst
    );
    state.cfg_snapshot = Some(current);
}
/// Clamp or zero-pad `bytes` to the host-requested length.
///
/// Inputs: the reply payload plus the wLength field from the SETUP packet.
/// Outputs: the payload resized to `min(w_length, UVC_DATA_SIZE)` bytes.
/// Why: control replies must be exactly the length the host asked for and can
/// never exceed the fixed kernel-side buffer.
pub(crate) fn adjust_length(mut bytes: Vec<u8>, w_length: u16) -> Vec<u8> {
    let want = (w_length as usize).min(UVC_DATA_SIZE);
    // `Vec::resize` both truncates (want < len) and zero-extends (want > len),
    // collapsing the original truncate/resize branching into one call.
    bytes.resize(want, 0);
    bytes
}
/// Store `val` into the first two bytes of `dst` in little-endian order.
pub(crate) fn write_le16(dst: &mut [u8], val: u16) {
    dst[..2].copy_from_slice(&val.to_le_bytes());
}

/// Store `val` into the first four bytes of `dst` in little-endian order.
pub(crate) fn write_le32(dst: &mut [u8], val: u32) {
    dst[..4].copy_from_slice(&val.to_le_bytes());
}

/// Load a little-endian `u32` from `src` starting at `offset`.
pub(crate) fn read_le32(src: &[u8], offset: usize) -> u32 {
    let mut raw = [0u8; 4];
    raw.copy_from_slice(&src[offset..offset + 4]);
    u32::from_le_bytes(raw)
}
/// Assemble the VS probe/commit control block advertised to the host.
///
/// The first 26 bytes follow the UVC 1.1 probe/commit layout; the extra
/// fields are only filled when `ctrl_len` requests the 34-byte (UVC 1.5)
/// variant. Offsets/field names below are per that layout — see the UVC spec.
pub(crate) fn build_streaming_control(
    cfg: &UvcConfig,
    ctrl_len: usize,
) -> [u8; STREAM_CTRL_SIZE_MAX] {
    let mut buf = [0u8; STREAM_CTRL_SIZE_MAX];
    // bmHint = 1: ask the host to keep dwFrameInterval fixed while negotiating.
    write_le16(&mut buf[0..2], 1);
    // bFormatIndex / bFrameIndex: first advertised format and frame.
    buf[2] = 1;
    buf[3] = 1;
    // dwFrameInterval.
    write_le32(&mut buf[4..8], cfg.interval);
    // wKeyFrameRate / wPFrameRate / wCompQuality / wCompWindowSize / wDelay: unused.
    write_le16(&mut buf[8..10], 0);
    write_le16(&mut buf[10..12], 0);
    write_le16(&mut buf[12..14], 0);
    write_le16(&mut buf[14..16], 0);
    write_le16(&mut buf[16..18], 0);
    // dwMaxVideoFrameSize and dwMaxPayloadTransferSize.
    write_le32(&mut buf[18..22], cfg.frame_size);
    write_le32(&mut buf[22..26], cfg.max_packet);
    if ctrl_len >= STREAM_CTRL_SIZE_15 {
        // UVC 1.5 extras: dwClockFrequency (48 MHz) plus framing/version bytes.
        write_le32(&mut buf[26..30], 48_000_000);
        buf[30] = 0x03;
        buf[31] = 0x01;
        buf[32] = 0x01;
        buf[33] = 0x01;
    }
    buf
}
/// Derive the maximum UVC payload size from the controller's TX FIFO sizes.
///
/// Inputs: whether the endpoint runs in bulk mode (prefers the non-periodic
/// FIFO) rather than isochronous (prefers the periodic FIFO).
/// Outputs: the byte cap plus provenance, or `None` when no FIFO information
/// is available or the computed cap is zero.
/// Why: oversizing max_packet beyond the controller FIFO causes transfer
/// failures, so the cap must be discovered rather than assumed.
pub(crate) fn compute_payload_cap(bulk: bool) -> Option<PayloadCap> {
    // Explicit operator override wins outright (pct reported as 100).
    if let Some(limit) = env_u32_opt("LESAVKA_UVC_MAXPAYLOAD_LIMIT") {
        return Some(PayloadCap {
            limit,
            pct: 100,
            source: "env",
            periodic_dw: None,
            non_periodic_dw: None,
        });
    }
    // First choice: dwc2 module parameters exposed under /sys/module.
    let mut periodic =
        read_fifo_min("/sys/module/dwc2/parameters/g_tx_fifo_size").map(|value| (value, "dwc2.params"));
    let mut non_periodic = read_fifo_min("/sys/module/dwc2/parameters/g_np_tx_fifo_size")
        .map(|value| (value, "dwc2.params"));
    // Fallback: fill whichever value is still missing from debugfs.
    if periodic.is_none() || non_periodic.is_none() {
        if let Some((periodic_debug, non_periodic_debug)) = read_debugfs_fifos() {
            if periodic.is_none() {
                periodic = periodic_debug.map(|value| (value, "debugfs.params"));
            }
            if non_periodic.is_none() {
                non_periodic = non_periodic_debug.map(|value| (value, "debugfs.params"));
            }
        }
    }
    let periodic_dw = periodic.map(|(value, _)| value);
    let non_periodic_dw = non_periodic.map(|(value, _)| value);
    // Pick the FIFO matching the transfer type, falling back to the other one.
    let (fifo_dw, source) = if bulk {
        if let Some((np, src)) = non_periodic {
            (np, src)
        } else if let Some((periodic_value, src)) = periodic {
            (periodic_value, src)
        } else {
            return None;
        }
    } else if let Some((periodic_value, src)) = periodic {
        (periodic_value, src)
    } else if let Some((np, src)) = non_periodic {
        (np, src)
    } else {
        return None;
    };
    // Apply a safety margin (default 95%, clamped to 1..=100).
    let pct = env_u32("LESAVKA_UVC_LIMIT_PCT", 95).clamp(1, 100);
    // FIFO sizes are in 32-bit words, hence the *4 to get bytes.
    let fifo_bytes = fifo_dw.saturating_mul(4);
    let limit = fifo_bytes.saturating_mul(pct) / 100;
    if limit == 0 {
        return None;
    }
    Some(PayloadCap {
        limit,
        pct,
        source,
        periodic_dw,
        non_periodic_dw,
    })
}
/// Read a dwc2 FIFO-size parameter file and return its smallest positive entry.
///
/// The file may contain one value or a comma/whitespace-separated list;
/// tokens that fail to parse as `u32` or equal zero are skipped. Returns
/// `None` when the file is unreadable or holds no positive values.
pub(crate) fn read_fifo_min(path: &str) -> Option<u32> {
    let contents = std::fs::read_to_string(path).ok()?;
    let mut smallest: Option<u32> = None;
    for token in contents.split(|c: char| c == ',' || c.is_whitespace()) {
        if let Ok(parsed) = token.trim().parse::<u32>() {
            if parsed > 0 {
                smallest = Some(match smallest {
                    Some(current) => current.min(parsed),
                    None => parsed,
                });
            }
        }
    }
    smallest
}
/// Probe the UDC debugfs `params` file for dwc2 TX FIFO sizes.
///
/// Returns `(periodic, non_periodic)` word counts when at least one side was
/// found. The periodic value is the minimum across all positive indexed
/// `g_tx_fifo_size[...]` entries; `g_np_tx_fifo_size` is taken verbatim
/// (last occurrence wins, zero included).
pub(crate) fn read_debugfs_fifos() -> Option<(Option<u32>, Option<u32>)> {
    // Use the first UDC with a UTF-8 name; gadget boards expose exactly one.
    let udc = std::fs::read_dir("/sys/class/udc")
        .ok()?
        .flatten()
        .find_map(|entry| entry.file_name().into_string().ok())?;
    let text = std::fs::read_to_string(format!("/sys/kernel/debug/usb/{udc}/params")).ok()?;
    let mut periodic: Option<u32> = None;
    let mut non_periodic: Option<u32> = None;
    for line in text.lines() {
        let Some((raw_key, raw_value)) = line.split_once(':') else {
            continue;
        };
        let Some(value) = raw_value.trim().parse::<u32>().ok() else {
            continue;
        };
        let key = raw_key.trim();
        if key == "g_np_tx_fifo_size" {
            non_periodic = Some(value);
        } else if key.starts_with("g_tx_fifo_size[") && value > 0 {
            periodic = Some(periodic.map_or(value, |current| current.min(value)));
        }
    }
    if periodic.is_none() && non_periodic.is_none() {
        None
    } else {
        Some((periodic, non_periodic))
    }
}
#[cfg(test)]
mod tests {
    use super::{
        STREAM_CTRL_SIZE_11, STREAM_CTRL_SIZE_15, UvcConfig, adjust_length,
        build_streaming_control, compute_payload_cap, read_le32, stream_ctrl_len,
    };
    use serial_test::serial;
    use temp_env::with_var;
    // Mutates process-wide env, hence #[serial].
    #[test]
    #[serial]
    fn stream_ctrl_len_falls_back_on_invalid_env() {
        // An unparsable override must fall back to the UVC 1.1 size.
        with_var("LESAVKA_UVC_CTRL_LEN", Some("bogus"), || {
            assert_eq!(stream_ctrl_len(), STREAM_CTRL_SIZE_11);
        });
        // "34" selects the extended (UVC 1.5) control size.
        with_var("LESAVKA_UVC_CTRL_LEN", Some("34"), || {
            assert_eq!(stream_ctrl_len(), STREAM_CTRL_SIZE_15);
        });
    }
    #[test]
    fn adjust_length_pads_and_truncates() {
        // Oversized payloads are cut; undersized ones are zero-padded.
        assert_eq!(adjust_length(vec![1, 2, 3], 2), vec![1, 2]);
        assert_eq!(adjust_length(vec![1, 2], 4), vec![1, 2, 0, 0]);
    }
    #[test]
    fn build_streaming_control_sets_interval_frame_size_and_payload() {
        let cfg = UvcConfig {
            width: 1280,
            height: 720,
            fps: 25,
            interval: 400_000,
            max_packet: 512,
            frame_size: 1234,
        };
        let control = build_streaming_control(&cfg, STREAM_CTRL_SIZE_15);
        // Format index at byte 2, then the three little-endian config fields.
        assert_eq!(control[2], 1);
        assert_eq!(read_le32(&control, 4), 400_000);
        assert_eq!(read_le32(&control, 18), 1234);
        assert_eq!(read_le32(&control, 22), 512);
    }
    // Mutates process-wide env, hence #[serial].
    #[test]
    #[serial]
    fn compute_payload_cap_honors_explicit_override() {
        with_var("LESAVKA_UVC_MAXPAYLOAD_LIMIT", Some("333"), || {
            // The env override bypasses FIFO discovery entirely.
            let cap = compute_payload_cap(false).expect("env override should win");
            assert_eq!(cap.limit, 333);
            assert_eq!(cap.source, "env");
        });
    }
}

View File

@ -0,0 +1,403 @@
use anyhow::{Context, Result};
use crate::uvc_control::model::{
PendingRequest, STREAM_CTRL_SIZE_11, STREAM_CTRL_SIZE_15, UVC_DATA_SIZE,
UVC_GET_CUR, UVC_GET_DEF, UVC_GET_INFO, UVC_GET_LEN, UVC_GET_MAX, UVC_GET_MIN,
UVC_GET_RES, UVC_SET_CUR, UVC_VC_REQUEST_ERROR_CODE_CONTROL, UVC_VS_COMMIT_CONTROL,
UVC_VS_PROBE_CONTROL, USB_DIR_IN, UvcInterfaces, UvcRequestData, UvcState,
adjust_length, build_streaming_control, log_configfs_snapshot, read_le32, write_le32,
};
/// Dispatch a UVC class-specific SETUP packet from the gadget.
///
/// Inputs: the uvc-gadget fd, the `UVCIOC_SEND_RESPONSE` ioctl number, the
/// mutable device state, the slot that records an expected follow-up DATA
/// stage, the control/streaming interface numbers, the raw setup packet, and
/// a debug flag.
/// Outputs: none; every path answers the host via the ioctl, either with a
/// payload (`send_response`) or a stall (`send_stall`).
pub(crate) fn handle_setup(
    fd: i32,
    uvc_send_response: libc::c_ulong,
    state: &mut UvcState,
    pending: &mut Option<PendingRequest>,
    interfaces: UvcInterfaces,
    req: crate::uvc_control::model::UsbCtrlRequest,
    debug: bool,
) {
    // Control selector travels in the high byte of wValue.
    let selector = (req.w_value >> 8) as u8;
    let interface_lo = (req.w_index & 0xff) as u8;
    let interface_hi = (req.w_index >> 8) as u8;
    // NOTE(review): the interface is normally the low byte of wIndex; this
    // prefers the high byte whenever it matches one of ours, which looks like
    // a workaround for a host/UDC quirk — confirm which stacks need it.
    let interface_raw = if interface_hi == interfaces.streaming || interface_hi == interfaces.control {
        interface_hi
    } else if interface_lo == interfaces.streaming || interface_lo == interfaces.control {
        interface_lo
    } else {
        interface_hi
    };
    let is_in = (req.b_request_type & USB_DIR_IN) != 0;
    // Probe/commit wLength tells us which control-block size the host speaks.
    if matches!(selector, UVC_VS_PROBE_CONTROL | UVC_VS_COMMIT_CONTROL) {
        maybe_update_ctrl_len(state, req.w_length, debug);
    }
    let interface = map_interface(interface_raw, selector, interfaces, debug);
    // OUT SET_CUR: the payload arrives in a later DATA stage.
    if !is_in && req.b_request == UVC_SET_CUR {
        let len = req.w_length as usize;
        if interface == interfaces.control {
            // VideoControl writes are acknowledged with a zero buffer and
            // otherwise ignored; no pending DATA stage is recorded.
            let payload = vec![0u8; len.min(UVC_DATA_SIZE)];
            let _ = send_response(fd, uvc_send_response, &payload);
            if debug {
                eprintln!(
                    "[lesavka-uvc] VC SET_CUR ack len={} iface={} sel={}",
                    req.w_length, interface, selector
                );
            }
            return;
        }
        if interface != interfaces.streaming {
            // Neither of our interfaces: refuse the write.
            let _ = send_stall(fd, uvc_send_response);
            return;
        }
        if len > UVC_DATA_SIZE {
            // The DATA stage could not fit our buffer; stall up front.
            eprintln!(
                "[lesavka-uvc] SET_CUR too large len={} (max={}); stalling",
                len, UVC_DATA_SIZE
            );
            let _ = send_stall(fd, uvc_send_response);
            return;
        }
        // Remember what the coming DATA stage belongs to, then ack the setup.
        *pending = Some(PendingRequest {
            interface,
            selector,
            expected_len: len,
        });
        let payload = vec![0u8; len];
        let _ = send_response(fd, uvc_send_response, &payload);
        if debug {
            eprintln!(
                "[lesavka-uvc] SET_CUR queued len={} iface={} sel={}",
                req.w_length, interface, selector
            );
        }
        return;
    }
    // Any other OUT request is unsupported.
    if !is_in {
        let _ = send_stall(fd, uvc_send_response);
        return;
    }
    // IN request: build the reply from current state, stall when unknown.
    let payload = build_in_response(
        state,
        interfaces,
        interface,
        selector,
        req.b_request,
        req.w_length,
    );
    match payload {
        Some(bytes) => {
            if debug {
                eprintln!(
                    "[lesavka-uvc] send IN response rq=0x{:02x} sel={} len={}",
                    req.b_request,
                    selector,
                    bytes.len()
                );
            }
            let _ = send_response(fd, uvc_send_response, &bytes);
        }
        None => {
            let _ = send_stall(fd, uvc_send_response);
        }
    }
}
/// Force a request onto the interface its selector implies.
///
/// Probe/commit selectors always belong to the streaming interface and the
/// request-error-code selector to the control interface, regardless of what
/// the host put in `wIndex`; any other selector keeps the raw interface.
/// The probe/commit check runs first, so a selector value shared with the
/// error-code control resolves to streaming.
pub(crate) fn map_interface(raw: u8, selector: u8, interfaces: UvcInterfaces, debug: bool) -> u8 {
    let resolved = if matches!(selector, UVC_VS_PROBE_CONTROL | UVC_VS_COMMIT_CONTROL) {
        interfaces.streaming
    } else if selector == UVC_VC_REQUEST_ERROR_CODE_CONTROL {
        interfaces.control
    } else {
        raw
    };
    if debug && resolved != raw {
        eprintln!("[lesavka-uvc] remapped interface {raw} -> {resolved} for selector {selector}");
    }
    resolved
}
/// Adopt a host-advertised probe/commit control length when it is valid.
///
/// Only the two known control sizes (UVC 1.1 / 1.5) are accepted; on a
/// change, the default/probe/commit blocks are rebuilt so later replies use
/// the new layout.
pub(crate) fn maybe_update_ctrl_len(state: &mut UvcState, w_length: u16, debug: bool) {
    let requested = usize::from(w_length);
    let known = requested == STREAM_CTRL_SIZE_11 || requested == STREAM_CTRL_SIZE_15;
    if !known || state.ctrl_len == requested {
        return;
    }
    state.ctrl_len = requested;
    let rebuilt = build_streaming_control(&state.cfg, state.ctrl_len);
    state.default = rebuilt;
    state.probe = rebuilt;
    state.commit = rebuilt;
    if debug {
        eprintln!("[lesavka-uvc] ctrl_len set to {}", state.ctrl_len);
    }
}
/// Consume the DATA stage of a previously queued SET_CUR request.
///
/// Inputs: unused fd/ioctl handles (kept for signature symmetry with
/// `handle_setup`), the mutable device state, the pending-request slot, the
/// interface numbers, the received data stage, and a debug flag.
/// Outputs: none; probe/commit payloads are sanitized into `state`, all
/// other data stages are dropped.
pub(crate) fn handle_data(
    _fd: i32,
    _uvc_send_response: libc::c_ulong,
    state: &mut UvcState,
    pending: &mut Option<PendingRequest>,
    interfaces: UvcInterfaces,
    data: UvcRequestData,
    debug: bool,
) {
    // A DATA stage is only meaningful if a SET_CUR setup queued one.
    let Some(pending_request) = pending.take() else {
        if debug {
            eprintln!("[lesavka-uvc] DATA with no pending request; ignoring");
        }
        return;
    };
    // Negative length signals no usable payload.
    if data.length < 0 {
        return;
    }
    let len = data.length as usize;
    // A mismatch is logged but tolerated; we still process what arrived.
    if debug && pending_request.expected_len != 0 && len != pending_request.expected_len {
        eprintln!(
            "[lesavka-uvc] DATA len mismatch: expected={} got={}",
            pending_request.expected_len, len
        );
    }
    // Clamp to the buffer so a bogus length cannot overrun.
    let slice = &data.data[..len.min(data.data.len())];
    if debug && slice.len() >= STREAM_CTRL_SIZE_11 {
        let interval = read_le32(slice, 4);
        let payload = read_le32(slice, 22);
        eprintln!(
            "[lesavka-uvc] data ctrl fmt={} frame={} interval={} payload={}",
            slice[2], slice[3], interval, payload
        );
    }
    // Only streaming-interface probe/commit payloads change state.
    if pending_request.interface == interfaces.streaming
        && matches!(
            pending_request.selector,
            UVC_VS_PROBE_CONTROL | UVC_VS_COMMIT_CONTROL
        )
    {
        // Merge host wishes into our defaults rather than trusting them raw.
        let sanitized = sanitize_streaming_control(slice, state);
        if pending_request.selector == UVC_VS_PROBE_CONTROL {
            state.probe = sanitized;
            if debug {
                let interval = read_le32(&state.probe, 4);
                let payload = read_le32(&state.probe, 22);
                eprintln!(
                    "[lesavka-uvc] probe set interval={} payload={}",
                    interval, payload
                );
                log_configfs_snapshot(state, "probe");
            }
        } else {
            state.commit = sanitized;
            if debug {
                let interval = read_le32(&state.commit, 4);
                let payload = read_le32(&state.commit, 22);
                eprintln!(
                    "[lesavka-uvc] commit set interval={} payload={}",
                    interval, payload
                );
                log_configfs_snapshot(state, "commit");
            }
        }
    }
}
/// Build the payload for an IN class request, sized to the host's `wLength`.
///
/// Streaming-interface requests are answered from the probe/commit state and
/// control-interface requests from the static control tables; any other
/// interface yields `None`, which the caller turns into a stall.
pub(crate) fn build_in_response(
    state: &UvcState,
    interfaces: UvcInterfaces,
    interface: u8,
    selector: u8,
    request: u8,
    w_length: u16,
) -> Option<Vec<u8>> {
    let raw = if interface == interfaces.streaming {
        build_streaming_response(state, selector, request)
    } else if interface == interfaces.control {
        build_control_response(selector, request)
    } else {
        None
    }?;
    Some(adjust_length(raw, w_length))
}
pub(crate) fn build_streaming_response(
state: &UvcState,
selector: u8,
request: u8,
) -> Option<Vec<u8>> {
let current = match selector {
UVC_VS_PROBE_CONTROL => state.probe,
UVC_VS_COMMIT_CONTROL => state.commit,
_ => return None,
};
match request {
UVC_GET_INFO => Some(vec![0x03]),
UVC_GET_LEN => Some((state.ctrl_len as u16).to_le_bytes().to_vec()),
UVC_GET_CUR => Some(current[..state.ctrl_len].to_vec()),
UVC_GET_MIN | UVC_GET_MAX | UVC_GET_DEF | UVC_GET_RES => {
Some(state.default[..state.ctrl_len].to_vec())
}
_ => None,
}
}
pub(crate) fn build_control_response(selector: u8, request: u8) -> Option<Vec<u8>> {
match request {
UVC_GET_INFO => Some(vec![0x03]),
UVC_GET_LEN => Some(1u16.to_le_bytes().to_vec()),
UVC_GET_CUR | UVC_GET_MIN | UVC_GET_MAX | UVC_GET_DEF | UVC_GET_RES => {
if selector == UVC_VC_REQUEST_ERROR_CODE_CONTROL {
Some(vec![0x00])
} else {
Some(vec![0x00])
}
}
_ => None,
}
}
/// Merge a host-supplied probe/commit block into our defaults.
///
/// Only fields we honor are taken over: format/frame index (accepted only
/// when 1, the sole descriptor we expose), a non-zero frame interval, and
/// the payload size clamped to the configured max packet. Everything else —
/// and any block shorter than the UVC 1.1 size — keeps the defaults.
pub(crate) fn sanitize_streaming_control(
    data: &[u8],
    state: &UvcState,
) -> [u8; crate::uvc_control::model::STREAM_CTRL_SIZE_MAX] {
    let mut merged = state.default;
    if data.len() < STREAM_CTRL_SIZE_11 {
        // Too short to be a valid control block; leave defaults untouched.
        return merged;
    }
    if data[2] == 1 {
        merged[2] = 1;
    }
    if data[3] == 1 {
        merged[3] = 1;
    }
    let interval = read_le32(data, 4);
    if interval != 0 {
        write_le32(&mut merged[4..8], interval);
    }
    let host_payload = read_le32(data, 22);
    if host_payload > 0 {
        write_le32(&mut merged[22..26], host_payload.min(state.cfg.max_packet));
    }
    merged
}
/// Deliver a control-request reply to the host via `UVCIOC_SEND_RESPONSE`.
///
/// Inputs: the uvc-gadget fd, the ioctl request number, and the reply bytes.
/// Outputs: `Ok(())` on success, or the OS error with ioctl context.
pub(crate) fn send_response(fd: i32, req: libc::c_ulong, payload: &[u8]) -> Result<()> {
    let mut resp = UvcRequestData {
        // NOTE(review): `length` reports the full payload size even when the
        // copy below clamps to UVC_DATA_SIZE; callers appear to pre-clamp via
        // adjust_length, so the two should never disagree — confirm.
        length: payload.len() as i32,
        data: [0u8; UVC_DATA_SIZE],
    };
    let n = payload.len().min(UVC_DATA_SIZE);
    resp.data[..n].copy_from_slice(&payload[..n]);
    // SAFETY: `resp` is a fully initialized UvcRequestData that outlives the
    // call, and the kernel only accesses it for the duration of the ioctl; a
    // bad `fd` makes the ioctl fail cleanly with -1 rather than misbehave.
    let rc = unsafe { libc::ioctl(fd, req, &resp) };
    if rc < 0 {
        let error = std::io::Error::last_os_error();
        eprintln!("[lesavka-uvc] send_response failed: {error}");
        return Err(error).context("UVCIOC_SEND_RESPONSE");
    }
    Ok(())
}
/// Signal a protocol stall to the host via `UVCIOC_SEND_RESPONSE`.
///
/// Inputs: the uvc-gadget fd and the ioctl request number.
/// Outputs: `Ok(())` on success, or the OS error with ioctl context.
/// Why: a response with `length == -1` is how the uvc function driver is
/// told to stall the control endpoint for unsupported requests.
pub(crate) fn send_stall(fd: i32, req: libc::c_ulong) -> Result<()> {
    let resp = UvcRequestData {
        length: -1,
        data: [0u8; UVC_DATA_SIZE],
    };
    // SAFETY: `resp` is a fully initialized UvcRequestData that outlives the
    // call, and the kernel only accesses it for the duration of the ioctl; a
    // bad `fd` makes the ioctl fail cleanly with -1 rather than misbehave.
    let rc = unsafe { libc::ioctl(fd, req, &resp) };
    if rc < 0 {
        let error = std::io::Error::last_os_error();
        eprintln!("[lesavka-uvc] send_stall failed: {error}");
        return Err(error).context("UVCIOC_SEND_RESPONSE(stall)");
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::{
        build_control_response, build_streaming_response, map_interface,
        maybe_update_ctrl_len, sanitize_streaming_control,
    };
    use crate::uvc_control::model::{
        STREAM_CTRL_SIZE_11, STREAM_CTRL_SIZE_15, UVC_GET_CUR, UVC_GET_INFO,
        UVC_GET_LEN, UVC_VC_REQUEST_ERROR_CODE_CONTROL, UVC_VS_PROBE_CONTROL,
        UvcConfig, UvcInterfaces, UvcState, write_le32,
    };
    // Shared fixture: a 720p25 state with a 512-byte max packet.
    fn sample_state() -> UvcState {
        UvcState::new(UvcConfig {
            width: 1280,
            height: 720,
            fps: 25,
            interval: 400_000,
            max_packet: 512,
            frame_size: 1024,
        })
    }
    #[test]
    fn map_interface_prefers_streaming_for_probe_requests() {
        let interfaces = UvcInterfaces {
            control: 0,
            streaming: 1,
        };
        // Probe forces streaming; error-code forces control, even when the
        // raw interface says otherwise.
        assert_eq!(map_interface(0, UVC_VS_PROBE_CONTROL, interfaces, false), 1);
        assert_eq!(map_interface(1, UVC_VC_REQUEST_ERROR_CODE_CONTROL, interfaces, false), 0);
    }
    #[test]
    fn maybe_update_ctrl_len_rebuilds_defaults() {
        let mut state = sample_state();
        // Both known sizes must be accepted, in either direction.
        maybe_update_ctrl_len(&mut state, STREAM_CTRL_SIZE_11 as u16, false);
        assert_eq!(state.ctrl_len, STREAM_CTRL_SIZE_11);
        maybe_update_ctrl_len(&mut state, STREAM_CTRL_SIZE_15 as u16, false);
        assert_eq!(state.ctrl_len, STREAM_CTRL_SIZE_15);
    }
    #[test]
    fn build_control_response_handles_standard_queries() {
        assert_eq!(build_control_response(0, UVC_GET_INFO), Some(vec![0x03]));
        assert_eq!(build_control_response(0, UVC_GET_LEN), Some(vec![1, 0]));
        // The request-error control reads 0x00 ("no error").
        assert_eq!(
            build_control_response(UVC_VC_REQUEST_ERROR_CODE_CONTROL, UVC_GET_CUR),
            Some(vec![0x00])
        );
    }
    #[test]
    fn build_streaming_response_uses_current_and_default_profiles() {
        let state = sample_state();
        let cur = build_streaming_response(&state, UVC_VS_PROBE_CONTROL, UVC_GET_CUR)
            .expect("current profile should exist");
        let len = build_streaming_response(&state, UVC_VS_PROBE_CONTROL, UVC_GET_LEN)
            .expect("length should exist");
        // CUR is trimmed to ctrl_len and GET_LEN reports that length.
        assert_eq!(cur.len(), state.ctrl_len);
        assert_eq!(len, (state.ctrl_len as u16).to_le_bytes().to_vec());
    }
    #[test]
    fn sanitize_streaming_control_clamps_payload_to_max_packet() {
        let state = sample_state();
        let mut data = vec![0u8; STREAM_CTRL_SIZE_11];
        data[2] = 1;
        data[3] = 1;
        write_le32(&mut data[4..8], 123_456);
        // 9_999 exceeds the configured max_packet of 512 and must be clamped.
        write_le32(&mut data[22..26], 9_999);
        let sanitized = sanitize_streaming_control(&data, &state);
        assert_eq!(sanitized[2], 1);
        assert_eq!(sanitized[3], 1);
        assert_eq!(crate::uvc_control::model::read_le32(&sanitized, 4), 123_456);
        assert_eq!(crate::uvc_control::model::read_le32(&sanitized, 22), 512);
    }
}

204
server/src/uvc_runtime.rs Normal file
View File

@ -0,0 +1,204 @@
#![forbid(unsafe_code)]
use anyhow::Context as _;
use std::path::Path;
use std::time::Duration;
use tokio::process::Command;
use tracing::{info, warn};
use crate::gadget::UsbGadget;
/// Pick the UVC gadget video node.
///
/// Resolution order: the `LESAVKA_UVC_DEV` env override, the stable by-path
/// symlink derived from the gadget controller, a udev scan for a
/// `video_output`-capable node tied to that controller, and finally any
/// `video_output` node at all.
/// Why: the relay must target the gadget output itself, not an unrelated
/// capture card that happens to exist on the same machine.
pub fn pick_uvc_device() -> anyhow::Result<String> {
    if let Ok(path) = std::env::var("LESAVKA_UVC_DEV") {
        return Ok(path);
    }
    let ctrl = UsbGadget::find_controller().ok();
    // Prefer the stable by-path symlink when the controller is known.
    if let Some(controller) = ctrl.as_deref() {
        let by_path = format!("/dev/v4l/by-path/platform-{controller}-video-index0");
        if Path::new(&by_path).exists() {
            return Ok(by_path);
        }
    }
    let mut any_output: Option<String> = None;
    if let Ok(mut enumerator) = udev::Enumerator::new() {
        let _ = enumerator.match_subsystem("video4linux");
        if let Ok(devices) = enumerator.scan_devices() {
            for device in devices {
                let caps = device
                    .property_value("ID_V4L_CAPABILITIES")
                    .and_then(|value| value.to_str())
                    .unwrap_or_default();
                // Skip capture-only nodes; the gadget side is an output.
                if !caps.contains(":video_output:") {
                    continue;
                }
                let Some(devnode) = device.devnode() else {
                    continue;
                };
                let devnode = devnode.to_string_lossy().into_owned();
                let product = device
                    .property_value("ID_V4L_PRODUCT")
                    .and_then(|value| value.to_str())
                    .unwrap_or_default();
                let id_path = device
                    .property_value("ID_PATH")
                    .and_then(|value| value.to_str())
                    .unwrap_or_default();
                // A node tied to the gadget controller wins immediately.
                let matches_ctrl = ctrl
                    .as_deref()
                    .is_some_and(|controller| product == controller || id_path.contains(controller));
                if matches_ctrl {
                    return Ok(devnode);
                }
                // Remember the first output node as a last resort.
                if any_output.is_none() {
                    any_output = Some(devnode);
                }
            }
        }
    }
    any_output
        .ok_or_else(|| anyhow::anyhow!("no video_output v4l2 node found; set LESAVKA_UVC_DEV"))
}
/// Resolve the UVC control helper binary path.
///
/// Inputs: none.
/// Outputs: the `LESAVKA_UVC_CTRL_BIN` override when set, otherwise the
/// default install location.
/// Why: production installs ship the helper as a separate binary, but CI and
/// local development sometimes need to override that location.
#[must_use]
pub fn uvc_ctrl_bin() -> String {
    match std::env::var("LESAVKA_UVC_CTRL_BIN") {
        Ok(path) => path,
        Err(_) => "/usr/local/bin/lesavka-uvc".to_string(),
    }
}
/// Spawn the external UVC control helper.
///
/// Inputs: the helper binary path plus the selected UVC device node.
/// Outputs: a running child process handle.
/// Why: the helper owns low-level configfs and V4L2 control handling that we
/// keep out of the main gRPC server process.
pub fn spawn_uvc_control(bin: &str, uvc_dev: &str) -> anyhow::Result<tokio::process::Child> {
    let mut helper = Command::new(bin);
    helper.arg("--device").arg(uvc_dev);
    helper.spawn().context("spawning lesavka-uvc")
}
/// Supervise the external UVC control helper forever.
///
/// Inputs: the helper binary path.
/// Outputs: none; the task loops until the process exits.
/// Why: UVC device nodes can appear after boot, so the supervisor waits for a
/// usable device and restarts the helper whenever it exits.
pub async fn supervise_uvc_control(bin: String) {
let mut waiting_logged = false;
loop {
let uvc_dev = match pick_uvc_device() {
Ok(device) => {
if waiting_logged {
info!(%device, "📷 UVC device discovered");
waiting_logged = false;
}
device
}
Err(error) => {
if !waiting_logged {
warn!("⚠️ UVC device not ready: {error:#}");
waiting_logged = true;
}
tokio::time::sleep(Duration::from_secs(2)).await;
continue;
}
};
match spawn_uvc_control(&bin, &uvc_dev) {
Ok(mut child) => {
info!(%uvc_dev, "📷 UVC control helper started");
match child.wait().await {
Ok(status) => {
warn!(%uvc_dev, "⚠️ lesavka-uvc exited: {status}");
}
Err(error) => {
warn!(%uvc_dev, "⚠️ lesavka-uvc wait failed: {error:#}");
}
}
}
Err(error) => {
warn!(%uvc_dev, "⚠️ failed to start lesavka-uvc: {error:#}");
}
}
tokio::time::sleep(Duration::from_secs(2)).await;
}
}
#[cfg(test)]
mod tests {
    use super::{pick_uvc_device, spawn_uvc_control, uvc_ctrl_bin};
    use serial_test::serial;
    use std::fs;
    use std::os::unix::fs::PermissionsExt;
    use temp_env::with_var;
    use tempfile::tempdir;
    // Mutates process-wide env, hence #[serial].
    #[test]
    #[serial]
    fn uvc_ctrl_bin_prefers_env_override() {
        // Without an override the shipped install path is used.
        with_var("LESAVKA_UVC_CTRL_BIN", None::<&str>, || {
            assert_eq!(uvc_ctrl_bin(), "/usr/local/bin/lesavka-uvc");
        });
        with_var("LESAVKA_UVC_CTRL_BIN", Some("/tmp/uvc-helper"), || {
            assert_eq!(uvc_ctrl_bin(), "/tmp/uvc-helper");
        });
    }
    // Mutates process-wide env, hence #[serial].
    #[test]
    #[serial]
    fn pick_uvc_device_prefers_env_override() {
        // The env override short-circuits controller and udev discovery.
        with_var("LESAVKA_UVC_DEV", Some("/dev/video-test"), || {
            assert_eq!(pick_uvc_device().unwrap(), "/dev/video-test");
        });
    }
    #[tokio::test]
    #[serial]
    async fn spawn_uvc_control_runs_the_helper_script() {
        let dir = tempdir().expect("tempdir");
        let output = dir.path().join("args.txt");
        let script = dir.path().join("helper.sh");
        // Stand-in helper: records its argv so we can assert the contract.
        fs::write(
            &script,
            format!(
                "#!/usr/bin/env bash\nset -euo pipefail\nprintf '%s %s' \"$1\" \"$2\" > '{}'\n",
                output.display()
            ),
        )
        .expect("write helper");
        // The script must be executable for spawn() to succeed.
        let mut perms = fs::metadata(&script).expect("metadata").permissions();
        perms.set_mode(0o755);
        fs::set_permissions(&script, perms).expect("chmod helper");
        let mut child = spawn_uvc_control(script.to_str().unwrap(), "/dev/video42")
            .expect("helper should spawn");
        let status = child.wait().await.expect("wait helper");
        assert!(status.success());
        // The helper is invoked as `<bin> --device <node>`.
        assert_eq!(
            fs::read_to_string(output).expect("read output"),
            "--device /dev/video42"
        );
    }
}

View File

@ -9,66 +9,19 @@ use gstreamer as gst;
use gstreamer_app as gst_app;
use lesavka_common::lesavka::VideoPacket;
use std::sync::Arc;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::OnceLock;
use std::sync::atomic::{AtomicBool, AtomicU32, AtomicU64, Ordering};
use tokio_stream::wrappers::ReceiverStream;
use tonic::Status;
use tracing::{Level, debug, enabled, error, info, trace, warn};
use crate::camera::{CameraCodec, CameraConfig};
pub use crate::video_sinks::{CameraRelay, HdmiSink, WebcamSink};
use crate::video_support::{
adjust_effective_fps, contains_idr, default_eye_fps, env_u32, env_usize, should_send_frame,
};
const EYE_ID: [&str; 2] = ["l", "r"];
static START: std::sync::OnceLock<gst::ClockTime> = std::sync::OnceLock::new();
static DEV_MODE: std::sync::OnceLock<bool> = std::sync::OnceLock::new();
fn env_u32(name: &str, default: u32) -> u32 {
std::env::var(name)
.ok()
.and_then(|v| v.parse::<u32>().ok())
.unwrap_or(default)
}
fn dev_mode_enabled() -> bool {
*DEV_MODE.get_or_init(|| std::env::var("LESAVKA_DEV_MODE").is_ok())
}
fn pick_h264_decoder() -> &'static str {
if gst::ElementFactory::find("v4l2h264dec").is_some() {
"v4l2h264dec"
} else if gst::ElementFactory::find("v4l2slh264dec").is_some() {
"v4l2slh264dec"
} else if gst::ElementFactory::find("omxh264dec").is_some() {
"omxh264dec"
} else {
"avdec_h264"
}
}
fn contains_idr(h264: &[u8]) -> bool {
// naive AnnexB scan for H.264 IDR (NAL type 5)
let mut i = 0;
while i + 4 < h264.len() {
// find start code 0x000001 or 0x00000001
if h264[i] == 0 && h264[i + 1] == 0 {
let offset = if h264[i + 2] == 1 {
3
} else if h264[i + 2] == 0 && h264[i + 3] == 1 {
4
} else {
i += 1;
continue;
};
let nal_idx = i + offset;
if nal_idx < h264.len() {
let nal = h264[nal_idx] & 0x1F;
if nal == 5 {
return true;
}
}
}
i += 1;
}
false
}
static START: OnceLock<gst::ClockTime> = OnceLock::new();
pub struct VideoStream {
_pipeline: gst::Pipeline,
@ -77,6 +30,7 @@ pub struct VideoStream {
impl Stream for VideoStream {
type Item = Result<VideoPacket, Status>;
fn poll_next(
mut self: std::pin::Pin<&mut Self>,
cx: &mut std::task::Context<'_>,
@ -87,25 +41,24 @@ impl Stream for VideoStream {
impl Drop for VideoStream {
fn drop(&mut self) {
// shut down nicely - avoids the “dispose element … READY/PLAYING …” spam
let _ = self._pipeline.set_state(gst::State::Null);
}
}
/// Capture one eye stream from the local V4L2 gadget and expose it as a gRPC stream.
///
/// Inputs: the V4L2 device node, logical eye id, and negotiated bitrate cap.
/// Outputs: a `VideoStream` that yields H.264 access units for the requested eye.
/// Why: the server keeps bitrate-aware pacing close to the capture pipeline so it can drop
/// frames before they build up in gRPC queues and destabilize downstream playback.
pub async fn eye_ball(dev: &str, id: u32, max_bitrate_kbit: u32) -> anyhow::Result<VideoStream> {
let eye = EYE_ID[id as usize];
gst::init().context("gst init")?;
let bitrate_default_fps = match max_bitrate_kbit {
0 => 25,
1..=2_500 => 15,
2_501..=4_000 => 20,
_ => 25,
};
let target_fps = env_u32("LESAVKA_EYE_FPS", bitrate_default_fps).max(1);
let target_fps = env_u32("LESAVKA_EYE_FPS", default_eye_fps(max_bitrate_kbit)).max(1);
let min_fps = env_u32("LESAVKA_EYE_MIN_FPS", 12).clamp(1, target_fps);
let adaptive = std::env::var("LESAVKA_EYE_ADAPTIVE")
.map(|v| v != "0")
.map(|value| value != "0")
.unwrap_or(true);
info!(
target: "lesavka_server::video",
@ -116,64 +69,52 @@ pub async fn eye_ball(dev: &str, id: u32, max_bitrate_kbit: u32) -> anyhow::Resu
adaptive,
"🎥 eye stream profile selected"
);
let effective_fps = Arc::new(std::sync::atomic::AtomicU32::new(target_fps));
let effective_fps = Arc::new(AtomicU32::new(target_fps));
let dropped_window = Arc::new(AtomicU64::new(0));
let sent_window = Arc::new(AtomicU64::new(0));
let last_adjust_sec = Arc::new(AtomicU64::new(0));
let wait_for_idr = Arc::new(AtomicBool::new(false));
let last_sent = Arc::new(AtomicU64::new(0));
let queue_buffers = env_u32("LESAVKA_EYE_QUEUE_BUFFERS", 8).max(1);
let appsink_buffers = env_u32("LESAVKA_EYE_APPSINK_BUFFERS", 8).max(1);
let desc = format!(
"v4l2src name=cam_{eye} device=\"{dev}\" io-mode=mmap do-timestamp=true ! \
queue ! \
queue max-size-buffers={queue_buffers} max-size-time=0 max-size-bytes=0 leaky=downstream ! \
h264parse disable-passthrough=true config-interval=-1 ! \
video/x-h264,stream-format=byte-stream,alignment=au ! \
appsink name=sink emit-signals=true max-buffers=32 drop=true"
appsink name=sink emit-signals=true max-buffers={appsink_buffers} drop=true"
);
// let desc = format!(
// "v4l2src device={dev} io-mode=mmap ! \
// queue max-size-buffers=0 max-size-bytes=0 max-size-time=0 ! tsdemux name=d ! \
// video/x-h264,stream-format=byte-stream,alignment=au,profile=high ! tsdemux name=d ! \
// d. ! h264parse config-interval=1 ! queue ! appsink name=vsink emit-signals=true \
// d. ! aacparse ! queue ! h264parse config-interval=1 ! appsink name=sink \
// emit-signals=true drop=false sync=false"
// );
let pipeline = gst::parse::launch(&desc)?
.downcast::<gst::Pipeline>()
.expect("not a pipeline");
// let pipeline: gst::Pipeline = gst::parse_launch(&desc)?
// .downcast()
// .expect("not a pipeline");
let sink = pipeline
.by_name("sink")
.expect("appsink")
.dynamic_cast::<gst_app::AppSink>()
.expect("appsink down-cast");
let (tx, rx) = tokio::sync::mpsc::channel(8192);
let chan_capacity = env_usize("LESAVKA_EYE_CHAN_CAPACITY", 256).max(16);
let (tx, rx) = tokio::sync::mpsc::channel(chan_capacity);
/* ----- BUS WATCH: show errors & warnings immediately --------------- */
let bus = pipeline.bus().expect("bus");
if let Some(src_pad) = pipeline
.by_name(&format!("cam_{eye}"))
.and_then(|e| e.static_pad("src"))
.and_then(|element| element.static_pad("src"))
{
src_pad.add_probe(gst::PadProbeType::EVENT_DOWNSTREAM, |pad, info| {
if let Some(gst::PadProbeData::Event(ref ev)) = info.data {
if let gst::EventView::Caps(c) = ev.view() {
trace!(target:"lesavka_server::video",
?c, "🔍 new caps on {}", pad.name());
if let Some(gst::PadProbeData::Event(ref event)) = info.data {
if let gst::EventView::Caps(caps) = event.view() {
trace!(target:"lesavka_server::video", ?caps, "🔍 new caps on {}", pad.name());
}
}
gst::PadProbeReturn::Ok
});
} else {
warn!(target:"lesavka_server::video",
eye = %eye,
"🍪 cam_{eye} not found - skipping pad-probe");
warn!(target:"lesavka_server::video", eye = %eye, "🍪 cam_{eye} not found - skipping pad-probe");
}
let eye_clone = eye.to_owned();
@ -181,73 +122,72 @@ pub async fn eye_ball(dev: &str, id: u32, max_bitrate_kbit: u32) -> anyhow::Resu
for msg in bus.iter_timed(gst::ClockTime::NONE) {
match msg.view() {
Error(err) => {
error!(target:"lesavka_server::video",
eye = %eye_clone,
"💥 pipeline error: {} ({})",
err.error(), err.debug().unwrap_or_default());
error!(
target:"lesavka_server::video",
eye = %eye_clone,
"💥 pipeline error: {} ({})",
err.error(),
err.debug().unwrap_or_default()
);
}
Warning(w) => {
warn!(target:"lesavka_server::video",
eye = %eye_clone,
"⚠️ pipeline warning: {} ({})",
w.error(), w.debug().unwrap_or_default());
Warning(warning) => {
warn!(
target:"lesavka_server::video",
eye = %eye_clone,
"⚠️ pipeline warning: {} ({})",
warning.error(),
warning.debug().unwrap_or_default()
);
}
Info(i) => {
info!(target:"lesavka_server::video",
eye = %eye_clone,
"📌 pipeline info: {} ({})",
i.error(), i.debug().unwrap_or_default());
Info(info_msg) => {
info!(
target:"lesavka_server::video",
eye = %eye_clone,
"📌 pipeline info: {} ({})",
info_msg.error(),
info_msg.debug().unwrap_or_default()
);
}
StateChanged(s) if s.current() == gst::State::Playing => {
debug!(target:"lesavka_server::video",
eye = %eye_clone,
"🎬 pipeline PLAYING");
StateChanged(state) if state.current() == gst::State::Playing => {
debug!(target:"lesavka_server::video", eye = %eye_clone, "🎬 pipeline PLAYING");
}
_ => {}
}
}
});
let last_sent_cloned = last_sent.clone();
let effective_fps_cloned = effective_fps.clone();
let dropped_window_cloned = dropped_window.clone();
let sent_window_cloned = sent_window.clone();
let last_adjust_sec_cloned = last_adjust_sec.clone();
let wait_for_idr_cloned = wait_for_idr.clone();
let eye_name = eye.to_string();
sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |sink| {
/* -------- pull frame ---------- */
let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
/* -------- map once, reuse ----- */
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
let is_idr = contains_idr(map.as_slice());
/* -------- basic counters ------ */
static FRAME: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
let n = FRAME.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
if n % 120 == 0 && is_idr {
trace!(target: "lesavka_server::video", "eye-{eye}: delivered {n} frames");
static FRAME: AtomicU64 = AtomicU64::new(0);
let frame = FRAME.fetch_add(1, Ordering::Relaxed);
if frame % 120 == 0 && is_idr {
trace!(target: "lesavka_server::video", "eye-{eye}: delivered {frame} frames");
if enabled!(Level::TRACE) {
let path = format!("/tmp/eye-{eye}-srv-{:05}.h264", n);
let path = format!("/tmp/eye-{eye}-srv-{frame:05}.h264");
std::fs::write(&path, map.as_slice()).ok();
}
} else if n < 10 {
debug!(target: "lesavka_server::video",
eye = eye, frame = n, bytes = map.len(),
pts = ?buffer.pts(), "⬆️ pushed video sample eye-{eye}");
} else if frame < 10 {
debug!(
target: "lesavka_server::video",
eye = eye,
frame,
bytes = map.len(),
pts = ?buffer.pts(),
"⬆️ pushed video sample eye-{eye}"
);
}
/* -------- detect SPS / IDR ---- */
if enabled!(Level::DEBUG) && is_idr {
debug!("eye-{eye}: IDR");
if enabled!(Level::TRACE) && is_idr {
trace!("eye-{eye}: IDR");
}
/* -------- timestamps ---------- */
let origin = *START.get_or_init(|| buffer.pts().unwrap_or(gst::ClockTime::ZERO));
let pts_us = buffer
.pts()
@ -258,35 +198,30 @@ pub async fn eye_ball(dev: &str, id: u32, max_bitrate_kbit: u32) -> anyhow::Resu
if adaptive {
let sec = pts_us / 1_000_000;
let prev = last_adjust_sec_cloned.load(Ordering::Relaxed);
let prev = last_adjust_sec.load(Ordering::Relaxed);
if sec > prev
&& last_adjust_sec_cloned
&& last_adjust_sec
.compare_exchange(prev, sec, Ordering::SeqCst, Ordering::SeqCst)
.is_ok()
{
let dropped = dropped_window_cloned.swap(0, Ordering::Relaxed);
let sent = sent_window_cloned.swap(0, Ordering::Relaxed);
let total = dropped + sent;
if total > 0 {
let drop_ratio = dropped as f64 / total as f64;
let mut fps = effective_fps_cloned.load(Ordering::Relaxed).max(1);
if drop_ratio > 0.20 && fps > min_fps {
fps = fps.saturating_sub(2).max(min_fps);
effective_fps_cloned.store(fps, Ordering::Relaxed);
let dropped = dropped_window.swap(0, Ordering::Relaxed);
let sent = sent_window.swap(0, Ordering::Relaxed);
let current = effective_fps.load(Ordering::Relaxed).max(1);
let next = adjust_effective_fps(current, min_fps, target_fps, dropped, sent);
if next != current {
effective_fps.store(next, Ordering::Relaxed);
if next < current {
warn!(
target: "lesavka_server::video",
eye = %eye_name,
fps,
drop_ratio = %format_args!("{drop_ratio:.2}"),
fps = next,
"🎥 adaptive eye fps ↓"
);
} else if dropped == 0 && drop_ratio < 0.02 && fps < target_fps {
fps = (fps + 1).min(target_fps);
effective_fps_cloned.store(fps, Ordering::Relaxed);
} else {
info!(
target: "lesavka_server::video",
eye = %eye_name,
fps,
fps = next,
"🎥 adaptive eye fps ↑"
);
}
@ -294,53 +229,43 @@ pub async fn eye_ball(dev: &str, id: u32, max_bitrate_kbit: u32) -> anyhow::Resu
}
}
let cur_fps = effective_fps_cloned.load(Ordering::Relaxed).max(1);
let frame_interval_us = 1_000_000u64 / cur_fps as u64;
if frame_interval_us > 0 {
let last = last_sent_cloned.load(Ordering::Relaxed);
if last != 0 && pts_us.saturating_sub(last) < frame_interval_us {
return Ok(gst::FlowSuccess::Ok);
}
last_sent_cloned.store(pts_us, Ordering::Relaxed);
let current_fps = effective_fps.load(Ordering::Relaxed).max(1);
let last = last_sent.load(Ordering::Relaxed);
if !should_send_frame(last, pts_us, current_fps) {
return Ok(gst::FlowSuccess::Ok);
}
last_sent.store(pts_us, Ordering::Relaxed);
if wait_for_idr_cloned.load(Ordering::Relaxed) && !is_idr {
if wait_for_idr.load(Ordering::Relaxed) && !is_idr {
return Ok(gst::FlowSuccess::Ok);
}
/* -------- ship over gRPC ----- */
let data = map.as_slice().to_vec();
let size = data.len();
let pkt = VideoPacket {
id,
pts: pts_us,
data,
};
let pkt = VideoPacket { id, pts: pts_us, data };
match tx.try_send(Ok(pkt)) {
Ok(_) => {
sent_window_cloned.fetch_add(1, Ordering::Relaxed);
sent_window.fetch_add(1, Ordering::Relaxed);
if is_idr {
wait_for_idr_cloned.store(false, Ordering::Relaxed);
wait_for_idr.store(false, Ordering::Relaxed);
}
trace!(target:"lesavka_server::video",
eye = %eye,
size = size,
"🎥📤 sent");
trace!(target:"lesavka_server::video", eye = %eye, size = size, "🎥📤 sent");
}
Err(tokio::sync::mpsc::error::TrySendError::Full(_)) => {
dropped_window_cloned.fetch_add(1, Ordering::Relaxed);
wait_for_idr_cloned.store(true, Ordering::Relaxed);
static DROP_CNT: std::sync::atomic::AtomicU64 =
std::sync::atomic::AtomicU64::new(0);
let c = DROP_CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
if c % 120 == 0 {
debug!(target:"lesavka_server::video",
eye = %eye,
dropped = c,
"🎥⏳ channel full - dropping frames");
dropped_window.fetch_add(1, Ordering::Relaxed);
wait_for_idr.store(true, Ordering::Relaxed);
static DROP_CNT: AtomicU64 = AtomicU64::new(0);
let dropped = DROP_CNT.fetch_add(1, Ordering::Relaxed);
if dropped % 120 == 0 {
debug!(
target:"lesavka_server::video",
eye = %eye,
dropped,
"🎥⏳ channel full - dropping frames"
);
}
}
Err(e) => error!("mpsc send err: {e}"),
Err(error) => error!("mpsc send err: {error}"),
}
Ok(gst::FlowSuccess::Ok)
@ -355,360 +280,17 @@ pub async fn eye_ball(dev: &str, id: u32, max_bitrate_kbit: u32) -> anyhow::Resu
loop {
match bus.timed_pop(gst::ClockTime::NONE) {
Some(msg)
if matches!(msg.view(), MessageView::StateChanged(s)
if s.current() == gst::State::Playing) =>
if matches!(msg.view(), MessageView::StateChanged(state)
if state.current() == gst::State::Playing) =>
{
break;
}
Some(_) => continue,
None => continue,
Some(_) | None => continue,
}
}
Ok(VideoStream {
_pipeline: pipeline,
inner: ReceiverStream::new(rx),
})
}
/// Feeds client video frames into the USB UVC gadget via GStreamer.
pub struct WebcamSink {
    // Entry point for frames; the pipeline pulls from this appsrc.
    appsrc: gst_app::AppSrc,
    // Held only to keep the GStreamer pipeline alive for the sink's lifetime.
    _pipe: gst::Pipeline,
}
impl WebcamSink {
    /// Build a webcam sink pipeline writing into the UVC gadget `uvc_dev`.
    ///
    /// MJPEG packets are forwarded as parsed JPEG frames; H.264 packets are
    /// decoded to raw YUY2 before reaching the v4l2 sink.
    pub fn new(uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
        gst::init()?;
        let pipeline = gst::Pipeline::new();
        let width = cfg.width as i32;
        let height = cfg.height as i32;
        // Clamp so a zero-FPS config cannot produce an invalid framerate cap.
        let fps = cfg.fps.max(1) as i32;
        let use_mjpeg = matches!(cfg.codec, CameraCodec::Mjpeg);
        let src = gst::ElementFactory::make("appsrc")
            .build()?
            .downcast::<gst_app::AppSrc>()
            .expect("appsrc");
        src.set_is_live(true);
        src.set_format(gst::Format::Time);
        // Optional back-pressure: LESAVKA_UVC_APP_BLOCK=1 makes pushes block
        // instead of dropping when the appsrc queue fills.
        let block = std::env::var("LESAVKA_UVC_APP_BLOCK")
            .ok()
            .map(|v| v != "0")
            .unwrap_or(false);
        src.set_property("block", &block);
        if use_mjpeg {
            let caps_mjpeg = gst::Caps::builder("image/jpeg")
                .field("parsed", true)
                .field("width", width)
                .field("height", height)
                .field("framerate", gst::Fraction::new(fps, 1))
                .field("pixel-aspect-ratio", gst::Fraction::new(1, 1))
                .field("colorimetry", "2:4:7:1")
                .build();
            src.set_caps(Some(&caps_mjpeg));
            let queue = gst::ElementFactory::make("queue").build()?;
            let capsfilter = gst::ElementFactory::make("capsfilter")
                .property("caps", &caps_mjpeg)
                .build()?;
            let sink = gst::ElementFactory::make("v4l2sink")
                .property("device", &uvc_dev)
                .property("sync", &false)
                .build()?;
            pipeline.add_many(&[src.upcast_ref(), &queue, &capsfilter, &sink])?;
            gst::Element::link_many(&[src.upcast_ref(), &queue, &capsfilter, &sink])?;
        } else {
            let caps_h264 = gst::Caps::builder("video/x-h264")
                .field("stream-format", "byte-stream")
                .field("alignment", "au")
                .build();
            let raw_caps = gst::Caps::builder("video/x-raw")
                .field("format", "YUY2")
                .field("width", width)
                .field("height", height)
                .field("framerate", gst::Fraction::new(fps, 1))
                .build();
            src.set_caps(Some(&caps_h264));
            let h264parse = gst::ElementFactory::make("h264parse").build()?;
            let decoder_name = pick_h264_decoder();
            let decoder = gst::ElementFactory::make(decoder_name)
                .build()
                .with_context(|| format!("building decoder element {decoder_name}"))?;
            let convert = gst::ElementFactory::make("videoconvert").build()?;
            let scale = gst::ElementFactory::make("videoscale").build()?;
            let caps = gst::ElementFactory::make("capsfilter")
                .property("caps", &raw_caps)
                .build()?;
            let sink = gst::ElementFactory::make("v4l2sink")
                .property("device", &uvc_dev)
                .property("sync", &false)
                .build()?;
            pipeline.add_many(&[
                src.upcast_ref(),
                &h264parse,
                &decoder,
                &convert,
                &scale,
                &caps,
                &sink,
            ])?;
            gst::Element::link_many(&[
                src.upcast_ref(),
                &h264parse,
                &decoder,
                &convert,
                &scale,
                &caps,
                &sink,
            ])?;
        }
        pipeline.set_state(gst::State::Playing)?;
        Ok(Self {
            appsrc: src,
            _pipe: pipeline,
        })
    }

    /// Push one packet into the pipeline, stamping PTS from the packet itself.
    pub fn push(&self, pkt: VideoPacket) {
        let mut buf = gst::Buffer::from_slice(pkt.data);
        // A freshly created buffer holds the only reference, so get_mut succeeds.
        buf.get_mut()
            .unwrap()
            .set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));
        if let Err(err) = self.appsrc.push_buffer(buf) {
            tracing::warn!(target:"lesavka_server::video", %err, "📸⚠️ appsrc push failed");
        }
    }
}
/// Feeds client video frames into the local HDMI display pipeline.
pub struct HdmiSink {
    // Entry point for frames; the pipeline pulls from this appsrc.
    appsrc: gst_app::AppSrc,
    // Held only to keep the GStreamer pipeline alive for the sink's lifetime.
    _pipe: gst::Pipeline,
}
impl HdmiSink {
    /// Build an HDMI display pipeline for the negotiated camera config.
    ///
    /// H.264 input is parsed and decoded; MJPEG input is JPEG-decoded. Both
    /// paths converge on videoconvert/videoscale before the display sink
    /// chosen by `build_hdmi_sink`.
    pub fn new(cfg: &CameraConfig) -> anyhow::Result<Self> {
        gst::init()?;
        let pipeline = gst::Pipeline::new();
        let width = cfg.width as i32;
        let height = cfg.height as i32;
        // Clamp so a zero-FPS config cannot produce an invalid framerate cap.
        let fps = cfg.fps.max(1) as i32;
        let src = gst::ElementFactory::make("appsrc")
            .build()?
            .downcast::<gst_app::AppSrc>()
            .expect("appsrc");
        src.set_is_live(true);
        src.set_format(gst::Format::Time);
        let raw_caps = gst::Caps::builder("video/x-raw")
            .field("width", width)
            .field("height", height)
            .field("framerate", gst::Fraction::new(fps, 1))
            .build();
        let capsfilter = gst::ElementFactory::make("capsfilter")
            .property("caps", &raw_caps)
            .build()?;
        // Small queue keeps latency bounded between decode and display.
        let queue = gst::ElementFactory::make("queue")
            .property("max-size-buffers", 4u32)
            .build()?;
        let convert = gst::ElementFactory::make("videoconvert").build()?;
        let scale = gst::ElementFactory::make("videoscale").build()?;
        let sink = build_hdmi_sink(cfg)?;
        match cfg.codec {
            CameraCodec::H264 => {
                let caps_h264 = gst::Caps::builder("video/x-h264")
                    .field("stream-format", "byte-stream")
                    .field("alignment", "au")
                    .build();
                src.set_caps(Some(&caps_h264));
                let h264parse = gst::ElementFactory::make("h264parse").build()?;
                let decoder_name = pick_h264_decoder();
                let decoder = gst::ElementFactory::make(decoder_name)
                    .build()
                    .with_context(|| format!("building decoder element {decoder_name}"))?;
                pipeline.add_many(&[
                    src.upcast_ref(),
                    &queue,
                    &h264parse,
                    &decoder,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
                gst::Element::link_many(&[
                    src.upcast_ref(),
                    &queue,
                    &h264parse,
                    &decoder,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
            }
            CameraCodec::Mjpeg => {
                let caps_mjpeg = gst::Caps::builder("image/jpeg")
                    .field("parsed", true)
                    .field("width", width)
                    .field("height", height)
                    .field("framerate", gst::Fraction::new(fps, 1))
                    .build();
                src.set_caps(Some(&caps_mjpeg));
                let jpegdec = gst::ElementFactory::make("jpegdec").build()?;
                pipeline.add_many(&[
                    src.upcast_ref(),
                    &queue,
                    &jpegdec,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
                gst::Element::link_many(&[
                    src.upcast_ref(),
                    &queue,
                    &jpegdec,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
            }
        }
        pipeline.set_state(gst::State::Playing)?;
        Ok(Self {
            appsrc: src,
            _pipe: pipeline,
        })
    }

    /// Push one packet into the pipeline, stamping PTS from the packet itself.
    pub fn push(&self, pkt: VideoPacket) {
        let mut buf = gst::Buffer::from_slice(pkt.data);
        // A freshly created buffer holds the only reference, so get_mut succeeds.
        buf.get_mut()
            .unwrap()
            .set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));
        if let Err(err) = self.appsrc.push_buffer(buf) {
            tracing::warn!(target:"lesavka_server::video", %err, "📺⚠️ HDMI appsrc push failed");
        }
    }
}
/// Choose the HDMI display sink element.
///
/// Preference order: the `LESAVKA_HDMI_SINK` env override, then `kmssink`
/// (with optional connector pinning from the config), then `autovideosink`.
fn build_hdmi_sink(cfg: &CameraConfig) -> anyhow::Result<gst::Element> {
    if let Ok(name) = std::env::var("LESAVKA_HDMI_SINK") {
        return gst::ElementFactory::make(&name)
            .build()
            .context("building HDMI sink");
    }
    if gst::ElementFactory::find("kmssink").is_some() {
        let sink = gst::ElementFactory::make("kmssink").build()?;
        if let Some(connector) = cfg.hdmi.as_ref().and_then(|h| h.id) {
            // Pin the DRM connector only when this kmssink build supports it.
            if sink.has_property("connector-id", None) {
                sink.set_property("connector-id", &(connector as i32));
            } else {
                tracing::warn!(
                    target: "lesavka_server::video",
                    %connector,
                    "kmssink does not expose connector-id property; using default connector"
                );
            }
        }
        sink.set_property("sync", &false);
        return Ok(sink);
    }
    let sink = gst::ElementFactory::make("autovideosink")
        .build()
        .context("building HDMI sink")?;
    let _ = sink.set_property("sync", &false);
    Ok(sink)
}
/*─────────────────────────────────*/
/* gRPC → CameraSink relay */
/*─────────────────────────────────*/
// The physical output a camera relay writes into.
enum CameraSink {
    Uvc(WebcamSink),
    Hdmi(HdmiSink),
}
impl CameraSink {
fn push(&self, pkt: VideoPacket) {
match self {
CameraSink::Uvc(sink) => sink.push(pkt),
CameraSink::Hdmi(sink) => sink.push(pkt),
}
}
}
/// Relays client camera packets from gRPC into the configured sink.
pub struct CameraRelay {
    sink: CameraSink,
    id: u32, // gRPC “id” (for future multicam)
    // Monotonic frame counter driving log cadence and dev-mode dump names.
    frames: std::sync::atomic::AtomicU64,
}
impl CameraRelay {
    /// Build a relay that writes frames into the USB UVC gadget.
    pub fn new_uvc(id: u32, uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
        Ok(Self {
            sink: CameraSink::Uvc(WebcamSink::new(uvc_dev, cfg)?),
            id,
            frames: std::sync::atomic::AtomicU64::new(0),
        })
    }

    /// Build a relay that writes frames into the HDMI display pipeline.
    pub fn new_hdmi(id: u32, cfg: &CameraConfig) -> anyhow::Result<Self> {
        Ok(Self {
            sink: CameraSink::Hdmi(HdmiSink::new(cfg)?),
            id,
            frames: std::sync::atomic::AtomicU64::new(0),
        })
    }

    /// Push one VideoPacket coming from the client
    pub fn feed(&self, pkt: VideoPacket) {
        let n = self
            .frames
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        // First ten frames and every 60th at debug confirm liveness without
        // flooding the logs; every 10th at trace gives finer-grained detail.
        if n < 10 || n % 60 == 0 {
            tracing::debug!(target:"lesavka_server::video",
                cam_id = self.id,
                frame = n,
                bytes = pkt.data.len(),
                pts = pkt.pts,
                "📸 srv webcam frame");
        } else if n % 10 == 0 {
            tracing::trace!(target:"lesavka_server::video",
                cam_id = self.id,
                bytes = pkt.data.len(),
                "📸📥 srv pkt");
        }
        // Dev-mode keyframe dump for offline decoder debugging; gated on
        // debug builds or TRACE so release paths skip the filesystem write.
        if dev_mode_enabled()
            && (cfg!(debug_assertions) || tracing::enabled!(tracing::Level::TRACE))
            && contains_idr(&pkt.data)
        {
            let path = format!("/tmp/eye3-cli-{n:05}.h264");
            if let Err(e) = std::fs::write(&path, &pkt.data) {
                tracing::warn!("📸💾 dump failed: {e}");
            } else {
                tracing::debug!("📸💾 wrote {}", path);
            }
        }
        self.sink.push(pkt);
    }
}

458
server/src/video_sinks.rs Normal file
View File

@ -0,0 +1,458 @@
#![forbid(unsafe_code)]
use anyhow::Context;
use gstreamer as gst;
use gstreamer::prelude::*;
use gstreamer_app as gst_app;
use lesavka_common::lesavka::VideoPacket;
use std::sync::atomic::AtomicU64;
use tracing::warn;
use crate::camera::{CameraCodec, CameraConfig};
use crate::video_support::{contains_idr, dev_mode_enabled, next_local_pts, pick_h264_decoder};
/// Push H.264 or MJPEG frames into the USB UVC gadget.
///
/// Inputs: a UVC device node and the negotiated camera configuration.
/// Outputs: a live `WebcamSink` that accepts `VideoPacket`s.
/// Why: the UVC sink owns the GStreamer pipeline details for gadget output so
/// the relay logic can focus on session lifecycle instead of media plumbing.
pub struct WebcamSink {
    // Entry point for frames pushed from the gRPC relay.
    appsrc: gst_app::AppSrc,
    // Owned so Drop can shut the pipeline down cleanly.
    pipe: gst::Pipeline,
    // Locally synthesized monotonic PTS counter (microseconds); see `push`.
    next_pts_us: AtomicU64,
    // Per-frame PTS increment derived from the negotiated FPS.
    frame_step_us: u64,
}
impl WebcamSink {
    /// Build a new webcam sink pipeline.
    ///
    /// Inputs: the target UVC device plus the selected camera profile.
    /// Outputs: a sink ready to receive `VideoPacket`s.
    /// Why: UVC output has its own caps and decoder chain that differs from the
    /// HDMI sink, so it lives in a dedicated constructor.
    pub fn new(uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
        gst::init()?;
        let pipeline = gst::Pipeline::new();
        let width = cfg.width as i32;
        let height = cfg.height as i32;
        // Clamp so a zero-FPS config cannot produce an invalid framerate cap.
        let fps = cfg.fps.max(1) as i32;
        let use_mjpeg = matches!(cfg.codec, CameraCodec::Mjpeg);
        let src = gst::ElementFactory::make("appsrc")
            .build()?
            .downcast::<gst_app::AppSrc>()
            .expect("appsrc");
        src.set_is_live(true);
        src.set_format(gst::Format::Time);
        // PTS is stamped locally in `push`; appsrc must not re-timestamp.
        src.set_property("do-timestamp", &false);
        // Optional back-pressure: LESAVKA_UVC_APP_BLOCK=1 makes pushes block
        // instead of dropping when the appsrc queue fills.
        let block = std::env::var("LESAVKA_UVC_APP_BLOCK")
            .ok()
            .map(|value| value != "0")
            .unwrap_or(false);
        src.set_property("block", &block);
        if use_mjpeg {
            let caps_mjpeg = gst::Caps::builder("image/jpeg")
                .field("parsed", true)
                .field("width", width)
                .field("height", height)
                .field("framerate", gst::Fraction::new(fps, 1))
                .field("pixel-aspect-ratio", gst::Fraction::new(1, 1))
                .field("colorimetry", "2:4:7:1")
                .build();
            src.set_caps(Some(&caps_mjpeg));
            let queue = gst::ElementFactory::make("queue").build()?;
            let capsfilter = gst::ElementFactory::make("capsfilter")
                .property("caps", &caps_mjpeg)
                .build()?;
            let sink = gst::ElementFactory::make("v4l2sink")
                .property("device", &uvc_dev)
                .property("sync", &false)
                .build()?;
            pipeline.add_many(&[src.upcast_ref(), &queue, &capsfilter, &sink])?;
            gst::Element::link_many(&[src.upcast_ref(), &queue, &capsfilter, &sink])?;
        } else {
            let caps_h264 = gst::Caps::builder("video/x-h264")
                .field("stream-format", "byte-stream")
                .field("alignment", "au")
                .build();
            let raw_caps = gst::Caps::builder("video/x-raw")
                .field("format", "YUY2")
                .field("width", width)
                .field("height", height)
                .field("framerate", gst::Fraction::new(fps, 1))
                .build();
            src.set_caps(Some(&caps_h264));
            let h264parse = gst::ElementFactory::make("h264parse").build()?;
            let decoder_name = pick_h264_decoder();
            let decoder = gst::ElementFactory::make(decoder_name)
                .build()
                .with_context(|| format!("building decoder element {decoder_name}"))?;
            let convert = gst::ElementFactory::make("videoconvert").build()?;
            let scale = gst::ElementFactory::make("videoscale").build()?;
            let caps = gst::ElementFactory::make("capsfilter")
                .property("caps", &raw_caps)
                .build()?;
            let sink = gst::ElementFactory::make("v4l2sink")
                .property("device", &uvc_dev)
                .property("sync", &false)
                .build()?;
            pipeline.add_many(&[
                src.upcast_ref(),
                &h264parse,
                &decoder,
                &convert,
                &scale,
                &caps,
                &sink,
            ])?;
            gst::Element::link_many(&[
                src.upcast_ref(),
                &h264parse,
                &decoder,
                &convert,
                &scale,
                &caps,
                &sink,
            ])?;
        }
        pipeline.set_state(gst::State::Playing)?;
        // Synthetic per-frame step; fps is clamped so this never divides by zero.
        let frame_step_us = (1_000_000u64 / u64::from(cfg.fps.max(1))).max(1);
        Ok(Self {
            appsrc: src,
            pipe: pipeline,
            next_pts_us: AtomicU64::new(0),
            frame_step_us,
        })
    }

    /// Push one client frame into the UVC pipeline.
    ///
    /// Inputs: the next `VideoPacket` from the gRPC camera stream.
    /// Outputs: none; the frame is forwarded to the appsrc when possible.
    /// Why: UVC sinks use a locally monotonic timeline so presentation remains
    /// stable even when WAN packet timestamps arrive out of order.
    pub fn push(&self, pkt: VideoPacket) {
        let mut buf = gst::Buffer::from_slice(pkt.data);
        if let Some(meta) = buf.get_mut() {
            let pts_us = next_local_pts(&self.next_pts_us, self.frame_step_us);
            let ts = gst::ClockTime::from_useconds(pts_us);
            meta.set_pts(Some(ts));
            meta.set_dts(Some(ts));
            meta.set_duration(Some(gst::ClockTime::from_useconds(self.frame_step_us)));
        }
        if let Err(err) = self.appsrc.push_buffer(buf) {
            tracing::warn!(target:"lesavka_server::video", %err, "📸⚠️ appsrc push failed");
        }
    }
}
impl Drop for WebcamSink {
    /// Tear the pipeline down so the v4l2 device is released promptly.
    fn drop(&mut self) {
        // A shutdown failure is unactionable here, so the result is discarded.
        self.pipe.set_state(gst::State::Null).ok();
    }
}
/// Push H.264 or MJPEG frames into the HDMI display pipeline.
///
/// Inputs: the negotiated camera configuration.
/// Outputs: a live `HdmiSink` ready to display frames.
/// Why: HDMI output uses a different sink selection and conversion chain than
/// the USB gadget, so it warrants a dedicated implementation.
pub struct HdmiSink {
    // Entry point for frames pushed from the gRPC relay.
    appsrc: gst_app::AppSrc,
    // Owned so Drop can shut the pipeline down cleanly.
    pipe: gst::Pipeline,
    // Locally synthesized monotonic PTS counter (microseconds); see `push`.
    next_pts_us: AtomicU64,
    // Per-frame PTS increment derived from the negotiated FPS.
    frame_step_us: u64,
}
impl HdmiSink {
    /// Build a new HDMI sink pipeline.
    ///
    /// Inputs: the selected camera configuration, including optional connector
    /// metadata for `kmssink`.
    /// Outputs: a sink ready to receive `VideoPacket`s.
    /// Why: display output must honor connector pinning and decoder selection
    /// while keeping the relay code agnostic of GStreamer details.
    pub fn new(cfg: &CameraConfig) -> anyhow::Result<Self> {
        gst::init()?;
        let pipeline = gst::Pipeline::new();
        let width = cfg.width as i32;
        let height = cfg.height as i32;
        // Clamp so a zero-FPS config cannot produce an invalid framerate cap.
        let fps = cfg.fps.max(1) as i32;
        let src = gst::ElementFactory::make("appsrc")
            .build()?
            .downcast::<gst_app::AppSrc>()
            .expect("appsrc");
        src.set_is_live(true);
        src.set_format(gst::Format::Time);
        // PTS is stamped locally in `push`; appsrc must not re-timestamp.
        src.set_property("do-timestamp", &false);
        let raw_caps = gst::Caps::builder("video/x-raw")
            .field("width", width)
            .field("height", height)
            .field("framerate", gst::Fraction::new(fps, 1))
            .build();
        let capsfilter = gst::ElementFactory::make("capsfilter")
            .property("caps", &raw_caps)
            .build()?;
        // Small queue keeps latency bounded between decode and display.
        let queue = gst::ElementFactory::make("queue")
            .property("max-size-buffers", 4u32)
            .build()?;
        let convert = gst::ElementFactory::make("videoconvert").build()?;
        // videorate regularizes the synthetic timeline for the display sink.
        let rate = gst::ElementFactory::make("videorate").build()?;
        let scale = gst::ElementFactory::make("videoscale").build()?;
        let sink = build_hdmi_sink(cfg)?;
        match cfg.codec {
            CameraCodec::H264 => {
                let caps_h264 = gst::Caps::builder("video/x-h264")
                    .field("stream-format", "byte-stream")
                    .field("alignment", "au")
                    .build();
                src.set_caps(Some(&caps_h264));
                let h264parse = gst::ElementFactory::make("h264parse").build()?;
                let decoder_name = pick_h264_decoder();
                let decoder = gst::ElementFactory::make(decoder_name)
                    .build()
                    .with_context(|| format!("building decoder element {decoder_name}"))?;
                pipeline.add_many(&[
                    src.upcast_ref(),
                    &queue,
                    &h264parse,
                    &decoder,
                    &rate,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
                gst::Element::link_many(&[
                    src.upcast_ref(),
                    &queue,
                    &h264parse,
                    &decoder,
                    &rate,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
            }
            CameraCodec::Mjpeg => {
                let caps_mjpeg = gst::Caps::builder("image/jpeg")
                    .field("parsed", true)
                    .field("width", width)
                    .field("height", height)
                    .field("framerate", gst::Fraction::new(fps, 1))
                    .build();
                src.set_caps(Some(&caps_mjpeg));
                let jpegdec = gst::ElementFactory::make("jpegdec").build()?;
                pipeline.add_many(&[
                    src.upcast_ref(),
                    &queue,
                    &jpegdec,
                    &rate,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
                gst::Element::link_many(&[
                    src.upcast_ref(),
                    &queue,
                    &jpegdec,
                    &rate,
                    &convert,
                    &scale,
                    &capsfilter,
                    &sink,
                ])?;
            }
        }
        pipeline.set_state(gst::State::Playing)?;
        // Synthetic per-frame step; fps is clamped so this never divides by zero.
        let frame_step_us = (1_000_000u64 / u64::from(cfg.fps.max(1))).max(1);
        Ok(Self {
            appsrc: src,
            pipe: pipeline,
            next_pts_us: AtomicU64::new(0),
            frame_step_us,
        })
    }

    /// Push one client frame into the HDMI pipeline.
    ///
    /// Inputs: the next `VideoPacket` from the gRPC camera stream.
    /// Outputs: none; the frame is forwarded to the appsrc when possible.
    /// Why: display playback uses the same local monotonic PTS policy as UVC to
    /// avoid visible glitches when remote timestamps jitter.
    pub fn push(&self, pkt: VideoPacket) {
        let mut buf = gst::Buffer::from_slice(pkt.data);
        if let Some(meta) = buf.get_mut() {
            let pts_us = next_local_pts(&self.next_pts_us, self.frame_step_us);
            let ts = gst::ClockTime::from_useconds(pts_us);
            meta.set_pts(Some(ts));
            meta.set_dts(Some(ts));
            meta.set_duration(Some(gst::ClockTime::from_useconds(self.frame_step_us)));
        }
        if let Err(err) = self.appsrc.push_buffer(buf) {
            tracing::warn!(target:"lesavka_server::video", %err, "📺⚠️ HDMI appsrc push failed");
        }
    }
}
impl Drop for HdmiSink {
    /// Tear the pipeline down so the display resources are released promptly.
    fn drop(&mut self) {
        // A shutdown failure is unactionable here, so the result is discarded.
        self.pipe.set_state(gst::State::Null).ok();
    }
}
/// Choose the HDMI display sink element.
///
/// Preference order: the `LESAVKA_HDMI_SINK` env override, then `kmssink`
/// (with driver/connector/modesetting pinning when the properties exist),
/// then `autovideosink` as a generic fallback.
fn build_hdmi_sink(cfg: &CameraConfig) -> anyhow::Result<gst::Element> {
    if let Ok(name) = std::env::var("LESAVKA_HDMI_SINK") {
        return gst::ElementFactory::make(&name)
            .build()
            .context("building HDMI sink");
    }
    if gst::ElementFactory::find("kmssink").is_some() {
        let sink = gst::ElementFactory::make("kmssink").build()?;
        if sink.has_property("driver-name", None) {
            // Defaults to the "vc4" KMS driver unless LESAVKA_HDMI_DRIVER overrides.
            let driver = std::env::var("LESAVKA_HDMI_DRIVER").unwrap_or_else(|_| "vc4".to_string());
            sink.set_property("driver-name", &driver);
        }
        if let Some(connector) = cfg.hdmi.as_ref().and_then(|hdmi| hdmi.id) {
            if sink.has_property("connector-id", None) {
                sink.set_property("connector-id", &(connector as i32));
            } else {
                tracing::warn!(
                    target: "lesavka_server::video",
                    %connector,
                    "kmssink does not expose connector-id property; using default connector"
                );
            }
        }
        if sink.has_property("force-modesetting", None) {
            sink.set_property("force-modesetting", &true);
        }
        sink.set_property("sync", &false);
        return Ok(sink);
    }
    let sink = gst::ElementFactory::make("autovideosink")
        .build()
        .context("building HDMI sink")?;
    // NOTE(review): unlike the kmssink path, "sync" is set here without a
    // has_property guard — confirm autovideosink proxies this property.
    let _ = sink.set_property("sync", &false);
    Ok(sink)
}
// The physical output a camera relay writes into.
enum CameraSink {
    Uvc(WebcamSink),
    Hdmi(HdmiSink),
}
impl CameraSink {
fn push(&self, pkt: VideoPacket) {
match self {
CameraSink::Uvc(sink) => sink.push(pkt),
CameraSink::Hdmi(sink) => sink.push(pkt),
}
}
}
/// Forward camera packets from gRPC into either a UVC or HDMI sink.
///
/// Inputs: packets received from the client camera stream.
/// Outputs: none; packets are forwarded to the configured sink.
/// Why: camera sessions share the same logging and dev-mode dump behavior even
/// though their physical sinks differ.
pub struct CameraRelay {
    sink: CameraSink,
    // Logical camera id, echoed in log records.
    id: u32,
    // Monotonic frame counter driving log cadence and dev-mode dump names.
    frames: AtomicU64,
}
impl CameraRelay {
    /// Build a relay that targets the USB UVC gadget.
    ///
    /// Inputs: the logical camera id, UVC device node, and camera config.
    /// Outputs: a relay that writes frames into the gadget pipeline.
    /// Why: keeping constructors explicit avoids accidental sink mismatches.
    pub fn new_uvc(id: u32, uvc_dev: &str, cfg: &CameraConfig) -> anyhow::Result<Self> {
        Ok(Self {
            sink: CameraSink::Uvc(WebcamSink::new(uvc_dev, cfg)?),
            id,
            frames: AtomicU64::new(0),
        })
    }

    /// Build a relay that targets the HDMI output pipeline.
    ///
    /// Inputs: the logical camera id plus the camera config.
    /// Outputs: a relay that writes frames into the display pipeline.
    /// Why: the camera runtime reuses this constructor when the negotiated
    /// output mode selects HDMI instead of UVC.
    pub fn new_hdmi(id: u32, cfg: &CameraConfig) -> anyhow::Result<Self> {
        Ok(Self {
            sink: CameraSink::Hdmi(HdmiSink::new(cfg)?),
            id,
            frames: AtomicU64::new(0),
        })
    }

    /// Push one `VideoPacket` coming from the client.
    ///
    /// Inputs: the next packet from the camera stream.
    /// Outputs: none; the packet is logged and forwarded to the sink.
    /// Why: centralizing frame logging and dev-mode dump behavior keeps the
    /// transport session logic separate from media sink mechanics.
    pub fn feed(&self, pkt: VideoPacket) {
        let frame = self
            .frames
            .fetch_add(1, std::sync::atomic::Ordering::Relaxed);
        // First ten frames and every 60th at debug confirm liveness without
        // flooding the logs; every 10th at trace gives finer-grained detail.
        if frame < 10 || frame % 60 == 0 {
            tracing::debug!(
                target:"lesavka_server::video",
                cam_id = self.id,
                frame,
                bytes = pkt.data.len(),
                pts = pkt.pts,
                "📸 srv webcam frame"
            );
        } else if frame % 10 == 0 {
            tracing::trace!(
                target:"lesavka_server::video",
                cam_id = self.id,
                bytes = pkt.data.len(),
                "📸📥 srv pkt"
            );
        }
        // Dev-mode keyframe dump for offline decoder debugging; gated on
        // debug builds or TRACE so release paths skip the filesystem write.
        if dev_mode_enabled()
            && (cfg!(debug_assertions) || tracing::enabled!(tracing::Level::TRACE))
            && contains_idr(&pkt.data)
        {
            let path = format!("/tmp/eye3-cli-{frame:05}.h264");
            if let Err(error) = std::fs::write(&path, &pkt.data) {
                warn!("📸💾 dump failed: {error}");
            } else {
                tracing::debug!("📸💾 wrote {}", path);
            }
        }
        self.sink.push(pkt);
    }
}

236
server/src/video_support.rs Normal file
View File

@ -0,0 +1,236 @@
#![forbid(unsafe_code)]
use gstreamer as gst;
use std::sync::OnceLock;
use std::sync::atomic::{AtomicU64, Ordering};
static DEV_MODE: OnceLock<bool> = OnceLock::new();
/// Read a `u32` tuning knob from the environment.
///
/// Inputs: the env var name plus the fallback value.
/// Outputs: the parsed value when present and valid, otherwise the fallback.
/// Why: video tuning knobs are operator-controlled and should never panic the
/// server when a typo slips into the environment.
#[must_use]
pub fn env_u32(name: &str, default: u32) -> u32 {
    match std::env::var(name) {
        Ok(raw) => raw.parse::<u32>().unwrap_or(default),
        Err(_) => default,
    }
}
/// Read a `usize` capacity knob from the environment.
///
/// Inputs: the env var name plus the fallback value.
/// Outputs: the parsed value when present and valid, otherwise the fallback.
/// Why: queue and channel capacities use `usize` but should follow the same
/// forgiving behavior as the numeric video tuning env vars.
#[must_use]
pub fn env_usize(name: &str, default: usize) -> usize {
    match std::env::var(name) {
        Ok(raw) => raw.parse::<usize>().unwrap_or(default),
        Err(_) => default,
    }
}
/// Report whether development-mode video dumps are enabled.
///
/// Inputs: none.
/// Outputs: `true` once the process observes `LESAVKA_DEV_MODE`.
/// Why: cached in a `OnceLock` because the per-frame hot path consults this
/// when deciding whether to dump debug samples.
#[must_use]
pub fn dev_mode_enabled() -> bool {
    let cached = DEV_MODE.get_or_init(|| std::env::var("LESAVKA_DEV_MODE").is_ok());
    *cached
}
/// Pick the first available H.264 decoder in our preference order.
///
/// Inputs: none.
/// Outputs: the GStreamer element name that should be instantiated.
/// Why: different targets expose different hardware decoders, so we probe in a
/// stable order before falling back to software decoding.
#[must_use]
pub fn pick_h264_decoder() -> &'static str {
if gst::ElementFactory::find("v4l2h264dec").is_some() {
"v4l2h264dec"
} else if gst::ElementFactory::find("v4l2slh264dec").is_some() {
"v4l2slh264dec"
} else if gst::ElementFactory::find("omxh264dec").is_some() {
"omxh264dec"
} else {
"avdec_h264"
}
}
/// Choose the default eye-stream FPS for the requested bitrate tier.
///
/// Inputs: the negotiated maximum bitrate in kbit/s (0 means "unlimited").
/// Outputs: the target FPS before env overrides are applied.
/// Why: low bitrates need a lower frame rate to preserve visual quality, while
/// higher bitrates can sustain the full target cadence.
#[must_use]
pub fn default_eye_fps(max_bitrate_kbit: u32) -> u32 {
    if max_bitrate_kbit == 0 {
        25
    } else if max_bitrate_kbit <= 2_500 {
        15
    } else if max_bitrate_kbit <= 4_000 {
        20
    } else {
        25
    }
}
/// Detect whether an H.264 access unit contains an IDR NAL.
///
/// Inputs: one Annex-B encoded H.264 access unit.
/// Outputs: `true` when the access unit carries an IDR frame (NAL type 5).
/// Why: after dropping frames we wait for the next keyframe so downstream
/// decoders do not resume from a broken prediction chain.
#[must_use]
pub fn contains_idr(h264: &[u8]) -> bool {
    let len = h264.len();
    let mut index = 0;
    // Scan for 3-byte (00 00 01) and 4-byte (00 00 00 01) start codes. The
    // loop bound guarantees the NAL header byte after a 3-byte start code is
    // in bounds (the previous `index + 4 < len` bound missed an IDR sitting
    // behind a 3-byte start code at the very end of the buffer); the 4-byte
    // case re-checks its own extra byte explicitly.
    while index + 3 < len {
        if h264[index] == 0 && h264[index + 1] == 0 {
            if h264[index + 2] == 1 {
                // 3-byte start code: NAL header at index + 3 (in bounds by loop guard).
                if (h264[index + 3] & 0x1F) == 5 {
                    return true;
                }
            } else if h264[index + 2] == 0 && index + 4 < len && h264[index + 3] == 1 {
                // 4-byte start code: NAL header at index + 4.
                if (h264[index + 4] & 0x1F) == 5 {
                    return true;
                }
            }
        }
        index += 1;
    }
    false
}
/// Compute the next adaptive eye-stream FPS after one reporting window.
///
/// Inputs: the current FPS plus the target/min bounds and the sent/dropped
/// frame counts collected during the last window.
/// Outputs: the adjusted FPS for the next window (never below 1).
/// Why: the callback path keeps only counters; this pure policy function makes
/// the adaptation behavior unit-testable and easier to tune.
#[must_use]
pub fn adjust_effective_fps(
    current_fps: u32,
    min_fps: u32,
    target_fps: u32,
    dropped: u64,
    sent: u64,
) -> u32 {
    let sampled = dropped + sent;
    if sampled == 0 {
        // No traffic observed in the window: hold the current rate.
        return current_fps.max(1);
    }
    let drop_ratio = dropped as f64 / sampled as f64;
    let should_back_off = drop_ratio > 0.10 && current_fps > min_fps;
    let should_ramp_up = dropped == 0 && drop_ratio < 0.02 && current_fps < target_fps;
    if should_back_off {
        current_fps.saturating_sub(3).max(min_fps)
    } else if should_ramp_up {
        (current_fps + 1).min(target_fps)
    } else {
        current_fps.max(1)
    }
}
/// Decide whether a frame should be emitted at the current pacing budget.
///
/// Inputs: the previous sent timestamp, the candidate frame timestamp, and the
/// current target FPS.
/// Outputs: `true` when enough time has elapsed to send another frame (the
/// very first frame, `last_pts_us == 0`, always passes).
/// Why: rate limiting on timestamps keeps the gRPC stream bounded without
/// requiring the callback to inspect wall-clock time.
#[must_use]
pub fn should_send_frame(last_pts_us: u64, current_pts_us: u64, fps: u32) -> bool {
    let interval_us = 1_000_000u64 / u64::from(fps.max(1));
    let elapsed_ok = current_pts_us.saturating_sub(last_pts_us) >= interval_us;
    interval_us == 0 || last_pts_us == 0 || elapsed_ok
}
/// Advance the local monotonic PTS used by sink appsrc instances.
///
/// Inputs: the shared counter and the per-frame duration in microseconds.
/// Outputs: the next strictly monotonic local timestamp; the first call
/// returns 0 because `fetch_add` yields the pre-increment value.
/// Why: WAN-delivered packet PTS values can arrive out of order, so sink-side
/// playback uses a synthetic monotonic timeline instead.
#[must_use]
pub fn next_local_pts(counter: &AtomicU64, frame_step_us: u64) -> u64 {
    counter.fetch_add(frame_step_us, Ordering::Relaxed)
}
// Unit tests for the pure video-policy helpers. Env-touching tests run under
// `#[serial]` because the process environment is global mutable state.
#[cfg(test)]
mod tests {
    use super::{
        adjust_effective_fps, contains_idr, default_eye_fps, env_u32, env_usize, next_local_pts,
        should_send_frame,
    };
    use serial_test::serial;
    use std::sync::atomic::AtomicU64;
    use temp_env::with_var;

    #[test]
    fn default_eye_fps_tracks_bitrate_tiers() {
        assert_eq!(default_eye_fps(0), 25);
        assert_eq!(default_eye_fps(2_000), 15);
        assert_eq!(default_eye_fps(3_000), 20);
        assert_eq!(default_eye_fps(8_000), 25);
    }

    #[test]
    fn contains_idr_finds_annex_b_keyframes() {
        // 0x65 & 0x1F == 5 (IDR); 0x41 & 0x1F == 1 (non-IDR slice).
        let sample = [0, 0, 0, 1, 0x65, 0x88, 0x99];
        assert!(contains_idr(&sample));
        assert!(!contains_idr(&[0, 0, 0, 1, 0x41, 0x99]));
    }

    #[test]
    fn adjust_effective_fps_reacts_to_drop_windows() {
        assert_eq!(adjust_effective_fps(20, 12, 25, 5, 10), 17);
        assert_eq!(adjust_effective_fps(20, 12, 25, 0, 20), 21);
        assert_eq!(adjust_effective_fps(12, 12, 25, 10, 10), 12);
    }

    #[test]
    fn should_send_frame_enforces_interval() {
        // 25 FPS => 40 ms budget between frames.
        assert!(should_send_frame(0, 10, 25));
        assert!(!should_send_frame(40_000, 50_000, 25));
        assert!(should_send_frame(40_000, 90_000, 25));
    }

    #[test]
    fn next_local_pts_monotonically_advances() {
        let counter = AtomicU64::new(0);
        assert_eq!(next_local_pts(&counter, 40_000), 0);
        assert_eq!(next_local_pts(&counter, 40_000), 40_000);
    }

    #[test]
    #[serial]
    fn env_helpers_parse_values_and_fallbacks() {
        with_var("LESAVKA_TEST_U32", Some("42"), || {
            assert_eq!(env_u32("LESAVKA_TEST_U32", 7), 42);
        });
        with_var("LESAVKA_TEST_U32", Some("oops"), || {
            assert_eq!(env_u32("LESAVKA_TEST_U32", 7), 7);
        });
        with_var("LESAVKA_TEST_USIZE", Some("128"), || {
            assert_eq!(env_usize("LESAVKA_TEST_USIZE", 64), 128);
        });
        with_var("LESAVKA_TEST_USIZE", None::<&str>, || {
            assert_eq!(env_usize("LESAVKA_TEST_USIZE", 64), 64);
        });
    }

    #[test]
    fn adjust_effective_fps_keeps_current_rate_when_no_samples() {
        assert_eq!(adjust_effective_fps(18, 12, 25, 0, 0), 18);
    }
}