//! Extra media and helper coverage for server main relay branches.
//!
//! Scope: include `server/src/main.rs` and exercise camera, eye-hub, and UVC
//! helper branches without pushing the binary contract past the LOC cap.
//! Targets: `server/src/main.rs`.
//! Why: live media paths need bounded, deterministic contracts even when no
//! real camera, UVC helper, or capture hardware is present.
|
|
|
|
#[allow(warnings)]
|
|
|
|
|
mod server_main_media_extra {
|
|
|
|
|
include!(env!("LESAVKA_SERVER_MAIN_SRC"));
|
|
|
|
|
|
|
|
|
|
use futures_util::stream;
|
|
|
|
|
use lesavka_common::lesavka::relay_client::RelayClient;
|
|
|
|
|
use serial_test::serial;
|
|
|
|
|
use std::os::unix::fs::PermissionsExt;
|
|
|
|
|
use std::path::Path;
|
|
|
|
|
use temp_env::with_var;
|
|
|
|
|
use tempfile::tempdir;
|
|
|
|
|
|
|
|
|
|
async fn connect_with_retry(addr: std::net::SocketAddr) -> tonic::transport::Channel {
|
|
|
|
|
let endpoint = tonic::transport::Endpoint::from_shared(format!("http://{addr}"))
|
|
|
|
|
.expect("endpoint")
|
|
|
|
|
.tcp_nodelay(true);
|
|
|
|
|
for _ in 0..40 {
|
|
|
|
|
if let Ok(channel) = endpoint.clone().connect().await {
|
|
|
|
|
return channel;
|
|
|
|
|
}
|
|
|
|
|
tokio::time::sleep(std::time::Duration::from_millis(25)).await;
|
|
|
|
|
}
|
|
|
|
|
panic!("failed to connect to local tonic server");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/// Create `path` with the given text, creating missing parent directories.
///
/// Panics on any I/O failure: a fixture that cannot be written should abort
/// the test immediately rather than be handled.
fn write_file(path: &Path, content: &str) {
    // `Path::parent` yields at most one entry; iterating the `Option` covers
    // the bare-filename case (no parent) without an explicit branch.
    for parent in path.parent() {
        std::fs::create_dir_all(parent).expect("create parent");
    }
    std::fs::write(path, content).expect("write file");
}
|
|
|
|
|
|
|
|
|
|
fn write_helper(path: &Path, body: &str) {
|
|
|
|
|
write_file(path, body);
|
|
|
|
|
let mut perms = std::fs::metadata(path)
|
|
|
|
|
.expect("helper metadata")
|
|
|
|
|
.permissions();
|
|
|
|
|
perms.set_mode(0o755);
|
|
|
|
|
std::fs::set_permissions(path, perms).expect("chmod helper");
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn with_capture_power_disabled(f: impl FnOnce()) {
|
|
|
|
|
with_var("LESAVKA_CAPTURE_POWER_UNIT", Some("none"), f);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn build_handler_for_tests_with_modes(
|
|
|
|
|
kb_writable: bool,
|
|
|
|
|
ms_writable: bool,
|
|
|
|
|
) -> (tempfile::TempDir, Handler) {
|
|
|
|
|
let dir = tempdir().expect("tempdir");
|
|
|
|
|
let kb_path = dir.path().join("hidg0.bin");
|
|
|
|
|
let ms_path = dir.path().join("hidg1.bin");
|
|
|
|
|
std::fs::write(&kb_path, []).expect("create kb file");
|
|
|
|
|
std::fs::write(&ms_path, []).expect("create ms file");
|
|
|
|
|
|
|
|
|
|
let kb = tokio::fs::File::from_std(
|
|
|
|
|
std::fs::OpenOptions::new()
|
|
|
|
|
.read(true)
|
|
|
|
|
.write(kb_writable)
|
|
|
|
|
.create(kb_writable)
|
|
|
|
|
.truncate(kb_writable)
|
|
|
|
|
.open(&kb_path)
|
|
|
|
|
.expect("open kb"),
|
|
|
|
|
);
|
|
|
|
|
let ms = tokio::fs::File::from_std(
|
|
|
|
|
std::fs::OpenOptions::new()
|
|
|
|
|
.read(true)
|
|
|
|
|
.write(ms_writable)
|
|
|
|
|
.create(ms_writable)
|
|
|
|
|
.truncate(ms_writable)
|
|
|
|
|
.open(&ms_path)
|
|
|
|
|
.expect("open ms"),
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
(
|
|
|
|
|
dir,
|
|
|
|
|
Handler {
|
|
|
|
|
kb: std::sync::Arc::new(tokio::sync::Mutex::new(Some(kb))),
|
|
|
|
|
ms: std::sync::Arc::new(tokio::sync::Mutex::new(Some(ms))),
|
|
|
|
|
gadget: UsbGadget::new("lesavka"),
|
|
|
|
|
did_cycle: std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)),
|
|
|
|
|
camera_rt: std::sync::Arc::new(CameraRuntime::new()),
|
2026-04-24 14:49:57 -03:00
|
|
|
upstream_media_rt: std::sync::Arc::new(UpstreamMediaRuntime::new()),
|
2026-04-30 08:16:57 -03:00
|
|
|
calibration: std::sync::Arc::new(CalibrationStore::load(std::sync::Arc::new(
|
|
|
|
|
UpstreamMediaRuntime::new(),
|
|
|
|
|
))),
|
2026-04-23 03:49:49 -03:00
|
|
|
capture_power: CapturePowerManager::new(),
|
|
|
|
|
eye_hubs: std::sync::Arc::new(tokio::sync::Mutex::new(
|
|
|
|
|
std::collections::HashMap::new(),
|
|
|
|
|
)),
|
|
|
|
|
},
|
|
|
|
|
)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
fn build_handler_for_tests() -> (tempfile::TempDir, Handler) {
|
|
|
|
|
build_handler_for_tests_with_modes(true, true)
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
// Contract check: with no camera hardware behind the handler, `stream_camera`
// must either hand back a stream that terminates on its own or fail fast with
// a transport-level status code — it must never hang the client.
#[test]
#[serial]
fn stream_camera_reports_error_or_terminates_cleanly_without_camera_hardware() {
    let rt = tokio::runtime::Runtime::new().expect("runtime");
    rt.block_on(async {
        let (_dir, handler) = build_handler_for_tests();

        // Reserve an ephemeral port, then release it so the tonic server can
        // bind the same address.
        // NOTE(review): another process could claim the port between `drop`
        // and `serve` — a small race window accepted for test code.
        let listener = std::net::TcpListener::bind("127.0.0.1:0").expect("bind");
        let addr = listener.local_addr().expect("addr");
        drop(listener);

        // Run the relay service in the background; errors are ignored because
        // the task is aborted at the end of the test.
        let server = tokio::spawn(async move {
            let _ = tonic::transport::Server::builder()
                .add_service(RelayServer::new(handler))
                .serve(addr)
                .await;
        });

        let channel = connect_with_retry(addr).await;
        let mut cli = RelayClient::new(channel);
        // Queue exactly one outbound video packet, then close the sender so
        // the request stream ends deterministically.
        let (tx, rx) = tokio::sync::mpsc::channel(4);
        tx.send(VideoPacket {
            id: 2,
            pts: 1,
            data: vec![0, 1, 2, 3],
            ..Default::default()
        })
        .await
        .expect("send camera packet");
        drop(tx);

        let outbound = tokio_stream::wrappers::ReceiverStream::new(rx);
        let result = cli.stream_camera(tonic::Request::new(outbound)).await;
        match result {
            // A stream was established: draining one message (or the stream
            // end) is enough to show it terminates cleanly.
            Ok(mut stream) => {
                let _ = stream.get_mut().message().await;
            }
            // No stream: the failure must be one of the codes the relay is
            // allowed to surface when hardware is absent.
            Err(err) => {
                assert!(
                    matches!(
                        err.code(),
                        tonic::Code::Internal | tonic::Code::Unavailable | tonic::Code::Unknown
                    ),
                    "unexpected camera stream error code: {}",
                    err.code()
                );
            }
        }

        server.abort();
    });
}
|
|
|
|
|
|
2026-04-29 01:25:06 -03:00
|
|
|
// With the upstream playout delay forced to zero and a microphone activated
// but never fed (the audio master clock never advances), a camera stream must
// still make progress within a bounded time instead of blocking forever.
#[test]
#[serial]
fn stream_camera_drops_frames_when_audio_master_never_advances() {
    let rt = tokio::runtime::Runtime::new().expect("runtime");
    temp_env::with_var("LESAVKA_UPSTREAM_PLAYOUT_DELAY_MS", Some("0"), || {
        rt.block_on(async {
            let (_dir, handler) = build_handler_for_tests();
            // Activate the microphone but never push audio, so any logic that
            // waits on the audio master clock sees it permanently stalled.
            let _stalled_microphone = handler.upstream_media_rt.activate_microphone();

            // Reserve an ephemeral port, then free it for the server to
            // re-bind (small race window, acceptable in tests).
            let listener = std::net::TcpListener::bind("127.0.0.1:0").expect("bind");
            let addr = listener.local_addr().expect("addr");
            drop(listener);

            let server = tokio::spawn(async move {
                let _ = tonic::transport::Server::builder()
                    .add_service(RelayServer::new(handler))
                    .serve(addr)
                    .await;
            });

            let channel = connect_with_retry(addr).await;
            let mut cli = RelayClient::new(channel);
            // One packet, then close the sender so the request stream ends.
            let (tx, rx) = tokio::sync::mpsc::channel(4);
            tx.send(VideoPacket {
                id: 2,
                pts: 1,
                data: vec![0, 1, 2, 3],
                ..Default::default()
            })
            .await
            .expect("send camera packet");
            drop(tx);

            let outbound = tokio_stream::wrappers::ReceiverStream::new(rx);
            let mut resp = match cli.stream_camera(tonic::Request::new(outbound)).await {
                Ok(resp) => resp,
                // Hosts without a Lesavka video_output sink report this exact
                // Internal error; that is an acceptable early exit here.
                Err(err)
                    if err.code() == tonic::Code::Internal
                        && err.message().contains("no Lesavka video_output") =>
                {
                    server.abort();
                    return;
                }
                Err(err) => panic!("stream camera should terminate cleanly: {err:?}"),
            };
            // The stream must yield (a message or end-of-stream) within two
            // seconds even though the audio clock never advances.
            let _ = tokio::time::timeout(
                std::time::Duration::from_secs(2),
                resp.get_mut().message(),
            )
            .await
            .expect("camera stream should not block forever")
            .expect("grpc message read");

            server.abort();
        });
    });
}
|
|
|
|
|
|
2026-04-23 03:49:49 -03:00
|
|
|
// Eye-hub lifecycle coverage: a conflicting resolution request replaces the
// existing hub, an idle source shuts its hub down, and an errored stream also
// shuts its hub down. Capture power is disabled so no hardware is touched.
#[test]
#[serial]
fn shared_eye_hub_covers_conflict_idle_and_error_shutdown_paths() {
    let rt = tokio::runtime::Runtime::new().expect("runtime");
    with_capture_power_disabled(|| {
        rt.block_on(async {
            let (_dir, handler) = build_handler_for_tests();
            // Same source id, different resolutions: the second key conflicts
            // with the first and must evict it.
            let first_key = EyeHubKey {
                source_id: 0,
                requested_width: 640,
                requested_height: 480,
                requested_fps: 30,
            };
            let conflicting_key = EyeHubKey {
                source_id: 0,
                requested_width: 1280,
                requested_height: 720,
                requested_fps: 30,
            };

            // Coverage builds go through the public `eye_hub` entry point.
            #[cfg(coverage)]
            {
                let first = handler
                    .eye_hub("testsrc", first_key, 3_000)
                    .await
                    .expect("first eye hub");
                assert_eq!(handler.active_eye_source_count().await, 1);
                let replacement = handler
                    .eye_hub("testsrc", conflicting_key, 3_000)
                    .await
                    .expect("replacement eye hub");
                // The conflicting request must yield a distinct hub and stop
                // the original one.
                assert!(!std::sync::Arc::ptr_eq(&first, &replacement));
                assert!(!first.running.load(std::sync::atomic::Ordering::Relaxed));
                replacement.shutdown();
            }

            // Non-coverage builds drive the eviction helper directly with a
            // hand-inserted hub backed by a never-yielding stream.
            #[cfg(not(coverage))]
            {
                let first_lease = handler.capture_power.acquire().await;
                let first = EyeHub::spawn(
                    stream::pending::<Result<VideoPacket, tonic::Status>>(),
                    first_lease,
                );
                handler
                    .eye_hubs
                    .lock()
                    .await
                    .insert(first_key, std::sync::Arc::clone(&first));
                assert_eq!(handler.active_eye_source_count().await, 1);
                // Scope the lock so it is released before shutting hubs down.
                let stale = {
                    let mut hubs = handler.eye_hubs.lock().await;
                    take_conflicting_eye_hubs(&mut hubs, conflicting_key)
                };
                // Exactly the first hub must be reported as conflicting.
                assert_eq!(stale.len(), 1);
                assert!(std::sync::Arc::ptr_eq(&first, &stale[0]));
                for hub in stale {
                    hub.shutdown();
                }
            }

            // Idle path: 61 packets with id 0 (no consumer) should drive the
            // hub to stop on its own — presumably an internal idle/limit
            // threshold in EyeHub; confirm against its implementation.
            let manager = CapturePowerManager::new();
            let idle_lease = manager.acquire().await;
            let idle_packets = (0..61).map(|idx| {
                Ok(VideoPacket {
                    id: 0,
                    pts: idx,
                    data: vec![idx as u8],
                    ..Default::default()
                })
            });
            let idle_hub = EyeHub::spawn(stream::iter(idle_packets), idle_lease);
            // NOTE(review): 50 ms is a timing assumption — flaky under heavy
            // load if the hub needs longer to drain.
            tokio::time::sleep(std::time::Duration::from_millis(50)).await;
            assert!(!idle_hub.running.load(std::sync::atomic::Ordering::Relaxed));

            // Error path: a stream that immediately errors must also leave
            // the hub stopped.
            let error_lease = manager.acquire().await;
            let error_hub = EyeHub::spawn(
                stream::iter(vec![Err(tonic::Status::internal("boom"))]),
                error_lease,
            );
            tokio::time::sleep(std::time::Duration::from_millis(50)).await;
            assert!(!error_hub.running.load(std::sync::atomic::Ordering::Relaxed));
        });
    });
}
|
|
|
|
|
|
|
|
|
|
// Drive `restart_uvc_helper` against a fake `systemctl` script placed first
// on PATH. Each LESAVKA_FAKE_SYSTEMCTL_MODE value selects a different exit
// branch of the stub so every classification path in the helper is covered.
#[test]
#[serial]
fn uvc_helper_restart_systemctl_branches_are_classified() {
    let dir = tempdir().expect("tempdir");
    let systemctl = dir.path().join("systemctl");
    // Shell stub: the `reset-failed` and `restart` verbs honour the mode env
    // var; any other invocation falls through to `exit 1`.
    write_helper(
        &systemctl,
        r#"#!/usr/bin/env sh
case "$*" in
"reset-failed lesavka-uvc.service")
case "$LESAVKA_FAKE_SYSTEMCTL_MODE" in
resetfail) echo "reset failed first" >&2; exit 1 ;;
*) exit 0 ;;
esac
;;
"restart lesavka-uvc.service")
case "$LESAVKA_FAKE_SYSTEMCTL_MODE" in
refused) echo "Operation refused, unit may be requested by dependency only" >&2; exit 1 ;;
fail) echo "stderr detail" >&2; echo "stdout detail"; exit 1 ;;
stdout-only) echo "stdout detail"; exit 1 ;;
*) exit 0 ;;
esac
;;
esac
exit 1
"#,
    );
    // Prepend the stub directory so it shadows any real systemctl, keeping
    // the prior PATH so the shell itself still resolves.
    let prior = std::env::var("PATH").unwrap_or_default();
    let path = if prior.is_empty() {
        dir.path().display().to_string()
    } else {
        format!("{}:{prior}", dir.path().display())
    };

    with_var("PATH", Some(path), || {
        // Clear gadget sysfs/configfs overrides so the helper runs against a
        // default (non-test) gadget environment.
        with_var("LESAVKA_GADGET_SYSFS_ROOT", None::<&str>, || {
            with_var("LESAVKA_GADGET_CONFIGFS_ROOT", None::<&str>, || {
                // Default mode: both verbs succeed.
                with_var("LESAVKA_FAKE_SYSTEMCTL_MODE", None::<&str>, || {
                    restart_uvc_helper().expect("successful fake restart");
                });
                // Dependency-managed refusal is tolerated, not an error.
                with_var("LESAVKA_FAKE_SYSTEMCTL_MODE", Some("refused"), || {
                    restart_uvc_helper().expect("dependency-managed restart is acceptable");
                });
                // Generic failure: stderr is reported and the error notes
                // that stdout also carried detail.
                with_var("LESAVKA_FAKE_SYSTEMCTL_MODE", Some("fail"), || {
                    let err = restart_uvc_helper().expect_err("generic restart failure");
                    let message = err.to_string();
                    assert!(message.contains("stderr detail"));
                    assert!(message.contains("also see stdout"));
                });
                // stdout-only failure: stdout text is surfaced directly, with
                // no "also see stdout" cross-reference.
                with_var("LESAVKA_FAKE_SYSTEMCTL_MODE", Some("stdout-only"), || {
                    let err = restart_uvc_helper().expect_err("stdout-only restart failure");
                    let message = err.to_string();
                    assert!(message.contains("stdout detail"));
                    assert!(!message.contains("also see stdout"));
                });
                // A failing `reset-failed` pre-step must propagate its error.
                with_var("LESAVKA_FAKE_SYSTEMCTL_MODE", Some("resetfail"), || {
                    let err = restart_uvc_helper().expect_err("reset-failed should propagate");
                    assert!(err.to_string().contains("reset failed first"));
                });
                // Same stdout-only classification when calling run_systemctl
                // directly rather than through the helper.
                with_var("LESAVKA_FAKE_SYSTEMCTL_MODE", Some("stdout-only"), || {
                    let err = run_systemctl(&["restart", "lesavka-uvc.service"])
                        .expect_err("stdout-only direct systemctl failure");
                    let message = err.to_string();
                    assert!(message.contains("stdout detail"));
                    assert!(!message.contains("also see stdout"));
                });
                // Pure message classification, no process involved.
                assert!(uvc_helper_restart_was_dependency_refused(
                    "unit may be requested by dependency only"
                ));
                assert!(!uvc_helper_restart_was_dependency_refused("plain failure"));
            });
        });
    });
}
|
|
|
|
|
|
|
|
|
|
/// Pin the small status/delay helpers and the coverage-mode `main` bailout.
#[test]
#[serial]
fn coverage_main_and_status_helpers_report_expected_edges() {
    // Detached-gadget wording maps to Unavailable; other messages to Internal.
    let detached = remote_audio_status("remote USB gadget is not attached".to_string());
    assert_eq!(detached.code(), tonic::Code::Unavailable);
    let generic = remote_audio_status("alsa failed".to_string());
    assert_eq!(generic.code(), tonic::Code::Internal);

    // The keyboard report delay honours its millisecond env override.
    with_var("LESAVKA_LIVE_KEYBOARD_REPORT_DELAY_MS", Some("12"), || {
        let delay = live_keyboard_report_delay();
        assert_eq!(delay, std::time::Duration::from_millis(12));
    });

    // Under coverage builds, `main` must refuse to enter the live serve loop.
    #[cfg(coverage)]
    {
        let err = main().expect_err("coverage main should skip live gRPC serve loop");
        let message = err.to_string();
        assert!(message.contains("coverage mode skips live gRPC serve loop"));
    }
}
|
|
|
|
|
}
|