//! Integration coverage for server binary startup and RPC guards.
//!
//! Scope: include sanitized `server/src/main.rs` and execute startup/runtime
//! error branches directly so llvm-cov attributes lines to the entrypoint file.
//! Targets: `server/src/main.rs`.
//! Why: subprocess-only coverage does not reliably move binary file coverage.

// The server entrypoint source is textually included below, so every item it
// defines (`Handler`, `UsbGadget`, `CameraRuntime`, `CapturePowerManager`,
// the generated request/response types, tonic service impls, ...) is in scope
// for these tests. Warnings are blanket-allowed because that file is being
// compiled in a context it was not written for (test crate, unused items).
#[allow(warnings)]
mod server_main_binary {
    // The build script exports LESAVKA_SERVER_MAIN_SRC with the path to the
    // sanitized copy of `server/src/main.rs`; `include!` splices it in here.
    include!(env!("LESAVKA_SERVER_MAIN_SRC"));

    use lesavka_common::lesavka::relay_client::RelayClient;
    use serial_test::serial;
    use temp_env::with_var;
    use tempfile::tempdir;

    /// Builds a `Handler` whose keyboard/mouse HID endpoints are backed by
    /// plain files in a fresh temp dir instead of real `/dev/hidg*` nodes.
    ///
    /// `kb_writable` / `ms_writable` select whether the corresponding file is
    /// opened read-write (also created/truncated) or read-only.
    ///
    /// Returns the `TempDir` guard alongside the handler — callers must keep
    /// the guard alive (even as `_dir`) or the backing files are deleted.
    fn build_handler_for_tests_with_modes(
        kb_writable: bool,
        ms_writable: bool,
    ) -> (tempfile::TempDir, Handler) {
        let dir = tempdir().expect("tempdir");
        let kb_path = dir.path().join("hidg0.bin");
        let ms_path = dir.path().join("hidg1.bin");
        // Pre-create both files empty so the read-only open mode can succeed.
        std::fs::write(&kb_path, []).expect("create kb file");
        std::fs::write(&ms_path, []).expect("create ms file");
        // write/create/truncate are all gated on the same flag: writable mode
        // opens read-write on a fresh (truncated) file, otherwise read-only.
        let kb_std = std::fs::OpenOptions::new()
            .read(true)
            .write(kb_writable)
            .create(kb_writable)
            .truncate(kb_writable)
            .open(&kb_path)
            .expect("open kb");
        let ms_std = std::fs::OpenOptions::new()
            .read(true)
            .write(ms_writable)
            .create(ms_writable)
            .truncate(ms_writable)
            .open(&ms_path)
            .expect("open ms");
        let kb = tokio::fs::File::from_std(kb_std);
        let ms = tokio::fs::File::from_std(ms_std);
        (
            dir,
            // Field set mirrors the `Handler` declared in the included server
            // main source; endpoints start as `Some(file)` (already "open").
            Handler {
                kb: std::sync::Arc::new(tokio::sync::Mutex::new(Some(kb))),
                ms: std::sync::Arc::new(tokio::sync::Mutex::new(Some(ms))),
                gadget: UsbGadget::new("lesavka"),
                did_cycle: std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)),
                camera_rt: std::sync::Arc::new(CameraRuntime::new()),
                capture_power: CapturePowerManager::new(),
                eye_hubs: std::sync::Arc::new(tokio::sync::Mutex::new(
                    std::collections::HashMap::new(),
                )),
            },
        )
    }

    /// Convenience wrapper: handler with both HID endpoints writable.
    fn build_handler_for_tests() -> (tempfile::TempDir, Handler) {
        build_handler_for_tests_with_modes(true, true)
    }

    /// Connects to a locally spawned tonic server, retrying up to 40 times at
    /// 25 ms intervals (~1 s budget) to absorb bind/accept races; panics if
    /// the server never becomes reachable.
    // NOTE(review): not called from the tests visible in this file — presumably
    // used by server-socket tests elsewhere or kept for future ones; verify.
    async fn connect_with_retry(addr: std::net::SocketAddr) -> tonic::transport::Channel {
        let endpoint = tonic::transport::Endpoint::from_shared(format!("http://{addr}"))
            .expect("endpoint")
            .tcp_nodelay(true);
        for _ in 0..40 {
            if let Ok(channel) = endpoint.clone().connect().await {
                return channel;
            }
            tokio::time::sleep(std::time::Duration::from_millis(25)).await;
        }
        panic!("failed to connect to local tonic server");
    }

    /// UVC disabled + gadget cycling not allowed: `Handler::new` must still
    /// succeed when the HID directory does not exist, leaving both endpoints
    /// `None` (to be lazily opened later).
    // `#[serial]`: these tests mutate process-global env vars via `with_var`,
    // so they must not run concurrently with each other.
    #[test]
    #[serial]
    fn handler_new_tolerates_missing_hid_nodes_without_cycle() {
        let dir = tempdir().expect("tempdir");
        with_var("LESAVKA_DISABLE_UVC", Some("1"), || {
            with_var("LESAVKA_ALLOW_GADGET_CYCLE", None::<&str>, || {
                with_var(
                    "LESAVKA_HID_DIR",
                    // Point at a path that is guaranteed not to exist.
                    Some(dir.path().join("missing").to_string_lossy().to_string()),
                    || {
                        let rt = tokio::runtime::Runtime::new().expect("runtime");
                        let handler = rt
                            .block_on(Handler::new(UsbGadget::new("lesavka")))
                            .expect("server should stay up while HID endpoints appear");
                        // Both endpoints must be lazily-absent, not errors.
                        let endpoints = rt.block_on(async {
                            (
                                handler.kb.lock().await.is_none(),
                                handler.ms.lock().await.is_none(),
                            )
                        });
                        assert_eq!(endpoints, (true, true));
                    },
                );
            });
        });
    }

    /// External-UVC mode (`LESAVKA_UVC_EXTERNAL=1`, UVC not disabled) must
    /// likewise tolerate a missing HID directory with both endpoints `None`.
    #[test]
    #[serial]
    fn handler_new_tolerates_missing_hid_nodes_with_external_uvc() {
        let dir = tempdir().expect("tempdir");
        with_var("LESAVKA_DISABLE_UVC", None::<&str>, || {
            with_var("LESAVKA_UVC_EXTERNAL", Some("1"), || {
                with_var("LESAVKA_ALLOW_GADGET_CYCLE", None::<&str>, || {
                    with_var(
                        "LESAVKA_HID_DIR",
                        Some(dir.path().join("missing").to_string_lossy().to_string()),
                        || {
                            let rt = tokio::runtime::Runtime::new().expect("runtime");
                            let handler = rt
                                .block_on(Handler::new(UsbGadget::new("lesavka")))
                                .expect("external UVC mode should still tolerate missing HID");
                            let endpoints = rt.block_on(async {
                                (
                                    handler.kb.lock().await.is_none(),
                                    handler.ms.lock().await.is_none(),
                                )
                            });
                            assert_eq!(endpoints, (true, true));
                        },
                    );
                });
            });
        });
    }

    /// Gadget cycling allowed AND the UVC control helper pointed at a binary
    /// that cannot exist: startup must still succeed with lazy HID endpoints.
    #[test]
    #[serial]
    fn handler_new_tolerates_missing_hid_nodes_when_cycle_is_enabled() {
        let dir = tempdir().expect("tempdir");
        with_var("LESAVKA_DISABLE_UVC", None::<&str>, || {
            with_var("LESAVKA_UVC_EXTERNAL", None::<&str>, || {
                with_var(
                    "LESAVKA_UVC_CTRL_BIN",
                    // Deliberately nonexistent helper binary path.
                    Some("/definitely/missing/uvc-helper"),
                    || {
                        with_var("LESAVKA_ALLOW_GADGET_CYCLE", Some("1"), || {
                            with_var(
                                "LESAVKA_HID_DIR",
                                Some(dir.path().join("missing").to_string_lossy().to_string()),
                                || {
                                    let rt = tokio::runtime::Runtime::new().expect("runtime");
                                    let handler = rt
                                        .block_on(Handler::new(UsbGadget::new("lesavka")))
                                        .expect(
                                            "cycle-enabled startup should tolerate missing HID",
                                        );
                                    let endpoints = rt.block_on(async {
                                        (
                                            handler.kb.lock().await.is_none(),
                                            handler.ms.lock().await.is_none(),
                                        )
                                    });
                                    assert_eq!(endpoints, (true, true));
                                },
                            );
                        });
                    },
                );
            });
        });
    }

    /// Default env (no cycle override, no UVC overrides): missing HID nodes
    /// yield `None` endpoints rather than a startup failure.
    #[test]
    #[serial]
    fn handler_new_opens_missing_hid_endpoints_as_lazy_none() {
        let dir = tempdir().expect("tempdir");
        with_var("LESAVKA_ALLOW_GADGET_CYCLE", None::<&str>, || {
            with_var(
                "LESAVKA_HID_DIR",
                Some(dir.path().join("missing").to_string_lossy().to_string()),
                || {
                    let rt = tokio::runtime::Runtime::new().expect("runtime");
                    let handler = rt
                        .block_on(Handler::new(UsbGadget::new("lesavka")))
                        .expect("missing hid nodes should be lazy-opened later");
                    let endpoints = rt.block_on(async {
                        (
                            handler.kb.lock().await.is_none(),
                            handler.ms.lock().await.is_none(),
                        )
                    });
                    assert_eq!(endpoints, (true, true));
                },
            );
        });
    }

    /// With cycling explicitly enabled (but no real gadget sysfs present),
    /// `Handler::new` still succeeds and keeps HID endpoints lazy.
    #[test]
    #[serial]
    fn handler_new_attempts_cycle_when_explicitly_enabled() {
        let dir = tempdir().expect("tempdir");
        with_var("LESAVKA_ALLOW_GADGET_CYCLE", Some("1"), || {
            with_var(
                "LESAVKA_HID_DIR",
                Some(dir.path().join("missing").to_string_lossy().to_string()),
                || {
                    let rt = tokio::runtime::Runtime::new().expect("runtime");
                    let handler = rt
                        .block_on(Handler::new(UsbGadget::new("lesavka")))
                        .expect("cycle-enabled startup should still tolerate lazy HID");
                    let endpoints = rt.block_on(async {
                        (
                            handler.kb.lock().await.is_none(),
                            handler.ms.lock().await.is_none(),
                        )
                    });
                    assert_eq!(endpoints, (true, true));
                },
            );
        });
    }

    /// An out-of-range monitor id (9) must be rejected by `capture_video`
    /// with `InvalidArgument` before any capture work begins.
    #[test]
    #[serial]
    fn capture_video_rejects_invalid_monitor_id() {
        let (_dir, handler) = build_handler_for_tests();
        let rt = tokio::runtime::Runtime::new().expect("runtime");
        let result = rt.block_on(async {
            handler
                .capture_video(tonic::Request::new(MonitorRequest {
                    id: 9,
                    max_bitrate: 4_000,
                    requested_width: 0,
                    requested_height: 0,
                    requested_fps: 0,
                    source_id: None,
                }))
                .await
        });
        let err = match result {
            Ok(_) => panic!("invalid monitor id must be rejected"),
            Err(err) => err,
        };
        assert_eq!(err.code(), tonic::Code::InvalidArgument);
    }

    /// `paste_text` must refuse requests with `encrypted: false` (plaintext)
    /// and report `Unauthenticated`.
    #[test]
    #[serial]
    fn paste_text_rejects_plaintext_requests() {
        let (_dir, handler) = build_handler_for_tests();
        let req = PasteRequest {
            nonce: vec![],
            data: vec![],
            encrypted: false,
        };
        let rt = tokio::runtime::Runtime::new().expect("runtime");
        let result = rt.block_on(async { handler.paste_text(tonic::Request::new(req)).await });
        let err = match result {
            Ok(_) => panic!("plaintext paste request should be rejected"),
            Err(err) => err,
        };
        assert_eq!(err.code(), tonic::Code::Unauthenticated);
    }

    /// Without the gadget sysfs tree available in the test environment, a USB
    /// reset cycle cannot succeed, so `reset_usb` must map the failure to
    /// `Internal`.
    #[test]
    #[serial]
    fn reset_usb_returns_internal_status_when_cycle_fails() {
        let (_dir, handler) = build_handler_for_tests();
        let rt = tokio::runtime::Runtime::new().expect("runtime");
        let result = rt.block_on(async { handler.reset_usb(tonic::Request::new(Empty {})).await });
        let err = match result {
            Ok(_) => panic!("cycle should fail without gadget sysfs"),
            Err(err) => err,
        };
        assert_eq!(err.code(), tonic::Code::Internal);
    }

    /// With no ALSA device present in the test environment, `capture_audio`
    /// must surface the setup failure as `Internal`.
    // NOTE(review): the test name says "sink" but the panic message says
    // "source" — naming mismatch only; the asserted status is unaffected.
    #[test]
    #[serial]
    fn capture_audio_returns_internal_status_when_sink_is_missing() {
        let (_dir, handler) = build_handler_for_tests();
        let req = MonitorRequest {
            id: 0,
            max_bitrate: 0,
            requested_width: 0,
            requested_height: 0,
            requested_fps: 0,
            source_id: None,
        };
        let rt = tokio::runtime::Runtime::new().expect("runtime");
        let result = rt.block_on(async { handler.capture_audio(tonic::Request::new(req)).await });
        let err = match result {
            Ok(_) => panic!("missing ALSA source should fail"),
            Err(err) => err,
        };
        assert_eq!(err.code(), tonic::Code::Internal);
    }
}