//! lesavka/server/src/main/relay_service_coverage.rs
//!
//! Coverage-only (`cfg(coverage)`) implementation of the Relay gRPC service.
//! (Viewer metadata from the original capture: 340 lines, 12 KiB, Rust.)
#[cfg(coverage)]
/// Budget for how far behind schedule an upstream media packet may run
/// before it is dropped instead of being played late.
///
/// Overridable through the `LESAVKA_UPSTREAM_STALE_DROP_MS` environment
/// variable (whole milliseconds); any missing or unparsable value falls
/// back to the 80 ms default.
fn upstream_stale_drop_budget() -> Duration {
    let millis = match std::env::var("LESAVKA_UPSTREAM_STALE_DROP_MS") {
        // Whitespace around the value is tolerated; parse failures use the default.
        Ok(raw) => raw.trim().parse::<u64>().unwrap_or(80),
        Err(_) => 80,
    };
    Duration::from_millis(millis)
}
#[cfg(coverage)]
/// Collapses a backlog of queued video packets down to just the newest one.
///
/// Returns the number of older packets discarded. Queues holding zero or
/// one packet are left untouched and report 0 drops.
///
/// Generalized over the element type `T` so the same latest-wins policy can
/// serve any pending-media queue; existing callers passing
/// `VecDeque<VideoPacket>` are unaffected (the parameter is inferred).
fn retain_freshest_video_packet<T>(
    pending: &mut std::collections::VecDeque<T>,
) -> usize {
    if pending.len() <= 1 {
        return 0;
    }
    // The length check above guarantees the queue is non-empty here.
    let newest = pending.pop_back().expect("non-empty pending video queue");
    let dropped = pending.len();
    pending.clear();
    pending.push_back(newest);
    dropped
}
#[cfg(coverage)]
/// Collapses a backlog of queued audio packets down to just the newest one.
///
/// Returns the number of older packets discarded. Queues holding zero or
/// one packet are left untouched and report 0 drops.
///
/// Generalized over the element type `T` so the same latest-wins policy can
/// serve any pending-media queue; existing callers passing
/// `VecDeque<AudioPacket>` are unaffected (the parameter is inferred).
fn retain_freshest_audio_packet<T>(
    pending: &mut std::collections::VecDeque<T>,
) -> usize {
    if pending.len() <= 1 {
        return 0;
    }
    // The length check above guarantees the queue is non-empty here.
    let newest = pending.pop_back().expect("non-empty pending audio queue");
    let dropped = pending.len();
    pending.clear();
    pending.push_back(newest);
    dropped
}
// Coverage-harness implementation of the Relay gRPC service surface.
// Mirrors the production RPC set, with pacing delays compiled out and
// audio capture stubbed so coverage builds exercise the plumbing quickly.
#[cfg(coverage)]
#[tonic::async_trait]
impl Relay for Handler {
    // Concrete response-stream types for each server-streaming RPC.
    type StreamKeyboardStream = ReceiverStream<Result<KeyboardReport, Status>>;
    type StreamMouseStream = ReceiverStream<Result<MouseReport, Status>>;
    type CaptureVideoStream = VideoStream;
    type CaptureAudioStream = AudioStream;
    type StreamMicrophoneStream = ReceiverStream<Result<Empty, Status>>;
    type StreamCameraStream = ReceiverStream<Result<Empty, Status>>;

    /// Forwards each inbound keyboard report to the HID gadget and echoes
    /// the report back to the caller on the response stream.
    async fn stream_keyboard(
        &self,
        req: Request<tonic::Streaming<KeyboardReport>>,
    ) -> Result<Response<Self::StreamKeyboardStream>, Status> {
        let (tx, rx) = tokio::sync::mpsc::channel(32);
        let kb = self.kb.clone();
        let report_delay = live_keyboard_report_delay();
        tokio::spawn(async move {
            let mut s = req.into_inner();
            while let Some(pkt) = s.next().await.transpose()? {
                // Best-effort write to HID endpoint 0 (keyboard); errors discarded.
                let _ = runtime_support::write_hid_report(&kb, &hid_endpoint(0), &pkt.data).await;
                tx.send(Ok(pkt)).await.ok();
                if !report_delay.is_zero() {
                    // Inside a cfg(coverage) impl this cfg(not(coverage)) sleep is
                    // always compiled out, so coverage runs never pace reports.
                    #[cfg(not(coverage))]
                    tokio::time::sleep(report_delay).await;
                }
            }
            Ok::<(), Status>(())
        });
        Ok(Response::new(ReceiverStream::new(rx)))
    }

    /// Same bridge as `stream_keyboard`, but for mouse reports on HID
    /// endpoint 1 and with no pacing delay.
    async fn stream_mouse(
        &self,
        req: Request<tonic::Streaming<MouseReport>>,
    ) -> Result<Response<Self::StreamMouseStream>, Status> {
        let (tx, rx) = tokio::sync::mpsc::channel(32);
        let ms = self.ms.clone();
        tokio::spawn(async move {
            let mut s = req.into_inner();
            while let Some(pkt) = s.next().await.transpose()? {
                let _ = runtime_support::write_hid_report(&ms, &hid_endpoint(1), &pkt.data).await;
                tx.send(Ok(pkt)).await.ok();
            }
            Ok::<(), Status>(())
        });
        Ok(Response::new(ReceiverStream::new(rx)))
    }

    /// Plays inbound audio packets into the UAC gadget sink, pacing them by
    /// the upstream media runtime's PTS plan and dropping stale packets.
    async fn stream_microphone(
        &self,
        req: Request<tonic::Streaming<AudioPacket>>,
    ) -> Result<Response<Self::StreamMicrophoneStream>, Status> {
        // Take the microphone lease; a newer stream supersedes this generation.
        let lease = self.upstream_media_rt.activate_microphone();
        let Some(microphone_sink_permit) = self
            .upstream_media_rt
            .reserve_microphone_sink(lease.generation)
            .await
        else {
            return Err(Status::aborted(
                "microphone stream superseded before sink became available",
            ));
        };
        let uac_dev = std::env::var("LESAVKA_UAC_DEV").unwrap_or_else(|_| "hw:UAC2Gadget,0".into());
        let mut sink = runtime_support::open_voice_with_retry(&uac_dev)
            .await
            .map_err(|e| {
                // Release the lease if the audio sink never opened.
                self.upstream_media_rt.close_microphone(lease.generation);
                Status::internal(format!("{e:#}"))
            })?;
        let (tx, rx) = tokio::sync::mpsc::channel(1);
        let upstream_media_rt = self.upstream_media_rt.clone();
        tokio::spawn(async move {
            // Hold the sink permit for the lifetime of the playback task.
            let _microphone_sink_permit = microphone_sink_permit;
            let mut inbound = req.into_inner();
            let mut pending = std::collections::VecDeque::new();
            let mut inbound_closed = false;
            let stale_drop_budget = upstream_stale_drop_budget();
            loop {
                // Stop as soon as a newer microphone stream takes over.
                if !upstream_media_rt.is_microphone_active(lease.generation) {
                    break;
                }
                if !inbound_closed {
                    // Poll for the next packet, but wake every 25 ms so the
                    // lease check above still runs while inbound is idle.
                    let next_packet = tokio::select! {
                        packet = inbound.next() => Some(packet),
                        _ = tokio::time::sleep(Duration::from_millis(25)) => None,
                    };
                    if let Some(next_packet) = next_packet {
                        match next_packet.transpose()? {
                            Some(pkt) => {
                                pending.push_back(pkt);
                                // Latest-wins: keep at most one queued packet.
                                let _ = retain_freshest_audio_packet(&mut pending);
                            }
                            None => inbound_closed = true,
                        }
                    }
                }
                let Some(mut pkt) = pending.pop_front() else {
                    if inbound_closed {
                        break;
                    }
                    continue;
                };
                let plan = match upstream_media_rt.plan_audio_pts(pkt.pts) {
                    lesavka_server::upstream_media_runtime::UpstreamPlanDecision::AwaitingPair => {
                        // No A/V pairing yet: requeue and retry, unless the
                        // inbound stream already ended (then drop it).
                        if inbound_closed {
                            continue;
                        }
                        pending.push_front(pkt);
                        continue;
                    }
                    lesavka_server::upstream_media_runtime::UpstreamPlanDecision::DropBeforeOverlap => {
                        continue;
                    }
                    lesavka_server::upstream_media_runtime::UpstreamPlanDecision::Play(plan) => plan,
                };
                // Drop packets that are already too far behind schedule...
                if plan.late_by > stale_drop_budget {
                    continue;
                }
                tokio::time::sleep_until(plan.due_at).await;
                // ...and re-check lateness after actually sleeping.
                let actual_late_by = tokio::time::Instant::now()
                    .checked_duration_since(plan.due_at)
                    .unwrap_or_default();
                if actual_late_by > stale_drop_budget {
                    continue;
                }
                // Rebase the packet onto the local clock before playback.
                pkt.pts = plan.local_pts_us;
                sink.push(&pkt);
            }
            // NOTE(review): the `?` on transpose() above returns early on a
            // stream error and skips this cleanup (sink.finish /
            // close_microphone) — confirm whether that is intended.
            sink.finish();
            upstream_media_rt.close_microphone(lease.generation);
            let _ = tx.send(Ok(Empty {})).await;
            Ok::<(), Status>(())
        });
        Ok(Response::new(ReceiverStream::new(rx)))
    }

    /// Feeds inbound camera frames to the UVC relay, slaving video timing to
    /// the audio master clock and dropping frames that fall too far behind.
    async fn stream_camera(
        &self,
        req: Request<tonic::Streaming<VideoPacket>>,
    ) -> Result<Response<Self::StreamCameraStream>, Status> {
        let cfg = camera::current_camera_config();
        let upstream_lease = self.upstream_media_rt.activate_camera();
        let (session_id, relay, _relay_reused) = self.camera_rt.activate(&cfg).await?;
        let camera_rt = self.camera_rt.clone();
        let upstream_media_rt = self.upstream_media_rt.clone();
        let (tx, rx) = tokio::sync::mpsc::channel(1);
        // Nominal microseconds per frame at the configured fps; both .max(1)
        // guards keep the step non-zero and avoid division by zero.
        let frame_step_us = (1_000_000u64 / u64::from(cfg.fps.max(1))).max(1);
        tokio::spawn(async move {
            let mut s = req.into_inner();
            let mut pending = std::collections::VecDeque::new();
            let mut inbound_closed = false;
            let stale_drop_budget = upstream_stale_drop_budget();
            loop {
                // Stop if either the camera session or the upstream lease lapses.
                if !camera_rt.is_active(session_id)
                    || !upstream_media_rt.is_camera_active(upstream_lease.generation)
                {
                    break;
                }
                if !inbound_closed {
                    // Poll with a 25 ms timeout so the liveness checks keep running.
                    let next_packet = tokio::select! {
                        packet = s.next() => Some(packet),
                        _ = tokio::time::sleep(Duration::from_millis(25)) => None,
                    };
                    if let Some(next_packet) = next_packet {
                        match next_packet.transpose()? {
                            Some(pkt) => {
                                pending.push_back(pkt);
                                // Latest-wins: keep at most one queued frame.
                                let _ = retain_freshest_video_packet(&mut pending);
                            }
                            None => inbound_closed = true,
                        }
                    }
                }
                let Some(mut pkt) = pending.pop_front() else {
                    if inbound_closed {
                        break;
                    }
                    continue;
                };
                let plan = match upstream_media_rt.plan_video_pts(pkt.pts, frame_step_us) {
                    lesavka_server::upstream_media_runtime::UpstreamPlanDecision::AwaitingPair => {
                        if inbound_closed {
                            continue;
                        }
                        pending.push_front(pkt);
                        continue;
                    }
                    lesavka_server::upstream_media_runtime::UpstreamPlanDecision::DropBeforeOverlap => {
                        continue;
                    }
                    lesavka_server::upstream_media_runtime::UpstreamPlanDecision::Play(plan) => plan,
                };
                // Video follows the audio master clock; skip this frame if the
                // master does not reach its PTS by the frame's deadline.
                if !upstream_media_rt
                    .wait_for_audio_master(plan.local_pts_us, plan.due_at)
                    .await
                {
                    continue;
                }
                // Stale frame: discard it and any older backlog.
                if plan.late_by > stale_drop_budget {
                    let _ = retain_freshest_video_packet(&mut pending);
                    continue;
                }
                tokio::time::sleep_until(plan.due_at).await;
                // Re-check lateness after sleeping, in case the runtime lagged.
                let actual_late_by = tokio::time::Instant::now()
                    .checked_duration_since(plan.due_at)
                    .unwrap_or_default();
                if actual_late_by > stale_drop_budget {
                    let _ = retain_freshest_video_packet(&mut pending);
                    continue;
                }
                // Rebase onto the local clock and hand the frame to the relay.
                pkt.pts = plan.local_pts_us;
                relay.feed(pkt);
            }
            upstream_media_rt.close_camera(upstream_lease.generation);
            tx.send(Ok(Empty {})).await.ok();
            Ok::<(), Status>(())
        });
        Ok(Response::new(ReceiverStream::new(rx)))
    }

    /// Delegates to the shared capture-video reply helper.
    async fn capture_video(
        &self,
        req: Request<MonitorRequest>,
    ) -> Result<Response<Self::CaptureVideoStream>, Status> {
        self.capture_video_reply(req.into_inner()).await
    }

    /// Audio capture is deliberately unsupported in the coverage harness.
    async fn capture_audio(
        &self,
        _req: Request<MonitorRequest>,
    ) -> Result<Response<Self::CaptureAudioStream>, Status> {
        Err(Status::internal(
            "audio capture unavailable in coverage harness",
        ))
    }

    // The remaining RPCs are thin delegations to Handler's *_reply helpers.

    async fn paste_text(&self, req: Request<PasteRequest>) -> Result<Response<PasteReply>, Status> {
        self.paste_text_reply(req).await
    }
    async fn reset_usb(&self, _req: Request<Empty>) -> Result<Response<ResetUsbReply>, Status> {
        self.reset_usb_reply().await
    }
    async fn recover_usb(
        &self,
        _req: Request<Empty>,
    ) -> Result<Response<ResetUsbReply>, Status> {
        self.recover_usb_reply().await
    }
    async fn recover_uac(
        &self,
        _req: Request<Empty>,
    ) -> Result<Response<ResetUsbReply>, Status> {
        self.recover_uac_reply().await
    }
    async fn recover_uvc(
        &self,
        _req: Request<Empty>,
    ) -> Result<Response<ResetUsbReply>, Status> {
        self.recover_uvc_reply().await
    }
    async fn get_capture_power(
        &self,
        _req: Request<Empty>,
    ) -> Result<Response<CapturePowerState>, Status> {
        self.get_capture_power_reply().await
    }
    async fn set_capture_power(
        &self,
        req: Request<SetCapturePowerRequest>,
    ) -> Result<Response<CapturePowerState>, Status> {
        self.set_capture_power_reply(req).await
    }
    async fn get_calibration(
        &self,
        _req: Request<Empty>,
    ) -> Result<Response<CalibrationState>, Status> {
        self.get_calibration_reply().await
    }
    async fn calibrate(
        &self,
        req: Request<CalibrationRequest>,
    ) -> Result<Response<CalibrationState>, Status> {
        self.calibrate_reply(req).await
    }
}