lesavka/server/src/main/relay_service.rs

243 lines
9.5 KiB
Rust
Raw Normal View History

/*──────────────── gRPC service ─────────────*/
// Main tonic service implementation for the relay. Each streaming RPC spawns
// a detached background task that pumps packets from the inbound gRPC stream
// into the corresponding device path (HID gadget endpoint, UAC audio sink,
// camera relay) and acknowledges/echoes on a small mpsc channel returned to
// the caller as a ReceiverStream.
#[cfg(not(coverage))]
#[tonic::async_trait]
impl Relay for Handler {
/* existing streams ─ unchanged, except: no more auto-reset */
type StreamKeyboardStream = ReceiverStream<Result<KeyboardReport, Status>>;
type StreamMouseStream = ReceiverStream<Result<MouseReport, Status>>;
type CaptureVideoStream = VideoStream;
type CaptureAudioStream = AudioStream;
type StreamMicrophoneStream = ReceiverStream<Result<Empty, Status>>;
type StreamCameraStream = ReceiverStream<Result<Empty, Status>>;
/// Bidirectional keyboard stream: each inbound report is written to the
/// keyboard HID endpoint, then echoed back to the caller.
async fn stream_keyboard(
&self,
req: Request<tonic::Streaming<KeyboardReport>>,
) -> Result<Response<Self::StreamKeyboardStream>, Status> {
let rpc_id = runtime_support::next_stream_id();
info!(rpc_id, "⌨️ stream_keyboard opened");
// Small buffer — keyboard traffic is low-rate (compare mouse's 1024 below).
let (tx, rx) = tokio::sync::mpsc::channel(32);
let kb = self.kb.clone();
let ms = self.ms.clone();
// Endpoint 0 = keyboard, 1 = mouse. Both paths (and both devices) are
// captured so recovery below can act on the whole gadget, not just the
// endpoint that failed.
let kb_path = hid_endpoint(0);
let ms_path = hid_endpoint(1);
let gadget = self.gadget.clone();
let did_cycle = self.did_cycle.clone();
// Acquired before spawning and moved into the task as _session_lease:
// capture power stays asserted for as long as this stream is alive.
let session_lease = self.capture_power.acquire_session().await;
let report_delay = live_keyboard_report_delay();
tokio::spawn(async move {
let _session_lease = session_lease;
let mut s = req.into_inner();
// transpose(): Option<Result<_>> → Result<Option<_>> so `?` ends the
// task on a broken inbound stream.
while let Some(pkt) = s.next().await.transpose()? {
if let Err(e) = runtime_support::write_hid_report(&kb, &kb_path, &pkt.data).await {
if e.raw_os_error() == Some(libc::EAGAIN) {
// Non-blocking write would block: drop this report rather than stall.
debug!(rpc_id, "⌨️ write would block (dropped)");
} else {
warn!(rpc_id, "⌨️ write failed: {e} (dropped)");
// Any other error may mean the gadget is wedged — let the helper
// decide whether to cycle it.
runtime_support::recover_hid_if_needed(
&e,
gadget.clone(),
kb.clone(),
ms.clone(),
kb_path.clone(),
ms_path.clone(),
did_cycle.clone(),
)
.await;
}
}
// Echo the report back; .ok() ignores a client that stopped listening.
tx.send(Ok(pkt)).await.ok();
// Optional pacing between reports; zero means no throttling.
if !report_delay.is_zero() {
tokio::time::sleep(report_delay).await;
}
}
info!(rpc_id, "⌨️ stream_keyboard closed");
Ok::<(), Status>(())
});
Ok(Response::new(ReceiverStream::new(rx)))
}
/// Bidirectional mouse stream: same pump-and-echo pattern as the keyboard,
/// but with a deeper channel (mouse reports arrive at much higher rate)
/// and no inter-report delay.
async fn stream_mouse(
&self,
req: Request<tonic::Streaming<MouseReport>>,
) -> Result<Response<Self::StreamMouseStream>, Status> {
let rpc_id = runtime_support::next_stream_id();
info!(rpc_id, "🖱️ stream_mouse opened");
let (tx, rx) = tokio::sync::mpsc::channel(1024);
let ms = self.ms.clone();
let kb = self.kb.clone();
// Endpoint 0 = keyboard, 1 = mouse; both captured for whole-gadget recovery.
let kb_path = hid_endpoint(0);
let ms_path = hid_endpoint(1);
let gadget = self.gadget.clone();
let did_cycle = self.did_cycle.clone();
// Held by the task below so capture power stays on while streaming.
let session_lease = self.capture_power.acquire_session().await;
tokio::spawn(async move {
let _session_lease = session_lease;
let mut s = req.into_inner();
while let Some(pkt) = s.next().await.transpose()? {
if let Err(e) = runtime_support::write_hid_report(&ms, &ms_path, &pkt.data).await {
if e.raw_os_error() == Some(libc::EAGAIN) {
// Would-block on the non-blocking HID fd: intentionally dropped.
debug!(rpc_id, "🖱️ write would block (dropped)");
} else {
warn!(rpc_id, "🖱️ write failed: {e} (dropped)");
runtime_support::recover_hid_if_needed(
&e,
gadget.clone(),
kb.clone(),
ms.clone(),
kb_path.clone(),
ms_path.clone(),
did_cycle.clone(),
)
.await;
}
}
tx.send(Ok(pkt)).await.ok();
}
info!(rpc_id, "🖱️ stream_mouse closed");
Ok::<(), Status>(())
});
Ok(Response::new(ReceiverStream::new(rx)))
}
/// Accept synthetic upstream microphone packets without ALSA hardware.
async fn stream_microphone(
&self,
req: Request<tonic::Streaming<AudioPacket>>,
) -> Result<Response<Self::StreamMicrophoneStream>, Status> {
let rpc_id = runtime_support::next_stream_id();
info!(rpc_id, "🎤 stream_microphone opened");
// 1 ─ build once, early
// Device is overridable via LESAVKA_UAC_DEV; opening the sink *before*
// spawning lets a failure surface to the caller as a gRPC error.
let uac_dev = std::env::var("LESAVKA_UAC_DEV").unwrap_or_else(|_| "hw:UAC2Gadget,0".into());
info!(%uac_dev, "🎤 stream_microphone using UAC sink");
let mut sink = runtime_support::open_voice_with_retry(&uac_dev)
.await
.map_err(|e| Status::internal(format!("{e:#}")))?;
// 2 ─ dummy outbound stream (same trick as before)
// Capacity 1 suffices: a single Empty is sent only at end-of-stream.
let (tx, rx) = tokio::sync::mpsc::channel(1);
// 3 ─ drive the sink in a background task
tokio::spawn(async move {
let mut inbound = req.into_inner();
// Process-wide packet counter, used only to throttle logging below
// (first 5 packets, then every 3000th).
static CNT: std::sync::atomic::AtomicU64 = std::sync::atomic::AtomicU64::new(0);
while let Some(pkt) = inbound.next().await.transpose()? {
let n = CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
if n < 5 || n.is_multiple_of(3_000) {
tracing::info!(rpc_id, "🎤⬇ srv pkt#{n} {} bytes", pkt.data.len());
}
sink.push(&pkt);
}
sink.finish(); // flush on EOS
let _ = tx.send(Ok(Empty {})).await;
info!(rpc_id, "🎤 stream_microphone closed");
Ok::<(), Status>(())
});
Ok(Response::new(ReceiverStream::new(rx)))
}
/// Accept synthetic upstream webcam packets without UVC/HDMI hardware.
async fn stream_camera(
&self,
req: Request<tonic::Streaming<VideoPacket>>,
) -> Result<Response<Self::StreamCameraStream>, Status> {
let rpc_id = runtime_support::next_stream_id();
let cfg = camera::current_camera_config();
info!(
rpc_id,
output = cfg.output.as_str(),
codec = cfg.codec.as_str(),
width = cfg.width,
height = cfg.height,
fps = cfg.fps,
hdmi = cfg.hdmi.as_ref().map(|h| h.name.as_str()).unwrap_or("none"),
"🎥 stream_camera output selected"
);
// activate() hands back a session id plus the relay to feed packets into.
let (session_id, relay) = self.camera_rt.activate(&cfg).await?;
let camera_rt = self.camera_rt.clone();
info!(rpc_id, session_id, "🎥 stream_camera opened");
// dummy outbound (same pattern as other streams)
let (tx, rx) = tokio::sync::mpsc::channel(1);
tokio::spawn(async move {
let mut s = req.into_inner();
while let Some(pkt) = s.next().await.transpose()? {
// A newer stream_camera call can supersede this session; stop
// feeding as soon as ours is no longer the active one.
if !camera_rt.is_active(session_id) {
info!(rpc_id, session_id, "🎥 stream_camera session superseded");
break;
}
relay.feed(pkt); // ← all logging inside video.rs
}
tx.send(Ok(Empty {})).await.ok();
info!(rpc_id, session_id, "🎥 stream_camera closed");
Ok::<(), Status>(())
});
Ok(Response::new(ReceiverStream::new(rx)))
}
/// Thin delegation to the handler's capture_video_reply helper.
async fn capture_video(
&self,
req: Request<MonitorRequest>,
) -> Result<Response<Self::CaptureVideoStream>, Status> {
self.capture_video_reply(req.into_inner()).await
}
/// Open the speaker capture stream from the UAC ALSA device.
async fn capture_audio(
&self,
req: Request<MonitorRequest>,
) -> Result<Response<Self::CaptureAudioStream>, Status> {
let rpc_id = runtime_support::next_stream_id();
// Only one speaker stream for now; both 0/1 → same ALSA dev.
let _id = req.into_inner().id;
// Allow override (`LESAVKA_ALSA_DEV=hw:2,0` for debugging).
let dev = std::env::var("LESAVKA_ALSA_DEV").unwrap_or_else(|_| "hw:UAC2Gadget,0".into());
info!(rpc_id, %dev, "🔊 capture_audio opened");
// remote_audio_status maps a detached-gadget message to `unavailable`
// instead of `internal` so clients can treat it as transient.
let s = runtime_support::open_ear_with_retry(&dev, 0)
.await
.map_err(|e| remote_audio_status(format!("{e:#}")))?;
Ok(Response::new(Box::pin(s)))
}
/// Thin delegation to the handler's paste_text_reply helper.
async fn paste_text(&self, req: Request<PasteRequest>) -> Result<Response<PasteReply>, Status> {
self.paste_text_reply(req).await
}
/*────────────── USB-reset RPC ────────────*/
async fn reset_usb(&self, _req: Request<Empty>) -> Result<Response<ResetUsbReply>, Status> {
self.reset_usb_reply().await
}
/// Read the current capture-power state (delegated).
async fn get_capture_power(
&self,
_req: Request<Empty>,
) -> Result<Response<CapturePowerState>, Status> {
self.get_capture_power_reply().await
}
/// Change the capture-power state (delegated).
async fn set_capture_power(
&self,
req: Request<SetCapturePowerRequest>,
) -> Result<Response<CapturePowerState>, Status> {
self.set_capture_power_reply(req).await
}
}
/// Map an audio-capture error message onto a gRPC `Status`.
///
/// A message reporting that the remote USB gadget is not attached describes a
/// transient condition, so it becomes `unavailable`; every other message is
/// surfaced as an `internal` server error. The original message text is kept
/// verbatim as the status detail either way.
fn remote_audio_status(message: String) -> Status {
    match message.contains("remote USB gadget is not attached") {
        true => Status::unavailable(message),
        false => Status::internal(message),
    }
}