fix(upstream): surface active webcam and mic probes

This commit is contained in:
Brad Stein 2026-04-22 05:46:31 -03:00
parent d0e98f42a5
commit ef1b7ee3a7
19 changed files with 1174 additions and 89 deletions

View File

@ -4,7 +4,7 @@ path = "src/main.rs"
[package]
name = "lesavka_client"
version = "0.11.46"
version = "0.11.47"
edition = "2024"
[dependencies]

View File

@ -4,6 +4,20 @@ use gst::prelude::*;
use gstreamer as gst;
use gstreamer_app as gst_app;
use lesavka_common::lesavka::VideoPacket;
use std::{
io::Write,
path::{Path, PathBuf},
sync::{
Arc,
atomic::{AtomicBool, Ordering},
},
thread,
time::Duration,
};
const CAMERA_PREVIEW_TAP_ENV: &str = "LESAVKA_UPLINK_CAMERA_PREVIEW";
const CAMERA_PREVIEW_WIDTH: i32 = 128;
const CAMERA_PREVIEW_HEIGHT: i32 = 72;
fn env_u32(name: &str, default: u32) -> u32 {
std::env::var(name)
@ -30,6 +44,7 @@ pub struct CameraCapture {
#[allow(dead_code)] // kept alive to hold PLAYING state
pipeline: gst::Pipeline,
sink: gst_app::AppSink,
preview_tap_running: Option<Arc<AtomicBool>>,
}
impl CameraCapture {
@ -99,19 +114,7 @@ impl CameraCapture {
if use_mjpg_source && !output_mjpeg {
tracing::info!("📸 using MJPG source with software encode");
}
let _enc_opts = if enc == "x264enc" {
let bitrate_kbit = env_u32("LESAVKA_CAM_H264_KBIT", 4500);
let keyframe_opt = kf_prop
.map(|property| format!(" {property}={keyframe_interval}"))
.unwrap_or_default();
format!(
"{enc} tune=zerolatency speed-preset=faster bitrate={bitrate_kbit}{keyframe_opt}"
)
} else if let Some(property) = kf_prop {
format!("{enc} {property}={keyframe_interval}")
} else {
enc.to_string()
};
let _enc_opts = Self::encoder_options(enc, kf_prop, keyframe_interval);
if output_mjpeg {
tracing::info!("📸 outputting MJPEG frames for UVC (quality={jpeg_quality})");
} else {
@ -155,7 +158,61 @@ impl CameraCapture {
// * nvh264enc needs NVMM memory caps;
// * vaapih264enc wants system-memory caps;
// * x264enc needs the usual raw caps.
let desc = if output_mjpeg {
let preview_tap_path = camera_preview_tap_path();
let desc = if preview_tap_path.is_some() {
if output_mjpeg {
if use_mjpg_source {
format!(
"{src_desc} ! \
image/jpeg,width={width},height={height},framerate={fps}/1 ! \
tee name=t \
t. ! queue max-size-buffers=30 leaky=downstream ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true \
t. ! queue max-size-buffers=2 leaky=downstream ! jpegdec ! \
{}",
camera_preview_tap_branch()
)
} else {
format!(
"{src_desc} ! \
video/x-raw,width={width},height={height},framerate={fps}/1 ! \
tee name=t \
t. ! queue max-size-buffers=30 leaky=downstream ! \
videoconvert ! jpegenc quality={jpeg_quality} ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true \
t. ! queue max-size-buffers=2 leaky=downstream ! \
{}",
camera_preview_tap_branch()
)
}
} else if use_mjpg_source {
format!(
"{src_desc} ! \
image/jpeg,width={width},height={height} ! \
jpegdec ! videorate ! video/x-raw,framerate={fps}/1 ! \
tee name=t \
t. ! queue max-size-buffers=30 leaky=downstream ! \
videoconvert ! {_enc_opts} ! \
h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true \
t. ! queue max-size-buffers=2 leaky=downstream ! \
{}",
camera_preview_tap_branch()
)
} else {
format!(
"{src_desc} ! {src_caps} ! \
tee name=t \
t. ! queue max-size-buffers=30 leaky=downstream ! \
{preenc} {_enc_opts} ! \
h264parse config-interval=-1 ! video/x-h264,stream-format=byte-stream,alignment=au ! \
appsink name=asink emit-signals=true max-buffers=60 drop=true \
t. ! queue max-size-buffers=2 leaky=downstream ! \
{}",
camera_preview_tap_branch()
)
}
} else if output_mjpeg {
if use_mjpg_source {
format!(
"{src_desc} ! \
@ -208,7 +265,22 @@ impl CameraCapture {
pipeline.set_state(gst::State::Playing)?;
tracing::info!("📸 webcam pipeline ▶️ device={dev_label}");
Ok(Self { pipeline, sink })
let preview_tap_running = if let Some(path) = preview_tap_path {
let preview_sink = pipeline
.by_name("preview_sink")
.context("missing camera preview tap appsink")?
.downcast::<gst_app::AppSink>()
.expect("camera preview tap appsink");
Some(spawn_camera_preview_tap(preview_sink, path))
} else {
None
};
Ok(Self {
pipeline,
sink,
preview_tap_running,
})
}
pub fn pull(&self) -> Option<VideoPacket> {
@ -315,7 +387,11 @@ impl CameraCapture {
.expect("appsink")
.downcast::<gst_app::AppSink>()
.unwrap();
Self { pipeline, sink }
Self {
pipeline,
sink,
preview_tap_running: None,
}
}
#[allow(dead_code)] // helper kept for future heuristics
@ -380,6 +456,86 @@ impl CameraCapture {
_ => ("x264enc", Some("key-int-max")),
}
}
/// Build the gst-launch fragment configuring the H.264 encoder element.
///
/// `x264enc` gets a low-latency tune plus a bitrate from
/// `LESAVKA_CAM_H264_KBIT` (default 4500 kbit/s); other encoders only
/// receive their keyframe-interval property when one is known.
fn encoder_options(
    enc: &'static str,
    kf_prop: Option<&'static str>,
    keyframe_interval: u32,
) -> String {
    if enc == "x264enc" {
        let bitrate_kbit = env_u32("LESAVKA_CAM_H264_KBIT", 4500);
        let keyframe_opt = match kf_prop {
            Some(property) => format!(" {property}={keyframe_interval}"),
            None => String::new(),
        };
        return format!(
            "{enc} tune=zerolatency speed-preset=faster bitrate={bitrate_kbit}{keyframe_opt}"
        );
    }
    match kf_prop {
        Some(property) => format!("{enc} {property}={keyframe_interval}"),
        None => enc.to_string(),
    }
}
}
/// Optional path for the local camera preview tap, taken from the
/// `LESAVKA_UPLINK_CAMERA_PREVIEW` env var; unset or blank disables the tap.
fn camera_preview_tap_path() -> Option<PathBuf> {
    let raw = std::env::var(CAMERA_PREVIEW_TAP_ENV).ok()?;
    let trimmed = raw.trim();
    if trimmed.is_empty() {
        None
    } else {
        Some(PathBuf::from(trimmed))
    }
}
/// gst-launch fragment for the preview branch hung off the camera tee.
///
/// Converts to a fixed 128x72 RGBA stream at 10 fps and terminates in the
/// `preview_sink` appsink drained by `spawn_camera_preview_tap`.
/// `max-buffers=1 drop=true` keeps only the newest frame so this branch can
/// never backpressure the main capture path.
fn camera_preview_tap_branch() -> String {
    format!(
        "videoconvert ! videoscale ! videorate ! \
         video/x-raw,format=RGBA,width={CAMERA_PREVIEW_WIDTH},height={CAMERA_PREVIEW_HEIGHT},framerate=10/1,pixel-aspect-ratio=1/1 ! \
         appsink name=preview_sink emit-signals=false sync=false max-buffers=1 drop=true"
    )
}
/// Spawn a thread that drains `sink` and publishes each preview frame to `path`.
///
/// Returns the shared stop flag; storing `false` in it (done in
/// `CameraCapture::drop`) makes the thread exit after at most one 250 ms
/// pull timeout.
fn spawn_camera_preview_tap(sink: gst_app::AppSink, path: PathBuf) -> Arc<AtomicBool> {
    let running = Arc::new(AtomicBool::new(true));
    let thread_running = Arc::clone(&running);
    thread::spawn(move || {
        while thread_running.load(Ordering::Acquire) {
            // Bounded wait so the stop flag is rechecked even when no frames arrive.
            if let Some(sample) = sink.try_pull_sample(gst::ClockTime::from_mseconds(250)) {
                if let Err(err) = write_camera_preview_tap(&path, &sample) {
                    // Best-effort tap: log at debug and back off briefly rather
                    // than letting a full disk or similar kill capture.
                    tracing::debug!("📸 local uplink preview tap write failed: {err:#}");
                    thread::sleep(Duration::from_millis(100));
                }
            }
        }
    });
    running
}
/// Write one RGBA `sample` to `path` in the `LESAVKA_RGBA` tap format:
/// a single ASCII header line `LESAVKA_RGBA <width> <height> <stride>`
/// followed by the raw mapped pixel bytes.
///
/// Publishes atomically (write `<path>.tmp`, then rename) so the launcher's
/// poller never observes a half-written frame.
fn write_camera_preview_tap(path: &Path, sample: &gst::Sample) -> anyhow::Result<()> {
    // Frame geometry comes from the sample's negotiated caps.
    let caps = sample.caps().context("preview tap sample missing caps")?;
    let structure = caps
        .structure(0)
        .context("preview tap caps missing structure")?;
    let width = structure
        .get::<i32>("width")
        .context("preview tap caps missing width")?;
    let height = structure
        .get::<i32>("height")
        .context("preview tap caps missing height")?;
    let buffer = sample
        .buffer()
        .context("preview tap sample missing buffer")?;
    let map = buffer
        .map_readable()
        .context("preview tap buffer unreadable")?;
    // Average bytes per row; assumes rows are packed uniformly across the
    // mapped buffer — TODO confirm for encoders that attach padded strides.
    // The max(1) guards the division against a nonsensical zero height.
    let stride = map.as_slice().len() / height.max(1) as usize;
    let tmp_path = path.with_extension("tmp");
    let mut file = std::fs::File::create(&tmp_path)
        .with_context(|| format!("creating {}", tmp_path.display()))?;
    writeln!(file, "LESAVKA_RGBA {width} {height} {stride}")?;
    file.write_all(map.as_slice())?;
    // Durability is best-effort; losing a single preview frame is harmless.
    file.sync_all().ok();
    std::fs::rename(&tmp_path, path).with_context(|| format!("publishing {}", path.display()))?;
    Ok(())
}
#[cfg(not(coverage))]
@ -402,6 +558,9 @@ fn supported_encoder_property(
impl Drop for CameraCapture {
    fn drop(&mut self) {
        // Ask the preview tap thread (if any) to exit its poll loop.
        if let Some(running) = &self.preview_tap_running {
            running.store(false, Ordering::Release);
        }
        // Tear down the pipeline; a failure here is not actionable during drop.
        let _ = self.pipeline.set_state(gst::State::Null);
    }
}

View File

@ -7,18 +7,28 @@ use lesavka_common::lesavka::AudioPacket;
use shell_escape::unix::escape;
#[cfg(not(coverage))]
use std::sync::atomic::{AtomicU64, Ordering};
use std::{path::Path as StdPath, thread, time::Duration};
use std::{
path::{Path as StdPath, PathBuf},
sync::{
Arc,
atomic::{AtomicBool, Ordering as AtomicOrdering},
},
thread,
time::Duration,
};
use tracing::{debug, warn};
#[cfg(not(coverage))]
use tracing::{error, info, trace};
const MIC_GAIN_ENV: &str = "LESAVKA_MIC_GAIN";
const MIC_GAIN_CONTROL_ENV: &str = "LESAVKA_MIC_GAIN_CONTROL";
const MIC_LEVEL_TAP_ENV: &str = "LESAVKA_UPLINK_MIC_LEVEL";
/// Captures and encodes microphone audio for the uplink.
pub struct MicrophoneCapture {
    #[allow(dead_code)] // kept alive to hold PLAYING state
    pipeline: gst::Pipeline,
    // Encoded-audio appsink drained by `pull`.
    sink: gst_app::AppSink,
    // Stop flag for the optional level-tap thread; `None` when the tap is disabled.
    level_tap_running: Option<Arc<AtomicBool>>,
}
impl MicrophoneCapture {
@ -41,25 +51,11 @@ impl MicrophoneCapture {
.into_iter()
.find(|e| gst::ElementFactory::find(e).is_some())
.unwrap_or("opusenc");
let parser = if aac.contains("opus") {
// opusenc already outputs raw Opus frames just state the caps
"capsfilter caps=audio/x-opus,rate=48000,channels=2"
} else {
// AAC → ADTS frames
"aacparse ! capsfilter caps=audio/mpeg,stream-format=adts,rate=48000,channels=2"
};
let parser = parser_for_encoder(aac);
let gain = mic_gain_from_env();
let desc = format!(
"{source_desc} ! \
audio/x-raw,format=S16LE,channels=2,rate=48000 ! \
audioconvert ! audioresample ! \
volume name=mic_input_gain volume={} ! \
{aac} bitrate=128000 ! \
{parser} ! \
queue max-size-buffers=100 leaky=downstream ! \
appsink name=asink emit-signals=true max-buffers=50 drop=true",
format_mic_gain_for_gst(gain)
);
let level_tap_path = mic_level_tap_path();
let desc =
microphone_pipeline_desc(&source_desc, aac, parser, gain, level_tap_path.is_some());
let pipeline: gst::Pipeline = gst::parse::launch(&desc)?.downcast().expect("pipeline");
let sink: gst_app::AppSink = pipeline.by_name("asink").unwrap().downcast().unwrap();
@ -101,8 +97,22 @@ impl MicrophoneCapture {
.set_state(gst::State::Playing)
.context("start mic pipeline")?;
maybe_spawn_mic_gain_control(volume);
let level_tap_running = if let Some(path) = level_tap_path {
let level_sink = pipeline
.by_name("level_sink")
.context("missing microphone level tap appsink")?
.downcast::<gst_app::AppSink>()
.expect("microphone level tap appsink");
Some(spawn_mic_level_tap(level_sink, path))
} else {
None
};
Ok(Self { pipeline, sink })
Ok(Self {
pipeline,
sink,
level_tap_running,
})
}
/// Blocking pull; call from an async wrapper
@ -208,6 +218,12 @@ impl MicrophoneCapture {
}
fn default_source_desc() -> String {
#[cfg(coverage)]
if let Ok(source) = std::env::var("LESAVKA_MIC_TEST_SOURCE_DESC")
&& !source.trim().is_empty()
{
return source;
}
if Self::pipewire_source_available() {
return Self::pipewire_source_desc(None);
}
@ -215,6 +231,59 @@ impl MicrophoneCapture {
}
}
/// Optional path for the microphone level tap, taken from the
/// `LESAVKA_UPLINK_MIC_LEVEL` env var; unset or blank disables the tap.
fn mic_level_tap_path() -> Option<PathBuf> {
    let raw = std::env::var(MIC_LEVEL_TAP_ENV).ok()?;
    let trimmed = raw.trim();
    (!trimmed.is_empty()).then(|| PathBuf::from(trimmed))
}
/// Pick the parser/caps fragment matching the chosen audio encoder element.
fn parser_for_encoder(aac: &str) -> &'static str {
    if !aac.contains("opus") {
        // AAC encoders need their output framed into ADTS before the appsink.
        return "aacparse ! capsfilter caps=audio/mpeg,stream-format=adts,rate=48000,channels=2";
    }
    // opusenc already emits raw Opus frames; just declare the caps.
    "capsfilter caps=audio/x-opus,rate=48000,channels=2"
}
/// Build the gst-launch description for the microphone uplink pipeline.
///
/// Both variants capture S16LE stereo at 48 kHz, apply the `mic_input_gain`
/// volume element, encode, and deliver frames to the `asink` appsink. With
/// `level_tap_enabled` a tee additionally splits off a raw-PCM branch ending
/// in `level_sink`, which `spawn_mic_level_tap` drains to publish input levels.
fn microphone_pipeline_desc(
    source_desc: &str,
    encoder: &str,
    parser: &str,
    gain: f64,
    level_tap_enabled: bool,
) -> String {
    // Format the gain once so number formatting for GStreamer stays in one place.
    let gain = format_mic_gain_for_gst(gain);
    if level_tap_enabled {
        format!(
            "{source_desc} ! \
             audio/x-raw,format=S16LE,channels=2,rate=48000 ! \
             audioconvert ! audioresample ! \
             volume name=mic_input_gain volume={gain} ! \
             tee name=t \
             t. ! queue max-size-buffers=100 leaky=downstream ! \
             {encoder} bitrate=128000 ! \
             {parser} ! \
             appsink name=asink emit-signals=true max-buffers=50 drop=true \
             t. ! queue max-size-buffers=8 leaky=downstream ! \
             audio/x-raw,format=S16LE,channels=2,rate=48000 ! \
             appsink name=level_sink emit-signals=false sync=false max-buffers=8 drop=true"
        )
    } else {
        format!(
            "{source_desc} ! \
             audio/x-raw,format=S16LE,channels=2,rate=48000 ! \
             audioconvert ! audioresample ! \
             volume name=mic_input_gain volume={gain} ! \
             {encoder} bitrate=128000 ! \
             {parser} ! \
             queue max-size-buffers=100 leaky=downstream ! \
             appsink name=asink emit-signals=true max-buffers=50 drop=true"
        )
    }
}
fn mic_gain_from_env() -> f64 {
std::env::var(MIC_GAIN_ENV)
.ok()
@ -255,6 +324,41 @@ fn maybe_spawn_mic_gain_control(volume: gst::Element) {
});
}
/// Spawn a thread that computes a peak level from raw PCM pulled off `sink`
/// and publishes it to `path`.
///
/// Returns the shared stop flag; storing `false` in it (done in
/// `MicrophoneCapture::drop`) makes the thread exit after at most one
/// 250 ms pull timeout.
fn spawn_mic_level_tap(sink: gst_app::AppSink, path: PathBuf) -> Arc<AtomicBool> {
    let running = Arc::new(AtomicBool::new(true));
    let thread_running = Arc::clone(&running);
    thread::spawn(move || {
        while thread_running.load(AtomicOrdering::Acquire) {
            // Bounded pull so the stop flag is re-checked even when idle.
            if let Some(sample) = sink.try_pull_sample(gst::ClockTime::from_mseconds(250))
                && let Some(buffer) = sample.buffer()
                && let Ok(map) = buffer.map_readable()
            {
                let level = pcm_peak_fraction(map.as_slice());
                if let Err(err) = write_mic_level_tap(&path, level) {
                    // Best-effort tap: a missed level update is harmless.
                    tracing::debug!("🎤 local uplink level tap write failed: {err:#}");
                }
            }
        }
    });
    running
}
/// Peak amplitude of interleaved little-endian S16 PCM, normalised to
/// `0.0..=1.0`. An empty (or sub-sample) slice yields 0.0; a trailing odd
/// byte is ignored. `i16::MIN` maps slightly above 1.0 and is clamped.
fn pcm_peak_fraction(bytes: &[u8]) -> f64 {
    let mut peak = 0.0_f64;
    for pair in bytes.chunks_exact(2) {
        let magnitude = i16::from_le_bytes([pair[0], pair[1]]).unsigned_abs() as f64;
        if magnitude > peak {
            peak = magnitude;
        }
    }
    (peak / i16::MAX as f64).clamp(0.0, 1.0)
}
/// Publish `level` (expected `0.0..=1.0`) to `path` as a six-decimal line,
/// using write-to-temp + rename so readers never observe a torn value.
fn write_mic_level_tap(path: &StdPath, level: f64) -> Result<()> {
    let tmp_path = path.with_extension("tmp");
    std::fs::write(&tmp_path, format!("{level:.6}\n"))
        .with_context(|| format!("writing {}", tmp_path.display()))?;
    std::fs::rename(&tmp_path, path).with_context(|| format!("publishing {}", path.display()))?;
    Ok(())
}
fn read_mic_gain_control(path: &StdPath) -> Option<f64> {
std::fs::read_to_string(path)
.ok()
@ -263,6 +367,9 @@ fn read_mic_gain_control(path: &StdPath) -> Option<f64> {
impl Drop for MicrophoneCapture {
    fn drop(&mut self) {
        // Ask the level tap thread (if any) to exit its poll loop.
        if let Some(running) = &self.level_tap_running {
            running.store(false, AtomicOrdering::Release);
        }
        // Tear down the pipeline; a failure here is not actionable during drop.
        let _ = self.pipeline.set_state(gst::State::Null);
    }
}

View File

@ -6,6 +6,7 @@ use gtk::{gdk, glib};
use shell_escape::escape;
use std::borrow::Cow;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::{Child, Command};
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{Arc, Mutex};
@ -36,6 +37,7 @@ pub struct DeviceTestController {
camera: Option<LocalCameraPreview>,
selected_camera: Option<String>,
microphone: Option<LocalMicrophoneMonitor>,
microphone_probe: Option<LocalMicrophoneLevelProbe>,
speaker: Option<Child>,
microphone_replay: Option<Child>,
microphone_buffer: Arc<Mutex<Vec<u8>>>,
@ -48,6 +50,7 @@ impl Default for DeviceTestController {
camera: None,
selected_camera: None,
microphone: None,
microphone_probe: None,
speaker: None,
microphone_replay: None,
microphone_buffer: Arc::new(Mutex::new(Vec::new())),
@ -82,10 +85,15 @@ impl DeviceTestController {
.camera
.as_ref()
.is_some_and(LocalCameraPreview::is_running),
DeviceTestKind::Microphone => self
.microphone
.as_ref()
.is_some_and(LocalMicrophoneMonitor::is_running),
DeviceTestKind::Microphone => {
self.microphone
.as_ref()
.is_some_and(LocalMicrophoneMonitor::is_running)
|| self
.microphone_probe
.as_ref()
.is_some_and(LocalMicrophoneLevelProbe::is_running)
}
DeviceTestKind::MicrophoneReplay => self.microphone_replay.is_some(),
DeviceTestKind::Speaker => self.speaker.is_some(),
}
@ -124,6 +132,71 @@ impl DeviceTestController {
Ok(true)
}
/// Stop local-only device tests that would contend with the relay uplink
/// for hardware: the in-launcher webcam preview (only when it reads the
/// device directly) and the local microphone monitor.
pub fn stop_local_capture_for_relay(&mut self) {
    // A relay-file preview replays frames from a tap file and shares nothing
    // with the uplink, so only a direct device preview gets stopped.
    if self
        .camera
        .as_ref()
        .is_some_and(LocalCameraPreview::is_device_preview_running)
        && let Some(camera) = self.camera.as_mut()
    {
        camera.stop();
    }
    if let Some(mut monitor) = self.microphone.take() {
        monitor.stop();
    }
}
/// Reconcile the local uplink probes with the relay's current channel state.
///
/// Called periodically from the launcher tick. While the relay child is live
/// and the camera channel is active, the camera panel replays frames from
/// `camera_preview_path`; while the microphone channel is active, the level
/// meter is fed from `microphone_level_path`. When the relay stops or a
/// channel is disabled, the corresponding probe is torn down.
///
/// # Errors
/// Fails when a camera probe is requested before the camera preview panel
/// exists.
pub fn sync_relay_uplink_probe(
    &mut self,
    relay_live: bool,
    camera_active: bool,
    camera_label: Option<&str>,
    camera_preview_path: &Path,
    microphone_active: bool,
    microphone_level_path: &Path,
) -> Result<()> {
    self.cleanup_finished();
    let camera_should_probe = relay_live && camera_active && camera_label.is_some();
    if camera_should_probe {
        let preview = self
            .camera
            .as_mut()
            .ok_or_else(|| anyhow!("camera preview panel is not ready yet"))?;
        preview.start_relay_file(
            camera_preview_path.to_path_buf(),
            camera_label.unwrap_or("selected webcam").to_string(),
        )?;
    } else if self
        .camera
        .as_ref()
        .is_some_and(LocalCameraPreview::is_relay_file_running)
        && let Some(preview) = self.camera.as_mut()
    {
        // Only stop relay-file playback; a direct device preview is the
        // user's own local test and is left alone.
        preview.stop();
    }
    let microphone_should_probe = relay_live && microphone_active;
    if microphone_should_probe {
        // The local monitor would contend with the relay for the device.
        if self.microphone.is_some() {
            self.stop(DeviceTestKind::Microphone);
        }
        // (Re)start the probe only when it is missing, dead, or watching a
        // different level file.
        let needs_probe = self
            .microphone_probe
            .as_ref()
            .is_none_or(|probe| !probe.is_running_for(microphone_level_path));
        if needs_probe {
            self.stop_microphone_probe();
            self.microphone_probe = Some(LocalMicrophoneLevelProbe::start(
                microphone_level_path.to_path_buf(),
                Arc::clone(&self.microphone_level),
            ));
        }
    } else {
        self.stop_microphone_probe();
    }
    Ok(())
}
/// Toggle the speaker test process for `sink` (falls back to the default
/// output when `None`). Presumably returns the post-toggle running state —
/// confirm against `toggle_child`, which is defined elsewhere in this file.
pub fn toggle_speaker(&mut self, sink: Option<&str>) -> Result<bool> {
    self.toggle_child(DeviceTestKind::Speaker, build_speaker_test(sink))
}
@ -171,6 +244,7 @@ impl DeviceTestController {
] {
self.stop(kind);
}
self.stop_microphone_probe();
}
fn toggle_child(&mut self, kind: DeviceTestKind, command: Result<Command>) -> Result<bool> {
@ -193,6 +267,7 @@ impl DeviceTestController {
if let Some(mut monitor) = self.microphone.take() {
monitor.stop();
}
self.stop_microphone_probe();
if let Ok(mut level) = self.microphone_level.lock() {
*level = 0.0;
}
@ -214,6 +289,13 @@ impl DeviceTestController {
{
self.microphone = None;
}
if self
.microphone_probe
.as_mut()
.is_some_and(|probe| !probe.is_running())
{
self.microphone_probe = None;
}
for kind in [DeviceTestKind::MicrophoneReplay, DeviceTestKind::Speaker] {
let finished = self
.slot_mut(kind)
@ -247,6 +329,12 @@ impl DeviceTestController {
}
}
/// Tear down the relay microphone level probe, if one exists.
fn stop_microphone_probe(&mut self) {
    match self.microphone_probe.take() {
        Some(mut probe) => probe.stop(),
        None => (),
    }
}
fn replay_wav_bytes(&self) -> Result<Vec<u8>> {
let audio = self
.microphone_buffer
@ -273,6 +361,7 @@ struct LocalCameraPreview {
generation: Arc<AtomicU64>,
running: Arc<AtomicBool>,
selected_device: Option<String>,
relay_preview_path: Option<PathBuf>,
}
struct LocalMicrophoneMonitor {
@ -280,6 +369,12 @@ struct LocalMicrophoneMonitor {
generation: Arc<AtomicU64>,
}
/// Background reader that mirrors the relay-published microphone level file
/// into the launcher's shared level meter.
struct LocalMicrophoneLevelProbe {
    // Level file being watched; compared in `is_running_for`.
    path: PathBuf,
    // Cleared by `stop()` and by the worker itself when its loop exits.
    running: Arc<AtomicBool>,
    // Bumped on `stop()` so the worker's captured token no longer matches.
    generation: Arc<AtomicU64>,
}
struct PreviewFrame {
width: i32,
height: i32,
@ -327,6 +422,7 @@ impl LocalCameraPreview {
generation,
running,
selected_device: None,
relay_preview_path: None,
}
}
@ -334,6 +430,14 @@ impl LocalCameraPreview {
self.running.load(Ordering::Acquire)
}
/// True while the preview thread reads the webcam device directly
/// (i.e. it is not replaying a relay tap file).
fn is_device_preview_running(&self) -> bool {
    self.is_running() && self.relay_preview_path.is_none()
}
/// True while the preview thread replays frames from a relay tap file.
fn is_relay_file_running(&self) -> bool {
    self.is_running() && self.relay_preview_path.is_some()
}
fn set_selected(&mut self, camera: Option<&str>) -> Result<()> {
self.selected_device = normalize_camera_selection(camera);
@ -367,6 +471,7 @@ impl LocalCameraPreview {
.selected_device
.clone()
.ok_or_else(|| anyhow!("select a camera before starting the in-launcher preview"))?;
self.relay_preview_path = None;
let device = resolve_camera_device(&selected);
let latest = Arc::clone(&self.latest);
let status_text = Arc::clone(&self.status_text);
@ -396,18 +501,64 @@ impl LocalCameraPreview {
Ok(())
}
/// Begin mirroring relay-published preview frames from `path` into the
/// camera panel, labelled with `selected`.
///
/// Idempotent: a relay-file feed already running for the same `path` is left
/// untouched; any other running preview is stopped first. Always returns
/// `Ok(())` — the `Result` keeps the signature parallel with other starters.
fn start_relay_file(&mut self, path: PathBuf, selected: String) -> Result<()> {
    if self.is_running()
        && self
            .relay_preview_path
            .as_ref()
            .is_some_and(|existing| existing == &path)
    {
        return Ok(());
    }
    if self.is_running() {
        self.stop();
    }
    let latest = Arc::clone(&self.latest);
    let status_text = Arc::clone(&self.status_text);
    let generation = Arc::clone(&self.generation);
    let running = Arc::clone(&self.running);
    // New generation token: any older worker observing the bump exits.
    let token = generation.fetch_add(1, Ordering::AcqRel) + 1;
    running.store(true, Ordering::Release);
    self.relay_preview_path = Some(path.clone());
    self.set_status(format!(
        "Waiting for relay webcam frames from {selected}..."
    ));
    std::thread::spawn(move || {
        run_camera_file_preview_feed(
            path,
            selected,
            token,
            latest,
            status_text,
            generation,
            running,
        );
    });
    Ok(())
}
fn stop(&mut self) {
let was_relay_file = self.relay_preview_path.take().is_some();
self.running.store(false, Ordering::Release);
self.generation.fetch_add(1, Ordering::AcqRel);
if let Ok(mut latest) = self.latest.lock() {
*latest = None;
}
self.set_status(match self.selected_device.as_deref() {
Some(camera) => {
format!("Local preview stopped. {camera} stays selected for the next relay launch.")
let message = if was_relay_file {
"Relay webcam preview stopped.".to_string()
} else {
match self.selected_device.as_deref() {
Some(camera) => {
format!(
"Local preview stopped. {camera} stays selected for the next relay launch."
)
}
None => CAMERA_PREVIEW_IDLE.to_string(),
}
None => CAMERA_PREVIEW_IDLE.to_string(),
});
};
self.set_status(message);
}
fn set_status(&self, text: String) {
@ -485,6 +636,44 @@ impl LocalMicrophoneMonitor {
}
}
impl LocalMicrophoneLevelProbe {
    /// Spawn a worker that polls the level file at `path` and mirrors it into
    /// the shared `level` meter.
    fn start(path: PathBuf, level: Arc<Mutex<f64>>) -> Self {
        let running = Arc::new(AtomicBool::new(true));
        // Generation token lets `stop()` invalidate the worker in addition to
        // clearing the running flag.
        let generation = Arc::new(AtomicU64::new(1));
        let running_handle = Arc::clone(&running);
        let generation_handle = Arc::clone(&generation);
        let path_handle = path.clone();
        let token = generation.load(Ordering::Acquire);
        std::thread::spawn(move || {
            run_microphone_level_probe(
                path_handle,
                token,
                level,
                generation_handle,
                running_handle,
            );
        });
        Self {
            path,
            running,
            generation,
        }
    }

    /// Whether the worker is still alive (it clears the flag when it exits).
    fn is_running(&self) -> bool {
        self.running.load(Ordering::Acquire)
    }

    /// Whether this probe is alive and watching exactly `path`.
    fn is_running_for(&self, path: &Path) -> bool {
        self.is_running() && self.path == path
    }

    /// Request shutdown: clear the flag and bump the generation so the
    /// worker's captured token stops matching.
    fn stop(&mut self) {
        self.running.store(false, Ordering::Release);
        self.generation.fetch_add(1, Ordering::AcqRel);
    }
}
fn normalize_camera_selection(camera: Option<&str>) -> Option<String> {
camera
.map(str::trim)
@ -567,6 +756,65 @@ fn run_camera_preview_feed(
Ok(())
}
/// Worker loop: poll `path` for `LESAVKA_RGBA` frames and publish them into
/// the shared `latest` slot until stopped or superseded.
///
/// `token` pins this worker to one generation — bumping `generation` or
/// clearing `running` makes the loop exit. Status text flips to "live" on the
/// first decoded frame; until then each failure's reason is surfaced.
fn run_camera_file_preview_feed(
    path: PathBuf,
    selected: String,
    token: u64,
    latest: Arc<Mutex<Option<PreviewFrame>>>,
    status_text: Arc<Mutex<String>>,
    generation: Arc<AtomicU64>,
    running: Arc<AtomicBool>,
) {
    let mut has_frame = false;
    while running.load(Ordering::Acquire) && generation.load(Ordering::Acquire) == token {
        match read_camera_preview_tap(&path) {
            Ok(frame) => {
                if let Ok(mut slot) = latest.lock() {
                    *slot = Some(frame);
                }
                if !has_frame {
                    has_frame = true;
                    if let Ok(mut status) = status_text.lock() {
                        *status = format!("Relay webcam preview live for {selected}.");
                    }
                }
            }
            Err(err) => {
                // Keep showing the wait reason only until the first frame lands.
                if !has_frame && let Ok(mut status) = status_text.lock() {
                    *status = format!("Waiting for relay webcam frames from {selected}: {err}");
                }
            }
        }
        // ~8 Hz polling keeps the preview fresh without hammering the filesystem.
        std::thread::sleep(Duration::from_millis(120));
    }
    // Mark the feed dead so `is_running()` reflects reality immediately.
    running.store(false, Ordering::Release);
}
/// Worker loop for `LocalMicrophoneLevelProbe`.
///
/// Every 100 ms reads the relay-published level file; when the file is
/// missing or invalid the meter decays toward zero (x0.8 per tick) instead of
/// freezing at a stale value. Exits when the flag clears or the generation
/// token stops matching, zeroing the meter on the way out.
fn run_microphone_level_probe(
    path: PathBuf,
    token: u64,
    level: Arc<Mutex<f64>>,
    generation: Arc<AtomicU64>,
    running: Arc<AtomicBool>,
) {
    while running.load(Ordering::Acquire) && generation.load(Ordering::Acquire) == token {
        let next = read_microphone_level_tap(&path).unwrap_or_else(|| {
            // Decay fallback: shrink the previous value so the UI meter falls
            // smoothly when updates stop arriving.
            level
                .lock()
                .map(|current| (*current * 0.8).clamp(0.0, 1.0))
                .unwrap_or(0.0)
        });
        if let Ok(mut meter) = level.lock() {
            *meter = next.clamp(0.0, 1.0);
        }
        std::thread::sleep(Duration::from_millis(100));
    }
    if let Ok(mut meter) = level.lock() {
        *meter = 0.0;
    }
    running.store(false, Ordering::Release);
}
fn build_camera_preview_pipeline(device: &str) -> Result<(gst::Pipeline, gst_app::AppSink)> {
let desc = camera_preview_pipeline_desc(device);
let pipeline = gst::parse::launch(&desc)?
@ -659,6 +907,54 @@ fn sample_to_frame(sample: &gst::Sample) -> Option<PreviewFrame> {
})
}
/// Parse one `LESAVKA_RGBA` frame from `path`.
///
/// Expected layout (written by the client's `write_camera_preview_tap`):
/// an ASCII header line `LESAVKA_RGBA <width> <height> <stride>` followed by
/// at least `stride * height` raw RGBA bytes. All header fields must be
/// positive; a short payload is rejected rather than rendered partially.
fn read_camera_preview_tap(path: &Path) -> Result<PreviewFrame> {
    let bytes = fs::read(path).with_context(|| format!("{} is not ready", path.display()))?;
    let header_end = bytes
        .iter()
        .position(|byte| *byte == b'\n')
        .ok_or_else(|| anyhow!("preview frame header is incomplete"))?;
    let header =
        std::str::from_utf8(&bytes[..header_end]).context("preview frame header is not UTF-8")?;
    let mut fields = header.split_whitespace();
    if fields.next() != Some("LESAVKA_RGBA") {
        return Err(anyhow!("preview frame has an unknown format"));
    }
    let width = fields
        .next()
        .and_then(|value| value.parse::<i32>().ok())
        .filter(|value| *value > 0)
        .ok_or_else(|| anyhow!("preview frame width is invalid"))?;
    let height = fields
        .next()
        .and_then(|value| value.parse::<i32>().ok())
        .filter(|value| *value > 0)
        .ok_or_else(|| anyhow!("preview frame height is invalid"))?;
    let stride = fields
        .next()
        .and_then(|value| value.parse::<usize>().ok())
        .filter(|value| *value > 0)
        .ok_or_else(|| anyhow!("preview frame stride is invalid"))?;
    // Everything after the newline is pixel data.
    let rgba = bytes[header_end + 1..].to_vec();
    let expected_min = stride.saturating_mul(height as usize);
    if rgba.len() < expected_min {
        return Err(anyhow!("preview frame payload is incomplete"));
    }
    Ok(PreviewFrame {
        width,
        height,
        stride,
        rgba,
    })
}
/// Read the mic level tap file and return its first whitespace-separated
/// token as a level clamped to `0.0..=1.0`. Returns `None` when the file is
/// unreadable, empty, or does not start with a finite number.
fn read_microphone_level_tap(path: &Path) -> Option<f64> {
    let raw = fs::read_to_string(path).ok()?;
    let token = raw.split_ascii_whitespace().next()?;
    let value: f64 = token.parse().ok()?;
    if value.is_finite() {
        Some(value.clamp(0.0, 1.0))
    } else {
        None
    }
}
/// Escape backslashes and double quotes so `value` can be embedded inside a
/// quoted gst-launch description string.
fn gst_quote(value: &str) -> String {
    let mut quoted = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '\\' => quoted.push_str("\\\\"),
            '"' => quoted.push_str("\\\""),
            other => quoted.push(other),
        }
    }
    quoted
}
@ -745,7 +1041,8 @@ fn build_wav_bytes(audio: &[u8], sample_rate: u32, channels: u16, bits_per_sampl
mod tests {
use super::{
MIC_REPLAY_MAX_BYTES, build_wav_bytes, camera_preview_pipeline_desc,
normalize_camera_selection, push_recent_audio, resolve_camera_device,
normalize_camera_selection, push_recent_audio, read_camera_preview_tap,
read_microphone_level_tap, resolve_camera_device,
};
use std::sync::{Arc, Mutex};
@ -796,4 +1093,37 @@ mod tests {
assert_eq!(&wav[36..40], b"data");
assert_eq!(wav.len(), 44 + audio.len());
}
#[test]
fn relay_camera_preview_tap_round_trips_rgba_frame() {
    // A 2x2 frame with stride 8 needs exactly 16 payload bytes; the parser
    // should echo the header fields back unchanged.
    let path =
        std::env::temp_dir().join(format!("lesavka-camera-preview-tap-{}", std::process::id()));
    std::fs::write(
        &path,
        [b"LESAVKA_RGBA 2 2 8\n".as_slice(), &[1_u8; 16]].concat(),
    )
    .expect("write tap");
    let frame = read_camera_preview_tap(&path).expect("read tap");
    assert_eq!(frame.width, 2);
    assert_eq!(frame.height, 2);
    assert_eq!(frame.stride, 8);
    assert_eq!(frame.rgba.len(), 16);
    let _ = std::fs::remove_file(path);
}
#[test]
fn relay_microphone_level_tap_clamps_values() {
    // Out-of-range levels clamp to [0, 1]; non-numeric content yields None.
    let path =
        std::env::temp_dir().join(format!("lesavka-mic-level-tap-{}", std::process::id()));
    std::fs::write(&path, "1.25\n").expect("write high");
    assert_eq!(read_microphone_level_tap(&path), Some(1.0));
    std::fs::write(&path, "-0.5\n").expect("write low");
    assert_eq!(read_microphone_level_tap(&path), Some(0.0));
    std::fs::write(&path, "not-a-number\n").expect("write invalid");
    assert_eq!(read_microphone_level_tap(&path), None);
    let _ = std::fs::remove_file(path);
}
}

View File

@ -24,8 +24,9 @@ use {
present_popout_windows, read_input_routing_state, reap_exited_child, refresh_launcher_ui,
refresh_test_buttons, routing_name, selected_combo_value, selected_server_addr,
shutdown_launcher_runtime, spawn_client_process, stop_child_process, toggle_key_label,
update_test_action_result, write_audio_gain_request, write_input_routing_request,
write_input_toggle_key_request, write_mic_gain_request,
update_test_action_result, uplink_camera_preview_path, uplink_mic_level_path,
write_audio_gain_request, write_input_routing_request, write_input_toggle_key_request,
write_mic_gain_request,
},
crate::handshake::{HandshakeProbe, probe},
crate::output::display::enumerate_monitors,
@ -1360,6 +1361,7 @@ pub fn run_gui_launcher(server_addr: String) -> Result<()> {
{
let state = Rc::clone(&state);
let child_proc = Rc::clone(&child_proc);
let tests = Rc::clone(&tests);
let widgets = widgets.clone();
let server_entry = server_entry.clone();
let camera_combo = camera_combo.clone();
@ -1440,6 +1442,8 @@ pub fn run_gui_launcher(server_addr: String) -> Result<()> {
let _ = std::fs::remove_file(input_state_path.as_path());
let _ = std::fs::remove_file(input_toggle_control_path.as_path());
let launch_state = state.borrow().clone();
tests.borrow_mut().stop_local_capture_for_relay();
refresh_test_buttons(&widgets_handle, &mut tests.borrow_mut());
let input_toggle_key = launch_state.swap_key.clone();
let input_control_path = input_control_path.as_ref().clone();
let input_state_path = input_state_path.as_ref().clone();
@ -2011,6 +2015,8 @@ pub fn run_gui_launcher(server_addr: String) -> Result<()> {
let next_diagnostics_sample = Rc::clone(&next_diagnostics_sample);
let preview_session_active = Rc::clone(&preview_session_active);
let log_tx = log_tx.clone();
let camera_preview_path = uplink_camera_preview_path();
let mic_level_path = uplink_mic_level_path();
glib::timeout_add_local(Duration::from_millis(180), move || {
let child_running = reap_exited_child(&child_proc);
if let Some(preview) = preview.as_ref() {
@ -2328,6 +2334,27 @@ pub fn run_gui_launcher(server_addr: String) -> Result<()> {
next_diagnostics_sample.set(now + Duration::from_secs(1));
}
let (camera_probe_active, camera_label, mic_probe_active) = {
let state = state.borrow();
(
state.channels.camera && state.devices.camera.is_some(),
state.devices.camera.clone(),
state.channels.microphone && state.devices.microphone.is_some(),
)
};
if let Err(err) = tests.borrow_mut().sync_relay_uplink_probe(
child_running,
camera_probe_active,
camera_label.as_deref(),
&camera_preview_path,
mic_probe_active,
&mic_level_path,
) {
widgets
.status_label
.set_text(&format!("Local uplink monitor could not start: {err}"));
}
refresh_launcher_ui(&widgets, &state.borrow(), child_running);
refresh_test_buttons(&widgets, &mut tests.borrow_mut());
glib::ControlFlow::Continue

View File

@ -26,11 +26,15 @@ pub const INPUT_STATE_ENV: &str = "LESAVKA_LAUNCHER_INPUT_STATE";
pub const TOGGLE_KEY_CONTROL_ENV: &str = "LESAVKA_LAUNCHER_TOGGLE_KEY_CONTROL";
pub const AUDIO_GAIN_CONTROL_ENV: &str = "LESAVKA_AUDIO_GAIN_CONTROL";
pub const MIC_GAIN_CONTROL_ENV: &str = "LESAVKA_MIC_GAIN_CONTROL";
pub const UPLINK_CAMERA_PREVIEW_ENV: &str = "LESAVKA_UPLINK_CAMERA_PREVIEW";
pub const UPLINK_MIC_LEVEL_ENV: &str = "LESAVKA_UPLINK_MIC_LEVEL";
pub const DEFAULT_INPUT_CONTROL_PATH: &str = "/tmp/lesavka-launcher-input.control";
pub const DEFAULT_INPUT_STATE_PATH: &str = "/tmp/lesavka-launcher-input.state";
pub const DEFAULT_TOGGLE_KEY_CONTROL_PATH: &str = "/tmp/lesavka-launcher-toggle-key.control";
pub const DEFAULT_AUDIO_GAIN_CONTROL_PATH: &str = "/tmp/lesavka-audio-gain.control";
pub const DEFAULT_MIC_GAIN_CONTROL_PATH: &str = "/tmp/lesavka-mic-gain.control";
pub const DEFAULT_UPLINK_CAMERA_PREVIEW_PATH: &str = "/tmp/lesavka-uplink-camera-preview.rgba";
pub const DEFAULT_UPLINK_MIC_LEVEL_PATH: &str = "/tmp/lesavka-uplink-mic-level.value";
pub type RelayChild = Child;
@ -826,6 +830,18 @@ pub fn mic_gain_control_path() -> PathBuf {
.unwrap_or_else(|_| PathBuf::from(DEFAULT_MIC_GAIN_CONTROL_PATH))
}
/// Filesystem path where the relay client publishes uplink camera preview
/// frames; overridable via `LESAVKA_UPLINK_CAMERA_PREVIEW`.
pub fn uplink_camera_preview_path() -> PathBuf {
    match std::env::var(UPLINK_CAMERA_PREVIEW_ENV) {
        Ok(value) => PathBuf::from(value),
        Err(_) => PathBuf::from(DEFAULT_UPLINK_CAMERA_PREVIEW_PATH),
    }
}
/// Filesystem path where the relay client publishes the uplink microphone
/// level; overridable via `LESAVKA_UPLINK_MIC_LEVEL`.
pub fn uplink_mic_level_path() -> PathBuf {
    match std::env::var(UPLINK_MIC_LEVEL_ENV) {
        Ok(value) => PathBuf::from(value),
        Err(_) => PathBuf::from(DEFAULT_UPLINK_MIC_LEVEL_PATH),
    }
}
pub fn write_input_routing_request(path: &Path, routing: InputRouting) -> Result<()> {
std::fs::write(
path,
@ -1017,6 +1033,12 @@ pub fn spawn_client_process(
let mic_gain_path = mic_gain_control_path();
let _ = write_mic_gain_request(&mic_gain_path, state.mic_gain_percent);
command.env(MIC_GAIN_CONTROL_ENV, mic_gain_path);
let camera_preview_path = uplink_camera_preview_path();
let _ = std::fs::remove_file(&camera_preview_path);
command.env(UPLINK_CAMERA_PREVIEW_ENV, camera_preview_path);
let mic_level_path = uplink_mic_level_path();
let _ = std::fs::remove_file(&mic_level_path);
command.env(UPLINK_MIC_LEVEL_ENV, mic_level_path);
for (key, value) in runtime_env_vars(state) {
command.env(key, value);
}

View File

@ -1,6 +1,6 @@
[package]
name = "lesavka_common"
version = "0.11.46"
version = "0.11.47"
edition = "2024"
build = "build.rs"

View File

@ -21,9 +21,9 @@
"loc": 381
},
"client/src/input/camera.rs": {
"clippy_warnings": 30,
"doc_debt": 8,
"loc": 407
"clippy_warnings": 38,
"doc_debt": 12,
"loc": 566
},
"client/src/input/inputs.rs": {
"clippy_warnings": 40,
@ -41,9 +41,9 @@
"loc": 196
},
"client/src/input/microphone.rs": {
"clippy_warnings": 17,
"doc_debt": 9,
"loc": 268
"clippy_warnings": 21,
"doc_debt": 13,
"loc": 375
},
"client/src/input/mod.rs": {
"clippy_warnings": 0,
@ -61,9 +61,9 @@
"loc": 178
},
"client/src/launcher/device_test.rs": {
"clippy_warnings": 43,
"doc_debt": 29,
"loc": 799
"clippy_warnings": 67,
"doc_debt": 40,
"loc": 1129
},
"client/src/launcher/devices.rs": {
"clippy_warnings": 6,
@ -98,7 +98,7 @@
"client/src/launcher/ui.rs": {
"clippy_warnings": 68,
"doc_debt": 23,
"loc": 2497
"loc": 2524
},
"client/src/launcher/ui_components.rs": {
"clippy_warnings": 22,
@ -106,9 +106,9 @@
"loc": 1497
},
"client/src/launcher/ui_runtime.rs": {
"clippy_warnings": 70,
"clippy_warnings": 74,
"doc_debt": 44,
"loc": 1768
"loc": 1790
},
"client/src/layout.rs": {
"clippy_warnings": 6,
@ -243,7 +243,7 @@
"server/src/main.rs": {
"clippy_warnings": 23,
"doc_debt": 21,
"loc": 952
"loc": 983
},
"server/src/paste.rs": {
"clippy_warnings": 8,
@ -278,7 +278,7 @@
"server/src/video_sinks.rs": {
"clippy_warnings": 78,
"doc_debt": 11,
"loc": 559
"loc": 574
},
"server/src/video_support.rs": {
"clippy_warnings": 8,

View File

@ -17,8 +17,8 @@
"loc": 381
},
"client/src/input/camera.rs": {
"line_percent": 97.99,
"loc": 407
"line_percent": 95.24,
"loc": 566
},
"client/src/input/inputs.rs": {
"line_percent": 96.39,
@ -33,8 +33,8 @@
"loc": 196
},
"client/src/input/microphone.rs": {
"line_percent": 96.71,
"loc": 268
"line_percent": 97.83,
"loc": 375
},
"client/src/input/mouse.rs": {
"line_percent": 97.32,
@ -62,7 +62,7 @@
},
"client/src/launcher/ui.rs": {
"line_percent": 100.0,
"loc": 2497
"loc": 2524
},
"client/src/layout.rs": {
"line_percent": 97.73,
@ -154,7 +154,7 @@
},
"server/src/main.rs": {
"line_percent": 79.34,
"loc": 952
"loc": 983
},
"server/src/paste.rs": {
"line_percent": 96.32,
@ -174,7 +174,7 @@
},
"server/src/video_sinks.rs": {
"line_percent": 100.0,
"loc": 559
"loc": 574
},
"server/src/video_support.rs": {
"line_percent": 97.62,

View File

@ -10,7 +10,7 @@ bench = false
[package]
name = "lesavka_server"
version = "0.11.46"
version = "0.11.47"
edition = "2024"
autobins = false

View File

@ -60,9 +60,7 @@ impl CameraRuntime {
"UVC output disabled (LESAVKA_DISABLE_UVC set)",
));
}
Err(Status::internal(
"camera relay unavailable in coverage harness",
))
Ok((session_id, Arc::new(video::CameraRelay::new_noop(0))))
}
#[cfg(not(coverage))]

View File

@ -662,6 +662,7 @@ impl Relay for Handler {
Ok(Response::new(ReceiverStream::new(rx)))
}
/// Accept synthetic upstream microphone packets without ALSA hardware.
async fn stream_microphone(
&self,
req: Request<tonic::Streaming<AudioPacket>>,
@ -699,6 +700,7 @@ impl Relay for Handler {
Ok(Response::new(ReceiverStream::new(rx)))
}
/// Accept synthetic upstream webcam packets without UVC/HDMI hardware.
async fn stream_camera(
&self,
req: Request<tonic::Streaming<VideoPacket>>,
@ -851,20 +853,49 @@ impl Relay for Handler {
async fn stream_microphone(
&self,
_req: Request<tonic::Streaming<AudioPacket>>,
req: Request<tonic::Streaming<AudioPacket>>,
) -> Result<Response<Self::StreamMicrophoneStream>, Status> {
Err(Status::internal(
"microphone sink unavailable in coverage harness",
))
let uac_dev = std::env::var("LESAVKA_UAC_DEV").unwrap_or_else(|_| "hw:UAC2Gadget,0".into());
let mut sink = runtime_support::open_voice_with_retry(&uac_dev)
.await
.map_err(|e| Status::internal(format!("{e:#}")))?;
let (tx, rx) = tokio::sync::mpsc::channel(1);
tokio::spawn(async move {
let mut inbound = req.into_inner();
while let Some(pkt) = inbound.next().await.transpose()? {
sink.push(&pkt);
}
sink.finish();
let _ = tx.send(Ok(Empty {})).await;
Ok::<(), Status>(())
});
Ok(Response::new(ReceiverStream::new(rx)))
}
async fn stream_camera(
&self,
_req: Request<tonic::Streaming<VideoPacket>>,
req: Request<tonic::Streaming<VideoPacket>>,
) -> Result<Response<Self::StreamCameraStream>, Status> {
Err(Status::internal(
"camera stream unavailable in coverage harness",
))
let cfg = camera::current_camera_config();
let (session_id, relay) = self.camera_rt.activate(&cfg).await?;
let camera_rt = self.camera_rt.clone();
let (tx, rx) = tokio::sync::mpsc::channel(1);
tokio::spawn(async move {
let mut s = req.into_inner();
while let Some(pkt) = s.next().await.transpose()? {
if !camera_rt.is_active(session_id) {
break;
}
relay.feed(pkt);
}
tx.send(Ok(Empty {})).await.ok();
Ok::<(), Status>(())
});
Ok(Response::new(ReceiverStream::new(rx)))
}
async fn capture_video(

View File

@ -448,6 +448,8 @@ fn build_hdmi_sink(cfg: &CameraConfig) -> anyhow::Result<gst::Element> {
enum CameraSink {
Uvc(WebcamSink),
Hdmi(HdmiSink),
#[cfg(coverage)]
Noop,
}
impl CameraSink {
@ -455,6 +457,10 @@ impl CameraSink {
match self {
CameraSink::Uvc(sink) => sink.push(pkt),
CameraSink::Hdmi(sink) => sink.push(pkt),
#[cfg(coverage)]
CameraSink::Noop => {
let _ = pkt;
}
}
}
}
@ -499,6 +505,15 @@ impl CameraRelay {
})
}
#[cfg(coverage)]
pub fn new_noop(id: u32) -> Self {
Self {
sink: CameraSink::Noop,
id,
frames: AtomicU64::new(0),
}
}
/// Push one `VideoPacket` coming from the client.
///
/// Inputs: the next packet from the camera stream.

View File

@ -134,6 +134,43 @@ mod camera_include_contract {
});
}
#[test]
#[serial] // mutates process-wide environment via `with_var`
fn active_camera_capture_can_publish_local_preview_tap() {
    // Purpose: when LESAVKA_UPLINK_CAMERA_PREVIEW points at a file, an active
    // CameraCapture should eventually write a preview frame there, prefixed
    // with the "LESAVKA_RGBA " header.
    init_gst();
    let dir = tempdir().expect("tempdir");
    let path = dir.path().join("uplink-camera-preview.rgba");
    let cfg = CameraConfig {
        codec: CameraCodec::H264,
        width: 160,
        height: 90,
        fps: 10,
    };
    with_var(
        "LESAVKA_UPLINK_CAMERA_PREVIEW",
        Some(path.to_string_lossy().to_string()),
        || {
            // Capture construction can legitimately fail on hosts without a
            // usable camera/test source; treat that as a skip, not a failure.
            let Ok(cap) = CameraCapture::new(Some("test"), Some(cfg)) else {
                return;
            };
            // Poll up to ~1.5s (30 x 50ms) for the preview tap to publish.
            for _ in 0..30 {
                let _ = cap.pull();
                if let Ok(bytes) = std::fs::read(&path) {
                    assert!(
                        bytes.starts_with(b"LESAVKA_RGBA "),
                        "preview tap should publish an RGBA frame header"
                    );
                    return;
                }
                std::thread::sleep(std::time::Duration::from_millis(50));
            }
            panic!("camera preview tap did not publish a frame");
        },
    );
}
#[test]
fn new_stub_and_pull_are_stable_without_frames() {
init_gst();
@ -198,6 +235,10 @@ mod camera_include_contract {
let result = CameraCapture::new(Some("test"), Some(cfg));
assert!(result.is_ok() || result.is_err());
});
assert_eq!(
CameraCapture::encoder_options("nvh264enc", None, 30),
"nvh264enc"
);
}
#[test]

View File

@ -7,6 +7,9 @@
const UI_RUNTIME_SRC: &str = include_str!("../../client/src/launcher/ui_runtime.rs");
const UI_SRC: &str = include_str!("../../client/src/launcher/ui.rs");
const DEVICE_TEST_SRC: &str = include_str!("../../client/src/launcher/device_test.rs");
const CAMERA_SRC: &str = include_str!("../../client/src/input/camera.rs");
const MICROPHONE_SRC: &str = include_str!("../../client/src/input/microphone.rs");
const LAUNCHER_MOD_SRC: &str = include_str!("../../client/src/launcher/mod.rs");
const MAIN_SRC: &str = include_str!("../../client/src/main.rs");
@ -72,3 +75,26 @@ fn live_power_probe_failures_do_not_flip_relay_state_red() {
assert!(UI_SRC.contains("state.set_server_available(true);"));
assert!(UI_SRC.contains("if !state.capture_power.available"));
}
/// While a relay is active, the client sources must keep the env plumbing and
/// tap helpers that surface local webcam/mic evidence in the launcher UI.
#[test]
fn active_relay_keeps_local_upstream_camera_and_microphone_evidence_visible() {
    // ui_runtime.rs wires the preview/level env vars into the client process.
    for marker in [
        "UPLINK_CAMERA_PREVIEW_ENV",
        "UPLINK_MIC_LEVEL_ENV",
        "command.env(UPLINK_CAMERA_PREVIEW_ENV",
        "command.env(UPLINK_MIC_LEVEL_ENV",
    ] {
        assert!(UI_RUNTIME_SRC.contains(marker));
    }
    // ui.rs hands local capture over to the relay and syncs the probe state.
    for marker in ["stop_local_capture_for_relay", "sync_relay_uplink_probe"] {
        assert!(UI_SRC.contains(marker));
    }
    // device_test.rs reads both taps and drives the file-based preview feed.
    for marker in [
        "fn start_relay_file(",
        "run_camera_file_preview_feed",
        "read_camera_preview_tap",
        "LocalMicrophoneLevelProbe",
        "read_microphone_level_tap",
    ] {
        assert!(DEVICE_TEST_SRC.contains(marker));
    }
    // camera.rs publishes the preview tap from the uplink pipeline.
    for marker in [
        "LESAVKA_UPLINK_CAMERA_PREVIEW",
        "appsink name=preview_sink",
        "spawn_camera_preview_tap",
    ] {
        assert!(CAMERA_SRC.contains(marker));
    }
    // microphone.rs publishes the level tap from the uplink pipeline.
    for marker in [
        "LESAVKA_UPLINK_MIC_LEVEL",
        "appsink name=level_sink",
        "spawn_mic_level_tap",
    ] {
        assert!(MICROPHONE_SRC.contains(marker));
    }
}

View File

@ -169,6 +169,33 @@ JSON
});
}
/// The uplink microphone pipeline description grows a tee + level appsink only
/// when the caller requests the tap; otherwise the plain encode path is kept.
#[test]
fn microphone_pipeline_desc_adds_level_tap_only_when_requested() {
    // Sanity-check that the parser selection matches each encoder's codec.
    let opus_parser = parser_for_encoder("opusenc");
    let aac_parser = parser_for_encoder("avenc_aac");
    assert!(opus_parser.contains("audio/x-opus"));
    assert!(aac_parser.contains("audio/mpeg"));

    // Tap requested: pipeline fans out through a tee into the level appsink
    // and carries the configured input gain.
    let with_tap = microphone_pipeline_desc(
        "audiotestsrc is-live=true",
        "opusenc",
        opus_parser,
        2.5,
        true,
    );
    for fragment in [
        "tee name=t",
        "appsink name=level_sink",
        "volume name=mic_input_gain volume=2.500",
    ] {
        assert!(with_tap.contains(fragment));
    }

    // Tap not requested: no level sink, but the leaky queue stays in place.
    let without_tap = microphone_pipeline_desc(
        "audiotestsrc is-live=true",
        "avenc_aac",
        aac_parser,
        1.0,
        false,
    );
    assert!(!without_tap.contains("level_sink"));
    assert!(without_tap.contains("queue max-size-buffers=100 leaky=downstream"));
}
#[test]
fn mic_gain_control_reads_first_token_and_clamps() {
let dir = tempdir().expect("tempdir");
@ -183,6 +210,33 @@ JSON
assert_eq!(read_mic_gain_control(&path), None);
}
/// The mic level tap helpers (env lookup, peak extraction, file payload) are
/// deterministic and stable.
#[test]
#[serial] // mutates process-wide environment via `with_var`
fn mic_level_tap_env_and_payload_helpers_are_stable() {
    // Peak extraction: silence maps to exactly 0.0; a full-scale S16LE sample
    // maps to (almost) 1.0.
    assert_eq!(pcm_peak_fraction(&0_i16.to_le_bytes()), 0.0);
    assert!(pcm_peak_fraction(&i16::MAX.to_le_bytes()) > 0.99);

    // Without the env var there is no tap path at all.
    with_var("LESAVKA_UPLINK_MIC_LEVEL", None::<&str>, || {
        assert!(mic_level_tap_path().is_none());
    });

    // With the env var set, the helper echoes the configured path back.
    let scratch = tempdir().expect("tempdir");
    let tap_file = scratch.path().join("uplink-mic-level.value");
    with_var(
        "LESAVKA_UPLINK_MIC_LEVEL",
        Some(tap_file.to_string_lossy().to_string()),
        || {
            assert_eq!(mic_level_tap_path().as_deref(), Some(tap_file.as_path()));
        },
    );

    // Writing a level persists it with fixed six-decimal formatting.
    write_mic_level_tap(&tap_file, 0.375).expect("write level tap");
    assert_eq!(
        fs::read_to_string(&tap_file).expect("read level tap").trim(),
        "0.375000"
    );
}
#[test]
#[serial]
fn mic_gain_control_returns_without_env() {
@ -238,14 +292,88 @@ JSON
.expect("appsink")
.downcast::<gst_app::AppSink>()
.expect("appsink cast");
let running = std::sync::Arc::new(AtomicBool::new(true));
let cap = MicrophoneCapture {
pipeline: gst::Pipeline::new(),
sink,
level_tap_running: Some(std::sync::Arc::clone(&running)),
};
assert!(
cap.pull().is_none(),
"empty appsink should produce no packet"
);
drop(cap);
assert!(!running.load(AtomicOrdering::Acquire));
}
#[test]
fn spawned_mic_level_tap_publishes_peak_from_appsink() {
    // Purpose: `spawn_mic_level_tap` should read raw S16LE samples from an
    // appsink and publish the peak fraction to the configured file.
    gst::init().ok();
    let dir = tempdir().expect("tempdir");
    let path = dir.path().join("mic-level.value");
    // Minimal appsrc -> appsink pipeline so the tap can be fed synthetic PCM.
    let pipeline: gst::Pipeline = gst::parse::launch(
        "appsrc name=src is-live=true format=time caps=audio/x-raw,format=S16LE,channels=2,rate=48000 ! \
         appsink name=sink emit-signals=false sync=false max-buffers=4 drop=true",
    )
    .expect("pipeline")
    .downcast()
    .expect("pipeline cast");
    let src: gst_app::AppSrc = pipeline
        .by_name("src")
        .expect("appsrc")
        .downcast()
        .expect("appsrc cast");
    let sink: gst_app::AppSink = pipeline
        .by_name("sink")
        .expect("appsink")
        .downcast()
        .expect("appsink cast");
    pipeline.set_state(gst::State::Playing).expect("playing");
    // The tap thread holds `sink`; `running` is the shared stop flag.
    let running = spawn_mic_level_tap(sink, path.clone());
    // Push four full-scale samples so the published peak is ~1.0.
    src.push_buffer(gst::Buffer::from_slice(i16::MAX.to_le_bytes().repeat(4)))
        .expect("push buffer");
    // Poll up to ~500ms (20 x 25ms) for the tap to write its first value.
    for _ in 0..20 {
        if let Ok(raw) = fs::read_to_string(&path) {
            let level = raw.trim().parse::<f64>().expect("level");
            assert!(level > 0.99);
            // Stop the tap thread and tear down the pipeline before returning.
            running.store(false, AtomicOrdering::Release);
            let _ = pipeline.set_state(gst::State::Null);
            return;
        }
        std::thread::sleep(std::time::Duration::from_millis(25));
    }
    // Timed out: still stop the tap and pipeline before failing the test.
    running.store(false, AtomicOrdering::Release);
    let _ = pipeline.set_state(gst::State::Null);
    panic!("microphone level tap did not publish a value");
}
#[test]
#[cfg(coverage)] // relies on the coverage-only synthetic source path
#[serial] // mutates process-wide environment via `with_var`
fn microphone_capture_with_level_tap_uses_the_same_uplink_pipeline() {
    // Purpose: constructing MicrophoneCapture with the level-tap env var set
    // should start the tap thread on the same uplink pipeline (no second
    // capture pipeline), observable via `level_tap_running` being populated.
    gst::init().ok();
    let dir = tempdir().expect("tempdir");
    let level_path = dir.path().join("uplink-mic-level.value");
    // Clear any named source, force the synthetic test source, then point the
    // level tap at a temp file — env scoping is nested so all three apply.
    with_var("LESAVKA_MIC_SOURCE", None::<&str>, || {
        with_var(
            "LESAVKA_MIC_TEST_SOURCE_DESC",
            Some("audiotestsrc is-live=true wave=sine freq=440".to_string()),
            || {
                with_var(
                    "LESAVKA_UPLINK_MIC_LEVEL",
                    Some(level_path.to_string_lossy().to_string()),
                    || {
                        let cap = MicrophoneCapture::new().expect("synthetic mic capture");
                        assert!(cap.level_tap_running.is_some());
                    },
                );
            },
        );
    });
}
#[test]
@ -279,7 +407,11 @@ JSON
.set_pts(Some(gst::ClockTime::from_useconds(321)));
src.push_buffer(buf).expect("push sample");
let cap = MicrophoneCapture { pipeline, sink };
let cap = MicrophoneCapture {
pipeline,
sink,
level_tap_running: None,
};
let pkt = cap.pull().expect("audio packet");
assert_eq!(pkt.id, 0);
assert_eq!(pkt.pts, 321);
@ -306,6 +438,27 @@ exit 0
});
}
#[test]
#[serial] // with_fake_pw_dump swaps a process-wide fake `pw-dump` binary
fn resolve_source_desc_prefers_pipewire_named_source_when_available() {
    // Skip on hosts without PipeWire support; this path is PipeWire-specific.
    if !MicrophoneCapture::pipewire_source_available() {
        return;
    }
    // Fake `pw-dump` output advertising a single named Audio/Source node.
    // The heredoc terminator must stay at column 0, so keep this flush-left.
    let script = r#"#!/usr/bin/env sh
cat <<'JSON'
[
{"info":{"props":{"media.class":"Audio/Source","node.name":"alsa_input.usb-UpstreamMic"}}}
]
JSON
"#;
    with_fake_pw_dump(script, || {
        // Requesting a fragment of the node name should resolve to a
        // pipewiresrc description targeting the full node.
        let desc =
            MicrophoneCapture::resolve_source_desc("UpstreamMic").expect("pipewire source");
        assert!(desc.contains("pipewiresrc target-object=alsa_input.usb-UpstreamMic"));
    });
}
#[test]
#[serial]
fn new_falls_back_to_default_source_when_requested_fragment_is_missing() {

View File

@ -68,7 +68,7 @@ fn activate_tracks_latest_generation_across_repeated_failures() {
#[test]
#[cfg(coverage)]
fn activate_non_uvc_returns_internal_error_in_coverage_harness() {
fn activate_non_uvc_returns_noop_relay_in_coverage_harness() {
let runtime = CameraRuntime::new();
let cfg = CameraConfig {
output: CameraOutput::Hdmi,
@ -84,10 +84,14 @@ fn activate_non_uvc_returns_internal_error_in_coverage_harness() {
let rt = Runtime::new().expect("runtime");
let result = rt.block_on(runtime.activate(&cfg));
match result {
Ok(_) => panic!("coverage harness should not create a real relay"),
Err(err) => assert_eq!(err.code(), Code::Internal),
}
let (session_id, relay) = result.expect("coverage harness should create a no-op relay");
assert_eq!(session_id, 1);
relay.feed(lesavka_common::lesavka::VideoPacket {
id: 2,
pts: 1,
data: vec![0, 0, 0, 1, 0x65],
..Default::default()
});
assert!(runtime.is_active(1));
assert!(!runtime.is_active(2));

View File

@ -376,6 +376,7 @@ mod server_main_binary_extra {
}
#[test]
#[cfg(not(coverage))]
#[serial]
fn stream_microphone_returns_internal_error_without_uac_device() {
let rt = tokio::runtime::Runtime::new().expect("runtime");

View File

@ -0,0 +1,171 @@
//! End-to-end server coverage for upstream media streams.
//!
//! Scope: run a local gRPC server and push synthetic client webcam/mic packets
//! through the public `StreamCamera` and `StreamMicrophone` RPCs.
//! Targets: `server/src/main.rs`, `server/src/audio.rs`, `server/src/video_sinks.rs`.
//! Why: local webcam/mic uplink should stay testable without physical UVC,
//! HDMI, or ALSA hardware in CI.
#[cfg(coverage)]
#[allow(warnings)]
mod server_upstream_media {
include!(env!("LESAVKA_SERVER_MAIN_SRC"));
use lesavka_common::lesavka::relay_client::RelayClient;
use serial_test::serial;
use temp_env::with_var;
use tempfile::tempdir;
use tonic::transport::Channel;
/// Dial the freshly spawned local tonic server, retrying briefly until it
/// starts accepting connections (up to 40 attempts, 25ms apart); panics if it
/// never comes up.
async fn connect_with_retry(addr: std::net::SocketAddr) -> Channel {
    let endpoint = tonic::transport::Endpoint::from_shared(format!("http://{addr}"))
        .expect("endpoint")
        .tcp_nodelay(true);
    let mut attempts_left = 40;
    while attempts_left > 0 {
        match endpoint.clone().connect().await {
            Ok(channel) => return channel,
            Err(_) => {
                attempts_left -= 1;
                tokio::time::sleep(std::time::Duration::from_millis(25)).await;
            }
        }
    }
    panic!("failed to connect to local tonic server");
}
/// Build a `Handler` backed entirely by temp files so no HID gadget, camera,
/// or ALSA hardware is needed. Returns the TempDir alongside the handler so
/// the backing files outlive the test.
fn build_handler_for_tests() -> (tempfile::TempDir, Handler) {
    let dir = tempdir().expect("tempdir");
    // Stand-ins for the keyboard/mouse HID gadget device nodes.
    let kb_path = dir.path().join("hidg0.bin");
    let ms_path = dir.path().join("hidg1.bin");
    std::fs::write(&kb_path, []).expect("create kb file");
    std::fs::write(&ms_path, []).expect("create ms file");
    let kb = tokio::fs::File::from_std(
        std::fs::OpenOptions::new()
            .read(true)
            .write(true)
            .open(&kb_path)
            .expect("open kb"),
    );
    let ms = tokio::fs::File::from_std(
        std::fs::OpenOptions::new()
            .read(true)
            .write(true)
            .open(&ms_path)
            .expect("open ms"),
    );
    (
        dir,
        Handler {
            kb: std::sync::Arc::new(tokio::sync::Mutex::new(Some(kb))),
            ms: std::sync::Arc::new(tokio::sync::Mutex::new(Some(ms))),
            gadget: UsbGadget::new("lesavka"),
            did_cycle: std::sync::Arc::new(std::sync::atomic::AtomicBool::new(false)),
            camera_rt: std::sync::Arc::new(CameraRuntime::new()),
            capture_power: CapturePowerManager::new(),
            eye_hubs: std::sync::Arc::new(tokio::sync::Mutex::new(
                std::collections::HashMap::new(),
            )),
        },
    )
}
/// Serve `handler` on an ephemeral localhost port and return the server task
/// handle plus a connected `RelayClient`.
async fn serve_handler(
    handler: Handler,
) -> (
    tokio::task::JoinHandle<()>,
    RelayClient<tonic::transport::Channel>,
) {
    // Reserve an ephemeral port, then free it so tonic can bind the same addr.
    // NOTE(review): there is a small race here — another process could grab
    // the port between drop() and serve(); acceptable for a local test helper.
    let listener = std::net::TcpListener::bind("127.0.0.1:0").expect("bind");
    let addr = listener.local_addr().expect("addr");
    drop(listener);
    let server = tokio::spawn(async move {
        let _ = tonic::transport::Server::builder()
            .add_service(RelayServer::new(handler))
            .serve(addr)
            .await;
    });
    // Retry until the spawned server is actually accepting connections.
    let channel = connect_with_retry(addr).await;
    (server, RelayClient::new(channel))
}
#[test]
#[serial] // mutates process-wide environment via `with_var`
fn stream_microphone_accepts_upstream_audio_packets() {
    // Purpose: the public StreamMicrophone RPC accepts a synthetic upstream
    // audio packet and acks with Empty, all without real ALSA hardware.
    let rt = tokio::runtime::Runtime::new().expect("runtime");
    // Disable the capture-power systemd unit so the handler skips power I/O.
    with_var("LESAVKA_CAPTURE_POWER_UNIT", Some("none"), || {
        rt.block_on(async {
            let (_dir, handler) = build_handler_for_tests();
            let (server, mut cli) = serve_handler(handler).await;
            // Send exactly one packet, then drop the sender to end the stream.
            let (tx, rx) = tokio::sync::mpsc::channel(4);
            tx.send(AudioPacket {
                id: 0,
                pts: 12_345,
                data: vec![1, 2, 3, 4, 5, 6],
            })
            .await
            .expect("send synthetic upstream audio");
            drop(tx);
            let outbound = tokio_stream::wrappers::ReceiverStream::new(rx);
            let mut response = cli
                .stream_microphone(tonic::Request::new(outbound))
                .await
                .expect("microphone stream should open");
            // The server should ack the completed stream within one second.
            let ack = tokio::time::timeout(
                std::time::Duration::from_secs(1),
                response.get_mut().message(),
            )
            .await
            .expect("microphone ack timeout")
            .expect("microphone ack grpc")
            .expect("microphone ack item");
            assert_eq!(ack, Empty {});
            server.abort();
        });
    });
}
#[test]
#[serial] // mutates process-wide environment via `with_var`
fn stream_camera_accepts_upstream_video_packets() {
    // Purpose: the public StreamCamera RPC accepts a synthetic upstream video
    // packet and acks with Empty, without real UVC/HDMI hardware.
    let rt = tokio::runtime::Runtime::new().expect("runtime");
    // Disable capture-power I/O and ensure the UVC path is NOT disabled, so
    // the camera runtime activates (the coverage build uses a no-op relay).
    with_var("LESAVKA_CAPTURE_POWER_UNIT", Some("none"), || {
        with_var("LESAVKA_DISABLE_UVC", None::<&str>, || {
            rt.block_on(async {
                let (_dir, handler) = build_handler_for_tests();
                let (server, mut cli) = serve_handler(handler).await;
                // Send one packet carrying an H.264 IDR-like payload, then
                // drop the sender to end the stream.
                let (tx, rx) = tokio::sync::mpsc::channel(4);
                tx.send(VideoPacket {
                    id: 2,
                    pts: 54_321,
                    data: vec![0, 0, 0, 1, 0x65, 0x88],
                    ..Default::default()
                })
                .await
                .expect("send synthetic upstream video");
                drop(tx);
                let outbound = tokio_stream::wrappers::ReceiverStream::new(rx);
                let mut response = cli
                    .stream_camera(tonic::Request::new(outbound))
                    .await
                    .expect("camera stream should open");
                // The server should ack the completed stream within a second.
                let ack = tokio::time::timeout(
                    std::time::Duration::from_secs(1),
                    response.get_mut().message(),
                )
                .await
                .expect("camera ack timeout")
                .expect("camera ack grpc")
                .expect("camera ack item");
                assert_eq!(ack, Empty {});
                server.abort();
            });
        });
    });
}
}