// lesavka/client/src/launcher/device_test.rs
use anyhow::{Context, Result, anyhow};
use gst::prelude::*;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gtk::{gdk, glib};
use shell_escape::escape;
use std::borrow::Cow;
use std::process::{Child, Command};
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{Arc, Mutex};
use std::time::Duration;
// Fixed pixel size of the in-launcher camera preview surface; the GStreamer
// pipeline below scales every camera to exactly this resolution.
const CAMERA_PREVIEW_WIDTH: i32 = 360;
const CAMERA_PREVIEW_HEIGHT: i32 = 202;
// Status-label text shown when no camera is selected and no preview is running.
const CAMERA_PREVIEW_IDLE: &str = "Select a camera and click Start Preview.";
/// Which device test a UI action refers to.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DeviceTestKind {
    /// In-process GStreamer camera preview (not an external child process).
    Camera,
    /// Microphone monitor loopback, run as an external `gst-launch-1.0` child.
    Microphone,
    /// Speaker test tone, run as an external `gst-launch-1.0` child.
    Speaker,
}
/// Owns the lifecycle of the launcher's device tests: the in-process camera
/// preview and the external microphone/speaker test child processes.
#[derive(Default)]
pub struct DeviceTestController {
    // In-process camera preview, created when the panel widgets are bound.
    camera: Option<LocalCameraPreview>,
    // Normalized camera selection ("auto"/blank collapse to None).
    selected_camera: Option<String>,
    // Running microphone monitor child process, if any.
    microphone: Option<Child>,
    // Running speaker tone child process, if any.
    speaker: Option<Child>,
}
impl DeviceTestController {
    /// Creates an idle controller: no preview bound, no tests running.
    pub fn new() -> Self {
        Self::default()
    }

    /// (Re)binds the camera preview to the given GTK widgets, stopping any
    /// previous preview first and re-applying the stored camera selection.
    pub fn bind_camera_preview(
        &mut self,
        camera_picture: &gtk::Picture,
        camera_status: &gtk::Label,
    ) -> Result<()> {
        // Stop the old preview before replacing it so its feed thread exits.
        if let Some(camera) = self.camera.as_mut() {
            camera.stop();
        }
        let mut preview = LocalCameraPreview::new(camera_picture, camera_status);
        preview.set_selected(self.selected_camera.as_deref())?;
        self.camera = Some(preview);
        Ok(())
    }

    /// Reports whether the given test is currently active. Takes `&mut self`
    /// because finished child processes are reaped first.
    pub fn is_running(&mut self, kind: DeviceTestKind) -> bool {
        self.cleanup_finished();
        match kind {
            DeviceTestKind::Camera => self
                .camera
                .as_ref()
                .is_some_and(LocalCameraPreview::is_running),
            DeviceTestKind::Microphone => self.microphone.is_some(),
            DeviceTestKind::Speaker => self.speaker.is_some(),
        }
    }

    /// Stores the (normalized) camera selection and, if a preview panel is
    /// bound, pushes the new selection into it (restarting a live preview).
    pub fn set_camera_selection(&mut self, camera: Option<&str>) -> Result<()> {
        self.selected_camera = normalize_camera_selection(camera);
        if let Some(preview) = self.camera.as_mut() {
            preview.set_selected(self.selected_camera.as_deref())?;
        }
        Ok(())
    }

    /// Starts or stops the camera preview; returns the new running state.
    ///
    /// # Errors
    /// Fails when `bind_camera_preview` has not been called yet, or when the
    /// preview itself fails to start.
    pub fn toggle_camera(&mut self) -> Result<bool> {
        let preview = self
            .camera
            .as_mut()
            .ok_or_else(|| anyhow!("camera preview panel is not ready yet"))?;
        preview.toggle()
    }

    /// Starts or stops the microphone monitor loopback test; returns the new
    /// running state.
    pub fn toggle_microphone(&mut self, source: Option<&str>, sink: Option<&str>) -> Result<bool> {
        self.toggle(
            DeviceTestKind::Microphone,
            build_microphone_test(source, sink),
        )
    }

    /// Starts or stops the speaker test tone; returns the new running state.
    pub fn toggle_speaker(&mut self, sink: Option<&str>) -> Result<bool> {
        self.toggle(DeviceTestKind::Speaker, build_speaker_test(sink))
    }

    /// Stops the camera preview and kills any running audio test children.
    pub fn stop_all(&mut self) {
        if let Some(camera) = self.camera.as_mut() {
            camera.stop();
        }
        for kind in [DeviceTestKind::Microphone, DeviceTestKind::Speaker] {
            self.stop(kind);
        }
    }

    /// Shared toggle for child-process tests: stop if running, otherwise
    /// spawn `command`. Returns the new running state.
    fn toggle(&mut self, kind: DeviceTestKind, command: Result<Command>) -> Result<bool> {
        self.cleanup_finished();
        if self.slot(kind).is_some() {
            self.stop(kind);
            return Ok(false);
        }
        let child = command?
            .spawn()
            .with_context(|| format!("starting {kind:?} test"))?;
        *self.slot_mut(kind) = Some(child);
        Ok(true)
    }

    /// Kills and reaps the child for `kind`, if one is running.
    fn stop(&mut self, kind: DeviceTestKind) {
        if let Some(mut child) = self.slot_mut(kind).take() {
            // Best-effort: the child may already have exited.
            let _ = child.kill();
            let _ = child.wait();
        }
    }

    /// Drops slots whose child has already exited so they do not linger as
    /// zombies and `is_running` stays accurate.
    fn cleanup_finished(&mut self) {
        for kind in [DeviceTestKind::Microphone, DeviceTestKind::Speaker] {
            let finished = self
                .slot_mut(kind)
                .as_mut()
                .and_then(|child| child.try_wait().ok())
                .flatten()
                .is_some();
            if finished {
                let _ = self.slot_mut(kind).take();
            }
        }
    }

    /// Immutable slot lookup for child-process tests.
    ///
    /// # Panics
    /// Panics on `DeviceTestKind::Camera`, which is not a child process.
    fn slot(&self, kind: DeviceTestKind) -> &Option<Child> {
        match kind {
            DeviceTestKind::Camera => panic!("camera preview is not an external child process"),
            DeviceTestKind::Microphone => &self.microphone,
            DeviceTestKind::Speaker => &self.speaker,
        }
    }

    /// Mutable slot lookup for child-process tests.
    ///
    /// # Panics
    /// Panics on `DeviceTestKind::Camera`, which is not a child process.
    fn slot_mut(&mut self, kind: DeviceTestKind) -> &mut Option<Child> {
        match kind {
            DeviceTestKind::Camera => panic!("camera preview is not an external child process"),
            DeviceTestKind::Microphone => &mut self.microphone,
            DeviceTestKind::Speaker => &mut self.speaker,
        }
    }
}
/// In-process camera preview: a background feed thread decodes frames via
/// GStreamer while a GTK timeout paints them into a `gtk::Picture`.
struct LocalCameraPreview {
    // Most recent decoded frame, handed from the feed thread to the GTK tick.
    latest: Arc<Mutex<Option<PreviewFrame>>>,
    // Status line mirrored into the GTK label on every tick.
    status_text: Arc<Mutex<String>>,
    // Monotonic token; bumping it invalidates any in-flight feed thread.
    generation: Arc<AtomicU64>,
    // Whether a preview feed should currently be running.
    running: Arc<AtomicBool>,
    // Normalized camera selection, if any.
    selected_device: Option<String>,
}
/// One RGBA frame ready to be wrapped in a `gdk::MemoryTexture`.
struct PreviewFrame {
    width: i32,
    height: i32,
    // Bytes per row of `rgba`.
    stride: usize,
    rgba: Vec<u8>,
}
impl LocalCameraPreview {
    /// Creates an idle preview and installs a recurring GTK timeout that
    /// paints the latest frame into `picture` and mirrors the status text
    /// into `status_label`.
    ///
    /// NOTE(review): the timeout returns `Continue` unconditionally, so it
    /// runs for the lifetime of the cloned widgets even after `stop()`.
    fn new(picture: &gtk::Picture, status_label: &gtk::Label) -> Self {
        let latest = Arc::new(Mutex::new(None::<PreviewFrame>));
        let status_text = Arc::new(Mutex::new(CAMERA_PREVIEW_IDLE.to_string()));
        let generation = Arc::new(AtomicU64::new(0));
        let running = Arc::new(AtomicBool::new(false));
        {
            let picture = picture.clone();
            let status_label = status_label.clone();
            let latest = Arc::clone(&latest);
            let status_text = Arc::clone(&status_text);
            // ~8 fps UI refresh; `take()` consumes the frame so each one is
            // painted at most once.
            glib::timeout_add_local(Duration::from_millis(120), move || {
                let next = latest.lock().ok().and_then(|mut slot| slot.take());
                if let Some(frame) = next {
                    let bytes = glib::Bytes::from_owned(frame.rgba);
                    let texture = gdk::MemoryTexture::new(
                        frame.width,
                        frame.height,
                        gdk::MemoryFormat::R8g8b8a8,
                        &bytes,
                        frame.stride,
                    );
                    picture.set_paintable(Some(&texture));
                }
                if let Ok(text) = status_text.lock() {
                    status_label.set_text(text.as_str());
                }
                glib::ControlFlow::Continue
            });
        }
        Self {
            latest,
            status_text,
            generation,
            running,
            selected_device: None,
        }
    }

    /// Whether a feed thread is (or should be) active.
    fn is_running(&self) -> bool {
        self.running.load(Ordering::Acquire)
    }

    /// Applies a new camera selection. A live preview is restarted on the new
    /// device; otherwise only the status text is updated.
    fn set_selected(&mut self, camera: Option<&str>) -> Result<()> {
        self.selected_device = normalize_camera_selection(camera);
        if self.is_running() {
            self.stop();
            self.start()?;
            return Ok(());
        }
        self.set_status(match self.selected_device.as_deref() {
            Some(camera) => format!(
                "Selected {camera}. Start Preview to confirm framing here before you launch the relay."
            ),
            None => CAMERA_PREVIEW_IDLE.to_string(),
        });
        Ok(())
    }

    /// Flips the preview state; returns the new running state.
    fn toggle(&mut self) -> Result<bool> {
        if self.is_running() {
            self.stop();
            return Ok(false);
        }
        self.start()?;
        Ok(true)
    }

    /// Spawns the feed thread for the currently selected device.
    ///
    /// # Errors
    /// Fails when GStreamer cannot be initialised or no camera is selected.
    fn start(&mut self) -> Result<()> {
        gst::init().context("initialising in-launcher camera preview")?;
        let selected = self
            .selected_device
            .clone()
            .ok_or_else(|| anyhow!("select a camera before starting the in-launcher preview"))?;
        let device = resolve_camera_device(&selected);
        let latest = Arc::clone(&self.latest);
        let status_text = Arc::clone(&self.status_text);
        let generation = Arc::clone(&self.generation);
        let running = Arc::clone(&self.running);
        // Claim a fresh generation token; older feed threads observe the bump
        // and exit on their own.
        let token = generation.fetch_add(1, Ordering::AcqRel) + 1;
        running.store(true, Ordering::Release);
        self.set_status(format!("Starting local preview for {selected}..."));
        std::thread::spawn(move || {
            // Only report the failure if this thread is still the current
            // generation — a superseded thread's error is stale.
            if let Err(err) = run_camera_preview_feed(
                selected,
                device,
                token,
                latest,
                status_text.clone(),
                generation.clone(),
                running.clone(),
            ) && generation.load(Ordering::Acquire) == token
            {
                running.store(false, Ordering::Release);
                if let Ok(mut status) = status_text.lock() {
                    *status = format!("Camera preview failed: {err}");
                }
            }
        });
        Ok(())
    }

    /// Signals the feed thread to exit, invalidates its generation token,
    /// discards any pending frame, and updates the status line.
    fn stop(&mut self) {
        self.running.store(false, Ordering::Release);
        self.generation.fetch_add(1, Ordering::AcqRel);
        if let Ok(mut latest) = self.latest.lock() {
            *latest = None;
        }
        self.set_status(match self.selected_device.as_deref() {
            Some(camera) => {
                format!("Local preview stopped. {camera} stays selected for the next relay launch.")
            }
            None => CAMERA_PREVIEW_IDLE.to_string(),
        });
    }

    /// Replaces the shared status text (picked up by the GTK tick).
    fn set_status(&self, text: String) {
        if let Ok(mut status) = self.status_text.lock() {
            *status = text;
        }
    }
}
/// Trims the raw selection and collapses blank or "auto" (any case) entries
/// to `None`, i.e. "no explicit camera chosen".
fn normalize_camera_selection(camera: Option<&str>) -> Option<String> {
    let trimmed = camera?.trim();
    if trimmed.is_empty() || trimmed.eq_ignore_ascii_case("auto") {
        None
    } else {
        Some(trimmed.to_string())
    }
}
/// Maps a camera selection to a device node: explicit `/dev/...` paths pass
/// through untouched, anything else is treated as a stable `by-id` name.
fn resolve_camera_device(camera: &str) -> String {
    match camera.strip_prefix("/dev/") {
        Some(_) => camera.to_owned(),
        None => format!("/dev/v4l/by-id/{camera}"),
    }
}
/// Feed-thread body: pumps frames from the preview pipeline into `latest`
/// until `running` is cleared or `generation` no longer matches `token`
/// (meaning a newer preview superseded this one).
fn run_camera_preview_feed(
    selected: String,
    device: String,
    token: u64,
    latest: Arc<Mutex<Option<PreviewFrame>>>,
    status_text: Arc<Mutex<String>>,
    generation: Arc<AtomicU64>,
    running: Arc<AtomicBool>,
) -> Result<()> {
    let (pipeline, appsink) = build_camera_preview_pipeline(&device)?;
    pipeline
        .set_state(gst::State::Playing)
        .context("starting in-launcher camera preview pipeline")?;
    if let Ok(mut status) = status_text.lock() {
        *status = format!("Local preview live for {selected}.");
    }
    while running.load(Ordering::Acquire) && generation.load(Ordering::Acquire) == token {
        // The 250 ms pull timeout keeps the loop responsive to stop requests
        // even when the camera delivers no frames.
        if let Some(sample) = appsink.try_pull_sample(gst::ClockTime::from_mseconds(250))
            && let Some(frame) = sample_to_frame(&sample)
            && let Ok(mut slot) = latest.lock()
        {
            *slot = Some(frame);
        }
    }
    // Best-effort teardown; the thread is exiting either way.
    let _ = pipeline.set_state(gst::State::Null);
    Ok(())
}
/// Builds the fixed-size RGBA preview pipeline for `device` and returns it
/// together with its named appsink.
fn build_camera_preview_pipeline(device: &str) -> Result<(gst::Pipeline, gst_app::AppSink)> {
    // Escape the path so quotes/backslashes in the device name survive the
    // textual pipeline description.
    let device = gst_quote(device);
    let desc = format!(
        "v4l2src device=\"{device}\" do-timestamp=true ! \
         video/x-raw,width={CAMERA_PREVIEW_WIDTH},height={CAMERA_PREVIEW_HEIGHT},framerate=30/1 ! \
         videoconvert ! videoscale ! \
         video/x-raw,format=RGBA,width={CAMERA_PREVIEW_WIDTH},height={CAMERA_PREVIEW_HEIGHT},pixel-aspect-ratio=1/1 ! \
         appsink name=sink emit-signals=false sync=false max-buffers=1 drop=true"
    );
    let pipeline = gst::parse::launch(&desc)?
        .downcast::<gst::Pipeline>()
        .expect("camera preview pipeline");
    let appsink = pipeline
        .by_name("sink")
        .context("missing in-launcher camera preview appsink")?
        .downcast::<gst_app::AppSink>()
        .expect("camera preview appsink");
    // Re-assert the expected format/size on the appsink itself so pulled
    // samples are guaranteed to match the preview dimensions.
    appsink.set_caps(Some(
        &gst::Caps::builder("video/x-raw")
            .field("format", "RGBA")
            .field("width", CAMERA_PREVIEW_WIDTH)
            .field("height", CAMERA_PREVIEW_HEIGHT)
            .build(),
    ));
    Ok((pipeline, appsink))
}
/// Converts a pulled appsink sample into an owned RGBA frame for the GTK side.
///
/// Returns `None` when the sample lacks caps/buffer metadata, advertises
/// non-positive dimensions, or when the mapped buffer is too small for the
/// advertised width — an undersized frame would make the later
/// `gdk::MemoryTexture::new` call panic.
fn sample_to_frame(sample: &gst::Sample) -> Option<PreviewFrame> {
    let caps = sample.caps()?;
    let structure = caps.structure(0)?;
    let width = structure.get::<i32>("width").ok()?;
    let height = structure.get::<i32>("height").ok()?;
    // Guard against degenerate dimensions before dividing by `height`.
    if width <= 0 || height <= 0 {
        return None;
    }
    let buffer = sample.buffer()?;
    let map = buffer.map_readable().ok()?;
    let rgba = map.as_slice().to_vec();
    // Derive the row stride from the actual buffer size so padded rows
    // (stride > width * 4) are still honoured.
    let stride = rgba.len() / height as usize;
    // Reject buffers shorter than width * 4 bytes per row: RGBA needs at
    // least that much, and MemoryTexture would panic on less.
    if stride < width as usize * 4 {
        return None;
    }
    Some(PreviewFrame {
        width,
        height,
        stride,
        rgba,
    })
}
/// Escapes a device path for embedding inside a double-quoted gst-launch
/// property value: backslashes and double quotes get a backslash prefix.
fn gst_quote(value: &str) -> String {
    let mut escaped = String::with_capacity(value.len());
    for ch in value.chars() {
        match ch {
            '\\' => escaped.push_str("\\\\"),
            '"' => escaped.push_str("\\\""),
            other => escaped.push(other),
        }
    }
    escaped
}
/// Builds the loopback command for a microphone monitor test.
///
/// A source must be given (blank/whitespace counts as missing); a blank or
/// absent sink means "use the default Pulse output". Returns the
/// not-yet-spawned `Command`.
fn build_microphone_test(source: Option<&str>, sink: Option<&str>) -> Result<Command> {
    let source = match source {
        Some(value) if !value.trim().is_empty() => value,
        _ => return Err(anyhow!("select a microphone before starting a monitor test")),
    };
    let sink_prop = match sink {
        Some(value) if !value.trim().is_empty() => format!("device={}", quote(value)),
        _ => String::new(),
    };
    let pipeline = format!(
        "gst-launch-1.0 -q pulsesrc device={} ! audioconvert ! audioresample ! queue ! pulsesink {}",
        quote(source),
        sink_prop
    );
    Ok(shell_command(pipeline))
}
/// Builds the 880 Hz sine-tone command for a speaker test; a blank or absent
/// sink falls back to the default Pulse output.
fn build_speaker_test(sink: Option<&str>) -> Result<Command> {
    let sink_prop = match sink {
        Some(value) if !value.trim().is_empty() => format!("device={}", quote(value)),
        _ => String::new(),
    };
    Ok(shell_command(format!(
        "gst-launch-1.0 -q audiotestsrc is-live=true wave=sine freq=880 volume=0.25 ! audioconvert ! audioresample ! queue ! pulsesink {}",
        sink_prop
    )))
}
/// Wraps a pipeline string in `bash -lc` so the user's login-shell
/// environment applies to the spawned gst-launch process.
fn shell_command(command: String) -> Command {
    let mut bash = Command::new("bash");
    bash.arg("-lc").arg(&command);
    bash
}
/// Shell-escapes an arbitrary device identifier so it can be interpolated
/// safely into the bash command line.
fn quote(value: impl Into<String>) -> String {
    let owned: Cow<'_, str> = Cow::Owned(value.into());
    escape(owned).into_owned()
}
#[cfg(test)]
mod tests {
    use super::{normalize_camera_selection, resolve_camera_device};

    // Explicit /dev paths must pass through untouched, while bare catalog
    // names resolve under the stable by-id directory.
    #[test]
    fn resolve_camera_device_accepts_explicit_paths_and_catalog_names() {
        assert_eq!(resolve_camera_device("/dev/video0"), "/dev/video0");
        assert_eq!(
            resolve_camera_device("usb-Logitech_C920-video-index0"),
            "/dev/v4l/by-id/usb-Logitech_C920-video-index0"
        );
    }

    // Blank and "auto" selections collapse to None; real names are kept.
    #[test]
    fn normalize_camera_selection_drops_auto_and_blank_values() {
        assert_eq!(normalize_camera_selection(None), None);
        assert_eq!(normalize_camera_selection(Some("")), None);
        assert_eq!(normalize_camera_selection(Some("auto")), None);
        assert_eq!(
            normalize_camera_selection(Some("usb-Logitech_C920-video-index0")),
            Some("usb-Logitech_C920-video-index0".to_string())
        );
    }
}