fix(audio): restore live playback timing

This commit is contained in:
Brad Stein 2026-04-21 10:57:57 -03:00
parent 3903eeb3ff
commit b6cf15767d
9 changed files with 65 additions and 39 deletions

View File

@@ -4,7 +4,7 @@ path = "src/main.rs"
[package] [package]
name = "lesavka_client" name = "lesavka_client"
version = "0.11.31" version = "0.11.32"
edition = "2024" edition = "2024"
[dependencies] [dependencies]

View File

@@ -975,7 +975,7 @@ fn remote_failsafe_timeout_from_env() -> Duration {
let millis = std::env::var("LESAVKA_INPUT_REMOTE_FAILSAFE_MS") let millis = std::env::var("LESAVKA_INPUT_REMOTE_FAILSAFE_MS")
.ok() .ok()
.and_then(|raw| raw.parse::<u64>().ok()) .and_then(|raw| raw.parse::<u64>().ok())
.unwrap_or(5_000); .unwrap_or(60_000);
Duration::from_millis(millis) Duration::from_millis(millis)
} }

View File

@@ -111,8 +111,8 @@ const LESAVKA_ICON_SEARCH_PATH: &str = concat!(env!("CARGO_MANIFEST_DIR"), "/ass
const LAUNCHER_DEFAULT_WIDTH: i32 = 1380; const LAUNCHER_DEFAULT_WIDTH: i32 = 1380;
const LAUNCHER_DEFAULT_HEIGHT: i32 = 860; const LAUNCHER_DEFAULT_HEIGHT: i32 = 860;
const OPERATIONS_RAIL_WIDTH: i32 = 288; const OPERATIONS_RAIL_WIDTH: i32 = 288;
const CAMERA_PREVIEW_VIEWPORT_HEIGHT: i32 = 144; const CAMERA_PREVIEW_VIEWPORT_HEIGHT: i32 = 108;
const CAMERA_PREVIEW_VIEWPORT_WIDTH: i32 = 256; const CAMERA_PREVIEW_VIEWPORT_WIDTH: i32 = 192;
pub fn build_launcher_view( pub fn build_launcher_view(
app: &gtk::Application, app: &gtk::Application,
@@ -211,7 +211,7 @@ pub fn build_launcher_view(
build_panel_with_action("Device Staging", Some(device_refresh_button.upcast_ref())); build_panel_with_action("Device Staging", Some(device_refresh_button.upcast_ref()));
devices_panel.set_hexpand(true); devices_panel.set_hexpand(true);
devices_panel.set_vexpand(false); devices_panel.set_vexpand(false);
devices_panel.set_valign(gtk::Align::Start); devices_panel.set_valign(gtk::Align::Fill);
devices_body.set_spacing(8); devices_body.set_spacing(8);
let control_group = build_subgroup("Control Inputs"); let control_group = build_subgroup("Control Inputs");
@@ -321,7 +321,8 @@ pub fn build_launcher_view(
let (preview_panel, preview_body) = build_panel("Device Testing"); let (preview_panel, preview_body) = build_panel("Device Testing");
preview_panel.set_hexpand(true); preview_panel.set_hexpand(true);
preview_panel.set_vexpand(false); preview_panel.set_vexpand(false);
preview_panel.set_valign(gtk::Align::Start); preview_panel.set_valign(gtk::Align::Fill);
preview_body.set_vexpand(true);
preview_body.set_spacing(6); preview_body.set_spacing(6);
let camera_preview = gtk::Picture::new(); let camera_preview = gtk::Picture::new();
camera_preview.set_can_shrink(false); camera_preview.set_can_shrink(false);
@@ -355,6 +356,8 @@ pub fn build_launcher_view(
preview_body.append(&webcam_group); preview_body.append(&webcam_group);
let playback_group = build_subgroup("Mic Playback"); let playback_group = build_subgroup("Mic Playback");
playback_group.set_vexpand(true);
playback_group.set_valign(gtk::Align::Fill);
let playback_body = gtk::Box::new(gtk::Orientation::Vertical, 6); let playback_body = gtk::Box::new(gtk::Orientation::Vertical, 6);
let playback_row = gtk::Box::new(gtk::Orientation::Horizontal, 8); let playback_row = gtk::Box::new(gtk::Orientation::Horizontal, 8);
playback_row.set_homogeneous(false); playback_row.set_homogeneous(false);

View File

@@ -18,8 +18,7 @@ pub struct AudioOut {
#[derive(Default)] #[derive(Default)]
struct AudioTimeline { struct AudioTimeline {
first_remote_pts_us: Option<u64>, last_remote_pts_us: Option<u64>,
last_local_pts_us: u64,
packets: u64, packets: u64,
} }
@@ -39,6 +38,7 @@ impl AudioOut {
aacparse ! avdec_aac ! \ aacparse ! avdec_aac ! \
audioconvert ! audioresample ! \ audioconvert ! audioresample ! \
audio/x-raw,format=S16LE,channels=2,rate=48000 ! \ audio/x-raw,format=S16LE,channels=2,rate=48000 ! \
level name=remote_audio_level interval=1000000000 message=true ! \
queue max-size-time=400000000 max-size-bytes=0 max-size-buffers=0 ! {}", queue max-size-time=400000000 max-size-bytes=0 max-size-buffers=0 ! {}",
sink, sink,
); );
@@ -49,6 +49,7 @@ impl AudioOut {
queue max-size-time=500000000 max-size-bytes=0 max-size-buffers=0 ! \ queue max-size-time=500000000 max-size-bytes=0 max-size-buffers=0 ! \
aacparse ! avdec_aac ! audioconvert ! audioresample ! \ aacparse ! avdec_aac ! audioconvert ! audioresample ! \
audio/x-raw,format=S16LE,channels=2,rate=48000 ! \ audio/x-raw,format=S16LE,channels=2,rate=48000 ! \
level name=remote_audio_level interval=1000000000 message=true ! \
queue max-size-time=400000000 max-size-bytes=0 max-size-buffers=0 ! {} \ queue max-size-time=400000000 max-size-bytes=0 max-size-buffers=0 ! {} \
t. ! queue ! filesink location=/tmp/lesavka-audio.aac", t. ! queue ! filesink location=/tmp/lesavka-audio.aac",
sink, sink,
@@ -93,10 +94,15 @@ impl AudioOut {
w.error(), w.error(),
w.debug().unwrap_or_default() w.debug().unwrap_or_default()
), ),
Element(e) => debug!( Element(e) => {
"🔎 gst element message: {}", if let Some(structure) = e.structure() {
e.structure().map(|s| s.to_string()).unwrap_or_default() if structure.name() == "level" {
), info!("🔊 decoded audio level {}", structure);
} else {
debug!("🔎 gst element message: {}", structure);
}
}
}
StateChanged(s) if s.current() == gst::State::Playing => { StateChanged(s) if s.current() == gst::State::Playing => {
if msg.src().map(|s| s.is::<gst::Pipeline>()).unwrap_or(false) { if msg.src().map(|s| s.is::<gst::Pipeline>()).unwrap_or(false) {
info!("🔊 audio pipeline ▶️ (sink='{}')", sink); info!("🔊 audio pipeline ▶️ (sink='{}')", sink);
@@ -123,28 +129,7 @@ impl AudioOut {
} }
pub fn push(&self, pkt: AudioPacket) { pub fn push(&self, pkt: AudioPacket) {
let mut buf = gst::Buffer::from_slice(pkt.data); let buf = live_audio_buffer(pkt, &self.timeline);
if let Ok(mut timeline) = self.timeline.lock() {
let base = timeline.first_remote_pts_us.get_or_insert(pkt.pts);
let mut local_pts_us = pkt.pts.saturating_sub(*base);
if local_pts_us < timeline.last_local_pts_us {
local_pts_us = timeline.last_local_pts_us.saturating_add(1);
}
timeline.last_local_pts_us = local_pts_us;
timeline.packets = timeline.packets.saturating_add(1);
if timeline.packets <= 8 || timeline.packets % 600 == 0 {
debug!(
packet = timeline.packets,
remote_pts_us = pkt.pts,
local_pts_us,
bytes = buf.size(),
"🔊 audio packet queued"
);
}
buf.get_mut()
.unwrap()
.set_pts(Some(gst::ClockTime::from_useconds(local_pts_us)));
}
#[cfg(not(coverage))] #[cfg(not(coverage))]
if let Err(e) = self.src.push_buffer(buf) { if let Err(e) = self.src.push_buffer(buf) {
warn!("📉 AppSrc push failed: {e:?}"); warn!("📉 AppSrc push failed: {e:?}");
@@ -157,6 +142,27 @@ impl AudioOut {
} }
} }
fn live_audio_buffer(pkt: AudioPacket, timeline: &Mutex<AudioTimeline>) -> gst::Buffer {
let buf = gst::Buffer::from_slice(pkt.data);
if let Ok(mut timeline) = timeline.lock() {
let remote_gap_us = timeline
.last_remote_pts_us
.map(|last| pkt.pts.saturating_sub(last));
timeline.last_remote_pts_us = Some(pkt.pts);
timeline.packets = timeline.packets.saturating_add(1);
if timeline.packets <= 8 || timeline.packets % 600 == 0 {
debug!(
packet = timeline.packets,
remote_pts_us = pkt.pts,
remote_gap_us,
bytes = buf.size(),
"🔊 audio packet queued for live appsrc timestamping"
);
}
}
buf
}
impl Drop for AudioOut { impl Drop for AudioOut {
fn drop(&mut self) { fn drop(&mut self) {
let _ = self.pipeline.set_state(gst::State::Null); let _ = self.pipeline.set_state(gst::State::Null);

View File

@@ -1,6 +1,6 @@
[package] [package]
name = "lesavka_common" name = "lesavka_common"
version = "0.11.31" version = "0.11.32"
edition = "2024" edition = "2024"
build = "build.rs" build = "build.rs"

View File

@@ -17,6 +17,6 @@ mod tests {
#[test] #[test]
fn banner_includes_version() { fn banner_includes_version() {
assert_eq!(banner("0.11.31"), "lesavka-common CLI (v0.11.31)"); assert_eq!(banner("0.11.32"), "lesavka-common CLI (v0.11.32)");
} }
} }

View File

@@ -10,7 +10,7 @@ bench = false
[package] [package]
name = "lesavka_server" name = "lesavka_server"
version = "0.11.31" version = "0.11.32"
edition = "2024" edition = "2024"
autobins = false autobins = false

View File

@@ -466,7 +466,7 @@ mod inputs_contract {
with_var("LESAVKA_INPUT_REMOTE_FAILSAFE_MS", None::<&str>, || { with_var("LESAVKA_INPUT_REMOTE_FAILSAFE_MS", None::<&str>, || {
assert_eq!( assert_eq!(
remote_failsafe_timeout_from_env(), remote_failsafe_timeout_from_env(),
Duration::from_millis(5_000) Duration::from_millis(60_000)
); );
}); });
with_var("LESAVKA_INPUT_REMOTE_FAILSAFE_MS", Some("0"), || { with_var("LESAVKA_INPUT_REMOTE_FAILSAFE_MS", Some("0"), || {

View File

@@ -80,7 +80,7 @@ exit 0
sinks, sinks,
vec![( vec![(
"alsa_output.usb-DAC_1234-00.analog-stereo".to_string(), "alsa_output.usb-DAC_1234-00.analog-stereo".to_string(),
"UNKNOWN".to_string() "DEFAULT".to_string()
)] )]
); );
let sink = pick_sink_element().expect("pick sink"); let sink = pick_sink_element().expect("pick sink");
@@ -168,4 +168,21 @@ exit 0
}, },
); );
} }
#[test]
fn live_audio_buffer_leaves_pts_for_appsrc_timestamping() {
let _ = gst::init();
let timeline = std::sync::Mutex::new(AudioTimeline::default());
let buffer = live_audio_buffer(
AudioPacket {
id: 0,
pts: 42_666,
data: vec![0xFF, 0xF1, 0x50, 0x80, 0x00, 0x1F, 0xFC],
},
&timeline,
);
assert_eq!(buffer.pts(), None);
assert_eq!(timeline.lock().expect("timeline").packets, 1);
}
} }