media: gate video against presented audio

This commit is contained in:
Brad Stein 2026-05-02 11:04:36 -03:00
parent 314c55b199
commit fbf274d21b
8 changed files with 55 additions and 26 deletions

View File

@@ -245,3 +245,15 @@ Context: 0.17.7 with the Bumblebee mic and BRIO camera removed the seconds-scale
- [x] Run focused calibration/installer/runtime tests.
- [x] Run package checks before push.
- [x] Push clean semver `0.17.8` for installed client/server testing.
## 0.17.9 Sync-Only Audio-Master Presentation Checklist
Context: 0.17.8 installed cleanly on both ends (`314c55b`) but the mirrored probe failed with insufficient data: only 2 paired events, 1187 video freezes, and planner phase `healing`. The server was using the newest planned audio packet as the video-drop reference, so future audio planning could make current video look falsely behind before that audio was actually handed to UAC.
- [x] Keep 0.17.9 scoped to sync enforcement only; no freshness ceilings, queue policy, or smoothness changes.
- [x] Make video freeze/drop decisions compare against audio actually presented to UAC, not merely planned audio.
- [x] Make `wait_for_audio_master` wake on `mark_audio_presented` so video waits for real audio progress.
- [x] Add/adjust tests proving future planned audio alone cannot freeze video.
- [x] Run focused upstream planner tests.
- [x] Run package checks before push.
- [x] Push clean semver `0.17.9` for installed client/server testing.

6
Cargo.lock generated
View File

@@ -1652,7 +1652,7 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]]
name = "lesavka_client"
version = "0.17.8"
version = "0.17.9"
dependencies = [
"anyhow",
"async-stream",
@@ -1686,7 +1686,7 @@ dependencies = [
[[package]]
name = "lesavka_common"
version = "0.17.8"
version = "0.17.9"
dependencies = [
"anyhow",
"base64",
@@ -1698,7 +1698,7 @@ dependencies = [
[[package]]
name = "lesavka_server"
version = "0.17.8"
version = "0.17.9"
dependencies = [
"anyhow",
"base64",

View File

@@ -4,7 +4,7 @@ path = "src/main.rs"
[package]
name = "lesavka_client"
version = "0.17.8"
version = "0.17.9"
edition = "2024"
[dependencies]

View File

@@ -1,6 +1,6 @@
[package]
name = "lesavka_common"
version = "0.17.8"
version = "0.17.9"
edition = "2024"
build = "build.rs"

View File

@@ -10,7 +10,7 @@ bench = false
[package]
name = "lesavka_server"
version = "0.17.8"
version = "0.17.9"
edition = "2024"
autobins = false

View File

@@ -113,6 +113,7 @@ impl UpstreamMediaRuntime {
state.phase = UpstreamSyncPhase::Live;
state.last_reason = "audio-master playhead flowing".to_string();
}
self.audio_progress_notify.notify_waiters();
}
/// Mark one video frame as actually handed to the UVC/HDMI sink.
@@ -230,12 +231,12 @@ impl UpstreamMediaRuntime {
if state.active_microphone_generation.is_none() {
return true;
}
if state.last_audio_local_pts_us.is_some_and(|audio_pts_us| {
audio_pts_us
.saturating_add(slack_us)
.saturating_add(audio_delay_allowance_us)
>= video_local_pts_us
}) {
let audio_presented_pts_us = state.last_audio_presented_pts_us.unwrap_or(0);
if audio_presented_pts_us
.saturating_add(slack_us)
.saturating_add(audio_delay_allowance_us)
>= video_local_pts_us
{
return true;
}
}
@@ -463,13 +464,15 @@ impl UpstreamMediaRuntime {
*last_slot = Some(local_pts_us);
let audio_ahead_video_allowance_us = self.audio_ahead_video_allowance_us();
if kind == UpstreamMediaKind::Camera
&& state.last_audio_local_pts_us.is_some_and(|audio_pts_us| {
video_is_too_far_behind_audio(
local_pts_us,
audio_pts_us,
audio_ahead_video_allowance_us,
)
})
&& state
.last_audio_presented_pts_us
.is_some_and(|audio_pts_us| {
video_is_too_far_behind_audio(
local_pts_us,
audio_pts_us,
audio_ahead_video_allowance_us,
)
})
{
state.skew_video_drops = state.skew_video_drops.saturating_add(1);
state.video_freezes = state.video_freezes.saturating_add(1);

View File

@@ -14,7 +14,8 @@ async fn wait_for_audio_master_releases_video_once_audio_catches_up() {
runtime.plan_video_pts(1_000_000, 16_666),
super::UpstreamPlanDecision::AwaitingPair
));
let _audio_first = play(runtime.plan_audio_pts(1_000_000));
let audio_first = play(runtime.plan_audio_pts(1_000_000));
runtime.mark_audio_presented(audio_first.local_pts_us);
let video_first = play(runtime.plan_video_pts(1_000_000, 16_666));
let waiter = tokio::spawn({
@@ -27,7 +28,8 @@
});
tokio::time::sleep(Duration::from_millis(5)).await;
let _audio_next = play(runtime.plan_audio_pts(1_010_000));
let audio_next = play(runtime.plan_audio_pts(1_010_000));
runtime.mark_audio_presented(audio_next.local_pts_us);
assert!(waiter.await.expect("audio master waiter should finish"));
}
@@ -44,7 +46,8 @@ async fn wait_for_audio_master_allows_configured_positive_audio_delay() {
runtime.plan_video_pts(1_000_000, 16_666),
super::UpstreamPlanDecision::AwaitingPair
));
let _audio_first = play(runtime.plan_audio_pts(1_000_000));
let audio_first = play(runtime.plan_audio_pts(1_000_000));
runtime.mark_audio_presented(audio_first.local_pts_us);
let _video_first = play(runtime.plan_video_pts(1_000_000, 16_666));
let delayed_video = play(runtime.plan_video_pts(1_700_000, 16_666));
@@ -71,7 +74,8 @@ async fn wait_for_audio_master_times_out_when_audio_never_catches_up() {
runtime.plan_video_pts(1_000_000, 16_666),
super::UpstreamPlanDecision::AwaitingPair
));
let _audio_first = play(runtime.plan_audio_pts(1_000_000));
let audio_first = play(runtime.plan_audio_pts(1_000_000));
runtime.mark_audio_presented(audio_first.local_pts_us);
let video_first = play(runtime.plan_video_pts(1_000_000, 16_666));
let due_at = tokio::time::Instant::now() + Duration::from_millis(20);

View File

@@ -343,12 +343,22 @@ fn video_too_far_behind_audio_master_is_dropped_and_counted_as_freeze() {
runtime.plan_video_pts(1_000_000, 16_666),
super::UpstreamPlanDecision::AwaitingPair
));
let _audio = play(runtime.plan_audio_pts(1_000_000));
let audio = play(runtime.plan_audio_pts(1_000_000));
let _video = play(runtime.plan_video_pts(1_000_000, 16_666));
let _audio_master = play(runtime.plan_audio_pts(1_200_000));
runtime.mark_audio_presented(audio.local_pts_us);
let audio_master = play(runtime.plan_audio_pts(1_200_000));
assert!(
matches!(
runtime.plan_video_pts(1_100_000, 16_666),
super::UpstreamPlanDecision::Play(_)
),
"future planned audio alone must not freeze video before UAC presentation"
);
runtime.mark_audio_presented(audio_master.local_pts_us);
assert!(matches!(
runtime.plan_video_pts(1_100_000, 16_666),
runtime.plan_video_pts(1_116_666, 16_666),
super::UpstreamPlanDecision::DropStale(
"video frame was too far behind audio master"
)