//! Tests for the shared upstream media runtime: session lifecycle, the shared
//! audio/video clock, playout-delay/offset configuration, and microphone sink
//! gating across owner generations.

use super::{UpstreamMediaKind, UpstreamMediaRuntime};
use std::sync::Arc;
use std::time::Duration;
#[test]
fn first_stream_starts_a_new_shared_session() {
    // On a fresh runtime, activating both kinds lands them in session 1 and
    // marks each activation's generation as the live owner for its kind.
    let runtime = UpstreamMediaRuntime::new();
    let cam = runtime.activate_camera();
    let mic = runtime.activate_microphone();
    assert_eq!(cam.session_id, 1);
    assert_eq!(mic.session_id, 1);
    assert!(runtime.is_camera_active(cam.generation));
    assert!(runtime.is_microphone_active(mic.generation));
}
#[test]
fn replacing_one_kind_keeps_the_session_but_preempts_the_old_owner() {
    // A second activation of the same kind stays inside the same session,
    // but the earlier generation loses ownership to the newer one.
    let runtime = UpstreamMediaRuntime::new();
    let old_owner = runtime.activate_microphone();
    let new_owner = runtime.activate_microphone();
    assert_eq!(old_owner.session_id, new_owner.session_id);
    assert!(!runtime.is_microphone_active(old_owner.generation));
    assert!(runtime.is_microphone_active(new_owner.generation));
}
#[test]
fn closing_the_last_stream_resets_the_next_session_anchor() {
    let runtime = UpstreamMediaRuntime::new();
    let camera = runtime.activate_camera();
    let microphone = runtime.activate_microphone();
    // Close both kinds so the shared session drains completely.
    runtime.close_camera(camera.generation);
    runtime.close_microphone(microphone.generation);
    // The next activation should start session 2 instead of reusing session 1.
    assert_eq!(runtime.activate_camera().session_id, 2);
}
#[test]
fn shared_clock_rebases_audio_and_video_against_the_same_origin() {
    // With both kinds active, the first mapped frame of each kind becomes
    // local pts 0, and later frames are measured against that shared origin.
    let runtime = UpstreamMediaRuntime::new();
    let _camera = runtime.activate_camera();
    let _microphone = runtime.activate_microphone();
    // Call order matters: video first anchors the clock, audio reuses it.
    assert_eq!(runtime.map_video_pts(1_000_000, 16_666), 0);
    assert_eq!(runtime.map_audio_pts(1_000_000), 0);
    assert_eq!(runtime.map_audio_pts(1_010_000), 10_000);
    assert_eq!(runtime.map_video_pts(1_033_333, 16_666), 33_333);
}
#[test]
fn per_kind_session_bases_cancel_constant_startup_path_offsets() {
    // Audio and video arrive with different constant startup offsets
    // (1_000_000 vs 1_300_000 here); each kind's own base cancels its offset
    // so both still rebase to local pts 0 and advance consistently.
    let runtime = UpstreamMediaRuntime::new();
    let _camera = runtime.activate_camera();
    let _microphone = runtime.activate_microphone();
    // Call order preserved: audio anchors first, then video.
    assert_eq!(runtime.map_audio_pts(1_000_000), 0);
    assert_eq!(runtime.map_video_pts(1_300_000, 16_666), 0);
    assert_eq!(runtime.map_audio_pts(1_010_000), 10_000);
    assert_eq!(runtime.map_video_pts(1_333_333, 16_666), 33_333);
}
#[test]
fn shared_clock_keeps_each_kind_monotonic_when_remote_pts_repeat() {
    let runtime = UpstreamMediaRuntime::new();
    let _camera = runtime.activate_camera();
    // When the remote pts repeats exactly, the mapped local pts must still
    // advance by one frame duration instead of going flat.
    assert_eq!(runtime.map_video_pts(50_000, 16_666), 0);
    assert_eq!(runtime.map_video_pts(50_000, 16_666), 16_666);
}
#[test]
fn close_ignores_superseded_generation_values() {
    // Closing with a stale (superseded) generation must be a no-op: only the
    // latest generation's close actually releases the kind.
    let runtime = UpstreamMediaRuntime::new();
    let first = runtime.activate_camera();
    let second = runtime.activate_camera();
    // `first` was superseded by `second`, so this close is ignored.
    runtime.close_camera(first.generation);
    assert!(runtime.is_camera_active(second.generation));
    // Consistency fix: use the `UpstreamMediaKind` import from the top of the
    // file instead of the redundant `super::`-qualified path.
    runtime.close(UpstreamMediaKind::Camera, second.generation);
    // With the real owner closed, the next activation opens session 2.
    let next = runtime.activate_camera();
    assert_eq!(next.session_id, 2);
}
#[test]
fn upstream_playout_delay_defaults_to_one_second_and_accepts_overrides() {
    // Without the env var, the playout delay falls back to the 1 s default.
    temp_env::with_var_unset("LESAVKA_UPSTREAM_PLAYOUT_DELAY_MS", || {
        let delay = super::upstream_playout_delay();
        assert_eq!(delay, Duration::from_secs(1));
    });
    // A millisecond override set in the environment is honored verbatim.
    temp_env::with_var("LESAVKA_UPSTREAM_PLAYOUT_DELAY_MS", Some("250"), || {
        let delay = super::upstream_playout_delay();
        assert_eq!(delay, Duration::from_millis(250));
    });
}
#[test]
fn upstream_playout_offsets_default_to_zero_and_accept_overrides() {
    const AUDIO_VAR: &str = "LESAVKA_UPSTREAM_AUDIO_PLAYOUT_OFFSET_US";
    const VIDEO_VAR: &str = "LESAVKA_UPSTREAM_VIDEO_PLAYOUT_OFFSET_US";
    // With neither variable set, both kinds report a zero offset.
    temp_env::with_var_unset(AUDIO_VAR, || {
        temp_env::with_var_unset(VIDEO_VAR, || {
            let audio = super::upstream_playout_offset_us(UpstreamMediaKind::Microphone);
            let video = super::upstream_playout_offset_us(UpstreamMediaKind::Camera);
            assert_eq!(audio, 0);
            assert_eq!(video, 0);
        });
    });
    // Signed microsecond overrides are routed to the matching kind.
    temp_env::with_var(AUDIO_VAR, Some("-20000"), || {
        temp_env::with_var(VIDEO_VAR, Some("35000"), || {
            let audio = super::upstream_playout_offset_us(UpstreamMediaKind::Microphone);
            let video = super::upstream_playout_offset_us(UpstreamMediaKind::Camera);
            assert_eq!(audio, -20_000);
            assert_eq!(video, 35_000);
        });
    });
}
#[test]
fn upstream_timing_trace_flag_accepts_false_values() {
    // Both "off" and "false" spellings must disable the trace flag.
    for falsey in ["off", "false"] {
        temp_env::with_var("LESAVKA_UPSTREAM_TIMING_TRACE", Some(falsey), || {
            assert!(!super::upstream_timing_trace_enabled());
        });
    }
    // A plain "1" still switches tracing on.
    temp_env::with_var("LESAVKA_UPSTREAM_TIMING_TRACE", Some("1"), || {
        assert!(super::upstream_timing_trace_enabled());
    });
}
#[test]
fn apply_playout_offset_supports_negative_offsets() {
    // A negative microsecond offset must pull the deadline earlier by
    // exactly that amount.
    let base = tokio::time::Instant::now() + Duration::from_millis(50);
    let shifted = super::apply_playout_offset(base, -20_000);
    assert_eq!(
        base.saturating_duration_since(shifted),
        Duration::from_micros(20_000)
    );
}
#[test]
fn shared_playout_epoch_is_reused_across_audio_and_video() {
    let runtime = UpstreamMediaRuntime::new();
    let _camera = runtime.activate_camera();
    let _microphone = runtime.activate_microphone();
    // Call order preserved: video plans first and anchors the epoch.
    let video_first = runtime.plan_video_pts(1_000_000, 16_666);
    let audio_first = runtime.plan_audio_pts(1_000_000);
    let audio_next = runtime.plan_audio_pts(1_010_000);
    // Both kinds rebase to local pts 0 and share a single wall-clock epoch.
    assert_eq!(video_first.local_pts_us, 0);
    assert_eq!(audio_first.local_pts_us, 0);
    assert_eq!(audio_first.due_at, video_first.due_at);
    // 10 ms of remote pts advances the deadline by exactly 10 ms.
    let gap = audio_next.due_at.saturating_duration_since(audio_first.due_at);
    assert_eq!(gap, Duration::from_micros(10_000));
}
#[test]
fn shared_playout_trace_path_keeps_planned_pts_stable() {
    // Turning the timing trace on must not perturb the planned pts values.
    temp_env::with_var("LESAVKA_UPSTREAM_TIMING_TRACE", Some("1"), || {
        let runtime = UpstreamMediaRuntime::new();
        let _camera = runtime.activate_camera();
        let _microphone = runtime.activate_microphone();
        assert_eq!(runtime.plan_video_pts(1_000_000, 16_666).local_pts_us, 0);
        assert_eq!(runtime.plan_audio_pts(1_000_000).local_pts_us, 0);
    });
}
#[tokio::test(flavor = "current_thread")]
async fn new_microphone_owner_waits_for_the_previous_sink_to_release() {
    let runtime = Arc::new(UpstreamMediaRuntime::new());
    // First owner grabs the exclusive sink gate.
    let first = runtime.activate_microphone();
    let permit = runtime
        .reserve_microphone_sink(first.generation)
        .await
        .expect("first owner should acquire the sink gate");
    // Second owner queues up behind the held gate on a separate task.
    let second = runtime.activate_microphone();
    let runtime_for_waiter = runtime.clone();
    let waiter = tokio::spawn(async move {
        runtime_for_waiter
            .reserve_microphone_sink(second.generation)
            .await
            .is_some()
    });
    // Give the waiter a chance to run: it must still be parked on the gate.
    tokio::time::sleep(Duration::from_millis(25)).await;
    assert!(!waiter.is_finished());
    // Releasing the permit unblocks the queued owner, which then succeeds.
    drop(permit);
    assert!(waiter.await.expect("waiter task should finish"));
}
// A waiter queued on the sink gate whose generation is superseded while it
// waits must return None (stand down) rather than briefly opening a stale
// sink when the gate is eventually released.
#[tokio::test(flavor = "current_thread")]
async fn superseded_microphone_waiter_stands_down_before_opening_a_sink() {
let runtime = Arc::new(UpstreamMediaRuntime::new());
// First owner holds the exclusive sink gate.
let first = runtime.activate_microphone();
let first_permit = runtime
.reserve_microphone_sink(first.generation)
.await
.expect("first owner should acquire the sink gate");
// Second owner queues behind the held gate on its own task.
let second = runtime.activate_microphone();
let superseded_waiter = tokio::spawn({
let runtime = runtime.clone();
async move {
runtime
.reserve_microphone_sink(second.generation)
.await
.is_some()
}
});
// Let the waiter actually park on the gate before superseding it.
tokio::time::sleep(Duration::from_millis(25)).await;
// Third activation supersedes `second` while it is still queued.
let third = runtime.activate_microphone();
// Releasing the gate wakes the queued waiter; being stale, it must yield None.
drop(first_permit);
assert!(
!superseded_waiter
.await
.expect("superseded waiter task should finish"),
"older waiter should stand down instead of opening a sink after supersession"
);
// The latest generation can still take the gate normally afterwards.
let third_permit = runtime
.reserve_microphone_sink(third.generation)
.await
.expect("latest owner should acquire the sink gate");
drop(third_permit);
}