149 lines
5.5 KiB
Rust
Raw Normal View History

/// Generate a server-local A/V signature and feed the physical UVC/UAC sinks.
///
/// Inputs: the active camera relay, active UAC voice sink, camera profile, and
/// probe request timing.
/// Outputs: a small count summary after the last generated packet.
/// Why: this probe intentionally bypasses client capture/uplink but uses the
/// same final server output handoff calls as received client media, so the
/// measured skew/freshness is the server final-handoff-to-RCT path.
#[cfg(not(coverage))]
pub async fn run_server_output_delay_probe(
    relay: Arc<CameraRelay>,
    sink: &mut Voice,
    camera: &CameraConfig,
    request: &OutputDelayProbeRequest,
) -> Result<OutputDelayProbeSummary> {
    let config = ProbeConfig::from_request(request)?;
    if config.event_count() == 0 {
        bail!("probe duration must extend beyond warmup");
    }
    // Nominal inter-frame spacing; fps is clamped to >= 1 so a degenerate
    // camera profile cannot cause a divide-by-zero.
    let frame_step = Duration::from_nanos(1_000_000_000u64 / u64::from(camera.fps.max(1)));
    let audio_chunk = Duration::from_millis(AUDIO_CHUNK_MS);
    let samples_per_chunk = ((u64::from(AUDIO_SAMPLE_RATE) * AUDIO_CHUNK_MS) / 1_000) as usize;
    // Probe video payloads, prepared once up front (presumably pre-encoded;
    // packet_for_frame() below replays them by index).
    let frames = EncodedProbeFrames::new(camera, &config, frame_step)?;
    // Wall-clock anchor paired with a monotonic start instant; monotonic
    // offsets are re-based onto this anchor when marking the timeline.
    let server_start_unix_ns = unix_ns_now();
    let start = tokio::time::Instant::now();
    let mut timeline = OutputDelayProbeTimeline::new(&config, camera, server_start_unix_ns);
    // Indices double as 0-based PTS multipliers; the *_frames/*_packets
    // counters track totals actually handed off.
    let mut frame_index = 0u64;
    let mut audio_index = 0u64;
    let mut video_frames = 0u64;
    let mut audio_packets = 0u64;
    // Interleave the two schedules: each iteration sleeps until the earlier
    // of the next video/audio due time, then emits whichever stream(s) are due.
    loop {
        let next_frame_pts = duration_mul(frame_step, frame_index);
        let next_audio_pts = duration_mul(audio_chunk, audio_index);
        // A stream stays active while its next PTS is still within the
        // configured probe duration (inclusive bound).
        let frame_active = next_frame_pts <= config.duration;
        let audio_active = next_audio_pts <= config.duration;
        if !frame_active && !audio_active {
            break;
        }
        // Due time = media PTS shifted by the per-stream injected delay;
        // Duration::MAX is the sentinel for a stream that has finished.
        let next_frame_due = if frame_active {
            next_frame_pts.saturating_add(config.video_delay)
        } else {
            Duration::MAX
        };
        let next_audio_due = if audio_active {
            next_audio_pts.saturating_add(config.audio_delay)
        } else {
            Duration::MAX
        };
        tokio::time::sleep_until(start + next_frame_due.min(next_audio_due)).await;
        // Both branches compare with `<=`, so on an exact tie audio and video
        // are both emitted in this same iteration (audio first, then video);
        // the due values are locals, so the audio emit cannot perturb the
        // video decision below.
        if audio_active && next_audio_due <= next_frame_due {
            let pts_us = duration_us(next_audio_pts);
            let event_slot = config.event_slot_at(next_audio_pts);
            let data = render_audio_chunk(&config, next_audio_pts, samples_per_chunk);
            // Sequence numbers are 1-based (index + 1).
            let seq = audio_index.saturating_add(1);
            // There is no real client hop for this synthetic packet, so the
            // client capture/send timestamps mirror the media PTS and the
            // client queue telemetry is zeroed.
            sink.push(&AudioPacket {
                id: 0,
                pts: pts_us,
                data,
                seq,
                client_capture_pts_us: pts_us,
                client_send_pts_us: pts_us,
                client_queue_depth: 0,
                client_queue_age_ms: 0,
            });
            // Record the actual handoff time for packets landing in a
            // measured event slot, in both monotonic and re-based unix time.
            if let Some(slot) = event_slot {
                let monotonic_us = monotonic_us_since(start);
                timeline.mark_audio(
                    slot,
                    seq,
                    monotonic_us,
                    unix_ns_from_start(server_start_unix_ns, monotonic_us),
                );
            }
            audio_packets = audio_packets.saturating_add(1);
            audio_index = audio_index.saturating_add(1);
        }
        if frame_active && next_frame_due <= next_audio_due {
            let pts_us = duration_us(next_frame_pts);
            let event_slot = config.event_slot_at(next_frame_pts);
            let seq = frame_index.saturating_add(1);
            // Same final-handoff call used for real client media (see header).
            relay.feed(VideoPacket {
                id: 0,
                pts: pts_us,
                data: frames.packet_for_frame(frame_index)?.to_vec(),
                seq,
                effective_fps: camera.fps,
                client_capture_pts_us: pts_us,
                client_send_pts_us: pts_us,
                client_queue_depth: 0,
                client_queue_age_ms: 0,
                ..Default::default()
            });
            if let Some(slot) = event_slot {
                let monotonic_us = monotonic_us_since(start);
                timeline.mark_video(
                    slot,
                    seq,
                    monotonic_us,
                    unix_ns_from_start(server_start_unix_ns, monotonic_us),
                );
            }
            video_frames = video_frames.saturating_add(1);
            frame_index = frame_index.saturating_add(1);
        }
    }
    // No more synthetic audio will be pushed; end the sink's stream.
    sink.finish();
    Ok(OutputDelayProbeSummary {
        video_frames,
        audio_packets,
        event_count: config.event_count(),
        timeline_json: serde_json::to_string(&timeline)
            .context("serializing output-delay server timeline")?,
    })
}
/// Coverage-build replacement for the server output-delay probe.
///
/// Performs no device I/O and no timed packet generation: it returns the
/// canned summary from `coverage_output_delay_summary` so coverage runs never
/// touch the camera relay or the voice sink. The relay and sink parameters
/// are accepted (to keep the signature identical to the real probe) but
/// deliberately unused.
#[cfg(coverage)]
pub async fn run_server_output_delay_probe(
    _relay: Arc<CameraRelay>,
    _sink: &mut Voice,
    camera: &CameraConfig,
    request: &OutputDelayProbeRequest,
) -> Result<OutputDelayProbeSummary> {
    coverage_output_delay_summary(camera, request)
}
/// Build the fixed summary reported by coverage builds: one video frame, one
/// audio packet, and the request's event count, plus a freshly-serialized
/// (empty) probe timeline.
///
/// Still validates the request via `ProbeConfig::from_request`, so malformed
/// probe requests fail the same way they do outside coverage builds.
#[cfg(coverage)]
fn coverage_output_delay_summary(
    camera: &CameraConfig,
    request: &OutputDelayProbeRequest,
) -> Result<OutputDelayProbeSummary> {
    let config = ProbeConfig::from_request(request)?;
    let timeline = OutputDelayProbeTimeline::new(&config, camera, unix_ns_now());
    // Best-effort serialization: a failure degrades to an empty JSON object
    // rather than failing the coverage run.
    let timeline_json = match serde_json::to_string(&timeline) {
        Ok(json) => json,
        Err(_) => "{}".to_string(),
    };
    Ok(OutputDelayProbeSummary {
        video_frames: 1,
        audio_packets: 1,
        event_count: config.event_count(),
        timeline_json,
    })
}