// source metadata (extraction residue): 1214 lines, 50 KiB, Rust
use serde::{Deserialize, Serialize};
|
|
use std::collections::VecDeque;
|
|
use std::fmt::Write as _;
|
|
|
|
use super::{
|
|
devices::CameraMode,
|
|
state::{CaptureSizeChoice, FeedSourcePreset, InputRouting, LauncherState, ViewMode},
|
|
};
|
|
|
|
/// One measurement window captured from the live relay session.
///
/// Recorded into [`DiagnosticsLog`] and embedded in [`SnapshotReport`]
/// output. Numeric fields hold whatever the probes reported for the window;
/// label fields stay empty strings until the pipeline publishes them
/// (consumers substitute "pending" for empty labels).
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct PerformanceSample {
    // Control-plane probe metrics (round-trip and spread in milliseconds,
    // loss as a percentage).
    pub rtt_ms: f32,
    pub probe_spread_ms: f32,
    pub input_latency_ms: f32,
    pub probe_loss_pct: f32,
    // Process CPU usage on each end of the link, in percent.
    pub client_process_cpu_pct: f32,
    pub server_process_cpu_pct: f32,
    // Video-plane packet loss, in percent.
    pub video_loss_pct: f32,
    // --- Left eye: client-side stream metrics ---
    pub left_receive_fps: f32,
    pub left_present_fps: f32,
    pub left_server_fps: f32,
    pub left_stream_spread_ms: f32,
    pub left_packet_gap_peak_ms: f32,
    pub left_present_gap_peak_ms: f32,
    pub left_queue_depth: u32,
    pub left_queue_peak: u32,
    // --- Left eye: server-side pipeline metrics and labels ---
    pub left_server_source_gap_peak_ms: f32,
    pub left_server_send_gap_peak_ms: f32,
    pub left_server_queue_peak: u32,
    pub left_server_encoder_label: String,
    pub left_decoder_label: String,
    // GStreamer-style caps strings observed at each pipeline stage.
    pub left_stream_caps_label: String,
    pub left_decoded_caps_label: String,
    pub left_rendered_caps_label: String,
    // --- Right eye: client-side stream metrics ---
    pub right_receive_fps: f32,
    pub right_present_fps: f32,
    pub right_server_fps: f32,
    pub right_stream_spread_ms: f32,
    pub right_packet_gap_peak_ms: f32,
    pub right_present_gap_peak_ms: f32,
    pub right_queue_depth: u32,
    pub right_queue_peak: u32,
    // --- Right eye: server-side pipeline metrics and labels ---
    pub right_server_source_gap_peak_ms: f32,
    pub right_server_send_gap_peak_ms: f32,
    pub right_server_queue_peak: u32,
    pub right_server_encoder_label: String,
    pub right_decoder_label: String,
    pub right_stream_caps_label: String,
    pub right_decoded_caps_label: String,
    pub right_rendered_caps_label: String,
    // Session-wide counters.
    pub dropped_frames: u64,
    pub queue_depth: u32,
}
|
|
|
|
/// Bounded FIFO of the most recent [`PerformanceSample`]s.
///
/// Oldest entries are evicted first once `capacity` is reached.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct DiagnosticsLog {
    // Maximum retained samples; clamped to at least 1 in `new`.
    capacity: usize,
    // Samples ordered oldest (front) to newest (back).
    history: VecDeque<PerformanceSample>,
}
|
|
|
|
impl DiagnosticsLog {
|
|
pub fn new(capacity: usize) -> Self {
|
|
let capacity = capacity.max(1);
|
|
Self {
|
|
capacity,
|
|
history: VecDeque::with_capacity(capacity),
|
|
}
|
|
}
|
|
|
|
pub fn record(&mut self, sample: PerformanceSample) {
|
|
if self.history.len() == self.capacity {
|
|
let _ = self.history.pop_front();
|
|
}
|
|
self.history.push_back(sample);
|
|
}
|
|
|
|
pub fn latest(&self) -> Option<&PerformanceSample> {
|
|
self.history.back()
|
|
}
|
|
|
|
pub fn len(&self) -> usize {
|
|
self.history.len()
|
|
}
|
|
|
|
pub fn is_empty(&self) -> bool {
|
|
self.history.is_empty()
|
|
}
|
|
|
|
pub fn iter(&self) -> impl Iterator<Item = &PerformanceSample> {
|
|
self.history.iter()
|
|
}
|
|
}
|
|
|
|
/// Enabled/disabled flags for the optional media channels carried by the
/// session (mirrors `state.channels` at snapshot time).
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
pub struct MediaChannelState {
    // Camera channel enabled.
    pub camera: bool,
    // Microphone channel enabled.
    pub microphone: bool,
    // Speaker/audio-out channel enabled.
    pub audio: bool,
}
|
|
|
|
/// Point-in-time diagnostics report combining launcher state, device
/// selection, and the latest live-stream metrics.
///
/// Built by [`SnapshotReport::from_state`]; rendered with `to_pretty_json`
/// or `to_pretty_text`. Label fields sourced from live samples read
/// "pending" until the pipeline reports them.
#[derive(Debug, Clone, Serialize, Deserialize)]
pub struct SnapshotReport {
    // --- Versions and session-level state ---
    pub client_version: String,
    pub server_version: Option<String>,
    pub server_available: bool,
    pub routing: InputRouting,
    pub view_mode: ViewMode,
    pub remote_active: bool,
    // Pre-formatted "mode | detail | leases N" summary.
    pub power_state: String,
    pub client_process_cpu_pct: f32,
    pub server_process_cpu_pct: f32,
    // Pre-formatted "WxH @ F fps" source description.
    pub preview_source: String,
    // Pre-formatted "WxH" client display limit.
    pub client_display_limit: String,
    // --- Left eye: configuration and latest live metrics ---
    pub left_surface: String,
    pub left_feed_source: String,
    pub left_capture_profile: String,
    pub left_capture_transport: String,
    pub left_breakout_profile: String,
    pub left_decoder_label: String,
    pub left_stream_spread_ms: f32,
    pub left_packet_gap_peak_ms: f32,
    pub left_present_gap_peak_ms: f32,
    pub left_queue_depth: u32,
    pub left_queue_peak: u32,
    pub left_server_source_gap_peak_ms: f32,
    pub left_server_send_gap_peak_ms: f32,
    pub left_server_queue_peak: u32,
    pub left_server_encoder_label: String,
    pub left_stream_caps_label: String,
    pub left_decoded_caps_label: String,
    pub left_rendered_caps_label: String,
    // --- Right eye: configuration and latest live metrics ---
    pub right_surface: String,
    pub right_feed_source: String,
    pub right_capture_profile: String,
    pub right_capture_transport: String,
    pub right_breakout_profile: String,
    pub right_decoder_label: String,
    pub right_stream_spread_ms: f32,
    pub right_packet_gap_peak_ms: f32,
    pub right_present_gap_peak_ms: f32,
    pub right_queue_depth: u32,
    pub right_queue_peak: u32,
    pub right_server_source_gap_peak_ms: f32,
    pub right_server_send_gap_peak_ms: f32,
    pub right_server_queue_peak: u32,
    pub right_server_encoder_label: String,
    pub right_stream_caps_label: String,
    pub right_decoded_caps_label: String,
    pub right_rendered_caps_label: String,
    // --- Media/device staging ---
    pub selected_camera: Option<String>,
    pub camera_quality_label: String,
    pub selected_microphone: Option<String>,
    pub selected_speaker: Option<String>,
    pub media_channels: MediaChannelState,
    pub audio_gain_label: String,
    pub mic_gain_label: String,
    pub selected_keyboard: Option<String>,
    pub selected_mouse: Option<String>,
    // --- Status, history, and advice ---
    pub status: String,
    pub recent_samples: Vec<PerformanceSample>,
    pub notes: Vec<String>,
    pub recommendations: Vec<String>,
    // Shell command operators can run to reproduce the quality checks.
    pub probe_command: String,
}
|
|
|
|
impl SnapshotReport {
|
|
pub fn from_state(state: &LauncherState, log: &DiagnosticsLog, probe_command: String) -> Self {
|
|
let left_capture = state
|
|
.display_capture_size_choice(0)
|
|
.unwrap_or_else(|| state.capture_size_choice(0));
|
|
let right_capture = state
|
|
.display_capture_size_choice(1)
|
|
.unwrap_or_else(|| state.capture_size_choice(1));
|
|
let left_breakout = state.breakout_size_choice(0);
|
|
let right_breakout = state.breakout_size_choice(1);
|
|
let latest = log.latest();
|
|
let left_stream_caps = latest
|
|
.map(|sample| sample.left_stream_caps_label.clone())
|
|
.unwrap_or_default();
|
|
let right_stream_caps = latest
|
|
.map(|sample| sample.right_stream_caps_label.clone())
|
|
.unwrap_or_default();
|
|
Self {
|
|
client_version: crate::VERSION.to_string(),
|
|
server_version: state.server_version.clone(),
|
|
server_available: state.server_available,
|
|
routing: state.routing,
|
|
view_mode: state.view_mode,
|
|
remote_active: state.remote_active,
|
|
power_state: format!(
|
|
"{} | {} | leases {}",
|
|
state.capture_power.mode,
|
|
state.capture_power.detail,
|
|
state.capture_power.active_leases
|
|
),
|
|
client_process_cpu_pct: latest
|
|
.map(|sample| sample.client_process_cpu_pct)
|
|
.unwrap_or(0.0),
|
|
server_process_cpu_pct: latest
|
|
.map(|sample| sample.server_process_cpu_pct)
|
|
.unwrap_or(0.0),
|
|
preview_source: format!(
|
|
"{}x{} @ {} fps",
|
|
state.preview_source.width, state.preview_source.height, state.preview_source.fps
|
|
),
|
|
client_display_limit: format!(
|
|
"{}x{}",
|
|
state.breakout_display.width, state.breakout_display.height
|
|
),
|
|
left_surface: state.display_surface(0).label().to_string(),
|
|
left_feed_source: match state.feed_source_preset(0) {
|
|
super::state::FeedSourcePreset::ThisEye => "Left Eye".to_string(),
|
|
super::state::FeedSourcePreset::OtherEye => "Right Eye (mirrored)".to_string(),
|
|
super::state::FeedSourcePreset::Off => "Off".to_string(),
|
|
},
|
|
left_capture_profile: capture_profile_label(&left_capture, &left_stream_caps),
|
|
left_capture_transport: left_capture.preset.transport_label().to_string(),
|
|
left_breakout_profile: format!(
|
|
"{} | {}x{}",
|
|
left_breakout.preset.label(),
|
|
left_breakout.width,
|
|
left_breakout.height
|
|
),
|
|
left_decoder_label: latest
|
|
.map(|sample| {
|
|
if sample.left_decoder_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.left_decoder_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
left_stream_spread_ms: latest
|
|
.map(|sample| sample.left_stream_spread_ms)
|
|
.unwrap_or(0.0),
|
|
left_packet_gap_peak_ms: latest
|
|
.map(|sample| sample.left_packet_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
left_present_gap_peak_ms: latest
|
|
.map(|sample| sample.left_present_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
left_queue_depth: latest.map(|sample| sample.left_queue_depth).unwrap_or(0),
|
|
left_queue_peak: latest.map(|sample| sample.left_queue_peak).unwrap_or(0),
|
|
left_server_source_gap_peak_ms: latest
|
|
.map(|sample| sample.left_server_source_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
left_server_send_gap_peak_ms: latest
|
|
.map(|sample| sample.left_server_send_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
left_server_queue_peak: latest
|
|
.map(|sample| sample.left_server_queue_peak)
|
|
.unwrap_or(0),
|
|
left_server_encoder_label: latest
|
|
.map(|sample| {
|
|
if sample.left_server_encoder_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.left_server_encoder_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
left_stream_caps_label: latest
|
|
.map(|sample| {
|
|
if sample.left_stream_caps_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.left_stream_caps_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
left_decoded_caps_label: latest
|
|
.map(|sample| {
|
|
if sample.left_decoded_caps_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.left_decoded_caps_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
left_rendered_caps_label: latest
|
|
.map(|sample| {
|
|
if sample.left_rendered_caps_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.left_rendered_caps_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
right_surface: state.display_surface(1).label().to_string(),
|
|
right_feed_source: match state.feed_source_preset(1) {
|
|
super::state::FeedSourcePreset::ThisEye => "Right Eye".to_string(),
|
|
super::state::FeedSourcePreset::OtherEye => "Left Eye (mirrored)".to_string(),
|
|
super::state::FeedSourcePreset::Off => "Off".to_string(),
|
|
},
|
|
right_capture_profile: capture_profile_label(&right_capture, &right_stream_caps),
|
|
right_capture_transport: right_capture.preset.transport_label().to_string(),
|
|
right_breakout_profile: format!(
|
|
"{} | {}x{}",
|
|
right_breakout.preset.label(),
|
|
right_breakout.width,
|
|
right_breakout.height
|
|
),
|
|
right_decoder_label: latest
|
|
.map(|sample| {
|
|
if sample.right_decoder_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.right_decoder_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
right_stream_spread_ms: latest
|
|
.map(|sample| sample.right_stream_spread_ms)
|
|
.unwrap_or(0.0),
|
|
right_packet_gap_peak_ms: latest
|
|
.map(|sample| sample.right_packet_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
right_present_gap_peak_ms: latest
|
|
.map(|sample| sample.right_present_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
right_queue_depth: latest.map(|sample| sample.right_queue_depth).unwrap_or(0),
|
|
right_queue_peak: latest.map(|sample| sample.right_queue_peak).unwrap_or(0),
|
|
right_server_source_gap_peak_ms: latest
|
|
.map(|sample| sample.right_server_source_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
right_server_send_gap_peak_ms: latest
|
|
.map(|sample| sample.right_server_send_gap_peak_ms)
|
|
.unwrap_or(0.0),
|
|
right_server_queue_peak: latest
|
|
.map(|sample| sample.right_server_queue_peak)
|
|
.unwrap_or(0),
|
|
right_server_encoder_label: latest
|
|
.map(|sample| {
|
|
if sample.right_server_encoder_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.right_server_encoder_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
right_stream_caps_label: latest
|
|
.map(|sample| {
|
|
if sample.right_stream_caps_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.right_stream_caps_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
right_decoded_caps_label: latest
|
|
.map(|sample| {
|
|
if sample.right_decoded_caps_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.right_decoded_caps_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
right_rendered_caps_label: latest
|
|
.map(|sample| {
|
|
if sample.right_rendered_caps_label.is_empty() {
|
|
"pending".to_string()
|
|
} else {
|
|
sample.right_rendered_caps_label.clone()
|
|
}
|
|
})
|
|
.unwrap_or_else(|| "pending".to_string()),
|
|
selected_camera: state.devices.camera.clone(),
|
|
camera_quality_label: state
|
|
.camera_quality
|
|
.map(CameraMode::short_label)
|
|
.unwrap_or_else(|| "default".to_string()),
|
|
selected_microphone: state.devices.microphone.clone(),
|
|
selected_speaker: state.devices.speaker.clone(),
|
|
media_channels: MediaChannelState {
|
|
camera: state.channels.camera,
|
|
microphone: state.channels.microphone,
|
|
audio: state.channels.audio,
|
|
},
|
|
audio_gain_label: state.audio_gain_label(),
|
|
mic_gain_label: state.mic_gain_label(),
|
|
selected_keyboard: state.devices.keyboard.clone(),
|
|
selected_mouse: state.devices.mouse.clone(),
|
|
status: state.status_line(),
|
|
recent_samples: log.iter().cloned().collect(),
|
|
notes: state.notes.clone(),
|
|
recommendations: recommendations_for(state, log),
|
|
probe_command,
|
|
}
|
|
}
|
|
|
|
pub fn to_pretty_json(&self) -> Result<String, serde_json::Error> {
|
|
serde_json::to_string_pretty(self)
|
|
}
|
|
|
|
pub fn to_pretty_text(&self) -> String {
|
|
let mut text = String::new();
|
|
let server_version = self.server_version.as_deref().unwrap_or("unknown");
|
|
let server_state = if self.server_available {
|
|
"reachable"
|
|
} else {
|
|
"unreachable"
|
|
};
|
|
let _ = writeln!(text, "Lesavka Diagnostics");
|
|
let _ = writeln!(text, "client: v{}", self.client_version);
|
|
let _ = writeln!(text, "server: {server_version} ({server_state})");
|
|
let _ = writeln!(
|
|
text,
|
|
"session: routing={:?} view={:?} relay={} capture_power={}",
|
|
self.routing,
|
|
self.view_mode,
|
|
if self.remote_active { "active" } else { "idle" },
|
|
self.power_state
|
|
);
|
|
let _ = writeln!(
|
|
text,
|
|
"runtime: client CPU {:.1}% | server CPU {:.1}%",
|
|
self.client_process_cpu_pct, self.server_process_cpu_pct
|
|
);
|
|
let _ = writeln!(text, "source feed: {}", self.preview_source);
|
|
let _ = writeln!(text, "display limit: {}", self.client_display_limit);
|
|
let _ = writeln!(text);
|
|
let _ = writeln!(text, "left eye");
|
|
let _ = writeln!(text, " surface: {}", self.left_surface);
|
|
let _ = writeln!(text, " source: {}", self.left_feed_source);
|
|
let _ = writeln!(text, " capture: {}", self.left_capture_profile);
|
|
let _ = writeln!(text, " transport: {}", self.left_capture_transport);
|
|
let _ = writeln!(text, " breakout: {}", self.left_breakout_profile);
|
|
let _ = writeln!(
|
|
text,
|
|
" live: decoder={} spread={:.1}ms gaps={:.0}/{:.0}ms queue={}/{}",
|
|
self.left_decoder_label,
|
|
self.left_stream_spread_ms,
|
|
self.left_packet_gap_peak_ms,
|
|
self.left_present_gap_peak_ms,
|
|
self.left_queue_depth,
|
|
self.left_queue_peak
|
|
);
|
|
let _ = writeln!(text, " stream caps: {}", self.left_stream_caps_label);
|
|
let _ = writeln!(text, " decoded caps: {}", self.left_decoded_caps_label);
|
|
let _ = writeln!(text, " rendered caps: {}", self.left_rendered_caps_label);
|
|
let _ = writeln!(
|
|
text,
|
|
" server: encoder={} cpu={:.1}% gaps={:.0}/{:.0}ms queue-peak={}",
|
|
self.left_server_encoder_label,
|
|
self.server_process_cpu_pct,
|
|
self.left_server_source_gap_peak_ms,
|
|
self.left_server_send_gap_peak_ms,
|
|
self.left_server_queue_peak
|
|
);
|
|
let _ = writeln!(text, "right eye");
|
|
let _ = writeln!(text, " surface: {}", self.right_surface);
|
|
let _ = writeln!(text, " source: {}", self.right_feed_source);
|
|
let _ = writeln!(text, " capture: {}", self.right_capture_profile);
|
|
let _ = writeln!(text, " transport: {}", self.right_capture_transport);
|
|
let _ = writeln!(text, " breakout: {}", self.right_breakout_profile);
|
|
let _ = writeln!(
|
|
text,
|
|
" live: decoder={} spread={:.1}ms gaps={:.0}/{:.0}ms queue={}/{}",
|
|
self.right_decoder_label,
|
|
self.right_stream_spread_ms,
|
|
self.right_packet_gap_peak_ms,
|
|
self.right_present_gap_peak_ms,
|
|
self.right_queue_depth,
|
|
self.right_queue_peak
|
|
);
|
|
let _ = writeln!(text, " stream caps: {}", self.right_stream_caps_label);
|
|
let _ = writeln!(text, " decoded caps: {}", self.right_decoded_caps_label);
|
|
let _ = writeln!(text, " rendered caps: {}", self.right_rendered_caps_label);
|
|
let _ = writeln!(
|
|
text,
|
|
" server: encoder={} cpu={:.1}% gaps={:.0}/{:.0}ms queue-peak={}",
|
|
self.right_server_encoder_label,
|
|
self.server_process_cpu_pct,
|
|
self.right_server_source_gap_peak_ms,
|
|
self.right_server_send_gap_peak_ms,
|
|
self.right_server_queue_peak
|
|
);
|
|
let _ = writeln!(text);
|
|
let _ = writeln!(text, "media staging");
|
|
let _ = writeln!(
|
|
text,
|
|
" camera: {} | quality={} | enabled={}",
|
|
self.selected_camera.as_deref().unwrap_or("auto"),
|
|
self.camera_quality_label,
|
|
self.media_channels.camera
|
|
);
|
|
let _ = writeln!(
|
|
text,
|
|
" speaker: {} | volume={} | enabled={}",
|
|
self.selected_speaker.as_deref().unwrap_or("auto"),
|
|
self.audio_gain_label,
|
|
self.media_channels.audio
|
|
);
|
|
let _ = writeln!(
|
|
text,
|
|
" microphone: {} | gain={} | enabled={}",
|
|
self.selected_microphone.as_deref().unwrap_or("auto"),
|
|
self.mic_gain_label,
|
|
self.media_channels.microphone
|
|
);
|
|
let _ = writeln!(
|
|
text,
|
|
" keyboard: {}",
|
|
self.selected_keyboard.as_deref().unwrap_or("all")
|
|
);
|
|
let _ = writeln!(
|
|
text,
|
|
" mouse: {}",
|
|
self.selected_mouse.as_deref().unwrap_or("all")
|
|
);
|
|
let _ = writeln!(text);
|
|
let _ = writeln!(text, "current UI state");
|
|
let _ = writeln!(text, " {}", self.status);
|
|
let _ = writeln!(text);
|
|
let _ = writeln!(text, "recent samples");
|
|
if self.recent_samples.is_empty() {
|
|
let _ = writeln!(
|
|
text,
|
|
" no live RTT/probe-spread/loss samples yet; this report is currently a launcher state snapshot."
|
|
);
|
|
} else {
|
|
for sample in &self.recent_samples {
|
|
let _ = writeln!(
|
|
text,
|
|
" rtt={:.1}ms probe-spread={:.1}ms input-floor={:.1}ms cpu={:.1}/{:.1}% probe-loss={:.1}% video-loss={:.1}% left={:.1}/{:.1}/{:.1}fps right={:.1}/{:.1}/{:.1}fps dropped={} queue={}/{} peaks=l{:.0}/{:.0}ms r{:.0}/{:.0}ms server=l{}:{:.0}/{:.0}/{} r{}:{:.0}/{:.0}/{}",
|
|
sample.rtt_ms,
|
|
sample.probe_spread_ms,
|
|
sample.input_latency_ms,
|
|
sample.client_process_cpu_pct,
|
|
sample.server_process_cpu_pct,
|
|
sample.probe_loss_pct,
|
|
sample.video_loss_pct,
|
|
sample.left_receive_fps,
|
|
sample.left_present_fps,
|
|
sample.left_server_fps,
|
|
sample.right_receive_fps,
|
|
sample.right_present_fps,
|
|
sample.right_server_fps,
|
|
sample.dropped_frames,
|
|
sample.queue_depth,
|
|
sample.left_queue_peak.max(sample.right_queue_peak),
|
|
sample.left_packet_gap_peak_ms,
|
|
sample.left_present_gap_peak_ms,
|
|
sample.right_packet_gap_peak_ms,
|
|
sample.right_present_gap_peak_ms,
|
|
sample.left_server_encoder_label,
|
|
sample.left_server_source_gap_peak_ms,
|
|
sample.left_server_send_gap_peak_ms,
|
|
sample.left_server_queue_peak,
|
|
sample.right_server_encoder_label,
|
|
sample.right_server_source_gap_peak_ms,
|
|
sample.right_server_send_gap_peak_ms,
|
|
sample.right_server_queue_peak
|
|
);
|
|
}
|
|
}
|
|
let _ = writeln!(text);
|
|
let _ = writeln!(text, "recommendations");
|
|
for item in &self.recommendations {
|
|
let _ = writeln!(text, " - {item}");
|
|
}
|
|
if !self.notes.is_empty() {
|
|
let _ = writeln!(text);
|
|
let _ = writeln!(text, "notes");
|
|
for item in &self.notes {
|
|
let _ = writeln!(text, " - {item}");
|
|
}
|
|
}
|
|
let _ = writeln!(text);
|
|
let _ = writeln!(text, "quality probe");
|
|
let _ = writeln!(text, " {}", self.probe_command);
|
|
text
|
|
}
|
|
}
|
|
|
|
/// Shell command embedded in diagnostics reports so operators can reproduce
/// the repository's hygiene and quality gates. `const` so it can also be
/// used in constant contexts; callers are unaffected.
pub const fn quality_probe_command() -> &'static str {
    "scripts/ci/hygiene_gate.sh && scripts/ci/quality_gate.sh"
}
|
|
|
|
fn capture_profile_label(capture: &CaptureSizeChoice, stream_caps_label: &str) -> String {
|
|
if let Some((width, height, fps)) = parse_stream_caps_profile(stream_caps_label) {
|
|
return format!(
|
|
"{} | observed {}x{} @ {} fps | bitrate est ~{} kbit",
|
|
capture.preset.label(),
|
|
width,
|
|
height,
|
|
fps,
|
|
capture.max_bitrate_kbit
|
|
);
|
|
}
|
|
format!(
|
|
"{} | {}x{} | {} fps | bitrate est ~{} kbit",
|
|
capture.preset.label(),
|
|
capture.width,
|
|
capture.height,
|
|
capture.fps,
|
|
capture.max_bitrate_kbit
|
|
)
|
|
}
|
|
|
|
fn parse_stream_caps_profile(caps: &str) -> Option<(u32, u32, u32)> {
|
|
let width = parse_caps_u32(caps, "width=(int)")?;
|
|
let height = parse_caps_u32(caps, "height=(int)")?;
|
|
let fps = parse_caps_fraction_numerator(caps, "framerate=(fraction)")?;
|
|
Some((width, height, fps))
|
|
}
|
|
|
|
/// Parses the unsigned integer that directly follows the first occurrence of
/// `needle` in `caps`, reading up to the next `,` or `;` (or end of string).
fn parse_caps_u32(caps: &str, needle: &str) -> Option<u32> {
    let found_at = caps.find(needle)?;
    let rest = &caps[found_at + needle.len()..];
    let value = match rest.find([',', ';']) {
        Some(stop) => &rest[..stop],
        None => rest,
    };
    value.trim().parse().ok()
}
|
|
|
|
/// Parses the numerator of a fraction value (e.g. `60/1`) that directly
/// follows the first occurrence of `needle` in `caps`; the denominator is
/// intentionally ignored. Reads up to the next `,` or `;`.
fn parse_caps_fraction_numerator(caps: &str, needle: &str) -> Option<u32> {
    let found_at = caps.find(needle)?;
    let rest = &caps[found_at + needle.len()..];
    let stop = rest.find([',', ';']).unwrap_or(rest.len());
    rest[..stop].trim().split('/').next()?.parse().ok()
}
|
|
|
|
/// Builds the ordered list of human-readable tuning recommendations shown
/// in the diagnostics report.
///
/// Each heuristic below appends at most one message. Thresholds appear to be
/// hand-tuned for this relay — confirm against operational docs before
/// changing any of them. Falls back to a single "looks stable" message when
/// nothing fires.
fn recommendations_for(state: &LauncherState, log: &DiagnosticsLog) -> Vec<String> {
    let mut items = Vec::new();
    // Decode-path classification of the most recent sample; both are false
    // while the log is empty.
    let hardware_decode_active = log.latest().is_some_and(sample_uses_hardware_decode);
    let software_decode_active = log.latest().is_some_and(sample_uses_software_decode);
    // No reachable server: live metrics would be meaningless.
    if !state.server_available {
        items.push(
            "The server is not reachable from this launcher yet, so stream-quality results would not be meaningful."
                .to_string(),
        );
    }
    // No samples yet: tell the operator what will appear once probes run.
    if log.is_empty() {
        items.push(
            "Live stream samples will appear here after the launcher collects a few probe windows. Leave the relay up for a few seconds to populate RTT, probe spread, loss, and fps."
                .to_string(),
        );
    }
    if let Some(sample) = log.latest() {
        // Control-plane probe quality (loss >= 3% or spread >= 18 ms).
        if sample.probe_loss_pct >= 3.0 || sample.probe_spread_ms >= 18.0 {
            items.push(
                "Control-plane probe spread or loss is elevated. That can come from the network or from server stalls, so compare it against the eye fps before blaming the WAN."
                    .to_string(),
            );
        }
        // Video-plane loss (>= 2%) or any server-side drops.
        if sample.video_loss_pct >= 2.0 || sample.dropped_frames > 0 {
            items.push(
                "Video packets are arriving with gaps or server-side drops. Stay on device H.264 pass-through for now and reduce concurrent load before trying more invasive changes."
                    .to_string(),
            );
        }
        // Present fps trailing receive fps by more than 1 fps on either eye
        // suggests local decode/render pressure; the advice differs based on
        // whether hardware decode is already in use.
        if sample.left_present_fps + 1.0 < sample.left_receive_fps
            || sample.right_present_fps + 1.0 < sample.right_receive_fps
        {
            items.push(if hardware_decode_active {
                "The client is receiving more frames than it is presenting. That points at local decode/render pressure, so prefer lighter breakout sizes or a cheaper source mode before adding bitrate."
                    .to_string()
            } else {
                "The client is receiving more frames than it is presenting. That points at local decode/render pressure, so prefer lighter breakout sizes or hardware decode."
                    .to_string()
            });
        }
        // Present-gap peaks exceeding packet-gap peaks by > 40 ms: the stall
        // happens after packets arrive, i.e. in the client pipeline.
        if (sample.left_present_gap_peak_ms - sample.left_packet_gap_peak_ms) > 40.0
            || (sample.right_present_gap_peak_ms - sample.right_packet_gap_peak_ms) > 40.0
        {
            items.push(
                "Present-gap spikes are materially larger than packet-gap spikes. That usually means the client decode/render path is stalling after packets arrive."
                    .to_string(),
            );
        }
        // Sustained preview-queue backlog (> 8 frames).
        if sample.queue_depth > 8 {
            items.push(
                "The preview queue is backing up. When queue depth climbs, expect laggy mouse feel and delayed visual response even if raw fps still looks okay."
                    .to_string(),
            );
        }
        // Client-side queue-peak spikes (>= 4) even if current depth is low.
        if sample.left_queue_peak >= 4 || sample.right_queue_peak >= 4 {
            items.push(
                "Queue depth is spiking even if the latest sample looks calm. That points at bursty backpressure rather than steady-state overload."
                    .to_string(),
            );
        }
        // Client packet gaps much larger (> 60 ms) than server send gaps:
        // the burstiness appears after the server, i.e. network/client-side.
        if (sample.left_packet_gap_peak_ms - sample.left_server_send_gap_peak_ms) > 60.0
            || (sample.right_packet_gap_peak_ms - sample.right_server_send_gap_peak_ms) > 60.0
        {
            items.push(
                "Client packet-gap spikes are much larger than the server's send-gap peaks. That points away from the server pipeline and toward network burstiness or client-side receive scheduling."
                    .to_string(),
            );
        }
        // Large server-side source gaps (>= 120 ms) before sending.
        if sample.left_server_source_gap_peak_ms >= 120.0
            || sample.right_server_source_gap_peak_ms >= 120.0
        {
            items.push(
                "The server is seeing large source-frame gaps before packets even leave the box. That points at capture cadence or server-side pipeline stalls more than WAN loss."
                    .to_string(),
            );
        }
        // Server-side queue peaks (>= 4).
        if sample.left_server_queue_peak >= 4 || sample.right_server_queue_peak >= 4 {
            items.push(
                "The server-side stream queue is peaking above its steady state. That suggests bursty backpressure is already forming before the client sees it."
                    .to_string(),
            );
        }
        // High client CPU (>= 85%); again advice depends on decode path.
        if sample.client_process_cpu_pct >= 85.0 {
            items.push(if hardware_decode_active {
                "Client process CPU is high even though hardware decode is active. If motion still looks rough, favor lighter breakout layouts or a cheaper source mode before adding more bitrate."
                    .to_string()
            } else {
                "Client process CPU is high. If motion still looks rough, favor lighter breakout layouts or a hardware decoder before adding more bitrate."
                    .to_string()
            });
        }
        // High server CPU (>= 85%).
        if sample.server_process_cpu_pct >= 85.0 {
            items.push(
                "Server process CPU is high. On current hardware that is a strong reason to stay on device H.264 pass-through and avoid any server-side eye transcoding."
                    .to_string(),
            );
        }
    }
    // Any eye with a non-Off feed source means pass-through is active.
    let source_passthrough = state
        .feed_sources
        .iter()
        .any(|preset| !matches!(preset, FeedSourcePreset::Off));
    if source_passthrough {
        items.push(
            "Device H.264 pass-through is active. On current GC311 hardware, prefer the real 1080p/720p source modes. The lower SD/VGA modes are intentionally retired because they center-cut widescreen HDMI sources."
                .to_string(),
        );
    }
    // Receive fps well below server fps (> 6 fps) without loss or drops:
    // lossless but slow, so the bottleneck is upstream cadence or local.
    if let Some(sample) = log.latest()
        && sample.video_loss_pct < 0.5
        && sample.dropped_frames == 0
        && ((sample.left_server_fps - sample.left_receive_fps) > 6.0
            || (sample.right_server_fps - sample.right_receive_fps) > 6.0)
    {
        items.push(
            "Receive fps is well below the target without packet loss. That usually points at source cadence or local decode pressure more than WAN loss."
                .to_string(),
        );
    }
    // Big packet-gap spikes (>= 140 ms) without loss: bursty delivery.
    if let Some(sample) = log.latest()
        && sample.video_loss_pct < 0.5
        && sample.dropped_frames == 0
        && (sample.left_packet_gap_peak_ms >= 140.0 || sample.right_packet_gap_peak_ms >= 140.0)
    {
        items.push(
            "Packet-gap spikes are high without packet loss. That means the stream is arriving in bursts, which usually points at source cadence, encoder stalls, or local decoder starvation more than raw WAN loss."
                .to_string(),
        );
    }
    // Software (`avdec_*`) decode on an eye whose presentation lags receive.
    if let Some(sample) = log.latest()
        && software_decode_active
        && ((sample.left_decoder_label.contains("avdec")
            && sample.left_present_fps + 1.0 < sample.left_receive_fps)
            || (sample.right_decoder_label.contains("avdec")
                && sample.right_present_fps + 1.0 < sample.right_receive_fps))
    {
        items.push(
            "At least one eye is falling back to `avdec_*` while presentation lags behind receive. A hardware decode path would likely help more than extra bitrate."
                .to_string(),
        );
    }
    // x264 software encoding plus high server CPU (>= 70%) should no longer
    // happen on the source-first path; flag as suspicious.
    if let Some(sample) = log.latest()
        && sample.server_process_cpu_pct >= 70.0
        && (sample.left_server_encoder_label.contains("x264")
            || sample.right_server_encoder_label.contains("x264"))
    {
        items.push(
            "At least one eye is still leaning on `x264enc`. That is now unexpected on the source-first path, so treat it as a bug or stale install rather than a normal operating mode."
                .to_string(),
        );
    }
    // Both eye feeds broken out at once doubles the client workload.
    if state.breakout_count() == 2 {
        items.push(
            "Both eye feeds are broken out right now. If the client starts struggling, compare in-launcher preview smoothness against full-window decode."
                .to_string(),
        );
    }
    // Guarantee at least one message so the section is never empty.
    if items.is_empty() {
        items.push("Session state looks stable. Collect a few real samples before changing capture settings.".to_string());
    }
    items
}
|
|
|
|
fn sample_uses_hardware_decode(sample: &PerformanceSample) -> bool {
|
|
decoder_label_is_hardware(&sample.left_decoder_label)
|
|
|| decoder_label_is_hardware(&sample.right_decoder_label)
|
|
}
|
|
|
|
fn sample_uses_software_decode(sample: &PerformanceSample) -> bool {
|
|
sample.left_decoder_label.contains("avdec") || sample.right_decoder_label.contains("avdec")
|
|
}
|
|
|
|
/// Case-insensitive check for known hardware-decoder element names
/// (NVDEC, VA-API, V4L2 stateless, Direct3D 11, VideoToolbox).
fn decoder_label_is_hardware(label: &str) -> bool {
    const HARDWARE_MARKERS: [&str; 7] = [
        "nvh264dec",
        "nvdec",
        "vah264dec",
        "vaapih264dec",
        "v4l2slh264dec",
        "d3d11",
        "vtdec",
    ];
    let lowered = label.to_ascii_lowercase();
    HARDWARE_MARKERS
        .iter()
        .any(|marker| lowered.contains(marker))
}
|
|
|
|
#[cfg(test)]
|
|
mod tests {
|
|
use super::*;
|
|
use crate::launcher::state::{
|
|
CaptureSizePreset, DeviceSelection, DisplaySurface, FeedSourcePreset, LauncherState,
|
|
};
|
|
|
|
fn sample(n: u64) -> PerformanceSample {
|
|
PerformanceSample {
|
|
rtt_ms: 20.0 + n as f32,
|
|
probe_spread_ms: 3.0 + n as f32,
|
|
input_latency_ms: 10.0 + n as f32,
|
|
probe_loss_pct: n as f32,
|
|
client_process_cpu_pct: 12.5 + n as f32,
|
|
server_process_cpu_pct: 22.5 + n as f32,
|
|
video_loss_pct: (n as f32) * 0.5,
|
|
left_receive_fps: 30.0,
|
|
left_present_fps: 29.0,
|
|
left_server_fps: 30.0,
|
|
left_stream_spread_ms: 4.0,
|
|
left_packet_gap_peak_ms: 55.0,
|
|
left_present_gap_peak_ms: 60.0,
|
|
left_queue_depth: n as u32,
|
|
left_queue_peak: n as u32,
|
|
left_server_source_gap_peak_ms: 42.0,
|
|
left_server_send_gap_peak_ms: 48.0,
|
|
left_server_queue_peak: n as u32 + 1,
|
|
left_server_encoder_label: "x264enc".to_string(),
|
|
left_decoder_label: "decodebin".to_string(),
|
|
left_stream_caps_label:
|
|
"video/x-h264, width=(int)1920, height=(int)1080, framerate=(fraction)60/1"
|
|
.to_string(),
|
|
left_decoded_caps_label:
|
|
"video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080".to_string(),
|
|
left_rendered_caps_label:
|
|
"video/x-raw, format=(string)RGBA, width=(int)1920, height=(int)1080".to_string(),
|
|
right_receive_fps: 30.0,
|
|
right_present_fps: 28.0,
|
|
right_server_fps: 30.0,
|
|
right_stream_spread_ms: 5.0,
|
|
right_packet_gap_peak_ms: 65.0,
|
|
right_present_gap_peak_ms: 75.0,
|
|
right_queue_depth: n as u32,
|
|
right_queue_peak: n as u32,
|
|
right_server_source_gap_peak_ms: 51.0,
|
|
right_server_send_gap_peak_ms: 58.0,
|
|
right_server_queue_peak: n as u32 + 1,
|
|
right_server_encoder_label: "source-pass-through".to_string(),
|
|
right_decoder_label: "decodebin".to_string(),
|
|
right_stream_caps_label:
|
|
"video/x-h264, width=(int)1920, height=(int)1080, framerate=(fraction)60/1"
|
|
.to_string(),
|
|
right_decoded_caps_label:
|
|
"video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080".to_string(),
|
|
right_rendered_caps_label:
|
|
"video/x-raw, format=(string)RGBA, width=(int)1920, height=(int)1080".to_string(),
|
|
dropped_frames: n,
|
|
queue_depth: n as u32,
|
|
}
|
|
}
|
|
|
|
#[test]
fn diagnostics_log_keeps_only_latest_samples_with_capacity() {
    // With capacity 2, recording three samples must evict the oldest one.
    let mut log = DiagnosticsLog::new(2);
    for n in 1..=3 {
        log.record(sample(n));
    }

    let retained: Vec<u64> = log.iter().map(|entry| entry.dropped_frames).collect();
    assert_eq!(retained, vec![2, 3]);
    assert_eq!(log.latest().map(|newest| newest.dropped_frames), Some(3));
}
|
|
|
|
#[test]
fn diagnostics_log_enforces_minimum_capacity() {
    // A requested capacity of zero is clamped so the log still keeps one sample.
    let mut log = DiagnosticsLog::new(0);
    for n in [1, 2] {
        log.record(sample(n));
    }
    assert_eq!(log.len(), 1);
    assert_eq!(log.latest().map(|newest| newest.dropped_frames), Some(2));
}
|
|
|
|
#[test]
fn snapshot_report_contains_state_fields_and_samples() {
    let mut state = LauncherState::new();
    state.devices = DeviceSelection {
        camera: Some("/dev/video0".to_string()),
        microphone: Some("alsa_input.usb".to_string()),
        speaker: Some("alsa_output.usb".to_string()),
        keyboard: Some("/dev/input/event10".to_string()),
        mouse: Some("/dev/input/event11".to_string()),
    };
    state.push_note("first note");

    let mut log = DiagnosticsLog::new(4);
    log.record(sample(7));

    let report = SnapshotReport::from_state(&state, &log, quality_probe_command().to_string());

    // Every selected device must round-trip into the report verbatim.
    let device_fields = [
        (report.selected_camera.as_deref(), "/dev/video0"),
        (report.selected_microphone.as_deref(), "alsa_input.usb"),
        (report.selected_speaker.as_deref(), "alsa_output.usb"),
        (report.selected_keyboard.as_deref(), "/dev/input/event10"),
        (report.selected_mouse.as_deref(), "/dev/input/event11"),
    ];
    for (actual, expected) in device_fields {
        assert_eq!(actual, Some(expected));
    }

    assert_eq!(report.audio_gain_label, "200%");
    assert_eq!(report.recent_samples.len(), 1);
    assert_eq!(report.notes, vec!["first note".to_string()]);
    assert!(report.status.contains("mode=remote"));
    assert!(report.client_version.starts_with("0."));

    // Left-eye media labels are sourced from the recorded sample.
    assert_eq!(report.left_feed_source, "Left Eye");
    assert!(report
        .left_capture_profile
        .contains("observed 1920x1080 @ 60 fps"));
    assert_eq!(report.left_capture_transport, "device H.264 pass-through");
    assert_eq!(report.left_decoder_label, "decodebin");
    assert!(report.left_stream_caps_label.contains("video/x-h264"));
    assert!(report.left_decoded_caps_label.contains("video/x-raw"));
    assert!(report.left_rendered_caps_label.contains("video/x-raw"));
}
|
|
|
|
#[test]
fn snapshot_report_marks_empty_live_labels_pending() {
    // Blank live labels in the latest sample must surface as "pending".
    let mut probe = sample(0);
    // Disjoint field borrows let us clear every live label in one pass.
    for label in [
        &mut probe.left_decoder_label,
        &mut probe.left_server_encoder_label,
        &mut probe.left_stream_caps_label,
        &mut probe.left_decoded_caps_label,
        &mut probe.left_rendered_caps_label,
        &mut probe.right_decoder_label,
        &mut probe.right_server_encoder_label,
        &mut probe.right_stream_caps_label,
        &mut probe.right_decoded_caps_label,
        &mut probe.right_rendered_caps_label,
    ] {
        label.clear();
    }
    let mut log = DiagnosticsLog::new(1);
    log.record(probe);

    let mut state = LauncherState::new();
    state.set_feed_source_preset(1, FeedSourcePreset::OtherEye);
    let report = SnapshotReport::from_state(&state, &log, quality_probe_command().to_string());

    assert_eq!(report.right_feed_source, "Left Eye (mirrored)");
    for label in [
        &report.left_decoder_label,
        &report.left_server_encoder_label,
        &report.left_stream_caps_label,
        &report.left_decoded_caps_label,
        &report.left_rendered_caps_label,
        &report.right_decoder_label,
        &report.right_server_encoder_label,
        &report.right_stream_caps_label,
        &report.right_decoded_caps_label,
        &report.right_rendered_caps_label,
    ] {
        assert_eq!(label, "pending");
    }
}
|
|
|
|
#[test]
fn snapshot_json_is_serializable_and_mentions_probe_command() {
    let state = LauncherState::new();
    let log = DiagnosticsLog::new(1);
    let report = SnapshotReport::from_state(&state, &log, quality_probe_command().to_string());

    let json = report.to_pretty_json().expect("serialize");
    // The JSON dump must carry the probe command plus routing/view-mode fields.
    for needle in ["quality_gate.sh", "routing", "view_mode"] {
        assert!(json.contains(needle), "{needle} missing from JSON snapshot");
    }
}
|
|
|
|
#[test]
fn snapshot_text_mentions_versions_profiles_and_recommendations() {
    let report = SnapshotReport::from_state(
        &LauncherState::new(),
        &DiagnosticsLog::new(1),
        quality_probe_command().to_string(),
    );
    let text = report.to_pretty_text();

    // Section headers and per-eye detail lines expected in the text report.
    let expected_fragments = [
        "Lesavka Diagnostics",
        "client: v",
        "left eye",
        "source:",
        "transport:",
        "live: decoder=",
        "stream caps:",
        "decoded caps:",
        "rendered caps:",
        "media staging",
        "current UI state",
        "recommendations",
    ];
    for fragment in expected_fragments {
        assert!(text.contains(fragment), "{fragment} missing from text report");
    }
}
|
|
|
|
/// Verifies diagnostics text follows live media settings.
#[test]
fn snapshot_text_reflects_live_media_control_changes() {
    let mut state = LauncherState::new();
    state.select_camera(Some("/dev/video9".to_string()));
    let quality = crate::launcher::devices::CameraMode::new(1920, 1080, 30);
    state.select_camera_quality(Some(quality));
    state.select_microphone(Some("alsa_input.usb".to_string()));
    state.select_speaker(Some("alsa_output.usb".to_string()));
    state.set_audio_gain_percent(250);
    state.set_mic_gain_percent(125);
    state.set_camera_channel_enabled(false);
    state.set_microphone_channel_enabled(true);

    let report = SnapshotReport::from_state(
        &state,
        &DiagnosticsLog::new(1),
        quality_probe_command().to_string(),
    );
    let text = report.to_pretty_text();

    // Each media line should echo the device, its setting, and enablement.
    assert!(text.contains("camera: /dev/video9 | quality=1080p@30 | enabled=false"));
    assert!(text.contains("speaker: alsa_output.usb | volume=250% | enabled=true"));
    assert!(text.contains("microphone: alsa_input.usb | gain=125% | enabled=true"));
}
|
|
|
|
#[test]
fn snapshot_text_renders_recent_samples_and_notes() {
    let mut state = LauncherState::new();
    state.set_server_available(true);
    state.push_note("operator changed camera quality during the run");

    let mut log = DiagnosticsLog::new(2);
    log.record(sample(3));

    let report = SnapshotReport::from_state(&state, &log, quality_probe_command().to_string());
    let text = report.to_pretty_text();

    assert!(text.contains("server: unknown (reachable)"));
    assert!(text.contains("rtt=23.0ms"));
    // 42/48 match sample(3)'s left server source/send gap peaks and 4 its
    // queue peak (n + 1). NOTE(review): the `l` before `x264enc` presumably
    // tags the left eye — confirm against the to_pretty_text formatter.
    assert!(text.contains("server=lx264enc:42/48/4"));
    assert!(text.contains("notes"));
    assert!(text.contains("operator changed camera quality during the run"));
}
|
|
|
|
#[test]
fn snapshot_report_uses_effective_mirrored_capture_profile() {
    // Left eye mirrors the right eye, so the right eye's 720p capture preset
    // should drive the left eye's effective capture profile.
    let mut state = LauncherState::new();
    state.set_feed_source_preset(0, FeedSourcePreset::OtherEye);
    state.set_capture_size_preset(1, CaptureSizePreset::P720);

    let report = SnapshotReport::from_state(
        &state,
        &DiagnosticsLog::new(1),
        quality_probe_command().to_string(),
    );

    assert_eq!(report.left_feed_source, "Right Eye (mirrored)");
    for fragment in ["720p", "1280x720"] {
        assert!(report.left_capture_profile.contains(fragment));
    }
}
|
|
|
|
#[test]
fn quality_probe_command_mentions_both_gates() {
    // The suggested probe command must reference both quality scripts.
    let command = quality_probe_command();
    for gate in ["hygiene_gate.sh", "quality_gate.sh"] {
        assert!(command.contains(gate), "{gate} missing from probe command");
    }
}
|
|
|
|
#[test]
fn source_capture_profile_prefers_observed_stream_caps_when_available() {
    // Complete stream caps (width, height, framerate) should take priority
    // over the configured capture preset in the rendered label.
    let capture = CaptureSizeChoice {
        preset: CaptureSizePreset::P1080,
        width: 1920,
        height: 1080,
        fps: 60,
        max_bitrate_kbit: 18_000,
    };
    let observed_caps =
        "video/x-h264, width=(int)1920, height=(int)1080, framerate=(fraction)60/1";

    let label = capture_profile_label(&capture, observed_caps);
    assert_eq!(
        label,
        "1080p | observed 1920x1080 @ 60 fps | bitrate est ~18000 kbit"
    );
}
|
|
|
|
#[test]
fn capture_profile_falls_back_when_stream_caps_are_incomplete() {
    // Caps missing height/framerate are not usable, so the label should fall
    // back to the configured capture numbers.
    let capture = CaptureSizeChoice {
        preset: CaptureSizePreset::P1080,
        width: 1920,
        height: 1080,
        fps: 60,
        max_bitrate_kbit: 18_000,
    };
    let partial_caps = "video/x-h264, width=(int)1920";

    let label = capture_profile_label(&capture, partial_caps);
    assert_eq!(
        label,
        "1080p | 1920x1080 | 60 fps | bitrate est ~18000 kbit"
    );
}
|
|
|
|
#[test]
fn recommendations_do_not_suggest_hardware_decode_when_nvdec_is_active() {
    // With NVDEC already active on both eyes, high CPU should not produce
    // advice to switch decoders — only the cheaper-source suggestion.
    let mut probe = sample(1);
    probe.client_process_cpu_pct = 96.0;
    probe.left_receive_fps = 40.0;
    probe.left_present_fps = 30.0;
    probe.left_decoder_label = "nvh264dec".to_string();
    probe.right_decoder_label = "nvh264dec".to_string();

    let mut log = DiagnosticsLog::new(1);
    log.record(probe);

    let joined = recommendations_for(&LauncherState::new(), &log).join("\n");
    assert!(!joined.contains("hardware decoder before adding more bitrate"));
    assert!(!joined.contains("lighter breakout sizes or hardware decode"));
    assert!(joined.contains("cheaper source mode"));
}
|
|
|
|
#[test]
fn recommendations_cover_video_network_queue_cpu_and_decoder_pressure() {
    // Windowed surfaces on both eyes plus a reachable server set up the
    // "both feeds broken out" recommendation.
    let mut state = LauncherState::new();
    state.set_server_available(true);
    state.set_display_surface(0, DisplaySurface::Window);
    state.set_display_surface(1, DisplaySurface::Window);

    let mut probe = sample(12);
    // Network pressure: elevated probe spread/loss plus visible video loss.
    probe.probe_loss_pct = 4.0;
    probe.probe_spread_ms = 22.0;
    probe.video_loss_pct = 3.0;
    probe.dropped_frames = 2;
    // Receive/present imbalance on both eyes.
    probe.left_receive_fps = 58.0;
    probe.left_present_fps = 42.0;
    probe.right_receive_fps = 58.0;
    probe.right_present_fps = 42.0;
    // Client-side gap spikes well above the server send-side peaks below.
    probe.left_packet_gap_peak_ms = 180.0;
    probe.right_packet_gap_peak_ms = 181.0;
    probe.left_present_gap_peak_ms = 250.0;
    probe.right_present_gap_peak_ms = 260.0;
    // Queue pressure on client and server.
    probe.queue_depth = 9;
    probe.left_queue_peak = 5;
    probe.right_queue_peak = 5;
    probe.left_server_send_gap_peak_ms = 40.0;
    probe.right_server_send_gap_peak_ms = 40.0;
    probe.left_server_source_gap_peak_ms = 130.0;
    probe.right_server_source_gap_peak_ms = 131.0;
    probe.left_server_queue_peak = 5;
    probe.right_server_queue_peak = 5;
    // CPU pressure with software decode/encode active on both eyes.
    probe.client_process_cpu_pct = 90.0;
    probe.server_process_cpu_pct = 88.0;
    probe.left_decoder_label = "avdec_h264".to_string();
    probe.right_decoder_label = "avdec_h264".to_string();
    probe.left_server_encoder_label = "x264enc".to_string();
    probe.right_server_encoder_label = "x264enc".to_string();

    let mut log = DiagnosticsLog::new(1);
    log.record(probe);
    let joined = recommendations_for(&state, &log).join("\n");

    let expected_needles = [
        "Control-plane probe spread or loss is elevated",
        "Video packets are arriving with gaps",
        "receiving more frames than it is presenting",
        "Present-gap spikes are materially larger",
        "preview queue is backing up",
        "Queue depth is spiking",
        "Client packet-gap spikes are much larger",
        "large source-frame gaps",
        "server-side stream queue is peaking",
        "Client process CPU is high",
        "Server process CPU is high",
        "Device H.264 pass-through is active",
        "At least one eye is falling back",
        "At least one eye is still leaning on `x264enc`",
        "Both eye feeds are broken out",
    ];
    for needle in expected_needles {
        assert!(joined.contains(needle), "{needle} missing from {joined}");
    }
}
|
|
|
|
#[test]
fn recommendations_cover_low_receive_fps_and_bursty_gap_without_loss() {
    // Zero loss combined with depressed receive fps and large packet gaps
    // should trigger the loss-free variants of the network advice.
    let mut probe = sample(0);
    probe.video_loss_pct = 0.0;
    probe.dropped_frames = 0;
    probe.left_server_fps = 60.0;
    probe.left_receive_fps = 48.0;
    probe.right_server_fps = 60.0;
    probe.right_receive_fps = 48.0;
    probe.left_packet_gap_peak_ms = 150.0;
    probe.right_packet_gap_peak_ms = 151.0;

    let mut log = DiagnosticsLog::new(1);
    log.record(probe);
    let joined = recommendations_for(&LauncherState::new(), &log).join("\n");

    assert!(joined.contains("Receive fps is well below the target without packet loss"));
    assert!(joined.contains("Packet-gap spikes are high without packet loss"));
}
|
|
|
|
#[test]
fn hardware_decoder_detection_recognizes_nvdec_labels() {
    // A single NVDEC label is enough to classify the sample as hardware
    // decode and exclude it from the software-decode bucket.
    let mut probe = sample(1);
    probe.left_decoder_label = "nvh264dec".to_string();

    assert!(sample_uses_hardware_decode(&probe));
    assert!(!sample_uses_software_decode(&probe));
}
|
|
}
|