use super::*;
use crate::launcher::state::{
    CaptureSizePreset, DeviceSelection, DisplaySurface, FeedSourcePreset, LauncherState,
};
use crate::uplink_telemetry::UpstreamStreamTelemetry;

/// Builds a deterministic `PerformanceSample` fixture whose varying fields are
/// offset by `n`, so individual samples can be told apart in assertions
/// (e.g. `dropped_frames == n`). All label/caps strings use realistic
/// GStreamer-style values so caps-parsing code paths see representative input.
fn sample(n: u64) -> PerformanceSample {
    PerformanceSample {
        rtt_ms: 20.0 + n as f32,
        probe_spread_ms: 3.0 + n as f32,
        input_latency_ms: 10.0 + n as f32,
        probe_loss_pct: n as f32,
        client_process_cpu_pct: 12.5 + n as f32,
        server_process_cpu_pct: 22.5 + n as f32,
        video_loss_pct: (n as f32) * 0.5,
        left_receive_fps: 30.0,
        left_present_fps: 29.0,
        left_server_fps: 30.0,
        left_stream_spread_ms: 4.0,
        left_packet_gap_peak_ms: 55.0,
        left_present_gap_peak_ms: 60.0,
        left_queue_depth: n as u32,
        left_queue_peak: n as u32,
        left_server_source_gap_peak_ms: 42.0,
        left_server_send_gap_peak_ms: 48.0,
        left_server_queue_peak: n as u32 + 1,
        left_server_encoder_label: "x264enc".to_string(),
        left_decoder_label: "decodebin".to_string(),
        left_stream_caps_label:
            "video/x-h264, width=(int)1920, height=(int)1080, framerate=(fraction)60/1"
                .to_string(),
        left_decoded_caps_label:
            "video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080".to_string(),
        left_rendered_caps_label:
            "video/x-raw, format=(string)RGBA, width=(int)1920, height=(int)1080".to_string(),
        right_receive_fps: 30.0,
        right_present_fps: 28.0,
        right_server_fps: 30.0,
        right_stream_spread_ms: 5.0,
        right_packet_gap_peak_ms: 65.0,
        right_present_gap_peak_ms: 75.0,
        right_queue_depth: n as u32,
        right_queue_peak: n as u32,
        right_server_source_gap_peak_ms: 51.0,
        right_server_send_gap_peak_ms: 58.0,
        right_server_queue_peak: n as u32 + 1,
        right_server_encoder_label: "source-pass-through".to_string(),
        right_decoder_label: "decodebin".to_string(),
        right_stream_caps_label:
            "video/x-h264, width=(int)1920, height=(int)1080, framerate=(fraction)60/1"
                .to_string(),
        right_decoded_caps_label:
            "video/x-raw, format=(string)NV12, width=(int)1920, height=(int)1080".to_string(),
        right_rendered_caps_label:
            "video/x-raw, format=(string)RGBA, width=(int)1920, height=(int)1080".to_string(),
        upstream_camera: UpstreamStreamTelemetry {
            enabled: true,
            connected: true,
            reconnect_count: 2,
            queue_depth: 3,
            queue_peak: 7,
            latest_enqueue_age_ms: 14.0,
            enqueue_age_peak_ms: 48.0,
            enqueue_block_peak_ms: 5.0,
            packets_enqueued: 120,
            packets_streamed: 118,
            dropped_packets: 0,
            dropped_queue_full_packets: 0,
            dropped_stale_packets: 0,
            latest_delivery_age_ms: 22.0,
            delivery_age_peak_ms: 61.0,
            last_error: String::new(),
        },
        upstream_microphone: UpstreamStreamTelemetry {
            enabled: true,
            connected: true,
            reconnect_count: 1,
            queue_depth: 2,
            queue_peak: 5,
            latest_enqueue_age_ms: 11.0,
            enqueue_age_peak_ms: 22.0,
            enqueue_block_peak_ms: 3.0,
            packets_enqueued: 220,
            packets_streamed: 216,
            dropped_packets: 1,
            dropped_queue_full_packets: 1,
            dropped_stale_packets: 0,
            latest_delivery_age_ms: 19.0,
            delivery_age_peak_ms: 37.0,
            last_error: String::new(),
        },
        dropped_frames: n,
        queue_depth: n as u32,
    }
}

#[test]
fn diagnostics_log_keeps_only_latest_samples_with_capacity() {
    let mut log = DiagnosticsLog::new(2);
    log.record(sample(1));
    log.record(sample(2));
    log.record(sample(3));
    // `dropped_frames` carries the sample index, so it identifies which
    // samples survived eviction once capacity (2) was exceeded.
    // NOTE: the original had `let kept: Vec = ...` — the `<u64>` type
    // parameter was missing, which does not compile.
    let kept: Vec<u64> = log.iter().map(|item| item.dropped_frames).collect();
    assert_eq!(kept, vec![2, 3]);
    assert_eq!(log.latest().map(|s| s.dropped_frames), Some(3));
}

#[test]
fn diagnostics_log_enforces_minimum_capacity() {
    // A requested capacity of 0 is clamped up so the log always holds at
    // least the most recent sample.
    let mut log = DiagnosticsLog::new(0);
    log.record(sample(1));
    log.record(sample(2));
    assert_eq!(log.len(), 1);
    assert_eq!(log.latest().map(|s| s.dropped_frames), Some(2));
}

#[test]
fn snapshot_report_contains_state_fields_and_samples() {
    let mut state = LauncherState::new();
    state.devices = DeviceSelection {
        camera: Some("/dev/video0".to_string()),
        microphone: Some("alsa_input.usb".to_string()),
        speaker: Some("alsa_output.usb".to_string()),
        keyboard: Some("/dev/input/event10".to_string()),
        mouse: Some("/dev/input/event11".to_string()),
    };
    state.push_note("first note");
    let mut log = DiagnosticsLog::new(4);
    log.record(sample(7));
    let report = SnapshotReport::from_state(&state, &log, quality_probe_command().to_string());
    assert_eq!(report.selected_camera.as_deref(), Some("/dev/video0"));
    assert_eq!(
        report.selected_microphone.as_deref(),
        Some("alsa_input.usb")
    );
    assert_eq!(report.selected_speaker.as_deref(), Some("alsa_output.usb"));
    assert_eq!(report.audio_gain_label, "200%");
    assert_eq!(
        report.selected_keyboard.as_deref(),
        Some("/dev/input/event10")
    );
    assert_eq!(report.selected_mouse.as_deref(), Some("/dev/input/event11"));
    assert_eq!(report.recent_samples.len(), 1);
    assert_eq!(report.notes, vec!["first note".to_string()]);
    assert!(report.status.contains("mode=remote"));
    assert!(report.client_version.starts_with("0."));
    assert_eq!(report.left_feed_source, "Left Eye");
    // The observed resolution/fps comes from the sample's stream caps label,
    // not just the configured capture preset.
    assert!(
        report
            .left_capture_profile
            .contains("observed 1920x1080 @ 60 fps")
    );
    assert_eq!(report.left_capture_transport, "device H.264 pass-through");
    assert_eq!(report.left_decoder_label, "decodebin");
    assert!(report.left_stream_caps_label.contains("video/x-h264"));
    assert!(report.left_decoded_caps_label.contains("video/x-raw"));
    assert!(report.left_rendered_caps_label.contains("video/x-raw"));
    assert_eq!(report.upstream_camera.queue_peak, 7);
    assert_eq!(report.upstream_microphone.reconnect_count, 1);
}

#[test]
fn snapshot_report_marks_empty_live_labels_pending() {
    let mut log = DiagnosticsLog::new(1);
    let mut sample = sample(0);
    // Empty live labels simulate telemetry that has not arrived yet; the
    // report should render each of them as "pending".
    sample.left_decoder_label.clear();
    sample.left_server_encoder_label.clear();
    sample.left_stream_caps_label.clear();
    sample.left_decoded_caps_label.clear();
    sample.left_rendered_caps_label.clear();
    sample.right_decoder_label.clear();
    sample.right_server_encoder_label.clear();
    sample.right_stream_caps_label.clear();
    sample.right_decoded_caps_label.clear();
    sample.right_rendered_caps_label.clear();
    log.record(sample);
    let mut state = LauncherState::new();
    state.set_feed_source_preset(1, FeedSourcePreset::OtherEye);
    let report = SnapshotReport::from_state(&state, &log, quality_probe_command().to_string());
    assert_eq!(report.left_decoder_label, "pending");
    assert_eq!(report.left_server_encoder_label, "pending");
    assert_eq!(report.left_stream_caps_label, "pending");
    assert_eq!(report.left_decoded_caps_label, "pending");
    assert_eq!(report.left_rendered_caps_label, "pending");
    assert_eq!(report.right_feed_source, "Left Eye (mirrored)");
    assert_eq!(report.right_decoder_label, "pending");
    assert_eq!(report.right_server_encoder_label, "pending");
    assert_eq!(report.right_stream_caps_label, "pending");
    assert_eq!(report.right_decoded_caps_label, "pending");
    assert_eq!(report.right_rendered_caps_label, "pending");
}

#[test]
fn snapshot_json_is_serializable_and_mentions_probe_command() {
    let report = SnapshotReport::from_state(
        &LauncherState::new(),
        &DiagnosticsLog::new(1),
        quality_probe_command().to_string(),
    );
    let json = report.to_pretty_json().expect("serialize");
    assert!(json.contains("quality_gate.sh"));
    assert!(json.contains("routing"));
    assert!(json.contains("view_mode"));
}

#[test]
fn snapshot_text_mentions_versions_profiles_and_recommendations() {
    let report = SnapshotReport::from_state(
        &LauncherState::new(),
        &DiagnosticsLog::new(1),
        quality_probe_command().to_string(),
    );
    let text = report.to_pretty_text();
    // Spot-check every major section header/prefix of the text report.
    assert!(text.contains("Lesavka Diagnostics"));
    assert!(text.contains("client: v"));
    assert!(text.contains("left eye"));
    assert!(text.contains("source:"));
    assert!(text.contains("transport:"));
    assert!(text.contains("live: decoder="));
    assert!(text.contains("stream caps:"));
    assert!(text.contains("decoded caps:"));
    assert!(text.contains("rendered caps:"));
    assert!(text.contains("media staging"));
    assert!(text.contains("uplink camera:"));
    assert!(text.contains("uplink microphone:"));
    assert!(text.contains("current UI state"));
    assert!(text.contains("recommendations"));
}

/// Verifies diagnostics text follows live media settings.
#[test]
fn snapshot_text_reflects_live_media_control_changes() {
    let mut state = LauncherState::new();
    state.select_camera(Some("/dev/video9".to_string()));
    state.select_camera_quality(Some(crate::launcher::devices::CameraMode::new(
        1920, 1080, 30,
    )));
    state.select_microphone(Some("alsa_input.usb".to_string()));
    state.select_speaker(Some("alsa_output.usb".to_string()));
    state.set_audio_gain_percent(250);
    state.set_mic_gain_percent(125);
    state.set_camera_channel_enabled(false);
    state.set_microphone_channel_enabled(true);
    let report = SnapshotReport::from_state(
        &state,
        &DiagnosticsLog::new(1),
        quality_probe_command().to_string(),
    );
    let text = report.to_pretty_text();
    assert!(text.contains("camera: /dev/video9 | quality=1080p@30 | enabled=false"));
    assert!(text.contains("speaker: alsa_output.usb | volume=250% | enabled=true"));
    assert!(text.contains("microphone: alsa_input.usb | gain=125% | enabled=true"));
}

#[test]
fn snapshot_text_renders_recent_samples_and_notes() {
    let mut state = LauncherState::new();
    state.set_server_available(true);
    state.push_note("operator changed camera quality during the run");
    let mut log = DiagnosticsLog::new(2);
    log.record(sample(3));
    let report = SnapshotReport::from_state(&state, &log, quality_probe_command().to_string());
    let text = report.to_pretty_text();
    assert!(text.contains("server: unknown (reachable)"));
    // sample(3): rtt = 20.0 + 3 = 23.0 ms.
    assert!(text.contains("rtt=23.0ms"));
    // Left-eye server line: encoder label plus source-gap/send-gap/queue-peak.
    assert!(text.contains("server=lx264enc:42/48/4"));
    assert!(text.contains("uplink: cam=live queue=3/7"));
    assert!(text.contains("notes"));
    assert!(text.contains("operator changed camera quality during the run"));
}

#[test]
fn recommendations_call_out_upstream_queue_age_and_reconnect_churn() {
    let mut log = DiagnosticsLog::new(1);
    let mut stressed = sample(9);
    // Push the camera uplink well past healthy ageing/reconnect thresholds.
    stressed.upstream_camera.latest_enqueue_age_ms = 180.0;
    stressed.upstream_camera.enqueue_age_peak_ms = 320.0;
    stressed.upstream_camera.enqueue_block_peak_ms = 55.0;
    stressed.upstream_camera.delivery_age_peak_ms = 420.0;
    stressed.upstream_camera.dropped_stale_packets = 4;
    stressed.upstream_camera.reconnect_count = 4;
    // And the microphone uplink past its ageing/drop thresholds.
    stressed.upstream_microphone.latest_enqueue_age_ms = 120.0;
    stressed.upstream_microphone.enqueue_age_peak_ms = 260.0;
    stressed.upstream_microphone.enqueue_block_peak_ms = 31.0;
    stressed.upstream_microphone.delivery_age_peak_ms = 240.0;
    stressed.upstream_microphone.dropped_queue_full_packets = 2;
    log.record(stressed);
    let report = SnapshotReport::from_state(
        &LauncherState::new(),
        &log,
        quality_probe_command().to_string(),
    );
    for needle in [
        "webcam uplink queue is aging packets",
        "microphone uplink queue is aging live audio",
        "upstream media loops have already reconnected",
        "webcam uplink is now choosing freshness",
        "microphone uplink is dropping or aging live chunks",
    ] {
        assert!(
            report
                .recommendations
                .iter()
                .any(|item| item.contains(needle)),
            "{needle} missing from recommendations"
        );
    }
}

#[test]
fn snapshot_report_uses_effective_mirrored_capture_profile() {
    let mut state = LauncherState::new();
    // Left eye mirrors the right eye, which is set to 720p; the report must
    // describe the left eye with the mirrored (effective) profile.
    state.set_feed_source_preset(0, FeedSourcePreset::OtherEye);
    state.set_capture_size_preset(1, CaptureSizePreset::P720);
    let report = SnapshotReport::from_state(
        &state,
        &DiagnosticsLog::new(1),
        quality_probe_command().to_string(),
    );
    assert_eq!(report.left_feed_source, "Right Eye (mirrored)");
    assert!(report.left_capture_profile.contains("720p"));
    assert!(report.left_capture_profile.contains("1280x720"));
}

#[test]
fn quality_probe_command_mentions_both_gates() {
    let cmd = quality_probe_command();
    assert!(cmd.contains("hygiene_gate.sh"));
    assert!(cmd.contains("quality_gate.sh"));
}

#[test]
fn source_capture_profile_prefers_observed_stream_caps_when_available() {
    let capture = CaptureSizeChoice {
        preset: CaptureSizePreset::P1080,
        width: 1920,
        height: 1080,
        fps: 60,
        max_bitrate_kbit: 18_000,
    };
    // Caps carry width, height and framerate, so the label reports the
    // observed geometry rather than the configured one.
    let label = capture_profile_label(
        &capture,
        "video/x-h264, width=(int)1920, height=(int)1080, framerate=(fraction)60/1",
    );
    assert_eq!(
        label,
        "1080p | observed 1920x1080 @ 60 fps | bitrate est ~18000 kbit"
    );
}

#[test]
fn capture_profile_falls_back_when_stream_caps_are_incomplete() {
    let capture = CaptureSizeChoice {
        preset: CaptureSizePreset::P1080,
        width: 1920,
        height: 1080,
        fps: 60,
        max_bitrate_kbit: 18_000,
    };
    // Caps are missing height/framerate, so the configured profile is used.
    let label = capture_profile_label(&capture, "video/x-h264, width=(int)1920");
    assert_eq!(label, "1080p | 1920x1080 | 60 fps | bitrate est ~18000 kbit");
}

#[test]
fn recommendations_do_not_suggest_hardware_decode_when_nvdec_is_active() {
    let mut log = DiagnosticsLog::new(1);
    let mut sample = sample(1);
    // High client CPU plus a present-fps shortfall would normally suggest
    // hardware decode — but NVDEC is already in use on both eyes.
    sample.client_process_cpu_pct = 96.0;
    sample.left_receive_fps = 40.0;
    sample.left_present_fps = 30.0;
    sample.left_decoder_label = "nvh264dec".to_string();
    sample.right_decoder_label = "nvh264dec".to_string();
    log.record(sample);
    let items = recommendations_for(&LauncherState::new(), &log);
    let joined = items.join("\n");
    assert!(!joined.contains("hardware decoder before adding more bitrate"));
    assert!(!joined.contains("lighter breakout sizes or hardware decode"));
    assert!(joined.contains("cheaper source mode"));
}

#[test]
fn recommendations_cover_video_network_queue_cpu_and_decoder_pressure() {
    let mut state = LauncherState::new();
    state.set_server_available(true);
    state.set_display_surface(0, DisplaySurface::Window);
    state.set_display_surface(1, DisplaySurface::Window);
    // Stress every dimension at once: probe spread/loss, video loss, packet
    // and present gaps, client/server queues, CPU, and software codecs.
    let mut sample = sample(12);
    sample.probe_loss_pct = 4.0;
    sample.probe_spread_ms = 22.0;
    sample.video_loss_pct = 3.0;
    sample.dropped_frames = 2;
    sample.left_receive_fps = 58.0;
    sample.left_present_fps = 42.0;
    sample.right_receive_fps = 58.0;
    sample.right_present_fps = 42.0;
    sample.left_packet_gap_peak_ms = 180.0;
    sample.right_packet_gap_peak_ms = 181.0;
    sample.left_present_gap_peak_ms = 250.0;
    sample.right_present_gap_peak_ms = 260.0;
    sample.queue_depth = 9;
    sample.left_queue_peak = 5;
    sample.right_queue_peak = 5;
    sample.left_server_send_gap_peak_ms = 40.0;
    sample.right_server_send_gap_peak_ms = 40.0;
    sample.left_server_source_gap_peak_ms = 130.0;
    sample.right_server_source_gap_peak_ms = 131.0;
    sample.left_server_queue_peak = 5;
    sample.right_server_queue_peak = 5;
    sample.client_process_cpu_pct = 90.0;
    sample.server_process_cpu_pct = 88.0;
    sample.left_decoder_label = "avdec_h264".to_string();
    sample.right_decoder_label = "avdec_h264".to_string();
    sample.left_server_encoder_label = "x264enc".to_string();
    sample.right_server_encoder_label = "x264enc".to_string();
    let mut log = DiagnosticsLog::new(1);
    log.record(sample);
    let joined = recommendations_for(&state, &log).join("\n");
    for needle in [
        "Control-plane probe spread or loss is elevated",
        "Video packets are arriving with gaps",
        "receiving more frames than it is presenting",
        "Present-gap spikes are materially larger",
        "preview queue is backing up",
        "Queue depth is spiking",
        "Client packet-gap spikes are much larger",
        "large source-frame gaps",
        "server-side stream queue is peaking",
        "Client process CPU is high",
        "Server process CPU is high",
        "Device H.264 pass-through is active",
        "At least one eye is falling back",
        "At least one eye is still leaning on `x264enc`",
        "Both eye feeds are broken out",
    ] {
        assert!(joined.contains(needle), "{needle} missing from {joined}");
    }
}

#[test]
fn recommendations_cover_low_receive_fps_and_bursty_gap_without_loss() {
    // Zero loss but a receive-fps shortfall and high gap peaks should still
    // trigger the "without packet loss" recommendation variants.
    let mut sample = sample(0);
    sample.video_loss_pct = 0.0;
    sample.dropped_frames = 0;
    sample.left_server_fps = 60.0;
    sample.left_receive_fps = 48.0;
    sample.right_server_fps = 60.0;
    sample.right_receive_fps = 48.0;
    sample.left_packet_gap_peak_ms = 150.0;
    sample.right_packet_gap_peak_ms = 151.0;
    let mut log = DiagnosticsLog::new(1);
    log.record(sample);
    let joined = recommendations_for(&LauncherState::new(), &log).join("\n");
    assert!(joined.contains("Receive fps is well below the target without packet loss"));
    assert!(joined.contains("Packet-gap spikes are high without packet loss"));
}

#[test]
fn hardware_decoder_detection_recognizes_nvdec_labels() {
    let mut sample = sample(1);
    sample.left_decoder_label = "nvh264dec".to_string();
    assert!(sample_uses_hardware_decode(&sample));
    assert!(!sample_uses_software_decode(&sample));
}