uvc: restore four MJPEG output profiles

This commit is contained in:
Brad Stein 2026-05-12 23:56:28 -03:00
parent 0e5de9d21b
commit d9cc0d2237
24 changed files with 594 additions and 268 deletions

6
Cargo.lock generated
View File

@ -1652,7 +1652,7 @@ checksum = "09edd9e8b54e49e587e4f6295a7d29c3ea94d469cb40ab8ca70b288248a81db2"
[[package]] [[package]]
name = "lesavka_client" name = "lesavka_client"
version = "0.22.18" version = "0.22.19"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"async-stream", "async-stream",
@ -1686,7 +1686,7 @@ dependencies = [
[[package]] [[package]]
name = "lesavka_common" name = "lesavka_common"
version = "0.22.18" version = "0.22.19"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"base64", "base64",
@ -1698,7 +1698,7 @@ dependencies = [
[[package]] [[package]]
name = "lesavka_server" name = "lesavka_server"
version = "0.22.18" version = "0.22.19"
dependencies = [ dependencies = [
"anyhow", "anyhow",
"base64", "base64",

View File

@ -4,7 +4,7 @@ path = "src/main.rs"
[package] [package]
name = "lesavka_client" name = "lesavka_client"
version = "0.22.18" version = "0.22.19"
edition = "2024" edition = "2024"
[dependencies] [dependencies]

View File

@ -54,7 +54,7 @@ pub fn next_delay(current: Duration) -> Duration {
/// Inputs: operator/env codec text. Output: the supported transport codec when /// Inputs: operator/env codec text. Output: the supported transport codec when
/// recognized. Why: the client must not silently fall back to a differently /// recognized. Why: the client must not silently fall back to a differently
/// calibrated upstream path when the UI asks for HEVC or MJPEG. /// calibrated upstream path when the UI asks for HEVC or MJPEG.
fn parse_camera_codec(raw: &str) -> Option<CameraCodec> { pub(crate) fn parse_camera_codec(raw: &str) -> Option<CameraCodec> {
match raw.trim().to_ascii_lowercase().as_str() { match raw.trim().to_ascii_lowercase().as_str() {
"mjpeg" | "mjpg" | "jpeg" => Some(CameraCodec::Mjpeg), "mjpeg" | "mjpg" | "jpeg" => Some(CameraCodec::Mjpeg),
"h264" => Some(CameraCodec::H264), "h264" => Some(CameraCodec::H264),

View File

@ -33,14 +33,14 @@ fn env_u32(name: &str, default: u32) -> u32 {
.unwrap_or(default) .unwrap_or(default)
} }
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum CameraCodec { pub enum CameraCodec {
H264, H264,
Hevc, Hevc,
Mjpeg, Mjpeg,
} }
#[derive(Clone, Copy, Debug)] #[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct CameraConfig { pub struct CameraConfig {
pub codec: CameraCodec, pub codec: CameraCodec,
pub width: u32, pub width: u32,

View File

@ -250,18 +250,24 @@ fn pick_h264_encoder(fps: u32) -> Result<String> {
/// Why: the client-to-server probe should exercise the same HEVC transport /// Why: the client-to-server probe should exercise the same HEVC transport
/// shape as real webcam uplink without requiring a specific GPU encoder. /// shape as real webcam uplink without requiring a specific GPU encoder.
fn pick_hevc_encoder(fps: u32) -> Result<String> { fn pick_hevc_encoder(fps: u32) -> Result<String> {
let keyframe_interval = low_latency_hevc_keyframe_interval(fps);
if gst::ElementFactory::find("nvh265enc").is_some() {
return Ok(format!(
"video/x-raw,format=NV12 ! nvh265enc preset=p1 tune=ultra-low-latency rc-mode=cbr bitrate=3000 max-bitrate=3000 vbv-buffer-size=3000 gop-size={} zerolatency=true repeat-sequence-header=true",
keyframe_interval
));
}
for encoder in ["vah265enc", "vaapih265enc", "v4l2h265enc"] {
if gst::ElementFactory::find(encoder).is_some() {
return Ok(encoder.to_string());
}
}
if gst::ElementFactory::find("x265enc").is_some() { if gst::ElementFactory::find("x265enc").is_some() {
let keyframe_interval = low_latency_hevc_keyframe_interval(fps);
return Ok(format!( return Ok(format!(
"x265enc tune=zerolatency speed-preset=ultrafast bitrate=2500 key-int-max={}", "x265enc tune=zerolatency speed-preset=ultrafast bitrate=2500 key-int-max={}",
keyframe_interval keyframe_interval
)); ));
} }
for encoder in ["nvh265enc", "vah265enc", "vaapih265enc", "v4l2h265enc"] {
if gst::ElementFactory::find(encoder).is_some() {
return Ok(encoder.to_string());
}
}
bail!("no usable HEVC encoder found for sync probe") bail!("no usable HEVC encoder found for sync probe")
} }

View File

@ -5,6 +5,7 @@ use std::path::PathBuf;
use std::time::Duration; use std::time::Duration;
use crate::app_support::DEFAULT_SERVER_ADDR; use crate::app_support::DEFAULT_SERVER_ADDR;
use crate::input::camera::CameraCodec;
use crate::sync_probe::signature::first_unsupported_event_code; use crate::sync_probe::signature::first_unsupported_event_code;
#[derive(Clone, Debug, Eq, PartialEq)] #[derive(Clone, Debug, Eq, PartialEq)]
@ -17,6 +18,10 @@ pub struct ProbeConfig {
pub marker_tick_period: u32, pub marker_tick_period: u32,
pub event_width_codes: Vec<u32>, pub event_width_codes: Vec<u32>,
pub timeline_json: Option<PathBuf>, pub timeline_json: Option<PathBuf>,
pub camera_codec: Option<CameraCodec>,
pub camera_width: Option<u32>,
pub camera_height: Option<u32>,
pub camera_fps: Option<u32>,
} }
#[derive(Debug, Eq, PartialEq)] #[derive(Debug, Eq, PartialEq)]
@ -26,7 +31,7 @@ pub enum ParseOutcome {
} }
pub fn usage() -> &'static str { pub fn usage() -> &'static str {
"Usage: lesavka-sync-probe [--server http://HOST:50051] [--duration-seconds 10] [--warmup-seconds 4] [--pulse-period-ms 1000] [--pulse-width-ms 120] [--marker-tick-period 5] [--event-width-codes 1,2,3] [--timeline-json PATH]" "Usage: lesavka-sync-probe [--server http://HOST:50051] [--duration-seconds 10] [--warmup-seconds 4] [--pulse-period-ms 1000] [--pulse-width-ms 120] [--marker-tick-period 5] [--event-width-codes 1,2,3] [--timeline-json PATH] [--camera-codec hevc|mjpeg] [--camera-mode WIDTHxHEIGHT@FPS]"
} }
pub fn parse_args_outcome_from<I, S>(args: I) -> Result<ParseOutcome> pub fn parse_args_outcome_from<I, S>(args: I) -> Result<ParseOutcome>
@ -43,6 +48,10 @@ where
let mut marker_tick_period = 5u32; let mut marker_tick_period = 5u32;
let mut event_width_codes = Vec::<u32>::new(); let mut event_width_codes = Vec::<u32>::new();
let mut timeline_json = None::<PathBuf>; let mut timeline_json = None::<PathBuf>;
let mut camera_codec = None::<CameraCodec>;
let mut camera_width = None::<u32>;
let mut camera_height = None::<u32>;
let mut camera_fps = None::<u32>;
while let Some(arg) = args.next() { while let Some(arg) = args.next() {
match arg.as_str() { match arg.as_str() {
@ -114,6 +123,48 @@ where
} }
timeline_json = Some(PathBuf::from(path)); timeline_json = Some(PathBuf::from(path));
} }
"--camera-codec" => {
let raw = args.next().context("missing value after --camera-codec")?;
camera_codec = crate::app_support::parse_camera_codec(&raw)
.with_context(|| format!("unsupported camera codec `{raw}`\n{}", usage()))
.map(Some)?;
}
"--camera-mode" => {
let (width, height, fps) = parse_camera_mode(args.next())?;
camera_width = Some(width);
camera_height = Some(height);
camera_fps = Some(fps);
}
"--camera-width" => {
camera_width = Some(parse_u32_arg(
args.next(),
"--camera-width",
"camera width must be positive",
)?);
if camera_width == Some(0) {
bail!("camera width must be positive\n{}", usage());
}
}
"--camera-height" => {
camera_height = Some(parse_u32_arg(
args.next(),
"--camera-height",
"camera height must be positive",
)?);
if camera_height == Some(0) {
bail!("camera height must be positive\n{}", usage());
}
}
"--camera-fps" => {
camera_fps = Some(parse_u32_arg(
args.next(),
"--camera-fps",
"camera fps must be positive",
)?);
if camera_fps == Some(0) {
bail!("camera fps must be positive\n{}", usage());
}
}
"--help" | "-h" => return Ok(ParseOutcome::Help), "--help" | "-h" => return Ok(ParseOutcome::Help),
_ => bail!("unexpected argument `{arg}`\n{}", usage()), _ => bail!("unexpected argument `{arg}`\n{}", usage()),
} }
@ -134,6 +185,10 @@ where
marker_tick_period, marker_tick_period,
event_width_codes, event_width_codes,
timeline_json, timeline_json,
camera_codec,
camera_width,
camera_height,
camera_fps,
})) }))
} }
@ -153,6 +208,35 @@ fn parse_u32_arg(value: Option<String>, flag: &str, context: &str) -> Result<u32
.with_context(|| format!("{context}\n{}", usage())) .with_context(|| format!("{context}\n{}", usage()))
} }
fn parse_camera_mode(value: Option<String>) -> Result<(u32, u32, u32)> {
let raw = value
.context("missing value after --camera-mode")?
.trim()
.to_string();
let Some((width, rest)) = raw.split_once('x') else {
bail!("camera mode must look like WIDTHxHEIGHT@FPS\n{}", usage());
};
let Some((height, fps)) = rest.split_once('@') else {
bail!("camera mode must look like WIDTHxHEIGHT@FPS\n{}", usage());
};
let width = width
.parse::<u32>()
.with_context(|| format!("camera mode width must be numeric\n{}", usage()))?;
let height = height
.parse::<u32>()
.with_context(|| format!("camera mode height must be numeric\n{}", usage()))?;
let fps = fps
.parse::<u32>()
.with_context(|| format!("camera mode fps must be numeric\n{}", usage()))?;
if width == 0 || height == 0 || fps == 0 {
bail!(
"camera mode dimensions and fps must be positive\n{}",
usage()
);
}
Ok((width, height, fps))
}
/// Parse the event identity sequence for coded synthetic probes. /// Parse the event identity sequence for coded synthetic probes.
/// ///
/// Inputs: optional raw CLI value after `--event-width-codes`. /// Inputs: optional raw CLI value after `--event-width-codes`.
@ -212,6 +296,10 @@ mod tests {
assert_eq!(config.marker_tick_period, 5); assert_eq!(config.marker_tick_period, 5);
assert!(config.event_width_codes.is_empty()); assert!(config.event_width_codes.is_empty());
assert_eq!(config.timeline_json, None); assert_eq!(config.timeline_json, None);
assert_eq!(config.camera_codec, None);
assert_eq!(config.camera_width, None);
assert_eq!(config.camera_height, None);
assert_eq!(config.camera_fps, None);
} }
#[test] #[test]
@ -233,6 +321,10 @@ mod tests {
"1,2,3", "1,2,3",
"--timeline-json", "--timeline-json",
"/tmp/client-timeline.json", "/tmp/client-timeline.json",
"--camera-codec",
"hevc",
"--camera-mode",
"1920x1080@30",
]) ])
.expect("configured run"); .expect("configured run");
let ParseOutcome::Run(config) = outcome else { let ParseOutcome::Run(config) = outcome else {
@ -250,6 +342,13 @@ mod tests {
config.timeline_json, config.timeline_json,
Some("/tmp/client-timeline.json".into()) Some("/tmp/client-timeline.json".into())
); );
assert_eq!(
config.camera_codec,
Some(crate::input::camera::CameraCodec::Hevc)
);
assert_eq!(config.camera_width, Some(1920));
assert_eq!(config.camera_height, Some(1080));
assert_eq!(config.camera_fps, Some(30));
} }
#[test] #[test]
@ -260,6 +359,9 @@ mod tests {
assert!(parse_args_outcome_from(["--marker-tick-period", "0"]).is_err()); assert!(parse_args_outcome_from(["--marker-tick-period", "0"]).is_err());
assert!(parse_args_outcome_from(["--event-width-codes", "0"]).is_err()); assert!(parse_args_outcome_from(["--event-width-codes", "0"]).is_err());
assert!(parse_args_outcome_from(["--event-width-codes", "17"]).is_err()); assert!(parse_args_outcome_from(["--event-width-codes", "17"]).is_err());
assert!(parse_args_outcome_from(["--camera-mode", "1920x1080"]).is_err());
assert!(parse_args_outcome_from(["--camera-mode", "0x1080@30"]).is_err());
assert!(parse_args_outcome_from(["--camera-codec", "vp9"]).is_err());
} }
#[test] #[test]
@ -274,6 +376,8 @@ mod tests {
assert!(parse_args_outcome_from(["--duration-seconds"]).is_err()); assert!(parse_args_outcome_from(["--duration-seconds"]).is_err());
assert!(parse_args_outcome_from(["--marker-tick-period"]).is_err()); assert!(parse_args_outcome_from(["--marker-tick-period"]).is_err());
assert!(parse_args_outcome_from(["--timeline-json"]).is_err()); assert!(parse_args_outcome_from(["--timeline-json"]).is_err());
assert!(parse_args_outcome_from(["--camera-codec"]).is_err());
assert!(parse_args_outcome_from(["--camera-mode"]).is_err());
assert!(parse_args_outcome_from(["--wat"]).is_err()); assert!(parse_args_outcome_from(["--wat"]).is_err());
} }

View File

@ -43,8 +43,20 @@ async fn run_sync_probe(config: ProbeConfig) -> Result<()> {
if !caps.bundled_webcam_media { if !caps.bundled_webcam_media {
bail!("server does not advertise bundled webcam media; refusing to measure split upstream"); bail!("server does not advertise bundled webcam media; refusing to measure split upstream");
} }
let camera = app_support::camera_config_from_caps(&caps) let mut camera = app_support::camera_config_from_caps(&caps)
.context("server handshake did not include a complete camera profile")?; .context("server handshake did not include a complete camera profile")?;
if let Some(codec) = config.camera_codec {
camera.codec = codec;
}
if let Some(width) = config.camera_width {
camera.width = width;
}
if let Some(height) = config.camera_height {
camera.height = height;
}
if let Some(fps) = config.camera_fps {
camera.fps = fps;
}
let schedule = if config.event_width_codes.is_empty() { let schedule = if config.event_width_codes.is_empty() {
PulseSchedule::new( PulseSchedule::new(

View File

@ -1,6 +1,6 @@
[package] [package]
name = "lesavka_common" name = "lesavka_common"
version = "0.22.18" version = "0.22.19"
edition = "2024" edition = "2024"
build = "build.rs" build = "build.rs"

View File

@ -304,6 +304,49 @@ if [[ -z $UVC_INTERVAL ]]; then
UVC_INTERVAL=$((10000000 / UVC_FPS)) UVC_INTERVAL=$((10000000 / UVC_FPS))
fi fi
UVC_FRAME_SIZE=${LESAVKA_UVC_FRAME_SIZE:-$((UVC_WIDTH * UVC_HEIGHT * 2))} UVC_FRAME_SIZE=${LESAVKA_UVC_FRAME_SIZE:-$((UVC_WIDTH * UVC_HEIGHT * 2))}
UVC_INTERVAL_30=${LESAVKA_UVC_INTERVAL_30:-333333}
UVC_INTERVAL_20=${LESAVKA_UVC_INTERVAL_20:-500000}

# Map the requested UVC resolution onto the MJPEG frame-descriptor index
# (1 = 1080p, 2 = 720p). Any unrecognized mode falls back to the 720p
# descriptor, matching the original behaviour.
uvc_selected_frame_index() {
    if [[ "${UVC_WIDTH}x${UVC_HEIGHT}" == "1920x1080" ]]; then
        echo 1
    else
        echo 2
    fi
}
# Compute dwMaxVideoFrameBufferSize for one descriptor resolution.
# The operator override LESAVKA_UVC_FRAME_SIZE applies only to the
# resolution that was actually requested; every other profile gets the
# YUY2-sized default of width*height*2 bytes.
uvc_frame_size_for() {
    local width=$1 height=$2
    if [[ -n ${LESAVKA_UVC_FRAME_SIZE:-} && $width == "$UVC_WIDTH" && $height == "$UVC_HEIGHT" ]]; then
        echo "$LESAVKA_UVC_FRAME_SIZE"
        return
    fi
    echo $((width * height * 2))
}
# Choose dwDefaultFrameInterval for one descriptor resolution: the
# operator-configured UVC_INTERVAL for the requested mode, otherwise the
# 30 fps interval for the non-selected profile.
uvc_default_interval_for() {
    local width=$1 height=$2
    local interval="$UVC_INTERVAL_30"
    if [[ $width == "$UVC_WIDTH" && $height == "$UVC_HEIGHT" ]]; then
        interval="$UVC_INTERVAL"
    fi
    echo "$interval"
}
# Materialize one MJPEG frame descriptor under the UVC gadget configfs tree.
# $1 = descriptor directory name (e.g. 1080p), $2 = width, $3 = height.
# Writes wWidth/wHeight, the max frame-buffer size, the default interval,
# and a two-entry dwFrameInterval list (the 30 fps and 20 fps intervals).
# NOTE(review): when UVC_INTERVAL matches neither listed interval the
# default is not a member of dwFrameInterval — confirm the UVC host stack
# tolerates that (the live-check helper falls back to an equality test).
write_mjpeg_frame_descriptor() {
    local name=$1 width=$2 height=$3
    local frame="$F/streaming/mjpeg/m/$name"
    mkdir -p "$frame"
    echo 0 >"$frame/bmCapabilities"
    echo "$width" >"$frame/wWidth"
    echo "$height" >"$frame/wHeight"
    echo "$(uvc_frame_size_for "$width" "$height")" >"$frame/dwMaxVideoFrameBufferSize"
    echo "$(uvc_default_interval_for "$width" "$height")" >"$frame/dwDefaultFrameInterval"
    cat <<EOF >"$frame/dwFrameInterval"
${UVC_INTERVAL_30}
${UVC_INTERVAL_20}
EOF
}
wait_for_enum() { wait_for_enum() {
local tries=${1:-50} # 50 x 100ms = 5s local tries=${1:-50} # 50 x 100ms = 5s
@ -516,19 +559,11 @@ if [[ -z $DISABLE_UVC ]]; then
# ── 1. FORMAT DESCRIPTOR ────────────────────────────────────────── # ── 1. FORMAT DESCRIPTOR ──────────────────────────────────────────
if [[ "$UVC_CODEC" == "mjpeg" ]]; then if [[ "$UVC_CODEC" == "mjpeg" ]]; then
mkdir -p "$F/streaming/mjpeg/m" mkdir -p "$F/streaming/mjpeg/m"
echo 1 >"$F/streaming/mjpeg/m/bDefaultFrameIndex" 2>/dev/null || true echo "$(uvc_selected_frame_index)" >"$F/streaming/mjpeg/m/bDefaultFrameIndex" 2>/dev/null || true
echo 0 >"$F/streaming/mjpeg/m/bmaControls" 2>/dev/null || true echo 0 >"$F/streaming/mjpeg/m/bmaControls" 2>/dev/null || true
mkdir -p "$F/streaming/mjpeg/m/720p" write_mjpeg_frame_descriptor 1080p 1920 1080
echo 0 >"$F/streaming/mjpeg/m/720p/bmCapabilities" write_mjpeg_frame_descriptor 720p 1280 720
echo "$UVC_WIDTH" >"$F/streaming/mjpeg/m/720p/wWidth"
echo "$UVC_HEIGHT" >"$F/streaming/mjpeg/m/720p/wHeight"
echo "$UVC_FRAME_SIZE" >"$F/streaming/mjpeg/m/720p/dwMaxVideoFrameBufferSize"
echo "$UVC_INTERVAL" >"$F/streaming/mjpeg/m/720p/dwDefaultFrameInterval"
cat <<EOF >"$F/streaming/mjpeg/m/720p/dwFrameInterval"
${UVC_INTERVAL}
$((UVC_INTERVAL * 2))
EOF
else else
# uncompressed YUY2, 16 bpp # uncompressed YUY2, 16 bpp
mkdir -p "$F/streaming/uncompressed/yuyv" mkdir -p "$F/streaming/uncompressed/yuyv"

View File

@ -552,7 +552,11 @@ live_uvc_descriptor_matches_request() {
case "$(live_uvc_descriptor_codec)" in case "$(live_uvc_descriptor_codec)" in
mjpeg) mjpeg)
[[ "$INSTALL_UVC_CODEC" == "mjpeg" ]] || return 1 [[ "$INSTALL_UVC_CODEC" == "mjpeg" ]] || return 1
frame_root="$function_root/streaming/mjpeg/m/720p" case "${LESAVKA_UVC_WIDTH:-1280}x${LESAVKA_UVC_HEIGHT:-720}" in
1920x1080) frame_root="$function_root/streaming/mjpeg/m/1080p" ;;
1280x720) frame_root="$function_root/streaming/mjpeg/m/720p" ;;
*) return 1 ;;
esac
;; ;;
yuyv) yuyv)
[[ "$INSTALL_UVC_CODEC" == "yuyv" ]] || return 1 [[ "$INSTALL_UVC_CODEC" == "yuyv" ]] || return 1
@ -565,7 +569,8 @@ live_uvc_descriptor_matches_request() {
[[ -r "$frame_root/wWidth" && -r "$frame_root/wHeight" && -r "$frame_root/dwDefaultFrameInterval" ]] || return 1 [[ -r "$frame_root/wWidth" && -r "$frame_root/wHeight" && -r "$frame_root/dwDefaultFrameInterval" ]] || return 1
[[ "$(cat "$frame_root/wWidth" 2>/dev/null || true)" == "${LESAVKA_UVC_WIDTH:-1280}" ]] || return 1 [[ "$(cat "$frame_root/wWidth" 2>/dev/null || true)" == "${LESAVKA_UVC_WIDTH:-1280}" ]] || return 1
[[ "$(cat "$frame_root/wHeight" 2>/dev/null || true)" == "${LESAVKA_UVC_HEIGHT:-720}" ]] || return 1 [[ "$(cat "$frame_root/wHeight" 2>/dev/null || true)" == "${LESAVKA_UVC_HEIGHT:-720}" ]] || return 1
[[ "$(cat "$frame_root/dwDefaultFrameInterval" 2>/dev/null || true)" == "${LESAVKA_UVC_INTERVAL:-333333}" ]] grep -qx "${LESAVKA_UVC_INTERVAL:-333333}" "$frame_root/dwFrameInterval" 2>/dev/null \
|| [[ "$(cat "$frame_root/dwDefaultFrameInterval" 2>/dev/null || true)" == "${LESAVKA_UVC_INTERVAL:-333333}" ]]
} }
live_uac_descriptor_matches_request() { live_uac_descriptor_matches_request() {

View File

@ -0,0 +1,184 @@
#!/usr/bin/env bash
# scripts/manual/client_rct_remote_capture.sh
# Remote Tethys-side UVC/UAC capture helper for run_client_to_rct_transport_probe.sh.
set -euo pipefail

# Positional arguments supplied by the driver script:
remote_capture=$1     # output capture file path on the remote host
capture_seconds=$2    # recording duration in seconds
width=$3              # requested video width (0 = auto-detect current mode)
height=$4             # requested video height (0 = auto-detect)
fps=$5                # requested frame rate (0 = auto-detect)
capture_stack=$6      # audio capture stack selector ("pulse" is the only one handled)
pulse_tool=$7         # tool for the pulse stack: gst or ffmpeg
video_mode=$8         # "cfr" re-encodes to constant frame rate; otherwise MJPEG is muxed as-is
anchor_silence=$9     # non-"0": mix generated silence to anchor the audio timeline
preroll_discard=${10} # seconds of capture to throw away after enumeration
preroll_settle=${11}  # extra sleep after the preroll discard
ready_settle=${12}    # sleep before announcing readiness to the driver
ready_marker=${13}    # literal marker line the driver waits for on stderr
# Locate the Lesavka composite gadget's primary V4L2 capture node. Only
# /dev/v4l/by-id symlinks are considered so an unrelated webcam is never
# picked up; sorting keeps the choice deterministic when several match.
resolve_video_device() {
    local candidates
    candidates=$(find /dev/v4l/by-id -maxdepth 1 -type l \
        -name 'usb-Lesavka_Lesavka_Composite*video-index0' | sort)
    printf '%s\n' "${candidates}" | head -n 1
}
# Pick the Pulse source belonging to the Lesavka composite gadget.
# Prefers the alsa_input whose name embeds the gadget id; otherwise falls
# back to the first non-monitor source that mentions the gadget. Prints
# nothing and exits non-zero when no candidate exists (callers treat that
# as fatal rather than capturing from an unrelated device).
resolve_pulse_source() {
    pactl list short sources 2>/dev/null \
        | awk '
            /alsa_input\..*Lesavka_Lesavka_Composite/ { print $2; found=1; exit }
            /Lesavka_Lesavka_Composite/ && $2 !~ /\.monitor$/ && !fallback { fallback=$2 }
            END {
                if (found) exit 0
                if (fallback != "") { print fallback; exit 0 }
                exit 1
            }
        '
}
# Report the UVC device's currently negotiated mode as "WIDTH HEIGHT FPS".
# Parses `v4l2-ctl --all` output: the "Width/Height : W/H" line and the
# integer part of "Frames per second". Exits non-zero when any field is
# missing so the caller can require an explicit LESAVKA_CLIENT_RCT_MODE.
# Uses the global ${video_device} resolved earlier in the script.
current_video_profile() {
    v4l2-ctl -d "${video_device}" --all 2>/dev/null \
        | awk '
            /Width\/Height[[:space:]]*:/ {
                split($0, a, ":")
                gsub(/^[ \t]+/, "", a[2])
                split(a[2], wh, "/")
                width=wh[1]
                height=wh[2]
                next
            }
            /Frames per second[[:space:]]*:/ {
                split($0, a, ":")
                gsub(/^[ \t]+/, "", a[2])
                split(a[2], fps_parts, "\\.")
                fps=fps_parts[1]
            }
            END {
                if (width && height && fps) {
                    printf "%s %s %s\n", width, height, fps
                    exit 0
                }
                exit 1
            }
        '
}
# Emit the audiomixer element spec, opting into ignore-inactive-pads when
# this GStreamer build advertises the property (keeps the mixer from
# stalling on a pad that never produces buffers).
gst_audio_mixer_element() {
    local element='audiomixer name=amix'
    if gst-inspect-1.0 audiomixer 2>/dev/null | grep -q 'ignore-inactive-pads'; then
        element+=' ignore-inactive-pads=true'
    fi
    printf '%s' "${element}"
}
# Capture and discard $2 seconds of frames from device $1 so that
# post-enumeration startup artifacts never land in the measured
# recording. Uses the global width/height/fps caps; failures are ignored
# because the preroll is best-effort warm-up only.
run_preroll() {
    local video_device=$1
    local seconds=$2
    # No-op unless a positive integer number of seconds was requested.
    [[ "${seconds}" =~ ^[0-9]+$ && "${seconds}" -gt 0 ]] || return 0
    printf 'discarding %ss of post-enumeration capture before probe\n' "${seconds}" >&2
    timeout --kill-after=5 --signal=INT "$((seconds + 5))" \
        gst-launch-1.0 -q -e v4l2src device="${video_device}" do-timestamp=true num-buffers="$((fps * seconds))" \
        ! "image/jpeg,width=${width},height=${height},framerate=${fps}/1" ! fakesink \
        >/dev/null 2>&1 || true
    # Optional extra settle so device timing stabilizes before real capture.
    if [[ "${preroll_settle}" =~ ^[0-9]+$ && "${preroll_settle}" -gt 0 ]]; then
        printf 'settling %ss after preroll discard\n' "${preroll_settle}" >&2
        sleep "${preroll_settle}"
    fi
}
# Record muxed video+audio with GStreamer into ${remote_capture}.
# $1 = v4l2 device, $2 = Pulse source name.
# In "cfr" mode the MJPEG stream is decoded and re-encoded as
# constant-frame-rate H.264 so downstream timing analysis sees uniform
# frame spacing; otherwise the MJPEG frames are muxed untouched. Audio
# always flows through an audiomixer so an optional generated-silence
# branch can anchor the audio timeline from t=0.
run_gst_pulse_capture() {
    local video_device=$1
    local pulse_source=$2
    local video_caps="image/jpeg,width=${width},height=${height},framerate=${fps}/1"
    local decode_chain="jpegdec !"
    local audio_mixer
    audio_mixer="$(gst_audio_mixer_element)"
    local audio_anchor=()
    if [[ "${anchor_silence}" != "0" ]]; then
        printf 'anchoring Pulse capture audio timeline with generated silence\n' >&2
        audio_anchor=(audiotestsrc wave=silence is-live=true do-timestamp=true ! "audio/x-raw,rate=48000,channels=2" ! queue ! amix.)
    fi
    # Stamp the wall-clock start so the driver can align client/remote timelines.
    printf 'capture_start_unix_ns=%s\n' "$(date +%s%N)" >&2
    if [[ "${video_mode}" == "cfr" ]]; then
        # CFR path: decode MJPEG, enforce a constant rate, re-encode H.264.
        timeout --kill-after=5 --signal=INT "$((capture_seconds + 3))" \
            gst-launch-1.0 -q -e \
            matroskamux name=mux ! filesink location="${remote_capture}" \
            v4l2src device="${video_device}" do-timestamp=true ! ${video_caps} ! \
            ${decode_chain} videoconvert ! videorate ! video/x-raw,framerate="${fps}"/1 ! \
            x264enc tune=zerolatency speed-preset=ultrafast key-int-max=1 bitrate=5000 ! \
            h264parse ! queue ! mux. \
            ${audio_mixer} ! audio/x-raw,rate=48000,channels=2 ! queue ! mux. \
            "${audio_anchor[@]}" \
            pulsesrc device="${pulse_source}" do-timestamp=true ! audio/x-raw,rate=48000,channels=2 ! \
            audioconvert ! audioresample ! audio/x-raw,rate=48000,channels=2 ! queue ! amix. &
    else
        # Pass-through path: mux the MJPEG frames without re-encoding.
        timeout --kill-after=5 --signal=INT "$((capture_seconds + 3))" \
            gst-launch-1.0 -q -e \
            matroskamux name=mux ! filesink location="${remote_capture}" \
            v4l2src device="${video_device}" do-timestamp=true ! ${video_caps} ! queue ! mux. \
            ${audio_mixer} ! audio/x-raw,rate=48000,channels=2 ! queue ! mux. \
            "${audio_anchor[@]}" \
            pulsesrc device="${pulse_source}" do-timestamp=true ! audio/x-raw,rate=48000,channels=2 ! \
            audioconvert ! audioresample ! audio/x-raw,rate=48000,channels=2 ! queue ! amix. &
    fi
    local capture_pid=$!
    # Give the pipeline a moment, then tell the driver we are recording.
    sleep "${ready_settle}"
    printf '%s\n' "${ready_marker}" >&2
    wait "${capture_pid}"
}
# Record with ffmpeg instead of GStreamer: the MJPEG video stream is
# stream-copied (no re-encode) and the Pulse audio is stored as
# pcm_s16le. $1 = v4l2 device, $2 = Pulse source name.
run_ffmpeg_pulse_capture() {
    local video_device=$1
    local pulse_source=$2
    # Stamp the wall-clock start so the driver can align timelines.
    printf 'capture_start_unix_ns=%s\n' "$(date +%s%N)" >&2
    timeout --kill-after=5 --signal=INT "$((capture_seconds + 5))" \
        ffmpeg -nostdin -hide_banner -loglevel error -y \
        -thread_queue_size 1024 -f video4linux2 -framerate "${fps}" \
        -video_size "${width}x${height}" -input_format mjpeg -i "${video_device}" \
        -thread_queue_size 1024 -f pulse -i "${pulse_source}" \
        -map 0:v:0 -map 1:a:0 -t "${capture_seconds}" \
        -c:v copy -c:a pcm_s16le "${remote_capture}" </dev/null &
    local capture_pid=$!
    # Give the pipeline a moment, then tell the driver we are recording.
    sleep "${ready_settle}"
    printf '%s\n' "${ready_marker}" >&2
    wait "${capture_pid}"
}
# ── main ──────────────────────────────────────────────────────────────
# Resolve devices, auto-detect the mode if needed, then dispatch to the
# requested capture tool. Any ambiguity exits 2 instead of falling back
# to an unrelated (and timing-meaningless) capture device.
rm -f "${remote_capture}"
video_device="$(resolve_video_device)"
if [[ -z "${video_device}" ]]; then
    printf 'Lesavka UVC video device not found on RCT host; refusing unrelated capture devices.\n' >&2
    exit 2
fi
# A 0 in any dimension means "use whatever mode the device negotiated".
if [[ "${width}" == "0" || "${height}" == "0" || "${fps}" == "0" ]]; then
    if read -r width height fps < <(current_video_profile); then
        :
    else
        printf 'unable to auto-detect current UVC mode; set LESAVKA_CLIENT_RCT_MODE=WIDTHxHEIGHT@FPS\n' >&2
        exit 2
    fi
fi
printf 'using video device: %s\n' "${video_device}" >&2
printf 'using video mode: %sx%s @ %s fps (mjpeg)\n' "${width}" "${height}" "${fps}" >&2
case "${capture_stack}" in
    pulse)
        pulse_source="$(resolve_pulse_source)"
        if [[ -z "${pulse_source}" ]]; then
            printf 'Lesavka Pulse audio source not found; refusing timing-sensitive fallback.\n' >&2
            exit 2
        fi
        printf 'using Pulse source: %s\n' "${pulse_source}" >&2
        # Discard warm-up frames before the tool-specific capture starts.
        run_preroll "${video_device}" "${preroll_discard}"
        case "${pulse_tool}" in
            gst) run_gst_pulse_capture "${video_device}" "${pulse_source}" ;;
            ffmpeg) run_ffmpeg_pulse_capture "${video_device}" "${pulse_source}" ;;
            *)
                printf 'unsupported REMOTE_PULSE_CAPTURE_TOOL=%s for client-to-RCT probe; use gst or ffmpeg\n' "${pulse_tool}" >&2
                exit 2
                ;;
        esac
        ;;
    *)
        printf 'unsupported REMOTE_CAPTURE_STACK=%s for client-to-RCT probe\n' "${capture_stack}" >&2
        exit 2
        ;;
esac

View File

@ -14,6 +14,9 @@ SERVER_TUNNEL_REMOTE_PORT=${SERVER_TUNNEL_REMOTE_PORT:-50051}
SSH_OPTS=${SSH_OPTS:-"-o BatchMode=yes -o ConnectTimeout=5"} SSH_OPTS=${SSH_OPTS:-"-o BatchMode=yes -o ConnectTimeout=5"}
LESAVKA_CLIENT_RCT_MODE=${LESAVKA_CLIENT_RCT_MODE:-auto} LESAVKA_CLIENT_RCT_MODE=${LESAVKA_CLIENT_RCT_MODE:-auto}
LESAVKA_CLIENT_RCT_UPSTREAM_MODE=${LESAVKA_CLIENT_RCT_UPSTREAM_MODE:-${LESAVKA_CLIENT_RCT_MODE}}
LESAVKA_CLIENT_RCT_UPSTREAM_CAMERA_CODEC=${LESAVKA_CLIENT_RCT_UPSTREAM_CAMERA_CODEC:-auto}
LESAVKA_CLIENT_RCT_UPSTREAM_AUDIO_CODEC=${LESAVKA_CLIENT_RCT_UPSTREAM_AUDIO_CODEC:-${LESAVKA_UPLINK_AUDIO_CODEC:-opus}}
REMOTE_CAPTURE_STACK=${REMOTE_CAPTURE_STACK:-pulse} REMOTE_CAPTURE_STACK=${REMOTE_CAPTURE_STACK:-pulse}
REMOTE_PULSE_CAPTURE_TOOL=${REMOTE_PULSE_CAPTURE_TOOL:-gst} REMOTE_PULSE_CAPTURE_TOOL=${REMOTE_PULSE_CAPTURE_TOOL:-gst}
REMOTE_PULSE_VIDEO_MODE=${REMOTE_PULSE_VIDEO_MODE:-cfr} REMOTE_PULSE_VIDEO_MODE=${REMOTE_PULSE_VIDEO_MODE:-cfr}
@ -193,169 +196,7 @@ start_tethys_capture() {
"${REMOTE_CAPTURE_PREROLL_SETTLE_SECONDS}" \ "${REMOTE_CAPTURE_PREROLL_SETTLE_SECONDS}" \
"${REMOTE_CAPTURE_READY_SETTLE_SECONDS}" \ "${REMOTE_CAPTURE_READY_SETTLE_SECONDS}" \
"${CAPTURE_READY_MARKER}" \ "${CAPTURE_READY_MARKER}" \
>"${LOCAL_CAPTURE_LOG}" 2>&1 <<'REMOTE_CAPTURE_SCRIPT' & >"${LOCAL_CAPTURE_LOG}" 2>&1 <"${SCRIPT_DIR}/client_rct_remote_capture.sh" &
set -euo pipefail
remote_capture=$1
capture_seconds=$2
width=$3
height=$4
fps=$5
capture_stack=$6
pulse_tool=$7
video_mode=$8
anchor_silence=$9
preroll_discard=${10}
preroll_settle=${11}
ready_settle=${12}
ready_marker=${13}
resolve_video_device() {
find /dev/v4l/by-id -maxdepth 1 -type l \
-name 'usb-Lesavka_Lesavka_Composite*video-index0' | sort | head -n 1
}
resolve_pulse_source() {
pactl list short sources 2>/dev/null \
| awk '
/alsa_input\..*Lesavka_Lesavka_Composite/ { print $2; found=1; exit }
/Lesavka_Lesavka_Composite/ && $2 !~ /\.monitor$/ && !fallback { fallback=$2 }
END {
if (found) exit 0
if (fallback != "") { print fallback; exit 0 }
exit 1
}
'
}
current_video_profile() {
v4l2-ctl -d "${video_device}" --all 2>/dev/null \
| awk '
/Width\/Height[[:space:]]*:/ {
split($0, a, ":")
gsub(/^[ \t]+/, "", a[2])
split(a[2], wh, "/")
width=wh[1]
height=wh[2]
next
}
/Frames per second[[:space:]]*:/ {
split($0, a, ":")
gsub(/^[ \t]+/, "", a[2])
split(a[2], fps_parts, "\\.")
fps=fps_parts[1]
}
END {
if (width && height && fps) {
printf "%s %s %s\n", width, height, fps
exit 0
}
exit 1
}
'
}
gst_audio_mixer_element() {
if gst-inspect-1.0 audiomixer 2>/dev/null | grep -q 'ignore-inactive-pads'; then
printf 'audiomixer name=amix ignore-inactive-pads=true'
else
printf 'audiomixer name=amix'
fi
}
run_preroll() {
local video_device=$1
local seconds=$2
[[ "${seconds}" =~ ^[0-9]+$ && "${seconds}" -gt 0 ]] || return 0
printf 'discarding %ss of post-enumeration capture before probe\n' "${seconds}" >&2
timeout --kill-after=5 --signal=INT "$((seconds + 5))" \
gst-launch-1.0 -q -e v4l2src device="${video_device}" do-timestamp=true num-buffers="$((fps * seconds))" \
! "image/jpeg,width=${width},height=${height},framerate=${fps}/1" ! fakesink \
>/dev/null 2>&1 || true
if [[ "${preroll_settle}" =~ ^[0-9]+$ && "${preroll_settle}" -gt 0 ]]; then
printf 'settling %ss after preroll discard\n' "${preroll_settle}" >&2
sleep "${preroll_settle}"
fi
}
run_gst_pulse_capture() {
local video_device=$1
local pulse_source=$2
local video_caps="image/jpeg,width=${width},height=${height},framerate=${fps}/1"
local decode_chain="jpegdec !"
local audio_mixer
audio_mixer="$(gst_audio_mixer_element)"
local audio_anchor=()
if [[ "${anchor_silence}" != "0" ]]; then
printf 'anchoring Pulse capture audio timeline with generated silence\n' >&2
audio_anchor=(audiotestsrc wave=silence is-live=true do-timestamp=true ! "audio/x-raw,rate=48000,channels=2" ! queue ! amix.)
fi
printf 'capture_start_unix_ns=%s\n' "$(date +%s%N)" >&2
if [[ "${video_mode}" == "cfr" ]]; then
timeout --kill-after=5 --signal=INT "$((capture_seconds + 3))" \
gst-launch-1.0 -q -e \
matroskamux name=mux ! filesink location="${remote_capture}" \
v4l2src device="${video_device}" do-timestamp=true ! ${video_caps} ! \
${decode_chain} videoconvert ! videorate ! video/x-raw,framerate="${fps}"/1 ! \
x264enc tune=zerolatency speed-preset=ultrafast key-int-max=1 bitrate=5000 ! \
h264parse ! queue ! mux. \
${audio_mixer} ! audio/x-raw,rate=48000,channels=2 ! queue ! mux. \
"${audio_anchor[@]}" \
pulsesrc device="${pulse_source}" do-timestamp=true ! audio/x-raw,rate=48000,channels=2 ! \
audioconvert ! audioresample ! audio/x-raw,rate=48000,channels=2 ! queue ! amix. &
else
timeout --kill-after=5 --signal=INT "$((capture_seconds + 3))" \
gst-launch-1.0 -q -e \
matroskamux name=mux ! filesink location="${remote_capture}" \
v4l2src device="${video_device}" do-timestamp=true ! ${video_caps} ! queue ! mux. \
${audio_mixer} ! audio/x-raw,rate=48000,channels=2 ! queue ! mux. \
"${audio_anchor[@]}" \
pulsesrc device="${pulse_source}" do-timestamp=true ! audio/x-raw,rate=48000,channels=2 ! \
audioconvert ! audioresample ! audio/x-raw,rate=48000,channels=2 ! queue ! amix. &
fi
local capture_pid=$!
sleep "${ready_settle}"
printf '%s\n' "${ready_marker}" >&2
wait "${capture_pid}"
}
rm -f "${remote_capture}"
video_device="$(resolve_video_device)"
if [[ -z "${video_device}" ]]; then
printf 'Lesavka UVC video device not found on RCT host; refusing unrelated capture devices.\n' >&2
exit 2
fi
if [[ "${width}" == "0" || "${height}" == "0" || "${fps}" == "0" ]]; then
if read -r width height fps < <(current_video_profile); then
:
else
printf 'unable to auto-detect current UVC mode; set LESAVKA_CLIENT_RCT_MODE=WIDTHxHEIGHT@FPS\n' >&2
exit 2
fi
fi
printf 'using video device: %s\n' "${video_device}" >&2
printf 'using video mode: %sx%s @ %s fps (mjpeg)\n' "${width}" "${height}" "${fps}" >&2
case "${capture_stack}" in
pulse)
if [[ "${pulse_tool}" != "gst" ]]; then
printf 'unsupported REMOTE_PULSE_CAPTURE_TOOL=%s for client-to-RCT probe; use gst\n' "${pulse_tool}" >&2
exit 2
fi
pulse_source="$(resolve_pulse_source)"
if [[ -z "${pulse_source}" ]]; then
printf 'Lesavka Pulse audio source not found; refusing timing-sensitive fallback.\n' >&2
exit 2
fi
printf 'using Pulse source: %s\n' "${pulse_source}" >&2
run_preroll "${video_device}" "${preroll_discard}"
run_gst_pulse_capture "${video_device}" "${pulse_source}"
;;
*)
printf 'unsupported REMOTE_CAPTURE_STACK=%s for client-to-RCT probe\n' "${capture_stack}" >&2
exit 2
;;
esac
REMOTE_CAPTURE_SCRIPT
CAPTURE_PID=$! CAPTURE_PID=$!
} }
@ -377,9 +218,17 @@ wait_for_capture_ready() {
run_client_sync_probe() { run_client_sync_probe() {
echo "==> running client-origin bundled transport probe against ${RESOLVED_LESAVKA_SERVER_ADDR}" echo "==> running client-origin bundled transport probe against ${RESOLVED_LESAVKA_SERVER_ADDR}"
local camera_args=()
if [[ "${LESAVKA_CLIENT_RCT_UPSTREAM_MODE}" != "auto" ]]; then
camera_args+=(--camera-mode "${LESAVKA_CLIENT_RCT_UPSTREAM_MODE}")
fi
if [[ "${LESAVKA_CLIENT_RCT_UPSTREAM_CAMERA_CODEC}" != "auto" ]]; then
camera_args+=(--camera-codec "${LESAVKA_CLIENT_RCT_UPSTREAM_CAMERA_CODEC}")
fi
( (
cd "${REPO_ROOT}" cd "${REPO_ROOT}"
LESAVKA_TLS_DOMAIN="${LESAVKA_TLS_DOMAIN}" \ LESAVKA_TLS_DOMAIN="${LESAVKA_TLS_DOMAIN}" \
LESAVKA_UPLINK_AUDIO_CODEC="${LESAVKA_CLIENT_RCT_UPSTREAM_AUDIO_CODEC}" \
LESAVKA_SYNC_PROBE_SEND_LOG="${LOCAL_CLIENT_SEND_JSONL}" \ LESAVKA_SYNC_PROBE_SEND_LOG="${LOCAL_CLIENT_SEND_JSONL}" \
timeout --signal=INT "${PROBE_TIMEOUT_SECONDS}" \ timeout --signal=INT "${PROBE_TIMEOUT_SECONDS}" \
"${REPO_ROOT}/target/debug/lesavka-sync-probe" \ "${REPO_ROOT}/target/debug/lesavka-sync-probe" \
@ -390,7 +239,8 @@ run_client_sync_probe() {
--pulse-width-ms "${PROBE_PULSE_WIDTH_MS}" \ --pulse-width-ms "${PROBE_PULSE_WIDTH_MS}" \
--marker-tick-period "${PROBE_MARKER_TICK_PERIOD}" \ --marker-tick-period "${PROBE_MARKER_TICK_PERIOD}" \
--event-width-codes "${PROBE_EVENT_WIDTH_CODES}" \ --event-width-codes "${PROBE_EVENT_WIDTH_CODES}" \
--timeline-json "${LOCAL_CLIENT_TIMELINE_JSON}" --timeline-json "${LOCAL_CLIENT_TIMELINE_JSON}" \
"${camera_args[@]}"
) )
} }
@ -457,6 +307,7 @@ read -r MODE_WIDTH MODE_HEIGHT MODE_FPS < <(parse_mode "${LESAVKA_CLIENT_RCT_MOD
echo "==> client-to-RCT bundled transport probe" echo "==> client-to-RCT bundled transport probe"
echo " ↪ mode=${LESAVKA_CLIENT_RCT_MODE}" echo " ↪ mode=${LESAVKA_CLIENT_RCT_MODE}"
echo " ↪ upstream_mode=${LESAVKA_CLIENT_RCT_UPSTREAM_MODE} upstream_camera_codec=${LESAVKA_CLIENT_RCT_UPSTREAM_CAMERA_CODEC} upstream_audio_codec=${LESAVKA_CLIENT_RCT_UPSTREAM_AUDIO_CODEC}"
echo " ↪ capture_stack=${REMOTE_CAPTURE_STACK} pulse_tool=${REMOTE_PULSE_CAPTURE_TOOL} video_mode=${REMOTE_PULSE_VIDEO_MODE}" echo " ↪ capture_stack=${REMOTE_CAPTURE_STACK} pulse_tool=${REMOTE_PULSE_CAPTURE_TOOL} video_mode=${REMOTE_PULSE_VIDEO_MODE}"
echo " ↪ server_addr=${LESAVKA_SERVER_ADDR}" echo " ↪ server_addr=${LESAVKA_SERVER_ADDR}"
echo " ↪ max_client_to_rct_age_ms=${LESAVKA_CLIENT_RCT_MAX_AGE_MS}" echo " ↪ max_client_to_rct_age_ms=${LESAVKA_CLIENT_RCT_MAX_AGE_MS}"

View File

@ -10,7 +10,7 @@ bench = false
[package] [package]
name = "lesavka_server" name = "lesavka_server"
version = "0.22.18" version = "0.22.19"
edition = "2024" edition = "2024"
autobins = false autobins = false

View File

@ -1270,9 +1270,12 @@ fn sanitize_streaming_control(data: &[u8], state: &UvcState) -> [u8; STREAM_CTRL
if format_index == 1 { if format_index == 1 {
out[2] = 1; out[2] = 1;
} }
if frame_index == 1 { let frame_index = uvc_frame_index_for_request(frame_index, &state.cfg);
out[3] = 1; out[3] = frame_index;
} write_le32(
&mut out[18..22],
uvc_frame_size_for_index(frame_index, state.cfg.frame_size),
);
if interval != 0 { if interval != 0 {
write_le32(&mut out[4..8], interval); write_le32(&mut out[4..8], interval);
} }
@ -1320,7 +1323,7 @@ fn build_streaming_control(cfg: &UvcConfig, ctrl_len: usize) -> [u8; STREAM_CTRL
write_le16(&mut buf[0..2], 1); // bmHint: dwFrameInterval write_le16(&mut buf[0..2], 1); // bmHint: dwFrameInterval
buf[2] = 1; // bFormatIndex buf[2] = 1; // bFormatIndex
buf[3] = 1; // bFrameIndex buf[3] = uvc_frame_index_for_mode(cfg.width, cfg.height); // bFrameIndex
write_le32(&mut buf[4..8], cfg.interval); write_le32(&mut buf[4..8], cfg.interval);
write_le16(&mut buf[8..10], 0); write_le16(&mut buf[8..10], 0);
write_le16(&mut buf[10..12], 0); write_le16(&mut buf[10..12], 0);
@ -1340,6 +1343,29 @@ fn build_streaming_control(cfg: &UvcConfig, ctrl_len: usize) -> [u8; STREAM_CTRL
buf buf
} }
fn uvc_frame_index_for_mode(width: u32, height: u32) -> u8 {
match (width, height) {
(1920, 1080) => 1,
(1280, 720) => 2,
_ => 2,
}
}
fn uvc_frame_index_for_request(requested: u8, cfg: &UvcConfig) -> u8 {
match requested {
1 | 2 => requested,
_ => uvc_frame_index_for_mode(cfg.width, cfg.height),
}
}
fn uvc_frame_size_for_index(frame_index: u8, fallback: u32) -> u32 {
match frame_index {
1 => 1920 * 1080 * 2,
2 => 1280 * 720 * 2,
_ => fallback,
}
}
fn event_bytes(ev: &V4l2Event) -> [u8; 64] { fn event_bytes(ev: &V4l2Event) -> [u8; 64] {
unsafe { ev.u.data } unsafe { ev.u.data }
} }
@ -1404,19 +1430,14 @@ fn read_u32_first(path: &str) -> Option<u32> {
} }
fn read_configfs_snapshot() -> Option<ConfigfsSnapshot> { fn read_configfs_snapshot() -> Option<ConfigfsSnapshot> {
let width = read_u32_file(&format!( let frame = configfs_frame_name_from_env();
"{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/wWidth" let frame_root = format!("{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/{frame}");
))?; let width = read_u32_file(&format!("{frame_root}/wWidth"))?;
let height = read_u32_file(&format!( let height = read_u32_file(&format!("{frame_root}/wHeight"))?;
"{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/wHeight"
))?;
let default_interval = read_u32_file(&format!( let default_interval = read_u32_file(&format!(
"{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/dwDefaultFrameInterval" "{frame_root}/dwDefaultFrameInterval"
))?; ))?;
let frame_interval = read_u32_first(&format!( let frame_interval = read_u32_first(&format!("{frame_root}/dwFrameInterval")).unwrap_or(0);
"{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/dwFrameInterval"
))
.unwrap_or(0);
let maxpacket = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxpacket"))?; let maxpacket = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxpacket"))?;
let maxburst = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxburst")).unwrap_or(0); let maxburst = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxburst")).unwrap_or(0);
Some(ConfigfsSnapshot { Some(ConfigfsSnapshot {
@ -1429,6 +1450,16 @@ fn read_configfs_snapshot() -> Option<ConfigfsSnapshot> {
}) })
} }
fn configfs_frame_name_from_env() -> &'static str {
match (
env_u32("LESAVKA_UVC_WIDTH", 1280),
env_u32("LESAVKA_UVC_HEIGHT", 720),
) {
(1920, 1080) => "1080p",
_ => "720p",
}
}
fn log_configfs_snapshot(state: &mut UvcState, label: &str) { fn log_configfs_snapshot(state: &mut UvcState, label: &str) {
let Some(current) = read_configfs_snapshot() else { let Some(current) = read_configfs_snapshot() else {
eprintln!("[lesavka-uvc] configfs {label}: unavailable"); eprintln!("[lesavka-uvc] configfs {label}: unavailable");

View File

@ -15,7 +15,7 @@ fn build_streaming_control(cfg: &UvcConfig, ctrl_len: usize) -> [u8; STREAM_CTRL
write_le16(&mut buf[0..2], 1); write_le16(&mut buf[0..2], 1);
buf[2] = 1; buf[2] = 1;
buf[3] = 1; buf[3] = uvc_frame_index_for_mode(cfg.width, cfg.height);
write_le32(&mut buf[4..8], cfg.interval); write_le32(&mut buf[4..8], cfg.interval);
write_le16(&mut buf[8..10], 0); write_le16(&mut buf[8..10], 0);
write_le16(&mut buf[10..12], 0); write_le16(&mut buf[10..12], 0);
@ -35,6 +35,29 @@ fn build_streaming_control(cfg: &UvcConfig, ctrl_len: usize) -> [u8; STREAM_CTRL
buf buf
} }
fn uvc_frame_index_for_mode(width: u32, height: u32) -> u8 {
match (width, height) {
(1920, 1080) => 1,
(1280, 720) => 2,
_ => 2,
}
}
fn uvc_frame_index_for_request(requested: u8, cfg: &UvcConfig) -> u8 {
match requested {
1 | 2 => requested,
_ => uvc_frame_index_for_mode(cfg.width, cfg.height),
}
}
fn uvc_frame_size_for_index(frame_index: u8, fallback: u32) -> u32 {
match frame_index {
1 => 1920 * 1080 * 2,
2 => 1280 * 720 * 2,
_ => fallback,
}
}
#[cfg(coverage)] #[cfg(coverage)]
fn parse_ctrl_request(data: [u8; 64]) -> UsbCtrlRequest { fn parse_ctrl_request(data: [u8; 64]) -> UsbCtrlRequest {
UsbCtrlRequest { UsbCtrlRequest {

View File

@ -146,9 +146,12 @@ fn sanitize_streaming_control(data: &[u8], state: &UvcState) -> [u8; STREAM_CTRL
if data[2] == 1 { if data[2] == 1 {
out[2] = 1; out[2] = 1;
} }
if data[3] == 1 { let frame_index = uvc_frame_index_for_request(data[3], &state.cfg);
out[3] = 1; out[3] = frame_index;
} write_le32(
&mut out[18..22],
uvc_frame_size_for_index(frame_index, state.cfg.frame_size),
);
let interval = read_le32(data, 4); let interval = read_le32(data, 4);
if interval != 0 { if interval != 0 {
write_le32(&mut out[4..8], interval); write_le32(&mut out[4..8], interval);

View File

@ -225,8 +225,20 @@ fn select_uvc_config() -> CameraConfig {
.unwrap_or(30); .unwrap_or(30);
let codec = select_uvc_codec(Some(&uvc_env)); let codec = select_uvc_codec(Some(&uvc_env));
if let Some(live) = read_live_uvc_configfs_profile() { let live_profiles = read_live_uvc_configfs_profiles();
if (width, height, fps) != (live.width, live.height, live.fps) { if !live_profiles.is_empty() {
if live_profiles
.iter()
.any(|live| (width, height, fps) == (live.width, live.height, live.fps))
{
info!(
configured_width = width,
configured_height = height,
configured_fps = fps,
"📷 configured UVC profile is advertised by the live gadget descriptor"
);
} else {
let live = live_profiles[0];
warn!( warn!(
configured_width = width, configured_width = width,
configured_height = height, configured_height = height,
@ -237,10 +249,10 @@ fn select_uvc_config() -> CameraConfig {
live_interval_100ns = live.interval_100ns, live_interval_100ns = live.interval_100ns,
"📷 live UVC descriptor differs from configured profile; honoring attached gadget profile until rebuild" "📷 live UVC descriptor differs from configured profile; honoring attached gadget profile until rebuild"
); );
width = live.width;
height = live.height;
fps = live.fps.max(1);
} }
width = live.width;
height = live.height;
fps = live.fps.max(1);
} }
CameraConfig { CameraConfig {
@ -256,45 +268,45 @@ fn select_uvc_config() -> CameraConfig {
#[cfg(not(coverage))] #[cfg(not(coverage))]
/// Keeps `read_live_uvc_configfs_profile` explicit because it sits on camera selection, where negotiated profiles must match the server output contract. /// Keeps `read_live_uvc_configfs_profile` explicit because it sits on camera selection, where negotiated profiles must match the server output contract.
/// Inputs are the typed parameters; output is the return value or side effect. /// Inputs are the typed parameters; output is the return value or side effect.
fn read_live_uvc_configfs_profile() -> Option<LiveUvcProfile> { fn read_live_uvc_configfs_profiles() -> Vec<LiveUvcProfile> {
let base = std::env::var(UVC_CONFIGFS_BASE_ENV) let base = std::env::var(UVC_CONFIGFS_BASE_ENV)
.map(PathBuf::from) .map(PathBuf::from)
.unwrap_or_else(|_| PathBuf::from(DEFAULT_UVC_CONFIGFS_BASE)); .unwrap_or_else(|_| PathBuf::from(DEFAULT_UVC_CONFIGFS_BASE));
let frame_dir = live_uvc_frame_dir(&base)?;
let width = read_u32_file(frame_dir.join("wWidth"))?;
let height = read_u32_file(frame_dir.join("wHeight"))?;
let interval_100ns = read_u32_file(frame_dir.join("dwDefaultFrameInterval"))
.or_else(|| read_first_u32_line(frame_dir.join("dwFrameInterval")))?;
if width == 0 || height == 0 || interval_100ns == 0 {
return None;
}
Some(LiveUvcProfile {
width,
height,
fps: (10_000_000 / interval_100ns).max(1),
interval_100ns,
})
}
#[cfg(not(coverage))]
/// Keeps `live_uvc_frame_dir` explicit because it sits on camera selection, where negotiated profiles must match the server output contract.
/// Inputs are the typed parameters; output is the return value or side effect.
fn live_uvc_frame_dir(base: &Path) -> Option<PathBuf> {
let preferred = base.join("streaming/mjpeg/m/720p");
if preferred.join("wWidth").is_file() && preferred.join("wHeight").is_file() {
return Some(preferred);
}
let mjpeg_dir = base.join("streaming/mjpeg/m"); let mjpeg_dir = base.join("streaming/mjpeg/m");
let mut candidates = Vec::new(); let Ok(entries) = fs::read_dir(mjpeg_dir) else {
for entry in fs::read_dir(mjpeg_dir).ok()?.flatten() { return Vec::new();
};
let mut profiles = Vec::new();
for entry in entries.flatten() {
let path = entry.path(); let path = entry.path();
if path.join("wWidth").is_file() && path.join("wHeight").is_file() { let Some(width) = read_u32_file(path.join("wWidth")) else {
candidates.push(path); continue;
};
let Some(height) = read_u32_file(path.join("wHeight")) else {
continue;
};
if width == 0 || height == 0 {
continue;
}
let intervals = read_u32_lines(path.join("dwFrameInterval"))
.or_else(|| read_u32_file(path.join("dwDefaultFrameInterval")).map(|v| vec![v]))
.unwrap_or_default();
for interval_100ns in intervals.into_iter().filter(|interval| *interval > 0) {
profiles.push(LiveUvcProfile {
width,
height,
fps: (10_000_000 / interval_100ns).max(1),
interval_100ns,
});
} }
} }
candidates.sort(); profiles.sort_by_key(|p| {
candidates.into_iter().next() (
std::cmp::Reverse(p.width * p.height),
std::cmp::Reverse(p.fps),
)
});
profiles
} }
#[cfg(not(coverage))] #[cfg(not(coverage))]
@ -305,11 +317,13 @@ fn read_u32_file(path: impl AsRef<Path>) -> Option<u32> {
} }
#[cfg(not(coverage))] #[cfg(not(coverage))]
fn read_first_u32_line(path: impl AsRef<Path>) -> Option<u32> { fn read_u32_lines(path: impl AsRef<Path>) -> Option<Vec<u32>> {
fs::read_to_string(path) let values = fs::read_to_string(path)
.ok()? .ok()?
.lines() .lines()
.find_map(|line| line.trim().parse::<u32>().ok()) .filter_map(|line| line.trim().parse::<u32>().ok())
.collect::<Vec<_>>();
(!values.is_empty()).then_some(values)
} }
#[cfg(coverage)] #[cfg(coverage)]

View File

@ -249,15 +249,14 @@ pub(crate) fn read_u32_first(path: &str) -> Option<u32> {
} }
pub(crate) fn read_configfs_snapshot() -> Option<ConfigfsSnapshot> { pub(crate) fn read_configfs_snapshot() -> Option<ConfigfsSnapshot> {
let width = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/wWidth"))?; let frame = configfs_frame_name_from_env();
let height = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/wHeight"))?; let frame_root = format!("{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/{frame}");
let width = read_u32_file(&format!("{frame_root}/wWidth"))?;
let height = read_u32_file(&format!("{frame_root}/wHeight"))?;
let default_interval = read_u32_file(&format!( let default_interval = read_u32_file(&format!(
"{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/dwDefaultFrameInterval" "{frame_root}/dwDefaultFrameInterval"
))?; ))?;
let frame_interval = read_u32_first(&format!( let frame_interval = read_u32_first(&format!("{frame_root}/dwFrameInterval")).unwrap_or(0);
"{CONFIGFS_UVC_BASE}/streaming/mjpeg/m/720p/dwFrameInterval"
))
.unwrap_or(0);
let maxpacket = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxpacket"))?; let maxpacket = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxpacket"))?;
let maxburst = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxburst")).unwrap_or(0); let maxburst = read_u32_file(&format!("{CONFIGFS_UVC_BASE}/streaming_maxburst")).unwrap_or(0);
Some(ConfigfsSnapshot { Some(ConfigfsSnapshot {
@ -270,6 +269,16 @@ pub(crate) fn read_configfs_snapshot() -> Option<ConfigfsSnapshot> {
}) })
} }
fn configfs_frame_name_from_env() -> &'static str {
match (
env_u32("LESAVKA_UVC_WIDTH", 1280),
env_u32("LESAVKA_UVC_HEIGHT", 720),
) {
(1920, 1080) => "1080p",
_ => "720p",
}
}
pub(crate) fn log_configfs_snapshot(state: &mut UvcState, label: &str) { pub(crate) fn log_configfs_snapshot(state: &mut UvcState, label: &str) {
let Some(current) = read_configfs_snapshot() else { let Some(current) = read_configfs_snapshot() else {
eprintln!("[lesavka-uvc] configfs {label}: unavailable"); eprintln!("[lesavka-uvc] configfs {label}: unavailable");
@ -331,7 +340,7 @@ pub(crate) fn build_streaming_control(
write_le16(&mut buf[0..2], 1); write_le16(&mut buf[0..2], 1);
buf[2] = 1; buf[2] = 1;
buf[3] = 1; buf[3] = uvc_frame_index_for_mode(cfg.width, cfg.height);
write_le32(&mut buf[4..8], cfg.interval); write_le32(&mut buf[4..8], cfg.interval);
write_le16(&mut buf[8..10], 0); write_le16(&mut buf[8..10], 0);
write_le16(&mut buf[10..12], 0); write_le16(&mut buf[10..12], 0);
@ -351,6 +360,29 @@ pub(crate) fn build_streaming_control(
buf buf
} }
pub(crate) fn uvc_frame_index_for_mode(width: u32, height: u32) -> u8 {
match (width, height) {
(1920, 1080) => 1,
(1280, 720) => 2,
_ => 2,
}
}
pub(crate) fn uvc_frame_index_for_request(requested: u8, cfg: &UvcConfig) -> u8 {
match requested {
1 | 2 => requested,
_ => uvc_frame_index_for_mode(cfg.width, cfg.height),
}
}
pub(crate) fn uvc_frame_size_for_index(frame_index: u8, fallback: u32) -> u32 {
match frame_index {
1 => 1920 * 1080 * 2,
2 => 1280 * 720 * 2,
_ => fallback,
}
}
pub(crate) fn compute_payload_cap(bulk: bool) -> Option<PayloadCap> { pub(crate) fn compute_payload_cap(bulk: bool) -> Option<PayloadCap> {
if let Some(limit) = env_u32_opt("LESAVKA_UVC_MAXPAYLOAD_LIMIT") { if let Some(limit) = env_u32_opt("LESAVKA_UVC_MAXPAYLOAD_LIMIT") {
return Some(PayloadCap { return Some(PayloadCap {

View File

@ -6,6 +6,7 @@ use crate::uvc_control::model::{
UVC_GET_RES, UVC_SET_CUR, UVC_VC_REQUEST_ERROR_CODE_CONTROL, UVC_VS_COMMIT_CONTROL, UVC_GET_RES, UVC_SET_CUR, UVC_VC_REQUEST_ERROR_CODE_CONTROL, UVC_VS_COMMIT_CONTROL,
UVC_VS_PROBE_CONTROL, USB_DIR_IN, UvcInterfaces, UvcRequestData, UvcState, UVC_VS_PROBE_CONTROL, USB_DIR_IN, UvcInterfaces, UvcRequestData, UvcState,
adjust_length, build_streaming_control, log_configfs_snapshot, read_le32, write_le32, adjust_length, build_streaming_control, log_configfs_snapshot, read_le32, write_le32,
uvc_frame_index_for_request, uvc_frame_size_for_index,
}; };
pub(crate) fn handle_setup( pub(crate) fn handle_setup(
@ -278,9 +279,12 @@ pub(crate) fn sanitize_streaming_control(
if format_index == 1 { if format_index == 1 {
out[2] = 1; out[2] = 1;
} }
if frame_index == 1 { let frame_index = uvc_frame_index_for_request(frame_index, &state.cfg);
out[3] = 1; out[3] = frame_index;
} write_le32(
&mut out[18..22],
uvc_frame_size_for_index(frame_index, state.cfg.frame_size),
);
if interval != 0 { if interval != 0 {
write_le32(&mut out[4..8], interval); write_le32(&mut out[4..8], interval);
} }

View File

@ -96,7 +96,10 @@ fn core_script_keeps_uvc_output_on_supported_mjpeg_descriptor() {
"UVC_CODEC=${LESAVKA_UVC_CODEC:-mjpeg}", "UVC_CODEC=${LESAVKA_UVC_CODEC:-mjpeg}",
"UVC codec '$UVC_CODEC' is not supported by the MJPEG UVC helper; using mjpeg", "UVC codec '$UVC_CODEC' is not supported by the MJPEG UVC helper; using mjpeg",
"UVC_CODEC=mjpeg", "UVC_CODEC=mjpeg",
"streaming/mjpeg/m/720p", "write_mjpeg_frame_descriptor 1080p 1920 1080",
"write_mjpeg_frame_descriptor 720p 1280 720",
"UVC_INTERVAL_30=${LESAVKA_UVC_INTERVAL_30:-333333}",
"UVC_INTERVAL_20=${LESAVKA_UVC_INTERVAL_20:-500000}",
] { ] {
assert!( assert!(
CORE_SCRIPT.contains(expected), CORE_SCRIPT.contains(expected),

View File

@ -119,7 +119,7 @@ mod uvc_binary {
maybe_update_ctrl_len(&mut state, STREAM_CTRL_SIZE_11 as u16, false); maybe_update_ctrl_len(&mut state, STREAM_CTRL_SIZE_11 as u16, false);
assert_eq!(state.ctrl_len, STREAM_CTRL_SIZE_11); assert_eq!(state.ctrl_len, STREAM_CTRL_SIZE_11);
assert_eq!(state.probe[2], 1); assert_eq!(state.probe[2], 1);
assert_eq!(state.commit[3], 1); assert_eq!(state.commit[3], 2);
} }
#[test] #[test]
@ -134,6 +134,7 @@ mod uvc_binary {
assert_eq!(out[2], 1); assert_eq!(out[2], 1);
assert_eq!(out[3], 1); assert_eq!(out[3], 1);
assert_eq!(read_le32(&out, 4), 333_333); assert_eq!(read_le32(&out, 4), 333_333);
assert_eq!(read_le32(&out, 18), 1920 * 1080 * 2);
assert_eq!(read_le32(&out, 22), state.cfg.max_packet); assert_eq!(read_le32(&out, 22), state.cfg.max_packet);
} }

View File

@ -250,6 +250,7 @@ mod uvc_binary_extra {
assert_eq!(out[2], 1); assert_eq!(out[2], 1);
assert_eq!(out[3], 1); assert_eq!(out[3], 1);
assert_eq!(read_le32(&out, 4), 333_333); assert_eq!(read_le32(&out, 4), 333_333);
assert_eq!(read_le32(&out, 18), 1920 * 1080 * 2);
assert_eq!(read_le32(&out, 22), state.cfg.max_packet); assert_eq!(read_le32(&out, 22), state.cfg.max_packet);
} }

View File

@ -347,6 +347,12 @@ fn server_install_pins_hdmi_camera_and_display_defaults() {
SERVER_INSTALL.contains("dwDefaultFrameInterval"), SERVER_INSTALL.contains("dwDefaultFrameInterval"),
"live descriptor matching should include frame interval, not only codec labels" "live descriptor matching should include frame interval, not only codec labels"
); );
assert!(
SERVER_INSTALL.contains("1920x1080) frame_root=\"$function_root/streaming/mjpeg/m/1080p\"")
&& SERVER_INSTALL.contains("1280x720) frame_root=\"$function_root/streaming/mjpeg/m/720p\"")
&& SERVER_INSTALL.contains("grep -qx \"${LESAVKA_UVC_INTERVAL:-333333}\" \"$frame_root/dwFrameInterval\""),
"live descriptor matching should recognize all supported MJPEG UVC profiles instead of collapsing to one 720p frame"
);
assert!( assert!(
SERVER_INSTALL.contains("streaming/header/h/mjpeg") SERVER_INSTALL.contains("streaming/header/h/mjpeg")
&& SERVER_INSTALL.contains("streaming/header/h/yuyv"), && SERVER_INSTALL.contains("streaming/header/h/yuyv"),

View File

@ -14,6 +14,10 @@ const CLIENT_RCT_SCRIPT: &str = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"), env!("CARGO_MANIFEST_DIR"),
"/scripts/manual/run_client_to_rct_transport_probe.sh" "/scripts/manual/run_client_to_rct_transport_probe.sh"
)); ));
const CLIENT_RCT_REMOTE_CAPTURE: &str = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"),
"/scripts/manual/client_rct_remote_capture.sh"
));
const CLIENT_RCT_SUMMARY: &str = include_str!(concat!( const CLIENT_RCT_SUMMARY: &str = include_str!(concat!(
env!("CARGO_MANIFEST_DIR"), env!("CARGO_MANIFEST_DIR"),
"/scripts/manual/client_rct_transport_summary.py" "/scripts/manual/client_rct_transport_summary.py"
@ -116,7 +120,8 @@ fn client_rct_probe_preserves_black_box_rct_measurement_artifacts() {
|| CLIENT_RCT_SUMMARY.contains(expected) || CLIENT_RCT_SUMMARY.contains(expected)
|| CLIENT_RCT_LAYERS.contains(expected) || CLIENT_RCT_LAYERS.contains(expected)
|| UVC_FRAME_META_FETCH.contains(expected) || UVC_FRAME_META_FETCH.contains(expected)
|| UVC_FRAME_META_SUMMARY.contains(expected), || UVC_FRAME_META_SUMMARY.contains(expected)
|| CLIENT_RCT_REMOTE_CAPTURE.contains(expected),
"client-to-RCT probe should preserve artifact/summary marker {expected}" "client-to-RCT probe should preserve artifact/summary marker {expected}"
); );
} }
@ -126,6 +131,7 @@ fn client_rct_probe_preserves_black_box_rct_measurement_artifacts() {
fn client_rct_probe_keeps_shell_harness_focused_under_loc_limit() { fn client_rct_probe_keeps_shell_harness_focused_under_loc_limit() {
for (name, contents) in [ for (name, contents) in [
("run_client_to_rct_transport_probe.sh", CLIENT_RCT_SCRIPT), ("run_client_to_rct_transport_probe.sh", CLIENT_RCT_SCRIPT),
("client_rct_remote_capture.sh", CLIENT_RCT_REMOTE_CAPTURE),
("client_rct_uvc_frame_meta_fetch.sh", UVC_FRAME_META_FETCH), ("client_rct_uvc_frame_meta_fetch.sh", UVC_FRAME_META_FETCH),
("client_rct_transport_summary.py", CLIENT_RCT_SUMMARY), ("client_rct_transport_summary.py", CLIENT_RCT_SUMMARY),
] { ] {
@ -144,6 +150,11 @@ fn client_rct_probe_is_non_mutating_and_passwordless_by_default() {
"no remote sudo/reconfigure will be attempted by this script", "no remote sudo/reconfigure will be attempted by this script",
"LESAVKA_SERVER_ADDR=${LESAVKA_SERVER_ADDR:-auto}", "LESAVKA_SERVER_ADDR=${LESAVKA_SERVER_ADDR:-auto}",
"LESAVKA_CLIENT_RCT_MODE=${LESAVKA_CLIENT_RCT_MODE:-auto}", "LESAVKA_CLIENT_RCT_MODE=${LESAVKA_CLIENT_RCT_MODE:-auto}",
"LESAVKA_CLIENT_RCT_UPSTREAM_MODE=${LESAVKA_CLIENT_RCT_UPSTREAM_MODE:-${LESAVKA_CLIENT_RCT_MODE}}",
"LESAVKA_CLIENT_RCT_UPSTREAM_CAMERA_CODEC=${LESAVKA_CLIENT_RCT_UPSTREAM_CAMERA_CODEC:-auto}",
"LESAVKA_CLIENT_RCT_UPSTREAM_AUDIO_CODEC=${LESAVKA_CLIENT_RCT_UPSTREAM_AUDIO_CODEC:-${LESAVKA_UPLINK_AUDIO_CODEC:-opus}}",
"--camera-mode",
"--camera-codec",
"LESAVKA_CLIENT_RCT_START_DELAY_SECONDS=${LESAVKA_CLIENT_RCT_START_DELAY_SECONDS:-0}", "LESAVKA_CLIENT_RCT_START_DELAY_SECONDS=${LESAVKA_CLIENT_RCT_START_DELAY_SECONDS:-0}",
"LESAVKA_CLIENT_RCT_START_DELAY_SECONDS must be a non-negative number", "LESAVKA_CLIENT_RCT_START_DELAY_SECONDS must be a non-negative number",
"start_delay=${LESAVKA_CLIENT_RCT_START_DELAY_SECONDS}s", "start_delay=${LESAVKA_CLIENT_RCT_START_DELAY_SECONDS}s",
@ -154,7 +165,7 @@ fn client_rct_probe_is_non_mutating_and_passwordless_by_default() {
"127.0.0.1:${local_port}:127.0.0.1:${SERVER_TUNNEL_REMOTE_PORT}", "127.0.0.1:${local_port}:127.0.0.1:${SERVER_TUNNEL_REMOTE_PORT}",
] { ] {
assert!( assert!(
CLIENT_RCT_SCRIPT.contains(expected), CLIENT_RCT_SCRIPT.contains(expected) || CLIENT_RCT_REMOTE_CAPTURE.contains(expected),
"client-to-RCT probe should keep unattended marker {expected}" "client-to-RCT probe should keep unattended marker {expected}"
); );
} }