video overhaul

Brad Stein 2025-06-27 14:01:29 -05:00
parent 20e33f03d5
commit 0e8a389683
7 changed files with 28 additions and 18 deletions

View File

@@ -52,6 +52,8 @@ impl LesavkaClientApp {
.connect_lazy();
let vid_ep = Channel::from_shared(self.server_addr.clone())?
.initial_connection_window_size(2<<20)
.initial_stream_window_size(2<<20)
.tcp_nodelay(true)
.connect_lazy();
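
Here 2<<20 is 2 MiB, so the dedicated video channel gets HTTP/2 connection- and stream-level flow-control windows well above the protocol's 64 KiB default, and TCP_NODELAY stops small encoded frames from being Nagle-batched. A minimal standalone sketch of the same endpoint setup, with an illustrative helper name that is not part of this commit:

// Sketch (not part of the commit): building the video channel in isolation.
use anyhow::Result;
use tonic::transport::Channel;

fn video_channel(addr: String) -> Result<Channel> {
    Ok(Channel::from_shared(addr)?               // validates the URI up front
        .initial_connection_window_size(2 << 20) // 2 MiB connection-level window
        .initial_stream_window_size(2 << 20)     // 2 MiB per-stream window
        .tcp_nodelay(true)                       // flush small frames immediately
        .connect_lazy())                         // actual connect happens on first RPC
}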

View File

@@ -7,7 +7,7 @@ use tracing::{debug, error, warn, trace};
use lesavka_common::lesavka::MouseReport;
const SEND_INTERVAL: Duration = Duration::from_micros(50);
const SEND_INTERVAL: Duration = Duration::from_micros(25);
pub struct MouseAggregator {
dev: Device,
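
Halving SEND_INTERVAL from 50 µs to 25 µs raises the report-rate ceiling from 20 kHz to 40 kHz (1 / 25 µs = 40 000 reports per second). A reduced sketch of a pacing loop at that interval, assuming a tokio runtime and a caller-supplied flush step, neither of which is shown in this hunk:

// Sketch (not part of the commit): pacing mouse reports at SEND_INTERVAL.
use std::time::Duration;
use tokio::time::{interval, MissedTickBehavior};

const SEND_INTERVAL: Duration = Duration::from_micros(25); // 40 kHz ceiling

async fn pump(mut flush: impl FnMut()) {
    let mut tick = interval(SEND_INTERVAL);
    tick.set_missed_tick_behavior(MissedTickBehavior::Skip); // no catch-up bursts after a stall
    loop {
        tick.tick().await;
        flush(); // e.g. send the latest aggregated MouseReport
    }
}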

View File

@@ -37,7 +37,7 @@ impl MonitorWindow {
"capsfilter caps=video/x-h264,stream-format=byte-stream,alignment=au ! ",
"queue max-size-buffers=0 max-size-bytes=0 max-size-time=0 leaky=downstream ! ",
"h264parse ! vaapih264dec low-latency=true ! videoconvert ! ",
"autovideosink sync=false",
"direct-render-synchronised-videosink ! autovideosink sync=false",
)
} else {
concat!(
@@ -59,7 +59,7 @@ impl MonitorWindow {
.expect("appsink downcast");
src.set_caps(Some(&caps));
src.set_format(gst::Format::Time); // running-time PTS
src.set_format(gst::Format::Undefined); // running-time PTS
src.set_property("blocksize", &0u32); // whole AU per buffer
src.set_latency(gst::ClockTime::NONE, gst::ClockTime::NONE);
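
Two client-side changes land in this file: the hardware branch now decodes with vaapih264dec low-latency=true behind a leaky queue and renders through a direct-render-synchronised-videosink element, and the src element (an appsrc, judging by push_buffer below, despite the stale "appsink downcast" message) stops declaring a TIME format, which matches the new push path that no longer stamps PTS. A minimal sketch of that appsrc wiring under an assumed software-decode pipeline string and element name:

// Sketch (not part of the commit): appsrc configured for byte-stream H.264 AUs.
use anyhow::Result;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gst::prelude::*;

fn build_monitor_pipeline() -> Result<(gst::Pipeline, gst_app::AppSrc)> {
    gst::init()?;
    let pipeline = gst::parse::launch(
        "appsrc name=src is-live=true ! h264parse ! avdec_h264 ! \
         videoconvert ! autovideosink sync=false",
    )?
    .downcast::<gst::Pipeline>()
    .expect("pipeline");
    let src = pipeline
        .by_name("src")
        .expect("appsrc element")
        .downcast::<gst_app::AppSrc>()
        .expect("appsrc downcast");
    let caps = gst::Caps::builder("video/x-h264")
        .field("stream-format", "byte-stream")
        .field("alignment", "au")
        .build();
    src.set_caps(Some(&caps));
    src.set_format(gst::Format::Undefined); // no clock-based PTS bookkeeping
    src.set_latency(gst::ClockTime::NONE, gst::ClockTime::NONE);
    Ok((pipeline, src))
}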
@@ -70,10 +70,8 @@ impl MonitorWindow {
/// Push one encoded access-unit into the local pipeline.
pub fn push_packet(&self, pkt: VideoPacket) {
let mut buf = gst::Buffer::from_slice(pkt.data);
if let Some(mut b) = buf.get_mut() {
b.set_pts(Some(gst::ClockTime::from_useconds(pkt.pts)));
}
let _ = self.src.push_buffer(buf); // ignore Eos / Flushing
let buf = gst::Buffer::from_slice(pkt.data); // no PTS manipulation
let _ = self.src.push_buffer(buf);
}
}
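
With the sink running sync=false and the source format left undefined, the client now ignores the PTS carried in each VideoPacket: an access unit is pushed and rendered as soon as it arrives, which is why the set_pts block was dropped. A sketch of the feeding side, assuming the packets come in as a tonic server-streaming response (the RPC itself is not part of this diff):

// Sketch (not part of the commit): draining a gRPC stream into push_packet.
use lesavka_common::lesavka::VideoPacket;
use tonic::Streaming;

async fn feed(window: &MonitorWindow, mut packets: Streaming<VideoPacket>) {
    while let Ok(Some(pkt)) = packets.message().await {
        window.push_packet(pkt); // push_buffer errors (EOS / flushing) stay ignored
    }
}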

View File

@@ -5,7 +5,7 @@ set -euo pipefail
ORIG_USER=${SUDO_USER:-$(id -un)}
# 1. packages (Arch)
sudo pacman -Syq --needed --noconfirm git rustup protobuf gcc evtest
sudo pacman -Syq --needed --noconfirm git rustup protobuf gcc evtest gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly gst-libav
# 2. Rust tool-chain for both root & user
sudo rustup default stable

View File

@@ -4,7 +4,7 @@ set -euo pipefail
ORIG_USER=${SUDO_USER:-$(id -un)}
echo "==> 1a. Base packages"
sudo pacman -Syq --needed --noconfirm git rustup protobuf gcc pipewire pipewire-pulse tailscale base-devel gst-libav
sudo pacman -Syq --needed --noconfirm git rustup protobuf gcc pipewire pipewire-pulse tailscale base-devel gstreamer gst-plugins-base gst-plugins-good gst-plugins-bad gst-plugins-ugly gst-libav
if ! command -v yay >/dev/null 2>&1; then
echo "==> 1b. installing yay from AUR ..."
sudo -u "$ORIG_USER" bash -c '

View File

@@ -98,7 +98,7 @@ impl UsbGadget {
/// Hard-reset the gadget → identical to a physical cable replug
pub fn cycle(&self) -> Result<()> {
/* 0: ensure we *know* the controller even after a previous crash */
let mut ctrl = Self::find_controller()
let ctrl = Self::find_controller()
.or_else(|_| Self::probe_platform_udc()?
.ok_or_else(|| anyhow::anyhow!("no UDC present")))?;
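
The change here is only dropping a mut binding, but the fallback chain is worth spelling out: find_controller() returns a Result, and when it fails the closure probes for a platform UDC, where ? propagates a probe failure and ok_or_else turns an empty probe result into "no UDC present". A reduced sketch of the same pattern with assumed signatures:

// Sketch (not part of the commit): Result-then-Option fallback, signatures assumed.
use anyhow::{anyhow, Result};

fn find_controller() -> Result<String> { Err(anyhow!("no cached controller")) }
fn probe_platform_udc() -> Result<Option<String>> { Ok(Some("example-udc".into())) }

fn controller_name() -> Result<String> {
    find_controller().or_else(|_| {
        // `?` surfaces a probe error; ok_or_else maps "probe found nothing" to an error.
        probe_platform_udc()?.ok_or_else(|| anyhow!("no UDC present"))
    })
}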

View File

@@ -4,11 +4,14 @@ use anyhow::Context;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gst::prelude::*;
use gst::log;
use lesavka_common::lesavka::VideoPacket;
use tokio_stream::wrappers::ReceiverStream;
use tonic::Status;
use tracing::{debug, enabled, Level};
static START: std::sync::OnceLock<gst::ClockTime> = std::sync::OnceLock::new();
pub async fn spawn_camera(
dev: &str,
id: u32,
@@ -18,11 +21,10 @@ pub async fn spawn_camera(
// IMPORTANT: keep one AU per buffer, include regular SPS/PPS
let desc = format!(
"v4l2src device={dev} io-mode=dmabuf ! \
video/x-h264,alignment=au,stream-format=byte-stream ! \
queue max-size-buffers=0 max-size-bytes=0 max-size-time=0 ! \
"v4l2src device={dev} io-mode=mmap ! \
video/x-h264,stream-format=byte-stream,alignment=au ! \
h264parse config-interval=1 ! \
appsink name=sink emit-signals=true sync=false drop=true"
appsink name=sink emit-signals=true drop=true sync=false"
);
let pipeline = gst::parse::launch(&desc)?
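
The capture pipeline swaps io-mode=dmabuf for io-mode=mmap, drops the unbounded queue, and asks h264parse (config-interval=1) to re-insert SPS/PPS about once per second, while the appsink neither blocks on the clock (sync=false) nor lets stale buffers pile up (drop=true). A standalone sketch for sanity-checking that the camera really delivers one byte-stream access unit per buffer under these caps; the device path and run length are assumptions:

// Sketch (not part of the commit): probe the capture branch in isolation.
use anyhow::Result;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gst::prelude::*;

fn probe_camera(dev: &str) -> Result<()> {
    gst::init()?;
    let desc = format!(
        "v4l2src device={dev} io-mode=mmap ! \
         video/x-h264,stream-format=byte-stream,alignment=au ! \
         h264parse config-interval=1 ! \
         appsink name=sink emit-signals=true drop=true sync=false"
    );
    let pipeline = gst::parse::launch(&desc)?
        .downcast::<gst::Pipeline>()
        .expect("pipeline");
    let sink = pipeline
        .by_name("sink")
        .expect("appsink element")
        .downcast::<gst_app::AppSink>()
        .expect("appsink downcast");
    sink.set_callbacks(
        gst_app::AppSinkCallbacks::builder()
            .new_sample(|s| {
                let sample = s.pull_sample().map_err(|_| gst::FlowError::Eos)?;
                let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
                println!("AU: {} bytes, pts={:?}", buffer.size(), buffer.pts());
                Ok(gst::FlowSuccess::Ok)
            })
            .build(),
    );
    pipeline.set_state(gst::State::Playing)?;
    std::thread::sleep(std::time::Duration::from_secs(3)); // watch a few AUs go by
    pipeline.set_state(gst::State::Null)?;
    Ok(())
}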
@@ -57,11 +59,13 @@ pub async fn spawn_camera(
.new_sample(move |sink| {
let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let origin = *START.get_or_init(|| buffer.pts().unwrap_or(gst::ClockTime::ZERO));
let pts_us = buffer
.pts()
.map(|t| t.nseconds() / 1_000) // ns → µs
.unwrap_or(0);
.unwrap_or(gst::ClockTime::ZERO)
.saturating_sub(origin)
.nseconds() / 1_000;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
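
START pins the PTS of the first sample as an origin, so pkt.pts now carries microseconds since capture start instead of an absolute pipeline running time, and saturating_sub guards against a buffer whose PTS lands before that recorded origin. The rebasing in isolation, as a sketch with its own copy of the static:

// Sketch (not part of the commit): rebase absolute PTS to µs since the first buffer.
use std::sync::OnceLock;
use gstreamer as gst;

static START: OnceLock<gst::ClockTime> = OnceLock::new();

fn rebased_pts_us(pts: Option<gst::ClockTime>) -> u64 {
    let origin = *START.get_or_init(|| pts.unwrap_or(gst::ClockTime::ZERO));
    pts.unwrap_or(gst::ClockTime::ZERO)
        .saturating_sub(origin)
        .nseconds() / 1_000 // ns → µs
}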
@@ -92,6 +96,12 @@ pub async fn spawn_camera(
.build(),
);
// gst::debug_remove_default_log_function();
// gst::debug_add_default_log_function(|lvl, cat, msg| {
// println!("[GST] {lvl:?} {cat}: {msg}");
// });
// std::env::set_var("GST_DEBUG", "v4l2src:4,h264parse:3");
pipeline.set_state(gst::State::Playing)?;
Ok(ReceiverStream::new(rx))
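
The commented-out block keeps two debugging hooks within reach: swapping in a custom GStreamer log function, and raising per-category levels through GST_DEBUG. The environment-variable route is the lighter of the two; it is read when GStreamer initialises, so it has to be set before gst::init(). A sketch mirroring the commented line above:

// Sketch (not part of the commit): per-category debug without a custom log function.
use gstreamer as gst;

fn enable_capture_debug() {
    std::env::set_var("GST_DEBUG", "v4l2src:4,h264parse:3"); // read during init
    gst::init().expect("gst init");
}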