diff --git a/client/src/output/audio.rs b/client/src/output/audio.rs
index 4ad300b..7c306a4 100644
--- a/client/src/output/audio.rs
+++ b/client/src/output/audio.rs
@@ -4,6 +4,7 @@ use gstreamer as gst;
 use gstreamer_app as gst_app;
 use lesavka_common::lesavka::AudioPacket;
 use gst::prelude::*;
+use tracing::{error, info, warn, debug};
 
 pub struct AudioOut {
     src: gst_app::AppSrc,
@@ -37,6 +38,12 @@ impl AudioOut {
     }
 
     pub fn push(&self, pkt: AudioPacket) {
+        static CNT: std::sync::atomic::AtomicU64 =
+            std::sync::atomic::AtomicU64::new(0);
+        let n = CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
+        if n % 300 == 0 || n < 10 {
+            debug!(bytes = pkt.data.len(), pts = pkt.pts, "⬇️ received audio AU");
+        }
         let mut buf = gst::Buffer::from_slice(pkt.data);
         buf.get_mut()
             .unwrap()
diff --git a/client/src/output/video.rs b/client/src/output/video.rs
index e044966..4a0591d 100644
--- a/client/src/output/video.rs
+++ b/client/src/output/video.rs
@@ -5,6 +5,7 @@ use gstreamer as gst;
 use gstreamer_app as gst_app;
 use gst::prelude::*;
 use lesavka_common::lesavka::VideoPacket;
+use tracing::{error, info, warn, debug};
 
 /* ---------- pipeline ----------------------------------------------------
  * ┌────────────┐ H.264/AU ┌─────────┐ Decoded ┌─────────────┐
@@ -55,12 +56,23 @@
             .build()));
         src.set_format(gst::Format::Time);
         pipeline.set_state(gst::State::Playing)?;
+        if let Some(sink) = pipeline.by_name("sink") {
+            let title = format!("Lesavka‑eye‑{_id}");
+            sink.set_property_from_str("window-title", &title);
+            sink.set_property("force-aspect-ratio", &true);
+        }
 
         Ok(Self { _pipeline: pipeline, src })
     }
 
     /// Feed one access-unit to the decoder.
     pub fn push_packet(&self, pkt: VideoPacket) {
+        static CNT: std::sync::atomic::AtomicU64 =
+            std::sync::atomic::AtomicU64::new(0);
+        let n = CNT.fetch_add(1, std::sync::atomic::Ordering::Relaxed);
+        if n % 150 == 0 || n < 10 {
+            debug!(eye = pkt.id, bytes = pkt.data.len(), pts = pkt.pts, "⬇️ received video AU");
+        }
         let mut buf = gst::Buffer::from_slice(pkt.data);
         buf.get_mut()
             .unwrap()
diff --git a/server/src/audio.rs b/server/src/audio.rs
index bf8b9f7..f707177 100644
--- a/server/src/audio.rs
+++ b/server/src/audio.rs
@@ -8,6 +8,7 @@
 use tonic::Status;
 use tracing::{debug, trace};
 use gst::prelude::*;
+const EAR_ID: [&str; 2] = ["l", "r"];
 const PIPE: &str = "appsrc name=audsrc is-live=true do-timestamp=true ! aacparse ! queue ! appsink name=asink emit-signals=true";
 
 pub struct AudioStream {
@@ -29,12 +30,13 @@ pub async fn eye_ear(
     dev: &str,
     id: u32,
 ) -> anyhow::Result<AudioStream> {
+    let ear = EAR_ID[id as usize];
     gst::init().context("gst init")?;
     let desc = format!(
-        "v4l2src device=\"{dev}\" io-mode=mmap ! \
-         queue ! tsdemux name=demux \
-         demux.audio_0 ! queue ! \
-         aacparse ! appsink name=asink emit-signals=true"
+        "v4l2src device=\"{dev}\" io-mode=mmap do-timestamp=true ! queue ! \
+         tsdemux name=d d.audio_0 ! queue ! \
+         aacparse ! queue ! \
+         appsink name=asink emit-signals=true max-buffers=64 drop=true"
     );
     let pipe: gst::Pipeline = gst::parse::launch(&desc)?
        .downcast()
@@ -51,7 +53,9 @@
                let map = buf.map_readable().map_err(|_| gst::FlowError::Error)?;
                let pts = buf.pts().unwrap_or(gst::ClockTime::ZERO).nseconds()/1_000;
                let pkt = AudioPacket { id, pts, data: map.as_slice().to_vec() };
-               trace!("srv→grpc audio-{id} {}", pkt.data.len());
+               debug!(target: "lesavka_server::audio",
+                      eye = id, bytes = pkt.data.len(), pts = pkt.pts,
+                      "⬆️ pushed audio sample ear-{ear}");
                let _ = tx.try_send(Ok(pkt));
                Ok(gst::FlowSuccess::Ok)
            }).build()
diff --git a/server/src/video.rs b/server/src/video.rs
index 2653b76..5c46558 100644
--- a/server/src/video.rs
+++ b/server/src/video.rs
@@ -45,9 +45,8 @@ pub async fn eye_ball(
     gst::init().context("gst init")?;
 
     let desc = format!(
-        "v4l2src device=\"{dev}\" io-mode=mmap ! \
-         queue ! tsdemux name=demux \
-         demux.video_0 ! queue ! \
+        "v4l2src device=\"{dev}\" io-mode=mmap do-timestamp=true ! queue ! \
+         tsdemux name=d d.video_0 ! queue ! \
          h264parse disable-passthrough=true config-interval=-1 ! \
          video/x-h264,stream-format=byte-stream,alignment=au ! \
          appsink name=sink emit-signals=true max-buffers=32 drop=true"
@@ -97,6 +96,10 @@
                let path = format!("/tmp/eye-{eye}-srv-{:05}.h264", n);
                std::fs::write(&path, map.as_slice()).ok();
            }
+        } else if n < 10 {
+            debug!(target: "lesavka_server::video",
+                   eye = eye, frame = n, bytes = map.len(),
+                   pts = ?buffer.pts(), "⬆️ pushed video sample eye-{eye}");
        }
 
        /* -------- detect SPS / IDR ---- */
@@ -131,7 +134,7 @@
            .build(),
    );
 
-    pipeline.set_state(gst::State::Playing)?;
+    pipeline.set_state(gst::State::Playing).with_context(|| format!("🎥 starting video pipeline eye-{eye}"))?;
     let bus = pipeline.bus().unwrap();
     loop {
         match bus.timed_pop(gst::ClockTime::NONE) {
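
Note on the client-side changes: the counter-plus-modulo throttling added to `AudioOut::push` and `MonitorWindow::push_packet` is duplicated verbatim. A possible follow-up, sketched below under the assumption of some shared client module (the `Throttle` name and its placement are illustrative, not part of this patch), would factor that gate into one helper:

```rust
// Hypothetical helper (not in this patch): pass the first `first` events,
// then every `every`-th one, mirroring the `n % 300 == 0 || n < 10` checks above.
use std::sync::atomic::{AtomicU64, Ordering};

pub struct Throttle {
    counter: AtomicU64,
    every: u64,
    first: u64,
}

impl Throttle {
    pub const fn new(every: u64, first: u64) -> Self {
        Self { counter: AtomicU64::new(0), every, first }
    }

    /// Returns `true` when the current event should be logged.
    pub fn tick(&self) -> bool {
        let n = self.counter.fetch_add(1, Ordering::Relaxed);
        n < self.first || n % self.every == 0
    }
}

// Usage sketch inside AudioOut::push:
// static AUDIO_LOG: Throttle = Throttle::new(300, 10);
// if AUDIO_LOG.tick() {
//     debug!(bytes = pkt.data.len(), pts = pkt.pts, "⬇️ received audio AU");
// }
```

`Ordering::Relaxed` matches the patch's own usage: the counter only limits log volume, so no synchronization with other state is needed.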