switch to playbin, improve public api, performance

This commit is contained in:
jazzfool 2024-09-30 19:50:47 +10:00
parent 36d2b90f1d
commit c44141c717
5 changed files with 193 additions and 128 deletions

View file

@ -13,7 +13,7 @@ Features:
- Video buffering when streaming on a network. - Video buffering when streaming on a network.
- Audio support. - Audio support.
- Programmatic control. - Programmatic control.
- Capture thumbnails from a set of timestamps. - Can capture thumbnails from a set of timestamps.
- Good performance (i.e., comparable to other video players). GStreamer (with the right plugins) will perform hardware-accelerated decoding, and the color space (YUV to RGB) is converted on the GPU whilst rendering the frame. - Good performance (i.e., comparable to other video players). GStreamer (with the right plugins) will perform hardware-accelerated decoding, and the color space (YUV to RGB) is converted on the GPU whilst rendering the frame.
Limitations (hopefully to be fixed): Limitations (hopefully to be fixed):

View file

@ -64,7 +64,7 @@ impl App {
Message::SeekRelease => { Message::SeekRelease => {
self.dragging = false; self.dragging = false;
self.video self.video
.seek(Duration::from_secs_f64(self.position)) .seek(Duration::from_secs_f64(self.position), false)
.expect("seek"); .expect("seek");
self.video.set_paused(false); self.video.set_paused(false);
} }

View file

@ -54,5 +54,10 @@ fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
rgb.y = dot(yuv, yuv2g); rgb.y = dot(yuv, yuv2g);
rgb.z = dot(yuv, yuv2b); rgb.z = dot(yuv, yuv2b);
return vec4<f32>(pow(rgb, vec3<f32>(2.2)), 1.0); let threshold = rgb <= vec3<f32>(0.04045);
let hi = pow((rgb + vec3<f32>(0.055)) / vec3<f32>(1.055), vec3<f32>(2.4));
let lo = rgb * vec3<f32>(1.0 / 12.92);
rgb = select(hi, lo, threshold);
return vec4<f32>(rgb, 1.0);
} }

View file

@ -2,12 +2,10 @@ use crate::Error;
use gstreamer as gst; use gstreamer as gst;
use gstreamer_app as gst_app; use gstreamer_app as gst_app;
use gstreamer_app::prelude::*; use gstreamer_app::prelude::*;
use gstreamer_base as gst_base;
use iced::widget::image as img; use iced::widget::image as img;
use std::cell::RefCell; use std::cell::RefCell;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering}; use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{mpsc, Arc, Mutex}; use std::sync::{Arc, Mutex};
use std::time::Instant;
/// Position in the media. /// Position in the media.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)] #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -46,36 +44,80 @@ pub(crate) struct Internal {
pub(crate) bus: gst::Bus, pub(crate) bus: gst::Bus,
pub(crate) source: gst::Pipeline, pub(crate) source: gst::Pipeline,
pub(crate) app_sink: gst_app::AppSink,
pub(crate) width: i32, pub(crate) width: i32,
pub(crate) height: i32, pub(crate) height: i32,
pub(crate) framerate: f64, pub(crate) framerate: f64,
pub(crate) duration: std::time::Duration, pub(crate) duration: std::time::Duration,
pub(crate) speed: f64,
pub(crate) frame: Arc<Mutex<Vec<u8>>>, // ideally would be Arc<Mutex<[T]>> pub(crate) frame: Arc<Mutex<Vec<u8>>>,
pub(crate) upload_frame: Arc<AtomicBool>, pub(crate) upload_frame: AtomicBool,
pub(crate) wait: mpsc::Receiver<()>,
pub(crate) paused: bool, pub(crate) paused: bool,
pub(crate) muted: bool, pub(crate) muted: bool,
pub(crate) looping: bool, pub(crate) looping: bool,
pub(crate) is_eos: bool, pub(crate) is_eos: bool,
pub(crate) restart_stream: bool, pub(crate) restart_stream: bool,
pub(crate) next_redraw: Instant,
} }
impl Internal { impl Internal {
pub(crate) fn seek(&self, position: impl Into<Position>) -> Result<(), Error> { pub(crate) fn seek(&self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
self.source.seek_simple( let position = position.into();
gst::SeekFlags::FLUSH, // gstreamer complains if the start & end value types aren't the same
gst::GenericFormattedValue::from(position.into()), let end = match &position {
Position::Time(_) => Position::Time(std::time::Duration::ZERO),
Position::Frame(_) => Position::Frame(0),
};
self.source.seek(
self.speed,
gst::SeekFlags::FLUSH
| gst::SeekFlags::KEY_UNIT
| if accurate {
gst::SeekFlags::ACCURATE
} else {
gst::SeekFlags::empty()
},
gst::SeekType::Set,
gst::GenericFormattedValue::from(position),
gst::SeekType::End,
gst::GenericFormattedValue::from(end),
)?; )?;
Ok(()) Ok(())
} }
/// Change the playback rate of the pipeline while preserving the current position.
///
/// Performs a flushing, accurate seek at the new rate. For a positive speed the
/// segment runs from the current position to the end of the media; for a
/// non-positive speed the start/stop values are swapped so the segment runs from
/// the beginning up to the current position, which is how GStreamer expresses
/// backwards playback.
///
/// Returns `Error::Caps` if the current position cannot be queried.
/// NOTE(review): `speed == 0.0` falls into the reverse branch; GStreamer rejects
/// a seek rate of exactly 0 — confirm callers never pass 0.
pub(crate) fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
// A rate change is expressed as a seek, so we need the current position
// to seek back to (otherwise playback would jump).
let Some(position) = self.source.query_position::<gst::ClockTime>() else {
return Err(Error::Caps);
};
if speed > 0.0 {
// Forwards: play from `position` to the end of the media.
self.source.seek(
speed,
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
gst::SeekType::Set,
position,
gst::SeekType::End,
gst::ClockTime::from_seconds(0),
)?;
} else {
// Backwards: segment start/stop are swapped — play from the start of
// the media up to `position` at the negative rate.
self.source.seek(
speed,
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
gst::SeekType::Set,
gst::ClockTime::from_seconds(0),
gst::SeekType::Set,
position,
)?;
}
// Cache the rate so later seeks (which must re-supply it) keep this speed.
self.speed = speed;
Ok(())
}
pub(crate) fn restart_stream(&mut self) -> Result<(), Error> { pub(crate) fn restart_stream(&mut self) -> Result<(), Error> {
self.is_eos = false; self.is_eos = false;
self.set_paused(false); self.set_paused(false);
self.seek(0)?; self.seek(0, false)?;
Ok(()) Ok(())
} }
@ -94,6 +136,27 @@ impl Internal {
self.restart_stream = true; self.restart_stream = true;
} }
} }
/// Pull the next video sample from the appsink and copy it into the shared
/// frame buffer, flagging it for upload on the next draw.
///
/// No-op (returns `Ok(())`) unless the pipeline is in the `Playing` state.
/// Errors map to `gst::FlowError`: `Eos` when no sample could be pulled,
/// `Error` for an unmappable buffer or a poisoned frame mutex.
///
/// NOTE(review): `pull_sample` blocks until a sample is available — confirm
/// this cannot stall the caller (e.g. the widget draw path) if frames stop.
/// NOTE(review): `copy_from_slice` panics if the mapped buffer is shorter than
/// the preallocated frame buffer — presumably the appsink caps guarantee the
/// NV12 frame size; verify.
pub(crate) fn read_frame(&self) -> Result<(), gst::FlowError> {
// Nothing to read while paused/stopped; avoid blocking on pull_sample.
if self.source.state(None).1 != gst::State::Playing {
return Ok(());
}
let sample = self
.app_sink
.pull_sample()
.map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
let mut frame = self.frame.lock().map_err(|_| gst::FlowError::Error)?;
// Copy exactly as many bytes as the frame buffer holds.
let frame_len = frame.len();
frame.copy_from_slice(&map.as_slice()[..frame_len]);
// Signal the renderer that a fresh frame is ready to upload to the GPU.
self.upload_frame.swap(true, Ordering::SeqCst);
Ok(())
}
} }
/// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream). /// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream).
@ -111,59 +174,39 @@ impl Drop for Video {
impl Video { impl Video {
/// Create a new video player from a given video which loads from `uri`. /// Create a new video player from a given video which loads from `uri`.
/// Note that live sourced will report the duration to be zero. /// Note that live sources will report the duration to be zero.
pub fn new(uri: &url::Url) -> Result<Self, Error> { pub fn new(uri: &url::Url) -> Result<Self, Error> {
let pipeline = format!("uridecodebin uri=\"{}\" ! videoconvert ! videoscale ! appsink name=iced_video caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1", uri.as_str());
Self::from_pipeline(pipeline, None)
}
/// Creates a new video based on GStreamer pipeline in a same format as used in gst-launch-1.0.
/// Expects an appsink plugin to be present with name set to `iced_video` and caps to
/// `video/x-raw,format=NV12,pixel-aspect-ratio=1/1`
pub fn from_pipeline<S: AsRef<str>>(pipeline: S, is_live: Option<bool>) -> Result<Self, Error> {
gst::init()?; gst::init()?;
let pipeline = format!("playbin uri=\"{}\" video-sink=\"videoconvert ! videoscale ! appsink name=iced_video caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1\"", uri.as_str());
let pipeline = gst::parse::launch(pipeline.as_ref())? let pipeline = gst::parse::launch(pipeline.as_ref())?
.downcast::<gst::Pipeline>() .downcast::<gst::Pipeline>()
.map_err(|_| Error::Cast)?; .map_err(|_| Error::Cast)?;
Self::from_gst_pipeline(pipeline, is_live) let video_sink: gst::Element = pipeline.property("video-sink");
let pad = video_sink.pads().get(0).cloned().unwrap();
let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
let bin = pad
.parent_element()
.unwrap()
.downcast::<gst::Bin>()
.unwrap();
let app_sink = bin.by_name("iced_video").unwrap();
let app_sink = app_sink.downcast::<gst_app::AppSink>().unwrap();
Self::from_gst_pipeline(pipeline, app_sink)
} }
/// Creates a new video based on GStreamer pipeline. /// Creates a new video based on an existing GStreamer pipeline and appsink.
/// Expects an appsink plugin to be present with name set to `iced_video` and caps to /// Expects an `appsink` plugin with `caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1`.
/// `video/x-raw,format=NV12,pixel-aspect-ratio=1/1`
pub fn from_gst_pipeline( pub fn from_gst_pipeline(
pipeline: gst::Pipeline, pipeline: gst::Pipeline,
is_live: Option<bool>, app_sink: gst_app::AppSink,
) -> Result<Self, Error> { ) -> Result<Self, Error> {
gst::init()?; gst::init()?;
static NEXT_ID: AtomicU64 = AtomicU64::new(0); static NEXT_ID: AtomicU64 = AtomicU64::new(0);
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst); let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
let mut live = false;
match is_live {
Some(is_live) => live = is_live,
None => {
pipeline
.iterate_sources()
.foreach(|elem| {
if let Ok(src) = elem.downcast::<gst_base::BaseSrc>() {
if src.is_live() {
live = true;
}
}
})
.unwrap();
}
};
let app_sink_name = "iced_video";
let app_sink = pipeline
.by_name(app_sink_name)
.and_then(|elem| elem.downcast::<gst_app::AppSink>().ok())
.ok_or(Error::AppSink(app_sink_name.to_string()))?;
let pad = app_sink.pads().first().cloned().unwrap(); let pad = app_sink.pads().first().cloned().unwrap();
pipeline.set_state(gst::State::Playing)?; pipeline.set_state(gst::State::Playing)?;
@ -181,67 +224,36 @@ impl Video {
.get::<gst::Fraction>("framerate") .get::<gst::Fraction>("framerate")
.map_err(|_| Error::Caps)?; .map_err(|_| Error::Caps)?;
let duration = if !live { let duration = std::time::Duration::from_nanos(
std::time::Duration::from_nanos( pipeline
pipeline .query_duration::<gst::ClockTime>()
.query_duration::<gst::ClockTime>() .map(|duration| duration.nseconds())
.ok_or(Error::Duration)? .unwrap_or(0),
.nseconds(), );
)
} else {
std::time::Duration::from_secs(0)
};
// NV12 = 12bpp // NV12 = 12bpp
let frame_buf = vec![0u8; (width as usize * height as usize * 3).div_ceil(2)]; let frame = vec![0u8; (width as usize * height as usize * 3).div_ceil(2)];
let frame = Arc::new(Mutex::new(frame_buf));
let frame_ref = Arc::clone(&frame);
let upload_frame = Arc::new(AtomicBool::new(true));
let upload_frame_ref = Arc::clone(&upload_frame);
let (notify, wait) = mpsc::channel();
app_sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |sink| {
let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
let mut frame_ref = frame_ref.lock().map_err(|_| gst::FlowError::Error)?;
let frame_len = frame_ref.len();
frame_ref.copy_from_slice(&map.as_slice()[..frame_len]);
upload_frame_ref.store(true, Ordering::SeqCst);
notify.send(()).map_err(|_| gst::FlowError::Error)?;
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
Ok(Video(RefCell::new(Internal { Ok(Video(RefCell::new(Internal {
id, id,
bus: pipeline.bus().unwrap(), bus: pipeline.bus().unwrap(),
source: pipeline, source: pipeline,
app_sink,
width, width,
height, height,
framerate: framerate.numer() as f64 / framerate.denom() as f64, framerate: framerate.numer() as f64 / framerate.denom() as f64,
duration, duration,
speed: 1.0,
frame, frame: Arc::new(Mutex::new(frame)),
upload_frame, upload_frame: AtomicBool::new(false),
wait,
paused: false, paused: false,
muted: false, muted: false,
looping: false, looping: false,
is_eos: false, is_eos: false,
restart_stream: false, restart_stream: false,
next_redraw: Instant::now(),
}))) })))
} }
@ -267,7 +279,7 @@ impl Video {
/// Set if the audio is muted or not, without changing the volume. /// Set if the audio is muted or not, without changing the volume.
pub fn set_muted(&mut self, muted: bool) { pub fn set_muted(&mut self, muted: bool) {
let mut inner = self.0.borrow_mut(); let inner = self.0.get_mut();
inner.muted = muted; inner.muted = muted;
inner.source.set_property("mute", muted); inner.source.set_property("mute", muted);
} }
@ -293,12 +305,12 @@ impl Video {
/// Set if the media will loop or not. /// Set if the media will loop or not.
#[inline(always)] #[inline(always)]
pub fn set_looping(&mut self, looping: bool) { pub fn set_looping(&mut self, looping: bool) {
self.0.borrow_mut().looping = looping; self.0.get_mut().looping = looping;
} }
/// Set if the media is paused or not. /// Set if the media is paused or not.
pub fn set_paused(&mut self, paused: bool) { pub fn set_paused(&mut self, paused: bool) {
let mut inner = self.0.borrow_mut(); let inner = self.0.get_mut();
inner.set_paused(paused); inner.set_paused(paused);
} }
@ -309,9 +321,21 @@ impl Video {
} }
/// Jumps to a specific position in the media. /// Jumps to a specific position in the media.
/// The seeking is not perfectly accurate. /// Passing `true` to the `accurate` parameter will result in more accurate seeking,
pub fn seek(&mut self, position: impl Into<Position>) -> Result<(), Error> { /// however, it is also slower. For most seeks (e.g., scrubbing) this is not needed.
self.0.borrow_mut().seek(position) pub fn seek(&mut self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
self.0.get_mut().seek(position, accurate)
}
/// Set the playback speed of the media.
/// The default speed is `1.0`.
pub fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
self.0.get_mut().set_speed(speed)
}
/// Get the current playback speed.
pub fn speed(&self) -> f64 {
self.0.borrow().speed
} }
/// Get the current playback position in time. /// Get the current playback position in time.
@ -342,31 +366,66 @@ impl Video {
/// It's best to call this at the very start of playback, otherwise the position may shift. /// It's best to call this at the very start of playback, otherwise the position may shift.
pub fn thumbnails(&mut self, positions: &[Position]) -> Result<Vec<img::Handle>, Error> { pub fn thumbnails(&mut self, positions: &[Position]) -> Result<Vec<img::Handle>, Error> {
let paused = self.paused(); let paused = self.paused();
let muted = self.muted();
let pos = self.position(); let pos = self.position();
self.set_paused(false); self.set_paused(false);
let out = positions self.set_muted(true);
.iter()
.map(|&pos| { let out = {
self.seek(pos)?; let inner = self.0.borrow();
let inner = self.0.borrow(); let width = inner.width;
// for some reason waiting for two frames is necessary let height = inner.height;
// maybe in a small window between seek and wait the old frame comes in? positions
inner.wait.recv().map_err(|_| Error::Sync)?; .iter()
inner.wait.recv().map_err(|_| Error::Sync)?; .map(|&pos| {
Ok(img::Handle::from_rgba( inner.seek(pos, true)?;
inner.width as _, inner.read_frame().map_err(|_| Error::Sync)?;
inner.height as _, Ok(img::Handle::from_rgba(
self.0 inner.width as _,
.borrow() inner.height as _,
.frame yuv_to_rgba(
.lock() &inner.frame.lock().map_err(|_| Error::Lock)?,
.map_err(|_| Error::Lock)? width as _,
.clone(), height as _,
)) ),
}) ))
.collect(); })
.collect()
};
self.set_paused(paused); self.set_paused(paused);
self.seek(pos)?; self.set_muted(muted);
self.seek(pos, true)?;
self.0.borrow().read_frame().map_err(|_| Error::Sync)?;
out out
} }
} }
/// Convert an NV12 frame (planar Y followed by an interleaved half-resolution
/// UV plane) into packed RGBA8.
///
/// `yuv` must hold at least `width * height * 3 / 2` bytes. Uses BT.601
/// limited-range coefficients; out-of-range results are clamped to `0..=255`
/// by Rust's saturating float-to-int `as` cast. The alpha channel is always
/// fully opaque (`0xFF`).
fn yuv_to_rgba(yuv: &[u8], width: u32, height: u32) -> Vec<u8> {
    // The UV plane starts right after the full-resolution Y plane.
    let uv_start = width * height;
    // Exactly 4 output bytes per pixel — preallocate to avoid regrowth.
    let mut rgba = Vec::with_capacity(width as usize * height as usize * 4);
    for row in 0..height {
        for col in 0..width {
            // Each 2x2 block of pixels shares one interleaved (U, V) pair.
            let uv_i = (uv_start + width * (row / 2) + col / 2 * 2) as usize;
            let y = yuv[(row * width + col) as usize] as f32;
            let u = yuv[uv_i] as f32;
            let v = yuv[uv_i + 1] as f32;
            // BT.601 limited-range YUV -> RGB.
            let r = 1.164 * (y - 16.0) + 1.596 * (v - 128.0);
            let g = 1.164 * (y - 16.0) - 0.813 * (v - 128.0) - 0.391 * (u - 128.0);
            let b = 1.164 * (y - 16.0) + 2.018 * (u - 128.0);
            // `as u8` saturates, clamping out-of-gamut values to 0..=255.
            rgba.extend_from_slice(&[r as u8, g as u8, b as u8, 0xFF]);
        }
    }
    rgba
}

View file

@ -110,7 +110,9 @@ where
_cursor: advanced::mouse::Cursor, _cursor: advanced::mouse::Cursor,
_viewport: &iced::Rectangle, _viewport: &iced::Rectangle,
) { ) {
let inner = self.video.0.borrow(); let inner = self.video.0.borrow_mut();
let _ = inner.read_frame();
renderer.draw_primitive( renderer.draw_primitive(
layout.bounds(), layout.bounds(),
VideoPrimitive::new( VideoPrimitive::new(
@ -178,10 +180,9 @@ where
} }
let redraw_interval = 1.0 / inner.framerate; let redraw_interval = 1.0 / inner.framerate;
let until_redraw = shell.request_redraw(iced::window::RedrawRequest::At(
redraw_interval - (now - inner.next_redraw).as_secs_f64() % redraw_interval; now + Duration::from_secs_f64(redraw_interval),
inner.next_redraw = now + Duration::from_secs_f64(until_redraw); ));
shell.request_redraw(iced::window::RedrawRequest::At(inner.next_redraw));
if let Some(on_new_frame) = self.on_new_frame.clone() { if let Some(on_new_frame) = self.on_new_frame.clone() {
shell.publish(on_new_frame); shell.publish(on_new_frame);