switch to playbin, improve public api, performance

This commit is contained in:
jazzfool 2024-09-30 19:50:47 +10:00
parent 36d2b90f1d
commit c44141c717
5 changed files with 193 additions and 128 deletions

View file

@ -54,5 +54,10 @@ fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
rgb.y = dot(yuv, yuv2g);
rgb.z = dot(yuv, yuv2b);
return vec4<f32>(pow(rgb, vec3<f32>(2.2)), 1.0);
let threshold = rgb <= vec3<f32>(0.04045);
let hi = pow((rgb + vec3<f32>(0.055)) / vec3<f32>(1.055), vec3<f32>(2.4));
let lo = rgb * vec3<f32>(1.0 / 12.92);
rgb = select(hi, lo, threshold);
return vec4<f32>(rgb, 1.0);
}

View file

@ -2,12 +2,10 @@ use crate::Error;
use gstreamer as gst;
use gstreamer_app as gst_app;
use gstreamer_app::prelude::*;
use gstreamer_base as gst_base;
use iced::widget::image as img;
use std::cell::RefCell;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{mpsc, Arc, Mutex};
use std::time::Instant;
use std::sync::{Arc, Mutex};
/// Position in the media.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
@ -46,36 +44,80 @@ pub(crate) struct Internal {
pub(crate) bus: gst::Bus,
pub(crate) source: gst::Pipeline,
pub(crate) app_sink: gst_app::AppSink,
pub(crate) width: i32,
pub(crate) height: i32,
pub(crate) framerate: f64,
pub(crate) duration: std::time::Duration,
pub(crate) speed: f64,
pub(crate) frame: Arc<Mutex<Vec<u8>>>, // ideally would be Arc<Mutex<[T]>>
pub(crate) upload_frame: Arc<AtomicBool>,
pub(crate) wait: mpsc::Receiver<()>,
pub(crate) frame: Arc<Mutex<Vec<u8>>>,
pub(crate) upload_frame: AtomicBool,
pub(crate) paused: bool,
pub(crate) muted: bool,
pub(crate) looping: bool,
pub(crate) is_eos: bool,
pub(crate) restart_stream: bool,
pub(crate) next_redraw: Instant,
}
impl Internal {
pub(crate) fn seek(&self, position: impl Into<Position>) -> Result<(), Error> {
self.source.seek_simple(
gst::SeekFlags::FLUSH,
gst::GenericFormattedValue::from(position.into()),
/// Seek the pipeline to `position`, preserving the current playback `speed`.
///
/// `accurate` trades speed for precision: when `true`, `SeekFlags::ACCURATE`
/// is added so the seek lands on the exact position (slower); when `false`
/// the seek may snap to a nearby keyframe (fast scrubbing).
/// Returns `Err` if the underlying GStreamer seek fails.
pub(crate) fn seek(&self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
let position = position.into();
// gstreamer complains if the start & end value types aren't the same
let end = match &position {
Position::Time(_) => Position::Time(std::time::Duration::ZERO),
Position::Frame(_) => Position::Frame(0),
};
// Full `seek` (not `seek_simple`) so the stored playback rate is
// re-applied; FLUSH drops queued data so the new position shows at once.
self.source.seek(
self.speed,
gst::SeekFlags::FLUSH
| gst::SeekFlags::KEY_UNIT
// NOTE(review): KEY_UNIT and ACCURATE are both set when `accurate`
// is true — confirm ACCURATE takes precedence in this gst version.
| if accurate {
gst::SeekFlags::ACCURATE
} else {
gst::SeekFlags::empty()
},
gst::SeekType::Set,
gst::GenericFormattedValue::from(position),
gst::SeekType::End,
gst::GenericFormattedValue::from(end),
)?;
Ok(())
}
pub(crate) fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
let Some(position) = self.source.query_position::<gst::ClockTime>() else {
return Err(Error::Caps);
};
if speed > 0.0 {
self.source.seek(
speed,
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
gst::SeekType::Set,
position,
gst::SeekType::End,
gst::ClockTime::from_seconds(0),
)?;
} else {
self.source.seek(
speed,
gst::SeekFlags::FLUSH | gst::SeekFlags::ACCURATE,
gst::SeekType::Set,
gst::ClockTime::from_seconds(0),
gst::SeekType::Set,
position,
)?;
}
self.speed = speed;
Ok(())
}
pub(crate) fn restart_stream(&mut self) -> Result<(), Error> {
self.is_eos = false;
self.set_paused(false);
self.seek(0)?;
self.seek(0, false)?;
Ok(())
}
@ -94,6 +136,27 @@ impl Internal {
self.restart_stream = true;
}
}
pub(crate) fn read_frame(&self) -> Result<(), gst::FlowError> {
if self.source.state(None).1 != gst::State::Playing {
return Ok(());
}
let sample = self
.app_sink
.pull_sample()
.map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
let mut frame = self.frame.lock().map_err(|_| gst::FlowError::Error)?;
let frame_len = frame.len();
frame.copy_from_slice(&map.as_slice()[..frame_len]);
self.upload_frame.swap(true, Ordering::SeqCst);
Ok(())
}
}
/// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream).
@ -111,59 +174,39 @@ impl Drop for Video {
impl Video {
/// Create a new video player from a given video which loads from `uri`.
/// Note that live sourced will report the duration to be zero.
/// Note that live sources will report the duration to be zero.
pub fn new(uri: &url::Url) -> Result<Self, Error> {
let pipeline = format!("uridecodebin uri=\"{}\" ! videoconvert ! videoscale ! appsink name=iced_video caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1", uri.as_str());
Self::from_pipeline(pipeline, None)
}
/// Creates a new video based on GStreamer pipeline in a same format as used in gst-launch-1.0.
/// Expects an appsink plugin to be present with name set to `iced_video` and caps to
/// `video/x-raw,format=NV12,pixel-aspect-ratio=1/1`
pub fn from_pipeline<S: AsRef<str>>(pipeline: S, is_live: Option<bool>) -> Result<Self, Error> {
gst::init()?;
let pipeline = format!("playbin uri=\"{}\" video-sink=\"videoconvert ! videoscale ! appsink name=iced_video caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1\"", uri.as_str());
let pipeline = gst::parse::launch(pipeline.as_ref())?
.downcast::<gst::Pipeline>()
.map_err(|_| Error::Cast)?;
Self::from_gst_pipeline(pipeline, is_live)
let video_sink: gst::Element = pipeline.property("video-sink");
let pad = video_sink.pads().get(0).cloned().unwrap();
let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
let bin = pad
.parent_element()
.unwrap()
.downcast::<gst::Bin>()
.unwrap();
let app_sink = bin.by_name("iced_video").unwrap();
let app_sink = app_sink.downcast::<gst_app::AppSink>().unwrap();
Self::from_gst_pipeline(pipeline, app_sink)
}
/// Creates a new video based on GStreamer pipeline.
/// Expects an appsink plugin to be present with name set to `iced_video` and caps to
/// `video/x-raw,format=NV12,pixel-aspect-ratio=1/1`
/// Creates a new video based on an existing GStreamer pipeline and appsink.
/// Expects an `appsink` plugin with `caps=video/x-raw,format=NV12,pixel-aspect-ratio=1/1`.
pub fn from_gst_pipeline(
pipeline: gst::Pipeline,
is_live: Option<bool>,
app_sink: gst_app::AppSink,
) -> Result<Self, Error> {
gst::init()?;
static NEXT_ID: AtomicU64 = AtomicU64::new(0);
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
let mut live = false;
match is_live {
Some(is_live) => live = is_live,
None => {
pipeline
.iterate_sources()
.foreach(|elem| {
if let Ok(src) = elem.downcast::<gst_base::BaseSrc>() {
if src.is_live() {
live = true;
}
}
})
.unwrap();
}
};
let app_sink_name = "iced_video";
let app_sink = pipeline
.by_name(app_sink_name)
.and_then(|elem| elem.downcast::<gst_app::AppSink>().ok())
.ok_or(Error::AppSink(app_sink_name.to_string()))?;
let pad = app_sink.pads().first().cloned().unwrap();
pipeline.set_state(gst::State::Playing)?;
@ -181,67 +224,36 @@ impl Video {
.get::<gst::Fraction>("framerate")
.map_err(|_| Error::Caps)?;
let duration = if !live {
std::time::Duration::from_nanos(
pipeline
.query_duration::<gst::ClockTime>()
.ok_or(Error::Duration)?
.nseconds(),
)
} else {
std::time::Duration::from_secs(0)
};
let duration = std::time::Duration::from_nanos(
pipeline
.query_duration::<gst::ClockTime>()
.map(|duration| duration.nseconds())
.unwrap_or(0),
);
// NV12 = 12bpp
let frame_buf = vec![0u8; (width as usize * height as usize * 3).div_ceil(2)];
let frame = Arc::new(Mutex::new(frame_buf));
let frame_ref = Arc::clone(&frame);
let upload_frame = Arc::new(AtomicBool::new(true));
let upload_frame_ref = Arc::clone(&upload_frame);
let (notify, wait) = mpsc::channel();
app_sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |sink| {
let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
let mut frame_ref = frame_ref.lock().map_err(|_| gst::FlowError::Error)?;
let frame_len = frame_ref.len();
frame_ref.copy_from_slice(&map.as_slice()[..frame_len]);
upload_frame_ref.store(true, Ordering::SeqCst);
notify.send(()).map_err(|_| gst::FlowError::Error)?;
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
let frame = vec![0u8; (width as usize * height as usize * 3).div_ceil(2)];
Ok(Video(RefCell::new(Internal {
id,
bus: pipeline.bus().unwrap(),
source: pipeline,
app_sink,
width,
height,
framerate: framerate.numer() as f64 / framerate.denom() as f64,
duration,
speed: 1.0,
frame,
upload_frame,
wait,
frame: Arc::new(Mutex::new(frame)),
upload_frame: AtomicBool::new(false),
paused: false,
muted: false,
looping: false,
is_eos: false,
restart_stream: false,
next_redraw: Instant::now(),
})))
}
@ -267,7 +279,7 @@ impl Video {
/// Set if the audio is muted or not, without changing the volume.
pub fn set_muted(&mut self, muted: bool) {
let mut inner = self.0.borrow_mut();
let inner = self.0.get_mut();
inner.muted = muted;
inner.source.set_property("mute", muted);
}
@ -293,12 +305,12 @@ impl Video {
/// Set if the media will loop or not.
#[inline(always)]
pub fn set_looping(&mut self, looping: bool) {
self.0.borrow_mut().looping = looping;
self.0.get_mut().looping = looping;
}
/// Set if the media is paused or not.
pub fn set_paused(&mut self, paused: bool) {
let mut inner = self.0.borrow_mut();
let inner = self.0.get_mut();
inner.set_paused(paused);
}
@ -309,9 +321,21 @@ impl Video {
}
/// Jumps to a specific position in the media.
/// The seeking is not perfectly accurate.
pub fn seek(&mut self, position: impl Into<Position>) -> Result<(), Error> {
self.0.borrow_mut().seek(position)
/// Passing `true` to the `accurate` parameter will result in more accurate seeking,
/// however, it is also slower. For most seeks (e.g., scrubbing) this is not needed.
pub fn seek(&mut self, position: impl Into<Position>, accurate: bool) -> Result<(), Error> {
self.0.get_mut().seek(position, accurate)
}
/// Set the playback speed of the media.
/// The default speed is `1.0`.
pub fn set_speed(&mut self, speed: f64) -> Result<(), Error> {
self.0.get_mut().set_speed(speed)
}
/// Get the current playback speed.
pub fn speed(&self) -> f64 {
self.0.borrow().speed
}
/// Get the current playback position in time.
@ -342,31 +366,66 @@ impl Video {
/// It's best to call this at the very start of playback, otherwise the position may shift.
pub fn thumbnails(&mut self, positions: &[Position]) -> Result<Vec<img::Handle>, Error> {
let paused = self.paused();
let muted = self.muted();
let pos = self.position();
self.set_paused(false);
let out = positions
.iter()
.map(|&pos| {
self.seek(pos)?;
let inner = self.0.borrow();
// for some reason waiting for two frames is necessary
// maybe in a small window between seek and wait the old frame comes in?
inner.wait.recv().map_err(|_| Error::Sync)?;
inner.wait.recv().map_err(|_| Error::Sync)?;
Ok(img::Handle::from_rgba(
inner.width as _,
inner.height as _,
self.0
.borrow()
.frame
.lock()
.map_err(|_| Error::Lock)?
.clone(),
))
})
.collect();
self.set_muted(true);
let out = {
let inner = self.0.borrow();
let width = inner.width;
let height = inner.height;
positions
.iter()
.map(|&pos| {
inner.seek(pos, true)?;
inner.read_frame().map_err(|_| Error::Sync)?;
Ok(img::Handle::from_rgba(
inner.width as _,
inner.height as _,
yuv_to_rgba(
&inner.frame.lock().map_err(|_| Error::Lock)?,
width as _,
height as _,
),
))
})
.collect()
};
self.set_paused(paused);
self.seek(pos)?;
self.set_muted(muted);
self.seek(pos, true)?;
self.0.borrow().read_frame().map_err(|_| Error::Sync)?;
out
}
}
/// Convert a packed NV12 frame (YUV 4:2:0, luma plane followed by an
/// interleaved UV plane) into tightly packed RGBA8 bytes.
///
/// `yuv` must hold at least `width * height * 3 / 2` bytes. The output is
/// `width * height * 4` bytes with alpha fixed at `0xFF`. Uses the BT.601
/// limited-range conversion matrix; `as u8` saturates out-of-range values
/// to 0..=255.
fn yuv_to_rgba(yuv: &[u8], width: u32, height: u32) -> Vec<u8> {
    let uv_start = width * height;
    // Output size is known exactly — reserve once instead of growing.
    // (usize math so huge frames don't overflow the u32 intermediate.)
    let mut rgba = Vec::with_capacity(width as usize * height as usize * 4);
    for row in 0..height {
        for col in 0..width {
            // Each 2x2 block of luma samples shares one interleaved (U, V)
            // pair in the second plane.
            let uv_i = (uv_start + width * (row / 2) + col / 2 * 2) as usize;
            let y = yuv[(row * width + col) as usize] as f32;
            let u = yuv[uv_i] as f32;
            let v = yuv[uv_i + 1] as f32;
            let r = 1.164 * (y - 16.0) + 1.596 * (v - 128.0);
            let g = 1.164 * (y - 16.0) - 0.813 * (v - 128.0) - 0.391 * (u - 128.0);
            let b = 1.164 * (y - 16.0) + 2.018 * (u - 128.0);
            rgba.extend_from_slice(&[r as u8, g as u8, b as u8, 0xFF]);
        }
    }
    rgba
}

View file

@ -110,7 +110,9 @@ where
_cursor: advanced::mouse::Cursor,
_viewport: &iced::Rectangle,
) {
let inner = self.video.0.borrow();
let inner = self.video.0.borrow_mut();
let _ = inner.read_frame();
renderer.draw_primitive(
layout.bounds(),
VideoPrimitive::new(
@ -178,10 +180,9 @@ where
}
let redraw_interval = 1.0 / inner.framerate;
let until_redraw =
redraw_interval - (now - inner.next_redraw).as_secs_f64() % redraw_interval;
inner.next_redraw = now + Duration::from_secs_f64(until_redraw);
shell.request_redraw(iced::window::RedrawRequest::At(inner.next_redraw));
shell.request_redraw(iced::window::RedrawRequest::At(
now + Duration::from_secs_f64(redraw_interval),
));
if let Some(on_new_frame) = self.on_new_frame.clone() {
shell.publish(on_new_frame);