store frames directly from appsink (fixes sync)

jazzfool 2020-08-23 15:55:00 +10:00
parent 153a84d85c
commit 828fde9976
3 changed files with 14 additions and 29 deletions
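In short, the crossbeam channel (and the reset bookkeeping that drained it) is replaced by a single Arc<Mutex<Option<img::Handle>>> shared between the appsink callback and the player, so the UI always reads the newest decoded frame instead of working through a queue. Below is a minimal standalone sketch of that latest-value pattern, with a plain thread standing in for the appsink callback and a Vec<u8> standing in for img::Handle; none of these names come from the crate itself.

use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;

// Stand-in for the decoded frame; the real code stores an iced img::Handle here.
type Frame = Vec<u8>;

fn main() {
    // A single shared slot that only ever holds the most recent frame.
    let latest: Arc<Mutex<Option<Frame>>> = Arc::new(Mutex::new(None));

    // Producer: plays the role of the appsink new_sample callback.
    let producer_slot = Arc::clone(&latest);
    thread::spawn(move || {
        for n in 0u8..120 {
            // Overwrite instead of queueing, so the reader can never fall behind
            // a backlog of frames.
            *producer_slot.lock().unwrap() = Some(vec![n; 4]);
            thread::sleep(Duration::from_millis(33)); // ~30 fps
        }
    });

    // Consumer: plays the role of frame_image() being called on each UI tick.
    for _ in 0..5 {
        thread::sleep(Duration::from_millis(100));
        if let Some(frame) = latest.lock().unwrap().clone() {
            println!("newest frame marker: {}", frame[0]);
        }
    }
}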


@@ -12,7 +12,6 @@ gstreamer-app = "0.16" # appsink
 glib = "0.10" # gobject traits and error type
 tokio = { version = "0.2", features = ["time", "stream"] }# time subscription (every frame)
 thiserror = "1"
-crossbeam-channel = "0.4"
 url = "2" # media uri
 num-rational = "0.3" # framerates come in rationals
 num-traits = "0.2" # convert rationals to floats (ToPrimitive)


@@ -16,7 +16,7 @@ Features:
 - Small (around 300 lines).
 Limitations (hopefully to be fixed):
-- Lazy frame syncing. Playback is usually a few frames and the decoder and audio runs ahead. Seeking is also latent.
+- GStreamer hardware acceleration not working? (leads to choppy playback in some scenarios).
 - GStreamer is a bit annoying to set up on Windows.
 This is a "composable" instead of a `iced::Widget`. This is because `Widget`s don't support subscriptions (yet?). Once Iced gets animation support (i.e. widgets scheduling a time to update), this can become a widget.


@@ -3,6 +3,7 @@ use gstreamer as gst;
 use gstreamer_app as gst_app;
 use iced::{image as img, Image, Subscription};
 use num_traits::ToPrimitive;
+use std::sync::{Arc, Mutex};
 use std::time::Duration;
 use thiserror::Error;
@@ -43,12 +44,8 @@ pub struct VideoPlayer {
 framerate: f64,
 duration: std::time::Duration,
-frame_rx: crossbeam_channel::Receiver<img::Handle>,
-frame: Option<img::Handle>,
+frame: Arc<Mutex<Option<img::Handle>>>,
 pause: bool,
-// if true, then the playback resets to the most recent frame.
-// set this to true whenever playback is changed (e.g. pause, seek, etc).
-reset: bool,
 }

 impl Drop for VideoPlayer {
@@ -64,8 +61,6 @@ impl VideoPlayer {
 pub fn new(uri: &url::Url) -> Result<Self, Error> {
 gst::init()?;
-let (frame_tx, frame_rx) = crossbeam_channel::unbounded();
 let source = gst::parse_launch(&format!("playbin uri=\"{}\" video-sink=\"videoconvert ! videoscale ! appsink name=app_sink caps=video/x-raw,format=BGRA,pixel-aspect-ratio=1/1\"", uri.as_str()))?;
 let source = source.downcast::<gst::Bin>().unwrap();
@@ -86,6 +81,9 @@ impl VideoPlayer {
 let app_sink = bin.get_by_name("app_sink").unwrap();
 let app_sink = app_sink.downcast::<gst_app::AppSink>().unwrap();
+let frame = Arc::new(Mutex::new(None));
+let frame_ref = Arc::clone(&frame);
 app_sink.set_callbacks(
 gst_app::AppSinkCallbacks::builder()
 .new_sample(move |sink| {
@@ -106,13 +104,12 @@ impl VideoPlayer {
 .map_err(|_| gst::FlowError::Error)?
 .ok_or(gst::FlowError::Error)?;
-frame_tx
-.send(img::Handle::from_pixels(
+*frame_ref.lock().map_err(|_| gst::FlowError::Error)? =
+Some(img::Handle::from_pixels(
 width as _,
 height as _,
 map.as_slice().to_owned(),
-))
-.map_err(|_| gst::FlowError::Error)?;
+));
 Ok(gst::FlowSuccess::Ok)
 })
@@ -162,10 +159,8 @@ impl VideoPlayer {
 .to_f64().unwrap(/* if the video framerate is bad then it would've been implicitly caught far earlier */),
 duration,
-frame_rx,
-frame: None,
+frame,
 pause: false,
-reset: true,
 })
 }
@@ -205,7 +200,6 @@ impl VideoPlayer {
 /// Set if the media is paused or not.
 pub fn set_paused(&mut self, pause: bool) {
-self.reset = true;
 self.pause = pause;
 self.source
 .set_state(if pause {
@@ -226,7 +220,6 @@ impl VideoPlayer {
 ///
 /// The position is converted to nanoseconds, so any duration with values more significant that nanoseconds is truncated.
 pub fn seek(&mut self, position: std::time::Duration) -> Result<(), Error> {
-self.reset = true;
 self.source.seek_simple(
 gst::SeekFlags::empty(),
 gst::GenericFormattedValue::Time(gst::ClockTime::from_nseconds(
@@ -259,22 +252,13 @@ impl VideoPlayer {
 panic!("{:#?}", err);
 }
 }
-if self.reset {
-self.reset = false;
-if let Some(frame) = self.frame_rx.iter().nth(self.frame_rx.len() - 1) {
-self.frame = Some(frame);
-}
-} else if let Ok(frame) = self.frame_rx.try_recv() {
-self.frame = Some(frame);
-}
 }
 }
 }

 pub fn subscription(&self) -> Subscription<VideoPlayerMessage> {
 if !self.pause {
-time::every(Duration::from_secs_f64(1.0 / self.framerate))
+time::every(Duration::from_secs_f64(0.5 / self.framerate))
 .map(|_| VideoPlayerMessage::NextFrame)
 } else {
 Subscription::none()
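Note the new subscription interval: it ticks at 0.5 / framerate, i.e. twice per video frame, so a redraw is at most roughly half a frame behind the sample most recently stored by the appsink callback. A tiny illustrative helper (the function name is ours, not part of the crate):

use std::time::Duration;

// Illustrative only: the interval used by the subscription above.
fn poll_interval(framerate: f64) -> Duration {
    // Tick twice per video frame so a freshly stored sample is picked up quickly.
    Duration::from_secs_f64(0.5 / framerate)
}

fn main() {
    // For a 30 fps video this is roughly 16.7 ms, against a 33.3 ms frame period.
    println!("{:?}", poll_interval(30.0));
}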
@@ -284,6 +268,8 @@ impl VideoPlayer {
 /// Get the image handle of the current frame.
 pub fn frame_image(&self) -> img::Handle {
 self.frame
+.lock()
+.expect("failed to lock frame")
 .clone()
 .unwrap_or_else(|| img::Handle::from_pixels(0, 0, vec![]))
 }
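For context, a host application would drive this by forwarding the messages from subscription() into its update logic and then rebuilding its view, roughly like the hypothetical helper below; video_view is our own sketch, and only VideoPlayer and frame_image() come from the diff above.

use iced::Image;
// (plus a `use` of VideoPlayer from this crate in a real host application)

// Hypothetical view helper in the embedding iced application.
fn video_view(player: &VideoPlayer) -> Image {
    // frame_image() clones the handle out of the shared Mutex, falling back to
    // an empty image until the appsink callback has stored the first sample.
    Image::new(player.frame_image())
}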