diff --git a/.media/screenshot.png b/.media/screenshot.png
index 5401bea..185a8fb 100644
Binary files a/.media/screenshot.png and b/.media/screenshot.png differ
diff --git a/.media/test.mp4 b/.media/test.mp4
index f5b4b4e..1243d3f 100644
Binary files a/.media/test.mp4 and b/.media/test.mp4 differ
diff --git a/Cargo.lock b/Cargo.lock
index 792ff19..d42711b 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -18,15 +18,6 @@
 version = "0.1.8"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "c71b1793ee61086797f5c80b6efa2b8ffa6d5dd703f118545808a7f2e27f7046"
 
-[[package]]
-name = "addr2line"
-version = "0.21.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "8a30b2e23b9e17a9f90641c7ab1549cd9b44f296d3ccbf309d2863cfe398a0cb"
-dependencies = [
- "gimli",
-]
-
 [[package]]
 name = "adler"
 version = "1.0.2"
@@ -142,21 +133,6 @@
 version = "1.1.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "d468802bab17cbc0cc575e9b053f41e72aa36bfa6b7f55e3529ffa43161b97fa"
 
-[[package]]
-name = "backtrace"
-version = "0.3.69"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "2089b7e3f35b9dd2d0ed921ead4f6d318c27680d4a5bd167b3ee120edb105837"
-dependencies = [
- "addr2line",
- "cc",
- "cfg-if",
- "libc",
- "miniz_oxide",
- "object",
- "rustc-demangle",
-]
-
 [[package]]
 name = "bit-set"
 version = "0.5.3"
@@ -944,12 +920,6 @@
 dependencies = [
  "weezl",
 ]
 
-[[package]]
-name = "gimli"
-version = "0.28.1"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "4271d37baee1b8c7e4b708028c57d816cf9d2434acb33a549475f78c181f6253"
-
 [[package]]
 name = "gio-sys"
 version = "0.19.0"
@@ -1341,7 +1311,6 @@
 dependencies = [
  "futures",
  "iced_core 0.12.0",
  "log",
- "tokio",
  "wasm-bindgen-futures",
  "wasm-timer",
 ]
@@ -1457,8 +1426,8 @@
 dependencies = [
  "gstreamer-app",
  "iced",
  "iced_native",
+ "iced_wgpu",
  "thiserror",
- "tokio",
  "url",
 ]
@@ -1991,15 +1960,6 @@
 dependencies = [
  "objc",
 ]
 
-[[package]]
-name = "object"
-version = "0.32.2"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "a6a622008b6e321afc04970976f62ee297fdbaa6f95318ca343e3eebb9648441"
-dependencies = [
- "memchr",
-]
-
 [[package]]
 name = "once_cell"
 version = "1.19.0"
@@ -2385,12 +2345,6 @@
 version = "0.19.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "3cd14fd5e3b777a7422cca79358c57a8f6e3a703d9ac187448d0daf220c2407f"
 
-[[package]]
-name = "rustc-demangle"
-version = "0.1.23"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "d626bb9dae77e28219937af045c257c28bfd3f69333c512553507f5f9798cb76"
-
 [[package]]
 name = "rustc-hash"
 version = "1.1.0"
@@ -2798,17 +2752,6 @@
 version = "0.1.1"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
 
-[[package]]
-name = "tokio"
-version = "1.36.0"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "61285f6515fa018fb2d1e46eb21223fff441ee8db5d0f1435e8ab4f5cdb80931"
-dependencies = [
- "backtrace",
- "num_cpus",
- "pin-project-lite",
-]
-
 [[package]]
 name = "toml"
 version = "0.5.11"
diff --git a/Cargo.toml b/Cargo.toml
index 51bb310..c335021 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -6,12 +6,12 @@ edition = "2018"
 resolver = "2"
 
 [dependencies]
-iced = { version = "0.12", features = ["image", "tokio"] }
+iced = { version = "0.12", features = ["image", "advanced", "wgpu"] }
 iced_native = "0.10"
+iced_wgpu = "0.12"
gstreamer = "0.22" gstreamer-app = "0.22" # appsink glib = "0.19" # gobject traits and error type -tokio = { version = "1", features = ["time"] } thiserror = "1" url = "2" # media uri diff --git a/README.md b/README.md index 5878e2b..8ab35e6 100644 --- a/README.md +++ b/README.md @@ -13,67 +13,47 @@ Features: - Video buffering when streaming on a network. - Audio support. - Programmatic control. -- Small (around 300 lines). - Capture thumbnails from a set of timestamps. +- Decent performance. Skips a lot of the overhead from Iced `Image` and copies frame data directly to a WGPU texture, and renders using a custom WGPU render pipeline. + For a very subjective reference, I can play back 1080p HEVC video with hardware decoding without hitches, in debug mode. Limitations (hopefully to be fixed): -- GStreamer hardware acceleration not working? (leads to choppy playback in some scenarios). - GStreamer is a bit annoying to set up on Windows. -This is a "composable" instead of a `iced::Widget`. This is because `Widget`s don't support subscriptions (yet?). Once Iced gets animation support (i.e. widgets scheduling a time to update), this can become a widget. - The player **does not** come with any surrounding GUI controls, but they should be quite easy to implement should you need them. +See the "minimal" example for a demonstration on how you could implement pausing, looping, and seeking. ## Example Usage ```rust -use iced_video_player::{VideoPlayerMessage, VideoPlayer}; -use iced::{executor, Application, Command, Element, Subscription}; +use iced_video_player::{Video, VideoPlayer}; +use iced::{Sandbox, Element}; fn main() { App::run(Default::default()); } -#[derive(Debug)] -enum Message { - VideoPlayerMessage(VideoPlayerMessage), -} - struct App { - video: VideoPlayer, + video: Video, } -impl Application for App { - type Executor = executor::Default; - type Message = Message; - type Flags = (); +impl Sandbox for App { + type Message = (); - fn new(_flags: ()) -> (Self, Command) { - ( - App { - video: VideoPlayer::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap()).unwrap(), - }, - Command::none(), - ) + fn new() -> Self { + App { + video: Video::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap()).unwrap(), + } } fn title(&self) -> String { String::from("Video Player") } - fn update(&mut self, message: Message) -> Command { - match message { - Message::VideoPlayerMessage(msg) => self.video.update(msg), - } - Command::none() - } + fn update(&mut self, _message: ()) {} - fn subscription(&self) -> Subscription { - self.video.subscription().map(Message::VideoPlayerMessage) - } - - fn view(&mut self) -> Element { - self.video.frame_view().into() + fn view(&mut self) -> Element<()> { + VideoPlayer::new(&self.video).into() } } ``` diff --git a/examples/minimal.rs b/examples/minimal.rs index dbc3e5f..30a2d1d 100644 --- a/examples/minimal.rs +++ b/examples/minimal.rs @@ -1,9 +1,9 @@ use iced::{ - executor, - widget::{Button, Column, Row, Text}, - Application, Command, Element, Subscription, Theme, + widget::{Button, Column, Row, Slider, Text}, + Element, Sandbox, }; -use iced_video_player::{VideoPlayer, VideoPlayerMessage}; +use iced_video_player::{Video, VideoPlayer}; +use std::time::Duration; fn main() { App::run(Default::default()).unwrap(); @@ -13,21 +13,23 @@ fn main() { enum Message { TogglePause, ToggleLoop, - VideoPlayerMessage(VideoPlayerMessage), + Seek(f64), + SeekRelease, + EndOfStream, + NewFrame, } struct App { - video: VideoPlayer, + video: Video, + position: f64, + dragging: 
diff --git a/examples/minimal.rs b/examples/minimal.rs
index dbc3e5f..30a2d1d 100644
--- a/examples/minimal.rs
+++ b/examples/minimal.rs
@@ -1,9 +1,9 @@
 use iced::{
-    executor,
-    widget::{Button, Column, Row, Text},
-    Application, Command, Element, Subscription, Theme,
+    widget::{Button, Column, Row, Slider, Text},
+    Element, Sandbox,
 };
-use iced_video_player::{VideoPlayer, VideoPlayerMessage};
+use iced_video_player::{Video, VideoPlayer};
+use std::time::Duration;
 
 fn main() {
     App::run(Default::default()).unwrap();
 }
 
@@ -13,21 +13,23 @@
 enum Message {
     TogglePause,
     ToggleLoop,
-    VideoPlayerMessage(VideoPlayerMessage),
+    Seek(f64),
+    SeekRelease,
+    EndOfStream,
+    NewFrame,
 }
 
 struct App {
-    video: VideoPlayer,
+    video: Video,
+    position: f64,
+    dragging: bool,
 }
 
-impl Application for App {
-    type Executor = executor::Default;
+impl Sandbox for App {
     type Message = Message;
-    type Flags = ();
-    type Theme = Theme;
 
-    fn new(_flags: ()) -> (Self, Command<Message>) {
-        let video = VideoPlayer::new(
+    fn new() -> Self {
+        let video = Video::new(
             &url::Url::from_file_path(
                 std::path::PathBuf::from(file!())
                     .parent()
@@ -40,14 +42,18 @@
             false,
         )
         .unwrap();
-        (App { video }, Command::none())
+        App {
+            video,
+            position: 0.0,
+            dragging: false,
+        }
     }
 
     fn title(&self) -> String {
         String::from("Video Player")
     }
 
-    fn update(&mut self, message: Message) -> Command<Message> {
+    fn update(&mut self, message: Message) {
         match message {
             Message::TogglePause => {
                 self.video.set_paused(!self.video.paused());
@@ -55,21 +61,36 @@
             Message::ToggleLoop => {
                 self.video.set_looping(!self.video.looping());
             }
-            Message::VideoPlayerMessage(msg) => {
-                return self.video.update(msg).map(Message::VideoPlayerMessage);
+            Message::Seek(secs) => {
+                self.dragging = true;
+                self.video.set_paused(true);
+                self.position = secs;
+            }
+            Message::SeekRelease => {
+                self.dragging = false;
+                self.video
+                    .seek(Duration::from_secs_f64(self.position))
+                    .expect("seek");
+                self.video.set_paused(false);
+            }
+            Message::EndOfStream => {
+                println!("end of stream");
+            }
+            Message::NewFrame => {
+                if !self.dragging {
+                    self.position = self.video.position().as_secs_f64();
+                }
             }
         }
-
-        Command::none()
-    }
-
-    fn subscription(&self) -> Subscription<Message> {
-        self.video.subscription().map(Message::VideoPlayerMessage)
     }
 
     fn view(&self) -> Element<Message> {
         Column::new()
-            .push(self.video.frame_view())
+            .push(
+                VideoPlayer::new(&self.video)
+                    .on_end_of_stream(Message::EndOfStream)
+                    .on_new_frame(Message::NewFrame),
+            )
             .push(
                 Row::new()
                     .spacing(5)
@@ -91,9 +112,18 @@
                     )
                     .push(Text::new(format!(
                         "{:#?}s / {:#?}s",
-                        self.video.position().as_secs(),
+                        self.position as u64,
                         self.video.duration().as_secs()
-                    ))),
+                    )))
+                    .push(
+                        Slider::new(
+                            0.0..=self.video.duration().as_secs_f64(),
+                            self.position,
+                            Message::Seek,
+                        )
+                        .step(0.1)
+                        .on_release(Message::SeekRelease),
+                    ),
             )
             .into()
     }
diff --git a/src/lib.rs b/src/lib.rs
index 28cd7a0..709421f 100644
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -1,47 +1,13 @@
-use gst::prelude::*;
+mod pipeline;
+mod video;
+mod video_player;
+
 use gstreamer as gst;
-use gstreamer_app as gst_app;
-use iced::{
-    widget::{image as img, Image},
-    Command, Subscription,
-};
-use std::convert::identity;
-use std::future;
-use std::sync::{mpsc, Arc, Mutex};
-use std::time::Duration;
 use thiserror::Error;
 
-/// Position in the media.
-#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
-pub enum Position {
-    /// Position based on time.
-    ///
-    /// Not the most accurate format for videos.
-    Time(std::time::Duration),
-    /// Position based on nth frame.
-    Frame(u64),
-}
-
-impl From<Position> for gst::GenericFormattedValue {
-    fn from(pos: Position) -> Self {
-        match pos {
-            Position::Time(t) => gst::ClockTime::from_nseconds(t.as_nanos() as _).into(),
-            Position::Frame(f) => gst::format::Default::from_u64(f).into(),
-        }
-    }
-}
-
-impl From<std::time::Duration> for Position {
-    fn from(t: std::time::Duration) -> Self {
-        Position::Time(t)
-    }
-}
-
-impl From<u64> for Position {
-    fn from(f: u64) -> Self {
-        Position::Frame(f)
-    }
-}
+pub use video::Position;
+pub use video::Video;
+pub use video_player::VideoPlayer;
 
 #[derive(Debug, Error)]
 pub enum Error {
@@ -65,342 +31,6 @@
     Duration,
     #[error("failed to sync with playback")]
     Sync,
-}
-
-#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
-pub enum VideoPlayerMessage {
-    NextFrame,
-    EndOfPlayback,
-}
-
-impl VideoPlayerMessage {
-    fn into_cmd(self) -> Command<VideoPlayerMessage> {
-        Command::perform(future::ready(self), identity)
-    }
-}
-
-/// Video player which handles multimedia playback.
-pub struct VideoPlayer {
-    bus: gst::Bus,
-    source: gst::Bin,
-
-    width: i32,
-    height: i32,
-    framerate: f64,
-    duration: std::time::Duration,
-
-    frame: Arc<Mutex<Option<img::Handle>>>,
-    wait: mpsc::Receiver<()>,
-    paused: bool,
-    muted: bool,
-    looping: bool,
-    is_eos: bool,
-    restart_stream: bool,
-}
-
-impl Drop for VideoPlayer {
-    fn drop(&mut self) {
-        self.source
-            .set_state(gst::State::Null)
-            .expect("failed to set state");
-    }
-}
-
-impl VideoPlayer {
-    /// Create a new video player from a given video which loads from `uri`.
-    ///
-    /// If `live` is set then no duration is queried (as this will result in an error and is non-sensical for live streams).
-    /// Set `live` if the streaming source is indefinite (e.g. a live stream).
-    /// Note that this will cause the duration to be zero.
-    pub fn new(uri: &url::Url, live: bool) -> Result<Self, Error> {
-        gst::init()?;
-
-        let source = gst::parse::launch(&format!("playbin uri=\"{}\" video-sink=\"videoconvert ! videoscale ! appsink name=app_sink caps=video/x-raw,format=RGBA,pixel-aspect-ratio=1/1\"", uri.as_str()))?;
-        let source = source.downcast::<gst::Bin>().unwrap();
-
-        let video_sink: gst::Element = source.property("video-sink");
-        let pad = video_sink.pads().get(0).cloned().unwrap();
-        let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
-        let bin = pad
-            .parent_element()
-            .unwrap()
-            .downcast::<gst::Bin>()
-            .unwrap();
-
-        let app_sink = bin.by_name("app_sink").unwrap();
-        let app_sink = app_sink.downcast::<gst_app::AppSink>().unwrap();
-
-        let frame = Arc::new(Mutex::new(None));
-        let frame_ref = Arc::clone(&frame);
-
-        let (notify, wait) = mpsc::channel();
-
-        app_sink.set_callbacks(
-            gst_app::AppSinkCallbacks::builder()
-                .new_sample(move |sink| {
-                    let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
-                    let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
-                    let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
-
-                    let pad = sink.static_pad("sink").ok_or(gst::FlowError::Error)?;
-
-                    let caps = pad.current_caps().ok_or(gst::FlowError::Error)?;
-                    let s = caps.structure(0).ok_or(gst::FlowError::Error)?;
-                    let width = s.get::<i32>("width").map_err(|_| gst::FlowError::Error)?;
-                    let height = s.get::<i32>("height").map_err(|_| gst::FlowError::Error)?;
-
-                    *frame_ref.lock().map_err(|_| gst::FlowError::Error)? =
-                        Some(img::Handle::from_pixels(
-                            width as _,
-                            height as _,
-                            map.as_slice().to_owned(),
-                        ));
-
-                    notify.send(()).map_err(|_| gst::FlowError::Error)?;
-
-                    Ok(gst::FlowSuccess::Ok)
-                })
-                .build(),
-        );
-
-        source.set_state(gst::State::Playing)?;
-
-        // wait for up to 5 seconds until the decoder gets the source capabilities
-        source.state(gst::ClockTime::from_seconds(5)).0?;
-
-        // extract resolution and framerate
-        // TODO(jazzfool): maybe we want to extract some other information too?
-        let caps = pad.current_caps().ok_or(Error::Caps)?;
-        let s = caps.structure(0).ok_or(Error::Caps)?;
-        let width = s.get::<i32>("width").map_err(|_| Error::Caps)?;
-        let height = s.get::<i32>("height").map_err(|_| Error::Caps)?;
-        let framerate = s
-            .get::<gst::Fraction>("framerate")
-            .map_err(|_| Error::Caps)?;
-
-        let duration = if !live {
-            std::time::Duration::from_nanos(
-                source
-                    .query_duration::<gst::ClockTime>()
-                    .ok_or(Error::Duration)?
-                    .nseconds(),
-            )
-        } else {
-            std::time::Duration::from_secs(0)
-        };
-
-        Ok(VideoPlayer {
-            bus: source.bus().unwrap(),
-            source,
-
-            width,
-            height,
-            framerate: framerate.numer() as f64 / framerate.denom() as f64,
-            duration,
-
-            frame,
-            wait,
-            paused: false,
-            muted: false,
-            looping: false,
-            is_eos: false,
-            restart_stream: false,
-        })
-    }
-
-    /// Get the size/resolution of the video as `(width, height)`.
-    #[inline(always)]
-    pub fn size(&self) -> (i32, i32) {
-        (self.width, self.height)
-    }
-
-    /// Get the framerate of the video as frames per second.
-    #[inline(always)]
-    pub fn framerate(&self) -> f64 {
-        self.framerate
-    }
-
-    /// Set the volume multiplier of the audio.
-    /// `0.0` = 0% volume, `1.0` = 100% volume.
-    ///
-    /// This uses a linear scale, for example `0.5` is perceived as half as loud.
-    pub fn set_volume(&mut self, volume: f64) {
-        self.source.set_property("volume", &volume);
-    }
-
-    /// Set if the audio is muted or not, without changing the volume.
-    pub fn set_muted(&mut self, muted: bool) {
-        self.muted = muted;
-        self.source.set_property("mute", &muted);
-    }
-
-    /// Get if the audio is muted or not.
-    #[inline(always)]
-    pub fn muted(&self) -> bool {
-        self.muted
-    }
-
-    /// Get if the stream ended or not.
-    #[inline(always)]
-    pub fn eos(&self) -> bool {
-        self.is_eos
-    }
-
-    /// Get if the media will loop or not.
-    #[inline(always)]
-    pub fn looping(&self) -> bool {
-        self.looping
-    }
-
-    /// Set if the media will loop or not.
-    #[inline(always)]
-    pub fn set_looping(&mut self, looping: bool) {
-        self.looping = looping;
-    }
-
-    /// Set if the media is paused or not.
-    pub fn set_paused(&mut self, paused: bool) {
-        self.source
-            .set_state(if paused {
-                gst::State::Paused
-            } else {
-                gst::State::Playing
-            })
-            .unwrap(/* state was changed in ctor; state errors caught there */);
-        self.paused = paused;
-
-        // Set restart_stream flag to make the stream restart on the next Message::NextFrame
-        if self.is_eos && !paused {
-            self.restart_stream = true;
-        }
-    }
-
-    /// Get if the media is paused or not.
-    #[inline(always)]
-    pub fn paused(&self) -> bool {
-        self.paused
-    }
-
-    /// Jumps to a specific position in the media.
-    /// The seeking is not perfectly accurate.
-    pub fn seek(&mut self, position: impl Into<Position>) -> Result<(), Error> {
-        self.source.seek_simple(
-            gst::SeekFlags::FLUSH,
-            gst::GenericFormattedValue::from(position.into()),
-        )?;
-        Ok(())
-    }
-
-    /// Get the current playback position in time.
-    pub fn position(&self) -> std::time::Duration {
-        std::time::Duration::from_nanos(
-            self.source
-                .query_position::<gst::ClockTime>()
-                .map_or(0, |pos| pos.nseconds()),
-        )
-        .into()
-    }
-
-    /// Get the media duration.
-    #[inline(always)]
-    pub fn duration(&self) -> std::time::Duration {
-        self.duration
-    }
-
-    /// Generates a list of thumbnails based on a set of positions in the media.
-    ///
-    /// Slow; only needs to be called once for each instance.
-    /// It's best to call this at the very start of playback, otherwise the position may shift.
-    pub fn thumbnails(&mut self, positions: &[Position]) -> Result<Vec<img::Handle>, Error> {
-        let paused = self.paused();
-        let pos = self.position();
-        self.set_paused(false);
-        let out = positions
-            .iter()
-            .map(|&pos| {
-                self.seek(pos)?;
-                self.wait.recv().map_err(|_| Error::Sync)?;
-                Ok(self.frame_image())
-            })
-            .collect();
-        self.set_paused(paused);
-        self.seek(pos)?;
-        out
-    }
-
-    pub fn update(&mut self, message: VideoPlayerMessage) -> Command<VideoPlayerMessage> {
-        match message {
-            VideoPlayerMessage::NextFrame => {
-                let mut cmds = Vec::new();
-
-                let mut restart_stream = false;
-                if self.restart_stream {
-                    restart_stream = true;
-                    // Set flag to false to avoid potentially multiple seeks
-                    self.restart_stream = false;
-                }
-                let mut eos_pause = false;
-
-                for msg in self.bus.iter() {
-                    match msg.view() {
-                        gst::MessageView::Error(err) => panic!("{:#?}", err),
-                        gst::MessageView::Eos(_eos) => {
-                            cmds.push(VideoPlayerMessage::EndOfPlayback.into_cmd());
-                            if self.looping {
-                                restart_stream = true;
-                            } else {
-                                eos_pause = true;
-                            }
-                        }
-                        _ => {}
-                    }
-                }
-
-                // Don't run eos_pause if restart_stream is true; fixes "pausing" after restarting a stream
-                if restart_stream {
-                    if let Err(err) = self.restart_stream() {
-                        eprintln!("cannot restart stream (can't seek): {:#?}", err);
-                    }
-                } else if eos_pause {
-                    self.is_eos = true;
-                    self.set_paused(true);
-                }
-
-                return Command::batch(cmds);
-            }
-            VideoPlayerMessage::EndOfPlayback => {}
-        }
-        Command::none()
-    }
-
-    pub fn subscription(&self) -> Subscription<VideoPlayerMessage> {
-        if self.restart_stream || (!self.is_eos && !self.paused()) {
-            iced::time::every(Duration::from_secs_f64(0.5 / self.framerate))
-                .map(|_| VideoPlayerMessage::NextFrame)
-        } else {
-            Subscription::none()
-        }
-    }
-
-    /// Get the image handle of the current frame.
-    pub fn frame_image(&self) -> img::Handle {
-        self.frame
-            .lock()
-            .expect("failed to lock frame")
-            .clone()
-            .unwrap_or_else(|| img::Handle::from_pixels(0, 0, vec![]))
-    }
-
-    /// Wrap the output of `frame_image` in an `Image` widget.
-    pub fn frame_view(&self) -> Image {
-        Image::new(self.frame_image())
-    }
-
-    /// Restarts a stream; seeks to the first frame and unpauses, sets the `eos` flag to false.
-    pub fn restart_stream(&mut self) -> Result<(), Error> {
-        self.is_eos = false;
-        self.set_paused(false);
-        self.seek(0)?;
-        Ok(())
-    }
+    #[error("failed to lock internal sync primitive")]
+    Lock,
 }
diff --git a/src/pipeline.rs b/src/pipeline.rs
new file mode 100644
index 0000000..57e5fc9
--- /dev/null
+++ b/src/pipeline.rs
@@ -0,0 +1,329 @@
+use iced_wgpu::primitive::pipeline::Primitive;
+use iced_wgpu::wgpu;
+use std::{
+    collections::BTreeMap,
+    sync::{Arc, Mutex},
+};
+
+#[repr(C)]
+struct Uniforms {
+    rect: [f32; 4],
+}
+
+struct VideoPipeline {
+    pipeline: wgpu::RenderPipeline,
+    bg0_layout: wgpu::BindGroupLayout,
+    sampler: wgpu::Sampler,
+    textures: BTreeMap<u64, (wgpu::Texture, wgpu::Buffer, wgpu::BindGroup)>,
+}
+
+impl VideoPipeline {
+    fn new(device: &wgpu::Device, format: wgpu::TextureFormat) -> Self {
+        let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
+            label: Some("iced_video_player shader"),
+            source: wgpu::ShaderSource::Wgsl(include_str!("shader.wgsl").into()),
+        });
+
+        let bg0_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
+            label: Some("iced_video_player bind group 0 layout"),
+            entries: &[
+                wgpu::BindGroupLayoutEntry {
+                    binding: 0,
+                    visibility: wgpu::ShaderStages::FRAGMENT,
+                    ty: wgpu::BindingType::Texture {
+                        sample_type: wgpu::TextureSampleType::Float { filterable: true },
+                        view_dimension: wgpu::TextureViewDimension::D2,
+                        multisampled: false,
+                    },
+                    count: None,
+                },
+                wgpu::BindGroupLayoutEntry {
+                    binding: 1,
+                    visibility: wgpu::ShaderStages::FRAGMENT,
+                    ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
+                    count: None,
+                },
+                wgpu::BindGroupLayoutEntry {
+                    binding: 2,
+                    visibility: wgpu::ShaderStages::VERTEX,
+                    ty: wgpu::BindingType::Buffer {
+                        ty: wgpu::BufferBindingType::Uniform,
+                        has_dynamic_offset: false,
+                        min_binding_size: None,
+                    },
+                    count: None,
+                },
+            ],
+        });
+
+        let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
+            label: Some("iced_video_player pipeline layout"),
+            bind_group_layouts: &[&bg0_layout],
+            push_constant_ranges: &[],
+        });
+
+        let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
+            label: Some("iced_video_player pipeline"),
+            layout: Some(&layout),
+            vertex: wgpu::VertexState {
+                module: &shader,
+                entry_point: "vs_main",
+                buffers: &[],
+            },
+            primitive: wgpu::PrimitiveState::default(),
+            depth_stencil: None,
+            multisample: wgpu::MultisampleState {
+                count: 1,
+                mask: !0,
+                alpha_to_coverage_enabled: false,
+            },
+            fragment: Some(wgpu::FragmentState {
+                module: &shader,
+                entry_point: "fs_main",
+                targets: &[Some(wgpu::ColorTargetState {
+                    format,
+                    blend: None,
+                    write_mask: wgpu::ColorWrites::ALL,
+                })],
+            }),
+            multiview: None,
+        });
+
+        let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
+            label: Some("iced_video_player sampler"),
+            address_mode_u: wgpu::AddressMode::ClampToEdge,
+            address_mode_v: wgpu::AddressMode::ClampToEdge,
+            address_mode_w: wgpu::AddressMode::ClampToEdge,
+            mag_filter: wgpu::FilterMode::Linear,
+            min_filter: wgpu::FilterMode::Linear,
+            mipmap_filter: wgpu::FilterMode::Nearest,
+            lod_min_clamp: 0.0,
+            lod_max_clamp: 1.0,
+            compare: None,
+            anisotropy_clamp: 1,
+            border_color: None,
+        });
+
+        VideoPipeline {
+            pipeline,
+            bg0_layout,
+            sampler,
+            textures: BTreeMap::new(),
+        }
+    }
+
+    fn upload(
+        &mut self,
+        device: &wgpu::Device,
+        queue: &wgpu::Queue,
+        video_id: u64,
+        (width, height): (u32, u32),
+        frame: &[u8],
+    ) {
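+        // GPU resources are created lazily, once per `video_id`: the first
+        // upload allocates the texture, uniform buffer, and bind group, while
+        // every later frame for that video only rewrites texel data below.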
+ label: Some("iced_video_player texture"), + size: wgpu::Extent3d { + width, + height, + depth_or_array_layers: 1, + }, + mip_level_count: 1, + sample_count: 1, + dimension: wgpu::TextureDimension::D2, + format: wgpu::TextureFormat::Rgba8UnormSrgb, + usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING, + view_formats: &[], + }); + + let view = texture.create_view(&wgpu::TextureViewDescriptor { + label: Some("iced_video_player texture view"), + format: None, + dimension: None, + aspect: wgpu::TextureAspect::All, + base_mip_level: 0, + mip_level_count: None, + base_array_layer: 0, + array_layer_count: None, + }); + + let buffer = device.create_buffer(&wgpu::BufferDescriptor { + label: Some("iced_video_player uniform buffer"), + size: std::mem::size_of::() as _, + usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::UNIFORM, + mapped_at_creation: false, + }); + + let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor { + label: Some("iced_video_player bind group"), + layout: &self.bg0_layout, + entries: &[ + wgpu::BindGroupEntry { + binding: 0, + resource: wgpu::BindingResource::TextureView(&view), + }, + wgpu::BindGroupEntry { + binding: 1, + resource: wgpu::BindingResource::Sampler(&self.sampler), + }, + wgpu::BindGroupEntry { + binding: 2, + resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding { + buffer: &buffer, + offset: 0, + size: None, + }), + }, + ], + }); + + self.textures + .insert(video_id, (texture, buffer, bind_group)); + } + + let (texture, _, _) = self.textures.get(&video_id).unwrap(); + + queue.write_texture( + wgpu::ImageCopyTexture { + texture, + mip_level: 0, + origin: wgpu::Origin3d::ZERO, + aspect: wgpu::TextureAspect::All, + }, + frame, + wgpu::ImageDataLayout { + offset: 0, + bytes_per_row: Some(width * 4), + rows_per_image: Some(height), + }, + wgpu::Extent3d { + width, + height, + depth_or_array_layers: 1, + }, + ); + } + + fn prepare(&mut self, queue: &wgpu::Queue, video_id: u64, bounds: iced::Rectangle) { + if let Some((_, buffer, _)) = self.textures.get(&video_id) { + let uniforms = Uniforms { + rect: [ + bounds.x, + bounds.y, + bounds.x + bounds.width, + bounds.y + bounds.height, + ], + }; + queue.write_buffer(buffer, 0, unsafe { + std::slice::from_raw_parts( + &uniforms as *const _ as *const u8, + std::mem::size_of::(), + ) + }); + } + } + + fn draw( + &self, + target: &wgpu::TextureView, + encoder: &mut wgpu::CommandEncoder, + viewport: iced::Rectangle, + video_id: u64, + ) { + if let Some((_, _, bind_group)) = self.textures.get(&video_id) { + let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor { + label: Some("iced_video_player render pass"), + color_attachments: &[Some(wgpu::RenderPassColorAttachment { + view: target, + resolve_target: None, + ops: wgpu::Operations { + load: wgpu::LoadOp::Load, + store: wgpu::StoreOp::Store, + }, + })], + depth_stencil_attachment: None, + timestamp_writes: None, + occlusion_query_set: None, + }); + + pass.set_pipeline(&self.pipeline); + pass.set_bind_group(0, bind_group, &[]); + pass.set_viewport( + viewport.x as _, + viewport.y as _, + viewport.width as _, + viewport.height as _, + 0.0, + 1.0, + ); + pass.draw(0..4, 0..1); + } + } +} + +#[derive(Debug, Clone)] +pub(crate) struct VideoPrimitive { + video_id: u64, + frame: Arc>>, + size: (u32, u32), + upload_frame: bool, +} + +impl VideoPrimitive { + pub fn new( + video_id: u64, + frame: Arc>>, + size: (u32, u32), + upload_frame: bool, + ) -> Self { + VideoPrimitive { + video_id, + frame, + size, + 
+            upload_frame,
+        }
+    }
+}
+
+impl Primitive for VideoPrimitive {
+    fn prepare(
+        &self,
+        format: wgpu::TextureFormat,
+        device: &wgpu::Device,
+        queue: &wgpu::Queue,
+        bounds: iced::Rectangle,
+        _target_size: iced::Size<u32>,
+        _scale_factor: f32,
+        storage: &mut iced_wgpu::primitive::pipeline::Storage,
+    ) {
+        if !storage.has::<VideoPipeline>() {
+            storage.store(VideoPipeline::new(device, format));
+        }
+
+        let pipeline = storage.get_mut::<VideoPipeline>().unwrap();
+
+        if self.upload_frame {
+            pipeline.upload(
+                device,
+                queue,
+                self.video_id,
+                self.size,
+                self.frame.lock().expect("lock frame mutex").as_slice(),
+            );
+        }
+
+        pipeline.prepare(queue, self.video_id, bounds);
+    }
+
+    fn render(
+        &self,
+        storage: &iced_wgpu::primitive::pipeline::Storage,
+        target: &wgpu::TextureView,
+        _target_size: iced::Size<u32>,
+        viewport: iced::Rectangle<u32>,
+        encoder: &mut wgpu::CommandEncoder,
+    ) {
+        let pipeline = storage.get::<VideoPipeline>().unwrap();
+        pipeline.draw(target, encoder, viewport, self.video_id);
+    }
+}
diff --git a/src/shader.wgsl b/src/shader.wgsl
new file mode 100644
index 0000000..30d788b
--- /dev/null
+++ b/src/shader.wgsl
@@ -0,0 +1,41 @@
+struct VertexOutput {
+    @builtin(position) position: vec4<f32>,
+    @location(0) uv: vec2<f32>,
+}
+
+struct Uniforms {
+    rect: vec4<f32>,
+}
+
+@group(0) @binding(0)
+var t: texture_2d<f32>;
+
+@group(0) @binding(1)
+var s: sampler;
+
+@group(0) @binding(2)
+var<uniform> uniforms: Uniforms;
+
+@vertex
+fn vs_main(@builtin(vertex_index) in_vertex_index: u32) -> VertexOutput {
+    let quad = array<vec2<f32>, 6>(
+        uniforms.rect.xy,
+        uniforms.rect.zy,
+        uniforms.rect.xw,
+        uniforms.rect.zy,
+        uniforms.rect.zw,
+        uniforms.rect.xw,
+    );
+
+    var out: VertexOutput;
+    out.uv = vec2<f32>(0.0);
+    out.uv.x = select(0.0, 2.0, in_vertex_index == 1u);
+    out.uv.y = select(0.0, 2.0, in_vertex_index == 2u);
+    out.position = vec4<f32>(out.uv * vec2<f32>(2.0, -2.0) + vec2<f32>(-1.0, 1.0), 1.0, 1.0);
+    return out;
+}
+
+@fragment
+fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
+    return textureSample(t, s, in.uv);
+}
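A note on how the shader places the video: `vs_main` emits the classic "fullscreen triangle" (UVs reaching 2.0 so that three vertices cover the whole render target), and `VideoPipeline::draw` then uses `set_viewport` to map that triangle onto the widget bounds computed during layout, so `fs_main` simply samples the frame texture across the player's rectangle.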
diff --git a/src/video.rs b/src/video.rs
new file mode 100644
index 0000000..f14c971
--- /dev/null
+++ b/src/video.rs
@@ -0,0 +1,342 @@
+use crate::Error;
+use gst::prelude::*;
+use gstreamer as gst;
+use gstreamer_app as gst_app;
+use iced::widget::image as img;
+use std::cell::RefCell;
+use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
+use std::sync::{mpsc, Arc, Mutex};
+use std::time::Instant;
+
+/// Position in the media.
+#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub enum Position {
+    /// Position based on time.
+    ///
+    /// Not the most accurate format for videos.
+    Time(std::time::Duration),
+    /// Position based on nth frame.
+    Frame(u64),
+}
+
+impl From<Position> for gst::GenericFormattedValue {
+    fn from(pos: Position) -> Self {
+        match pos {
+            Position::Time(t) => gst::ClockTime::from_nseconds(t.as_nanos() as _).into(),
+            Position::Frame(f) => gst::format::Default::from_u64(f).into(),
+        }
+    }
+}
+
+impl From<std::time::Duration> for Position {
+    fn from(t: std::time::Duration) -> Self {
+        Position::Time(t)
+    }
+}
+
+impl From<u64> for Position {
+    fn from(f: u64) -> Self {
+        Position::Frame(f)
+    }
+}
+
+pub(crate) struct Internal {
+    pub(crate) id: u64,
+
+    pub(crate) bus: gst::Bus,
+    pub(crate) source: gst::Bin,
+
+    pub(crate) width: i32,
+    pub(crate) height: i32,
+    pub(crate) framerate: f64,
+    pub(crate) duration: std::time::Duration,
+
+    pub(crate) frame: Arc<Mutex<Vec<u8>>>, // ideally would be Arc<Mutex<[u8]>>
+    pub(crate) upload_frame: Arc<AtomicBool>,
+    pub(crate) wait: mpsc::Receiver<()>,
+    pub(crate) paused: bool,
+    pub(crate) muted: bool,
+    pub(crate) looping: bool,
+    pub(crate) is_eos: bool,
+    pub(crate) restart_stream: bool,
+    pub(crate) next_redraw: Instant,
+}
+
+impl Internal {
+    pub(crate) fn seek(&self, position: impl Into<Position>) -> Result<(), Error> {
+        self.source.seek_simple(
+            gst::SeekFlags::FLUSH,
+            gst::GenericFormattedValue::from(position.into()),
+        )?;
+        Ok(())
+    }
+
+    pub(crate) fn restart_stream(&mut self) -> Result<(), Error> {
+        self.is_eos = false;
+        self.set_paused(false);
+        self.seek(0)?;
+        Ok(())
+    }
+
+    pub(crate) fn set_paused(&mut self, paused: bool) {
+        self.source
+            .set_state(if paused {
+                gst::State::Paused
+            } else {
+                gst::State::Playing
+            })
+            .unwrap(/* state was changed in ctor; state errors caught there */);
+        self.paused = paused;
+
+        // Set restart_stream flag to make the stream restart on the next Message::NextFrame
+        if self.is_eos && !paused {
+            self.restart_stream = true;
+        }
+    }
+}
+
+/// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream).
+pub struct Video(pub(crate) RefCell<Internal>);
+
+impl Drop for Video {
+    fn drop(&mut self) {
+        self.0
+            .borrow()
+            .source
+            .set_state(gst::State::Null)
+            .expect("failed to set state");
+    }
+}
+
+impl Video {
+    /// Create a new video player from a given video which loads from `uri`.
+    ///
+    /// If `live` is set then no duration is queried (as this will result in an error and is nonsensical for live streams).
+    /// Set `live` if the streaming source is indefinite (e.g. a live stream).
+    /// Note that this will cause the duration to be zero.
+    pub fn new(uri: &url::Url, live: bool) -> Result<Self, Error> {
+        static NEXT_ID: AtomicU64 = AtomicU64::new(0);
+        let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
+
+        gst::init()?;
+
+        let source = gst::parse::launch(&format!("playbin uri=\"{}\" video-sink=\"videoconvert ! videoscale ! appsink name=app_sink caps=video/x-raw,format=RGBA,pixel-aspect-ratio=1/1\"", uri.as_str()))?;
+        let source = source.downcast::<gst::Bin>().unwrap();
+
+        let video_sink: gst::Element = source.property("video-sink");
+        let pad = video_sink.pads().get(0).cloned().unwrap();
+        let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
+        let bin = pad
+            .parent_element()
+            .unwrap()
+            .downcast::<gst::Bin>()
+            .unwrap();
+
+        let app_sink = bin.by_name("app_sink").unwrap();
+        let app_sink = app_sink.downcast::<gst_app::AppSink>().unwrap();
+
+        source.set_state(gst::State::Playing)?;
+
+        // wait for up to 5 seconds until the decoder gets the source capabilities
+        source.state(gst::ClockTime::from_seconds(5)).0?;
+
+        // extract resolution and framerate
+        // TODO(jazzfool): maybe we want to extract some other information too?
+        let caps = pad.current_caps().ok_or(Error::Caps)?;
+        let s = caps.structure(0).ok_or(Error::Caps)?;
+        let width = s.get::<i32>("width").map_err(|_| Error::Caps)?;
+        let height = s.get::<i32>("height").map_err(|_| Error::Caps)?;
+        let framerate = s
+            .get::<gst::Fraction>("framerate")
+            .map_err(|_| Error::Caps)?;
+
+        let duration = if !live {
+            std::time::Duration::from_nanos(
+                source
+                    .query_duration::<gst::ClockTime>()
+                    .ok_or(Error::Duration)?
+                    .nseconds(),
+            )
+        } else {
+            std::time::Duration::from_secs(0)
+        };
+
+        let frame_buf = vec![0; (width * height * 4) as _];
+        let frame = Arc::new(Mutex::new(frame_buf));
+        let frame_ref = Arc::clone(&frame);
+
+        let upload_frame = Arc::new(AtomicBool::new(true));
+        let upload_frame_ref = Arc::clone(&upload_frame);
+
+        let (notify, wait) = mpsc::channel();
+
+        app_sink.set_callbacks(
+            gst_app::AppSinkCallbacks::builder()
+                .new_sample(move |sink| {
+                    let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
+                    let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
+                    let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
+
+                    frame_ref
+                        .lock()
+                        .map_err(|_| gst::FlowError::Error)?
+                        .copy_from_slice(map.as_slice());
+
+                    upload_frame_ref.store(true, Ordering::SeqCst);
+
+                    notify.send(()).map_err(|_| gst::FlowError::Error)?;
+
+                    Ok(gst::FlowSuccess::Ok)
+                })
+                .build(),
+        );
+
+        Ok(Video(RefCell::new(Internal {
+            id,
+
+            bus: source.bus().unwrap(),
+            source,
+
+            width,
+            height,
+            framerate: framerate.numer() as f64 / framerate.denom() as f64,
+            duration,
+
+            frame,
+            upload_frame,
+            wait,
+            paused: false,
+            muted: false,
+            looping: false,
+            is_eos: false,
+            restart_stream: false,
+            next_redraw: Instant::now(),
+        })))
+    }
+
+    /// Get the size/resolution of the video as `(width, height)`.
+    #[inline(always)]
+    pub fn size(&self) -> (i32, i32) {
+        (self.0.borrow().width, self.0.borrow().height)
+    }
+
+    /// Get the framerate of the video as frames per second.
+    #[inline(always)]
+    pub fn framerate(&self) -> f64 {
+        self.0.borrow().framerate
+    }
+
+    /// Set the volume multiplier of the audio.
+    /// `0.0` = 0% volume, `1.0` = 100% volume.
+    ///
+    /// This uses a linear scale, for example `0.5` is perceived as half as loud.
+    pub fn set_volume(&mut self, volume: f64) {
+        self.0.borrow().source.set_property("volume", &volume);
+    }
+
+    /// Set if the audio is muted or not, without changing the volume.
+    pub fn set_muted(&mut self, muted: bool) {
+        let mut inner = self.0.borrow_mut();
+        inner.muted = muted;
+        inner.source.set_property("mute", &muted);
+    }
+
+    /// Get if the audio is muted or not.
+    #[inline(always)]
+    pub fn muted(&self) -> bool {
+        self.0.borrow().muted
+    }
+
+    /// Get if the stream ended or not.
+    #[inline(always)]
+    pub fn eos(&self) -> bool {
+        self.0.borrow().is_eos
+    }
+
+    /// Get if the media will loop or not.
+    #[inline(always)]
+    pub fn looping(&self) -> bool {
+        self.0.borrow().looping
+    }
+
+    /// Set if the media will loop or not.
+    #[inline(always)]
+    pub fn set_looping(&mut self, looping: bool) {
+        self.0.borrow_mut().looping = looping;
+    }
+
+    /// Set if the media is paused or not.
+    pub fn set_paused(&mut self, paused: bool) {
+        let mut inner = self.0.borrow_mut();
+        inner.set_paused(paused);
+    }
+
+    /// Get if the media is paused or not.
+    #[inline(always)]
+    pub fn paused(&self) -> bool {
+        self.0.borrow().paused
+    }
+
+    /// Jumps to a specific position in the media.
+    /// The seeking is not perfectly accurate.
+    pub fn seek(&mut self, position: impl Into<Position>) -> Result<(), Error> {
+        self.0.borrow_mut().seek(position)
+    }
+
+    /// Get the current playback position in time.
+    pub fn position(&self) -> std::time::Duration {
+        std::time::Duration::from_nanos(
+            self.0
+                .borrow()
+                .source
+                .query_position::<gst::ClockTime>()
+                .map_or(0, |pos| pos.nseconds()),
+        )
+        .into()
+    }
+
+    /// Get the media duration.
+    #[inline(always)]
+    pub fn duration(&self) -> std::time::Duration {
+        self.0.borrow().duration
+    }
+
+    /// Restarts a stream; seeks to the first frame and unpauses, sets the `eos` flag to false.
+    pub fn restart_stream(&mut self) -> Result<(), Error> {
+        self.0.borrow_mut().restart_stream()
+    }
+
+    /// Generates a list of thumbnails based on a set of positions in the media.
+    ///
+    /// Slow; only needs to be called once for each instance.
+    /// It's best to call this at the very start of playback, otherwise the position may shift.
+    pub fn thumbnails(&mut self, positions: &[Position]) -> Result<Vec<img::Handle>, Error> {
+        let paused = self.paused();
+        let pos = self.position();
+        self.set_paused(false);
+        let out = positions
+            .iter()
+            .map(|&pos| {
+                self.seek(pos)?;
+                let inner = self.0.borrow();
+                // for some reason waiting for two frames is necessary
+                // maybe in a small window between seek and wait the old frame comes in?
+                inner.wait.recv().map_err(|_| Error::Sync)?;
+                inner.wait.recv().map_err(|_| Error::Sync)?;
+                Ok(img::Handle::from_pixels(
+                    inner.width as _,
+                    inner.height as _,
+                    self.0
+                        .borrow()
+                        .frame
+                        .lock()
+                        .map_err(|_| Error::Lock)?
+                        .clone(),
+                ))
+            })
+            .collect();
+        self.set_paused(paused);
+        self.seek(pos)?;
+        out
+    }
+}
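A quick sketch of the `live` flag and `thumbnails` defined above, since they are easy to miss (hypothetical file URI; positions are illustrative):

```rust
use iced_video_player::{Position, Video};
use std::time::Duration;

fn thumbnails_demo() -> Result<(), iced_video_player::Error> {
    // `live: false` queries the duration up front; pass `true` for indefinite
    // sources, which leaves `duration()` at zero.
    let mut video = Video::new(&url::Url::parse("file:///C:/my_video.mp4").unwrap(), false)?;

    // Capture a thumbnail at every 10% of the duration, ideally right at the
    // start of playback (the docs above note the position may shift later).
    let step = video.duration().as_secs_f64() / 10.0;
    let positions: Vec<Position> = (0..10)
        .map(|i| Position::Time(Duration::from_secs_f64(i as f64 * step)))
        .collect();
    let _thumbnails = video.thumbnails(&positions)?;
    Ok(())
}
```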
diff --git a/src/video_player.rs b/src/video_player.rs
new file mode 100644
index 0000000..7f68413
--- /dev/null
+++ b/src/video_player.rs
@@ -0,0 +1,178 @@
+use crate::{pipeline::VideoPrimitive, video::Video};
+use gstreamer as gst;
+use iced::{
+    advanced::{self, graphics::core::event::Status, layout, widget, Widget},
+    Element,
+};
+use iced_wgpu::primitive::pipeline::Renderer as PrimitiveRenderer;
+use std::{marker::PhantomData, sync::atomic::Ordering};
+use std::{sync::Arc, time::Duration};
+
+/// Video player which displays the current frame of a [`Video`](crate::Video).
+pub struct VideoPlayer<'a, Message, Theme = iced::Theme, Renderer = iced::Renderer>
+where
+    Renderer: PrimitiveRenderer,
+{
+    video: &'a Video,
+    on_end_of_stream: Option<Message>,
+    on_new_frame: Option<Message>,
+    _phantom: PhantomData<(Theme, Renderer)>,
+}
+
+impl<'a, Message, Theme, Renderer> VideoPlayer<'a, Message, Theme, Renderer>
+where
+    Renderer: PrimitiveRenderer,
+{
+    pub fn new(video: &'a Video) -> Self {
+        VideoPlayer {
+            video,
+            on_end_of_stream: None,
+            on_new_frame: None,
+            _phantom: Default::default(),
+        }
+    }
+
+    pub fn on_end_of_stream(self, on_end_of_stream: Message) -> Self {
+        VideoPlayer {
+            on_end_of_stream: Some(on_end_of_stream),
+            ..self
+        }
+    }
+
+    pub fn on_new_frame(self, on_new_frame: Message) -> Self {
+        VideoPlayer {
+            on_new_frame: Some(on_new_frame),
+            ..self
+        }
+    }
+}
+
+impl<'a, Message, Theme, Renderer> Widget<Message, Theme, Renderer>
+    for VideoPlayer<'a, Message, Theme, Renderer>
+where
+    Message: Clone,
+    Renderer: PrimitiveRenderer,
+{
+    fn size(&self) -> iced::Size<iced::Length> {
+        iced::Size {
+            width: iced::Length::Shrink,
+            height: iced::Length::Shrink,
+        }
+    }
+
+    fn layout(
+        &self,
+        _tree: &mut widget::Tree,
+        _renderer: &Renderer,
+        limits: &layout::Limits,
+    ) -> layout::Node {
+        let (width, height) = self.video.size();
+        let size = limits.resolve(
+            iced::Length::Shrink,
+            iced::Length::Shrink,
+            iced::Size::new(width as _, height as _),
+        );
+
+        layout::Node::new(size)
+    }
+
+    fn draw(
+        &self,
+        _tree: &widget::Tree,
+        renderer: &mut Renderer,
+        _theme: &Theme,
+        _style: &advanced::renderer::Style,
+        layout: advanced::Layout<'_>,
+        _cursor: advanced::mouse::Cursor,
+        _viewport: &iced::Rectangle,
+    ) {
+        let inner = self.video.0.borrow();
+        renderer.draw_pipeline_primitive(
+            layout.bounds(),
+            VideoPrimitive::new(
+                inner.id,
+                Arc::clone(&inner.frame),
+                (inner.width as _, inner.height as _),
+                inner.upload_frame.load(Ordering::SeqCst),
+            ),
+        );
+    }
+
+    fn on_event(
+        &mut self,
+        _state: &mut widget::Tree,
+        event: iced::Event,
+        _layout: advanced::Layout<'_>,
+        _cursor: advanced::mouse::Cursor,
+        _renderer: &Renderer,
+        _clipboard: &mut dyn advanced::Clipboard,
+        shell: &mut advanced::Shell<'_, Message>,
+        _viewport: &iced::Rectangle,
+    ) -> Status {
+        let mut inner = self.video.0.borrow_mut();
+
+        if let iced::Event::Window(_, iced::window::Event::RedrawRequested(now)) = event {
+            if inner.restart_stream || (!inner.is_eos && !inner.paused) {
+                let mut restart_stream = false;
+                if inner.restart_stream {
+                    restart_stream = true;
+                    // Set flag to false to avoid potentially multiple seeks
+                    inner.restart_stream = false;
+                }
+                let mut eos_pause = false;
+
+                for msg in inner.bus.iter() {
+                    match msg.view() {
+                        gst::MessageView::Error(err) => panic!("{:#?}", err),
+                        gst::MessageView::Eos(_eos) => {
+                            if let Some(on_end_of_stream) = self.on_end_of_stream.clone() {
+                                shell.publish(on_end_of_stream);
+                            }
+                            if inner.looping {
+                                restart_stream = true;
+                            } else {
+                                eos_pause = true;
+                            }
+                        }
+                        _ => {}
+                    }
+                }
+
+                // Don't run eos_pause if restart_stream is true; fixes "pausing" after restarting a stream
+                if restart_stream {
+                    if let Err(err) = inner.restart_stream() {
+                        eprintln!("cannot restart stream (can't seek): {:#?}", err);
+                    }
+                } else if eos_pause {
+                    inner.is_eos = true;
+                    inner.set_paused(true);
+                }
+
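+                // Phase-locked redraw scheduling: `now - next_redraw` measures
+                // how far this redraw ran past the previously scheduled tick,
+                // and the modulo keeps requests aligned to the video's frame
+                // interval rather than letting them drift.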
+                let redraw_interval = 1.0 / inner.framerate;
+                let until_redraw =
+                    redraw_interval - (now - inner.next_redraw).as_secs_f64() % redraw_interval;
+                inner.next_redraw = now + Duration::from_secs_f64(until_redraw);
+                shell.request_redraw(iced::window::RedrawRequest::At(inner.next_redraw));
+
+                if let Some(on_new_frame) = self.on_new_frame.clone() {
+                    shell.publish(on_new_frame);
+                }
+            }
+            Status::Captured
+        } else {
+            Status::Ignored
+        }
+    }
+}
+
+impl<'a, Message, Theme, Renderer> From<VideoPlayer<'a, Message, Theme, Renderer>>
+    for Element<'a, Message, Theme, Renderer>
+where
+    Message: 'a + Clone,
+    Theme: 'a,
+    Renderer: 'a + PrimitiveRenderer,
+{
+    fn from(video_player: VideoPlayer<'a, Message, Theme, Renderer>) -> Self {
+        Self::new(video_player)
+    }
+}