overhaul to iced widget w/ custom render pipeline

jazzfool 2024-02-19 02:59:54 +11:00
parent 51794fc0b1
commit e347a9b324
11 changed files with 972 additions and 499 deletions
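For orientation, a minimal sketch (not part of this commit) of how the reworked API is used from an iced 0.12 application. Note there is no longer any subscription/update plumbing: the widget schedules its own redraws via RedrawRequested. The file path and message names are illustrative, and the default renderer is assumed to implement iced_wgpu's pipeline-primitive trait:

use iced::{Element, Sandbox, Settings};
use iced_video_player::{Video, VideoPlayer};

#[derive(Debug, Clone)]
enum Message {
    EndOfStream,
}

struct App {
    video: Video,
}

impl Sandbox for App {
    type Message = Message;

    fn new() -> Self {
        // `live = false`: a seekable local file, so the duration is queried.
        let uri = url::Url::from_file_path("/path/to/video.mp4").unwrap();
        App {
            video: Video::new(&uri, false).expect("failed to open video"),
        }
    }

    fn title(&self) -> String {
        String::from("iced_video_player")
    }

    fn update(&mut self, message: Message) {
        match message {
            // Restart manually on end-of-stream just to exercise the callback;
            // `video.set_looping(true)` achieves the same effect.
            Message::EndOfStream => self.video.restart_stream().expect("failed to restart"),
        }
    }

    fn view(&self) -> Element<'_, Message> {
        VideoPlayer::new(&self.video)
            .on_end_of_stream(Message::EndOfStream)
            .into()
    }
}

fn main() -> iced::Result {
    App::run(Settings::default())
}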


@@ -1,47 +1,13 @@
use gst::prelude::*;
mod pipeline;
mod video;
mod video_player;
use gstreamer as gst;
use gstreamer_app as gst_app;
use iced::{
widget::{image as img, Image},
Command, Subscription,
};
use std::convert::identity;
use std::future;
use std::sync::{mpsc, Arc, Mutex};
use std::time::Duration;
use thiserror::Error;
/// Position in the media.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Position {
/// Position based on time.
///
/// Not the most accurate format for videos.
Time(std::time::Duration),
/// Position based on nth frame.
Frame(u64),
}
impl From<Position> for gst::GenericFormattedValue {
fn from(pos: Position) -> Self {
match pos {
Position::Time(t) => gst::ClockTime::from_nseconds(t.as_nanos() as _).into(),
Position::Frame(f) => gst::format::Default::from_u64(f).into(),
}
}
}
impl From<std::time::Duration> for Position {
fn from(t: std::time::Duration) -> Self {
Position::Time(t)
}
}
impl From<u64> for Position {
fn from(f: u64) -> Self {
Position::Frame(f)
}
}
pub use video::Position;
pub use video::Video;
pub use video_player::VideoPlayer;
#[derive(Debug, Error)]
pub enum Error {
@@ -65,342 +31,6 @@ pub enum Error {
Duration,
#[error("failed to sync with playback")]
Sync,
}
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum VideoPlayerMessage {
NextFrame,
EndOfPlayback,
}
impl VideoPlayerMessage {
fn into_cmd(self) -> Command<Self> {
Command::perform(future::ready(self), identity)
}
}
/// Video player which handles multimedia playback.
pub struct VideoPlayer {
bus: gst::Bus,
source: gst::Bin,
width: i32,
height: i32,
framerate: f64,
duration: std::time::Duration,
frame: Arc<Mutex<Option<img::Handle>>>,
wait: mpsc::Receiver<()>,
paused: bool,
muted: bool,
looping: bool,
is_eos: bool,
restart_stream: bool,
}
impl Drop for VideoPlayer {
fn drop(&mut self) {
self.source
.set_state(gst::State::Null)
.expect("failed to set state");
}
}
impl VideoPlayer {
/// Create a new video player from a given video which loads from `uri`.
///
/// Set `live` if the streaming source is indefinite (e.g. a live stream). In that case no
/// duration is queried (the query would fail, and a duration is nonsensical for a live stream),
/// so the duration is reported as zero.
pub fn new(uri: &url::Url, live: bool) -> Result<Self, Error> {
gst::init()?;
let source = gst::parse::launch(&format!("playbin uri=\"{}\" video-sink=\"videoconvert ! videoscale ! appsink name=app_sink caps=video/x-raw,format=RGBA,pixel-aspect-ratio=1/1\"", uri.as_str()))?;
let source = source.downcast::<gst::Bin>().unwrap();
let video_sink: gst::Element = source.property("video-sink");
let pad = video_sink.pads().get(0).cloned().unwrap();
let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
let bin = pad
.parent_element()
.unwrap()
.downcast::<gst::Bin>()
.unwrap();
let app_sink = bin.by_name("app_sink").unwrap();
let app_sink = app_sink.downcast::<gst_app::AppSink>().unwrap();
let frame = Arc::new(Mutex::new(None));
let frame_ref = Arc::clone(&frame);
let (notify, wait) = mpsc::channel();
app_sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |sink| {
let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
let pad = sink.static_pad("sink").ok_or(gst::FlowError::Error)?;
let caps = pad.current_caps().ok_or(gst::FlowError::Error)?;
let s = caps.structure(0).ok_or(gst::FlowError::Error)?;
let width = s.get::<i32>("width").map_err(|_| gst::FlowError::Error)?;
let height = s.get::<i32>("height").map_err(|_| gst::FlowError::Error)?;
*frame_ref.lock().map_err(|_| gst::FlowError::Error)? =
Some(img::Handle::from_pixels(
width as _,
height as _,
map.as_slice().to_owned(),
));
notify.send(()).map_err(|_| gst::FlowError::Error)?;
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
source.set_state(gst::State::Playing)?;
// wait for up to 5 seconds until the decoder gets the source capabilities
source.state(gst::ClockTime::from_seconds(5)).0?;
// extract resolution and framerate
// TODO(jazzfool): maybe we want to extract some other information too?
let caps = pad.current_caps().ok_or(Error::Caps)?;
let s = caps.structure(0).ok_or(Error::Caps)?;
let width = s.get::<i32>("width").map_err(|_| Error::Caps)?;
let height = s.get::<i32>("height").map_err(|_| Error::Caps)?;
let framerate = s
.get::<gst::Fraction>("framerate")
.map_err(|_| Error::Caps)?;
let duration = if !live {
std::time::Duration::from_nanos(
source
.query_duration::<gst::ClockTime>()
.ok_or(Error::Duration)?
.nseconds(),
)
} else {
std::time::Duration::from_secs(0)
};
Ok(VideoPlayer {
bus: source.bus().unwrap(),
source,
width,
height,
framerate: framerate.numer() as f64 / framerate.denom() as f64,
duration,
frame,
wait,
paused: false,
muted: false,
looping: false,
is_eos: false,
restart_stream: false,
})
}
/// Get the size/resolution of the video as `(width, height)`.
#[inline(always)]
pub fn size(&self) -> (i32, i32) {
(self.width, self.height)
}
/// Get the framerate of the video as frames per second.
#[inline(always)]
pub fn framerate(&self) -> f64 {
self.framerate
}
/// Set the volume multiplier of the audio.
/// `0.0` = 0% volume, `1.0` = 100% volume.
///
/// This uses a linear scale; for example, `0.5` is perceived as half as loud.
pub fn set_volume(&mut self, volume: f64) {
self.source.set_property("volume", &volume);
}
/// Set if the audio is muted or not, without changing the volume.
pub fn set_muted(&mut self, muted: bool) {
self.muted = muted;
self.source.set_property("mute", &muted);
}
/// Get if the audio is muted or not.
#[inline(always)]
pub fn muted(&self) -> bool {
self.muted
}
/// Get if the stream ended or not.
#[inline(always)]
pub fn eos(&self) -> bool {
self.is_eos
}
/// Get if the media will loop or not.
#[inline(always)]
pub fn looping(&self) -> bool {
self.looping
}
/// Set if the media will loop or not.
#[inline(always)]
pub fn set_looping(&mut self, looping: bool) {
self.looping = looping;
}
/// Set if the media is paused or not.
pub fn set_paused(&mut self, paused: bool) {
self.source
.set_state(if paused {
gst::State::Paused
} else {
gst::State::Playing
})
.unwrap(/* state was changed in ctor; state errors caught there */);
self.paused = paused;
// Set restart_stream flag to make the stream restart on the next Message::NextFrame
if self.is_eos && !paused {
self.restart_stream = true;
}
}
/// Get if the media is paused or not.
#[inline(always)]
pub fn paused(&self) -> bool {
self.paused
}
/// Jumps to a specific position in the media.
/// The seeking is not perfectly accurate.
pub fn seek(&mut self, position: impl Into<Position>) -> Result<(), Error> {
self.source.seek_simple(
gst::SeekFlags::FLUSH,
gst::GenericFormattedValue::from(position.into()),
)?;
Ok(())
}
/// Get the current playback position in time.
pub fn position(&self) -> std::time::Duration {
std::time::Duration::from_nanos(
self.source
.query_position::<gst::ClockTime>()
.map_or(0, |pos| pos.nseconds()),
)
}
/// Get the media duration.
#[inline(always)]
pub fn duration(&self) -> std::time::Duration {
self.duration
}
/// Generates a list of thumbnails based on a set of positions in the media.
///
/// Slow; only needs to be called once for each instance.
/// It's best to call this at the very start of playback; otherwise the position may shift.
pub fn thumbnails(&mut self, positions: &[Position]) -> Result<Vec<img::Handle>, Error> {
let paused = self.paused();
let pos = self.position();
self.set_paused(false);
let out = positions
.iter()
.map(|&pos| {
self.seek(pos)?;
self.wait.recv().map_err(|_| Error::Sync)?;
Ok(self.frame_image())
})
.collect();
self.set_paused(paused);
self.seek(pos)?;
out
}
pub fn update(&mut self, message: VideoPlayerMessage) -> Command<VideoPlayerMessage> {
match message {
VideoPlayerMessage::NextFrame => {
let mut cmds = Vec::new();
let mut restart_stream = false;
if self.restart_stream {
restart_stream = true;
// Set flag to false to avoid potentially multiple seeks
self.restart_stream = false;
}
let mut eos_pause = false;
for msg in self.bus.iter() {
match msg.view() {
gst::MessageView::Error(err) => panic!("{:#?}", err),
gst::MessageView::Eos(_eos) => {
cmds.push(VideoPlayerMessage::EndOfPlayback.into_cmd());
if self.looping {
restart_stream = true;
} else {
eos_pause = true;
}
}
_ => {}
}
}
// Don't run eos_pause if restart_stream is true; fixes "pausing" after restarting a stream
if restart_stream {
if let Err(err) = self.restart_stream() {
eprintln!("cannot restart stream (can't seek): {:#?}", err);
}
} else if eos_pause {
self.is_eos = true;
self.set_paused(true);
}
return Command::batch(cmds);
}
VideoPlayerMessage::EndOfPlayback => {}
}
Command::none()
}
pub fn subscription(&self) -> Subscription<VideoPlayerMessage> {
if self.restart_stream || (!self.is_eos && !self.paused()) {
iced::time::every(Duration::from_secs_f64(0.5 / self.framerate))
.map(|_| VideoPlayerMessage::NextFrame)
} else {
Subscription::none()
}
}
/// Get the image handle of the current frame.
pub fn frame_image(&self) -> img::Handle {
self.frame
.lock()
.expect("failed to lock frame")
.clone()
.unwrap_or_else(|| img::Handle::from_pixels(0, 0, vec![]))
}
/// Wrap the output of `frame_image` in an `Image` widget.
pub fn frame_view(&self) -> Image<img::Handle> {
Image::new(self.frame_image())
}
/// Restarts a stream: seeks to the first frame, unpauses, and clears the `eos` flag.
pub fn restart_stream(&mut self) -> Result<(), Error> {
self.is_eos = false;
self.set_paused(false);
self.seek(0)?;
Ok(())
}
#[error("failed to lock internal sync primitive")]
Lock,
}

329 src/pipeline.rs Normal file

@@ -0,0 +1,329 @@
use iced_wgpu::primitive::pipeline::Primitive;
use iced_wgpu::wgpu;
use std::{
collections::BTreeMap,
sync::{Arc, Mutex},
};
#[repr(C)]
struct Uniforms {
rect: [f32; 4],
}
struct VideoPipeline {
pipeline: wgpu::RenderPipeline,
bg0_layout: wgpu::BindGroupLayout,
sampler: wgpu::Sampler,
textures: BTreeMap<u64, (wgpu::Texture, wgpu::Buffer, wgpu::BindGroup)>,
}
impl VideoPipeline {
fn new(device: &wgpu::Device, format: wgpu::TextureFormat) -> Self {
let shader = device.create_shader_module(wgpu::ShaderModuleDescriptor {
label: Some("iced_video_player shader"),
source: wgpu::ShaderSource::Wgsl(include_str!("shader.wgsl").into()),
});
let bg0_layout = device.create_bind_group_layout(&wgpu::BindGroupLayoutDescriptor {
label: Some("iced_video_player bind group 0 layout"),
entries: &[
wgpu::BindGroupLayoutEntry {
binding: 0,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Texture {
sample_type: wgpu::TextureSampleType::Float { filterable: true },
view_dimension: wgpu::TextureViewDimension::D2,
multisampled: false,
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 2,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
has_dynamic_offset: false,
min_binding_size: None,
},
count: None,
},
],
});
let layout = device.create_pipeline_layout(&wgpu::PipelineLayoutDescriptor {
label: Some("iced_video_player pipeline layout"),
bind_group_layouts: &[&bg0_layout],
push_constant_ranges: &[],
});
let pipeline = device.create_render_pipeline(&wgpu::RenderPipelineDescriptor {
label: Some("iced_video_player pipeline"),
layout: Some(&layout),
vertex: wgpu::VertexState {
module: &shader,
entry_point: "vs_main",
buffers: &[],
},
primitive: wgpu::PrimitiveState::default(),
depth_stencil: None,
multisample: wgpu::MultisampleState {
count: 1,
mask: !0,
alpha_to_coverage_enabled: false,
},
fragment: Some(wgpu::FragmentState {
module: &shader,
entry_point: "fs_main",
targets: &[Some(wgpu::ColorTargetState {
format,
blend: None,
write_mask: wgpu::ColorWrites::ALL,
})],
}),
multiview: None,
});
let sampler = device.create_sampler(&wgpu::SamplerDescriptor {
label: Some("iced_video_player sampler"),
address_mode_u: wgpu::AddressMode::ClampToEdge,
address_mode_v: wgpu::AddressMode::ClampToEdge,
address_mode_w: wgpu::AddressMode::ClampToEdge,
mag_filter: wgpu::FilterMode::Linear,
min_filter: wgpu::FilterMode::Linear,
mipmap_filter: wgpu::FilterMode::Nearest,
lod_min_clamp: 0.0,
lod_max_clamp: 1.0,
compare: None,
anisotropy_clamp: 1,
border_color: None,
});
VideoPipeline {
pipeline,
bg0_layout,
sampler,
textures: BTreeMap::new(),
}
}
fn upload(
&mut self,
device: &wgpu::Device,
queue: &wgpu::Queue,
video_id: u64,
(width, height): (u32, u32),
frame: &[u8],
) {
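// Lazily create this video's GPU resources (texture, uniform buffer, bind group) on first upload.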
if !self.textures.contains_key(&video_id) {
let texture = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced_video_player texture"),
size: wgpu::Extent3d {
width,
height,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rgba8UnormSrgb,
usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
view_formats: &[],
});
let view = texture.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced_video_player texture view"),
format: None,
dimension: None,
aspect: wgpu::TextureAspect::All,
base_mip_level: 0,
mip_level_count: None,
base_array_layer: 0,
array_layer_count: None,
});
let buffer = device.create_buffer(&wgpu::BufferDescriptor {
label: Some("iced_video_player uniform buffer"),
size: std::mem::size_of::<Uniforms>() as _,
usage: wgpu::BufferUsages::COPY_DST | wgpu::BufferUsages::UNIFORM,
mapped_at_creation: false,
});
let bind_group = device.create_bind_group(&wgpu::BindGroupDescriptor {
label: Some("iced_video_player bind group"),
layout: &self.bg0_layout,
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(&view),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&self.sampler),
},
wgpu::BindGroupEntry {
binding: 2,
resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: &buffer,
offset: 0,
size: None,
}),
},
],
});
self.textures
.insert(video_id, (texture, buffer, bind_group));
}
let (texture, _, _) = self.textures.get(&video_id).unwrap();
queue.write_texture(
wgpu::ImageCopyTexture {
texture,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
frame,
wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: Some(width * 4),
rows_per_image: Some(height),
},
wgpu::Extent3d {
width,
height,
depth_or_array_layers: 1,
},
);
}
fn prepare(&mut self, queue: &wgpu::Queue, video_id: u64, bounds: iced::Rectangle) {
if let Some((_, buffer, _)) = self.textures.get(&video_id) {
let uniforms = Uniforms {
rect: [
bounds.x,
bounds.y,
bounds.x + bounds.width,
bounds.y + bounds.height,
],
};
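// SAFETY: `Uniforms` is `#[repr(C)]` and holds only `f32`s, so viewing it as a byte slice is sound.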
queue.write_buffer(buffer, 0, unsafe {
std::slice::from_raw_parts(
&uniforms as *const _ as *const u8,
std::mem::size_of::<Uniforms>(),
)
});
}
}
fn draw(
&self,
target: &wgpu::TextureView,
encoder: &mut wgpu::CommandEncoder,
viewport: iced::Rectangle<u32>,
video_id: u64,
) {
if let Some((_, _, bind_group)) = self.textures.get(&video_id) {
let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("iced_video_player render pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {
view: target,
resolve_target: None,
ops: wgpu::Operations {
load: wgpu::LoadOp::Load,
store: wgpu::StoreOp::Store,
},
})],
depth_stencil_attachment: None,
timestamp_writes: None,
occlusion_query_set: None,
});
pass.set_pipeline(&self.pipeline);
pass.set_bind_group(0, bind_group, &[]);
pass.set_viewport(
viewport.x as _,
viewport.y as _,
viewport.width as _,
viewport.height as _,
0.0,
1.0,
);
pass.draw(0..4, 0..1);
}
}
}
#[derive(Debug, Clone)]
pub(crate) struct VideoPrimitive {
video_id: u64,
frame: Arc<Mutex<Vec<u8>>>,
size: (u32, u32),
upload_frame: bool,
}
impl VideoPrimitive {
pub fn new(
video_id: u64,
frame: Arc<Mutex<Vec<u8>>>,
size: (u32, u32),
upload_frame: bool,
) -> Self {
VideoPrimitive {
video_id,
frame,
size,
upload_frame,
}
}
}
impl Primitive for VideoPrimitive {
fn prepare(
&self,
format: wgpu::TextureFormat,
device: &wgpu::Device,
queue: &wgpu::Queue,
bounds: iced::Rectangle,
_target_size: iced::Size<u32>,
_scale_factor: f32,
storage: &mut iced_wgpu::primitive::pipeline::Storage,
) {
if !storage.has::<VideoPipeline>() {
storage.store(VideoPipeline::new(device, format));
}
let pipeline = storage.get_mut::<VideoPipeline>().unwrap();
if self.upload_frame {
pipeline.upload(
device,
queue,
self.video_id,
self.size,
self.frame.lock().expect("lock frame mutex").as_slice(),
);
}
pipeline.prepare(queue, self.video_id, bounds);
}
fn render(
&self,
storage: &iced_wgpu::primitive::pipeline::Storage,
target: &wgpu::TextureView,
_target_size: iced::Size<u32>,
viewport: iced::Rectangle<u32>,
encoder: &mut wgpu::CommandEncoder,
) {
let pipeline = storage.get::<VideoPipeline>().unwrap();
pipeline.draw(target, encoder, viewport, self.video_id);
}
}
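(Roughly: iced_wgpu calls `prepare` on every visible primitive while building a frame, then `render` while encoding it; the `Storage` map persists the `VideoPipeline` across frames, so the GPU resources above are created once per video rather than once per frame.)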

41 src/shader.wgsl Normal file

@@ -0,0 +1,41 @@
struct VertexOutput {
@builtin(position) position: vec4<f32>,
@location(0) uv: vec2<f32>,
}
struct Uniforms {
rect: vec4<f32>,
}
@group(0) @binding(0)
var t: texture_2d<f32>;
@group(0) @binding(1)
var s: sampler;
@group(0) @binding(2)
var<uniform> uniforms: Uniforms;
@vertex
fn vs_main(@builtin(vertex_index) in_vertex_index: u32) -> VertexOutput {
let quad = array<vec2<f32>, 6>(
uniforms.rect.xy,
uniforms.rect.zy,
uniforms.rect.xw,
uniforms.rect.zy,
uniforms.rect.zw,
uniforms.rect.xw,
);
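// (The `quad` array above is currently unused; on-screen placement comes from the
// viewport set in `VideoPipeline::draw`.)
// Oversized-triangle trick: vertices 0-2 get UVs (0,0), (2,0), (0,2), so a single
// triangle covers the whole viewport after clipping.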
var out: VertexOutput;
out.uv = vec2<f32>(0.0);
out.uv.x = select(0.0, 2.0, in_vertex_index == 1u);
out.uv.y = select(0.0, 2.0, in_vertex_index == 2u);
out.position = vec4<f32>(out.uv * vec2<f32>(2.0, -2.0) + vec2<f32>(-1.0, 1.0), 1.0, 1.0);
return out;
}
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
return textureSample(t, s, in.uv);
}

342 src/video.rs Normal file

@@ -0,0 +1,342 @@
use crate::Error;
use gst::prelude::*;
use gstreamer as gst;
use gstreamer_app as gst_app;
use iced::widget::image as img;
use std::cell::RefCell;
use std::sync::atomic::{AtomicBool, AtomicU64, Ordering};
use std::sync::{mpsc, Arc, Mutex};
use std::time::Instant;
/// Position in the media.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Position {
/// Position based on time.
///
/// Not the most accurate format for videos.
Time(std::time::Duration),
/// Position based on nth frame.
Frame(u64),
}
impl From<Position> for gst::GenericFormattedValue {
fn from(pos: Position) -> Self {
match pos {
Position::Time(t) => gst::ClockTime::from_nseconds(t.as_nanos() as _).into(),
Position::Frame(f) => gst::format::Default::from_u64(f).into(),
}
}
}
impl From<std::time::Duration> for Position {
fn from(t: std::time::Duration) -> Self {
Position::Time(t)
}
}
impl From<u64> for Position {
fn from(f: u64) -> Self {
Position::Frame(f)
}
}
pub(crate) struct Internal {
pub(crate) id: u64,
pub(crate) bus: gst::Bus,
pub(crate) source: gst::Bin,
pub(crate) width: i32,
pub(crate) height: i32,
pub(crate) framerate: f64,
pub(crate) duration: std::time::Duration,
pub(crate) frame: Arc<Mutex<Vec<u8>>>, // ideally would be Arc<Mutex<[T]>>
pub(crate) upload_frame: Arc<AtomicBool>,
pub(crate) wait: mpsc::Receiver<()>,
pub(crate) paused: bool,
pub(crate) muted: bool,
pub(crate) looping: bool,
pub(crate) is_eos: bool,
pub(crate) restart_stream: bool,
pub(crate) next_redraw: Instant,
}
impl Internal {
pub(crate) fn seek(&self, position: impl Into<Position>) -> Result<(), Error> {
self.source.seek_simple(
gst::SeekFlags::FLUSH,
gst::GenericFormattedValue::from(position.into()),
)?;
Ok(())
}
pub(crate) fn restart_stream(&mut self) -> Result<(), Error> {
self.is_eos = false;
self.set_paused(false);
self.seek(0)?;
Ok(())
}
pub(crate) fn set_paused(&mut self, paused: bool) {
self.source
.set_state(if paused {
gst::State::Paused
} else {
gst::State::Playing
})
.unwrap(/* state was changed in ctor; state errors caught there */);
self.paused = paused;
// Set restart_stream flag to make the stream restart on the next Message::NextFrame
if self.is_eos && !paused {
self.restart_stream = true;
}
}
}
/// A multimedia video loaded from a URI (e.g., a local file path or HTTP stream).
pub struct Video(pub(crate) RefCell<Internal>);
impl Drop for Video {
fn drop(&mut self) {
self.0
.borrow()
.source
.set_state(gst::State::Null)
.expect("failed to set state");
}
}
impl Video {
/// Create a new video player from a given video which loads from `uri`.
///
/// Set `live` if the streaming source is indefinite (e.g. a live stream). In that case no
/// duration is queried (the query would fail, and a duration is nonsensical for a live stream),
/// so the duration is reported as zero.
pub fn new(uri: &url::Url, live: bool) -> Result<Self, Error> {
static NEXT_ID: AtomicU64 = AtomicU64::new(0);
let id = NEXT_ID.fetch_add(1, Ordering::SeqCst);
gst::init()?;
let source = gst::parse::launch(&format!("playbin uri=\"{}\" video-sink=\"videoconvert ! videoscale ! appsink name=app_sink caps=video/x-raw,format=RGBA,pixel-aspect-ratio=1/1\"", uri.as_str()))?;
let source = source.downcast::<gst::Bin>().unwrap();
let video_sink: gst::Element = source.property("video-sink");
let pad = video_sink.pads().get(0).cloned().unwrap();
let pad = pad.dynamic_cast::<gst::GhostPad>().unwrap();
let bin = pad
.parent_element()
.unwrap()
.downcast::<gst::Bin>()
.unwrap();
let app_sink = bin.by_name("app_sink").unwrap();
let app_sink = app_sink.downcast::<gst_app::AppSink>().unwrap();
source.set_state(gst::State::Playing)?;
// wait for up to 5 seconds until the decoder gets the source capabilities
source.state(gst::ClockTime::from_seconds(5)).0?;
// extract resolution and framerate
// TODO(jazzfool): maybe we want to extract some other information too?
let caps = pad.current_caps().ok_or(Error::Caps)?;
let s = caps.structure(0).ok_or(Error::Caps)?;
let width = s.get::<i32>("width").map_err(|_| Error::Caps)?;
let height = s.get::<i32>("height").map_err(|_| Error::Caps)?;
let framerate = s
.get::<gst::Fraction>("framerate")
.map_err(|_| Error::Caps)?;
let duration = if !live {
std::time::Duration::from_nanos(
source
.query_duration::<gst::ClockTime>()
.ok_or(Error::Duration)?
.nseconds(),
)
} else {
std::time::Duration::from_secs(0)
};
let frame_buf = vec![0; (width * height * 4) as _];
let frame = Arc::new(Mutex::new(frame_buf));
let frame_ref = Arc::clone(&frame);
let upload_frame = Arc::new(AtomicBool::new(true));
let upload_frame_ref = Arc::clone(&upload_frame);
let (notify, wait) = mpsc::channel();
app_sink.set_callbacks(
gst_app::AppSinkCallbacks::builder()
.new_sample(move |sink| {
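// This callback runs on GStreamer's streaming thread: it copies the decoded
// RGBA frame into the shared buffer and flags it for upload to the GPU.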
let sample = sink.pull_sample().map_err(|_| gst::FlowError::Eos)?;
let buffer = sample.buffer().ok_or(gst::FlowError::Error)?;
let map = buffer.map_readable().map_err(|_| gst::FlowError::Error)?;
frame_ref
.lock()
.map_err(|_| gst::FlowError::Error)?
.copy_from_slice(map.as_slice());
upload_frame_ref.store(true, Ordering::SeqCst);
notify.send(()).map_err(|_| gst::FlowError::Error)?;
Ok(gst::FlowSuccess::Ok)
})
.build(),
);
Ok(Video(RefCell::new(Internal {
id,
bus: source.bus().unwrap(),
source,
width,
height,
framerate: framerate.numer() as f64 / framerate.denom() as f64,
duration,
frame,
upload_frame,
wait,
paused: false,
muted: false,
looping: false,
is_eos: false,
restart_stream: false,
next_redraw: Instant::now(),
})))
}
/// Get the size/resolution of the video as `(width, height)`.
#[inline(always)]
pub fn size(&self) -> (i32, i32) {
(self.0.borrow().width, self.0.borrow().height)
}
/// Get the framerate of the video as frames per second.
#[inline(always)]
pub fn framerate(&self) -> f64 {
self.0.borrow().framerate
}
/// Set the volume multiplier of the audio.
/// `0.0` = 0% volume, `1.0` = 100% volume.
///
/// This uses a linear scale; for example, `0.5` is perceived as half as loud.
pub fn set_volume(&mut self, volume: f64) {
self.0.borrow().source.set_property("volume", &volume);
}
/// Set if the audio is muted or not, without changing the volume.
pub fn set_muted(&mut self, muted: bool) {
let mut inner = self.0.borrow_mut();
inner.muted = muted;
inner.source.set_property("mute", &muted);
}
/// Get if the audio is muted or not.
#[inline(always)]
pub fn muted(&self) -> bool {
self.0.borrow().muted
}
/// Get if the stream ended or not.
#[inline(always)]
pub fn eos(&self) -> bool {
self.0.borrow().is_eos
}
/// Get if the media will loop or not.
#[inline(always)]
pub fn looping(&self) -> bool {
self.0.borrow().looping
}
/// Set if the media will loop or not.
#[inline(always)]
pub fn set_looping(&mut self, looping: bool) {
self.0.borrow_mut().looping = looping;
}
/// Set if the media is paused or not.
pub fn set_paused(&mut self, paused: bool) {
let mut inner = self.0.borrow_mut();
inner.set_paused(paused);
}
/// Get if the media is paused or not.
#[inline(always)]
pub fn paused(&self) -> bool {
self.0.borrow().paused
}
/// Jumps to a specific position in the media.
/// The seeking is not perfectly accurate.
pub fn seek(&mut self, position: impl Into<Position>) -> Result<(), Error> {
self.0.borrow_mut().seek(position)
}
/// Get the current playback position in time.
pub fn position(&self) -> std::time::Duration {
std::time::Duration::from_nanos(
self.0
.borrow()
.source
.query_position::<gst::ClockTime>()
.map_or(0, |pos| pos.nseconds()),
)
}
/// Get the media duration.
#[inline(always)]
pub fn duration(&self) -> std::time::Duration {
self.0.borrow().duration
}
/// Restarts a stream: seeks to the first frame, unpauses, and clears the `eos` flag.
pub fn restart_stream(&mut self) -> Result<(), Error> {
self.0.borrow_mut().restart_stream()
}
/// Generates a list of thumbnails based on a set of positions in the media.
///
/// Slow; only needs to be called once for each instance.
/// It's best to call this at the very start of playback; otherwise the position may shift.
pub fn thumbnails(&mut self, positions: &[Position]) -> Result<Vec<img::Handle>, Error> {
let paused = self.paused();
let pos = self.position();
self.set_paused(false);
let out = positions
.iter()
.map(|&pos| {
self.seek(pos)?;
let inner = self.0.borrow();
// For some reason we must wait for two frames here; perhaps the old frame
// arrives in the small window between the seek and the first wait?
inner.wait.recv().map_err(|_| Error::Sync)?;
inner.wait.recv().map_err(|_| Error::Sync)?;
Ok(img::Handle::from_pixels(
inner.width as _,
inner.height as _,
self.0
.borrow()
.frame
.lock()
.map_err(|_| Error::Lock)?
.clone(),
))
})
.collect();
self.set_paused(paused);
self.seek(pos)?;
out
}
}
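A small usage sketch for `thumbnails` above, assuming a mutable `video` created with `Video::new`; the positions are illustrative:

use iced_video_player::{Position, Video};

// Four evenly spaced thumbnails across the media (error handling via `?`).
let duration = video.duration();
let positions: Vec<Position> = (1u32..=4)
    .map(|i| Position::Time(duration * i / 5))
    .collect();
let thumbnails: Vec<iced::widget::image::Handle> = video.thumbnails(&positions)?;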

178 src/video_player.rs Normal file

@@ -0,0 +1,178 @@
use crate::{pipeline::VideoPrimitive, video::Video};
use gstreamer as gst;
use iced::{
advanced::{self, graphics::core::event::Status, layout, widget, Widget},
Element,
};
use iced_wgpu::primitive::pipeline::Renderer as PrimitiveRenderer;
use std::{marker::PhantomData, sync::atomic::Ordering};
use std::{sync::Arc, time::Duration};
/// Video player which displays the current frame of a [`Video`](crate::Video).
pub struct VideoPlayer<'a, Message, Theme = iced::Theme, Renderer = iced::Renderer>
where
Renderer: PrimitiveRenderer,
{
video: &'a Video,
on_end_of_stream: Option<Message>,
on_new_frame: Option<Message>,
_phantom: PhantomData<(Theme, Renderer)>,
}
impl<'a, Message, Theme, Renderer> VideoPlayer<'a, Message, Theme, Renderer>
where
Renderer: PrimitiveRenderer,
{
pub fn new(video: &'a Video) -> Self {
VideoPlayer {
video,
on_end_of_stream: None,
on_new_frame: None,
_phantom: Default::default(),
}
}
pub fn on_end_of_stream(self, on_end_of_stream: Message) -> Self {
VideoPlayer {
on_end_of_stream: Some(on_end_of_stream),
..self
}
}
pub fn on_new_frame(self, on_new_frame: Message) -> Self {
VideoPlayer {
on_new_frame: Some(on_new_frame),
..self
}
}
}
impl<'a, Message, Theme, Renderer> Widget<Message, Theme, Renderer>
for VideoPlayer<'a, Message, Theme, Renderer>
where
Message: Clone,
Renderer: PrimitiveRenderer,
{
fn size(&self) -> iced::Size<iced::Length> {
iced::Size {
width: iced::Length::Shrink,
height: iced::Length::Shrink,
}
}
fn layout(
&self,
_tree: &mut widget::Tree,
_renderer: &Renderer,
limits: &layout::Limits,
) -> layout::Node {
let (width, height) = self.video.size();
let size = limits.resolve(
iced::Length::Shrink,
iced::Length::Shrink,
iced::Size::new(width as _, height as _),
);
layout::Node::new(size)
}
fn draw(
&self,
_tree: &widget::Tree,
renderer: &mut Renderer,
_theme: &Theme,
_style: &advanced::renderer::Style,
layout: advanced::Layout<'_>,
_cursor: advanced::mouse::Cursor,
_viewport: &iced::Rectangle,
) {
let inner = self.video.0.borrow();
renderer.draw_pipeline_primitive(
layout.bounds(),
VideoPrimitive::new(
inner.id,
Arc::clone(&inner.frame),
(inner.width as _, inner.height as _),
inner.upload_frame.load(Ordering::SeqCst),
),
);
}
fn on_event(
&mut self,
_state: &mut widget::Tree,
event: iced::Event,
_layout: advanced::Layout<'_>,
_cursor: advanced::mouse::Cursor,
_renderer: &Renderer,
_clipboard: &mut dyn advanced::Clipboard,
shell: &mut advanced::Shell<'_, Message>,
_viewport: &iced::Rectangle,
) -> Status {
let mut inner = self.video.0.borrow_mut();
if let iced::Event::Window(_, iced::window::Event::RedrawRequested(now)) = event {
if inner.restart_stream || (!inner.is_eos && !inner.paused) {
let mut restart_stream = false;
if inner.restart_stream {
restart_stream = true;
// Set flag to false to avoid potentially multiple seeks
inner.restart_stream = false;
}
let mut eos_pause = false;
for msg in inner.bus.iter() {
match msg.view() {
gst::MessageView::Error(err) => panic!("{:#?}", err),
gst::MessageView::Eos(_eos) => {
if let Some(on_end_of_stream) = self.on_end_of_stream.clone() {
shell.publish(on_end_of_stream);
}
if inner.looping {
restart_stream = true;
} else {
eos_pause = true;
}
}
_ => {}
}
}
// Don't run eos_pause if restart_stream is true; fixes "pausing" after restarting a stream
if restart_stream {
if let Err(err) = inner.restart_stream() {
eprintln!("cannot restart stream (can't seek): {:#?}", err);
}
} else if eos_pause {
inner.is_eos = true;
inner.set_paused(true);
}
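// Schedule the next redraw to land on the next frame boundary of the video's framerate.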
let redraw_interval = 1.0 / inner.framerate;
let until_redraw =
redraw_interval - (now - inner.next_redraw).as_secs_f64() % redraw_interval;
inner.next_redraw = now + Duration::from_secs_f64(until_redraw);
shell.request_redraw(iced::window::RedrawRequest::At(inner.next_redraw));
if let Some(on_new_frame) = self.on_new_frame.clone() {
shell.publish(on_new_frame);
}
}
Status::Captured
} else {
Status::Ignored
}
}
}
impl<'a, Message, Theme, Renderer> From<VideoPlayer<'a, Message, Theme, Renderer>>
for Element<'a, Message, Theme, Renderer>
where
Message: 'a + Clone,
Theme: 'a,
Renderer: 'a + PrimitiveRenderer,
{
fn from(video_player: VideoPlayer<'a, Message, Theme, Renderer>) -> Self {
Self::new(video_player)
}
}