convert nv12 to rgb on gpu

jazzfool 2024-09-29 19:44:15 +10:00
parent 5d87dbdf88
commit 9d60f260b0
3 changed files with 99 additions and 21 deletions


@@ -14,7 +14,7 @@ struct VideoPipeline {
pipeline: wgpu::RenderPipeline,
bg0_layout: wgpu::BindGroupLayout,
sampler: wgpu::Sampler,
textures: BTreeMap<u64, (wgpu::Texture, wgpu::Buffer, wgpu::BindGroup)>,
textures: BTreeMap<u64, (wgpu::Texture, wgpu::Texture, wgpu::Buffer, wgpu::BindGroup)>,
}
impl VideoPipeline {
@@ -40,11 +40,21 @@ impl VideoPipeline {
wgpu::BindGroupLayoutEntry {
binding: 1,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
ty: wgpu::BindingType::Texture {
sample_type: wgpu::TextureSampleType::Float { filterable: true },
view_dimension: wgpu::TextureViewDimension::D2,
multisampled: false,
},
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 2,
visibility: wgpu::ShaderStages::FRAGMENT,
ty: wgpu::BindingType::Sampler(wgpu::SamplerBindingType::Filtering),
count: None,
},
wgpu::BindGroupLayoutEntry {
binding: 3,
visibility: wgpu::ShaderStages::VERTEX,
ty: wgpu::BindingType::Buffer {
ty: wgpu::BufferBindingType::Uniform,
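The reworked layout binds four resources: the Y-plane texture at binding 0, the UV-plane texture at binding 1, a shared filtering sampler at binding 2, and the uniform buffer at binding 3, matching the WGSL declarations further down. The two texture entries are identical apart from the binding index, so they could come from a small helper like this sketch (the helper name is illustrative, not part of the commit):

fn plane_entry(binding: u32) -> wgpu::BindGroupLayoutEntry {
    // Fragment-visible, filterable 2D texture, as used for both Y and UV.
    wgpu::BindGroupLayoutEntry {
        binding,
        visibility: wgpu::ShaderStages::FRAGMENT,
        ty: wgpu::BindingType::Texture {
            sample_type: wgpu::TextureSampleType::Float { filterable: true },
            view_dimension: wgpu::TextureViewDimension::D2,
            multisampled: false,
        },
        count: None,
    }
}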
@@ -121,7 +131,7 @@ impl VideoPipeline {
frame: &[u8],
) {
if !self.textures.contains_key(&video_id) {
let texture = device.create_texture(&wgpu::TextureDescriptor {
let texture_y = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced_video_player texture"),
size: wgpu::Extent3d {
width,
@@ -131,12 +141,38 @@ impl VideoPipeline {
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rgba8UnormSrgb,
format: wgpu::TextureFormat::R8Unorm,
usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
view_formats: &[],
});
let view = texture.create_view(&wgpu::TextureViewDescriptor {
let texture_uv = device.create_texture(&wgpu::TextureDescriptor {
label: Some("iced_video_player texture"),
size: wgpu::Extent3d {
width: width / 2,
height: height / 2,
depth_or_array_layers: 1,
},
mip_level_count: 1,
sample_count: 1,
dimension: wgpu::TextureDimension::D2,
format: wgpu::TextureFormat::Rg8Unorm,
usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
view_formats: &[],
});
let view_y = texture_y.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced_video_player texture view"),
format: None,
dimension: None,
aspect: wgpu::TextureAspect::All,
base_mip_level: 0,
mip_level_count: None,
base_array_layer: 0,
array_layer_count: None,
});
let view_uv = texture_uv.create_view(&wgpu::TextureViewDescriptor {
label: Some("iced_video_player texture view"),
format: None,
dimension: None,
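NV12 stores a full-resolution luma (Y) plane and a quarter-resolution interleaved chroma (UV) plane, hence the two formats above: R8Unorm (one byte per texel) at width x height for Y, and Rg8Unorm (two bytes per texel, U in r and V in g) at width/2 x height/2 for UV. The two descriptors differ only in size and format, so they could share a helper like this sketch (assumed name, not in the commit):

fn plane_descriptor(
    width: u32,
    height: u32,
    format: wgpu::TextureFormat,
) -> wgpu::TextureDescriptor<'static> {
    wgpu::TextureDescriptor {
        label: Some("iced_video_player texture"),
        size: wgpu::Extent3d { width, height, depth_or_array_layers: 1 },
        mip_level_count: 1,
        sample_count: 1,
        dimension: wgpu::TextureDimension::D2,
        format,
        // Written to from CPU frames, sampled in the fragment shader.
        usage: wgpu::TextureUsages::COPY_DST | wgpu::TextureUsages::TEXTURE_BINDING,
        view_formats: &[],
    }
}

// plane_descriptor(width, height, wgpu::TextureFormat::R8Unorm)          // Y
// plane_descriptor(width / 2, height / 2, wgpu::TextureFormat::Rg8Unorm) // UV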
@@ -160,14 +196,18 @@ impl VideoPipeline {
entries: &[
wgpu::BindGroupEntry {
binding: 0,
resource: wgpu::BindingResource::TextureView(&view),
resource: wgpu::BindingResource::TextureView(&view_y),
},
wgpu::BindGroupEntry {
binding: 1,
resource: wgpu::BindingResource::Sampler(&self.sampler),
resource: wgpu::BindingResource::TextureView(&view_uv),
},
wgpu::BindGroupEntry {
binding: 2,
resource: wgpu::BindingResource::Sampler(&self.sampler),
},
wgpu::BindGroupEntry {
binding: 3,
resource: wgpu::BindingResource::Buffer(wgpu::BufferBinding {
buffer: &buffer,
offset: 0,
@@ -178,22 +218,22 @@ impl VideoPipeline {
});
self.textures
.insert(video_id, (texture, buffer, bind_group));
.insert(video_id, (texture_y, texture_uv, buffer, bind_group));
}
let (texture, _, _) = self.textures.get(&video_id).unwrap();
let (texture_y, texture_uv, _, _) = self.textures.get(&video_id).unwrap();
queue.write_texture(
wgpu::ImageCopyTexture {
texture,
texture: texture_y,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
frame,
&frame[..(width * height) as usize],
wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: Some(width * 4),
bytes_per_row: Some(width),
rows_per_image: Some(height),
},
wgpu::Extent3d {
@@ -202,10 +242,30 @@ impl VideoPipeline {
depth_or_array_layers: 1,
},
);
queue.write_texture(
wgpu::ImageCopyTexture {
texture: texture_uv,
mip_level: 0,
origin: wgpu::Origin3d::ZERO,
aspect: wgpu::TextureAspect::All,
},
&frame[(width * height) as usize..],
wgpu::ImageDataLayout {
offset: 0,
bytes_per_row: Some(width),
rows_per_image: Some(height / 2),
},
wgpu::Extent3d {
width: width / 2,
height: height / 2,
depth_or_array_layers: 1,
},
);
}
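The two write_texture calls rely on NV12's packed layout: the first width * height bytes of frame are the Y plane, and the remaining width * height / 2 bytes are the interleaved UV plane. Note that bytes_per_row is width for both writes; the UV plane only has width / 2 texels per row, but each Rg8Unorm texel is two bytes wide. A sketch of the plane split (hypothetical helper; the commit slices frame inline instead):

/// Split a packed NV12 frame into its Y and interleaved-UV planes.
fn nv12_planes(frame: &[u8], width: usize, height: usize) -> (&[u8], &[u8]) {
    let y_len = width * height; // 1 byte per pixel, full resolution
    debug_assert_eq!(frame.len(), y_len + y_len / 2);
    (&frame[..y_len], &frame[y_len..]) // UV: 2 bytes per 2x2 pixel block
}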
fn prepare(&mut self, queue: &wgpu::Queue, video_id: u64, bounds: &iced::Rectangle) {
if let Some((_, buffer, _)) = self.textures.get(&video_id) {
if let Some((_, _, buffer, _)) = self.textures.get(&video_id) {
let uniforms = Uniforms {
rect: [
bounds.x,
@@ -230,7 +290,7 @@ impl VideoPipeline {
viewport: &iced::Rectangle<u32>,
video_id: u64,
) {
if let Some((_, _, bind_group)) = self.textures.get(&video_id) {
if let Some((_, _, _, bind_group)) = self.textures.get(&video_id) {
let mut pass = encoder.begin_render_pass(&wgpu::RenderPassDescriptor {
label: Some("iced_video_player render pass"),
color_attachments: &[Some(wgpu::RenderPassColorAttachment {


@@ -8,12 +8,15 @@ struct Uniforms {
}
@group(0) @binding(0)
var t: texture_2d<f32>;
var tex_y: texture_2d<f32>;
@group(0) @binding(1)
var s: sampler;
var tex_uv: texture_2d<f32>;
@group(0) @binding(2)
var s: sampler;
@group(0) @binding(3)
var<uniform> uniforms: Uniforms;
@vertex
@@ -37,5 +40,19 @@ fn vs_main(@builtin(vertex_index) in_vertex_index: u32) -> VertexOutput {
@fragment
fn fs_main(in: VertexOutput) -> @location(0) vec4<f32> {
return textureSample(t, s, in.uv);
let yuv2r = vec3<f32>(1.164, 0.0, 1.596);
let yuv2g = vec3<f32>(1.164, -0.391, -0.813);
let yuv2b = vec3<f32>(1.164, 2.018, 0.0);
var yuv = vec3<f32>(0.0);
yuv.x = textureSample(tex_y, s, in.uv).r - 0.0625;
yuv.y = textureSample(tex_uv, s, in.uv).r - 0.5;
yuv.z = textureSample(tex_uv, s, in.uv).g - 0.5;
var rgb = vec3<f32>(0.0);
rgb.x = dot(yuv, yuv2r);
rgb.y = dot(yuv, yuv2g);
rgb.z = dot(yuv, yuv2b);
return vec4<f32>(pow(rgb, vec3<f32>(2.2)), 1.0);
}
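The constants here are the common BT.601 limited-range ("TV range") coefficients: Y is offset by 16/256 = 0.0625 and chroma by 128/256 = 0.5 before the matrix rows are applied. The final pow(rgb, 2.2) linearizes the gamma-encoded result by hand, presumably because the R8Unorm/Rg8Unorm planes, unlike the previous Rgba8UnormSrgb texture, get no sRGB decode when sampled. A CPU reference of the same math, useful for sanity-checking the shader (a sketch, not part of the commit):

/// BT.601 limited-range YUV -> gamma-encoded RGB, mirroring the WGSL above.
/// Inputs are normalized bytes (value / 255.0).
fn yuv_to_rgb(y: f32, u: f32, v: f32) -> [f32; 3] {
    let (y, u, v) = (y - 0.0625, u - 0.5, v - 0.5);
    [
        1.164 * y + 1.596 * v,             // R
        1.164 * y - 0.391 * u - 0.813 * v, // G
        1.164 * y + 2.018 * u,             // B
    ] // still gamma-encoded; the shader then applies pow(_, 2.2)
}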


@@ -113,13 +113,13 @@ impl Video {
/// Create a new video player from a given video which loads from `uri`.
/// Note that live sources will report the duration to be zero.
pub fn new(uri: &url::Url) -> Result<Self, Error> {
let pipeline = format!("uridecodebin uri=\"{}\" ! videoconvert ! videoscale ! appsink name=iced_video caps=video/x-raw,format=RGBA,pixel-aspect-ratio=1/1", uri.as_str());
let pipeline = format!("uridecodebin uri=\"{}\" ! videoconvert ! videoscale ! appsink name=iced_video caps=video/x-raw,pixel-aspect-ratio=1/1", uri.as_str());
Self::from_pipeline(pipeline, None)
}
/// Creates a new video based on a GStreamer pipeline, in the same format as used by gst-launch-1.0.
/// Expects an appsink plugin to be present with name set to `iced_video` and caps to
/// `video/x-raw,format=RGBA,pixel-aspect-ratio=1/1`
/// `video/x-raw,pixel-aspect-ratio=1/1`
pub fn from_pipeline<S: AsRef<str>>(pipeline: S, is_live: Option<bool>) -> Result<Self, Error> {
gst::init()?;
let pipeline = gst::parse::launch(pipeline.as_ref())?
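Dropping format=RGBA from the caps lets the appsink negotiate NV12, so videoconvert can usually pass decoder output through untouched instead of doing a CPU-side colorspace conversion. Call-site usage is unchanged; a minimal example (the file path is hypothetical, and Error is assumed to be the crate's exported error type):

use iced_video_player::Video;

fn open_video() -> Result<Video, iced_video_player::Error> {
    // Any uridecodebin-compatible URI works; this path is illustrative.
    let uri = url::Url::parse("file:///home/user/video.mp4").expect("valid URL");
    Video::new(&uri)
}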
@@ -131,7 +131,7 @@ impl Video {
/// Creates a new video based on a GStreamer pipeline.
/// Expects an appsink plugin to be present with name set to `iced_video` and caps to
/// `video/x-raw,format=RGBA,pixel-aspect-ratio=1/1`
/// `video/x-raw,pixel-aspect-ratio=1/1`
pub fn from_gst_pipeline(
pipeline: gst::Pipeline,
is_live: Option<bool>,
@@ -192,7 +192,8 @@ impl Video {
std::time::Duration::from_secs(0)
};
let frame_buf = vec![0; (width * height * 4) as _];
// NV12 = 12bpp
let frame_buf = vec![0u8; (width as usize * height as usize * 3).div_ceil(2)];
let frame = Arc::new(Mutex::new(frame_buf));
let frame_ref = Arc::clone(&frame);
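The new buffer size follows from NV12's 12 bits per pixel: width * height bytes of Y plus half that of interleaved UV, versus 4 bytes per pixel for RGBA. A quick check of the arithmetic (sketch):

/// NV12 frame size: 8 bpp luma + 4 bpp effective chroma = 12 bpp.
fn nv12_frame_len(width: usize, height: usize) -> usize {
    (width * height * 3).div_ceil(2)
}

#[test]
fn frame_len_1080p() {
    // 1920 * 1080 * 3 / 2 = 3,110,400 bytes; RGBA would need 8,294,400.
    assert_eq!(nv12_frame_len(1920, 1080), 3_110_400);
}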