#![allow(dead_code, unused_variables, clippy::unnecessary_wraps)]
use std::sync::Arc;
use re_video::{decode::FrameContent, Chunk, Frame, Time};
use parking_lot::Mutex;
use crate::{
resource_managers::SourceImageDataFormat,
video::{
player::{TimedDecodingError, VideoTexture},
VideoPlayerError,
},
wgpu_resources::GpuTexture,
RenderContext,
};
/// State shared between the decoder's output callback and [`VideoChunkDecoder`].
#[derive(Default)]
struct DecoderOutput {
    // Decoded frames, pushed in the order the decoder emits them.
    // NOTE(review): `latest_at_idx` in `update_video_texture` looks up frames by
    // presentation timestamp — presumably the decoder emits frames in PTS order; confirm.
    frames: Vec<Frame>,

    // Most recent decoding error, if any.
    // Cleared whenever a frame decodes successfully (see `on_output` in `new`).
    error: Option<TimedDecodingError>,
}
/// Wraps an async video decoder, buffering its decoded frames so they can be
/// uploaded to a texture at a requested presentation timestamp.
pub struct VideoChunkDecoder {
    decoder: Box<dyn re_video::decode::AsyncDecoder>,

    // Shared with the `on_output` callback handed to `decoder`;
    // the callback may run on another thread (it is `Send + Sync`).
    decoder_output: Arc<Mutex<DecoderOutput>>,
}
impl VideoChunkDecoder {
    /// Creates a new chunk decoder.
    ///
    /// `make_decoder` receives the output callback and must construct the
    /// underlying async decoder. The callback pushes decoded frames (and
    /// records errors) into a buffer shared with the returned `Self`.
    pub fn new(
        debug_name: String,
        make_decoder: impl FnOnce(
            Box<dyn Fn(re_video::decode::Result<Frame>) + Send + Sync>,
        )
            -> re_video::decode::Result<Box<dyn re_video::decode::AsyncDecoder>>,
    ) -> Result<Self, VideoPlayerError> {
        re_tracing::profile_function!();

        let decoder_output = Arc::new(Mutex::new(DecoderOutput::default()));

        // Invoked by the decoder for every decoded frame or decoding error.
        // Being `Send + Sync`, it may be called from a different thread than
        // the one driving this `VideoChunkDecoder`.
        let on_output = {
            let decoder_output = decoder_output.clone();
            move |frame: re_video::decode::Result<Frame>| match frame {
                Ok(frame) => {
                    re_log::trace!(
                        "Decoded frame at PTS {:?}",
                        frame.info.presentation_timestamp
                    );
                    let mut output = decoder_output.lock();
                    output.frames.push(frame);
                    // A successful decode clears any previously recorded error.
                    output.error = None;
                }
                Err(err) => {
                    re_log::debug_once!("Error during decoding of {debug_name}: {err}");
                    let err = VideoPlayerError::Decoding(err);
                    let mut output = decoder_output.lock();
                    if let Some(error) = &mut output.error {
                        // An error is already pending: only replace the message,
                        // keeping the existing `TimedDecodingError` (presumably so
                        // its original timing information is preserved — confirm).
                        error.latest_error = err;
                    } else {
                        output.error = Some(TimedDecodingError::new(err));
                    }
                }
            }
        };

        let decoder = make_decoder(Box::new(on_output))?;

        Ok(Self {
            decoder,
            decoder_output,
        })
    }

    /// Submits a chunk of encoded video data for asynchronous decoding.
    ///
    /// Decoded frames arrive later via the output callback set up in [`Self::new`].
    pub fn decode(&mut self, chunk: Chunk) -> Result<(), VideoPlayerError> {
        self.decoder.submit_chunk(chunk)?;
        Ok(())
    }

    /// Notifies the decoder that no further chunks will be submitted.
    pub fn end_of_video(&mut self) -> Result<(), VideoPlayerError> {
        self.decoder.end_of_video()?;
        Ok(())
    }

    /// Updates `video_texture` to show the frame for `presentation_timestamp`,
    /// if a suitable decoded frame is buffered and the texture isn't already
    /// showing it.
    ///
    /// # Errors
    ///
    /// Returns [`VideoPlayerError::EmptyBuffer`] when no buffered frame has a
    /// PTS at or before `presentation_timestamp`.
    pub fn update_video_texture(
        &mut self,
        render_ctx: &RenderContext,
        video_texture: &mut VideoTexture,
        presentation_timestamp: Time,
    ) -> Result<(), VideoPlayerError> {
        let mut decoder_output = self.decoder_output.lock();
        let frames = &mut decoder_output.frames;

        // Latest buffered frame with PTS <= presentation_timestamp.
        let Some(frame_idx) = re_video::demux::latest_at_idx(
            frames,
            |frame| frame.info.presentation_timestamp,
            &presentation_timestamp,
        ) else {
            return Err(VideoPlayerError::EmptyBuffer);
        };

        // Discard all frames older than the chosen one;
        // the chosen frame is now at index 0.
        drop(frames.drain(0..frame_idx));
        let frame_idx = 0;
        let frame = &frames[frame_idx];

        let frame_time_range = frame.info.presentation_time_range();
        let is_up_to_date = video_texture
            .frame_info
            .as_ref()
            .is_some_and(|info| info.presentation_time_range() == frame_time_range);

        // Upload only when the frame actually covers the requested time and the
        // texture isn't already showing it. If the frame does NOT cover the
        // requested time (a gap in decoded frames), the texture deliberately
        // keeps whatever (possibly stale) frame it had.
        if frame_time_range.contains(&presentation_timestamp) && !is_up_to_date {
            #[cfg(target_arch = "wasm32")]
            {
                video_texture.source_pixel_format = copy_web_video_frame_to_texture(
                    render_ctx,
                    &frame.content,
                    &video_texture.texture,
                )?;
            }
            #[cfg(not(target_arch = "wasm32"))]
            {
                video_texture.source_pixel_format = copy_native_video_frame_to_texture(
                    render_ctx,
                    &frame.content,
                    &video_texture.texture,
                )?;
            }

            video_texture.frame_info = Some(frame.info.clone());
        }

        Ok(())
    }

    /// Resets the underlying decoder and drops all buffered frames and any
    /// pending error.
    pub fn reset(&mut self) -> Result<(), VideoPlayerError> {
        self.decoder.reset()?;
        let mut decoder_output = self.decoder_output.lock();
        decoder_output.error = None;
        decoder_output.frames.clear();
        Ok(())
    }

    /// Takes the most recent decoding error, if any, clearing it in the process.
    pub fn take_error(&mut self) -> Option<TimedDecodingError> {
        self.decoder_output.lock().error.take()
    }
}
/// Copies a decoded web [`web_sys::VideoFrame`] into `target_texture` via the
/// browser's external-image copy path.
///
/// Returns the format the texture contents should be interpreted as
/// (the texture's own creation format — no CPU-side conversion happens here).
#[cfg(target_arch = "wasm32")]
fn copy_web_video_frame_to_texture(
    ctx: &RenderContext,
    frame: &FrameContent,
    target_texture: &GpuTexture,
) -> Result<SourceImageDataFormat, VideoPlayerError> {
    let size = wgpu::Extent3d {
        width: frame.display_width(),
        height: frame.display_height(),
        depth_or_array_layers: 1,
    };
    // NOTE(review): on wasm, `FrameContent` apparently coerces/derefs to a
    // `web_sys::VideoFrame` — confirm against `re_video`'s wasm definition.
    let frame: &web_sys::VideoFrame = frame;

    let source = {
        // HACK: the `wgpu_types::ExternalImageSource` used here has no
        // `VideoFrame` variant, so the `VideoFrame` is smuggled in disguised as
        // an `HtmlVideoElement`, and the properties the copy path presumably
        // reads (`videoWidth`/`videoHeight`) are patched on below via `Reflect`.
        //
        // SAFETY-wise this relies on both `web_sys` types being thin wrappers
        // around the same `JsValue` representation, so the transmute only
        // changes the Rust-side type, not the underlying JS object — TODO confirm
        // this stays true across `web_sys` versions.
        #[allow(unsafe_code)]
        let frame = unsafe {
            std::mem::transmute::<web_sys::VideoFrame, web_sys::HtmlVideoElement>(
                // This is the fallible JS `VideoFrame.clone()` (it can throw,
                // e.g. on a closed frame), not Rust's `Clone`.
                frame.clone().expect("Failed to clone the video frame"),
            )
        };
        // Mirror the `VideoFrame`'s display size onto the `HtmlVideoElement`
        // properties of the same meaning.
        let display_width = js_sys::Reflect::get(&frame, &"displayWidth".into())
            .expect("Failed to get displayWidth property from VideoFrame.");
        js_sys::Reflect::set(&frame, &"videoWidth".into(), &display_width)
            .expect("Failed to set videoWidth property.");
        let display_height = js_sys::Reflect::get(&frame, &"displayHeight".into())
            .expect("Failed to get displayHeight property from VideoFrame.");
        js_sys::Reflect::set(&frame, &"videoHeight".into(), &display_height)
            .expect("Failed to set videoHeight property.");

        wgpu_types::ImageCopyExternalImage {
            source: wgpu_types::ExternalImageSource::HTMLVideoElement(frame),
            origin: wgpu_types::Origin2d { x: 0, y: 0 },
            flip_y: false,
        }
    };

    let dest = wgpu::ImageCopyTextureTagged {
        texture: &target_texture.texture,
        mip_level: 0,
        origin: wgpu::Origin3d { x: 0, y: 0, z: 0 },
        aspect: wgpu::TextureAspect::All,
        color_space: wgpu::PredefinedColorSpace::Srgb,
        premultiplied_alpha: false,
    };

    ctx.queue
        .copy_external_image_to_texture(&source, dest, size);

    Ok(SourceImageDataFormat::WgpuCompatible(
        target_texture.creation_desc.format,
    ))
}
/// Uploads a natively decoded video frame into `target_texture`.
///
/// Returns the [`SourceImageDataFormat`] describing how the uploaded data
/// should be interpreted (RGBA passthrough, or one of the YUV layouts).
///
/// # Errors
///
/// Propagates any error from `transfer_image_data_to_texture`.
#[cfg(not(target_arch = "wasm32"))]
fn copy_native_video_frame_to_texture(
    ctx: &RenderContext,
    frame: &FrameContent,
    target_texture: &GpuTexture,
) -> Result<SourceImageDataFormat, VideoPlayerError> {
    use crate::resource_managers::{
        transfer_image_data_to_texture, ImageDataDesc, SourceImageDataFormat,
        YuvMatrixCoefficients, YuvPixelLayout, YuvRange,
    };

    // wgpu has no 24bpp RGB texture format: pad to RGBA and recurse exactly once
    // with the converted frame. (The original code did this via a first `match`
    // whose non-RGB arms computed a `wgpu::TextureFormat` that was immediately
    // shadowed and never used — replaced by this guard.)
    //
    // Kept *before* `profile_function!` so the RGB path is profiled only once,
    // inside the recursive call.
    if matches!(frame.format, re_video::PixelFormat::Rgb8Unorm) {
        return copy_native_video_frame_to_texture(
            ctx,
            &FrameContent {
                data: crate::pad_rgb_to_rgba(&frame.data, 255_u8),
                format: re_video::PixelFormat::Rgba8Unorm,
                ..*frame
            },
            target_texture,
        );
    }

    re_tracing::profile_function!();

    // Translate the decoder's pixel format description into the renderer's.
    let format = match &frame.format {
        re_video::PixelFormat::Rgb8Unorm => {
            unreachable!("Handled explicitly earlier in this function");
        }
        re_video::PixelFormat::Rgba8Unorm => {
            SourceImageDataFormat::WgpuCompatible(wgpu::TextureFormat::Rgba8Unorm)
        }
        re_video::PixelFormat::Yuv {
            layout,
            range,
            coefficients,
        } => SourceImageDataFormat::Yuv {
            layout: match layout {
                re_video::decode::YuvPixelLayout::Y_U_V444 => YuvPixelLayout::Y_U_V444,
                re_video::decode::YuvPixelLayout::Y_U_V422 => YuvPixelLayout::Y_U_V422,
                re_video::decode::YuvPixelLayout::Y_U_V420 => YuvPixelLayout::Y_U_V420,
                re_video::decode::YuvPixelLayout::Y400 => YuvPixelLayout::Y400,
            },
            coefficients: match coefficients {
                re_video::decode::YuvMatrixCoefficients::Identity => {
                    YuvMatrixCoefficients::Identity
                }
                re_video::decode::YuvMatrixCoefficients::Bt601 => YuvMatrixCoefficients::Bt601,
                re_video::decode::YuvMatrixCoefficients::Bt709 => YuvMatrixCoefficients::Bt709,
            },
            range: match range {
                re_video::decode::YuvRange::Limited => YuvRange::Limited,
                re_video::decode::YuvRange::Full => YuvRange::Full,
            },
        },
    };

    transfer_image_data_to_texture(
        ctx,
        ImageDataDesc {
            label: "video_texture_upload".into(),
            data: std::borrow::Cow::Borrowed(frame.data.as_slice()),
            format,
            width_height: [frame.width, frame.height],
        },
        target_texture,
    )?;

    Ok(format)
}