use std::borrow::Cow;
use anyhow::Context as _;
use egui::{util::hash, Rangef};
use wgpu::TextureFormat;
use re_renderer::{
config::DeviceCaps,
pad_rgb_to_rgba,
renderer::{ColorMapper, ColormappedTexture, ShaderDecoding},
resource_managers::{
ImageDataDesc, SourceImageDataFormat, YuvMatrixCoefficients, YuvPixelLayout, YuvRange,
},
RenderContext,
};
use re_types::components::ClassId;
use re_types::datatypes::{ChannelDatatype, ColorModel, ImageFormat, PixelFormat};
use re_types::image::ImageKind;
use crate::{
gpu_bridge::colormap::colormap_to_re_renderer, image_info::ColormapWithRange, Annotations,
ImageInfo, ImageStats,
};
use super::get_or_create_texture;
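/// Hashes everything that identifies the uploaded texture contents:
/// the buffer's row id (a cheap stand-in for the buffer bytes themselves), the image format, and the image kind.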
fn generate_texture_key(image: &ImageInfo) -> u64 {
let ImageInfo {
buffer_row_id: blob_row_id,
        buffer: _,
        format,
kind,
} = image;
hash((blob_row_id, format, kind))
}
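/// Uploads an image to a GPU texture (or fetches it from the cache) and returns it
/// together with the color-mapping settings appropriate for its [`ImageKind`].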
pub fn image_to_gpu(
render_ctx: &RenderContext,
debug_name: &str,
image: &ImageInfo,
image_stats: &ImageStats,
annotations: &Annotations,
colormap: Option<&ColormapWithRange>,
) -> anyhow::Result<ColormappedTexture> {
re_tracing::profile_function!();
let texture_key = generate_texture_key(image);
match image.kind {
ImageKind::Color => {
color_image_to_gpu(render_ctx, debug_name, texture_key, image, image_stats)
}
ImageKind::Depth => depth_image_to_gpu(
render_ctx,
debug_name,
texture_key,
image,
image_stats,
colormap,
),
ImageKind::Segmentation => segmentation_image_to_gpu(
render_ctx,
debug_name,
texture_key,
image,
image_stats,
annotations,
),
}
}
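/// Uploads a color image (RGB(A), BGR(A), or YUV) and derives how the shader should interpret it:
/// value range, sRGB decoding, alpha multiplication, and BGR swizzling.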
fn color_image_to_gpu(
render_ctx: &RenderContext,
debug_name: &str,
texture_key: u64,
image: &ImageInfo,
image_stats: &ImageStats,
) -> anyhow::Result<ColormappedTexture> {
re_tracing::profile_function!();
let image_format = image.format;
let texture_handle = get_or_create_texture(render_ctx, texture_key, || {
texture_creation_desc_from_color_image(render_ctx.device_caps(), image, debug_name)
})
.map_err(|err| anyhow::anyhow!("{err}"))?;
let texture_format = texture_handle.format();
let shader_decoding = required_shader_decode(render_ctx.device_caps(), &image_format);
let decode_srgb = texture_format == TextureFormat::Rgba8Unorm
|| image_decode_srgb_gamma_heuristic(image_stats, image_format);
let range = if matches!(
texture_format,
TextureFormat::R8Unorm | TextureFormat::Rgba8Unorm | TextureFormat::Bgra8Unorm
) {
emath::Rangef::new(0.0, 1.0)
} else if texture_format == TextureFormat::R8Snorm {
emath::Rangef::new(-1.0, 1.0)
} else if let Some(shader_decoding) = shader_decoding {
match shader_decoding {
ShaderDecoding::Bgr => image_data_range_heuristic(image_stats, &image_format),
}
} else {
image_data_range_heuristic(image_stats, &image_format)
};
let color_mapper = if let Some(shader_decoding) = shader_decoding {
match shader_decoding {
ShaderDecoding::Bgr => ColorMapper::OffRGB,
}
} else if texture_format.components() == 1 {
if decode_srgb {
ColorMapper::OffGrayscale
} else {
ColorMapper::Function(re_renderer::Colormap::Grayscale)
}
} else {
ColorMapper::OffRGB
};
let multiply_rgb_with_alpha = image_format.has_alpha();
let gamma = 1.0;
re_log::trace_once!(
"color_tensor_to_gpu {debug_name:?}, range: {range:?}, decode_srgb: {decode_srgb:?}, multiply_rgb_with_alpha: {multiply_rgb_with_alpha:?}, gamma: {gamma:?}, color_mapper: {color_mapper:?}",
);
Ok(ColormappedTexture {
texture: texture_handle,
range: [range.min, range.max],
decode_srgb,
multiply_rgb_with_alpha,
gamma,
color_mapper,
shader_decoding,
})
}
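/// Heuristic for a good value range to map the image data to, based on its finite min/max:
/// float data within `[0, 1]` keeps that range, anything within `[0, 255]` is assumed to use the
/// full 8-bit range, and constant images get a small range around their single value.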
pub fn image_data_range_heuristic(image_stats: &ImageStats, image_format: &ImageFormat) -> Rangef {
let (min, max) = image_stats.finite_range;
let min = min as f32;
let max = max as f32;
if image_format.is_float() && 0.0 <= min && max <= 1.0 {
Rangef::new(0.0, 1.0)
} else if 0.0 <= min && max <= 255.0 {
Rangef::new(0.0, 255.0)
} else if min == max {
Rangef::new(min - 1.0, max + 1.0)
} else {
Rangef::new(min, max)
}
}
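/// Heuristic for whether the image data should be treated as sRGB-encoded
/// (and therefore gamma-decoded in the shader).
///
/// YUV (`PixelFormat`) images are always assumed to be sRGB;
/// otherwise the decision is based on the finite value range.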
fn image_decode_srgb_gamma_heuristic(image_stats: &ImageStats, image_format: ImageFormat) -> bool {
if image_format.pixel_format.is_some() {
true
} else {
let (min, max) = image_stats.finite_range;
#[allow(clippy::if_same_then_else)]
if 0.0 <= min && max <= 255.0 {
true
} else if image_format.datatype().is_float() && 0.0 <= min && max <= 1.0 {
true
} else {
false
}
}
}
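/// Determines whether the shader needs to decode the image (currently only BGR(A) swizzling).
///
/// Assumes the texture was created as in [`texture_creation_desc_from_color_image`]:
/// `U8` BGR(A) data can be uploaded as a native `Bgra8Unorm` texture if the device supports it,
/// in which case no shader-side decoding is needed.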
pub fn required_shader_decode(
device_caps: &DeviceCaps,
image_format: &ImageFormat,
) -> Option<ShaderDecoding> {
let color_model = image_format.color_model();
    if image_format.pixel_format.is_none()
        && (color_model == ColorModel::BGR || color_model == ColorModel::BGRA)
    {
if image_format.datatype() == ChannelDatatype::U8 && device_caps.support_bgra_textures() {
None
} else {
Some(ShaderDecoding::Bgr)
}
} else {
None
}
}
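/// Creates an [`ImageDataDesc`] for a color image, preferring texture formats that need little or
/// no conversion: YUV data is passed through for shader-side conversion, 8-bit RGB(A)/BGR(A) is
/// padded and uploaded directly, and everything else falls back to
/// [`general_texture_creation_desc_from_image`].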
pub fn texture_creation_desc_from_color_image<'a>(
device_caps: &DeviceCaps,
image: &'a ImageInfo,
debug_name: &'a str,
) -> ImageDataDesc<'a> {
re_tracing::profile_function!();
let (data, format) = if let Some(pixel_format) = image.format.pixel_format {
let data = cast_slice_to_cow(image.buffer.as_slice());
let coefficients = match pixel_format.yuv_matrix_coefficients() {
re_types::image::YuvMatrixCoefficients::Bt601 => YuvMatrixCoefficients::Bt601,
re_types::image::YuvMatrixCoefficients::Bt709 => YuvMatrixCoefficients::Bt709,
};
let range = match pixel_format.is_limited_yuv_range() {
true => YuvRange::Limited,
false => YuvRange::Full,
};
let format = match pixel_format {
PixelFormat::Y_U_V24_FullRange | PixelFormat::Y_U_V24_LimitedRange => {
SourceImageDataFormat::Yuv {
layout: YuvPixelLayout::Y_U_V444,
range,
coefficients,
}
}
PixelFormat::Y_U_V16_FullRange | PixelFormat::Y_U_V16_LimitedRange => {
SourceImageDataFormat::Yuv {
layout: YuvPixelLayout::Y_U_V422,
range,
coefficients,
}
}
PixelFormat::Y_U_V12_FullRange | PixelFormat::Y_U_V12_LimitedRange => {
SourceImageDataFormat::Yuv {
layout: YuvPixelLayout::Y_U_V420,
range,
coefficients,
}
}
PixelFormat::Y8_FullRange | PixelFormat::Y8_LimitedRange => {
SourceImageDataFormat::Yuv {
layout: YuvPixelLayout::Y400,
range,
coefficients,
}
}
PixelFormat::NV12 => SourceImageDataFormat::Yuv {
layout: YuvPixelLayout::Y_UV420,
range,
coefficients,
},
PixelFormat::YUY2 => SourceImageDataFormat::Yuv {
layout: YuvPixelLayout::YUYV422,
range,
coefficients,
},
};
(data, format)
} else {
let color_model = image.format.color_model();
let datatype = image.format.datatype();
match (color_model, datatype) {
(ColorModel::RGB, ChannelDatatype::U8) => (
pad_rgb_to_rgba(&image.buffer, u8::MAX).into(),
SourceImageDataFormat::WgpuCompatible(TextureFormat::Rgba8Unorm),
),
(ColorModel::RGBA, ChannelDatatype::U8) => (
cast_slice_to_cow(&image.buffer),
SourceImageDataFormat::WgpuCompatible(TextureFormat::Rgba8Unorm),
),
(ColorModel::BGR, ChannelDatatype::U8) => {
let padded_data = pad_rgb_to_rgba(&image.buffer, u8::MAX).into();
let texture_format = if required_shader_decode(device_caps, &image.format).is_some()
{
TextureFormat::Rgba8Unorm
} else {
TextureFormat::Bgra8Unorm
};
(
padded_data,
SourceImageDataFormat::WgpuCompatible(texture_format),
)
}
(ColorModel::BGRA, ChannelDatatype::U8) => {
let texture_format = if required_shader_decode(device_caps, &image.format).is_some()
{
TextureFormat::Rgba8Unorm
} else {
TextureFormat::Bgra8Unorm
};
(
cast_slice_to_cow(&image.buffer),
SourceImageDataFormat::WgpuCompatible(texture_format),
)
}
_ => {
return general_texture_creation_desc_from_image(
debug_name,
image,
color_model,
datatype,
);
}
}
};
ImageDataDesc {
label: debug_name.into(),
data,
format,
width_height: image.width_height(),
}
}
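/// Uploads a single-channel depth image and colormaps it with the given (or a default) colormap and value range.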
fn depth_image_to_gpu(
render_ctx: &RenderContext,
debug_name: &str,
texture_key: u64,
image: &ImageInfo,
image_stats: &ImageStats,
colormap_with_range: Option<&ColormapWithRange>,
) -> anyhow::Result<ColormappedTexture> {
re_tracing::profile_function!();
if let Some(pixel_format) = image.format.pixel_format {
anyhow::bail!("Depth image does not support the PixelFormat {pixel_format}");
}
if image.format.color_model() != ColorModel::L {
anyhow::bail!(
"Depth image does not support the ColorModel {}",
image.format.color_model()
);
}
let datatype = image.format.datatype();
let ColormapWithRange {
value_range,
colormap,
} = colormap_with_range
.cloned()
.unwrap_or_else(|| ColormapWithRange::default_for_depth_images(image_stats));
let texture = get_or_create_texture(render_ctx, texture_key, || {
general_texture_creation_desc_from_image(debug_name, image, ColorModel::L, datatype)
})
.map_err(|err| anyhow::anyhow!("Failed to create depth texture: {err}"))?;
Ok(ColormappedTexture {
texture,
range: value_range,
decode_srgb: false,
multiply_rgb_with_alpha: false,
gamma: 1.0,
color_mapper: ColorMapper::Function(colormap_to_re_renderer(colormap)),
shader_decoding: None,
})
}
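/// Uploads a single-channel segmentation image together with a small colormap texture
/// that maps each class id to its annotation color.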
fn segmentation_image_to_gpu(
render_ctx: &RenderContext,
debug_name: &str,
texture_key: u64,
image: &ImageInfo,
image_stats: &ImageStats,
annotations: &Annotations,
) -> anyhow::Result<ColormappedTexture> {
re_tracing::profile_function!();
if let Some(pixel_format) = image.format.pixel_format {
anyhow::bail!("Segmentation image does not support the PixelFormat {pixel_format}");
}
if image.format.color_model() != ColorModel::L {
anyhow::bail!(
"Segmentation image does not support the ColorModel {}",
image.format.color_model()
);
}
let datatype = image.format.datatype();
let colormap_key = hash(annotations.row_id());
    let (_, mut max) = image_stats
        .range
        .ok_or_else(|| anyhow::anyhow!("Segmentation image has no value range (compressed image?)"))?;
    // Class ids are at most 16 bit (`ClassId` is backed by a `u16`);
    // anything larger is likely a mistake or an overflow.
    max = max.min(65535.0);
    let num_colors = (max + 1.0) as usize;
    let colormap_width = 256;
    let colormap_height = num_colors.div_ceil(colormap_width);
let colormap_texture_handle = get_or_create_texture(render_ctx, colormap_key, || {
let data: Vec<u8> = (0..(colormap_width * colormap_height))
.flat_map(|id| {
let color = annotations
.resolved_class_description(Some(ClassId::from(id as u16)))
.annotation_info()
.color()
.unwrap_or(re_renderer::Color32::TRANSPARENT);
                color.to_array()
            })
            .collect();
ImageDataDesc {
label: "class_id_colormap".into(),
data: data.into(),
format: SourceImageDataFormat::WgpuCompatible(TextureFormat::Rgba8UnormSrgb),
width_height: [colormap_width as u32, colormap_height as u32],
}
})
.context("Failed to create class_id_colormap.")?;
let main_texture_handle = get_or_create_texture(render_ctx, texture_key, || {
general_texture_creation_desc_from_image(debug_name, image, ColorModel::L, datatype)
})
.map_err(|err| anyhow::anyhow!("{err}"))?;
Ok(ColormappedTexture {
texture: main_texture_handle,
range: [0.0, (colormap_width * colormap_height) as f32],
        decode_srgb: false,
        multiply_rgb_with_alpha: false,
        gamma: 1.0,
color_mapper: ColorMapper::Texture(colormap_texture_handle),
shader_decoding: None,
})
}
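/// Uploads the image in a texture format that closely resembles the original data,
/// using integer or float formats (no `Unorm`/`Snorm`), padding RGB to RGBA and
/// narrowing 64-bit values to `f32` where needed.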
fn general_texture_creation_desc_from_image<'a>(
debug_name: &str,
image: &'a ImageInfo,
color_model: ColorModel,
datatype: ChannelDatatype,
) -> ImageDataDesc<'a> {
re_tracing::profile_function!();
let buf: &[u8] = image.buffer.as_ref();
let (data, format) = match color_model {
ColorModel::L => {
match datatype {
ChannelDatatype::U8 => (Cow::Borrowed(buf), TextureFormat::R8Uint),
ChannelDatatype::U16 => (Cow::Borrowed(buf), TextureFormat::R16Uint),
ChannelDatatype::U32 => (Cow::Borrowed(buf), TextureFormat::R32Uint),
ChannelDatatype::U64 => (
narrow_u64_to_f32s(&image.to_slice()),
TextureFormat::R32Float,
),
ChannelDatatype::I8 => (Cow::Borrowed(buf), TextureFormat::R8Sint),
ChannelDatatype::I16 => (Cow::Borrowed(buf), TextureFormat::R16Sint),
ChannelDatatype::I32 => (Cow::Borrowed(buf), TextureFormat::R32Sint),
ChannelDatatype::I64 => (
narrow_i64_to_f32s(&image.to_slice()),
TextureFormat::R32Float,
),
ChannelDatatype::F16 => (Cow::Borrowed(buf), TextureFormat::R16Float),
ChannelDatatype::F32 => (Cow::Borrowed(buf), TextureFormat::R32Float),
ChannelDatatype::F64 => (
narrow_f64_to_f32s(&image.to_slice()),
TextureFormat::R32Float,
),
}
}
ColorModel::RGB | ColorModel::BGR => {
match datatype {
ChannelDatatype::U8 => (
pad_rgb_to_rgba(buf, u8::MAX).into(),
TextureFormat::Rgba8Uint,
),
ChannelDatatype::U16 => (pad_cast_img(image, u16::MAX), TextureFormat::Rgba16Uint),
ChannelDatatype::U32 => (pad_cast_img(image, u32::MAX), TextureFormat::Rgba32Uint),
ChannelDatatype::U64 => (
pad_and_narrow_and_cast(&image.to_slice(), 1.0, |x: u64| x as f32),
TextureFormat::Rgba32Float,
),
ChannelDatatype::I8 => (pad_cast_img(image, i8::MAX), TextureFormat::Rgba8Sint),
ChannelDatatype::I16 => (pad_cast_img(image, i16::MAX), TextureFormat::Rgba16Sint),
ChannelDatatype::I32 => (pad_cast_img(image, i32::MAX), TextureFormat::Rgba32Sint),
ChannelDatatype::I64 => (
pad_and_narrow_and_cast(&image.to_slice(), 1.0, |x: i64| x as f32),
TextureFormat::Rgba32Float,
),
ChannelDatatype::F16 => (
pad_cast_img(image, half::f16::from_f32(1.0)),
TextureFormat::Rgba16Float,
),
ChannelDatatype::F32 => (pad_cast_img(image, 1.0_f32), TextureFormat::Rgba32Float),
ChannelDatatype::F64 => (
pad_and_narrow_and_cast(&image.to_slice(), 1.0, |x: f64| x as f32),
TextureFormat::Rgba32Float,
),
}
}
ColorModel::RGBA | ColorModel::BGRA => {
match datatype {
ChannelDatatype::U8 => (Cow::Borrowed(buf), TextureFormat::Rgba8Uint),
ChannelDatatype::U16 => (Cow::Borrowed(buf), TextureFormat::Rgba16Uint),
ChannelDatatype::U32 => (Cow::Borrowed(buf), TextureFormat::Rgba32Uint),
ChannelDatatype::U64 => (
narrow_u64_to_f32s(&image.to_slice()),
TextureFormat::Rgba32Float,
),
ChannelDatatype::I8 => (Cow::Borrowed(buf), TextureFormat::Rgba8Sint),
ChannelDatatype::I16 => (Cow::Borrowed(buf), TextureFormat::Rgba16Sint),
ChannelDatatype::I32 => (Cow::Borrowed(buf), TextureFormat::Rgba32Sint),
ChannelDatatype::I64 => (
narrow_i64_to_f32s(&image.to_slice()),
TextureFormat::Rgba32Float,
),
ChannelDatatype::F16 => (Cow::Borrowed(buf), TextureFormat::Rgba16Float),
ChannelDatatype::F32 => (Cow::Borrowed(buf), TextureFormat::Rgba32Float),
ChannelDatatype::F64 => (
narrow_f64_to_f32s(&image.to_slice()),
TextureFormat::Rgba32Float,
),
}
}
};
ImageDataDesc {
label: debug_name.into(),
data,
format: SourceImageDataFormat::WgpuCompatible(format),
width_height: image.width_height(),
}
}
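// Casts a slice of any plain-old-data type to its raw bytes without copying.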
fn cast_slice_to_cow<From: bytemuck::Pod>(slice: &[From]) -> Cow<'_, [u8]> {
bytemuck::cast_slice(slice).into()
}
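// wgpu doesn't support u64 textures, so we need to narrow to f32: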
fn narrow_u64_to_f32s(slice: &[u64]) -> Cow<'static, [u8]> {
re_tracing::profile_function!();
let bytes: Vec<u8> = slice
.iter()
.flat_map(|&f| (f as f32).to_le_bytes())
.collect();
bytes.into()
}
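// wgpu doesn't support i64 textures, so we need to narrow to f32: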
fn narrow_i64_to_f32s(slice: &[i64]) -> Cow<'static, [u8]> {
re_tracing::profile_function!();
let bytes: Vec<u8> = slice
.iter()
.flat_map(|&f| (f as f32).to_le_bytes())
.collect();
bytes.into()
}
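// wgpu doesn't support f64 textures, so we need to narrow to f32: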
fn narrow_f64_to_f32s(slice: &[f64]) -> Cow<'static, [u8]> {
re_tracing::profile_function!();
let bytes: Vec<u8> = slice
.iter()
.flat_map(|&f| (f as f32).to_le_bytes())
.collect();
bytes.into()
}
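// Pads interleaved RGB data to RGBA using `pad` as the fourth component, returning the raw bytes.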
fn pad_and_cast<T: Copy + bytemuck::Pod>(data: &[T], pad: T) -> Cow<'static, [u8]> {
re_tracing::profile_function!();
let padded: Vec<T> = pad_rgb_to_rgba(data, pad);
let bytes: Vec<u8> = bytemuck::pod_collect_to_vec(&padded);
bytes.into()
}
fn pad_cast_img<T: Copy + bytemuck::Pod>(img: &ImageInfo, pad: T) -> Cow<'static, [u8]> {
pad_and_cast(&img.to_slice(), pad)
}
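// Pads interleaved RGB data to RGBA while narrowing each component to f32 via `narrow`, returning the raw bytes.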
fn pad_and_narrow_and_cast<T: Copy + bytemuck::Pod>(
data: &[T],
pad: f32,
narrow: impl Fn(T) -> f32,
) -> Cow<'static, [u8]> {
re_tracing::profile_function!();
let floats: Vec<f32> = data
.chunks_exact(3)
.flat_map(|chunk| [narrow(chunk[0]), narrow(chunk[1]), narrow(chunk[2]), pad])
.collect();
bytemuck::pod_collect_to_vec(&floats).into()
}