use re_data_ui::item_ui;
use re_renderer::{external::wgpu, renderer::ColormappedTexture, resource_managers::GpuTexture2D};
use re_space_view::AnnotationSceneContext;
use re_types::{datatypes::ColorModel, image::ImageKind, tensor_data::TensorElement};
use re_ui::UiExt as _;
use re_viewer_context::{gpu_bridge, Annotations, ImageInfo, ViewQuery, ViewerContext};
use crate::{view_kind::SpatialSpaceViewKind, PickableRectSourceData};
/// Everything known about a pixel picked (hovered) on a textured rect.
pub struct PickedPixelInfo {
    /// What the rect displays: an image, a video frame, or an error placeholder.
    pub source_data: PickableRectSourceData,
    /// The (possibly colormapped) texture the rect is rendered with.
    pub texture: ColormappedTexture,
    /// Texel coordinates of the picked pixel within the texture.
    pub pixel_coordinates: [u32; 2],
}
/// Hover tooltip UI for a picked pixel on a textured rect:
/// instance path button, region outline (2D views only), and a zoomed preview.
#[allow(clippy::too_many_arguments)]
pub fn textured_rect_hover_ui(
    ctx: &ViewerContext<'_>,
    ui: &mut egui::Ui,
    instance_path: &re_entity_db::InstancePath,
    query: &ViewQuery<'_>,
    spatial_kind: SpatialSpaceViewKind,
    ui_pan_and_zoom_from_ui: egui::emath::RectTransform,
    annotations: &AnnotationSceneContext,
    picked_pixel_info: PickedPixelInfo,
    hover_overlay_index: u32,
) {
    // Without a render context we can't draw any texture previews.
    let Some(render_ctx) = ctx.render_ctx else {
        return;
    };

    let PickedPixelInfo {
        source_data,
        texture,
        pixel_coordinates,
    } = picked_pixel_info;

    // Depth images may carry a meter scale; error placeholders have nothing to inspect.
    let depth_meter = match &source_data {
        PickableRectSourceData::Image { depth_meter, .. } => (*depth_meter).map(|d| *d.0),
        PickableRectSourceData::Video { .. } => None,
        PickableRectSourceData::ErrorPlaceholder => {
            return;
        }
    };

    item_ui::instance_path_button(
        ctx,
        &query.latest_at_query(),
        ctx.recording(),
        ui,
        Some(query.space_view_id),
        instance_path,
    );

    ui.add_space(8.0);

    ui.horizontal(|ui| {
        let [tex_w, tex_h] = texture.width_height();
        let (tex_w, tex_h) = (tex_w as f32, tex_h as f32);

        // In 2D views, outline the zoomed region on the image itself.
        if spatial_kind == SpatialSpaceViewKind::TwoD {
            let texture_rect =
                egui::Rect::from_min_size(egui::Pos2::ZERO, egui::vec2(tex_w, tex_h));
            show_zoomed_image_region_area_outline(
                ui.ctx(),
                *ui_pan_and_zoom_from_ui.from(),
                egui::vec2(tex_w, tex_h),
                [pixel_coordinates[0] as _, pixel_coordinates[1] as _],
                ui_pan_and_zoom_from_ui.inverse().transform_rect(texture_rect),
            );
        }

        // CPU-side image data is only available for image sources.
        let image = match &source_data {
            PickableRectSourceData::Image { image, .. } => Some(image),
            _ => None,
        };

        let annotations = annotations.0.find(&instance_path.entity_path);

        show_zoomed_image_region(
            render_ctx,
            ui,
            texture,
            image,
            &annotations,
            depth_meter,
            &TextureInteractionId {
                entity_path: &instance_path.entity_path,
                interaction_idx: hover_overlay_index,
            },
            [pixel_coordinates[0] as _, pixel_coordinates[1] as _],
        );
    });
}
/// Radius (in texels) of the square neighborhood shown in the zoomed hover preview.
const ZOOMED_IMAGE_TEXEL_RADIUS: isize = 10;
/// Outlines, on the on-screen image, the texel region that the zoomed hover
/// preview is magnifying.
fn show_zoomed_image_region_area_outline(
    egui_ctx: &egui::Context,
    ui_clip_rect: egui::Rect,
    image_resolution: egui::Vec2,
    [center_x, center_y]: [isize; 2],
    image_rect: egui::Rect,
) {
    use egui::{pos2, remap, Rect};

    // Texel-space bounds of the previewed region; `+ 1` because the rightmost/
    // bottommost texel is included.
    let texel_left = (center_x - ZOOMED_IMAGE_TEXEL_RADIUS) as f32;
    let texel_right = (center_x + ZOOMED_IMAGE_TEXEL_RADIUS + 1) as f32;
    let texel_top = (center_y - ZOOMED_IMAGE_TEXEL_RADIUS) as f32;
    let texel_bottom = (center_y + ZOOMED_IMAGE_TEXEL_RADIUS + 1) as f32;

    // Map texel coordinates into the on-screen image rect.
    let left = remap(texel_left, 0.0..=image_resolution.x, image_rect.x_range());
    let right = remap(texel_right, 0.0..=image_resolution.x, image_rect.x_range());
    let top = remap(texel_top, 0.0..=image_resolution.y, image_rect.y_range());
    let bottom = remap(texel_bottom, 0.0..=image_resolution.y, image_rect.y_range());

    let sample_rect = Rect::from_min_max(pos2(left, top), pos2(right, bottom));

    // Thick black under thin white keeps the outline visible on any background.
    let painter = egui_ctx.debug_painter().with_clip_rect(ui_clip_rect);
    painter.rect_stroke(sample_rect, 0.0, (2.0, egui::Color32::BLACK));
    painter.rect_stroke(sample_rect, 0.0, (1.0, egui::Color32::WHITE));
}
/// Identifies a single texture-hover interaction, used to key GPU debug labels
/// and readback buffers.
pub struct TextureInteractionId<'a> {
    /// Entity whose texture is being inspected.
    pub entity_path: &'a re_log_types::EntityPath,
    /// Distinguishes multiple simultaneous interactions with the same entity.
    pub interaction_idx: u32,
}
impl<'a> TextureInteractionId<'a> {
    /// Debug label for GPU resources associated with this interaction.
    pub fn debug_label(&self, topic: &str) -> re_renderer::DebugLabel {
        let label = format!("{topic}__{:?}_{}", self.entity_path, self.interaction_idx);
        re_renderer::DebugLabel::from(label)
    }

    /// Stable readback identifier derived from the entity path and interaction index.
    pub fn gpu_readback_id(&self) -> re_renderer::GpuReadbackIdentifier {
        re_log_types::hash::Hash64::hash((self.entity_path, self.interaction_idx)).hash64()
    }
}
/// Shows a zoomed-in preview of the region around `center_texel`, rendering
/// any failure as an inline error label instead of propagating it.
#[allow(clippy::too_many_arguments)]
pub fn show_zoomed_image_region(
    render_ctx: &re_renderer::RenderContext,
    ui: &mut egui::Ui,
    texture: ColormappedTexture,
    image: Option<&ImageInfo>,
    annotations: &Annotations,
    meter: Option<f32>,
    interaction_id: &TextureInteractionId<'_>,
    center_texel: [isize; 2],
) {
    let result = try_show_zoomed_image_region(
        render_ctx,
        ui,
        image,
        texture,
        annotations,
        meter,
        interaction_id,
        center_texel,
    );
    match result {
        Ok(()) => {}
        Err(err) => {
            ui.error_with_details_on_hover(&err.to_string());
        }
    }
}
/// Shows a magnified preview of the texel neighborhood around `center_texel`,
/// plus — when the texel is within the texture bounds — the pixel's value and
/// a solid swatch of just that texel.
///
/// Returns an error if rendering the texture into the UI fails.
#[allow(clippy::too_many_arguments)]
fn try_show_zoomed_image_region(
    render_ctx: &re_renderer::RenderContext,
    ui: &mut egui::Ui,
    image: Option<&ImageInfo>,
    colormapped_texture: ColormappedTexture,
    annotations: &Annotations,
    meter: Option<f32>,
    interaction_id: &TextureInteractionId<'_>,
    center_texel: [isize; 2],
) -> anyhow::Result<()> {
    let [width, height] = colormapped_texture.width_height();

    // On-screen size of a single texel in the zoomed preview, in UI points.
    const POINTS_PER_TEXEL: f32 = 5.0;
    let size = egui::Vec2::splat(((ZOOMED_IMAGE_TEXEL_RADIUS * 2 + 1) as f32) * POINTS_PER_TEXEL);

    let (_id, zoom_rect) = ui.allocate_space(size);
    let painter = ui.painter();

    // Background fill for texels outside the image (or transparent ones).
    painter.rect_filled(zoom_rect, 0.0, ui.visuals().extreme_bg_color);

    // Center of the hovered texel, in texel coordinates.
    let center_of_center_texel = egui::vec2(
        (center_texel[0] as f32) + 0.5,
        (center_texel[1] as f32) + 0.5,
    );

    // Paint the whole texture scaled up so `center_texel` lands in the middle
    // of `zoom_rect`; the clip rect crops away everything outside the preview.
    {
        let image_rect_on_screen = egui::Rect::from_min_size(
            zoom_rect.center() - POINTS_PER_TEXEL * center_of_center_texel,
            POINTS_PER_TEXEL * egui::vec2(width as f32, height as f32),
        );
        gpu_bridge::render_image(
            render_ctx,
            &painter.with_clip_rect(zoom_rect),
            image_rect_on_screen,
            colormapped_texture.clone(),
            egui::TextureOptions::NEAREST,
            interaction_id.debug_label("zoomed_region"),
        )?;
    }

    // Mark the center texel with a black-on-white outline so it reads on any color.
    {
        let center_texel_rect =
            egui::Rect::from_center_size(zoom_rect.center(), egui::Vec2::splat(POINTS_PER_TEXEL));
        painter.rect_stroke(
            center_texel_rect.expand(1.0),
            0.0,
            (1.0, egui::Color32::BLACK),
        );
        painter.rect_stroke(center_texel_rect, 0.0, (1.0, egui::Color32::WHITE));
    }

    let [x, y] = center_texel;
    // Only show value details for texels actually inside the texture.
    if 0 <= x && (x as u32) < width && 0 <= y && (y as u32) < height {
        ui.separator();
        ui.vertical(|ui| {
            ui.style_mut().wrap_mode = Some(egui::TextWrapMode::Extend);
            // Prefer CPU-side image data for the value readout; fall back to a
            // GPU texture readback when no `ImageInfo` is available.
            pixel_value_ui(
                render_ctx,
                ui,
                interaction_id,
                &image.map_or(
                    PixelValueSource::GpuTexture(&colormapped_texture.texture),
                    PixelValueSource::Image,
                ),
                annotations,
                [x as _, y as _],
                meter,
            );
            // Render a swatch of just the hovered texel, filling the remaining
            // square area below the value grid.
            let (rect, _) = ui.allocate_exact_size(
                egui::Vec2::splat(ui.available_height()),
                egui::Sense::hover(),
            );
            let zoom = rect.width();
            let image_rect_on_screen = egui::Rect::from_min_size(
                rect.center() - zoom * center_of_center_texel,
                zoom * egui::vec2(width as f32, height as f32),
            );
            gpu_bridge::render_image(
                render_ctx,
                &ui.painter().with_clip_rect(rect),
                image_rect_on_screen,
                colormapped_texture,
                egui::TextureOptions::NEAREST,
                interaction_id.debug_label("single_pixel"),
            )
        })
        .inner?;
    }

    Ok(())
}
/// Where to read a pixel value from when building the hover readout.
enum PixelValueSource<'a> {
    /// CPU-side image data — values can be read synchronously.
    Image(&'a ImageInfo),
    /// GPU-only texture — values require an asynchronous readback.
    GpuTexture(&'a GpuTexture2D),
}
/// Shows a small grid of properties for the hovered pixel: its position,
/// an optional segmentation label, an optional depth-in-meters readout,
/// and finally the raw channel values (or "No Value").
///
/// * `meter` — depth scale (raw units per meter); only used for image sources.
fn pixel_value_ui(
    render_ctx: &re_renderer::RenderContext,
    ui: &mut egui::Ui,
    interaction_id: &TextureInteractionId<'_>,
    pixel_value_source: &PixelValueSource<'_>,
    annotations: &Annotations,
    [x, y]: [u32; 2],
    meter: Option<f32>,
) {
    egui::Grid::new("hovered pixel properties").show(ui, |ui| {
        ui.label("Position:");
        ui.label(format!("{x}, {y}"));
        ui.end_row();

        // Label & depth readouts require CPU-side image data.
        if let PixelValueSource::Image(image) = &pixel_value_source {
            // For segmentation images, resolve the class id to its annotation label.
            // (The outer check already guarantees `image.kind == Segmentation`,
            // so there is no need to re-match on it here.)
            if image.kind == ImageKind::Segmentation {
                if let Some(u16_val) = image.get_xyc(x, y, 0).and_then(|v| v.try_as_u16()) {
                    ui.label("Label:");
                    ui.label(
                        annotations
                            .resolved_class_description(Some(
                                re_types::components::ClassId::from(u16_val),
                            ))
                            .annotation_info()
                            .label(None)
                            .unwrap_or_else(|| u16_val.to_string()),
                    );
                    ui.end_row();
                }
            }

            // For depth images, convert the raw value to meters.
            if let Some(meter) = meter {
                if let Some(raw_value) = image.get_xyc(x, y, 0) {
                    let raw_value = raw_value.as_f64();
                    let meters = raw_value / (meter as f64);
                    ui.label("Depth:");
                    if meters < 1.0 {
                        ui.monospace(format!("{:.1} mm", meters * 1e3));
                    } else {
                        ui.monospace(format!("{meters:.3} m"));
                    }
                }
            }
        }
    });

    // Raw channel values, read either from CPU data or via GPU readback.
    let text = match pixel_value_source {
        PixelValueSource::Image(image) => pixel_value_string_from_image(image, x, y),
        PixelValueSource::GpuTexture(texture) => {
            pixel_value_string_from_gpu_texture(ui.ctx(), render_ctx, texture, interaction_id, x, y)
        }
    };

    if let Some(text) = text {
        ui.label(text);
    } else {
        ui.label("No Value");
    }
}
/// Formats the raw channel values of a pixel for display, according to the
/// image kind and color model.
///
/// When all color channels are `U8`, a hex color code is appended — always in
/// conventional `#RRGGBB[AA]` order, regardless of the in-memory channel order.
/// Returns `None` if `elements` doesn't contain the expected number of channels.
fn format_pixel_value(
    image_kind: ImageKind,
    color_model: ColorModel,
    elements: &[TensorElement],
) -> Option<String> {
    match image_kind {
        ImageKind::Segmentation | ImageKind::Depth => elements.first().map(|v| format!("Val: {v}")),
        ImageKind::Color => match color_model {
            ColorModel::L => elements.first().map(|v| format!("L: {v}")),
            ColorModel::RGB => {
                if let [r, g, b] = elements {
                    match (r, g, b) {
                        (TensorElement::U8(r), TensorElement::U8(g), TensorElement::U8(b)) => {
                            Some(format!("R: {r}, G: {g}, B: {b}, #{r:02X}{g:02X}{b:02X}"))
                        }
                        _ => Some(format!("R: {r}, G: {g}, B: {b}")),
                    }
                } else {
                    None
                }
            }
            ColorModel::RGBA => {
                if let [r, g, b, a] = elements {
                    match (r, g, b, a) {
                        (
                            TensorElement::U8(r),
                            TensorElement::U8(g),
                            TensorElement::U8(b),
                            TensorElement::U8(a),
                        ) => Some(format!(
                            "R: {r}, G: {g}, B: {b}, A: {a}, #{r:02X}{g:02X}{b:02X}{a:02X}"
                        )),
                        _ => Some(format!("R: {r}, G: {g}, B: {b}, A: {a}")),
                    }
                } else {
                    None
                }
            }
            ColorModel::BGR => {
                if let [b, g, r] = elements {
                    match (b, g, r) {
                        (TensorElement::U8(b), TensorElement::U8(g), TensorElement::U8(r)) => {
                            // Hex code in conventional RGB order (not memory order),
                            // consistent with the BGRA case below.
                            Some(format!("B: {b}, G: {g}, R: {r}, #{r:02X}{g:02X}{b:02X}"))
                        }
                        _ => Some(format!("B: {b}, G: {g}, R: {r}")),
                    }
                } else {
                    None
                }
            }
            ColorModel::BGRA => {
                if let [b, g, r, a] = elements {
                    match (b, g, r, a) {
                        (
                            TensorElement::U8(b),
                            TensorElement::U8(g),
                            TensorElement::U8(r),
                            TensorElement::U8(a),
                        ) => Some(format!(
                            "B: {b}, G: {g}, R: {r}, A: {a}, #{r:02X}{g:02X}{b:02X}{a:02X}"
                        )),
                        _ => Some(format!("B: {b}, G: {g}, R: {r}, A: {a}")),
                    }
                } else {
                    None
                }
            }
        },
    }
}
/// Reads the pixel at (x, y) from CPU-side image data and formats it for display.
///
/// Returns `None` if any required channel is missing.
fn pixel_value_string_from_image(image: &ImageInfo, x: u32, y: u32) -> Option<String> {
    // Fetches channel `c` of the pixel at (x, y).
    let channel = |c: u32| image.get_xyc(x, y, c);

    match image.kind {
        ImageKind::Segmentation | ImageKind::Depth => {
            format_pixel_value(image.kind, image.color_model(), channel(0).as_slice())
        }
        ImageKind::Color => match image.color_model() {
            ColorModel::L => {
                format_pixel_value(image.kind, image.color_model(), channel(0).as_slice())
            }
            ColorModel::BGR | ColorModel::RGB => format_pixel_value(
                image.kind,
                image.color_model(),
                &[channel(0)?, channel(1)?, channel(2)?],
            ),
            ColorModel::BGRA | ColorModel::RGBA => format_pixel_value(
                image.kind,
                image.color_model(),
                &[channel(0)?, channel(1)?, channel(2)?, channel(3)?],
            ),
        },
    }
}
/// Userdata attached to a scheduled GPU texture readback, needed to interpret
/// the returned bytes when they arrive on a later frame.
struct TextureReadbackUserdata {
    /// The texture region that was read back.
    readback_rect: re_renderer::RectInt,
    /// Buffer layout (row padding etc.) of the readback buffer.
    buffer_info: re_renderer::Texture2DBufferInfo,
}
/// Reads the pixel at (x, y) back from a GPU texture and formats it for display.
///
/// GPU readbacks are asynchronous: each call consumes the result of a readback
/// scheduled on a *previous* frame and schedules a new one for the region
/// around the cursor, so the displayed value lags by at least one frame.
/// Returns `None` for unsupported texture formats, for degenerate readback
/// regions, or while no readback result has arrived yet.
fn pixel_value_string_from_gpu_texture(
    ui_ctx: &egui::Context,
    render_ctx: &re_renderer::RenderContext,
    texture: &GpuTexture2D,
    interaction_id: &TextureInteractionId<'_>,
    x: u32,
    y: u32,
) -> Option<String> {
    // Only plain RGBA8 textures are handled here; the byte interpretation
    // below assumes 4 bytes per pixel in RGBA order.
    if texture.format() != wgpu::TextureFormat::Rgba8Unorm {
        return None;
    }

    let readback_id = interaction_id.gpu_readback_id();
    let pixel_pos = glam::IVec2::new(x as i32, y as i32);

    // First, poll for the result of a previously scheduled readback.
    let mut readback_belt = render_ctx.gpu_readback_belt.lock();
    let mut readback_result_rgb = None;
    readback_belt.readback_data::<TextureReadbackUserdata>(readback_id, |data, userdata| {
        debug_assert!(data.len() == userdata.buffer_info.buffer_size_padded as usize);

        // The previously read-back rect may not be centered on the current
        // cursor position (it was clamped to the texture bounds and the cursor
        // may have moved), so translate into rect-local coordinates and clamp.
        let data_pos = (pixel_pos - userdata.readback_rect.min())
            .clamp(
                glam::IVec2::ZERO,
                userdata.readback_rect.extent.as_ivec2() - glam::IVec2::ONE,
            )
            .as_uvec2();
        // 4 bytes per pixel (RGBA8); rows are padded to `bytes_per_row_padded`.
        let start_index =
            (data_pos.x * 4 + userdata.buffer_info.bytes_per_row_padded * data_pos.y) as usize;
        readback_result_rgb = Some([
            data[start_index],
            data[start_index + 1],
            data[start_index + 2],
        ]);
    });

    // Results arrive on a later frame — keep repainting until then.
    ui_ctx.request_repaint();

    // Schedule a new readback of a small region around the cursor. Reading an
    // area rather than a single pixel lets the clamping above answer queries
    // even when the cursor has moved slightly since the readback was scheduled.
    const READBACK_RECT_SIZE: i32 = 64;
    let resolution = glam::UVec2::from_array(texture.width_height()).as_ivec2();
    let readback_rect_min = (pixel_pos - glam::IVec2::splat(READBACK_RECT_SIZE / 2))
        .clamp(glam::IVec2::ZERO, resolution);
    let readback_rect_max = (pixel_pos + glam::IVec2::splat(READBACK_RECT_SIZE / 2))
        .clamp(glam::IVec2::ZERO, resolution);
    let readback_rect_size = readback_rect_max - readback_rect_min;
    // Clamping can collapse the rect near texture edges; nothing to read then.
    if readback_rect_size.x <= 0 || readback_rect_size.y <= 0 {
        return None;
    }
    let readback_area_size = readback_rect_size.as_uvec2();

    let readback_rect = re_renderer::RectInt {
        min: readback_rect_min,
        extent: readback_area_size,
    };
    let buffer_info =
        re_renderer::Texture2DBufferInfo::new(texture.format(), readback_rect.wgpu_extent());
    let mut readback_buffer = readback_belt.allocate(
        &render_ctx.device,
        &render_ctx.gpu_resources.buffers,
        buffer_info.buffer_size_padded,
        readback_id,
        Box::new(TextureReadbackUserdata {
            readback_rect,
            buffer_info,
        }),
    );
    // Release the belt lock before recording GPU commands.
    drop(readback_belt);

    // Enqueue the texture->buffer copy on this frame's pre-view encoder.
    {
        let mut encoder = render_ctx.active_frame.before_view_builder_encoder.lock();
        if let Err(err) = readback_buffer.read_texture2d(
            encoder.get(),
            wgpu::ImageCopyTexture {
                texture: &texture.texture,
                mip_level: 0,
                origin: readback_rect.wgpu_origin(),
                aspect: wgpu::TextureAspect::All,
            },
            readback_rect.wgpu_extent(),
        ) {
            re_log::error_once!("Failed to read back texture: {err}");
        }
    }

    // Format whatever value the *previous* readback produced (if any).
    readback_result_rgb.and_then(|rgb| {
        let rgb = [
            TensorElement::U8(rgb[0]),
            TensorElement::U8(rgb[1]),
            TensorElement::U8(rgb[2]),
        ];
        format_pixel_value(ImageKind::Color, ColorModel::RGB, &rgb)
    })
}