1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
90
91
92
93
94
95
96
97
98
99
100
101
102
103
104
105
106
107
108
109
110
111
112
113
114
115
116
117
118
119
120
121
122
123
124
125
126
127
128
129
130
131
132
133
134
135
136
137
138
139
140
141
142
143
144
145
146
147
148
149
150
151
152
153
154
155
156
157
158
159
160
161
162
163
164
165
166
167
168
169
170
171
172
173
174
175
176
177
178
179
180
181
182
183
184
185
186
187
188
189
190
191
192
193
194
195
196
197
198
199
200
201
202
203
204
205
206
207
208
209
210
211
212
213
214
215
216
217
218
219
220
221
222
223
224
225
226
227
228
229
230
231
232
233
234
235
236
237
238
239
240
241
242
243
244
245
246
247
248
249
250
251
252
253
254
255
256
257
258
259
260
261
262
263
264
265
266
267
268
269
270
271
272
273
274
275
276
277
278
279
280
281
282
283
284
285
286
287
288
289
290
291
292
293
294
295
296
297
298
299
300
301
302
303
304
305
306
307
308
309
310
311
312
313
314
315
316
317
318
319
320
321
322
323
324
325
326
327
328
329
330
331
332
333
334
335
336
337
338
339
340
341
342
343
344
345
346
347
348
349
350
351
352
353
354
355
356
357
358
359
360
361
362
363
364
365
366
367
368
369
370
371
372
373
374
375
376
377
378
379
380
381
382
383
384
385
386
387
388
389
390
391
392
393
394
395
396
397
398
399
400
401
402
403
404
405
406
407
408
409
410
411
412
413
414
415
416
417
418
419
420
421
422
423
424
425
426
427
428
429
430
431
432
433
434
435
436
437
438
439
440
441
442
443
444
445
446
447
448
449
450
451
452
453
454
455
456
457
458
459
460
461
462
463
464
465
466
467
468
469
470
471
472
473
474
475
476
477
478
479
480
481
482
483
484
485
486
487
488
489
490
491
492
493
494
495
496
497
498
499
500
501
502
503
504
505
506
507
508
509
510
511
512
513
514
515
516
517
518
519
520
521
522
523
524
525
526
527
528
529
530
531
532
533
534
535
536
537
538
539
540
541
542
543
544
545
546
547
548
549
550
551
552
553
554
555
556
557
558
559
560
561
562
563
564
565
566
567
568
569
570
571
572
573
574
575
576
577
578
579
580
581
582
583
584
585
586
587
588
589
590
591
592
593
594
595
596
597
598
599
600
601
602
603
604
605
606
607
608
609
610
use re_data_ui::item_ui;
use re_renderer::{external::wgpu, renderer::ColormappedTexture, resource_managers::GpuTexture2D};
use re_space_view::AnnotationSceneContext;
use re_types::{datatypes::ColorModel, image::ImageKind, tensor_data::TensorElement};
use re_ui::UiExt as _;
use re_viewer_context::{gpu_bridge, Annotations, ImageInfo, ViewQuery, ViewerContext};

use crate::{view_kind::SpatialSpaceViewKind, PickableRectSourceData};

/// Information about a picked (hovered) pixel on a textured rect.
pub struct PickedPixelInfo {
    /// What the picked rect was built from (image, video frame, or an error placeholder).
    pub source_data: PickableRectSourceData,
    /// The texture the rect is rendered with, including colormapping information.
    pub texture: ColormappedTexture,
    /// Texel coordinates of the picked pixel within `texture`.
    pub pixel_coordinates: [u32; 2],
}

/// Hover tooltip for a textured rect (image or video frame).
///
/// Shows a button for the instance path, a zoomed-in view of the texels around the
/// picked pixel, and (for 2D views) an outline on the original image marking the
/// zoomed region. Returns early without UI for error placeholders or if there is
/// no render context.
#[allow(clippy::too_many_arguments)]
pub fn textured_rect_hover_ui(
    ctx: &ViewerContext<'_>,
    ui: &mut egui::Ui,
    instance_path: &re_entity_db::InstancePath,
    query: &ViewQuery<'_>,
    spatial_kind: SpatialSpaceViewKind,
    ui_pan_and_zoom_from_ui: egui::emath::RectTransform,
    annotations: &AnnotationSceneContext,
    picked_pixel_info: PickedPixelInfo,
    hover_overlay_index: u32,
) {
    let Some(render_ctx) = ctx.render_ctx else {
        return;
    };

    let PickedPixelInfo {
        source_data,
        texture,
        pixel_coordinates,
    } = picked_pixel_info;

    // Depth meter is only available for (depth) images; videos never have one.
    let depth_meter = match &source_data {
        PickableRectSourceData::Image { depth_meter, .. } => *depth_meter,
        PickableRectSourceData::Video { .. } => None,
        PickableRectSourceData::ErrorPlaceholder => {
            // No point in zooming into an error placeholder!
            return;
        }
    };

    // Unwrap the component newtype to its raw f32 value.
    let depth_meter = depth_meter.map(|d| *d.0);

    item_ui::instance_path_button(
        ctx,
        &query.latest_at_query(),
        ctx.recording(),
        ui,
        Some(query.space_view_id),
        instance_path,
    );

    ui.add_space(8.0);

    ui.horizontal(|ui| {
        let [w, h] = texture.width_height();
        let (w, h) = (w as f32, h as f32);

        // Only 2D views show the image itself in-view, so only there can we outline
        // the zoomed-in region on top of it.
        if spatial_kind == SpatialSpaceViewKind::TwoD {
            let rect = egui::Rect::from_min_size(egui::Pos2::ZERO, egui::vec2(w, h));

            show_zoomed_image_region_area_outline(
                ui.ctx(),
                *ui_pan_and_zoom_from_ui.from(),
                egui::vec2(w, h),
                [pixel_coordinates[0] as _, pixel_coordinates[1] as _],
                ui_pan_and_zoom_from_ui.inverse().transform_rect(rect),
            );
        }

        // CPU-side image data is only available for images; for videos we fall back
        // to reading pixel values back from the gpu texture.
        let image = if let PickableRectSourceData::Image { image, .. } = &source_data {
            Some(image)
        } else {
            None
        };

        let annotations = annotations.0.find(&instance_path.entity_path);

        show_zoomed_image_region(
            render_ctx,
            ui,
            texture,
            image,
            &annotations,
            depth_meter,
            &TextureInteractionId {
                entity_path: &instance_path.entity_path,
                interaction_idx: hover_overlay_index,
            },
            [pixel_coordinates[0] as _, pixel_coordinates[1] as _],
        );
    });
}

/// Number of texels shown on each side of the center texel in the zoomed-in view,
/// i.e. the view covers a square of `2 * radius + 1` texels per side.
const ZOOMED_IMAGE_TEXEL_RADIUS: isize = 10;

/// Draws a border for the area zoomed in by [`show_zoomed_image_region`].
/// Draws a border for the area zoomed in by [`show_zoomed_image_region`].
fn show_zoomed_image_region_area_outline(
    egui_ctx: &egui::Context,
    ui_clip_rect: egui::Rect,
    image_resolution: egui::Vec2,
    [center_x, center_y]: [isize; 2],
    image_rect: egui::Rect,
) {
    use egui::{pos2, remap, Rect};

    // Texel-space bounds of the zoomed region: ZOOMED_IMAGE_TEXEL_RADIUS texels
    // _surrounding_ the center. Since the center is the top-left/rounded-down
    // coordinate, the far edges get an extra +1.
    let min_x = (center_x - ZOOMED_IMAGE_TEXEL_RADIUS) as f32;
    let max_x = (center_x + ZOOMED_IMAGE_TEXEL_RADIUS + 1) as f32;
    let min_y = (center_y - ZOOMED_IMAGE_TEXEL_RADIUS) as f32;
    let max_y = (center_y + ZOOMED_IMAGE_TEXEL_RADIUS + 1) as f32;

    // Map texel coordinates onto where the image is shown on screen:
    let to_screen_x = |x: f32| remap(x, 0.0..=image_resolution.x, image_rect.x_range());
    let to_screen_y = |y: f32| remap(y, 0.0..=image_resolution.y, image_rect.y_range());

    let sample_rect = Rect::from_min_max(
        pos2(to_screen_x(min_x), to_screen_y(min_y)),
        pos2(to_screen_x(max_x), to_screen_y(max_y)),
    );

    // TODO(emilk): use `parent_ui.painter()` and put it in a high Z layer, when https://github.com/emilk/egui/issues/1516 is done
    let painter = egui_ctx.debug_painter().with_clip_rect(ui_clip_rect);
    // Black-under-white double stroke so the outline is visible on any background:
    painter.rect_stroke(sample_rect, 0.0, (2.0, egui::Color32::BLACK));
    painter.rect_stroke(sample_rect, 0.0, (1.0, egui::Color32::WHITE));
}

/// Identifies an image/texture interaction.
///
/// This is needed primarily to keep track of gpu readbacks and for debugging purposes.
/// Therefore, this should stay roughly stable over several frames.
pub struct TextureInteractionId<'a> {
    /// Entity the interacted-with texture belongs to.
    pub entity_path: &'a re_log_types::EntityPath,

    /// Index of the interaction. This is important in case there's multiple interactions with the same entity.
    /// This can happen if an entity has several images all of which are inspected at the same time.
    /// Without this, several readbacks may get the same identifier, resulting in the wrong gpu readback values.
    pub interaction_idx: u32,
}

impl<'a> TextureInteractionId<'a> {
    /// Debug label for gpu resources associated with this interaction, prefixed with `topic`.
    pub fn debug_label(&self, topic: &str) -> re_renderer::DebugLabel {
        format!("{topic}__{:?}_{}", self.entity_path, self.interaction_idx).into()
    }

    /// Stable identifier for gpu readbacks of this interaction.
    ///
    /// Hashes entity path & interaction index so distinct interactions get distinct readbacks.
    pub fn gpu_readback_id(&self) -> re_renderer::GpuReadbackIdentifier {
        re_log_types::hash::Hash64::hash((self.entity_path, self.interaction_idx)).hash64()
    }
}

/// Shows a zoomed-in view of the region around `center_texel`, with pixel value readout.
///
/// `meter`: iff this is a depth map, how long is one meter?
///
/// Infallible wrapper around [`try_show_zoomed_image_region`] that renders any error
/// into the UI instead of propagating it.
#[allow(clippy::too_many_arguments)]
pub fn show_zoomed_image_region(
    render_ctx: &re_renderer::RenderContext,
    ui: &mut egui::Ui,
    texture: ColormappedTexture,
    image: Option<&ImageInfo>,
    annotations: &Annotations,
    meter: Option<f32>,
    interaction_id: &TextureInteractionId<'_>,
    center_texel: [isize; 2],
) {
    // NOTE: `image` and `texture` are in swapped positions in the callee's signature —
    // keep the argument order below in sync with `try_show_zoomed_image_region`.
    if let Err(err) = try_show_zoomed_image_region(
        render_ctx,
        ui,
        image,
        texture,
        annotations,
        meter,
        interaction_id,
        center_texel,
    ) {
        ui.error_with_details_on_hover(&err.to_string());
    }
}

/// Shows the zoomed-in texel region, center-texel outline, and pixel value readout.
///
/// `meter`: iff this is a depth map, how long is one meter?
///
/// Errors come from gpu-based image rendering ([`gpu_bridge::render_image`]).
#[allow(clippy::too_many_arguments)]
fn try_show_zoomed_image_region(
    render_ctx: &re_renderer::RenderContext,
    ui: &mut egui::Ui,
    image: Option<&ImageInfo>,
    colormapped_texture: ColormappedTexture,
    annotations: &Annotations,
    meter: Option<f32>,
    interaction_id: &TextureInteractionId<'_>,
    center_texel: [isize; 2],
) -> anyhow::Result<()> {
    let [width, height] = colormapped_texture.width_height();

    // Each texel of the zoomed-in region is drawn as a POINTS_PER_TEXEL-sized square,
    // so the whole view is a (2*radius + 1) texel square scaled by that factor.
    const POINTS_PER_TEXEL: f32 = 5.0;
    let size = egui::Vec2::splat(((ZOOMED_IMAGE_TEXEL_RADIUS * 2 + 1) as f32) * POINTS_PER_TEXEL);

    let (_id, zoom_rect) = ui.allocate_space(size);
    let painter = ui.painter();

    painter.rect_filled(zoom_rect, 0.0, ui.visuals().extreme_bg_color);

    // Offsets by 0.5 so we target the texel's center rather than its top-left corner.
    let center_of_center_texel = egui::vec2(
        (center_texel[0] as f32) + 0.5,
        (center_texel[1] as f32) + 0.5,
    );

    // Paint the zoomed in region:
    {
        // Position the (scaled-up) image so that the center texel lands at the
        // center of `zoom_rect`; clipping trims it to the zoom view.
        let image_rect_on_screen = egui::Rect::from_min_size(
            zoom_rect.center() - POINTS_PER_TEXEL * center_of_center_texel,
            POINTS_PER_TEXEL * egui::vec2(width as f32, height as f32),
        );

        gpu_bridge::render_image(
            render_ctx,
            &painter.with_clip_rect(zoom_rect),
            image_rect_on_screen,
            colormapped_texture.clone(),
            egui::TextureOptions::NEAREST,
            interaction_id.debug_label("zoomed_region"),
        )?;
    }

    // Outline the center texel, to indicate which texel we're printing the values of:
    {
        let center_texel_rect =
            egui::Rect::from_center_size(zoom_rect.center(), egui::Vec2::splat(POINTS_PER_TEXEL));
        painter.rect_stroke(
            center_texel_rect.expand(1.0),
            0.0,
            (1.0, egui::Color32::BLACK),
        );
        painter.rect_stroke(center_texel_rect, 0.0, (1.0, egui::Color32::WHITE));
    }

    // Only show pixel values if the center texel is actually inside the image.
    let [x, y] = center_texel;
    if 0 <= x && (x as u32) < width && 0 <= y && (y as u32) < height {
        ui.separator();

        ui.vertical(|ui| {
            ui.style_mut().wrap_mode = Some(egui::TextWrapMode::Extend);

            pixel_value_ui(
                render_ctx,
                ui,
                interaction_id,
                // Prefer CPU-side image data; fall back to gpu readback of the texture.
                &image.map_or(
                    PixelValueSource::GpuTexture(&colormapped_texture.texture),
                    PixelValueSource::Image,
                ),
                annotations,
                [x as _, y as _],
                meter,
            );

            // Show a big sample of the color of the middle texel:
            let (rect, _) = ui.allocate_exact_size(
                egui::Vec2::splat(ui.available_height()),
                egui::Sense::hover(),
            );
            // Position texture so that the center texel is at the center of the rect:
            let zoom = rect.width();
            let image_rect_on_screen = egui::Rect::from_min_size(
                rect.center() - zoom * center_of_center_texel,
                zoom * egui::vec2(width as f32, height as f32),
            );
            gpu_bridge::render_image(
                render_ctx,
                &ui.painter().with_clip_rect(rect),
                image_rect_on_screen,
                colormapped_texture,
                egui::TextureOptions::NEAREST,
                interaction_id.debug_label("single_pixel"),
            )
        })
        .inner?;
    }
    Ok(())
}

/// How we figure out what value to show for a single pixel.
enum PixelValueSource<'a> {
    /// Full image information. Use this whenever reasonably possible.
    Image(&'a ImageInfo),

    /// Via a GPU texture readback.
    ///
    /// As of writing, use this only if…
    /// * the texture is known to be able to read back
    /// * the texture format is `Rgba8UnormSrgb`
    /// * you don't care about alpha (since there's no 24bit textures, we assume we can just ignore it)
    ///
    /// NOTE(review): the readback code actually checks for `Rgba8Unorm` (non-sRGB) — confirm
    /// which format is intended and keep this doc in sync.
    ///
    /// Note that these restrictions are not final,
    /// but merely what covers the usecases right now with the least amount of effort.
    GpuTexture(&'a GpuTexture2D),
}

/// Shows the value of a pixel in an image.
/// If no image info is provided, this only shows the position of the pixel.
fn pixel_value_ui(
    render_ctx: &re_renderer::RenderContext,
    ui: &mut egui::Ui,
    interaction_id: &TextureInteractionId<'_>,
    pixel_value_source: &PixelValueSource<'_>,
    annotations: &Annotations,
    [x, y]: [u32; 2],
    meter: Option<f32>,
) {
    egui::Grid::new("hovered pixel properties").show(ui, |ui| {
        ui.label("Position:");
        ui.label(format!("{x}, {y}"));
        ui.end_row();

        if let PixelValueSource::Image(image) = &pixel_value_source {
            // For segmentation images, resolve the class id to its annotation label (if any).
            // (Previously this re-checked `image.kind == ImageKind::Segmentation` a second
            // time inside the branch, which was redundant.)
            if image.kind == ImageKind::Segmentation {
                if let Some(u16_val) = image
                    .get_xyc(x, y, 0)
                    .and_then(|raw_value| raw_value.try_as_u16())
                {
                    ui.label("Label:");
                    ui.label(
                        annotations
                            .resolved_class_description(Some(
                                re_types::components::ClassId::from(u16_val),
                            ))
                            .annotation_info()
                            .label(None)
                            .unwrap_or_else(|| u16_val.to_string()),
                    );
                    ui.end_row();
                }
            }

            if let Some(meter) = meter {
                // This is a depth map: convert the raw value to meters.
                if let Some(raw_value) = image.get_xyc(x, y, 0) {
                    let meters = raw_value.as_f64() / (meter as f64);
                    ui.label("Depth:");
                    if meters < 1.0 {
                        ui.monospace(format!("{:.1} mm", meters * 1e3));
                    } else {
                        ui.monospace(format!("{meters:.3} m"));
                    }
                }
            }
        }
    });

    // The pixel's channel values, either read from the CPU-side image data
    // or via a gpu readback of the texture.
    let text = match pixel_value_source {
        PixelValueSource::Image(image) => pixel_value_string_from_image(image, x, y),
        PixelValueSource::GpuTexture(texture) => {
            pixel_value_string_from_gpu_texture(ui.ctx(), render_ctx, texture, interaction_id, x, y)
        }
    };

    if let Some(text) = text {
        ui.label(text);
    } else {
        ui.label("No Value");
    }
}

/// Formats a pixel's channel values as a human-readable string.
///
/// For 8-bit color pixels this also appends the conventional `#RRGGBB(AA)` hex notation.
/// Hex digits are always emitted in RGB(A) order regardless of the channel memory order,
/// matching how hex colors are universally read. (The BGR arm previously emitted the hex
/// digits in B,G,R memory order, inconsistent with the BGRA/RGB/RGBA arms.)
///
/// Returns `None` if `elements` doesn't have the channel count the color model requires.
fn format_pixel_value(
    image_kind: ImageKind,
    color_model: ColorModel,
    elements: &[TensorElement],
) -> Option<String> {
    match image_kind {
        // Single-channel kinds only show their first (and only) value.
        ImageKind::Segmentation | ImageKind::Depth => elements.first().map(|v| format!("Val: {v}")),

        ImageKind::Color => match color_model {
            ColorModel::L => elements.first().map(|v| format!("L: {v}")),

            ColorModel::RGB => {
                if let [r, g, b] = elements {
                    match (r, g, b) {
                        (TensorElement::U8(r), TensorElement::U8(g), TensorElement::U8(b)) => {
                            Some(format!("R: {r}, G: {g}, B: {b}, #{r:02X}{g:02X}{b:02X}"))
                        }
                        _ => Some(format!("R: {r}, G: {g}, B: {b}")),
                    }
                } else {
                    None
                }
            }

            ColorModel::RGBA => {
                if let [r, g, b, a] = elements {
                    match (r, g, b, a) {
                        (
                            TensorElement::U8(r),
                            TensorElement::U8(g),
                            TensorElement::U8(b),
                            TensorElement::U8(a),
                        ) => Some(format!(
                            "R: {r}, G: {g}, B: {b}, A: {a}, #{r:02X}{g:02X}{b:02X}{a:02X}"
                        )),
                        _ => Some(format!("R: {r}, G: {g}, B: {b}, A: {a}")),
                    }
                } else {
                    None
                }
            }

            ColorModel::BGR => {
                if let [b, g, r] = elements {
                    match (b, g, r) {
                        (TensorElement::U8(b), TensorElement::U8(g), TensorElement::U8(r)) => {
                            // Hex notation is RGB-ordered even though memory order is BGR.
                            Some(format!("B: {b}, G: {g}, R: {r}, #{r:02X}{g:02X}{b:02X}"))
                        }
                        _ => Some(format!("B: {b}, G: {g}, R: {r}")),
                    }
                } else {
                    None
                }
            }

            ColorModel::BGRA => {
                if let [b, g, r, a] = elements {
                    match (b, g, r, a) {
                        (
                            TensorElement::U8(b),
                            TensorElement::U8(g),
                            TensorElement::U8(r),
                            TensorElement::U8(a),
                        ) => Some(format!(
                            // Hex notation is RGB-ordered even though memory order is BGRA.
                            "B: {b}, G: {g}, R: {r}, A: {a}, #{r:02X}{g:02X}{b:02X}{a:02X}"
                        )),
                        _ => Some(format!("B: {b}, G: {g}, R: {r}, A: {a}")),
                    }
                } else {
                    None
                }
            }
        },
    }
}

/// Reads the channel values at `(x, y)` from CPU-side image data and formats them.
///
/// Returns `None` if any required channel is out of bounds / unavailable.
fn pixel_value_string_from_image(image: &ImageInfo, x: u32, y: u32) -> Option<String> {
    let color_model = image.color_model();

    // Number of channels we need to sample for this image kind / color model:
    let num_channels = match image.kind {
        ImageKind::Segmentation | ImageKind::Depth => 1,
        ImageKind::Color => match color_model {
            ColorModel::L => 1,
            ColorModel::BGR | ColorModel::RGB => 3,
            ColorModel::BGRA | ColorModel::RGBA => 4,
        },
    };

    if num_channels == 1 {
        // A missing single-channel value becomes an empty slice,
        // which `format_pixel_value` turns into `None`.
        format_pixel_value(image.kind, color_model, image.get_xyc(x, y, 0).as_slice())
    } else {
        let mut elements = Vec::with_capacity(num_channels);
        for channel in 0..num_channels as u32 {
            // For multi-channel pixels, every channel must be present.
            elements.push(image.get_xyc(x, y, channel)?);
        }
        format_pixel_value(image.kind, color_model, &elements)
    }
}

/// Userdata attached to a gpu texture readback request so that the raw buffer
/// contents can be interpreted when the readback result arrives (a frame or more later).
struct TextureReadbackUserdata {
    /// Rect on the texture that was read back.
    readback_rect: re_renderer::RectInt,

    /// Info about the buffer we're reading back.
    buffer_info: re_renderer::Texture2DBufferInfo,
}

/// Reads the pixel value at `(x, y)` back from a gpu texture and formats it.
///
/// Readbacks are asynchronous: each call consumes any previously-arrived result for this
/// interaction and enqueues a fresh readback, so the displayed value lags by at least one
/// frame. Returns `None` while no result is available yet, or if the texture format /
/// requested region is unsupported.
fn pixel_value_string_from_gpu_texture(
    ui_ctx: &egui::Context,
    render_ctx: &re_renderer::RenderContext,
    texture: &GpuTexture2D,
    interaction_id: &TextureInteractionId<'_>,
    x: u32,
    y: u32,
) -> Option<String> {
    // TODO(andreas): Should parts of this be a utility in re_renderer?
    // Note that before this was implemented the readback belt was private to `re_renderer` because it is fairly advanced in its usage.

    // Only support Rgba8Unorm textures for now.
    // We could support more here but that needs more handling code and it doesn't look like we have to right now.
    if texture.format() != wgpu::TextureFormat::Rgba8Unorm {
        return None;
    }

    let readback_id = interaction_id.gpu_readback_id();
    let pixel_pos = glam::IVec2::new(x as i32, y as i32);

    let mut readback_belt = render_ctx.gpu_readback_belt.lock();

    // First check if we have a result ready to read.
    // Keep in mind that copy operation may have required row-padding, use `buffer_info` to get the right values.
    let mut readback_result_rgb = None;
    readback_belt.readback_data::<TextureReadbackUserdata>(readback_id, |data, userdata| {
        debug_assert!(data.len() == userdata.buffer_info.buffer_size_padded as usize);

        // Try to find the pixel at the mouse position.
        // If our position isn't available, just clamp to the edge of the area.
        let data_pos = (pixel_pos - userdata.readback_rect.min())
            .clamp(
                glam::IVec2::ZERO,
                // Exclusive the size of the area we're reading back.
                userdata.readback_rect.extent.as_ivec2() - glam::IVec2::ONE,
            )
            .as_uvec2();
        // 4 bytes per pixel (Rgba8Unorm); rows may be padded, hence `bytes_per_row_padded`.
        let start_index =
            (data_pos.x * 4 + userdata.buffer_info.bytes_per_row_padded * data_pos.y) as usize;

        // Alpha is intentionally ignored — we only display RGB.
        readback_result_rgb = Some([
            data[start_index],
            data[start_index + 1],
            data[start_index + 2],
        ]);
    });

    // Then enqueue a new readback.
    //
    // It's quite hard to figure out when we no longer have to do this. The criteria would be roughly:
    // * mouse has not moved
    // * since the mouse moved last time we received the result
    // * the result we received is still about the exact same texture _content_
    //      * if it is a video the exact same texture may show a different frame by now
    // So instead we err on the safe side and keep requesting readbacks & frames.
    ui_ctx.request_repaint();

    // Read back a region of a few pixels. Criteria:
    // * moving the mouse doesn't typically immediately end up in a different region, important since readback has a delay
    // * we don't go overboard and read back a ton of data
    // * copy operation doesn't induce a lot of padding overhead due to row padding requirements
    const READBACK_RECT_SIZE: i32 = 64;

    let resolution = glam::UVec2::from_array(texture.width_height()).as_ivec2();
    let readback_rect_min = (pixel_pos - glam::IVec2::splat(READBACK_RECT_SIZE / 2))
        .clamp(glam::IVec2::ZERO, resolution);
    let readback_rect_max = (pixel_pos + glam::IVec2::splat(READBACK_RECT_SIZE / 2))
        .clamp(glam::IVec2::ZERO, resolution);
    let readback_rect_size = readback_rect_max - readback_rect_min;

    // Degenerate region (e.g. pixel at/past the texture edge) — nothing to read back.
    if readback_rect_size.x <= 0 || readback_rect_size.y <= 0 {
        return None;
    }
    let readback_area_size = readback_rect_size.as_uvec2();
    let readback_rect = re_renderer::RectInt {
        min: readback_rect_min,
        extent: readback_area_size,
    };

    let buffer_info =
        re_renderer::Texture2DBufferInfo::new(texture.format(), readback_rect.wgpu_extent());

    let mut readback_buffer = readback_belt.allocate(
        &render_ctx.device,
        &render_ctx.gpu_resources.buffers,
        buffer_info.buffer_size_padded,
        readback_id,
        Box::new(TextureReadbackUserdata {
            readback_rect,
            buffer_info,
        }),
    );
    // Release the belt lock before recording the copy command.
    drop(readback_belt);

    {
        let mut encoder = render_ctx.active_frame.before_view_builder_encoder.lock();
        if let Err(err) = readback_buffer.read_texture2d(
            encoder.get(),
            wgpu::ImageCopyTexture {
                texture: &texture.texture,
                mip_level: 0,
                origin: readback_rect.wgpu_origin(),
                aspect: wgpu::TextureAspect::All,
            },
            readback_rect.wgpu_extent(),
        ) {
            re_log::error_once!("Failed to read back texture: {err}");
        }
    }

    // Format whatever result arrived from a *previous* frame's readback (if any).
    readback_result_rgb.and_then(|rgb| {
        let rgb = [
            TensorElement::U8(rgb[0]),
            TensorElement::U8(rgb[1]),
            TensorElement::U8(rgb[2]),
        ];
        format_pixel_value(ImageKind::Color, ColorModel::RGB, &rgb)
    })
}