diff --git a/crates/re_renderer/examples/depth_cloud.rs b/crates/re_renderer/examples/depth_cloud.rs
index c66c17eb79b3..d1bf120fa325 100644
--- a/crates/re_renderer/examples/depth_cloud.rs
+++ b/crates/re_renderer/examples/depth_cloud.rs
@@ -20,8 +20,8 @@ use itertools::Itertools;
 use macaw::IsoTransform;
 use re_renderer::{
     renderer::{
-        ColormappedTexture, DepthCloud, DepthCloudDepthData, DepthCloudDrawData, DepthClouds,
-        DrawData, GenericSkyboxDrawData, RectangleDrawData, RectangleOptions, TexturedRect,
+        ColormappedTexture, DepthCloud, DepthCloudDrawData, DepthClouds, DrawData,
+        GenericSkyboxDrawData, RectangleDrawData, RectangleOptions, TexturedRect,
     },
     resource_managers::{GpuTexture2D, Texture2DCreationDesc},
     view_builder::{self, Projection, ViewBuilder},
@@ -44,7 +44,6 @@ enum CameraControl {
 struct RenderDepthClouds {
     depth: DepthTexture,
     albedo: AlbedoTexture,
-    albedo_handle: GpuTexture2D,
 
     scale: f32,
     point_radius_from_world_depth: f32,
@@ -175,11 +174,11 @@ impl RenderDepthClouds {
             clouds: vec![DepthCloud {
                 world_from_obj,
                 depth_camera_intrinsics: *intrinsics,
-                world_depth_from_data_depth: 1.0,
+                world_depth_from_texture_depth: 1.0,
                 point_radius_from_world_depth: *point_radius_from_world_depth,
                 max_depth_in_world: 5.0,
                 depth_dimensions: depth.dimensions,
-                depth_data: depth.data.clone(),
+                depth_texture: depth.texture.clone(),
                 colormap: re_renderer::Colormap::Turbo,
                 outline_mask_id: Default::default(),
                 picking_object_id: Default::default(),
@@ -233,19 +232,8 @@ impl framework::Example for RenderDepthClouds {
     fn new(re_ctx: &mut re_renderer::RenderContext) -> Self {
         re_log::info!("Stop camera movement by pressing 'Space'");
 
-        let depth = DepthTexture::spiral((640, 480).into());
-        let albedo = AlbedoTexture::spiral(depth.dimensions);
-
-        let albedo_handle = re_ctx.texture_manager_2d.create(
-            &mut re_ctx.gpu_resources.textures,
-            &Texture2DCreationDesc {
-                label: "albedo".into(),
-                data: bytemuck::cast_slice(&albedo.rgba8).into(),
-                format: wgpu::TextureFormat::Rgba8UnormSrgb,
-                width: albedo.dimensions.x,
-                height: albedo.dimensions.y,
-            },
-        );
+        let depth = DepthTexture::spiral(re_ctx, glam::uvec2(640, 480));
+        let albedo = AlbedoTexture::spiral(re_ctx, depth.dimensions);
 
         let scale = 50.0;
         let point_radius_from_world_depth = 0.1;
@@ -263,7 +251,6 @@ impl framework::Example for RenderDepthClouds {
         RenderDepthClouds {
             depth,
             albedo,
-            albedo_handle,
 
             scale,
             point_radius_from_world_depth,
@@ -283,7 +270,6 @@ impl framework::Example for RenderDepthClouds {
     ) -> Vec<framework::ViewDrawResult> {
         let Self {
             albedo,
-            albedo_handle,
             camera_control,
             camera_position,
             ..
@@ -326,7 +312,7 @@ impl framework::Example for RenderDepthClouds {
                 .transform_point3(glam::Vec3::new(1.0, 1.0, 0.0)),
             extent_u: world_from_model.transform_vector3(-glam::Vec3::X),
             extent_v: world_from_model.transform_vector3(-glam::Vec3::Y),
-            colormapped_texture: ColormappedTexture::from_unorm_srgba(albedo_handle.clone()),
+            colormapped_texture: ColormappedTexture::from_unorm_srgba(albedo.texture.clone()),
             options: RectangleOptions {
                 texture_filter_magnification: re_renderer::renderer::TextureFilterMag::Nearest,
                 texture_filter_minification: re_renderer::renderer::TextureFilterMin::Linear,
@@ -403,40 +389,57 @@ fn spiral(dimensions: glam::UVec2) -> impl Iterator<Item = (glam::UVec2, f32)> {
     })
 }
 
+pub fn hash(value: &impl std::hash::Hash) -> u64 {
+    ahash::RandomState::with_seeds(1, 2, 3, 4).hash_one(value)
+}
+
 struct DepthTexture {
     dimensions: glam::UVec2,
-    data: DepthCloudDepthData,
+    data: Vec<f32>,
+    texture: GpuTexture2D,
 }
 
 impl DepthTexture {
-    pub fn spiral(dimensions: glam::UVec2) -> Self {
+    pub fn spiral(re_ctx: &mut re_renderer::RenderContext, dimensions: glam::UVec2) -> Self {
         let size = (dimensions.x * dimensions.y) as usize;
         let mut data = std::iter::repeat(0f32).take(size).collect_vec();
         spiral(dimensions).for_each(|(texcoords, d)| {
             data[(texcoords.x + texcoords.y * dimensions.x) as usize] = d;
         });
 
-        let data = DepthCloudDepthData::F32(data.into());
-        Self { dimensions, data }
+        let label = format!("depth texture spiral {dimensions}");
+        let texture = re_ctx.texture_manager_2d.get_or_create(
+            hash(&label),
+            &mut re_ctx.gpu_resources.textures,
+            Texture2DCreationDesc {
+                label: label.into(),
+                data: bytemuck::cast_slice(&data).into(),
+                format: wgpu::TextureFormat::R32Float,
+                width: dimensions.x,
+                height: dimensions.y,
+            },
+        );
+
+        Self {
+            dimensions,
+            data,
+            texture,
+        }
     }
 
     pub fn get_linear(&self, x: u32, y: u32) -> f32 {
-        match &self.data {
-            DepthCloudDepthData::U16(data) => {
-                data[(x + y * self.dimensions.x) as usize] as f32 / u16::MAX as f32
-            }
-            DepthCloudDepthData::F32(data) => data[(x + y * self.dimensions.x) as usize],
-        }
+        self.data[(x + y * self.dimensions.x) as usize]
     }
 }
 
 struct AlbedoTexture {
     dimensions: glam::UVec2,
     rgba8: Vec<u8>,
+    texture: GpuTexture2D,
 }
 
 impl AlbedoTexture {
-    pub fn spiral(dimensions: glam::UVec2) -> Self {
+    pub fn spiral(re_ctx: &mut re_renderer::RenderContext, dimensions: glam::UVec2) -> Self {
         let size = (dimensions.x * dimensions.y) as usize;
         let mut rgba8 = std::iter::repeat(0).take(size * 4).collect_vec();
         spiral(dimensions).for_each(|(texcoords, d)| {
@@ -444,7 +447,24 @@ impl AlbedoTexture {
             rgba8[idx..idx + 4].copy_from_slice(re_renderer::colormap_turbo_srgb(d).as_slice());
         });
 
-        Self { dimensions, rgba8 }
+        let label = format!("albedo texture spiral {dimensions}");
+        let texture = re_ctx.texture_manager_2d.get_or_create(
+            hash(&label),
+            &mut re_ctx.gpu_resources.textures,
+            Texture2DCreationDesc {
+                label: label.into(),
+                data: bytemuck::cast_slice(&rgba8).into(),
+                format: wgpu::TextureFormat::Rgba8UnormSrgb,
+                width: dimensions.x,
+                height: dimensions.y,
+            },
+        );
+
+        Self {
+            dimensions,
+            rgba8,
+            texture,
+        }
     }
 
     #[allow(dead_code)]
diff --git a/crates/re_renderer/src/renderer/depth_cloud.rs b/crates/re_renderer/src/renderer/depth_cloud.rs
index 285c0a2f9fd0..e1718591152c 100644
--- a/crates/re_renderer/src/renderer/depth_cloud.rs
+++ b/crates/re_renderer/src/renderer/depth_cloud.rs
@@ -17,12 +17,11 @@ use crate::{
     allocator::create_and_fill_uniform_buffer_batch,
     draw_phases::{DrawPhase, OutlineMaskProcessor},
     include_shader_module,
-    resource_managers::ResourceManagerError,
+    resource_managers::{GpuTexture2D, ResourceManagerError},
     view_builder::ViewBuilder,
     wgpu_resources::{
         BindGroupDesc, BindGroupEntry, BindGroupLayoutDesc, GpuBindGroup, GpuBindGroupLayoutHandle,
-        GpuRenderPipelineHandle, GpuTexture, PipelineLayoutDesc, RenderPipelineDesc,
-        Texture2DBufferInfo, TextureDesc,
+        GpuRenderPipelineHandle, PipelineLayoutDesc, RenderPipelineDesc,
     },
     Colormap, OutlineMaskPreference, PickingLayerObjectId, PickingLayerProcessor,
 };
@@ -50,7 +49,7 @@ mod gpu_data {
         pub picking_layer_object_id: PickingLayerObjectId,
 
         /// Multiplier to get world-space depth from whatever is in the texture.
-        pub world_depth_from_texture_value: f32,
+        pub world_depth_from_texture_depth: f32,
 
         /// Point radius is calculated as world-space depth times this value.
         pub point_radius_from_world_depth: f32,
@@ -75,28 +74,21 @@ mod gpu_data {
             let super::DepthCloud {
                 world_from_obj,
                 depth_camera_intrinsics,
-                world_depth_from_data_depth,
+                world_depth_from_texture_depth,
                 point_radius_from_world_depth,
                 max_depth_in_world,
                 depth_dimensions: _,
-                depth_data,
+                depth_texture: _,
                 colormap,
                 outline_mask_id,
                 picking_object_id,
             } = depth_cloud;
 
-            let user_depth_from_texture_value = match depth_data {
-                super::DepthCloudDepthData::U16(_) => 65535.0, // un-normalize
-                super::DepthCloudDepthData::F32(_) => 1.0,
-            };
-            let world_depth_from_texture_value =
-                world_depth_from_data_depth * user_depth_from_texture_value;
-
             Self {
                 world_from_obj: (*world_from_obj).into(),
                 depth_camera_intrinsics: (*depth_camera_intrinsics).into(),
                 outline_mask_id: outline_mask_id.0.unwrap_or_default().into(),
-                world_depth_from_texture_value,
+                world_depth_from_texture_depth: *world_depth_from_texture_depth,
                 point_radius_from_world_depth: *point_radius_from_world_depth,
                 max_depth_in_world: *max_depth_in_world,
                 colormap: *colormap as u32,
@@ -108,32 +100,6 @@
     }
 }
 
-/// The raw data from a depth texture.
-///
-/// This is either `u16` or `f32` values; in both cases the data will be uploaded to the shader
-/// as-is.
-/// For `u16`s, this results in a `Depth16Unorm` texture, otherwise an `R32Float`.
-/// The reason we normalize `u16` is so that the shader can use a `float` texture in both cases.
-/// However, it means we need to multiply the sampled value by `65535.0` in the shader to get
-/// the actual depth.
-///
-/// The shader assumes that this is normalized, linear, non-flipped depth using the camera
-/// position as reference point (not the camera plane!).
-//
-// TODO(cmc): support more depth data types.
-// TODO(cmc): expose knobs to linearize/normalize/flip/cam-to-plane depth.
-#[derive(Debug, Clone)]
-pub enum DepthCloudDepthData {
-    U16(crate::Buffer<u16>),
-    F32(crate::Buffer<f32>),
-}
-
-impl Default for DepthCloudDepthData {
-    fn default() -> Self {
-        Self::F32(Default::default())
-    }
-}
-
 pub struct DepthCloud {
     /// The extrinsics of the camera used for the projection.
     pub world_from_obj: glam::Mat4,
@@ -143,8 +109,8 @@ pub struct DepthCloud {
     /// Only supports pinhole cameras at the moment.
     pub depth_camera_intrinsics: glam::Mat3,
 
-    /// Multiplier to get world-space depth from whatever is in [`Self::depth_data`].
-    pub world_depth_from_data_depth: f32,
+    /// Multiplier to get world-space depth from whatever is in [`Self::depth_texture`].
+    pub world_depth_from_texture_depth: f32,
 
     /// Point radius is calculated as world-space depth times this value.
     pub point_radius_from_world_depth: f32,
@@ -157,8 +123,8 @@ pub struct DepthCloud {
 
     /// The actual data from the depth texture.
     ///
-    /// See [`DepthCloudDepthData`] for more information.
-    pub depth_data: DepthCloudDepthData,
+    /// Only textures with sample type `Float` are supported.
+    pub depth_texture: GpuTexture2D,
 
     /// Configures color mapping mode.
     pub colormap: Colormap,
@@ -223,11 +189,20 @@ impl DrawData for DepthCloudDrawData {
     type Renderer = DepthCloudRenderer;
 }
 
+#[derive(thiserror::Error, Debug)]
+pub enum DepthCloudDrawDataError {
+    #[error("Depth texture format was {0:?}, only formats with sample type float are supported")]
+    InvalidDepthTextureFormat(wgpu::TextureFormat),
+
+    #[error(transparent)]
+    ResourceManagerError(#[from] ResourceManagerError),
+}
+
 impl DepthCloudDrawData {
     pub fn new(
         ctx: &mut RenderContext,
         depth_clouds: &DepthClouds,
-    ) -> Result<Self, ResourceManagerError> {
+    ) -> Result<Self, DepthCloudDrawDataError> {
         crate::profile_function!();
 
         let DepthClouds {
@@ -276,43 +251,14 @@ impl DepthCloudDrawData {
             depth_cloud_ubo_binding_outlines,
            depth_cloud_ubo_binding_opaque
        ) {
-            let depth_texture = match &depth_cloud.depth_data {
-                DepthCloudDepthData::U16(data) => {
-                    if cfg!(target_arch = "wasm32") {
-                        // Web: manual normalization because Depth16Unorm textures aren't supported on
-                        // the web (and won't ever be on the WebGL backend, see
-                        // https://github.com/gfx-rs/wgpu/issues/3537).
-                        //
-                        // TODO(cmc): use an RG8 texture and unpack it manually in the shader instead.
-                        use itertools::Itertools as _;
-                        let dataf32 = data
-                            .as_slice()
-                            .iter()
-                            .map(|d| *d as f32 / u16::MAX as f32)
-                            .collect_vec();
-                        create_and_upload_texture(
-                            ctx,
-                            depth_cloud,
-                            dataf32.as_slice(),
-                            wgpu::TextureFormat::R32Float,
-                        )
-                    } else {
-                        // Native: We use Depth16Unorm over R16Unorm because the latter is behind a feature flag and doesn't work on WebGPU.
-                        create_and_upload_texture(
-                            ctx,
-                            depth_cloud,
-                            data.as_slice(),
-                            wgpu::TextureFormat::Depth16Unorm,
-                        )
-                    }
-                }
-                DepthCloudDepthData::F32(data) => create_and_upload_texture(
-                    ctx,
-                    depth_cloud,
-                    data.as_slice(),
-                    wgpu::TextureFormat::R32Float,
-                ),
-            };
+            if !matches!(
+                depth_cloud.depth_texture.format().describe().sample_type,
+                wgpu::TextureSampleType::Float { filterable: _ }
+            ) {
+                return Err(DepthCloudDrawDataError::InvalidDepthTextureFormat(
+                    depth_cloud.depth_texture.format(),
+                ));
+            }
 
             let mk_bind_group = |label, ubo: BindGroupEntry| {
                 ctx.gpu_resources.bind_groups.alloc(
@@ -322,7 +268,7 @@ impl DepthCloudDrawData {
                         label,
                         entries: smallvec![
                             ubo,
-                            BindGroupEntry::DefaultTextureView(depth_texture.handle),
+                            BindGroupEntry::DefaultTextureView(depth_cloud.depth_texture.handle),
                         ],
                         layout: bg_layout,
                     },
@@ -344,81 +290,6 @@ impl DepthCloudDrawData {
     }
 }
 
-fn create_and_upload_texture<T: bytemuck::Pod>(
-    ctx: &mut RenderContext,
-    depth_cloud: &DepthCloud,
-    data: &[T],
-    depth_format: wgpu::TextureFormat,
-) -> GpuTexture {
-    crate::profile_function!();
-
-    let depth_texture_size = wgpu::Extent3d {
-        width: depth_cloud.depth_dimensions.x,
-        height: depth_cloud.depth_dimensions.y,
-        depth_or_array_layers: 1,
-    };
-    let depth_texture_desc = TextureDesc {
-        label: "depth_texture".into(),
-        size: depth_texture_size,
-        mip_level_count: 1,
-        sample_count: 1,
-        dimension: wgpu::TextureDimension::D2,
-        format: depth_format,
-        usage: wgpu::TextureUsages::TEXTURE_BINDING | wgpu::TextureUsages::COPY_DST,
-    };
-    let depth_texture = ctx
-        .gpu_resources
-        .textures
-        .alloc(&ctx.device, &depth_texture_desc);
-
-    // Not supporting compressed formats here.
-    debug_assert!(depth_texture_desc.format.describe().block_dimensions == (1, 1));
-
-    let buffer_info =
-        Texture2DBufferInfo::new(depth_texture_desc.format, depth_cloud.depth_dimensions);
-
-    // TODO(andreas): CpuGpuWriteBelt should make it easier to do this.
-    let bytes_padding_per_row =
-        (buffer_info.bytes_per_row_padded - buffer_info.bytes_per_row_unpadded) as usize;
-    // Sanity check the padding size. If this happens something is seriously wrong, as it would imply
-    // that we can't express the required alignment with the block size.
-    debug_assert!(
-        bytes_padding_per_row % std::mem::size_of::<T>() == 0,
-        "Padding is not a multiple of pixel size. Can't correctly pad the texture data"
-    );
-
-    let mut depth_texture_staging = ctx.cpu_write_gpu_read_belt.lock().allocate::<T>(
-        &ctx.device,
-        &ctx.gpu_resources.buffers,
-        buffer_info.buffer_size_padded as usize / std::mem::size_of::<T>(),
-    );
-
-    // Fill with a single copy if possible, otherwise do multiple, filling in padding.
-    if bytes_padding_per_row == 0 {
-        depth_texture_staging.extend_from_slice(data);
-    } else {
-        let num_pixel_padding_per_row = bytes_padding_per_row / std::mem::size_of::<T>();
-        for row in data.chunks(depth_texture_desc.size.width as usize) {
-            depth_texture_staging.extend_from_slice(row);
-            depth_texture_staging
-                .extend(std::iter::repeat(T::zeroed()).take(num_pixel_padding_per_row));
-        }
-    }
-
-    depth_texture_staging.copy_to_texture2d(
-        ctx.active_frame.before_view_builder_encoder.lock().get(),
-        wgpu::ImageCopyTexture {
-            texture: &depth_texture.inner.texture,
-            mip_level: 0,
-            origin: wgpu::Origin3d::ZERO,
-            aspect: wgpu::TextureAspect::All,
-        },
-        depth_cloud.depth_dimensions,
-    );
-
-    depth_texture
-}
-
 pub struct DepthCloudRenderer {
     render_pipeline_color: GpuRenderPipelineHandle,
     render_pipeline_picking_layer: GpuRenderPipelineHandle,
diff --git a/crates/re_renderer/src/renderer/mod.rs b/crates/re_renderer/src/renderer/mod.rs
index 3b4284bc2a6d..90caf7f1fda1 100644
--- a/crates/re_renderer/src/renderer/mod.rs
+++ b/crates/re_renderer/src/renderer/mod.rs
@@ -14,9 +14,7 @@ pub use point_cloud::{
 };
 
 mod depth_cloud;
-pub use self::depth_cloud::{
-    DepthCloud, DepthCloudDepthData, DepthCloudDrawData, DepthCloudRenderer, DepthClouds,
-};
+pub use self::depth_cloud::{DepthCloud, DepthCloudDrawData, DepthCloudRenderer, DepthClouds};
 
 mod test_triangle;
 pub use test_triangle::TestTriangleDrawData;
diff --git a/crates/re_renderer/src/resource_managers/texture_manager.rs b/crates/re_renderer/src/resource_managers/texture_manager.rs
index 980918cb8a48..04999dc7ffe8 100644
--- a/crates/re_renderer/src/resource_managers/texture_manager.rs
+++ b/crates/re_renderer/src/resource_managers/texture_manager.rs
@@ -75,7 +75,9 @@ pub struct Texture2DCreationDesc<'a> {
     pub label: DebugLabel,
 
     /// Data for the highest mipmap level.
-    /// Must be padded according to wgpu rules and ready for upload.
+    ///
+    /// Data is expected to be tightly packed.
+    /// I.e. it is *not* padded according to wgpu buffer->texture transfer rules, padding will happen on the fly if necessary.
     /// TODO(andreas): This should be a kind of factory function/builder instead which gets target memory passed in.
     pub data: std::borrow::Cow<'a, [u8]>,
     pub format: wgpu::TextureFormat,
diff --git a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs
index 5e1227feb4ad..17821d2f475e 100644
--- a/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs
+++ b/crates/re_viewer/src/ui/view_spatial/scene/scene_part/images.rs
@@ -9,7 +9,8 @@ use re_log_types::{
 };
 use re_query::{query_primary_with_history, EntityView, QueryError};
 use re_renderer::{
-    renderer::{DepthCloud, DepthCloudDepthData, RectangleOptions},
+    renderer::{DepthCloud, RectangleOptions},
+    resource_managers::Texture2DCreationDesc,
     Colormap, OutlineMaskPreference,
 };
 
@@ -277,24 +278,50 @@ impl ImagesPart {
             return Err(format!("Couldn't fetch pinhole extrinsics at {pinhole_ent_path:?}"));
         };
 
-        // TODO(cmc): automagically convert as needed for non-natively supported datatypes?
-        let data = match &tensor.data {
-            // NOTE: Shallow clone if feature `arrow` is enabled, full alloc + memcpy otherwise.
-            TensorData::U16(data) => DepthCloudDepthData::U16(data.clone()),
-            TensorData::F32(data) => DepthCloudDepthData::F32(data.clone()),
-            _ => {
-                return Err(format!(
-                    "Tensor datatype {} is not supported for backprojection",
-                    tensor.dtype()
-                ));
-            }
+        let Some([height, width, _]) = tensor.image_height_width_channels() else {
+            return Err(format!("Tensor at {ent_path:?} is not an image"));
+        };
+        let dimensions = glam::UVec2::new(width as _, height as _);
+
+        let depth_texture = {
+            // Ideally, we'd use the same key as for displaying the texture, but we might make other compromises regarding formats etc.!
+            // So to not couple this, we use a different key here
+            let texture_key = egui::util::hash((tensor.id(), "depth_cloud"));
+            let mut data_f32 = Vec::new();
+            ctx.render_ctx.texture_manager_2d.get_or_create_with(
+                texture_key,
+                &mut ctx.render_ctx.gpu_resources.textures,
+                || {
+                    // TODO(andreas/cmc): Ideally we'd upload the u16 data as-is.
+                    // However, R16Unorm is behind a feature flag and Depth16Unorm doesn't work on WebGL (and is awkward as this is a depth buffer format!).
+                    let data = match &tensor.data {
+                        TensorData::U16(data) => {
+                            data_f32.extend(data.as_slice().iter().map(|d| *d as f32));
+                            bytemuck::cast_slice(&data_f32).into()
+                        }
+                        TensorData::F32(data) => bytemuck::cast_slice(data).into(),
+                        _ => {
+                            return Err(format!(
+                                "Tensor datatype {} is not supported for back-projection",
+                                tensor.dtype()
+                            ));
+                        }
+                    };
+
+                    Ok(Texture2DCreationDesc {
+                        label: format!("Depth cloud for {ent_path:?}").into(),
+                        data,
+                        format: wgpu::TextureFormat::R32Float,
+                        width: width as _,
+                        height: height as _,
+                    })
+                },
+            )?
         };
 
         let depth_from_world_scale = *properties.depth_from_world_scale.get();
-        let world_depth_from_data_depth = 1.0 / depth_from_world_scale;
-
-        let (h, w) = (tensor.shape()[0].size, tensor.shape()[1].size);
-        let dimensions = glam::UVec2::new(w as _, h as _);
+        let world_depth_from_texture_depth = 1.0 / depth_from_world_scale;
 
         let colormap = match *properties.color_mapper.get() {
             re_data_store::ColorMapper::Colormap(colormap) => match colormap {
@@ -311,7 +338,7 @@ impl ImagesPart {
         // is a factor (`backproject_radius_scale`) of the diameter of a pixel projected
         // at that distance.
         let fov_y = intrinsics.fov_y().unwrap_or(1.0);
-        let pixel_width_from_depth = (0.5 * fov_y).tan() / (0.5 * h as f32);
+        let pixel_width_from_depth = (0.5 * fov_y).tan() / (0.5 * height as f32);
         let radius_scale = *properties.backproject_radius_scale.get();
         let point_radius_from_world_depth = radius_scale * pixel_width_from_depth;
 
@@ -321,20 +348,20 @@ impl ImagesPart {
             // This could only happen for Jpegs, and we should never get here.
             // TODO(emilk): refactor the code so that we can always calculate a range for the tensor
             re_log::warn_once!("Couldn't calculate range for a depth tensor!?");
-            match data {
-                DepthCloudDepthData::U16(_) => u16::MAX as f32,
-                DepthCloudDepthData::F32(_) => 10.0,
+            match tensor.data {
+                TensorData::U16(_) => u16::MAX as f32,
+                _ => 10.0,
             }
         };
 
         scene.primitives.depth_clouds.clouds.push(DepthCloud {
             world_from_obj,
             depth_camera_intrinsics: intrinsics.image_from_cam.into(),
-            world_depth_from_data_depth,
+            world_depth_from_texture_depth,
             point_radius_from_world_depth,
-            max_depth_in_world: world_depth_from_data_depth * max_data_value,
+            max_depth_in_world: max_data_value / depth_from_world_scale,
             depth_dimensions: dimensions,
-            depth_data: data,
+            depth_texture,
             colormap,
             outline_mask_id: entity_highlight.overall,
             picking_object_id: re_renderer::PickingLayerObjectId(ent_path.hash64()),