2 changes: 1 addition & 1 deletion public/assets/img/volume/t1_icbm_normal_1mm_pn0_rf0.py
@@ -56,7 +56,7 @@ def load_byte_array_from_file(file_path):
with open("t1_icbm_normal_1mm_pn0_rf0_180x216x180_uint8_1x1.bin", "rb") as f:
bytes_data = f.read()

gzip_filename = "t1_icbm_normal_1mm_pn0_rf0_180x216x180_uint8_1x1.bin-gz"
gzip_filename = "t1_icbm_normal_1mm_pn0_rf0_180x216x180_uint8_1x1.bin.gz"

with gzip.open(gzip_filename, "wb", compresslevel=9) as f:
f.write(bytes_data)
Binary file not shown.
Binary file not shown.
10 changes: 10 additions & 0 deletions sample/volumeRenderingTexture3D/index.html
@@ -20,11 +20,21 @@
max-width: 100%;
display: block;
}
#status {
position: fixed;
left: 0;
bottom: 0;
background: darkred;
color: white;
font-family: monospace;
font-size: 200%;
}
</style>
<script defer src="main.js" type="module"></script>
<script defer type="module" src="../../js/iframe-helper.js"></script>
</head>
<body>
<canvas></canvas>
<div id="status"></div>
</body>
</html>
121 changes: 86 additions & 35 deletions sample/volumeRenderingTexture3D/main.ts
@@ -4,24 +4,76 @@ import volumeWGSL from './volume.wgsl';
import { quitIfWebGPUNotAvailable } from '../util';

const canvas = document.querySelector('canvas') as HTMLCanvasElement;
const status = document.getElementById('status') as HTMLDivElement;

const gui = new GUI();

const brainImages = {
r8unorm: {
bytesPerBlock: 1,
blockLength: 1,
feature: undefined,
dataPath:
'../../assets/img/volume/t1_icbm_normal_1mm_pn0_rf0_180x216x180_uint8_1x1.bin.gz',
},
'bc4-r-unorm': {
bytesPerBlock: 8,
blockLength: 4,
feature: 'texture-compression-bc-sliced-3d',
dataPath:
'../../assets/img/volume/t1_icbm_normal_1mm_pn0_rf0_180x216x180_bc4_4x4.bin.gz',
// Generated with texconv from https://github.com/microsoft/DirectXTex/releases
},
'astc-12x12-unorm': {
bytesPerBlock: 16,
blockLength: 12,
feature: 'texture-compression-astc-sliced-3d',
dataPath:
'../../assets/img/volume/t1_icbm_normal_1mm_pn0_rf0_180x216x180_astc_12x12.bin.gz',
// Generated with astcenc from https://github.com/ARM-software/astc-encoder/releases
},
};

// GUI parameters
const params: { rotateCamera: boolean; near: number; far: number } = {
const params: {
rotateCamera: boolean;
near: number;
far: number;
textureFormat: GPUTextureFormat;
} = {
rotateCamera: true,
near: 2.0,
far: 7.0,
near: 4.3,
far: 4.4,
textureFormat: 'r8unorm',
};

gui.add(params, 'rotateCamera', true);
gui.add(params, 'near', 2.0, 7.0);
gui.add(params, 'far', 2.0, 7.0);
gui
.add(params, 'textureFormat', Object.keys(brainImages))
.onChange(async () => {
await createVolumeTexture(params.textureFormat);
});

const adapter = await navigator.gpu?.requestAdapter({
featureLevel: 'compatibility',
});
const device = await adapter?.requestDevice();
const requiredFeatures = [];
if (adapter?.features.has('texture-compression-bc-sliced-3d')) {
requiredFeatures.push(
'texture-compression-bc',
'texture-compression-bc-sliced-3d'
);
}
if (adapter?.features.has('texture-compression-astc-sliced-3d')) {
requiredFeatures.push(
'texture-compression-astc',
'texture-compression-astc-sliced-3d'
);
}
const device = await adapter?.requestDevice({ requiredFeatures });

quitIfWebGPUNotAvailable(adapter, device);
const context = canvas.getContext('webgpu') as GPUCanvasContext;

@@ -77,34 +129,26 @@ const uniformBuffer = device.createBuffer({
usage: GPUBufferUsage.UNIFORM | GPUBufferUsage.COPY_DST,
});

let volumeTexture: GPUTexture | null = null;

// Fetch the image and upload it into a GPUTexture.
let volumeTexture: GPUTexture;
{
async function createVolumeTexture(format: GPUTextureFormat) {
volumeTexture = null;

const { blockLength, bytesPerBlock, dataPath, feature } = brainImages[format];
const width = 180;
const height = 216;
const depth = 180;
const format: GPUTextureFormat = 'r8unorm';
const blockLength = 1;
const bytesPerBlock = 1;
const blocksWide = Math.ceil(width / blockLength);
const blocksHigh = Math.ceil(height / blockLength);
const bytesPerRow = blocksWide * bytesPerBlock;
const dataPath =
'../../assets/img/volume/t1_icbm_normal_1mm_pn0_rf0_180x216x180_uint8_1x1.bin-gz';

// Fetch the compressed data
const response = await fetch(dataPath);
const compressedArrayBuffer = await response.arrayBuffer();

// Decompress the data using DecompressionStream for gzip format
const decompressionStream = new DecompressionStream('gzip');
const decompressedStream = new Response(
compressedArrayBuffer
).body.pipeThrough(decompressionStream);
const decompressedArrayBuffer = await new Response(
decompressedStream
).arrayBuffer();
const byteArray = new Uint8Array(decompressedArrayBuffer);
if (feature && !device.features.has(feature)) {
status.textContent = `${feature} not supported`;
return;
} else {
status.textContent = '';
}

volumeTexture = device.createTexture({
dimension: '3d',
@@ -113,16 +157,19 @@ let volumeTexture: GPUTexture;
usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST,
});

const response = await fetch(dataPath);
const buffer = await response.arrayBuffer();

device.queue.writeTexture(
{
texture: volumeTexture,
},
byteArray,
{ texture: volumeTexture },
buffer,
{ bytesPerRow: bytesPerRow, rowsPerImage: blocksHigh },
[width, height, depth]
);
}

await createVolumeTexture(params.textureFormat);

// Create a sampler with linear filtering for smooth interpolation.
const sampler = device.createSampler({
magFilter: 'linear',
@@ -131,7 +178,7 @@ const sampler = device.createSampler({
maxAnisotropy: 16,
});

const uniformBindGroup = device.createBindGroup({
const bindGroupDescriptor: GPUBindGroupDescriptor = {
layout: pipeline.getBindGroupLayout(0),
entries: [
{
@@ -146,17 +193,17 @@ const uniformBindGroup = device.createBindGroup({
},
{
binding: 2,
resource: volumeTexture.createView(),
resource: undefined, // Assigned later
},
],
});
};

const renderPassDescriptor: GPURenderPassDescriptor = {
colorAttachments: [
{
view: undefined, // Assigned later

clearValue: [0.5, 0.5, 0.5, 1.0],
clearValue: [0, 0, 0, 1.0],
loadOp: 'clear',
storeOp: 'discard',
},
@@ -207,9 +254,13 @@ function frame() {

const commandEncoder = device.createCommandEncoder();
const passEncoder = commandEncoder.beginRenderPass(renderPassDescriptor);
passEncoder.setPipeline(pipeline);
passEncoder.setBindGroup(0, uniformBindGroup);
passEncoder.draw(3);
if (volumeTexture) {
bindGroupDescriptor.entries[2].resource = volumeTexture.createView();
const uniformBindGroup = device.createBindGroup(bindGroupDescriptor);
passEncoder.setPipeline(pipeline);
passEncoder.setBindGroup(0, uniformBindGroup);
passEncoder.draw(3);
}
passEncoder.end();
device.queue.submit([commandEncoder.finish()]);

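A quick worked example of the copy-footprint arithmetic that the new createVolumeTexture relies on. This is a minimal TypeScript sketch that only restates the Math.ceil math from the diff above for the 180x216x180 volume; the footprint helper and the BlockInfo type are hypothetical names introduced here for illustration and are not part of the sample.

// Minimal sketch (assumed helper, not in the sample): copy footprint for a block-compressed upload.
type BlockInfo = { bytesPerBlock: number; blockLength: number };

function footprint(width: number, height: number, { bytesPerBlock, blockLength }: BlockInfo) {
  // One row of the copy is a row of blocks; one 2D slice is blocksHigh rows of blocks.
  const blocksWide = Math.ceil(width / blockLength);
  const blocksHigh = Math.ceil(height / blockLength);
  return { bytesPerRow: blocksWide * bytesPerBlock, rowsPerImage: blocksHigh };
}

// r8unorm: 1x1 "blocks" of 1 byte -> 180 bytes per row, 216 rows per slice.
console.log(footprint(180, 216, { bytesPerBlock: 1, blockLength: 1 }));
// bc4-r-unorm: 4x4 blocks of 8 bytes -> ceil(180/4) = 45 blocks -> 360 bytes per row, 54 rows per slice.
console.log(footprint(180, 216, { bytesPerBlock: 8, blockLength: 4 }));
// astc-12x12-unorm: 12x12 blocks of 16 bytes -> ceil(180/12) = 15 blocks -> 240 bytes per row, 18 rows per slice.
console.log(footprint(180, 216, { bytesPerBlock: 16, blockLength: 12 }));

These are the bytesPerRow / rowsPerImage values handed to device.queue.writeTexture, which is why each entry in brainImages only needs bytesPerBlock and blockLength rather than a separate upload path per format.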