<!-- 219 lines · 5.1 KiB · HTML (file-listing header from the original paste) -->
<!DOCTYPE html>
|
|
<html lang="en">
|
|
<head>
|
|
<meta charset="UTF-8" />
|
|
<meta name="viewport" content="width=device-width, initial-scale=1" />
|
|
<base href="./">
|
|
<title>Fullscreen WASM Raytracer - WebGPU</title>
|
|
<style>
|
|
html, body {
|
|
margin: 0; padding: 0; overflow: hidden; height: 100%;
|
|
background: black;
|
|
}
|
|
canvas {
|
|
display: block;
|
|
width: 100vw;
|
|
height: 100vh;
|
|
}
|
|
#fps {
|
|
position: fixed;
|
|
top: 5px; left: 5px;
|
|
color: white;
|
|
background: rgba(0,0,0,0.5);
|
|
padding: 4px 8px;
|
|
font-family: monospace;
|
|
font-size: 14px;
|
|
z-index: 100;
|
|
}
|
|
</style>
|
|
</head>
|
|
<body>
|
|
<div id="fps">FPS: 0</div>
|
|
<canvas id="gpu-canvas"></canvas>
|
|
|
|
<script type="module">
|
|
import WasmModule from './wasm_renderer.js';
|
|
|
|
// Entry point: sets up WebGPU, loads the WASM raytracer, and streams its
// CPU-side RGBA framebuffer into a fullscreen textured quad every frame.
async function main() {
  const canvas = document.getElementById('gpu-canvas');
  const fpsElem = document.getElementById('fps');

  // Bail out early on browsers without WebGPU.
  if (!navigator.gpu) {
    alert("WebGPU not supported on this browser.");
    return;
  }

  const adapter = await navigator.gpu.requestAdapter();
  // BUG FIX: requestAdapter() resolves to null when no suitable GPU is
  // available; the original dereferenced it unchecked.
  if (!adapter) {
    alert("WebGPU not supported on this browser.");
    return;
  }
  const device = await adapter.requestDevice();
  const context = canvas.getContext('webgpu');

  const format = navigator.gpu.getPreferredCanvasFormat();
  context.configure({
    device,
    format,
    alphaMode: "opaque"
  });

  // Load WASM module (Emscripten-style factory exporting _-prefixed C functions).
  const wasm = await WasmModule();

  // Nearest-neighbour sampling: the framebuffer is uploaded 1:1, no filtering wanted.
  const sampler = device.createSampler({
    magFilter: 'nearest',
    minFilter: 'nearest'
  });

  // Fullscreen-quad pipeline: two triangles, UVs derived from clip-space position.
  const shaderModule = device.createShaderModule({
    code: `
      @group(0) @binding(0) var mySampler: sampler;
      @group(0) @binding(1) var myTexture: texture_2d<f32>;

      struct VertexOutput {
        @builtin(position) pos: vec4<f32>,
        @location(0) uv: vec2<f32>,
      };

      @vertex
      fn vertex_main(@builtin(vertex_index) vertexIndex: u32) -> VertexOutput {
        var pos = array<vec2<f32>, 6>(
          vec2<f32>(-1.0, -1.0),
          vec2<f32>( 1.0, -1.0),
          vec2<f32>(-1.0, 1.0),
          vec2<f32>(-1.0, 1.0),
          vec2<f32>( 1.0, -1.0),
          vec2<f32>( 1.0, 1.0)
        );

        var uv = (pos[vertexIndex] + vec2<f32>(1.0)) * 0.5;

        var output: VertexOutput;
        output.pos = vec4<f32>(pos[vertexIndex], 0.0, 1.0);
        output.uv = uv;
        return output;
      }

      @fragment
      fn fragment_main(@location(0) uv: vec2<f32>) -> @location(0) vec4<f32> {
        return textureSample(myTexture, mySampler, uv);
      }
    `
  });

  const pipeline = device.createRenderPipeline({
    layout: 'auto',
    vertex: {
      module: shaderModule,
      entryPoint: 'vertex_main',
    },
    fragment: {
      module: shaderModule,
      entryPoint: 'fragment_main',
      targets: [{ format }],
    },
    primitive: {
      topology: 'triangle-list',
    },
  });

  // Resolution-dependent state; (re)built by resize().
  let width = 0;
  let height = 0;
  let framebufferPtr = 0;
  let framebufferBytes = 0;
  let texture = null;
  let bindGroup = null;

  // BUG FIX: the original created the texture and bind group once at the
  // initial resolution and never recreated them, so after a window resize
  // writeTexture uploaded a width*height region into a stale, differently
  // sized texture (WebGPU validation error). Recreate both here.
  function resize() {
    width = window.innerWidth;
    height = window.innerHeight;
    canvas.width = width;
    canvas.height = height;
    wasm._set_resolution(width, height);

    // NOTE(review): re-query the pointer after _set_resolution — it
    // presumably reallocates the framebuffer for the new size; confirm
    // against the WASM exports. Harmless if the pointer is stable.
    framebufferPtr = wasm._get_framebuffer_ptr();
    framebufferBytes = width * height * 4; // RGBA8, tightly packed

    if (texture) texture.destroy();
    texture = device.createTexture({
      size: [width, height],
      format: 'rgba8unorm',
      usage: GPUTextureUsage.COPY_DST | GPUTextureUsage.TEXTURE_BINDING
    });

    // The bind group references the texture view, so it must be rebuilt too.
    bindGroup = device.createBindGroup({
      layout: pipeline.getBindGroupLayout(0),
      entries: [
        { binding: 0, resource: sampler },
        { binding: 1, resource: texture.createView() },
      ],
    });
  }

  window.addEventListener('resize', resize);
  resize();

  let frameCount = 0;
  let lastTime = performance.now();

  // Upload the WASM framebuffer into the GPU texture. The Uint8Array view is
  // rebuilt every call because HEAPU8.buffer is detached whenever WASM
  // linear memory grows.
  const uploadToTexture = () => {
    const pixels = new Uint8Array(wasm.HEAPU8.buffer, framebufferPtr, framebufferBytes);
    device.queue.writeTexture(
      { texture },
      pixels,
      { bytesPerRow: width * 4 },
      { width, height, depthOrArrayLayers: 1 }
    );
  };

  // Per-frame loop: advance the raytracer, upload its output, draw the quad.
  function frame(time) {
    wasm._update_framebuffer(time * 0.001); // WASM side takes seconds

    uploadToTexture();

    const commandEncoder = device.createCommandEncoder();
    const pass = commandEncoder.beginRenderPass({
      colorAttachments: [{
        view: context.getCurrentTexture().createView(),
        loadOp: 'clear',
        storeOp: 'store',
        clearValue: { r: 0, g: 0, b: 0, a: 1 },
      }]
    });

    pass.setPipeline(pipeline);
    pass.setBindGroup(0, bindGroup);
    pass.draw(6); // fullscreen quad = 2 triangles
    pass.end();

    device.queue.submit([commandEncoder.finish()]);

    // FPS readout, refreshed once per second.
    frameCount++;
    const now = performance.now();
    const delta = now - lastTime;
    if (delta >= 1000) {
      const fps = (frameCount * 1000) / delta;
      fpsElem.textContent = `FPS: ${fps.toFixed(1)}`;
      frameCount = 0;
      lastTime = now;
    }

    requestAnimationFrame(frame);
  }

  requestAnimationFrame(frame);
}

main();
|
|
</script>
|
|
</body>
|
|
</html>
|