Initial commit

2025-11-17 15:06:39 +01:00
parent 2015516eca
commit 4d2432a1c2
91 changed files with 5074894 additions and 2 deletions


@@ -0,0 +1,97 @@
// worker.js
import { ParticleSimulation } from "./demo.js";
var particleSimulation = new ParticleSimulation();
export class Controller {
setup( request ) {
var canvas = request.canvas;
console.log( canvas );
particleSimulation.setup( canvas, request.width, request.height );
//self.postMessage({ type: "pong", id: request.id });
}
resetParticlePositions() {
particleSimulation.resetParticlePositions();
}
mousemove( request ) {
particleSimulation.eventManager.mousemove( request );
}
mousedown( request ) {
console.log("onMouseDown");
particleSimulation.eventManager.mousedown( request );
}
mouseup( request ) {
particleSimulation.eventManager.mouseup( request );
}
mouseleave( request ) {
particleSimulation.eventManager.mouseleave( request );
}
wheel( request ) {
particleSimulation.eventManager.wheel( request );
}
resize( request ) {
particleSimulation.eventManager.resize( request );
}
}
const controller = new Controller();
self.onmessage = function (event) {
const data = event.data;
if (typeof data !== "object" || typeof data.method !== "string") {
console.warn("Invalid request received:", data);
return;
}
// Optional: wrap the data into a Request instance (if you need its methods)
// const request = new Request(data.method, data.payload);
// Or just use plain data object
const request = data;
const methodName = request.method;
if (typeof controller[methodName] !== "function") {
console.warn("No method found for:", request.method);
return;
}
controller[methodName](request);
};

Demos/Texture/demo.js Normal file

@@ -0,0 +1,300 @@
import Shader from "../../framework/WebGpu.js"
import Matrix4 from "../../framework/Matrix4.js"
import Vector3 from "../../framework/Vector3.js"
import Camera from "../../framework/Camera.js";
import EventManager from "../../framework/eventManager.js";
import ShaderInpector from "../../framework/ShaderInpector.js";
export class ParticleSimulation {
canvas;
device;
camera;
useLocalSort = true;
eventManager = new EventManager();
frameCount = 0;
setCanvas( canvas ) {
this.canvas = canvas;
this.eventManager.setCanvas( canvas );
}
createTextureFromImageBitmap( device, imageBitmap ) {
const texture = device.createTexture( {
size: [ imageBitmap.width, imageBitmap.height, 1 ],
format: 'rgba8unorm',
usage: GPUTextureUsage.TEXTURE_BINDING | GPUTextureUsage.COPY_DST | GPUTextureUsage.RENDER_ATTACHMENT
} );
device.queue.copyExternalImageToTexture(
{ source: imageBitmap },
{ texture: texture },
[ imageBitmap.width, imageBitmap.height, 1 ]
);
return texture;
}
async loadImageBitmap(url) {
const response = await fetch( url );
const blob = await response.blob();
const imageBitmap = await createImageBitmap( blob );
return imageBitmap;
}
async loadTexture( url ) {
const imageBitmap = await this.loadImageBitmap( url );
const texture = this.createTextureFromImageBitmap( this.device, imageBitmap );
return texture;
}
createPlane(width, height, repeatU, repeatV) {
const vertices = new Float32Array( 18 ); // 6 vertices (2 triangles) * 3 coords
const normals = new Float32Array( 18 ); // same count as vertices
const uvs = new Float32Array( 12 ); // 6 vertices * 2 coords
// Positions (two triangles forming a plane on XY plane at z=0)
// Large plane from (-width/2, -height/2) to (width/2, height/2)
vertices.set([
-width / 2, -height / 2, 0,
width / 2, -height / 2, 0,
-width / 2, height / 2, 0,
-width / 2, height / 2, 0,
width / 2, -height / 2, 0,
width / 2, height / 2, 0
]);
// Normals all pointing +Z
for (let i = 0; i < 6; i++) {
normals[i * 3 + 0] = 0;
normals[i * 3 + 1] = 0;
normals[i * 3 + 2] = 1;
}
// UVs scaled by repeatU, repeatV to repeat texture over the plane
uvs.set([
0, 0,
repeatU, 0,
0, repeatV,
0, repeatV,
repeatU, 0,
repeatU, repeatV
]);
return { vertices, normals, uvs };
}
async setup( offscreenCanvas, width, height ) {
offscreenCanvas.width = width;
offscreenCanvas.height = height;
this.canvas = offscreenCanvas;
const context = offscreenCanvas.getContext("webgpu");
this.camera = new Camera( [0, 0, 1115], [0, -.3, 0], [0, 1, 0] );
this.eventManager.setup( offscreenCanvas, this.camera );
const adapter = await self.navigator.gpu.requestAdapter();
if ( !adapter ) {
throw new Error("Failed to get GPU adapter");
}
const presentationFormat = navigator.gpu.getPreferredCanvasFormat();
this.device = await adapter.requestDevice();
this.renderShader = new Shader( this.device );
context.configure({
device: this.device,
format: presentationFormat,
alphaMode: "opaque"
});
const instanceCount = 100;
const instancePositions = new Float32Array(instanceCount * 4); // vec4 per instance
for (let i = 0; i < instanceCount; i++) {
const x = (i % 10) * 300.0;
const y = Math.floor(i / 10) * 350.0;
instancePositions[i * 4 + 0] = x - 1000;
instancePositions[i * 4 + 1] = 0;
instancePositions[i * 4 + 2] = y - 1000;
instancePositions[i * 4 + 3] = 0;
}
var model = await this.loadJSON("demo.json");
var mesh = model.meshes[0];
this.renderShader.setCanvas( this.canvas );
this.renderShader.topology = "triangle-list";
await this.renderShader.setup( "../../shaders/triangle-list-texture.wgsl");
this.renderShader.setAttribute( "position", mesh.vertices );
this.renderShader.setAttribute( "normal", mesh.normals );
this.renderShader.setAttribute( "uv", mesh.texturecoords[0] );
var faces = mesh.faces;
const indexArray = new Uint32Array(faces.length * 3);
for (let i = 0; i < faces.length; i++) {
indexArray[i * 3 + 0] = faces[i][0];
indexArray[i * 3 + 1] = faces[i][1];
indexArray[i * 3 + 2] = faces[i][2];
}
this.renderShader.setIndices( indexArray );
this.renderShader.setCanvas( this.canvas );
this.renderShader.topology = "triangle-list";
await this.renderShader.setup( "../../shaders/triangle-list-texture.wgsl");
/*
const { vertices, normals, uvs } = this.createPlane( 1000, 1000, 4, 4 );
this.renderShader.setAttribute( "position", vertices );
this.renderShader.setAttribute( "normal", normals );
this.renderShader.setAttribute( "uv", uvs );
this.vertexCount = vertices.length / 3
*/
this.renderShader.setVariable( "instancePositions", instancePositions );
var texture = await this.loadTexture("./textures/defaultnouvs.png");
const sampler = this.device.createSampler({
minFilter: 'linear',
magFilter: 'linear',
mipmapFilter: 'linear',
addressModeU: 'repeat',
addressModeV: 'repeat',
});
this.renderShader.setVariable( "mySampler", sampler );
this.renderShader.setVariable( "myTexture", texture );
this.render();
}
updateTimeDelta() {
const now = performance.now();
this.deltaTimeValue = ( now - this.lastFrameTime ) / 1000;
this.lastFrameTime = now;
}
async render() {
this.updateTimeDelta();
const viewMatrixData = this.camera.getViewMatrix();
const projectionMatrixData = Matrix4.createProjectionMatrix( this.camera, this.canvas )
const viewProjectionMatrix = Matrix4.multiply( projectionMatrixData, viewMatrixData );
const cameraWorldMatrix = Matrix4.invert( viewMatrixData );
const cameraPosition = Matrix4.getColumn( cameraWorldMatrix, 3 );
this.renderShader.setVariable( "viewProjectionMatrix", viewProjectionMatrix );
this.renderShader.setVariable( "cameraPosition", cameraPosition );
this.renderShader.renderToCanvas( this.vertexCount, 74, 0 );
this.frameCount++;
requestAnimationFrame( this.render.bind( this ) );
}
async loadJSON( pathName ) {
const response = await fetch( pathName );
if ( !response.ok ){
throw new Error( `Failed to load JSON: ${ pathName }` );
}
return await response.json();
}
}

Demos/Texture/demo.json Executable file

File diff suppressed because it is too large.

Demos/Texture/index.html Normal file

@@ -0,0 +1,181 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>10,000 GPU Sphere Collision Demo</title>
<base href="Texture/" />
<link rel="stylesheet" href="./style/main.css" />
<script type="module">
import { ParticleSimulation } from "./demo.js";
var particleSimulation = new ParticleSimulation();
var useWebWorker = false;
const canvas = document.querySelector(".mainCanvas");
particleSimulation.setCanvas( canvas );
resizeCanvasToDisplaySize( canvas );
var worker;
if ( !useWebWorker ) {
await particleSimulation.setup( canvas, canvas.width, canvas.height, true );
console.log("document.bufferMap", document.bufferMap);
} else if ( useWebWorker ) {
worker = new Worker("../../framework/GpuWorker.js", { type: "module" });
worker.onmessage = function ( event ) {
console.log("From worker:", event.data);
};
const offscreen = canvas.transferControlToOffscreen();
worker.addEventListener("error", function ( event ) {
console.error( "Worker failed:",
event.message, "at",
event.filename + ":" +
event.lineno + ":" +
event.colno );
});
var request = {
method: "setup",
canvas: offscreen,
width: canvas.width,
height: canvas.height
};
worker.postMessage( request, [offscreen] );
}
var events = new Array( "mousemove", "mousedown", "mouseup", "wheel" );
for ( var i = 0; i < events.length; i++ ) {
let eventName = events[i];
console.log("createEvent", eventName);
canvas.addEventListener( eventName, function ( event ) {
event.preventDefault();
const rect = canvas.getBoundingClientRect();
const x = event.clientX - rect.left;
const y = event.clientY - rect.top;
var request = {
method: eventName,
clientX: x,
clientY: y,
deltaY: event.deltaY
};
if ( useWebWorker ) {
worker.postMessage( request );
} else {
particleSimulation.eventManager[ eventName ]( request );
}
});
}
document.querySelector("#resetParticlesButton").addEventListener("click", function () {
var request = {
method: "resetParticlePositions"
};
if ( useWebWorker ) {
worker.postMessage( request );
} else {
particleSimulation.resetParticlePositions();
}
});
function resizeCanvasToDisplaySize( canvas ) {
const width = window.innerWidth;
const height = window.innerHeight;
var request = {
method: "resize",
width: width,
height: height
};
if ( useWebWorker ) {
worker.postMessage( request );
} else {
particleSimulation.eventManager.resize( request );
}
}
window.addEventListener( "resize", function () {
resizeCanvasToDisplaySize( canvas );
});
</script>
</head>
<body>
<div id="controlPanel">
<div class="inputRow">
<button id="resetParticlesButton">Reset Spheres</button>
</div>
</div>
<canvas class="mainCanvas" width="1000" height="1000"></canvas>
</body>
</html>


@@ -0,0 +1,114 @@
html, body {
margin: 0;
padding: 0;
height: 100%;
overflow: hidden;
background:#111111;
}
canvas {
width: 100vw;
height: 100vh;
display: block;
padding: 0;
margin: 0;
}
#controlPanel {
position: absolute;
top: 10px;
left: 10px;
display: flex;
flex-direction: column;
gap: 12px;
padding: 20px;
background: rgba(30, 30, 30, 0.6);
backdrop-filter: blur(18px);
-webkit-backdrop-filter: blur(18px);
border-radius: 14px;
box-shadow:
0 4px 12px rgba(0, 0, 0, 0.5),
0 0 0 1px rgba(255, 255, 255, 0.05);
z-index: 1000;
box-sizing: border-box;
}
.inputRow {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
width: 100%;
}
.inputRow label {
color: #ccc;
font-size: 14px;
white-space: nowrap;
width: 70px;
}
.inputRow button,
.inputRow input {
flex-grow: 1;
font-size: 14px;
font-weight: 600;
color: #f0f0f5;
background: rgba(28, 28, 30, 0.9);
border: none;
border-radius: 8px;
padding: 8px 10px;
cursor: pointer;
box-shadow:
0 1px 3px rgba(0, 0, 0, 0.4),
0 0 6px rgba(28, 28, 30, 0.5);
transition: background-color 0.2s ease, box-shadow 0.2s ease;
}
.inputRow button:hover {
background: rgba(40, 40, 44, 0.95);
}
.inputRow input {
background: rgba(20, 20, 20, 0.8);
border: 1px solid rgba(255, 255, 255, 0.1);
outline: none;
box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.6);
}
.inputRow select {
flex-grow: 1;
font-size: 14px;
font-weight: 600;
color: #f0f0f5;
background: rgba(28, 28, 30, 0.9);
border: none;
border-radius: 8px;
padding: 8px 10px;
cursor: pointer;
box-shadow:
0 1px 3px rgba(0, 0, 0, 0.4),
0 0 6px rgba(28, 28, 30, 0.5);
transition: background-color 0.2s ease, box-shadow 0.2s ease;
appearance: none; /* Remove default arrow */
-webkit-appearance: none;
-moz-appearance: none;
background-image:
linear-gradient(45deg, transparent 50%, #f0f0f5 50%),
linear-gradient(135deg, #f0f0f5 50%, transparent 50%);
background-position:
calc(100% - 20px) calc(50% - 3px),
calc(100% - 15px) calc(50% - 3px);
background-size: 5px 5px;
background-repeat: no-repeat;
}
.inputRow select:hover {
background: rgba(40, 40, 44, 0.95);
}
.inputRow option {
background: rgba(28, 28, 30, 0.95);
color: #f0f0f5;
}

Binary file not shown. Size: 447 KiB

Binary file not shown. Size: 1.2 MiB

Binary file not shown. Size: 5.0 KiB

@@ -0,0 +1,114 @@
# Simplifying WebGPU with This Framework
WebGPU's native API is complex and verbose. This framework reduces that complexity by providing simple classes and methods to manage shaders, buffers, and execution. Instead of hand-writing low-level GPU commands, you focus on your own logic.
## 1. Loading and Using a Shader
**Without Framework:**
```
// Manual setup of device, pipeline, bind groups, and command encoding. Verbose and error-prone:
const shaderModule = device.createShaderModule({ code: wgslSource });
const pipeline = device.createComputePipeline({ ... });
// Setup bind groups and command encoder manually
```
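To make the contrast concrete, the sketch below shows roughly what a single compute dispatch looks like against the raw API. It assumes `wgslSource` already holds the WGSL text and uses an illustrative entry point name and workgroup count; it is not code from this framework.
```
// Acquire an adapter and device
const adapter = await navigator.gpu.requestAdapter();
const device = await adapter.requestDevice();

// Compile the shader and build a compute pipeline
const module = device.createShaderModule({ code: wgslSource });
const pipeline = device.createComputePipeline({
    layout: "auto",
    compute: { module: module, entryPoint: "main" }
});

// Create a storage buffer and upload the input data
const input = new Float32Array([1, 2, 3]);
const buffer = device.createBuffer({
    size: input.byteLength,
    usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST
});
device.queue.writeBuffer(buffer, 0, input);

// Wire the buffer into the pipeline's first bind group by index
const bindGroup = device.createBindGroup({
    layout: pipeline.getBindGroupLayout(0),
    entries: [{ binding: 0, resource: { buffer: buffer } }]
});

// Encode and submit one dispatch of 64 workgroups
const encoder = device.createCommandEncoder();
const pass = encoder.beginComputePass();
pass.setPipeline(pipeline);
pass.setBindGroup(0, bindGroup);
pass.dispatchWorkgroups(64);
pass.end();
device.queue.submit([encoder.finish()]);
```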
**With Framework:**
```
const shader = new Shader( "myComputeShader.wgsl" );
await shader.compile();
shader.setVariable( "someUniform", 42 );
shader.execute( 64 ); // runs compute with 64 workgroups
```
The framework loads, compiles, sets uniforms, and dispatches the compute shader with simple method calls.
## 2. Setting Buffers
**Without Framework:**
```
// Create the GPU buffer, write the data, and build a bind group manually
```
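For reference, a minimal native version of this step might look like the sketch below; the usage flags, binding index, and the surrounding `device` and `pipeline` objects are assumed for illustration:
```
// Create the buffer with its contents mapped at creation, then copy and unmap
const input = new Float32Array([1, 2, 3]);
const gpuBuffer = device.createBuffer({
    size: input.byteLength,
    usage: GPUBufferUsage.STORAGE | GPUBufferUsage.COPY_DST,
    mappedAtCreation: true
});
new Float32Array(gpuBuffer.getMappedRange()).set(input);
gpuBuffer.unmap();

// The buffer still has to be attached to a bind group by numeric binding index
const bindGroup = device.createBindGroup({
    layout: pipeline.getBindGroupLayout(0),
    entries: [{ binding: 0, resource: { buffer: gpuBuffer } }]
});
```
The framework's `setBuffer()` call replaces this explicit binding-index bookkeeping with a lookup by the buffer's name.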
**With Framework:**
```
const dataBuffer = gpu.createBuffer( new Float32Array([1, 2, 3]) );
shader.setBuffer( "inputBuffer", dataBuffer );
```
Buffers are bound by name with a simple call.
## 3. Rendering to Canvas
**Without Framework:**
```
// Create a render pipeline, set vertex buffers, and encode draw commands manually
```
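For comparison, even a minimal native draw involves configuring the canvas context, building a render pipeline with an explicit vertex layout, and encoding a render pass by hand. In the sketch below, `module`, `vertexBuffer`, `vertexCount`, and the shader entry point names are assumed placeholders:
```
// Configure the canvas for WebGPU presentation
const context = canvasElement.getContext("webgpu");
const format = navigator.gpu.getPreferredCanvasFormat();
context.configure({ device: device, format: format, alphaMode: "opaque" });

// Build a render pipeline with an explicit vertex layout
const pipeline = device.createRenderPipeline({
    layout: "auto",
    vertex: {
        module: module,
        entryPoint: "vs_main",
        buffers: [{
            arrayStride: 12, // three 32-bit floats per vertex
            attributes: [{ shaderLocation: 0, offset: 0, format: "float32x3" }]
        }]
    },
    fragment: {
        module: module,
        entryPoint: "fs_main",
        targets: [{ format: format }]
    },
    primitive: { topology: "triangle-list" }
});

// Encode one frame: clear, bind, draw, submit
const encoder = device.createCommandEncoder();
const pass = encoder.beginRenderPass({
    colorAttachments: [{
        view: context.getCurrentTexture().createView(),
        loadOp: "clear",
        storeOp: "store",
        clearValue: { r: 0, g: 0, b: 0, a: 1 }
    }]
});
pass.setPipeline(pipeline);
pass.setVertexBuffer(0, vertexBuffer);
pass.draw(vertexCount);
pass.end();
device.queue.submit([encoder.finish()]);
```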
**With Framework:**
```
shader.setCanvas( canvasElement );
shader.setAttributes( vertexData );
shader.execute();
```
The framework handles pipeline creation, drawing commands, and presentation automatically.
## 4. Camera and Matrix Utilities
**Example:**
```
const camera = new Camera();
camera.position.set( 0, 1, 5 );
camera.updateViewMatrix();
shader.setVariable( "viewMatrix", camera.viewMatrix );
```
The framework provides built-in classes for common math tasks.
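A typical per-frame use of these helpers, following the render loop in `Demos/Texture/demo.js`, combines the camera's view matrix with a projection matrix and hands the results to the shader by name:
```
// Build the combined view-projection matrix for this frame
const viewMatrix = camera.getViewMatrix();
const projectionMatrix = Matrix4.createProjectionMatrix( camera, canvas );
const viewProjectionMatrix = Matrix4.multiply( projectionMatrix, viewMatrix );

// Recover the camera's world-space position from the inverted view matrix
const cameraWorldMatrix = Matrix4.invert( viewMatrix );
const cameraPosition = Matrix4.getColumn( cameraWorldMatrix, 3 );

// Upload both as named shader variables
shader.setVariable( "viewProjectionMatrix", viewProjectionMatrix );
shader.setVariable( "cameraPosition", cameraPosition );
```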
## 5. Event Handling
**Example:**
```
const events = new EventManager( canvasElement );
events.on( "mouseMove", (event) => {
camera.rotate( event.deltaX, event.deltaY );
shader.setVariable( "viewMatrix", camera.viewMatrix );
shader.execute();
});
```
Separates input handling cleanly from GPU logic.
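The same idea carries over to the Web Worker path used by the demos: `Demos/Texture/index.html` forwards DOM events to the worker as plain `{ method, ... }` messages, and the worker-side `Controller` dispatches each one by method name. A condensed version of that flow:
```
// Main thread: translate a DOM event into a method-call message
canvas.addEventListener( "mousemove", function ( event ) {
    const rect = canvas.getBoundingClientRect();
    worker.postMessage( {
        method: "mousemove",
        clientX: event.clientX - rect.left,
        clientY: event.clientY - rect.top
    } );
} );

// Worker thread: look up the requested method on the controller and invoke it
self.onmessage = function ( event ) {
    const request = event.data;
    if ( typeof controller[ request.method ] === "function" ) {
        controller[ request.method ]( request );
    }
};
```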
## Summary Table
| Task | WebGPU Native API | This Framework |
| --- | --- | --- |
| Load and compile shader | Many steps, manual setup | One line with `new Shader()` + `compile()` |
| Set uniforms | Define bind groups and layouts | Simple `setVariable()` calls |
| Bind buffers | Manual buffer creation and binding | `setBuffer()` by name |
| Execute compute | Command encoder and dispatch calls | `execute(workgroupCount)` method |
| Render to canvas | Complex pipeline and draw calls | `setCanvas()`, `setAttributes()`, `execute()` |
| Handle math and camera | External libs or manual math | Built-in Matrix4, Camera classes |
| Input handling | Manual event listeners | `EventManager` handles input cleanly |
---
This framework hides the low-level complexity behind a clean, simple API that accelerates WebGPU development and makes GPU programming accessible and maintainable.