first commit

2025-12-31 14:22:45 +01:00
commit c78a860098
73 changed files with 30137 additions and 0 deletions

387
AGENTS.md Normal file

@@ -0,0 +1,387 @@
Below is a clean, concise **AGENTS.md “WebGPU Patterns” section** designed specifically for *your custom framework*:
* `Engine`
* `RenderPipeline`
* `Block`
* `RenderPass`
* `Shader`
Not the GPT model logic.
Not embeddings or inference.
Only the architectural usage patterns you want Codex to follow.
You can **copy this directly** into your existing AGENTS.md under “Example Patterns”, or use it to replace that section entirely.
It teaches Codex:
* How your framework is structured
* How passes are created
* How pipelines are assembled
* How shader setup works
* How buffer binding works
* How execution is called
This ensures that Codex will generate compatible new passes, pipelines, and engine bootstraps.
---
# **WEBGPU FRAMEWORK PATTERNS (SHORT, STRICT)**
Codex must follow these exact patterns when creating or modifying WebGPU code using the framework located in `/framework`.
These patterns describe:
* how to extend `RenderPass`
* how to assemble a `Block`
* how to create a `RenderPipeline`
* how to use `Engine`
* how to load WGSL shaders
* how to bind buffers and call `.execute()`
No ML logic included.
Only architectural usage patterns.
---
# **Paths**
```
/framework/RenderPass.js
/framework/WebGpu.js
/framework/Block.js
/framework/EventManager.js
/framework/Camera.js
/framework/Matrix4.js
/framework/Vector3.js
```
# **1. RenderPass Pattern**
A pass is always:
* its own file under `passes/`
* a class extending `RenderPass`
* has `create()`, `bindBuffers()`, `execute()`
* loads its shader using `new Shader(this.device)`
* sets WGSL variables using `.setVariable()`
```javascript
import { RenderPass } from "framework/RenderPass.js";
import Shader from "framework/WebGpu.js";
export class ExamplePass extends RenderPass {
async create( ) {
this.shader = new Shader( this.device );
await this.shader.setup( "shaders/example.wgsl" );
}
bindBuffers( ) {
var inputData = this.pipeline.memory.inputData;
// setVariable accepts plain JavaScript arrays as well as typed arrays.
this.shader.setVariable(
"input1",
[1,2,3,4,5]
);
this.shader.setVariable(
"input2",
new Float32Array( [ 1,2,3,4 ] )
);
const texture = this.shader.createTextureFromData(
width,      /* texture width in texels */
height,     /* texture height in texels */
[ 1, 2, 3 /* ...flattened texel data... */ ]
);
this.shader.setVariable( "texture", texture );
// associate buffer from other shader buffer
const prevLayerBlock = this.pipeline.getBlockByName("blockName");
const pass = prevLayerBlock.getPass("passName");
const buffer = pass.shader.getBuffer("outputVector");
this.shader.setBuffer("inputHiddenState", buffer);
}
async execute( ) {
await this.shader.execute( 1 );
}
}
```
Rules Codex must preserve:
* One blank line between imports.
* Tab-aligned imports before each `from`.
* No arrow functions.
* `await this.shader.setup( path )`.
* `this.shader.setVariable( name, data )` only in `bindBuffers()` or incremental calls.
* `execute()` always calls `this.shader.execute( workgroups )`.
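For reference, a pass that consumes the output of an earlier pass follows the same shape. The sketch below is illustrative, not framework code: the class name, the WGSL path, and the buffer names (`outputData`, `sourceData`) are placeholders that must match the actual WGSL bindings.
```javascript
import { RenderPass } from "framework/RenderPass.js";
import Shader from "framework/WebGpu.js";

export class ConsumePass extends RenderPass {
	async create( ) {
		this.shader = new Shader( this.device );
		await this.shader.setup( "shaders/consume.wgsl" ); /* placeholder WGSL path */
	}
	bindBuffers( ) {
		/* reuse the GPU buffer produced by ExamplePass in the "example" block */
		const exampleBlock = this.pipeline.getBlockByName( "example" );
		const examplePass = exampleBlock.getPass( "Example" );
		const sharedBuffer = examplePass.shader.getBuffer( "outputData" ); /* must match a buffer in example.wgsl */
		this.shader.setBuffer( "sourceData", sharedBuffer );
	}
	async execute( ) {
		await this.shader.execute( 1 );
	}
}
```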
---
# **2. Block Pattern**
A block is a logical grouping of passes.
Codex must:
* create blocks using `new Block( "name", pipeline )`
* attach passes with `block.addPass( "Name", instance )`
* add block to pipeline with `pipeline.addBlock( block )`
```
import { Block } from "/framework/Block.js";
import { ExamplePass } from "../passes/ExamplePass.js";
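/* inside a RenderPipeline's create( ): "this" below is the pipeline instance */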
const block = new Block( "example", this );
const pass = new ExamplePass( );
block.addPass( "Example", pass );
this.addBlock( block );
```
---
# **3. Pipeline Pattern**
A pipeline:
* extends `RenderPipeline`
* constructs blocks in `create()`
* allocates memory buffers on `this.memory`
* always calls `await super.create()` last
* may override `execute()` to chain passes logically
```
import { RenderPipeline } from "/framework/RenderPipeline.js";
import { Block } from "/framework/Block.js";
import { ExamplePass } from "../passes/ExamplePass.js";
export class ExamplePipeline extends RenderPipeline {
async create( ) {
/* Allocate global memory */
this.memory.input = new Float32Array( 1024 );
this.memory.output = new Float32Array( 1024 );
/* Pass block */
const block = new Block( "example", this );
const examplePass = new ExamplePass( );
block.addPass( "Example", examplePass );
this.addBlock( block );
/* Build GPU resources (mandatory) */
await super.create();
}
async execute( ) {
const block = this.blocks[0];
const pass = block.getPass( "Example" );
await pass.execute();
}
}
```
# **3.1 Camera and EventManager Pattern**
```
import Matrix4 from "framework/Matrix4.js";
import Vector3 from "framework/Vector3.js";
import Camera from "framework/Camera.js";

this.camera = new Camera( [0, 0, 1115], [0, -.3, 0], [0, 1, 0] );
this.eventManager.setup( canvas, this.camera );
const viewMatrixData = this.camera.getViewMatrix();
const projectionMatrixData = Matrix4.createProjectionMatrix( this.camera, this.canvas )
const viewProjectionMatrix = Matrix4.multiply( projectionMatrixData, viewMatrixData );
const cameraWorldMatrix = Matrix4.invert( viewMatrixData );
const cameraPosition = Matrix4.getColumn( cameraWorldMatrix, 3 );
this.renderShader.setVariable( "viewProjectionMatrix", viewProjectionMatrix );
this.renderShader.setVariable( "cameraPosition", cameraPosition );
```
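The `this.eventManager` used above is assumed to be an instance of the framework's `EventManager` (the default export of `framework/eventManager.js`). A minimal sketch of wiring it up:
```javascript
import EventManager from "framework/eventManager.js";

this.eventManager = new EventManager( );
this.eventManager.setup( canvas, this.camera );
this.eventManager.registerEventListeners( ); /* mouse drag drives camera.rotate, wheel drives camera.zoom */
```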
Codex must:
* allocate buffers on `this.memory.*`
* assemble blocks before `super.create()`
* not reorder pipeline structure automatically
---
# **4. Engine Usage Pattern**
The entrypoint must follow this shape whenever Codex creates a new WebGPU project:
```
import { Engine } from "/framework/Engine.js";
import { ExamplePipeline } from "./pipelines/ExamplePipeline.js";
async function main( ) {
const adapter = await navigator.gpu.requestAdapter( );
const device = await adapter.requestDevice( );
const engine = new Engine( device );
/* Pipeline */
const pipeline = new ExamplePipeline( engine );
pipeline.memory.set( "inputData", new Float32Array( 1024 ) );
await pipeline.create();
await pipeline.bindBuffers();
await pipeline.execute();
}
main( );
```
Codex must always:
* request adapter → requestDevice
* create `new Engine(device)`
* instantiate pipeline with `(engine)`
* set memory with `pipeline.memory.set( key, value )`
* call `await pipeline.create()`
* call `await pipeline.bindBuffers()`
* call `await pipeline.execute()`
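If the pipeline also renders to the canvas, drawing goes through the framework's `RenderSystem`, `Scene`, and `Mesh` classes. A minimal sketch, assuming the `engine` from the example above, a `canvas` DOM element, and a `renderPass` whose shader can render (these names are illustrative):
```javascript
import { Scene } from "/framework/Scene.js";
import { Mesh } from "/framework/Mesh.js";

const renderSystem = engine.createRenderSystem( canvas );
const scene = new Scene( );

const mesh = new Mesh( );
mesh.addShader( renderPass.shader ); /* shader exposed by one of the pipeline's passes */
scene.addMesh( mesh );

renderSystem.render( scene ); /* clears the canvas, draws every mesh, submits the frame */
```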
---
# **5. Shader Usage Pattern**
Codex must use the Shader class exactly like this:
```
this.shader = new Shader( this.device );
await this.shader.setup( "shaders/example.wgsl" );
this.shader.setVariable( "bufferA", floatArray );
await this.shader.execute( workgroups );
```
Always:
* One shader instance per pass
* WGSL path passed to `.setup()`
* `.setVariable()` before `.execute()`
* No arrow functions
* Spaces inside parentheses
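To read results back for verification, the Shader class's `debugBuffer( name )` helper (used by the ocean demo's debug tooling) resolves with the buffer contents. A minimal sketch, assuming `"bufferA"` is a storage buffer bound above:
```javascript
const data = await this.shader.debugBuffer( "bufferA" );
console.log( "bufferA readback:", data.length, "values, first =", data[ 0 ] );
```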
---
# **6. Memory Allocation Pattern**
All buffers are allocated as plain typed arrays under:
```
this.memory.X = new Float32Array( size );
```
Codex must never use ArrayBuffer directly unless required.
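For context, `Memory.set()` (see `framework/Memory.js`) stores the value in an internal map and mirrors it onto a plain property, while direct property assignment only sets the property. A minimal sketch of both styles:
```javascript
this.memory.set( "inputData", new Float32Array( 1024 ) ); /* registered in the map and as a property */
const viaProperty = this.memory.inputData;                /* mirrored property access */
const viaGet = this.memory.get( "inputData" );            /* same typed array */

this.memory.output = new Float32Array( 1024 );            /* property only; get( "output" ) will not find it */
```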
---
# **7. Execution Flow Rule**
Every pipeline execution must follow:
1. `await pipeline.create()`
2. `await pipeline.bindBuffers()`
3. `await pipeline.execute()`
Never run `execute()` before `create()`.
Never omit `bindBuffers()` if a pass needs it.
---
# **8. Codex MUST NOT Generate**
* Raw WebGPU API calls (`device.createBuffer`) unless inside Shader class
* Inline WGSL (WGSL always lives in separate `.wgsl` files)
* Arrow functions
* Inline callbacks
* Different architecture than this:
* Engine
* Pipeline
* Block
* RenderPass
* Shader

104
README.md Normal file

@@ -0,0 +1,104 @@
# WebGPU FFT Ocean Demo
An interactive WebGPU demo that simulates a tiled ocean surface using fast Fourier transforms (FFT).
The project renders a dynamic wave field in real time and exposes a small framework for building WebGPU pipelines, passes and scenes.
## Features
- Realtime ocean surface simulation using 2D FFT.
- Tiled ocean mesh with adjustable resolution and tiling count.
- Multiple shading modes (realistic, lighting, normals, solid color, height debug).
- Wireframe / solid rendering modes.
- Pause / single-step controls for the simulation.
- CPU vs GPU validation helpers for the FFT and spectrum (accessible from the browser console).
## Requirements
- Node.js 18+ (for the simple static file server).
- A browser with WebGPU support:
- Recent Chrome / Edge with WebGPU enabled, or
- Chrome Canary / other WebGPU-capable build.
## Getting Started
1. Install dependencies (only used by some tools):
```bash
npm install
```
2. Start the local server:
```bash
node server.js
```
3. Open the demo in your browser:
- Navigate to: `http://localhost:3003/index.html`
If WebGPU is not available, the status label in the bottom-left of the page will tell you that WebGPU is not supported.
## Controls
All controls are in the panel at the top-right of the page:
- **Wave height** – overall amplitude of the waves.
- **Wave length** – scales the wave spectrum (larger values = longer waves).
- **Resolution** – underlying mesh resolution (32 → 2048).
- **Wireframe mode** – toggle wireframe rendering.
- **Pause waves** – pause / resume the simulation.
- **Next frame** – advance the simulation by one frame while paused.
- **Shading** – choose between realistic, lighting, normals, solid color, and height debug modes.
- **Dump height (debug)** – logs statistics about the current height field buffer to the console.
Camera:
- **Drag** on the canvas to orbit the camera.
- **Scroll** to zoom in/out.
On first load the camera auto-rotates until you interact with the canvas.
## Debug / Test Helpers
The app exposes a couple of helpers on `window` for validating the GPU simulation against CPU reference implementations:
- `window.testFft()`
- Builds a deterministic 2D input field.
- Runs the GPU FFT passes.
- Computes a CPU 2D FFT.
- Logs max and RMS error between GPU and CPU results.
- `window.testSpectrum()`
- Uses the initial spectrum buffers.
- Runs a CPU reference spectrum computation.
- Runs the GPU spectrum pass.
- Logs max and RMS error between GPU and CPU spectra.
You can call these from the browser devtools console while the app is running.
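For example, paste the following into the devtools console while the simulation is running (both helpers are `async` and log their error statistics):
```javascript
await window.testFft();      // GPU vs CPU 2D FFT, logs max / RMS error
await window.testSpectrum(); // GPU vs CPU spectrum pass, logs max / RMS error
```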
## Project Structure
- `index.html` – minimal HTML shell and UI controls.
- `main.js` – application entrypoint; wires up the engine, pipelines, scene, and render loop.
- `events.js` – encapsulates DOM element lookup and all UI / input event handlers.
- `server.js` – simple Node static file server used during development.
WebGPU framework and rendering:
- `framework/` – generic engine / scene / math utilities and WebGPU plumbing.
- `pipelines/` – high-level render and compute pipelines (e.g. `OceanPipeline`).
- `passes/` – individual compute / render passes used by pipelines.
- `shaders/` – WGSL shader programs (FFT, spectrum, rendering, etc.).
Testing / utilities:
- `tests/OceanTests.js` – CPU FFT and spectrum reference implementations wrapped in a small test helper class.
- `resources/`, `tools/` – additional assets and helper scripts.
## Notes
- The project is written as native ES modules (`type: "module"` in `package.json`).
- All WebGPU usage is routed through the small framework in `framework/` rather than direct `GPUDevice` calls scattered throughout the app.
- If you change ports or server configuration, update how you access the app accordingly (the default is port `3003`).

479
events.js Normal file

@@ -0,0 +1,479 @@
export class events {
getDomElements( ) {
const statusElement = document.getElementById( "status" );
const canvas = document.getElementById( "gfx" );
const waveHeightSlider = document.getElementById( "waveHeight" );
const waveHeightValue = document.getElementById( "waveHeightValue" );
const wavelengthSlider = document.getElementById( "wavelengthSlider" );
const wavelengthValue = document.getElementById( "wavelengthValue" );
const resolutionSlider = document.getElementById( "resolutionSlider" );
const resolutionValue = document.getElementById( "resolutionValue" );
const tilingSlider = document.getElementById( "tilingSlider" );
const tilingValue = document.getElementById( "tilingValue" );
const wireframeToggle = document.getElementById( "wireframeToggle" );
const pauseToggle = document.getElementById( "pauseToggle" );
const shadingModeSelect = document.getElementById( "shadingMode" );
const stepButton = document.getElementById( "stepButton" );
const dumpHeightButton = document.getElementById( "dumpHeightButton" );
return {
statusElement,
canvas,
waveHeightSlider,
waveHeightValue,
wavelengthSlider,
wavelengthValue,
resolutionSlider,
resolutionValue,
tilingSlider,
tilingValue,
wireframeToggle,
pauseToggle,
shadingModeSelect,
stepButton,
dumpHeightButton
};
}
setup( ) {
const domElements = this.getDomElements( );
const waveHeightSlider = domElements.waveHeightSlider;
const waveHeightValue = domElements.waveHeightValue;
const wavelengthSlider = domElements.wavelengthSlider;
const wavelengthValue = domElements.wavelengthValue;
const resolutionSlider = domElements.resolutionSlider;
const resolutionValue = domElements.resolutionValue;
const tilingSlider = domElements.tilingSlider;
const tilingValue = domElements.tilingValue;
const wireframeToggle = domElements.wireframeToggle;
const pauseToggle = domElements.pauseToggle;
const shadingModeSelect = domElements.shadingModeSelect;
const stepButton = domElements.stepButton;
const dumpHeightButton = domElements.dumpHeightButton;
const getPrimaryPipeline = this.getPrimaryPipeline;
const getPipelines = this.getPipelines;
const rebuildScene = this.rebuildScene;
const getScene = this.getScene;
const getRenderSystem = this.getRenderSystem;
if ( this.resizeCanvasToDisplay ) {
window.addEventListener( "resize", function( ) {
this.resizeCanvasToDisplay( );
}.bind( this ) );
}
const self = this;
if ( waveHeightSlider && waveHeightValue ) {
const primary = getPrimaryPipeline( );
waveHeightSlider.value = primary ? String( primary.heightScale ) : "33";
waveHeightValue.textContent = primary ? primary.heightScale.toFixed( 0 ) : "33";
waveHeightSlider.addEventListener( "input", function( event ) {
const target = event.target;
const value = parseFloat( target.value );
if ( isNaN( value ) ) {
return;
}
const pipelines = getPipelines( );
for ( const p of pipelines ) {
p.setHeightScale( value );
}
waveHeightValue.textContent = value.toFixed( 0 );
} );
}
if ( wavelengthSlider && wavelengthValue ) {
const primary = getPrimaryPipeline( );
const initialWavelength = primary && typeof primary.wavelengthScale === "number"
? primary.wavelengthScale
: 1.0;
wavelengthSlider.value = String( initialWavelength );
wavelengthValue.textContent = initialWavelength.toFixed( 2 );
wavelengthSlider.addEventListener( "input", function( event ) {
const target = event.target;
const value = parseFloat( target.value );
if ( isNaN( value ) || value <= 0 ) {
return;
}
if ( self.setCurrentWavelengthScale ) {
self.setCurrentWavelengthScale( value );
}
const pipelines = getPipelines( );
for ( const p of pipelines ) {
if ( typeof p.setWavelengthScale === "function" ) {
p.setWavelengthScale( value );
}
}
wavelengthValue.textContent = value.toFixed( 2 );
} );
}
if ( wireframeToggle ) {
const primary = getPrimaryPipeline( );
wireframeToggle.checked = primary ? primary.renderMode === "wireframe" : false;
wireframeToggle.addEventListener( "change", function( event ) {
const target = event.target;
const pipelines = getPipelines( );
if ( target.checked ) {
for ( const p of pipelines ) {
p.setRenderMode( "wireframe" );
}
} else {
for ( const p of pipelines ) {
p.setRenderMode( "solid" );
}
}
rebuildScene( );
} );
}
if ( pauseToggle ) {
pauseToggle.checked = false;
pauseToggle.addEventListener( "change", function( event ) {
const target = event.target;
const pipelines = getPipelines( );
for ( const p of pipelines ) {
p.setPaused( target.checked );
}
} );
}
if ( shadingModeSelect ) {
shadingModeSelect.value = "lighting";
shadingModeSelect.addEventListener( "change", function( event ) {
const target = event.target;
const value = target.value;
const pipelines = getPipelines( );
for ( const p of pipelines ) {
p.setShadingMode( value );
}
} );
}
if ( stepButton ) {
stepButton.addEventListener( "click", function( ) {
const pipelines = getPipelines( );
for ( const p of pipelines ) {
p.stepOnce( 1 / 60 );
}
const scene = getScene( );
const renderSystem = getRenderSystem( );
if ( scene && renderSystem ) {
renderSystem.render( scene );
}
} );
}
if ( dumpHeightButton ) {
dumpHeightButton.addEventListener( "click", function( ) {
const pipeline = getPrimaryPipeline( );
if ( !pipeline ) {
return;
}
const block = pipeline.getBlockByName( "ocean" );
if ( !block ) {
return;
}
const colPass = block.getPass( "ColFFT" );
if ( !colPass || !colPass.shader ) {
return;
}
colPass.shader.debugBuffer( "heightField" )
.then( function( data ) {
if ( !data || !data.length ) {
console.log( "heightField debug: empty buffer" );
return;
}
let min = data[ 0 ];
let max = data[ 0 ];
let sum = 0;
for ( let i = 0; i < data.length; i++ ) {
const v = data[ i ];
if ( v < min ) min = v;
if ( v > max ) max = v;
sum += v;
}
const avg = sum / data.length;
console.log( "heightField debug:",
"size =", data.length,
"min =", min,
"max =", max,
"avg =", avg
);
} )
.catch( function( error ) {
console.error( "heightField debug failed:", error );
} );
} );
}
if ( resolutionSlider && resolutionValue ) {
const applyResolutionFromSlider = function( ) {
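// Snap the slider value to a power-of-two mesh resolution (32–2048), then rebuild the scene at that size.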
const raw = parseFloat( resolutionSlider.value );
if ( isNaN( raw ) ) {
return;
}
let targetSize = 64;
if ( raw < 48 ) {
targetSize = 32;
} else if ( raw < 96 ) {
targetSize = 64;
} else if ( raw < 192 ) {
targetSize = 128;
} else if ( raw < 384 ) {
targetSize = 256;
} else if ( raw < 768 ) {
targetSize = 512;
} else if ( raw < 1536 ) {
targetSize = 1024;
} else {
targetSize = 2048;
}
resolutionSlider.value = String( targetSize );
resolutionValue.textContent = String( targetSize );
if ( self.setCurrentMeshResolution ) {
self.setCurrentMeshResolution( targetSize );
}
rebuildScene( );
};
applyResolutionFromSlider( );
resolutionSlider.addEventListener( "input", function( ) {
applyResolutionFromSlider( );
} );
}
if ( tilingSlider && tilingValue ) {
const applyTilingFromSlider = function( ) {
const raw = parseInt( tilingSlider.value, 10 );
if ( isNaN( raw ) || raw < 1 ) {
return;
}
const tileRange = raw - 1;
tilingValue.textContent = String( raw );
if ( self.setCurrentTiling ) {
self.setCurrentTiling( raw );
}
rebuildScene( );
};
applyTilingFromSlider( );
tilingSlider.addEventListener( "input", function( ) {
applyTilingFromSlider( );
} );
}
if ( domElements.canvas && this.setAutoRotate ) {
domElements.canvas.addEventListener( "pointerdown", function( ) {
this.setAutoRotate( false );
}.bind( this ) );
}
}
}

76
framework/Block.js Normal file

@@ -0,0 +1,76 @@
import { Memory } from "./Memory.js";
export class Block {
parameters = {};
constructor(name, pipeline) {
this.name = name;
this.pipeline = pipeline;
this.id = -1;
this.passes = [];
this.passMap = new Map();
this.memory = new Memory("block");
}
getPreviousBlock() {
if (this.id === 0) return null;
return this.pipeline.blocks[this.id - 1];
}
addPass( name, passInstance ) {
if (this.passMap.has(name)) {
throw new Error(`Pass '${name}' already exists in block '${this.name}'.`);
}
passInstance.passName = name;
passInstance.indexInBlock = this.passes.length;
passInstance.pipeline = this.pipeline;
passInstance.block = this;
passInstance.device = this.pipeline.device;
if( this.layerIndex !== undefined ) {
passInstance.layerIndex = this.layerIndex;
}
this.passes.push(passInstance);
this.passMap.set(name, passInstance);
return passInstance;
}
getPass(name) {
return this.passMap.get(name);
}
getAllPasses() {
return this.passes;
}
getPreviousPass(passInstance) {
const idx = passInstance.indexInBlock;
if (idx <= 0) return null;
return this.passes[idx - 1];
}
setLayerIndex( layerIndex ) {
this.layerIndex = layerIndex;
for (var i = 0; i < this.passes.length; i++) {
this.passes[i].layerIndex = layerIndex;
}
}
}

83
framework/Camera.js Normal file

@@ -0,0 +1,83 @@
import Vector3 from "./Vector3.js";
import Matrix4 from "./Matrix4.js";
export default class Camera {
eye = new Vector3();
target = new Vector3();
up = new Vector3( 0, 1, 0 );
yaw = 0;
pitch = 0;
fovRadians = Math.PI / 4;
near = 0.1;
far = 3000.0;
distance = 10;
viewMatrix = new Float32Array( 16 );
constructor( eye = [0, 0, 5], target = [0, 0, 0], up = [0, 1, 0] ) {
this.eye = new Vector3( ...eye );
this.target = new Vector3( ...target );
this.up = new Vector3( ...up );
this.distance = Vector3.subtract( this.eye, this.target ).length();
this.viewMatrix = Matrix4.lookAt( this.eye, this.target, this.up );
}
update() {
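// Orbit update: convert yaw / pitch / distance (spherical coordinates) into an eye position around the target.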
const x = this.distance * Math.cos( this.pitch ) * Math.sin( this.yaw );
const y = this.distance * Math.sin( this.pitch );
const z = this.distance * Math.cos( this.pitch ) * Math.cos( this.yaw );
this.eye = new Vector3(
x + this.target.x,
y + this.target.y,
z + this.target.z
);
this.viewMatrix = Matrix4.lookAt( this.eye, this.target, this.up );
}
getViewMatrix() {
return this.viewMatrix;
}
rotate( deltaYaw, deltaPitch ) {
this.yaw += deltaYaw;
this.pitch -= deltaPitch;
const maxPitch = Math.PI / 2 - 0.01;
if ( this.pitch > maxPitch ) this.pitch = maxPitch;
if ( this.pitch < -maxPitch ) this.pitch = -maxPitch;
this.update();
}
zoom( delta ) {
this.distance += delta * 1;
if ( this.distance < 0.1 ) this.distance = 0.1;
this.update();
}
setTarget( target ) {
this.target = new Vector3( ...target );
this.update();
}
}

31
framework/Engine.js Normal file

@@ -0,0 +1,31 @@
import { Memory } from "./Memory.js";
import { RenderSystem } from "./RenderSystem.js";
export class Engine {
constructor( device ) {
this.device = device;
this.memory = new Memory( "engine" );
this.pipelines = [];
this.renderSystem = null;
}
addPipeline( p ) {
this.pipelines.push( p );
}
createRenderSystem( canvas ) {
this.renderSystem = new RenderSystem( this.device, canvas );
return this.renderSystem;
}
}

125
framework/Matrix4.js Normal file

@@ -0,0 +1,125 @@
import Vector3 from "./Vector3.js";
export default class Matrix4 {
static lookAt( eye, target, up ) {
const zAxis = Vector3.normalize( Vector3.subtract( eye, target ) );
const xAxis = Vector3.normalize( Vector3.cross( up, zAxis ) );
const yAxis = Vector3.cross( zAxis, xAxis );
return new Float32Array([
xAxis.x, yAxis.x, zAxis.x, 0,
xAxis.y, yAxis.y, zAxis.y, 0,
xAxis.z, yAxis.z, zAxis.z, 0,
-Vector3.dot( xAxis, eye ), -Vector3.dot( yAxis, eye ), -Vector3.dot( zAxis, eye ), 1,
]);
}
static getColumn( matrix, index ) {
const i = index * 4;
return new Vector3(
matrix[ i + 0 ],
matrix[ i + 1 ],
matrix[ i + 2 ]
);
}
static createProjectionMatrix( camera, canvas ) {
return Matrix4.perspective(
camera.fovRadians,
canvas.width / canvas.height,
camera.near,
camera.far
);
}
static invert( m ) {
const out = new Float32Array(16);
const m00 = m[0], m01 = m[1], m02 = m[2], m03 = m[3];
const m10 = m[4], m11 = m[5], m12 = m[6], m13 = m[7];
const m20 = m[8], m21 = m[9], m22 = m[10], m23 = m[11];
const m30 = m[12], m31 = m[13], m32 = m[14], m33 = m[15];
const a0 = m00 * m11 - m01 * m10;
const a1 = m00 * m12 - m02 * m10;
const a2 = m00 * m13 - m03 * m10;
const a3 = m01 * m12 - m02 * m11;
const a4 = m01 * m13 - m03 * m11;
const a5 = m02 * m13 - m03 * m12;
const b0 = m20 * m31 - m21 * m30;
const b1 = m20 * m32 - m22 * m30;
const b2 = m20 * m33 - m23 * m30;
const b3 = m21 * m32 - m22 * m31;
const b4 = m21 * m33 - m23 * m31;
const b5 = m22 * m33 - m23 * m32;
const det = a0 * b5 - a1 * b4 + a2 * b3 + a3 * b2 - a4 * b1 + a5 * b0;
if (det === 0) return null;
const invDet = 1 / det;
out[0] = ( m11 * b5 - m12 * b4 + m13 * b3) * invDet;
out[1] = (-m01 * b5 + m02 * b4 - m03 * b3) * invDet;
out[2] = ( m31 * a5 - m32 * a4 + m33 * a3) * invDet;
out[3] = (-m21 * a5 + m22 * a4 - m23 * a3) * invDet;
out[4] = (-m10 * b5 + m12 * b2 - m13 * b1) * invDet;
out[5] = ( m00 * b5 - m02 * b2 + m03 * b1) * invDet;
out[6] = (-m30 * a5 + m32 * a2 - m33 * a1) * invDet;
out[7] = ( m20 * a5 - m22 * a2 + m23 * a1) * invDet;
out[8] = ( m10 * b4 - m11 * b2 + m13 * b0) * invDet;
out[9] = (-m00 * b4 + m01 * b2 - m03 * b0) * invDet;
out[10] = ( m30 * a4 - m31 * a2 + m33 * a0) * invDet;
out[11] = (-m20 * a4 + m21 * a2 - m23 * a0) * invDet;
out[12] = (-m10 * b3 + m11 * b1 - m12 * b0) * invDet;
out[13] = ( m00 * b3 - m01 * b1 + m02 * b0) * invDet;
out[14] = (-m30 * a3 + m31 * a1 - m32 * a0) * invDet;
out[15] = ( m20 * a3 - m21 * a1 + m22 * a0) * invDet;
return out;
}
static perspective( fovRadians, aspect, near, far ) {
const f = 1.0 / Math.tan( fovRadians / 2 );
const nf = 1 / ( near - far );
return new Float32Array([
f / aspect, 0, 0, 0,
0, f, 0, 0,
0, 0, (far + near) * nf, -1,
0, 0, (2 * far * near) * nf, 0,
]);
}
static multiply( a, b ) {
const out = new Float32Array(16);
for ( let col = 0; col < 4; col++ ) {
for ( let row = 0; row < 4; row++ ) {
let sum = 0;
for ( let k = 0; k < 4; k++ ) {
// a is column-major: element at col k, row row => a[k*4 + row]
// b is column-major: element at col col, row k => b[col*4 + k]
sum += a[k * 4 + row] * b[col * 4 + k];
}
out[col * 4 + row] = sum;
}
}
return out;
}
}

49
framework/Measure.js Normal file

@@ -0,0 +1,49 @@
export default class Measure {
startTimes = {};
endTimes = {};
writeToPage = false;
element = false;
start ( label ) {
this.startTimes[ label ] = performance.now();
}
end ( label ) {
this.endTimes[ label ] = performance.now();
this.log( label );
}
getElapsed ( label ) {
if ( this.startTimes[ label ] === undefined || this.endTimes[ label ] === undefined ) {
throw new Error( "Start or end time missing for label: " + label );
}
return this.endTimes[ label ] - this.startTimes[ label ];
}
log ( label ) {
const elapsed = this.getElapsed( label );
if( this.writeToPage ) {
var p = document.createElement("p")
p.innerText = label + " took " + elapsed.toFixed(3) + " ms";
this.element.appendChild( p );
}
console.log( label + " took " + elapsed.toFixed(3) + " ms" );
}
}

25
framework/Memory.js Normal file

@@ -0,0 +1,25 @@
export class Memory {
constructor(scopeName = "") {
this.scopeName = scopeName;
this._map = new Map();
}
set(name, value) {
this._map.set(name, value);
this[name] = value; // property access
}
get(name) {
return this._map.get(name);
}
has(name) {
return this._map.has(name);
}
delete(name) {
this._map.delete(name);
delete this[name];
}
}

59
framework/Mesh.js Normal file

@@ -0,0 +1,59 @@
export class Mesh {
constructor( ) {
this.shaders = [];
this.instanceCount = 1;
}
addShader( shader, options = { } ) {
this.shaders.push( {
shader,
options
} );
return this;
}
setInstanceCount( count ) {
this.instanceCount = count;
return this;
}
draw( passEncoder ) {
for ( const entry of this.shaders ) {
const shader = entry.shader;
const opts = entry.options || { };
const instances = opts.instances != null ? opts.instances : this.instanceCount;
// When no explicit vertex count is provided, rely on the shader's indexCount
const vertexCount = opts.vertexCount != null ? opts.vertexCount : ( shader.indexCount || 0 );
if ( vertexCount <= 0 ) {
continue;
}
shader.encodeRender( passEncoder, vertexCount, instances );
}
}
}

18
framework/ModelState.js Normal file

@@ -0,0 +1,18 @@
export class ModelState {
constructor() {
this.config = null;
// embeddings
this.tokenEmbedding = null;
this.positionEmbedding = null;
// transformer layers
this.layers = []; // each layer: { Wq,Wk,Wv,bq,bk,bv, Wout,bout, Wfc1,bfc1, Wfc2,bfc2 }
// tied output projection
this.Wlogits = null;
}
}

37
framework/RenderPass.js Normal file

@@ -0,0 +1,37 @@
export class RenderPass {
constructor() {
}
attach(device, block, pipeline) {
this.device = device;
this.block = block;
this.pipeline = pipeline;
}
async create() {
// override — create Shader, load WGSL, create local buffers
}
async bindBuffers() {
// override — wire connections to other passes
}
async execute() {
// override — dispatch shader
}
async test() {
// override — CPU-vs-GPU correctness test
}
setLayerIndex(index) {
this.layerIndex = index;
}
setLayerWeightArray(array) {
this.layerWeightArray = array;
}
}

70
framework/RenderPipeline.js Normal file

@@ -0,0 +1,70 @@
import { Memory } from "./Memory.js";
export class RenderPipeline {
constructor(engine) {
this.engine = engine;
this.device = engine.device;
this.blocks = [];
this.memory = new Memory("pipeline");
engine.addPipeline(this);
}
addBlock(block) {
block.id = this.blocks.length;
block.pipeline = this;
this.blocks.push(block);
return block;
}
getBlock(id) {
return this.blocks[id];
}
getBlockByName(name) {
return this.blocks.find(block => block.name === name) || null;
}
getPreviousBlock(block) {
if (block.id === 0) return null;
return this.blocks[block.id - 1];
}
async create() {
for (const block of this.blocks) {
for (const pass of block.getAllPasses()) {
await pass.create();
}
}
}
async bindBuffers() {
for (const block of this.blocks) {
for (const pass of block.getAllPasses()) {
await pass.bindBuffers();
}
}
}
async execute() {
for (const block of this.blocks) {
for (const pass of block.getAllPasses()) {
await pass.execute();
await this.device.queue.onSubmittedWorkDone();
}
}
}
async test() {
for (const block of this.blocks) {
for (const pass of block.getAllPasses()) {
if (typeof pass.test === "function") {
await pass.test();
}
}
}
}
}

83
framework/RenderSystem.js Normal file

@@ -0,0 +1,83 @@
export class RenderSystem {
constructor( device, canvas ) {
this.device = device;
this.canvas = canvas;
// simple per-system depth texture; recreated on resize by user as needed
this.depthTexture = null;
}
_beginFrame( clearColor = { r: 0.3, g: 0.3, b: 0.3, a: 1 } ) {
const canvas = this.canvas;
const context = canvas.getContext( "webgpu" );
const format = navigator.gpu.getPreferredCanvasFormat();
if ( !this.depthTexture ||
this.depthTexture.width !== canvas.width ||
this.depthTexture.height !== canvas.height ) {
this.depthTexture = this.device.createTexture( {
size: [ canvas.width, canvas.height, 1 ],
sampleCount: 1,
format: "depth24plus",
usage: GPUTextureUsage.RENDER_ATTACHMENT
} );
}
const encoder = this.device.createCommandEncoder();
const view = context.getCurrentTexture().createView();
const depthView = this.depthTexture.createView();
const renderPassDescriptor = {
colorAttachments: [ {
view: view,
loadOp: "clear",
storeOp: "store",
clearValue: clearColor
} ],
depthStencilAttachment: {
view: depthView,
depthLoadOp: "clear",
depthStoreOp: "store",
depthClearValue: 1.0
}
};
const passEncoder = encoder.beginRenderPass( renderPassDescriptor );
return { encoder, passEncoder };
}
_endFrame( frame ) {
frame.passEncoder.end();
this.device.queue.submit( [ frame.encoder.finish() ] );
}
render( scene, clearColor ) {
const frame = this._beginFrame( clearColor );
scene.draw( frame.passEncoder );
this._endFrame( frame );
}
}

11
framework/Request.js Normal file

@@ -0,0 +1,11 @@
export class Request {
constructor( method, payload = {} ) {
this.method = method; // method name to call on Controller, e.g. "Ping"
this.payload = payload; // any data for the method
}
}

34
framework/Scene.js Normal file

@@ -0,0 +1,34 @@
export class Scene {
constructor( ) {
this.meshes = [];
}
addMesh( mesh ) {
this.meshes.push( mesh );
return this;
}
draw( passEncoder ) {
for ( const mesh of this.meshes ) {
if ( typeof mesh.draw === "function" ) {
mesh.draw( passEncoder );
}
}
}
}


@@ -0,0 +1,67 @@
class shaderDebugger{
setup() {
var shaders = document.shaders;
var select = document.querySelector(".selectDebugShader");
for (var i = 0; i < shaders.length; i++) {
var currentShader = shaders[i];
var option = document.createElement("option");
option.innerText = currentShader.path;
option.id = i;
select.appendChild( option );
}
document.querySelector( "#showBuffers" ).addEventListener( "click", async function() {
var select = document.querySelector(".selectDebugShader");
var selectedIndex = select.selectedIndex;
var selectedShader = document.shaders[ selectedIndex ]
const keysArray = Array.from( selectedShader.buffers );
console.log("\n\n\n\n -------------------- Debugging Shader --------------- \n\n\n\n");
console.log( "Shader Path: ", selectedShader.path );
console.log( selectedShader );
for (var i = 0; i < keysArray.length; i++) {
const bindingInfo = selectedShader.bindings.find( b => b.varName === keysArray[i][0] );
if( bindingInfo ) {
if( bindingInfo.type == "storage" ) {
await selectedShader.debugBuffer( keysArray[i][0] );
}
} else {
console.log("this is a Uniform", keysArray, selectedShader.bindings);
}
}
});
}
}
export default shaderDebugger;

205
framework/Tools.js Normal file

@@ -0,0 +1,205 @@
// ============================================================================
// Tools.js — now loads real GPT-2 vocab.json + merges.txt into fake model
// ============================================================================
export class Tools {
static async generateFakeModel() {
console.log("[Tools] Loading vocab.json + merges.txt…");
// ------------------------------------------------------------
// Load vocabulary
// ------------------------------------------------------------
const vocabResponse =
await fetch("model/vocab.json"); // <-- adjust path
const vocabularyList =
await vocabResponse.json();
// ------------------------------------------------------------
// Load merges
// ------------------------------------------------------------
const mergesResponse =
await fetch("model/merges.txt"); // <-- adjust path
const mergeRuleText =
await mergesResponse.text();
const mergeRuleList =
mergeRuleText
.split("\n")
.filter(line => line.trim().length > 0 && !line.startsWith("#"));
console.log("[Tools] ✓ Loaded",
Object.keys(vocabularyList).length, "vocab tokens,",
mergeRuleList.length, "merge rules."
);
// ------------------------------------------------------------
// Fake GPT-2 weights (hidden size = 8)
// ------------------------------------------------------------
const fakeHiddenSize = 8;
const fakeIntermediateSize = fakeHiddenSize * 4;
const fakeSequenceLength = 4;
const fakeNumberOfLayers = 2;
function createFakeArray(size) {
let arr = new Float32Array(size);
for (let i = 0; i < size; i++) arr[i] = (i % 7) * 0.1;
return arr;
}
// ------------------------------------------------------------
// Build fake model
// ------------------------------------------------------------
let model = {
configuration : {
hiddenSize: fakeHiddenSize,
numberOfTransformerLayers: fakeNumberOfLayers,
numberOfAttentionHeads: 2,
maximumSequenceLength: fakeSequenceLength,
vocabularySize: Object.keys(vocabularyList).length,
maximumPositionCount: 2048
},
// real tokenizer data:
vocabularyList: vocabularyList,
mergeRuleList: mergeRuleList,
// fake embeddings:
tokenEmbeddingTensor:
createFakeArray(Object.keys(vocabularyList).length * fakeHiddenSize),
positionEmbeddingTensor:
createFakeArray(2048 * fakeHiddenSize),
transformerLayerList: [],
layerWeightFlatList: [],
tokenIndexArray: null
};
// ------------------------------------------------------------
// Create fake transformer layers
// ------------------------------------------------------------
for (let layerIndex = 0; layerIndex < fakeNumberOfLayers; layerIndex++) {
let layer = {
firstNormalizationWeightTensor: createFakeArray(fakeHiddenSize),
firstNormalizationBiasTensor: createFakeArray(fakeHiddenSize),
queryWeightTensor: createFakeArray(fakeHiddenSize * fakeHiddenSize),
keyWeightTensor: createFakeArray(fakeHiddenSize * fakeHiddenSize),
valueWeightTensor: createFakeArray(fakeHiddenSize * fakeHiddenSize),
queryBiasTensor: createFakeArray(fakeHiddenSize),
keyBiasTensor: createFakeArray(fakeHiddenSize),
valueBiasTensor: createFakeArray(fakeHiddenSize),
attentionOutputProjectionWeightTensor:
createFakeArray(fakeHiddenSize * fakeHiddenSize),
attentionOutputProjectionBiasTensor:
createFakeArray(fakeHiddenSize),
secondNormalizationWeightTensor:
createFakeArray(fakeHiddenSize),
secondNormalizationBiasTensor:
createFakeArray(fakeHiddenSize),
feedForwardLayerOneWeightTensor:
createFakeArray(fakeHiddenSize * fakeIntermediateSize),
feedForwardLayerOneBiasTensor:
createFakeArray(fakeIntermediateSize),
feedForwardLayerTwoWeightTensor:
createFakeArray(fakeIntermediateSize * fakeHiddenSize),
feedForwardLayerTwoBiasTensor:
createFakeArray(fakeHiddenSize)
};
model.transformerLayerList.push(layer);
}
console.log("[Tools] ✓ Fake model ready.");
return model;
}
/**
* Packs a 1D typed array into a 2D RGBA Float texture.
*
* @param {TypedArray} data - Input values (Float32Array, Uint32Array, etc.)
* @param {number} texWidth - Maximum texture width (e.g., 8192)
* @returns {{data: Float32Array, width: number, height: number, totalPixels: number}}
*/
static packIntoTextureRGBA(sourceData, texWidth = 8192) {
const totalValues = sourceData.length;
const totalPixels = Math.ceil(totalValues / 4); // 4 channels/pixel
const width = texWidth;
const height = Math.ceil(totalPixels / width);
const data = new Float32Array(width * height * 4);
for (let i = 0; i < totalValues; i++) {
const pixelIndex = Math.floor(i / 4);
const channel = i % 4;
const x = pixelIndex % width;
const y = Math.floor(pixelIndex / width);
data[(y * width + x) * 4 + channel] = sourceData[i];
}
return { data, width, height, totalPixels };
}
/**
* Packs a 2D tensor (rows × cols) into a 2D RGBA float texture.
*
* @param {TypedArray} tensor - 1D row-major data
* @param {number} rows - number of rows
* @param {number} cols - number of columns
* @param {number} texWidth - max texture width (default 8192)
*/
static pack2DTensorIntoTexture( tensor, rows, cols, texWidth = 8192 ) {
const flatLength = rows * cols;
if (tensor.length !== flatLength)
throw new Error("Tensor length does not match rows*cols");
const totalPixels = Math.ceil(flatLength / 4);
const width = texWidth;
const height = Math.ceil(totalPixels / width);
const data = new Float32Array(width * height * 4);
for (let index = 0; index < flatLength; index++) {
const pixelIndex = index >> 2; // /4
const channel = index & 3; // %4
const x = pixelIndex % width;
const y = (pixelIndex / width) | 0;
data[(y * width + x) * 4 + channel] = tensor[index];
}
return { data, width, height, totalPixels };
}
}

60
framework/Vector3.js Normal file

@@ -0,0 +1,60 @@
export default class Vector3 {
x = 0;
y = 0;
z = 0;
constructor( x = 0, y = 0, z = 0 ) {
this.x = x;
this.y = y;
this.z = z;
}
static subtract( a, b ) {
return new Vector3( a.x - b.x, a.y - b.y, a.z - b.z );
}
length() {
return Math.sqrt( this.x * this.x + this.y * this.y + this.z * this.z );
}
static cross( a, b ) {
return new Vector3(
a.y * b.z - a.z * b.y,
a.z * b.x - a.x * b.z,
a.x * b.y - a.y * b.x
);
}
static dot( a, b ) {
return a.x * b.x + a.y * b.y + a.z * b.z;
}
static normalize( v ) {
const length = Math.sqrt( v.x * v.x + v.y * v.y + v.z * v.z );
if ( length > 0.00001 ) {
return new Vector3( v.x / length, v.y / length, v.z / length );
} else {
return new Vector3( 0, 0, 0 );
}
}
}

143
framework/WGSLReflection.js Normal file

@@ -0,0 +1,143 @@
export class WGSLReflection {
constructor(wgslSource) {
this.src = wgslSource;
this.structs = {};
this.bindings = [];
this.parseStructs();
this.parseBindings();
}
//
// ------------------------------------------------------------
// STRUCT PARSING
// ------------------------------------------------------------
//
parseStructs() {
const structRegex =
/struct\s+(\w+)\s*\{([^}]+)\}/g;
let match;
while ((match = structRegex.exec(this.src))) {
const structName = match[1];
const body = match[2].trim();
const fields = this.parseStructFields(body);
const size = fields.reduce((sum, f) => sum + f.size, 0);
this.structs[structName] = {
name: structName,
fields,
size
};
}
}
parseStructFields(body) {
const lines = body.split("\n");
const fields = [];
for (let line of lines) {
line = line.trim();
if (!line) continue;
const m = line.match(/(\w+)\s*:\s*([^;]+);/);
if (!m) continue;
const fieldName = m[1];
const wgslType = m[2].trim();
const size = this.computeTypeSize(wgslType);
fields.push({ fieldName, wgslType, size });
}
return fields;
}
//
// ------------------------------------------------------------
// BINDING PARSING
// ------------------------------------------------------------
//
parseBindings() {
const varRegex =
/@group\((\d+)\)\s*@binding\((\d+)\)\s*var<([^>]+)>\s+(\w+)\s*:\s*([^;]+);/g;
let match;
while ((match = varRegex.exec(this.src))) {
const group = parseInt(match[1]);
const binding = parseInt(match[2]);
const type = match[3].trim();
const varName = match[4].trim();
const varType = match[5].trim();
const size = this.computeTypeSize(varType);
this.bindings.push({
group,
binding,
type,
varName,
varType,
size
});
}
}
//
// ------------------------------------------------------------
// TYPE SIZE COMPUTATION (Simplified)
// ------------------------------------------------------------
//
computeTypeSize(wgslType) {
// scalar
if (wgslType === "f32" || wgslType === "u32" || wgslType === "i32") {
return 4;
}
// array<f32, N>
const arrMatch = wgslType.match(/array<(\w+),\s*(\d+)>/);
if (arrMatch) {
const elementType = arrMatch[1];
const count = parseInt(arrMatch[2]);
return this.computeTypeSize(elementType) * count;
}
// array<f32> runtime-sized → minimal
const runtimeArr = wgslType.match(/array<(\w+)>/);
if (runtimeArr) {
return 4;
}
// struct
if (this.structs[wgslType]) {
return this.structs[wgslType].size;
}
return 4;
}
//
// ------------------------------------------------------------
// SUMMARY OUTPUT
// ------------------------------------------------------------
//
reflect() {
return {
structs: this.structs,
bindings: this.bindings
};
}
}

2256
framework/WebGpu.js Normal file

File diff suppressed because it is too large

143
framework/eventManager.js Normal file

@@ -0,0 +1,143 @@
// eventManager.js
export default class EventManager {
isDragging = false;
lastX = 0;
lastY = 0;
camera;
canvas;
setCanvas( canvas ) {
this.canvas = canvas;
//this.registerEventListeners();
//this.handleResize();
}
setup( canvas, camera ) {
this.canvas = canvas;
this.camera = camera;
//this.registerEventListeners();
//this.handleResize();
}
onMouseDown( event ) {
this.mousedown( event );
}
onMouseUp( event ) {
this.mouseup( event );
}
onMouseLeave( event ) {
this.mouseleave( event );
}
onMouseMove( event ) {
this.mousemove( event );
}
onWheel( event ) {
this.wheel( event );
}
registerEventListeners() {
this.canvas.addEventListener( "mousedown", this.onMouseDown.bind(this) );
this.canvas.addEventListener( "mouseup", this.onMouseUp.bind(this) );
this.canvas.addEventListener( "mouseleave", this.onMouseLeave.bind(this) );
this.canvas.addEventListener( "mousemove", this.onMouseMove.bind(this) );
this.canvas.addEventListener( "wheel", this.onWheel.bind(this), { passive: false } );
}
resize( event ) {
this.canvas.width = event.width;
this.canvas.height = event.height;
//this.canvas.width = window.innerWidth;
//this.canvas.height = window.innerHeight;
}
mousedown( event ) {
console.log("mouseDownHandler");
this.isDragging = true;
this.lastX = event.clientX;
this.lastY = event.clientY;
}
mouseup( event ) {
this.isDragging = false;
}
mouseleave( event ) {
this.isDragging = false;
}
mousemove( event ) {
if ( !this.isDragging ) return;
const deltaX = ( event.clientX - this.lastX ) * 0.005;
const deltaY = ( event.clientY - this.lastY ) * 0.005;
this.camera.rotate( deltaX, -deltaY );
this.lastX = event.clientX;
this.lastY = event.clientY;
}
wheel( event ) {
const delta = event.deltaY * 0.01;
this.camera.zoom( delta );
}
}


@@ -0,0 +1,214 @@
// eventManager.js
import sdl from '@kmamal/sdl'
var isNode = true;
if ( typeof window === 'undefined' ) {
//isNode = false;
}
console.log("isNode", isNode);
export default class EventManager {
isDragging = false;
lastX = 0;
lastY = 0;
camera;
canvas;
setCanvas( canvas ) {
this.canvas = canvas;
//this.registerEventListeners();
//this.handleResize();
}
setup( canvas, camera ) {
this.canvas = canvas;
this.camera = camera;
//this.registerEventListeners();
//this.handleResize();
}
registerEventListeners() {
this.canvas.addEventListener( "mousedown", this.onMouseDown.bind(this) );
this.canvas.addEventListener( "mouseup", this.onMouseUp.bind(this) );
this.canvas.addEventListener( "mouseleave", this.onMouseLeave.bind(this) );
this.canvas.addEventListener( "mousemove", this.onMouseMove.bind(this) );
this.canvas.addEventListener( "wheel", this.onWheel.bind(this), { passive: false } );
}
registerEventListenersNode() {
var that = this;
this.canvas.on('mouseMove', function( event ) {
that.mousemove( event )
});
this.canvas.on('mouseButtonDown', function( event ) {
that.mousedown( event )
});
this.canvas.on('mouseButtonUp', function( event ) {
that.mouseup( event )
});
/*
this.canvas.on( "mouseButtonDown", this.onMouseDown.bind(this) );
this.canvas.on( "mouseButtonUp", this.onMouseUp.bind(this) );
//this.canvas.on( "mouseleave", this.onMouseLeave.bind(this) );
this.canvas.on( "mouseMove", this.onMouseMove.bind(this) );
this.canvas.on( "mouseWheel", this.onWheel.bind(this), { passive: false } );
*/
}
resize( event ) {
this.canvas.width = event.width;
this.canvas.height = event.height;
//this.canvas.width = window.innerWidth;
//this.canvas.height = window.innerHeight;
}
mousedown( event ) {
this.isDragging = true;
if( isNode ) {
var mouseX = event.x;
var mouseY = event.y;
} else {
var mouseX = event.clientX;
var mouseY = event.clientY;
}
//console.log("mouseDownHandler", mouseX, mouseY);
this.lastX = mouseX;
this.lastY = mouseY;
}
mouseup( event ) {
this.isDragging = false;
}
mouseleave( event ) {
this.isDragging = false;
}
mousemove( event ) {
if( isNode ) {
var mouseX = event.x;
var mouseY = event.y;
} else {
var mouseX = event.clientX;
var mouseY = event.clientY;
}
if ( !this.isDragging ) return;
const deltaX = ( mouseX - this.lastX ) * 0.005;
const deltaY = ( mouseY - this.lastY ) * 0.005;
//console.log("mousemove", mouseX, mouseY);
this.camera.rotate( deltaX, -deltaY );
this.lastX = mouseX;
this.lastY = mouseY;
}
wheel( event ) {
const delta = event.deltaY * 0.01;
this.camera.zoom( delta );
}
}

6
framework/package.json Normal file

@@ -0,0 +1,6 @@
{
"type": "module",
"dependencies": {
}
}

134
index.html Normal file

@@ -0,0 +1,134 @@
<!doctype html>
<html lang="en">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>WebGPU FFT Ocean Demo</title>
<style>
body {
margin: 0;
background: radial-gradient(circle at 20% 20%, #0f172a, #020617 60%);
color: #e2e8f0;
font-family: "Segoe UI", system-ui, -apple-system, sans-serif;
height: 100vh;
width: 100vw;
overflow: hidden;
}
#container {
position: fixed;
inset: 0;
border: none;
box-shadow: none;
overflow: hidden;
}
#status {
position: absolute;
left: 12px;
bottom: 12px;
padding: 6px 10px;
background: rgba(15, 23, 42, 0.65);
border: 1px solid #1f2937;
border-radius: 6px;
font-size: 12px;
letter-spacing: 0.4px;
backdrop-filter: blur(4px);
}
canvas {
width: 100vw;
height: 100vh;
display: block;
}
#controls {
position: absolute;
right: 12px;
top: 12px;
padding: 6px 10px;
background: rgba(15, 23, 42, 0.75);
border: 1px solid #1f2937;
border-radius: 6px;
font-size: 12px;
display: flex;
align-items: flex-start;
gap: 10px;
backdrop-filter: blur(4px);
}
#controls label {
display: flex;
align-items: center;
gap: 6px;
white-space: nowrap;
}
#controls input[type="range"] {
width: 140px;
}
#controls .column {
display: flex;
flex-direction: column;
gap: 6px;
}
#controls .row {
display: flex;
align-items: center;
gap: 6px;
}
#controls select {
font-size: 12px;
padding: 2px 4px;
background: #020617;
color: #e2e8f0;
border-radius: 4px;
border: 1px solid #1f2937;
}
</style>
</head>
<body>
<div id="container">
<canvas id="gfx"></canvas>
<div id="controls">
<div class="column">
<label class="row">
Wave height
<input id="waveHeight" type="range" min="0" max="60" step="1" value="33">
<span id="waveHeightValue">33</span>
</label>
<label class="row">
Wave length
<input id="wavelengthSlider" type="range" min="0.25" max="4" step="0.05" value="1">
<span id="wavelengthValue">1.00</span>
</label>
<label class="row">
Resolution
<input id="resolutionSlider" type="range" min="32" max="2048" step="32" value="1024">
<span id="resolutionValue">1024</span>
</label>
<label class="row">
<input id="wireframeToggle" type="checkbox">
Wireframe mode
</label>
<label class="row">
<input id="pauseToggle" type="checkbox">
Pause waves
</label>
<label class="row">
<button id="stepButton" type="button">Next frame</button>
</label>
<label class="row">
Shading
<select id="shadingMode">
<option value="realistic">Realistic</option>
<option value="lighting">Lighting</option>
<option value="normals">Normals</option>
<option value="solid">Solid color</option>
<option value="height">Height (debug)</option>
</select>
</label>
<label class="row">
<button id="dumpHeightButton" type="button">Dump height (debug)</button>
</label>
</div>
</div>
<div id="status">Initializing WebGPU ocean…</div>
</div>
<script type="module" src="./main.js"></script>
</body>
</html>

387
main.js Normal file

@@ -0,0 +1,387 @@
import { Engine } from "/framework/Engine.js";
import { Scene } from "/framework/Scene.js";
import { Mesh } from "/framework/Mesh.js";
import { OceanPipeline } from "./pipelines/OceanPipeline.js";
import { OceanTests } from "./tests/OceanTests.js";
import { events } from "./events.js";
class OceanApp {
static bitReverse( x, bits ) {
let n = x;
let r = 0;
for ( let i = 0; i < bits; i++ ) {
r = ( r << 1 ) | ( n & 1 );
n = n >> 1;
}
return r;
}
async run( ) {
const eventsHandler = new events( );
const domElements = eventsHandler.getDomElements( );
const statusElement = domElements.statusElement;
const canvas = domElements.canvas;
const waveHeightSlider = domElements.waveHeightSlider;
const waveHeightValue = domElements.waveHeightValue;
const wavelengthSlider = domElements.wavelengthSlider;
const wavelengthValue = domElements.wavelengthValue;
const resolutionSlider = domElements.resolutionSlider;
const resolutionValue = domElements.resolutionValue;
const tilingSlider = domElements.tilingSlider;
const tilingValue = domElements.tilingValue;
const wireframeToggle = domElements.wireframeToggle;
const pauseToggle = domElements.pauseToggle;
const shadingModeSelect = domElements.shadingModeSelect;
const stepButton = domElements.stepButton;
const dumpHeightButton = domElements.dumpHeightButton;
function resizeCanvasToDisplay( ) {
const width = window.innerWidth;
const height = window.innerHeight;
if ( canvas.width !== width || canvas.height !== height ) {
canvas.width = width;
canvas.height = height;
}
}
resizeCanvasToDisplay( );
if ( !navigator.gpu ) {
statusElement.textContent = "WebGPU not supported in this browser.";
return;
}
statusElement.textContent = "Requesting WebGPU device…";
const adapter = await navigator.gpu.requestAdapter( );
const device = await adapter.requestDevice( );
const engine = new Engine( device );
const renderSystem = engine.createRenderSystem( canvas );
let scene = new Scene( );
let pipelines = [];
let currentMeshResolution = 1024;
let currentWavelengthScale = 1.0;
let currentTiling = 1;
function getPrimaryPipeline( ) {
return pipelines.length > 0 ? pipelines[ 0 ] : null;
}
async function rebuildScene( ) {
const previousPrimary = getPrimaryPipeline( );
statusElement.textContent = "Rebuilding pipelines…";
const newScene = new Scene( );
const meshRes = currentMeshResolution;
const tiling = currentTiling;
const pipeline = new OceanPipeline( engine, canvas );
pipeline.gridSize = 64;
pipeline.meshResolution = meshRes;
pipeline.memory.set( "canvasRef", canvas );
await pipeline.create( );
// restore camera from previous primary pipeline if available
if ( previousPrimary && previousPrimary.camera && pipeline.camera ) {
const prevCam = previousPrimary.camera;
const newCam = pipeline.camera;
newCam.yaw = prevCam.yaw;
newCam.pitch = prevCam.pitch;
newCam.distance = prevCam.distance;
newCam.fovRadians = prevCam.fovRadians;
newCam.near = prevCam.near;
newCam.far = prevCam.far;
if ( prevCam.target && newCam.target ) {
newCam.target.x = prevCam.target.x;
newCam.target.y = prevCam.target.y;
newCam.target.z = prevCam.target.z;
}
if ( prevCam.up && newCam.up ) {
newCam.up.x = prevCam.up.x;
newCam.up.y = prevCam.up.y;
newCam.up.z = prevCam.up.z;
}
if ( typeof newCam.update === "function" ) {
newCam.update( );
}
}
// global settings
if ( waveHeightSlider && waveHeightValue ) {
const hValue = parseFloat( waveHeightSlider.value );
if ( !isNaN( hValue ) ) {
pipeline.setHeightScale( hValue );
waveHeightValue.textContent = hValue.toFixed( 0 );
}
}
if ( wavelengthSlider && wavelengthValue && typeof pipeline.setWavelengthScale === "function" ) {
const wValue = parseFloat( wavelengthSlider.value );
if ( !isNaN( wValue ) && wValue > 0 ) {
pipeline.setWavelengthScale( wValue );
wavelengthValue.textContent = wValue.toFixed( 2 );
}
}
if ( wireframeToggle ) {
if ( wireframeToggle.checked ) {
pipeline.setRenderMode( "wireframe" );
} else {
pipeline.setRenderMode( "solid" );
}
}
if ( shadingModeSelect ) {
pipeline.setShadingMode( shadingModeSelect.value || "lighting" );
}
if ( pauseToggle ) {
pipeline.setPaused( pauseToggle.checked );
}
// tiling (number of tiles around center)
pipeline.setTiling( tiling );
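// A tiling value of N yields ( 2 * N - 1 ) x ( 2 * N - 1 ) tile instances arranged around the centre tile.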
const tileRange = Math.max( 0, tiling - 1 );
const tilesPerRow = tileRange * 2 + 1;
const tileCount = tilesPerRow * tilesPerRow;
const block = pipeline.getBlockByName( "ocean" );
if ( block ) {
const skyPass = block.getPass( "SkySphere" );
if ( skyPass && skyPass.shader ) {
const skyMesh = new Mesh( );
skyMesh.addShader( skyPass.shader );
newScene.addMesh( skyMesh );
}
const renderPassName = wireframeToggle && wireframeToggle.checked ? "RenderWire" : "RenderSolid";
const renderPass = block.getPass( renderPassName );
if ( renderPass && renderPass.shader ) {
const mesh = new Mesh( );
mesh.addShader( renderPass.shader );
mesh.setInstanceCount( tileCount );
newScene.addMesh( mesh );
}
}
pipelines = [ pipeline ];
scene = newScene;
statusElement.textContent = "Simulating ocean (drag to orbit, scroll to zoom)";
}
await rebuildScene( );
eventsHandler.resizeCanvasToDisplay = resizeCanvasToDisplay;
eventsHandler.getPrimaryPipeline = getPrimaryPipeline;
eventsHandler.getPipelines = function( ) { return pipelines; };
eventsHandler.rebuildScene = rebuildScene;
eventsHandler.getScene = function( ) { return scene; };
eventsHandler.getRenderSystem = function( ) { return renderSystem; };
eventsHandler.setCurrentMeshResolution = function( value ) { currentMeshResolution = value; };
eventsHandler.setCurrentWavelengthScale = function( value ) { currentWavelengthScale = value; };
eventsHandler.setCurrentTiling = function( value ) { currentTiling = value; };
eventsHandler.setAutoRotate = function( value ) { autoRotate = value; };
eventsHandler.setup( );
// expose FFT test helper
const oceanTests = new OceanTests( getPrimaryPipeline );
window.testFft = async function( ) {
return oceanTests.testFft( );
};
window.testSpectrum = async function( ) {
return oceanTests.testSpectrum( );
};
let autoRotate = true;
let lastAutoRotateTime = performance.now();
async function frame( ) {
const now = performance.now();
if ( autoRotate ) {
const dt = ( now - lastAutoRotateTime ) / 1000;
const primary = getPrimaryPipeline();
if ( primary && primary.camera ) {
const rotationSpeed = 0.15;
primary.camera.yaw += rotationSpeed * dt;
primary.camera.update();
}
}
lastAutoRotateTime = now;
const list = pipelines.slice( );
const run = async function( ) {
for ( const p of list ) {
await p.bindBuffers( );
await p.execute( );
}
if ( scene && renderSystem ) {
renderSystem.render( scene );
}
};
try {
await run( );
requestAnimationFrame( frame );
} catch ( error ) {
console.error( error );
statusElement.textContent = "Error while drawing ocean.";
}
}
requestAnimationFrame( frame );
}
}
const app = new OceanApp( );
app.run( );

16
node_modules/.package-lock.json generated vendored Normal file

@@ -0,0 +1,16 @@
{
"name": "codex_test",
"lockfileVersion": 3,
"requires": true,
"packages": {
"node_modules/pngjs": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz",
"integrity": "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==",
"license": "MIT",
"engines": {
"node": ">=14.19.0"
}
}
}
}

128
node_modules/pngjs/CHANGELOG.md generated vendored Normal file

@@ -0,0 +1,128 @@
# Changelog
### 7.0.0 - 19/02/2023
- BREAKING - Drop support for node 12 (Though nothing incompatible in this release yet)
- Switch to a pngjs organisation
### 6.0.0 - 24/10/2020
- BREAKING - Sync version now throws if there is unexpected content at the end of the stream.
- BREAKING - Drop support for node 10 (Though nothing incompatible in this release yet)
- Reduce the number of files included in the package
### 5.1.0 - 13/09/2020
- Add option to skip rescaling
### 5.0.0 - 15/04/2020
- Drop support for Node 8
- Browserified bundle may now contain ES20(15-20) code if the supported node version supports it. Please run the browserified version through babel if you need to support older browsers.
### 4.0.1 - 15/04/2020
- Fix to possible null reference in nextTick of async method
### 4.0.0 - 09/04/2020
- Fix issue in newer nodes with using Buffer
- Fix async issue with some png files
- Drop support for Node 4 & 6
### 3.4.0 - 09/03/2019
- Include whether the png has alpha in the meta data
- emit an error if the image is truncated instead of hanging
- Add a browserified version
- speed up some mapping functions
### 3.3.3 - 19/04/2018
- Real fix for node 9
### 3.3.2 - 16/02/2018
- Fix for node 9
### 3.3.1 - 15/11/2017
- Bugfixes and removal of es6
### 3.3.0
- Add writing 16 bit channels and support for grayscale input
### 3.2.0 - 30/04/2017
- Support for encoding 8-bit grayscale images
### 3.1.0 - 30/04/2017
- Support for pngs with zlib chunks that are malformed after valid data
### 3.0.1 - 16/02/2017
- Fix single pixel pngs
### 3.0.0 - 03/08/2016
- Drop support for node below v4 and iojs. Pin to 2.3.0 to use with old, unsupported or patched node versions.
### 2.3.0 - 22/04/2016
- Support for sync in node 0.10
### 2.2.0 - 04/12/2015
- Add sync write api
- Fix newfile example
- Correct comparison table
### 2.1.0 - 28/10/2015
- rename package to pngjs
- added 'bgColor' option
### 2.0.0 - 08/10/2015
- fixes to readme
- _breaking change_ - bitblt on the png prototype now doesn't take an unused, unnecessary src first argument
### 1.2.0 - 13/09/2015
- support passing colorType to write PNG's and writing bitmaps without alpha information
### 1.1.0 - 07/09/2015
- support passing a deflate factory for controlled compression
### 1.0.2 - 22/08/2015
- Expose all PNG creation info
### 1.0.1 - 21/08/2015
- Fix non square interlaced files
### 1.0.0 - 08/08/2015
- More tests
- source linted
- maintainability refactorings
- async API - exceptions in reading now emit warnings
- documentation improvement - sync api now documented, adjustGamma documented
- breaking change - gamma chunk is now written. previously a read then write would destroy gamma information, now it is persisted.
### 0.0.3 - 03/08/2015
- Error handling fixes
- ignore files for smaller npm footprint
### 0.0.2 - 02/08/2015
- Bugfixes to interlacing, support for transparent colours
### 0.0.1 - 02/08/2015
- Initial release, see pngjs for older changelog.

20
node_modules/pngjs/LICENSE generated vendored Normal file

@@ -0,0 +1,20 @@
pngjs original work Copyright (c) 2015 Luke Page & Original Contributors
pngjs derived work Copyright (c) 2012 Kuba Niegowski
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.

287
node_modules/pngjs/README.md generated vendored Normal file

@@ -0,0 +1,287 @@
![build](https://github.com/pngjs/pngjs/actions/workflows/ci.yml/badge.svg?branch=main) [![codecov](https://codecov.io/gh/pngjs/pngjs/branch/master/graph/badge.svg)](https://codecov.io/gh/pngjs/pngjs) [![npm version](https://badge.fury.io/js/pngjs.svg)](http://badge.fury.io/js/pngjs)
# pngjs
Simple PNG encoder/decoder for Node.js with no dependencies.
Based on the original [pngjs](https://github.com/niegowski/node-pngjs) with the following enhancements.
- Support for reading 1,2,4 & 16 bit files
- Support for reading interlace files
- Support for reading `tRNS` transparent colours
- Support for writing colortype 0 (grayscale), colortype 2 (RGB), colortype 4 (grayscale alpha) and colortype 6 (RGBA)
- Sync interface as well as async
- API compatible with pngjs and node-pngjs
Known lack of support for:
- Extended PNG e.g. Animation
- Writing in colortype 3 (indexed color)
# Table of Contents
- [Requirements](#requirements)
- [Comparison Table](#comparison-table)
- [Tests](#tests)
- [Installation](#installation)
- [Browser](#browser)
- [Example](#example)
- [Async API](#async-api)
- [Sync API](#sync-api)
- [Changelog](#changelog)
# Comparison Table
| Name | Forked From | Sync | Async | 16 Bit | 1/2/4 Bit | Interlace | Gamma | Encodes | Tested |
| ------------- | ----------- | ---- | ----- | ------ | --------- | --------- | ------ | ------- | ------ |
| pngjs | | Yes | Yes | Yes | Yes | Yes | Yes | Yes | Yes |
| node-png | pngjs | No | Yes | No | No | No | Hidden | Yes | Manual |
| png-coder | pngjs | No | Yes | Yes | No | No | Hidden | Yes | Manual |
| pngparse | | No | Yes | No | Yes | No | No | No | Yes |
| pngparse-sync | pngparse | Yes | No | No | Yes | No | No | No | Yes |
| png-async | | No | Yes | No | No | No | No | Yes | Yes |
| png-js | | No | Yes | No | No | No | No | No | No |
Native C++ node decoders:
- png
- png-sync (sync version of above)
- pixel-png
- png-img
# Tests
Tested using [PNG Suite](http://www.schaik.com/pngsuite/). We read every file into pngjs, output it in standard 8bit colour, synchronously and asynchronously, then compare the original with the newly saved images.
To run the tests, fetch the repo (tests are not distributed via npm), install with `npm i`, then run `npm test`.
The only thing not converted is gamma correction - this is because multiple vendors will do gamma correction differently, so the tests will have different results on different browsers.
# Installation
```
$ npm install pngjs --save
```
# Browser
The package has been built with a [Browserify](browserify.org) version (`npm run browserify`), and you can use the browser version by including it in your code:
```
import { PNG } from 'pngjs/browser';
```
# Example
```js
var fs = require("fs"),
PNG = require("pngjs").PNG;
fs.createReadStream("in.png")
.pipe(
new PNG({
filterType: 4,
})
)
.on("parsed", function () {
for (var y = 0; y < this.height; y++) {
for (var x = 0; x < this.width; x++) {
var idx = (this.width * y + x) << 2;
// invert color
this.data[idx] = 255 - this.data[idx];
this.data[idx + 1] = 255 - this.data[idx + 1];
this.data[idx + 2] = 255 - this.data[idx + 2];
// and reduce opacity
this.data[idx + 3] = this.data[idx + 3] >> 1;
}
}
this.pack().pipe(fs.createWriteStream("out.png"));
});
```
For more examples see `examples` folder.
# Async API
Any color type is accepted as input (grayscale, rgb, palette, grayscale with alpha, rgb with alpha), but 8 bits per sample (channel) is the only supported bit depth. Interlaced mode is not supported.
## Class: PNG
`PNG` is readable and writable `Stream`.
### Options
- `width` - use this with `height` if you want to create png from scratch
- `height` - as above
- `checkCRC` - whether parser should be strict about checksums in source stream (default: `true`)
- `deflateChunkSize` - chunk size used for deflating data chunks; this should be a power of 2, no less than 256 and no more than 32\*1024 (default: 32 kB)
- `deflateLevel` - compression level for deflate (default: 9)
- `deflateStrategy` - compression strategy for deflate (default: 3)
- `deflateFactory` - deflate stream factory (default: `zlib.createDeflate`)
- `filterType` - png filtering method for scanlines (default: -1 => auto, accepts array of numbers 0-4)
- `colorType` - the output colorType - see constants. 0 = grayscale, no alpha, 2 = color, no alpha, 4 = grayscale & alpha, 6 = color & alpha. Default currently 6, but in the future may calculate best mode.
- `inputColorType` - the input colorType - see constants. Default is 6 (RGBA)
- `bitDepth` - the bitDepth of the output, 8 or 16 bits. Input data is expected to have this bit depth.
16 bit data is expected in the system endianness (Default: 8)
- `inputHasAlpha` - whether the input bitmap has 4 bytes per pixel (rgb and alpha) or 3 (rgb - no alpha).
- `bgColor` - an object containing red, green, and blue values between 0 and 255
that is used when packing a PNG if alpha is not to be included (default: 255,255,255)
### Event "metadata"
`function(metadata) { }`
The image's header has been parsed; `metadata` contains this information (a short usage sketch follows the list):
- `width` image size in pixels
- `height` image size in pixels
- `palette` image is paletted
- `color` image is not grayscale
- `alpha` image contains alpha channel
- `interlace` image is interlaced
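For example, a minimal sketch (the file name is illustrative) that logs the header information as soon as it has been parsed:
```js
var fs = require("fs"),
  PNG = require("pngjs").PNG;

fs.createReadStream("in.png")
  .pipe(new PNG())
  .on("metadata", function (metadata) {
    // fires once the header has been read, before the pixel data is decoded
    console.log(metadata.width, metadata.height, metadata.alpha);
  });
```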
### Event: "parsed"
`function(data) { }`
Input image has been completely parsed, `data` is complete and ready for modification.
### Event: "error"
`function(error) { }`
### png.parse(data, [callback])
Parses PNG file data. Can be `String` or `Buffer`. Alternatively you can stream data to an instance of PNG.
Optional `callback` is called once on `error` or `parsed`. The callback gets
two arguments `(err, data)`.
Returns `this` for method chaining.
#### Example
```js
new PNG({ filterType: 4 }).parse(imageData, function (error, data) {
console.log(error, data);
});
```
### png.pack()
Starts converting data to PNG file Stream.
Returns `this` for method chaining.
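For instance, a minimal sketch (the output file name is illustrative) that builds a 1x1 image from scratch using the `width`/`height` options described above and streams it to disk with `pack()`:
```js
var fs = require("fs"),
  PNG = require("pngjs").PNG;

var png = new PNG({ width: 1, height: 1 });
// png.data is an RGBA buffer, 4 bytes per pixel
png.data[0] = 255; // R
png.data[1] = 0; // G
png.data[2] = 0; // B
png.data[3] = 255; // A
png.pack().pipe(fs.createWriteStream("out.png"));
```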
### png.bitblt(dst, sx, sy, w, h, dx, dy)
Helper for image manipulation; copies a rectangle of pixels from the current (i.e. the source) image (`sx`, `sy`, `w`, `h`) to the `dst` image (at `dx`, `dy`).
Returns `this` for method chaining.
For example, the following code copies the top-left 100x50 px of `in.png` into dst and writes it to `out.png`:
```js
var dst = new PNG({ width: 100, height: 50 });
fs.createReadStream("in.png")
.pipe(new PNG())
.on("parsed", function () {
this.bitblt(dst, 0, 0, 100, 50, 0, 0);
dst.pack().pipe(fs.createWriteStream("out.png"));
});
```
### Property: adjustGamma()
Helper that takes data and adjusts it to be gamma corrected. Note that it is not 100% reliable with transparent colours because that requires knowing the background colour the bitmap is rendered on to.
In tests against PNG suite it compared 100% with chrome on all 8 bit and below images. On IE there were some differences.
The following example reads a file, adjusts the gamma (which sets the gamma to 0) and writes it out again, effectively removing any gamma correction from the image.
```js
fs.createReadStream("in.png")
.pipe(new PNG())
.on("parsed", function () {
this.adjustGamma();
this.pack().pipe(fs.createWriteStream("out.png"));
});
```
### Property: width
Width of image in pixels
### Property: height
Height of image in pixels
### Property: data
Buffer of image pixel data. Every pixel consists of 4 bytes: R, G, B, A (opacity).
### Property: gamma
Gamma of image (0 if not specified)
## Packing a PNG and removing alpha (RGBA to RGB)
When removing the alpha channel from an image, there needs to be a background color to correctly
convert each pixel's transparency to the appropriate RGB value. By default, pngjs will flatten
the image against a white background. You can override this in the options:
```js
var fs = require("fs"),
PNG = require("pngjs").PNG;
fs.createReadStream("in.png")
.pipe(
new PNG({
colorType: 2,
bgColor: {
red: 0,
green: 255,
blue: 0,
},
})
)
.on("parsed", function () {
this.pack().pipe(fs.createWriteStream("out.png"));
});
```
# Sync API
## PNG.sync
### PNG.sync.read(buffer)
Takes a buffer and returns a PNG image. The properties on the image include the metadata and `data` as per the async API above.
```
var data = fs.readFileSync('in.png');
var png = PNG.sync.read(data);
```
### PNG.sync.write(png)
Takes a PNG image and returns a buffer. The properties on the image include the metadata and `data` as per the async API above.
```
var data = fs.readFileSync('in.png');
var png = PNG.sync.read(data);
var options = { colorType: 6 };
var buffer = PNG.sync.write(png, options);
fs.writeFileSync('out.png', buffer);
```
### PNG.adjustGamma(src)
Adjusts the gamma of a sync image. See the async adjustGamma.
```
var data = fs.readFileSync('in.png');
var png = PNG.sync.read(data);
PNG.adjustGamma(png);
```

18985
node_modules/pngjs/browser.js generated vendored Normal file

File diff suppressed because it is too large

267
node_modules/pngjs/lib/bitmapper.js generated vendored Normal file

@@ -0,0 +1,267 @@
"use strict";
let interlaceUtils = require("./interlace");
let pixelBppMapper = [
// 0 - dummy entry
function () {},
// 1 - L
// 0: 0, 1: 0, 2: 0, 3: 0xff
function (pxData, data, pxPos, rawPos) {
if (rawPos === data.length) {
throw new Error("Ran out of data");
}
let pixel = data[rawPos];
pxData[pxPos] = pixel;
pxData[pxPos + 1] = pixel;
pxData[pxPos + 2] = pixel;
pxData[pxPos + 3] = 0xff;
},
// 2 - LA
// 0: 0, 1: 0, 2: 0, 3: 1
function (pxData, data, pxPos, rawPos) {
if (rawPos + 1 >= data.length) {
throw new Error("Ran out of data");
}
let pixel = data[rawPos];
pxData[pxPos] = pixel;
pxData[pxPos + 1] = pixel;
pxData[pxPos + 2] = pixel;
pxData[pxPos + 3] = data[rawPos + 1];
},
// 3 - RGB
// 0: 0, 1: 1, 2: 2, 3: 0xff
function (pxData, data, pxPos, rawPos) {
if (rawPos + 2 >= data.length) {
throw new Error("Ran out of data");
}
pxData[pxPos] = data[rawPos];
pxData[pxPos + 1] = data[rawPos + 1];
pxData[pxPos + 2] = data[rawPos + 2];
pxData[pxPos + 3] = 0xff;
},
// 4 - RGBA
// 0: 0, 1: 1, 2: 2, 3: 3
function (pxData, data, pxPos, rawPos) {
if (rawPos + 3 >= data.length) {
throw new Error("Ran out of data");
}
pxData[pxPos] = data[rawPos];
pxData[pxPos + 1] = data[rawPos + 1];
pxData[pxPos + 2] = data[rawPos + 2];
pxData[pxPos + 3] = data[rawPos + 3];
},
];
let pixelBppCustomMapper = [
// 0 - dummy entry
function () {},
// 1 - L
// 0: 0, 1: 0, 2: 0, 3: 0xff
function (pxData, pixelData, pxPos, maxBit) {
let pixel = pixelData[0];
pxData[pxPos] = pixel;
pxData[pxPos + 1] = pixel;
pxData[pxPos + 2] = pixel;
pxData[pxPos + 3] = maxBit;
},
// 2 - LA
// 0: 0, 1: 0, 2: 0, 3: 1
function (pxData, pixelData, pxPos) {
let pixel = pixelData[0];
pxData[pxPos] = pixel;
pxData[pxPos + 1] = pixel;
pxData[pxPos + 2] = pixel;
pxData[pxPos + 3] = pixelData[1];
},
// 3 - RGB
// 0: 0, 1: 1, 2: 2, 3: 0xff
function (pxData, pixelData, pxPos, maxBit) {
pxData[pxPos] = pixelData[0];
pxData[pxPos + 1] = pixelData[1];
pxData[pxPos + 2] = pixelData[2];
pxData[pxPos + 3] = maxBit;
},
// 4 - RGBA
// 0: 0, 1: 1, 2: 2, 3: 3
function (pxData, pixelData, pxPos) {
pxData[pxPos] = pixelData[0];
pxData[pxPos + 1] = pixelData[1];
pxData[pxPos + 2] = pixelData[2];
pxData[pxPos + 3] = pixelData[3];
},
];
function bitRetriever(data, depth) {
let leftOver = [];
let i = 0;
function split() {
if (i === data.length) {
throw new Error("Ran out of data");
}
let byte = data[i];
i++;
let byte8, byte7, byte6, byte5, byte4, byte3, byte2, byte1;
switch (depth) {
default:
throw new Error("unrecognised depth");
case 16:
byte2 = data[i];
i++;
leftOver.push((byte << 8) + byte2);
break;
case 4:
byte2 = byte & 0x0f;
byte1 = byte >> 4;
leftOver.push(byte1, byte2);
break;
case 2:
byte4 = byte & 3;
byte3 = (byte >> 2) & 3;
byte2 = (byte >> 4) & 3;
byte1 = (byte >> 6) & 3;
leftOver.push(byte1, byte2, byte3, byte4);
break;
case 1:
byte8 = byte & 1;
byte7 = (byte >> 1) & 1;
byte6 = (byte >> 2) & 1;
byte5 = (byte >> 3) & 1;
byte4 = (byte >> 4) & 1;
byte3 = (byte >> 5) & 1;
byte2 = (byte >> 6) & 1;
byte1 = (byte >> 7) & 1;
leftOver.push(byte1, byte2, byte3, byte4, byte5, byte6, byte7, byte8);
break;
}
}
return {
get: function (count) {
while (leftOver.length < count) {
split();
}
let returner = leftOver.slice(0, count);
leftOver = leftOver.slice(count);
return returner;
},
resetAfterLine: function () {
leftOver.length = 0;
},
end: function () {
if (i !== data.length) {
throw new Error("extra data found");
}
},
};
}
function mapImage8Bit(image, pxData, getPxPos, bpp, data, rawPos) {
// eslint-disable-line max-params
let imageWidth = image.width;
let imageHeight = image.height;
let imagePass = image.index;
for (let y = 0; y < imageHeight; y++) {
for (let x = 0; x < imageWidth; x++) {
let pxPos = getPxPos(x, y, imagePass);
pixelBppMapper[bpp](pxData, data, pxPos, rawPos);
rawPos += bpp; //eslint-disable-line no-param-reassign
}
}
return rawPos;
}
function mapImageCustomBit(image, pxData, getPxPos, bpp, bits, maxBit) {
// eslint-disable-line max-params
let imageWidth = image.width;
let imageHeight = image.height;
let imagePass = image.index;
for (let y = 0; y < imageHeight; y++) {
for (let x = 0; x < imageWidth; x++) {
let pixelData = bits.get(bpp);
let pxPos = getPxPos(x, y, imagePass);
pixelBppCustomMapper[bpp](pxData, pixelData, pxPos, maxBit);
}
bits.resetAfterLine();
}
}
exports.dataToBitMap = function (data, bitmapInfo) {
let width = bitmapInfo.width;
let height = bitmapInfo.height;
let depth = bitmapInfo.depth;
let bpp = bitmapInfo.bpp;
let interlace = bitmapInfo.interlace;
let bits;
if (depth !== 8) {
bits = bitRetriever(data, depth);
}
let pxData;
if (depth <= 8) {
pxData = Buffer.alloc(width * height * 4);
} else {
pxData = new Uint16Array(width * height * 4);
}
let maxBit = Math.pow(2, depth) - 1;
let rawPos = 0;
let images;
let getPxPos;
if (interlace) {
images = interlaceUtils.getImagePasses(width, height);
getPxPos = interlaceUtils.getInterlaceIterator(width, height);
} else {
let nonInterlacedPxPos = 0;
getPxPos = function () {
let returner = nonInterlacedPxPos;
nonInterlacedPxPos += 4;
return returner;
};
images = [{ width: width, height: height }];
}
for (let imageIndex = 0; imageIndex < images.length; imageIndex++) {
if (depth === 8) {
rawPos = mapImage8Bit(
images[imageIndex],
pxData,
getPxPos,
bpp,
data,
rawPos
);
} else {
mapImageCustomBit(
images[imageIndex],
pxData,
getPxPos,
bpp,
bits,
maxBit
);
}
}
if (depth === 8) {
if (rawPos !== data.length) {
throw new Error("extra data found");
}
} else {
bits.end();
}
return pxData;
};

158
node_modules/pngjs/lib/bitpacker.js generated vendored Normal file

@@ -0,0 +1,158 @@
"use strict";
let constants = require("./constants");
module.exports = function (dataIn, width, height, options) {
let outHasAlpha =
[constants.COLORTYPE_COLOR_ALPHA, constants.COLORTYPE_ALPHA].indexOf(
options.colorType
) !== -1;
if (options.colorType === options.inputColorType) {
let bigEndian = (function () {
let buffer = new ArrayBuffer(2);
new DataView(buffer).setInt16(0, 256, true /* littleEndian */);
// Int16Array uses the platform's endianness.
return new Int16Array(buffer)[0] !== 256;
})();
// If no need to convert to grayscale and alpha is present/absent in both, take a fast route
if (options.bitDepth === 8 || (options.bitDepth === 16 && bigEndian)) {
return dataIn;
}
}
// map to a UInt16 array if data is 16bit, fix endianness below
let data = options.bitDepth !== 16 ? dataIn : new Uint16Array(dataIn.buffer);
let maxValue = 255;
let inBpp = constants.COLORTYPE_TO_BPP_MAP[options.inputColorType];
if (inBpp === 4 && !options.inputHasAlpha) {
inBpp = 3;
}
let outBpp = constants.COLORTYPE_TO_BPP_MAP[options.colorType];
if (options.bitDepth === 16) {
maxValue = 65535;
outBpp *= 2;
}
let outData = Buffer.alloc(width * height * outBpp);
let inIndex = 0;
let outIndex = 0;
let bgColor = options.bgColor || {};
if (bgColor.red === undefined) {
bgColor.red = maxValue;
}
if (bgColor.green === undefined) {
bgColor.green = maxValue;
}
if (bgColor.blue === undefined) {
bgColor.blue = maxValue;
}
function getRGBA() {
let red;
let green;
let blue;
let alpha = maxValue;
switch (options.inputColorType) {
case constants.COLORTYPE_COLOR_ALPHA:
alpha = data[inIndex + 3];
red = data[inIndex];
green = data[inIndex + 1];
blue = data[inIndex + 2];
break;
case constants.COLORTYPE_COLOR:
red = data[inIndex];
green = data[inIndex + 1];
blue = data[inIndex + 2];
break;
case constants.COLORTYPE_ALPHA:
alpha = data[inIndex + 1];
red = data[inIndex];
green = red;
blue = red;
break;
case constants.COLORTYPE_GRAYSCALE:
red = data[inIndex];
green = red;
blue = red;
break;
default:
throw new Error(
"input color type:" +
options.inputColorType +
" is not supported at present"
);
}
if (options.inputHasAlpha) {
if (!outHasAlpha) {
alpha /= maxValue;
red = Math.min(
Math.max(Math.round((1 - alpha) * bgColor.red + alpha * red), 0),
maxValue
);
green = Math.min(
Math.max(Math.round((1 - alpha) * bgColor.green + alpha * green), 0),
maxValue
);
blue = Math.min(
Math.max(Math.round((1 - alpha) * bgColor.blue + alpha * blue), 0),
maxValue
);
}
}
return { red: red, green: green, blue: blue, alpha: alpha };
}
for (let y = 0; y < height; y++) {
for (let x = 0; x < width; x++) {
let rgba = getRGBA(data, inIndex);
switch (options.colorType) {
case constants.COLORTYPE_COLOR_ALPHA:
case constants.COLORTYPE_COLOR:
if (options.bitDepth === 8) {
outData[outIndex] = rgba.red;
outData[outIndex + 1] = rgba.green;
outData[outIndex + 2] = rgba.blue;
if (outHasAlpha) {
outData[outIndex + 3] = rgba.alpha;
}
} else {
outData.writeUInt16BE(rgba.red, outIndex);
outData.writeUInt16BE(rgba.green, outIndex + 2);
outData.writeUInt16BE(rgba.blue, outIndex + 4);
if (outHasAlpha) {
outData.writeUInt16BE(rgba.alpha, outIndex + 6);
}
}
break;
case constants.COLORTYPE_ALPHA:
case constants.COLORTYPE_GRAYSCALE: {
// Convert to grayscale and alpha
let grayscale = (rgba.red + rgba.green + rgba.blue) / 3;
if (options.bitDepth === 8) {
outData[outIndex] = grayscale;
if (outHasAlpha) {
outData[outIndex + 1] = rgba.alpha;
}
} else {
outData.writeUInt16BE(grayscale, outIndex);
if (outHasAlpha) {
outData.writeUInt16BE(rgba.alpha, outIndex + 2);
}
}
break;
}
default:
throw new Error("unrecognised color Type " + options.colorType);
}
inIndex += inBpp;
outIndex += outBpp;
}
}
return outData;
};

189
node_modules/pngjs/lib/chunkstream.js generated vendored Normal file

@@ -0,0 +1,189 @@
"use strict";
let util = require("util");
let Stream = require("stream");
let ChunkStream = (module.exports = function () {
Stream.call(this);
this._buffers = [];
this._buffered = 0;
this._reads = [];
this._paused = false;
this._encoding = "utf8";
this.writable = true;
});
util.inherits(ChunkStream, Stream);
ChunkStream.prototype.read = function (length, callback) {
this._reads.push({
length: Math.abs(length), // if length < 0 then at most this length
allowLess: length < 0,
func: callback,
});
process.nextTick(
function () {
this._process();
// its paused and there is not enought data then ask for more
if (this._paused && this._reads && this._reads.length > 0) {
this._paused = false;
this.emit("drain");
}
}.bind(this)
);
};
ChunkStream.prototype.write = function (data, encoding) {
if (!this.writable) {
this.emit("error", new Error("Stream not writable"));
return false;
}
let dataBuffer;
if (Buffer.isBuffer(data)) {
dataBuffer = data;
} else {
dataBuffer = Buffer.from(data, encoding || this._encoding);
}
this._buffers.push(dataBuffer);
this._buffered += dataBuffer.length;
this._process();
// ok if there are no more read requests
if (this._reads && this._reads.length === 0) {
this._paused = true;
}
return this.writable && !this._paused;
};
ChunkStream.prototype.end = function (data, encoding) {
if (data) {
this.write(data, encoding);
}
this.writable = false;
// already destroyed
if (!this._buffers) {
return;
}
// enqueue or handle end
if (this._buffers.length === 0) {
this._end();
} else {
this._buffers.push(null);
this._process();
}
};
ChunkStream.prototype.destroySoon = ChunkStream.prototype.end;
ChunkStream.prototype._end = function () {
if (this._reads.length > 0) {
this.emit("error", new Error("Unexpected end of input"));
}
this.destroy();
};
ChunkStream.prototype.destroy = function () {
if (!this._buffers) {
return;
}
this.writable = false;
this._reads = null;
this._buffers = null;
this.emit("close");
};
ChunkStream.prototype._processReadAllowingLess = function (read) {
// ok there is any data so that we can satisfy this request
this._reads.shift(); // == read
// first we need to peek into first buffer
let smallerBuf = this._buffers[0];
// ok there is more data than we need
if (smallerBuf.length > read.length) {
this._buffered -= read.length;
this._buffers[0] = smallerBuf.slice(read.length);
read.func.call(this, smallerBuf.slice(0, read.length));
} else {
// ok this is less than maximum length so use it all
this._buffered -= smallerBuf.length;
this._buffers.shift(); // == smallerBuf
read.func.call(this, smallerBuf);
}
};
ChunkStream.prototype._processRead = function (read) {
this._reads.shift(); // == read
let pos = 0;
let count = 0;
let data = Buffer.alloc(read.length);
// create buffer for all data
while (pos < read.length) {
let buf = this._buffers[count++];
let len = Math.min(buf.length, read.length - pos);
buf.copy(data, pos, 0, len);
pos += len;
// last buffer wasn't used all so just slice it and leave
if (len !== buf.length) {
this._buffers[--count] = buf.slice(len);
}
}
// remove all used buffers
if (count > 0) {
this._buffers.splice(0, count);
}
this._buffered -= read.length;
read.func.call(this, data);
};
ChunkStream.prototype._process = function () {
try {
// as long as there is any data and read requests
while (this._buffered > 0 && this._reads && this._reads.length > 0) {
let read = this._reads[0];
// read any data (but no more than length)
if (read.allowLess) {
this._processReadAllowingLess(read);
} else if (this._buffered >= read.length) {
// ok we can meet some expectations
this._processRead(read);
} else {
// not enought data to satisfy first request in queue
// so we need to wait for more
break;
}
}
if (this._buffers && !this.writable) {
this._end();
}
} catch (ex) {
this.emit("error", ex);
}
};

32
node_modules/pngjs/lib/constants.js generated vendored Normal file

@@ -0,0 +1,32 @@
"use strict";
module.exports = {
PNG_SIGNATURE: [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a],
TYPE_IHDR: 0x49484452,
TYPE_IEND: 0x49454e44,
TYPE_IDAT: 0x49444154,
TYPE_PLTE: 0x504c5445,
TYPE_tRNS: 0x74524e53, // eslint-disable-line camelcase
TYPE_gAMA: 0x67414d41, // eslint-disable-line camelcase
// color-type bits
COLORTYPE_GRAYSCALE: 0,
COLORTYPE_PALETTE: 1,
COLORTYPE_COLOR: 2,
COLORTYPE_ALPHA: 4, // e.g. grayscale and alpha
// color-type combinations
COLORTYPE_PALETTE_COLOR: 3,
COLORTYPE_COLOR_ALPHA: 6,
COLORTYPE_TO_BPP_MAP: {
0: 1,
2: 3,
3: 1,
4: 2,
6: 4,
},
GAMMA_DIVISION: 100000,
};

40
node_modules/pngjs/lib/crc.js generated vendored Normal file

@@ -0,0 +1,40 @@
"use strict";
let crcTable = [];
(function () {
for (let i = 0; i < 256; i++) {
let currentCrc = i;
for (let j = 0; j < 8; j++) {
if (currentCrc & 1) {
currentCrc = 0xedb88320 ^ (currentCrc >>> 1);
} else {
currentCrc = currentCrc >>> 1;
}
}
crcTable[i] = currentCrc;
}
})();
let CrcCalculator = (module.exports = function () {
this._crc = -1;
});
CrcCalculator.prototype.write = function (data) {
for (let i = 0; i < data.length; i++) {
this._crc = crcTable[(this._crc ^ data[i]) & 0xff] ^ (this._crc >>> 8);
}
return true;
};
CrcCalculator.prototype.crc32 = function () {
return this._crc ^ -1;
};
CrcCalculator.crc32 = function (buf) {
let crc = -1;
for (let i = 0; i < buf.length; i++) {
crc = crcTable[(crc ^ buf[i]) & 0xff] ^ (crc >>> 8);
}
return crc ^ -1;
};

171
node_modules/pngjs/lib/filter-pack.js generated vendored Normal file

@@ -0,0 +1,171 @@
"use strict";
let paethPredictor = require("./paeth-predictor");
function filterNone(pxData, pxPos, byteWidth, rawData, rawPos) {
for (let x = 0; x < byteWidth; x++) {
rawData[rawPos + x] = pxData[pxPos + x];
}
}
function filterSumNone(pxData, pxPos, byteWidth) {
let sum = 0;
let length = pxPos + byteWidth;
for (let i = pxPos; i < length; i++) {
sum += Math.abs(pxData[i]);
}
return sum;
}
function filterSub(pxData, pxPos, byteWidth, rawData, rawPos, bpp) {
for (let x = 0; x < byteWidth; x++) {
let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
let val = pxData[pxPos + x] - left;
rawData[rawPos + x] = val;
}
}
function filterSumSub(pxData, pxPos, byteWidth, bpp) {
let sum = 0;
for (let x = 0; x < byteWidth; x++) {
let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
let val = pxData[pxPos + x] - left;
sum += Math.abs(val);
}
return sum;
}
function filterUp(pxData, pxPos, byteWidth, rawData, rawPos) {
for (let x = 0; x < byteWidth; x++) {
let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
let val = pxData[pxPos + x] - up;
rawData[rawPos + x] = val;
}
}
function filterSumUp(pxData, pxPos, byteWidth) {
let sum = 0;
let length = pxPos + byteWidth;
for (let x = pxPos; x < length; x++) {
let up = pxPos > 0 ? pxData[x - byteWidth] : 0;
let val = pxData[x] - up;
sum += Math.abs(val);
}
return sum;
}
function filterAvg(pxData, pxPos, byteWidth, rawData, rawPos, bpp) {
for (let x = 0; x < byteWidth; x++) {
let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
let val = pxData[pxPos + x] - ((left + up) >> 1);
rawData[rawPos + x] = val;
}
}
function filterSumAvg(pxData, pxPos, byteWidth, bpp) {
let sum = 0;
for (let x = 0; x < byteWidth; x++) {
let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
let val = pxData[pxPos + x] - ((left + up) >> 1);
sum += Math.abs(val);
}
return sum;
}
function filterPaeth(pxData, pxPos, byteWidth, rawData, rawPos, bpp) {
for (let x = 0; x < byteWidth; x++) {
let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
let upleft =
pxPos > 0 && x >= bpp ? pxData[pxPos + x - (byteWidth + bpp)] : 0;
let val = pxData[pxPos + x] - paethPredictor(left, up, upleft);
rawData[rawPos + x] = val;
}
}
function filterSumPaeth(pxData, pxPos, byteWidth, bpp) {
let sum = 0;
for (let x = 0; x < byteWidth; x++) {
let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
let upleft =
pxPos > 0 && x >= bpp ? pxData[pxPos + x - (byteWidth + bpp)] : 0;
let val = pxData[pxPos + x] - paethPredictor(left, up, upleft);
sum += Math.abs(val);
}
return sum;
}
let filters = {
0: filterNone,
1: filterSub,
2: filterUp,
3: filterAvg,
4: filterPaeth,
};
let filterSums = {
0: filterSumNone,
1: filterSumSub,
2: filterSumUp,
3: filterSumAvg,
4: filterSumPaeth,
};
module.exports = function (pxData, width, height, options, bpp) {
let filterTypes;
if (!("filterType" in options) || options.filterType === -1) {
filterTypes = [0, 1, 2, 3, 4];
} else if (typeof options.filterType === "number") {
filterTypes = [options.filterType];
} else {
throw new Error("unrecognised filter types");
}
if (options.bitDepth === 16) {
bpp *= 2;
}
let byteWidth = width * bpp;
let rawPos = 0;
let pxPos = 0;
let rawData = Buffer.alloc((byteWidth + 1) * height);
let sel = filterTypes[0];
for (let y = 0; y < height; y++) {
if (filterTypes.length > 1) {
// find best filter for this line (with lowest sum of values)
let min = Infinity;
for (let i = 0; i < filterTypes.length; i++) {
let sum = filterSums[filterTypes[i]](pxData, pxPos, byteWidth, bpp);
if (sum < min) {
sel = filterTypes[i];
min = sum;
}
}
}
rawData[rawPos] = sel;
rawPos++;
filters[sel](pxData, pxPos, byteWidth, rawData, rawPos, bpp);
rawPos += byteWidth;
pxPos += byteWidth;
}
return rawData;
};

24
node_modules/pngjs/lib/filter-parse-async.js generated vendored Normal file

@@ -0,0 +1,24 @@
"use strict";
let util = require("util");
let ChunkStream = require("./chunkstream");
let Filter = require("./filter-parse");
let FilterAsync = (module.exports = function (bitmapInfo) {
ChunkStream.call(this);
let buffers = [];
let that = this;
this._filter = new Filter(bitmapInfo, {
read: this.read.bind(this),
write: function (buffer) {
buffers.push(buffer);
},
complete: function () {
that.emit("complete", Buffer.concat(buffers));
},
});
this._filter.start();
});
util.inherits(FilterAsync, ChunkStream);

21
node_modules/pngjs/lib/filter-parse-sync.js generated vendored Normal file

@@ -0,0 +1,21 @@
"use strict";
let SyncReader = require("./sync-reader");
let Filter = require("./filter-parse");
exports.process = function (inBuffer, bitmapInfo) {
let outBuffers = [];
let reader = new SyncReader(inBuffer);
let filter = new Filter(bitmapInfo, {
read: reader.read.bind(reader),
write: function (bufferPart) {
outBuffers.push(bufferPart);
},
complete: function () {},
});
filter.start();
reader.process();
return Buffer.concat(outBuffers);
};

177
node_modules/pngjs/lib/filter-parse.js generated vendored Normal file

@@ -0,0 +1,177 @@
"use strict";
let interlaceUtils = require("./interlace");
let paethPredictor = require("./paeth-predictor");
function getByteWidth(width, bpp, depth) {
let byteWidth = width * bpp;
if (depth !== 8) {
byteWidth = Math.ceil(byteWidth / (8 / depth));
}
return byteWidth;
}
let Filter = (module.exports = function (bitmapInfo, dependencies) {
let width = bitmapInfo.width;
let height = bitmapInfo.height;
let interlace = bitmapInfo.interlace;
let bpp = bitmapInfo.bpp;
let depth = bitmapInfo.depth;
this.read = dependencies.read;
this.write = dependencies.write;
this.complete = dependencies.complete;
this._imageIndex = 0;
this._images = [];
if (interlace) {
let passes = interlaceUtils.getImagePasses(width, height);
for (let i = 0; i < passes.length; i++) {
this._images.push({
byteWidth: getByteWidth(passes[i].width, bpp, depth),
height: passes[i].height,
lineIndex: 0,
});
}
} else {
this._images.push({
byteWidth: getByteWidth(width, bpp, depth),
height: height,
lineIndex: 0,
});
}
// when filtering the line we look at the pixel to the left
// the spec also says it is done on a byte level regardless of the number of pixels
// so if the depth is byte compatible (8 or 16) we subtract the bpp in order to compare back
// a pixel rather than just a different byte part. However if we are sub byte, we ignore.
if (depth === 8) {
this._xComparison = bpp;
} else if (depth === 16) {
this._xComparison = bpp * 2;
} else {
this._xComparison = 1;
}
});
Filter.prototype.start = function () {
this.read(
this._images[this._imageIndex].byteWidth + 1,
this._reverseFilterLine.bind(this)
);
};
Filter.prototype._unFilterType1 = function (
rawData,
unfilteredLine,
byteWidth
) {
let xComparison = this._xComparison;
let xBiggerThan = xComparison - 1;
for (let x = 0; x < byteWidth; x++) {
let rawByte = rawData[1 + x];
let f1Left = x > xBiggerThan ? unfilteredLine[x - xComparison] : 0;
unfilteredLine[x] = rawByte + f1Left;
}
};
Filter.prototype._unFilterType2 = function (
rawData,
unfilteredLine,
byteWidth
) {
let lastLine = this._lastLine;
for (let x = 0; x < byteWidth; x++) {
let rawByte = rawData[1 + x];
let f2Up = lastLine ? lastLine[x] : 0;
unfilteredLine[x] = rawByte + f2Up;
}
};
Filter.prototype._unFilterType3 = function (
rawData,
unfilteredLine,
byteWidth
) {
let xComparison = this._xComparison;
let xBiggerThan = xComparison - 1;
let lastLine = this._lastLine;
for (let x = 0; x < byteWidth; x++) {
let rawByte = rawData[1 + x];
let f3Up = lastLine ? lastLine[x] : 0;
let f3Left = x > xBiggerThan ? unfilteredLine[x - xComparison] : 0;
let f3Add = Math.floor((f3Left + f3Up) / 2);
unfilteredLine[x] = rawByte + f3Add;
}
};
Filter.prototype._unFilterType4 = function (
rawData,
unfilteredLine,
byteWidth
) {
let xComparison = this._xComparison;
let xBiggerThan = xComparison - 1;
let lastLine = this._lastLine;
for (let x = 0; x < byteWidth; x++) {
let rawByte = rawData[1 + x];
let f4Up = lastLine ? lastLine[x] : 0;
let f4Left = x > xBiggerThan ? unfilteredLine[x - xComparison] : 0;
let f4UpLeft = x > xBiggerThan && lastLine ? lastLine[x - xComparison] : 0;
let f4Add = paethPredictor(f4Left, f4Up, f4UpLeft);
unfilteredLine[x] = rawByte + f4Add;
}
};
Filter.prototype._reverseFilterLine = function (rawData) {
let filter = rawData[0];
let unfilteredLine;
let currentImage = this._images[this._imageIndex];
let byteWidth = currentImage.byteWidth;
if (filter === 0) {
unfilteredLine = rawData.slice(1, byteWidth + 1);
} else {
unfilteredLine = Buffer.alloc(byteWidth);
switch (filter) {
case 1:
this._unFilterType1(rawData, unfilteredLine, byteWidth);
break;
case 2:
this._unFilterType2(rawData, unfilteredLine, byteWidth);
break;
case 3:
this._unFilterType3(rawData, unfilteredLine, byteWidth);
break;
case 4:
this._unFilterType4(rawData, unfilteredLine, byteWidth);
break;
default:
throw new Error("Unrecognised filter type - " + filter);
}
}
this.write(unfilteredLine);
currentImage.lineIndex++;
if (currentImage.lineIndex >= currentImage.height) {
this._lastLine = null;
this._imageIndex++;
currentImage = this._images[this._imageIndex];
} else {
this._lastLine = unfilteredLine;
}
if (currentImage) {
// read, using the byte width that may be from the new current image
this.read(currentImage.byteWidth + 1, this._reverseFilterLine.bind(this));
} else {
this._lastLine = null;
this.complete();
}
};

93
node_modules/pngjs/lib/format-normaliser.js generated vendored Normal file

@@ -0,0 +1,93 @@
"use strict";
function dePalette(indata, outdata, width, height, palette) {
let pxPos = 0;
// use values from palette
for (let y = 0; y < height; y++) {
for (let x = 0; x < width; x++) {
let color = palette[indata[pxPos]];
if (!color) {
throw new Error("index " + indata[pxPos] + " not in palette");
}
for (let i = 0; i < 4; i++) {
outdata[pxPos + i] = color[i];
}
pxPos += 4;
}
}
}
function replaceTransparentColor(indata, outdata, width, height, transColor) {
let pxPos = 0;
for (let y = 0; y < height; y++) {
for (let x = 0; x < width; x++) {
let makeTrans = false;
if (transColor.length === 1) {
if (transColor[0] === indata[pxPos]) {
makeTrans = true;
}
} else if (
transColor[0] === indata[pxPos] &&
transColor[1] === indata[pxPos + 1] &&
transColor[2] === indata[pxPos + 2]
) {
makeTrans = true;
}
if (makeTrans) {
for (let i = 0; i < 4; i++) {
outdata[pxPos + i] = 0;
}
}
pxPos += 4;
}
}
}
function scaleDepth(indata, outdata, width, height, depth) {
let maxOutSample = 255;
let maxInSample = Math.pow(2, depth) - 1;
let pxPos = 0;
for (let y = 0; y < height; y++) {
for (let x = 0; x < width; x++) {
for (let i = 0; i < 4; i++) {
outdata[pxPos + i] = Math.floor(
(indata[pxPos + i] * maxOutSample) / maxInSample + 0.5
);
}
pxPos += 4;
}
}
}
module.exports = function (indata, imageData, skipRescale = false) {
let depth = imageData.depth;
let width = imageData.width;
let height = imageData.height;
let colorType = imageData.colorType;
let transColor = imageData.transColor;
let palette = imageData.palette;
let outdata = indata; // only different for 16 bits
if (colorType === 3) {
// paletted
dePalette(indata, outdata, width, height, palette);
} else {
if (transColor) {
replaceTransparentColor(indata, outdata, width, height, transColor);
}
// if it needs scaling
if (depth !== 8 && !skipRescale) {
// if we need to change the buffer size
if (depth === 16) {
outdata = Buffer.alloc(width * height * 4);
}
scaleDepth(indata, outdata, width, height, depth);
}
}
return outdata;
};

95
node_modules/pngjs/lib/interlace.js generated vendored Normal file

@@ -0,0 +1,95 @@
"use strict";
// Adam 7
// 0 1 2 3 4 5 6 7
// 0 x 6 4 6 x 6 4 6
// 1 7 7 7 7 7 7 7 7
// 2 5 6 5 6 5 6 5 6
// 3 7 7 7 7 7 7 7 7
// 4 3 6 4 6 3 6 4 6
// 5 7 7 7 7 7 7 7 7
// 6 5 6 5 6 5 6 5 6
// 7 7 7 7 7 7 7 7 7
let imagePasses = [
{
// pass 1 - 1px
x: [0],
y: [0],
},
{
// pass 2 - 1px
x: [4],
y: [0],
},
{
// pass 3 - 2px
x: [0, 4],
y: [4],
},
{
// pass 4 - 4px
x: [2, 6],
y: [0, 4],
},
{
// pass 5 - 8px
x: [0, 2, 4, 6],
y: [2, 6],
},
{
// pass 6 - 16px
x: [1, 3, 5, 7],
y: [0, 2, 4, 6],
},
{
// pass 7 - 32px
x: [0, 1, 2, 3, 4, 5, 6, 7],
y: [1, 3, 5, 7],
},
];
exports.getImagePasses = function (width, height) {
let images = [];
let xLeftOver = width % 8;
let yLeftOver = height % 8;
let xRepeats = (width - xLeftOver) / 8;
let yRepeats = (height - yLeftOver) / 8;
for (let i = 0; i < imagePasses.length; i++) {
let pass = imagePasses[i];
let passWidth = xRepeats * pass.x.length;
let passHeight = yRepeats * pass.y.length;
for (let j = 0; j < pass.x.length; j++) {
if (pass.x[j] < xLeftOver) {
passWidth++;
} else {
break;
}
}
for (let j = 0; j < pass.y.length; j++) {
if (pass.y[j] < yLeftOver) {
passHeight++;
} else {
break;
}
}
if (passWidth > 0 && passHeight > 0) {
images.push({ width: passWidth, height: passHeight, index: i });
}
}
return images;
};
exports.getInterlaceIterator = function (width) {
return function (x, y, pass) {
let outerXLeftOver = x % imagePasses[pass].x.length;
let outerX =
((x - outerXLeftOver) / imagePasses[pass].x.length) * 8 +
imagePasses[pass].x[outerXLeftOver];
let outerYLeftOver = y % imagePasses[pass].y.length;
let outerY =
((y - outerYLeftOver) / imagePasses[pass].y.length) * 8 +
imagePasses[pass].y[outerYLeftOver];
return outerX * 4 + outerY * width * 4;
};
};

50
node_modules/pngjs/lib/packer-async.js generated vendored Normal file

@@ -0,0 +1,50 @@
"use strict";
let util = require("util");
let Stream = require("stream");
let constants = require("./constants");
let Packer = require("./packer");
let PackerAsync = (module.exports = function (opt) {
Stream.call(this);
let options = opt || {};
this._packer = new Packer(options);
this._deflate = this._packer.createDeflate();
this.readable = true;
});
util.inherits(PackerAsync, Stream);
PackerAsync.prototype.pack = function (data, width, height, gamma) {
// Signature
this.emit("data", Buffer.from(constants.PNG_SIGNATURE));
this.emit("data", this._packer.packIHDR(width, height));
if (gamma) {
this.emit("data", this._packer.packGAMA(gamma));
}
let filteredData = this._packer.filterData(data, width, height);
// compress it
this._deflate.on("error", this.emit.bind(this, "error"));
this._deflate.on(
"data",
function (compressedData) {
this.emit("data", this._packer.packIDAT(compressedData));
}.bind(this)
);
this._deflate.on(
"end",
function () {
this.emit("data", this._packer.packIEND());
this.emit("end");
}.bind(this)
);
this._deflate.end(filteredData);
};

56
node_modules/pngjs/lib/packer-sync.js generated vendored Normal file

@@ -0,0 +1,56 @@
"use strict";
let hasSyncZlib = true;
let zlib = require("zlib");
if (!zlib.deflateSync) {
hasSyncZlib = false;
}
let constants = require("./constants");
let Packer = require("./packer");
module.exports = function (metaData, opt) {
if (!hasSyncZlib) {
throw new Error(
"To use the sync capability of this library in old node versions, please pin pngjs to v2.3.0"
);
}
let options = opt || {};
let packer = new Packer(options);
let chunks = [];
// Signature
chunks.push(Buffer.from(constants.PNG_SIGNATURE));
// Header
chunks.push(packer.packIHDR(metaData.width, metaData.height));
if (metaData.gamma) {
chunks.push(packer.packGAMA(metaData.gamma));
}
let filteredData = packer.filterData(
metaData.data,
metaData.width,
metaData.height
);
// compress it
let compressedData = zlib.deflateSync(
filteredData,
packer.getDeflateOptions()
);
filteredData = null;
if (!compressedData || !compressedData.length) {
throw new Error("bad png - invalid compressed data response");
}
chunks.push(packer.packIDAT(compressedData));
// End
chunks.push(packer.packIEND());
return Buffer.concat(chunks);
};

129
node_modules/pngjs/lib/packer.js generated vendored Normal file

@@ -0,0 +1,129 @@
"use strict";
let constants = require("./constants");
let CrcStream = require("./crc");
let bitPacker = require("./bitpacker");
let filter = require("./filter-pack");
let zlib = require("zlib");
let Packer = (module.exports = function (options) {
this._options = options;
options.deflateChunkSize = options.deflateChunkSize || 32 * 1024;
options.deflateLevel =
options.deflateLevel != null ? options.deflateLevel : 9;
options.deflateStrategy =
options.deflateStrategy != null ? options.deflateStrategy : 3;
options.inputHasAlpha =
options.inputHasAlpha != null ? options.inputHasAlpha : true;
options.deflateFactory = options.deflateFactory || zlib.createDeflate;
options.bitDepth = options.bitDepth || 8;
// This is outputColorType
options.colorType =
typeof options.colorType === "number"
? options.colorType
: constants.COLORTYPE_COLOR_ALPHA;
options.inputColorType =
typeof options.inputColorType === "number"
? options.inputColorType
: constants.COLORTYPE_COLOR_ALPHA;
if (
[
constants.COLORTYPE_GRAYSCALE,
constants.COLORTYPE_COLOR,
constants.COLORTYPE_COLOR_ALPHA,
constants.COLORTYPE_ALPHA,
].indexOf(options.colorType) === -1
) {
throw new Error(
"option color type:" + options.colorType + " is not supported at present"
);
}
if (
[
constants.COLORTYPE_GRAYSCALE,
constants.COLORTYPE_COLOR,
constants.COLORTYPE_COLOR_ALPHA,
constants.COLORTYPE_ALPHA,
].indexOf(options.inputColorType) === -1
) {
throw new Error(
"option input color type:" +
options.inputColorType +
" is not supported at present"
);
}
if (options.bitDepth !== 8 && options.bitDepth !== 16) {
throw new Error(
"option bit depth:" + options.bitDepth + " is not supported at present"
);
}
});
Packer.prototype.getDeflateOptions = function () {
return {
chunkSize: this._options.deflateChunkSize,
level: this._options.deflateLevel,
strategy: this._options.deflateStrategy,
};
};
Packer.prototype.createDeflate = function () {
return this._options.deflateFactory(this.getDeflateOptions());
};
Packer.prototype.filterData = function (data, width, height) {
// convert to correct format for filtering (e.g. right bpp and bit depth)
let packedData = bitPacker(data, width, height, this._options);
// filter pixel data
let bpp = constants.COLORTYPE_TO_BPP_MAP[this._options.colorType];
let filteredData = filter(packedData, width, height, this._options, bpp);
return filteredData;
};
Packer.prototype._packChunk = function (type, data) {
let len = data ? data.length : 0;
let buf = Buffer.alloc(len + 12);
buf.writeUInt32BE(len, 0);
buf.writeUInt32BE(type, 4);
if (data) {
data.copy(buf, 8);
}
buf.writeInt32BE(
CrcStream.crc32(buf.slice(4, buf.length - 4)),
buf.length - 4
);
return buf;
};
Packer.prototype.packGAMA = function (gamma) {
let buf = Buffer.alloc(4);
buf.writeUInt32BE(Math.floor(gamma * constants.GAMMA_DIVISION), 0);
return this._packChunk(constants.TYPE_gAMA, buf);
};
Packer.prototype.packIHDR = function (width, height) {
let buf = Buffer.alloc(13);
buf.writeUInt32BE(width, 0);
buf.writeUInt32BE(height, 4);
buf[8] = this._options.bitDepth; // Bit depth
buf[9] = this._options.colorType; // colorType
buf[10] = 0; // compression
buf[11] = 0; // filter
buf[12] = 0; // interlace
return this._packChunk(constants.TYPE_IHDR, buf);
};
Packer.prototype.packIDAT = function (data) {
return this._packChunk(constants.TYPE_IDAT, data);
};
Packer.prototype.packIEND = function () {
return this._packChunk(constants.TYPE_IEND, null);
};

16
node_modules/pngjs/lib/paeth-predictor.js generated vendored Normal file

@@ -0,0 +1,16 @@
"use strict";
module.exports = function paethPredictor(left, above, upLeft) {
let paeth = left + above - upLeft;
let pLeft = Math.abs(paeth - left);
let pAbove = Math.abs(paeth - above);
let pUpLeft = Math.abs(paeth - upLeft);
if (pLeft <= pAbove && pLeft <= pUpLeft) {
return left;
}
if (pAbove <= pUpLeft) {
return above;
}
return upLeft;
};

169
node_modules/pngjs/lib/parser-async.js generated vendored Normal file

@@ -0,0 +1,169 @@
"use strict";
let util = require("util");
let zlib = require("zlib");
let ChunkStream = require("./chunkstream");
let FilterAsync = require("./filter-parse-async");
let Parser = require("./parser");
let bitmapper = require("./bitmapper");
let formatNormaliser = require("./format-normaliser");
let ParserAsync = (module.exports = function (options) {
ChunkStream.call(this);
this._parser = new Parser(options, {
read: this.read.bind(this),
error: this._handleError.bind(this),
metadata: this._handleMetaData.bind(this),
gamma: this.emit.bind(this, "gamma"),
palette: this._handlePalette.bind(this),
transColor: this._handleTransColor.bind(this),
finished: this._finished.bind(this),
inflateData: this._inflateData.bind(this),
simpleTransparency: this._simpleTransparency.bind(this),
headersFinished: this._headersFinished.bind(this),
});
this._options = options;
this.writable = true;
this._parser.start();
});
util.inherits(ParserAsync, ChunkStream);
ParserAsync.prototype._handleError = function (err) {
this.emit("error", err);
this.writable = false;
this.destroy();
if (this._inflate && this._inflate.destroy) {
this._inflate.destroy();
}
if (this._filter) {
this._filter.destroy();
// For backward compatibility with Node 7 and below.
// Suppress errors due to _inflate calling write() even after
// it's destroy()'ed.
this._filter.on("error", function () {});
}
this.errord = true;
};
ParserAsync.prototype._inflateData = function (data) {
if (!this._inflate) {
if (this._bitmapInfo.interlace) {
this._inflate = zlib.createInflate();
this._inflate.on("error", this.emit.bind(this, "error"));
this._filter.on("complete", this._complete.bind(this));
this._inflate.pipe(this._filter);
} else {
let rowSize =
((this._bitmapInfo.width *
this._bitmapInfo.bpp *
this._bitmapInfo.depth +
7) >>
3) +
1;
let imageSize = rowSize * this._bitmapInfo.height;
let chunkSize = Math.max(imageSize, zlib.Z_MIN_CHUNK);
this._inflate = zlib.createInflate({ chunkSize: chunkSize });
let leftToInflate = imageSize;
let emitError = this.emit.bind(this, "error");
this._inflate.on("error", function (err) {
if (!leftToInflate) {
return;
}
emitError(err);
});
this._filter.on("complete", this._complete.bind(this));
let filterWrite = this._filter.write.bind(this._filter);
this._inflate.on("data", function (chunk) {
if (!leftToInflate) {
return;
}
if (chunk.length > leftToInflate) {
chunk = chunk.slice(0, leftToInflate);
}
leftToInflate -= chunk.length;
filterWrite(chunk);
});
this._inflate.on("end", this._filter.end.bind(this._filter));
}
}
this._inflate.write(data);
};
ParserAsync.prototype._handleMetaData = function (metaData) {
this._metaData = metaData;
this._bitmapInfo = Object.create(metaData);
this._filter = new FilterAsync(this._bitmapInfo);
};
ParserAsync.prototype._handleTransColor = function (transColor) {
this._bitmapInfo.transColor = transColor;
};
ParserAsync.prototype._handlePalette = function (palette) {
this._bitmapInfo.palette = palette;
};
ParserAsync.prototype._simpleTransparency = function () {
this._metaData.alpha = true;
};
ParserAsync.prototype._headersFinished = function () {
// Up until this point, we don't know if we have a tRNS chunk (alpha)
// so we can't emit metadata any earlier
this.emit("metadata", this._metaData);
};
ParserAsync.prototype._finished = function () {
if (this.errord) {
return;
}
if (!this._inflate) {
this.emit("error", "No Inflate block");
} else {
// no more data to inflate
this._inflate.end();
}
};
ParserAsync.prototype._complete = function (filteredData) {
if (this.errord) {
return;
}
let normalisedBitmapData;
try {
let bitmapData = bitmapper.dataToBitMap(filteredData, this._bitmapInfo);
normalisedBitmapData = formatNormaliser(
bitmapData,
this._bitmapInfo,
this._options.skipRescale
);
bitmapData = null;
} catch (ex) {
this._handleError(ex);
return;
}
this.emit("parsed", normalisedBitmapData);
};

112
node_modules/pngjs/lib/parser-sync.js generated vendored Normal file

@@ -0,0 +1,112 @@
"use strict";
let hasSyncZlib = true;
let zlib = require("zlib");
let inflateSync = require("./sync-inflate");
if (!zlib.deflateSync) {
hasSyncZlib = false;
}
let SyncReader = require("./sync-reader");
let FilterSync = require("./filter-parse-sync");
let Parser = require("./parser");
let bitmapper = require("./bitmapper");
let formatNormaliser = require("./format-normaliser");
module.exports = function (buffer, options) {
if (!hasSyncZlib) {
throw new Error(
"To use the sync capability of this library in old node versions, please pin pngjs to v2.3.0"
);
}
let err;
function handleError(_err_) {
err = _err_;
}
let metaData;
function handleMetaData(_metaData_) {
metaData = _metaData_;
}
function handleTransColor(transColor) {
metaData.transColor = transColor;
}
function handlePalette(palette) {
metaData.palette = palette;
}
function handleSimpleTransparency() {
metaData.alpha = true;
}
let gamma;
function handleGamma(_gamma_) {
gamma = _gamma_;
}
let inflateDataList = [];
function handleInflateData(inflatedData) {
inflateDataList.push(inflatedData);
}
let reader = new SyncReader(buffer);
let parser = new Parser(options, {
read: reader.read.bind(reader),
error: handleError,
metadata: handleMetaData,
gamma: handleGamma,
palette: handlePalette,
transColor: handleTransColor,
inflateData: handleInflateData,
simpleTransparency: handleSimpleTransparency,
});
parser.start();
reader.process();
if (err) {
throw err;
}
//join together the inflate datas
let inflateData = Buffer.concat(inflateDataList);
inflateDataList.length = 0;
let inflatedData;
if (metaData.interlace) {
inflatedData = zlib.inflateSync(inflateData);
} else {
let rowSize =
((metaData.width * metaData.bpp * metaData.depth + 7) >> 3) + 1;
let imageSize = rowSize * metaData.height;
inflatedData = inflateSync(inflateData, {
chunkSize: imageSize,
maxLength: imageSize,
});
}
inflateData = null;
if (!inflatedData || !inflatedData.length) {
throw new Error("bad png - invalid inflate data response");
}
let unfilteredData = FilterSync.process(inflatedData, metaData);
inflateData = null;
let bitmapData = bitmapper.dataToBitMap(unfilteredData, metaData);
unfilteredData = null;
let normalisedBitmapData = formatNormaliser(
bitmapData,
metaData,
options.skipRescale
);
metaData.data = normalisedBitmapData;
metaData.gamma = gamma || 0;
return metaData;
};

290
node_modules/pngjs/lib/parser.js generated vendored Normal file

@@ -0,0 +1,290 @@
"use strict";
let constants = require("./constants");
let CrcCalculator = require("./crc");
let Parser = (module.exports = function (options, dependencies) {
this._options = options;
options.checkCRC = options.checkCRC !== false;
this._hasIHDR = false;
this._hasIEND = false;
this._emittedHeadersFinished = false;
// input flags/metadata
this._palette = [];
this._colorType = 0;
this._chunks = {};
this._chunks[constants.TYPE_IHDR] = this._handleIHDR.bind(this);
this._chunks[constants.TYPE_IEND] = this._handleIEND.bind(this);
this._chunks[constants.TYPE_IDAT] = this._handleIDAT.bind(this);
this._chunks[constants.TYPE_PLTE] = this._handlePLTE.bind(this);
this._chunks[constants.TYPE_tRNS] = this._handleTRNS.bind(this);
this._chunks[constants.TYPE_gAMA] = this._handleGAMA.bind(this);
this.read = dependencies.read;
this.error = dependencies.error;
this.metadata = dependencies.metadata;
this.gamma = dependencies.gamma;
this.transColor = dependencies.transColor;
this.palette = dependencies.palette;
this.parsed = dependencies.parsed;
this.inflateData = dependencies.inflateData;
this.finished = dependencies.finished;
this.simpleTransparency = dependencies.simpleTransparency;
this.headersFinished = dependencies.headersFinished || function () {};
});
Parser.prototype.start = function () {
this.read(constants.PNG_SIGNATURE.length, this._parseSignature.bind(this));
};
Parser.prototype._parseSignature = function (data) {
let signature = constants.PNG_SIGNATURE;
for (let i = 0; i < signature.length; i++) {
if (data[i] !== signature[i]) {
this.error(new Error("Invalid file signature"));
return;
}
}
this.read(8, this._parseChunkBegin.bind(this));
};
Parser.prototype._parseChunkBegin = function (data) {
// chunk content length
let length = data.readUInt32BE(0);
// chunk type
let type = data.readUInt32BE(4);
let name = "";
for (let i = 4; i < 8; i++) {
name += String.fromCharCode(data[i]);
}
//console.log('chunk ', name, length);
// chunk flags
let ancillary = Boolean(data[4] & 0x20); // or critical
// priv = Boolean(data[5] & 0x20), // or public
// safeToCopy = Boolean(data[7] & 0x20); // or unsafe
if (!this._hasIHDR && type !== constants.TYPE_IHDR) {
this.error(new Error("Expected IHDR on beggining"));
return;
}
this._crc = new CrcCalculator();
this._crc.write(Buffer.from(name));
if (this._chunks[type]) {
return this._chunks[type](length);
}
if (!ancillary) {
this.error(new Error("Unsupported critical chunk type " + name));
return;
}
this.read(length + 4, this._skipChunk.bind(this));
};
Parser.prototype._skipChunk = function (/*data*/) {
this.read(8, this._parseChunkBegin.bind(this));
};
Parser.prototype._handleChunkEnd = function () {
this.read(4, this._parseChunkEnd.bind(this));
};
Parser.prototype._parseChunkEnd = function (data) {
let fileCrc = data.readInt32BE(0);
let calcCrc = this._crc.crc32();
// check CRC
if (this._options.checkCRC && calcCrc !== fileCrc) {
this.error(new Error("Crc error - " + fileCrc + " - " + calcCrc));
return;
}
if (!this._hasIEND) {
this.read(8, this._parseChunkBegin.bind(this));
}
};
Parser.prototype._handleIHDR = function (length) {
this.read(length, this._parseIHDR.bind(this));
};
Parser.prototype._parseIHDR = function (data) {
this._crc.write(data);
let width = data.readUInt32BE(0);
let height = data.readUInt32BE(4);
let depth = data[8];
let colorType = data[9]; // bits: 1 palette, 2 color, 4 alpha
let compr = data[10];
let filter = data[11];
let interlace = data[12];
// console.log(' width', width, 'height', height,
// 'depth', depth, 'colorType', colorType,
// 'compr', compr, 'filter', filter, 'interlace', interlace
// );
if (
depth !== 8 &&
depth !== 4 &&
depth !== 2 &&
depth !== 1 &&
depth !== 16
) {
this.error(new Error("Unsupported bit depth " + depth));
return;
}
if (!(colorType in constants.COLORTYPE_TO_BPP_MAP)) {
this.error(new Error("Unsupported color type"));
return;
}
if (compr !== 0) {
this.error(new Error("Unsupported compression method"));
return;
}
if (filter !== 0) {
this.error(new Error("Unsupported filter method"));
return;
}
if (interlace !== 0 && interlace !== 1) {
this.error(new Error("Unsupported interlace method"));
return;
}
this._colorType = colorType;
let bpp = constants.COLORTYPE_TO_BPP_MAP[this._colorType];
this._hasIHDR = true;
this.metadata({
width: width,
height: height,
depth: depth,
interlace: Boolean(interlace),
palette: Boolean(colorType & constants.COLORTYPE_PALETTE),
color: Boolean(colorType & constants.COLORTYPE_COLOR),
alpha: Boolean(colorType & constants.COLORTYPE_ALPHA),
bpp: bpp,
colorType: colorType,
});
this._handleChunkEnd();
};
Parser.prototype._handlePLTE = function (length) {
this.read(length, this._parsePLTE.bind(this));
};
Parser.prototype._parsePLTE = function (data) {
this._crc.write(data);
let entries = Math.floor(data.length / 3);
// console.log('Palette:', entries);
for (let i = 0; i < entries; i++) {
this._palette.push([data[i * 3], data[i * 3 + 1], data[i * 3 + 2], 0xff]);
}
this.palette(this._palette);
this._handleChunkEnd();
};
Parser.prototype._handleTRNS = function (length) {
this.simpleTransparency();
this.read(length, this._parseTRNS.bind(this));
};
Parser.prototype._parseTRNS = function (data) {
this._crc.write(data);
// palette
if (this._colorType === constants.COLORTYPE_PALETTE_COLOR) {
if (this._palette.length === 0) {
this.error(new Error("Transparency chunk must be after palette"));
return;
}
if (data.length > this._palette.length) {
this.error(new Error("More transparent colors than palette size"));
return;
}
for (let i = 0; i < data.length; i++) {
this._palette[i][3] = data[i];
}
this.palette(this._palette);
}
// for colorType 0 (grayscale) and 2 (rgb)
// there might be one gray/color defined as transparent
if (this._colorType === constants.COLORTYPE_GRAYSCALE) {
// grey, 2 bytes
this.transColor([data.readUInt16BE(0)]);
}
if (this._colorType === constants.COLORTYPE_COLOR) {
this.transColor([
data.readUInt16BE(0),
data.readUInt16BE(2),
data.readUInt16BE(4),
]);
}
this._handleChunkEnd();
};
Parser.prototype._handleGAMA = function (length) {
this.read(length, this._parseGAMA.bind(this));
};
Parser.prototype._parseGAMA = function (data) {
this._crc.write(data);
this.gamma(data.readUInt32BE(0) / constants.GAMMA_DIVISION);
this._handleChunkEnd();
};
Parser.prototype._handleIDAT = function (length) {
if (!this._emittedHeadersFinished) {
this._emittedHeadersFinished = true;
this.headersFinished();
}
this.read(-length, this._parseIDAT.bind(this, length));
};
Parser.prototype._parseIDAT = function (length, data) {
this._crc.write(data);
if (
this._colorType === constants.COLORTYPE_PALETTE_COLOR &&
this._palette.length === 0
) {
throw new Error("Expected palette not found");
}
this.inflateData(data);
let leftOverLength = length - data.length;
if (leftOverLength > 0) {
this._handleIDAT(leftOverLength);
} else {
this._handleChunkEnd();
}
};
Parser.prototype._handleIEND = function (length) {
this.read(length, this._parseIEND.bind(this));
};
Parser.prototype._parseIEND = function (data) {
this._crc.write(data);
this._hasIEND = true;
this._handleChunkEnd();
if (this.finished) {
this.finished();
}
};

12
node_modules/pngjs/lib/png-sync.js generated vendored Normal file
View File

@@ -0,0 +1,12 @@
"use strict";
let parse = require("./parser-sync");
let pack = require("./packer-sync");
exports.read = function (buffer, options) {
return parse(buffer, options || {});
};
exports.write = function (png, options) {
return pack(png, options);
};

194
node_modules/pngjs/lib/png.js generated vendored Normal file
View File

@@ -0,0 +1,194 @@
"use strict";
let util = require("util");
let Stream = require("stream");
let Parser = require("./parser-async");
let Packer = require("./packer-async");
let PNGSync = require("./png-sync");
let PNG = (exports.PNG = function (options) {
Stream.call(this);
options = options || {}; // eslint-disable-line no-param-reassign
// coerce pixel dimensions to integers (also coerces undefined -> 0):
this.width = options.width | 0;
this.height = options.height | 0;
this.data =
this.width > 0 && this.height > 0
? Buffer.alloc(4 * this.width * this.height)
: null;
if (options.fill && this.data) {
this.data.fill(0);
}
this.gamma = 0;
this.readable = this.writable = true;
this._parser = new Parser(options);
this._parser.on("error", this.emit.bind(this, "error"));
this._parser.on("close", this._handleClose.bind(this));
this._parser.on("metadata", this._metadata.bind(this));
this._parser.on("gamma", this._gamma.bind(this));
this._parser.on(
"parsed",
function (data) {
this.data = data;
this.emit("parsed", data);
}.bind(this)
);
this._packer = new Packer(options);
this._packer.on("data", this.emit.bind(this, "data"));
this._packer.on("end", this.emit.bind(this, "end"));
this._parser.on("close", this._handleClose.bind(this));
this._packer.on("error", this.emit.bind(this, "error"));
});
util.inherits(PNG, Stream);
PNG.sync = PNGSync;
PNG.prototype.pack = function () {
if (!this.data || !this.data.length) {
this.emit("error", "No data provided");
return this;
}
process.nextTick(
function () {
this._packer.pack(this.data, this.width, this.height, this.gamma);
}.bind(this)
);
return this;
};
PNG.prototype.parse = function (data, callback) {
if (callback) {
let onParsed, onError;
onParsed = function (parsedData) {
this.removeListener("error", onError);
this.data = parsedData;
callback(null, this);
}.bind(this);
onError = function (err) {
this.removeListener("parsed", onParsed);
callback(err, null);
}.bind(this);
this.once("parsed", onParsed);
this.once("error", onError);
}
this.end(data);
return this;
};
PNG.prototype.write = function (data) {
this._parser.write(data);
return true;
};
PNG.prototype.end = function (data) {
this._parser.end(data);
};
PNG.prototype._metadata = function (metadata) {
this.width = metadata.width;
this.height = metadata.height;
this.emit("metadata", metadata);
};
PNG.prototype._gamma = function (gamma) {
this.gamma = gamma;
};
PNG.prototype._handleClose = function () {
if (!this._parser.writable && !this._packer.readable) {
this.emit("close");
}
};
PNG.bitblt = function (src, dst, srcX, srcY, width, height, deltaX, deltaY) {
// eslint-disable-line max-params
// coerce pixel dimensions to integers (also coerces undefined -> 0):
/* eslint-disable no-param-reassign */
srcX |= 0;
srcY |= 0;
width |= 0;
height |= 0;
deltaX |= 0;
deltaY |= 0;
/* eslint-enable no-param-reassign */
if (
srcX > src.width ||
srcY > src.height ||
srcX + width > src.width ||
srcY + height > src.height
) {
throw new Error("bitblt reading outside image");
}
if (
deltaX > dst.width ||
deltaY > dst.height ||
deltaX + width > dst.width ||
deltaY + height > dst.height
) {
throw new Error("bitblt writing outside image");
}
for (let y = 0; y < height; y++) {
src.data.copy(
dst.data,
((deltaY + y) * dst.width + deltaX) << 2,
((srcY + y) * src.width + srcX) << 2,
((srcY + y) * src.width + srcX + width) << 2
);
}
};
PNG.prototype.bitblt = function (
dst,
srcX,
srcY,
width,
height,
deltaX,
deltaY
) {
// eslint-disable-line max-params
PNG.bitblt(this, dst, srcX, srcY, width, height, deltaX, deltaY);
return this;
};
PNG.adjustGamma = function (src) {
if (src.gamma) {
for (let y = 0; y < src.height; y++) {
for (let x = 0; x < src.width; x++) {
let idx = (src.width * y + x) << 2;
for (let i = 0; i < 3; i++) {
let sample = src.data[idx + i] / 255;
sample = Math.pow(sample, 1 / 2.2 / src.gamma);
src.data[idx + i] = Math.round(sample * 255);
}
}
}
src.gamma = 0;
}
};
PNG.prototype.adjustGamma = function () {
PNG.adjustGamma(this);
};

168
node_modules/pngjs/lib/sync-inflate.js generated vendored Normal file
View File

@@ -0,0 +1,168 @@
"use strict";
let assert = require("assert").ok;
let zlib = require("zlib");
let util = require("util");
let kMaxLength = require("buffer").kMaxLength;
function Inflate(opts) {
if (!(this instanceof Inflate)) {
return new Inflate(opts);
}
if (opts && opts.chunkSize < zlib.Z_MIN_CHUNK) {
opts.chunkSize = zlib.Z_MIN_CHUNK;
}
zlib.Inflate.call(this, opts);
// Node 8 --> 9 compatibility check
this._offset = this._offset === undefined ? this._outOffset : this._offset;
this._buffer = this._buffer || this._outBuffer;
if (opts && opts.maxLength != null) {
this._maxLength = opts.maxLength;
}
}
function createInflate(opts) {
return new Inflate(opts);
}
function _close(engine, callback) {
if (callback) {
process.nextTick(callback);
}
// Caller may invoke .close after a zlib error (which will null _handle).
if (!engine._handle) {
return;
}
engine._handle.close();
engine._handle = null;
}
Inflate.prototype._processChunk = function (chunk, flushFlag, asyncCb) {
if (typeof asyncCb === "function") {
return zlib.Inflate._processChunk.call(this, chunk, flushFlag, asyncCb);
}
let self = this;
let availInBefore = chunk && chunk.length;
let availOutBefore = this._chunkSize - this._offset;
let leftToInflate = this._maxLength;
let inOff = 0;
let buffers = [];
let nread = 0;
let error;
this.on("error", function (err) {
error = err;
});
function handleChunk(availInAfter, availOutAfter) {
if (self._hadError) {
return;
}
let have = availOutBefore - availOutAfter;
assert(have >= 0, "have should not go down");
if (have > 0) {
let out = self._buffer.slice(self._offset, self._offset + have);
self._offset += have;
if (out.length > leftToInflate) {
out = out.slice(0, leftToInflate);
}
buffers.push(out);
nread += out.length;
leftToInflate -= out.length;
if (leftToInflate === 0) {
return false;
}
}
if (availOutAfter === 0 || self._offset >= self._chunkSize) {
availOutBefore = self._chunkSize;
self._offset = 0;
self._buffer = Buffer.allocUnsafe(self._chunkSize);
}
if (availOutAfter === 0) {
inOff += availInBefore - availInAfter;
availInBefore = availInAfter;
return true;
}
return false;
}
assert(this._handle, "zlib binding closed");
let res;
do {
res = this._handle.writeSync(
flushFlag,
chunk, // in
inOff, // in_off
availInBefore, // in_len
this._buffer, // out
this._offset, //out_off
availOutBefore
); // out_len
// Node 8 --> 9 compatibility check
res = res || this._writeState;
} while (!this._hadError && handleChunk(res[0], res[1]));
if (this._hadError) {
throw error;
}
if (nread >= kMaxLength) {
_close(this);
throw new RangeError(
"Cannot create final Buffer. It would be larger than 0x" +
kMaxLength.toString(16) +
" bytes"
);
}
let buf = Buffer.concat(buffers, nread);
_close(this);
return buf;
};
util.inherits(Inflate, zlib.Inflate);
function zlibBufferSync(engine, buffer) {
if (typeof buffer === "string") {
buffer = Buffer.from(buffer);
}
if (!(buffer instanceof Buffer)) {
throw new TypeError("Not a string or buffer");
}
let flushFlag = engine._finishFlushFlag;
if (flushFlag == null) {
flushFlag = zlib.Z_FINISH;
}
return engine._processChunk(buffer, flushFlag);
}
function inflateSync(buffer, opts) {
return zlibBufferSync(new Inflate(opts), buffer);
}
module.exports = exports = inflateSync;
exports.Inflate = Inflate;
exports.createInflate = createInflate;
exports.inflateSync = inflateSync;

45
node_modules/pngjs/lib/sync-reader.js generated vendored Normal file
View File

@@ -0,0 +1,45 @@
"use strict";
let SyncReader = (module.exports = function (buffer) {
this._buffer = buffer;
this._reads = [];
});
SyncReader.prototype.read = function (length, callback) {
this._reads.push({
length: Math.abs(length), // if length < 0 then at most this length
allowLess: length < 0,
func: callback,
});
};
SyncReader.prototype.process = function () {
// as long as there is any data and read requests
while (this._reads.length > 0 && this._buffer.length) {
let read = this._reads[0];
if (
this._buffer.length &&
(this._buffer.length >= read.length || read.allowLess)
) {
// ok there is any data so that we can satisfy this request
this._reads.shift(); // == read
let buf = this._buffer;
this._buffer = buf.slice(read.length);
read.func.call(this, buf.slice(0, read.length));
} else {
break;
}
}
if (this._reads.length > 0) {
throw new Error("There are some read requests waitng on finished stream");
}
if (this._buffer.length > 0) {
throw new Error("unrecognised content at end of stream");
}
};

76
node_modules/pngjs/package.json generated vendored Normal file
View File

@@ -0,0 +1,76 @@
{
"name": "pngjs",
"version": "7.0.0",
"description": "PNG encoder/decoder in pure JS, supporting any bit size & interlace, async & sync with full test suite.",
"contributors": [
"Alexandre Paré",
"Gaurav Mali",
"Gusts Kaksis",
"Kuba Niegowski",
"Luke Page",
"Pietajan De Potter",
"Steven Sojka",
"liangzeng",
"Michael Vogt",
"Xin-Xin Wang",
"toriningen",
"Eugene Kulabuhov"
],
"homepage": "https://github.com/lukeapage/pngjs",
"keywords": [
"PNG",
"decoder",
"encoder",
"js-png",
"node-png",
"parser",
"png",
"png-js",
"png-parse",
"pngjs"
],
"engines": {
"node": ">=14.19.0"
},
"main": "./lib/png.js",
"directories": {
"lib": "lib",
"example": "examples",
"test": "test"
},
"files": [
"browser.js",
"lib/"
],
"scripts": {
"build": "yarn prepublish",
"prepublish": "yarn browserify",
"browserify": "browserify lib/png.js --standalone png > browser.js",
"coverage": "nyc --reporter=lcov --reporter=text-summary tape test/*-spec.js",
"test": "yarn lint && yarn prettier:check && tape test/*-spec.js | tap-dot && node test/run-compare",
"lint": "eslint .",
"prettier:write": "prettier --write .",
"prettier:check": "prettier --check ."
},
"repository": {
"type": "git",
"url": "git://github.com/pngjs/pngjs.git"
},
"license": "MIT",
"bugs": {
"url": "https://github.com/pngjs/pngjs/issues"
},
"devDependencies": {
"browserify": "17.0.0",
"buffer-equal": "1.0.1",
"connect": "3.7.0",
"eslint": "8.34.0",
"eslint-config-prettier": "8.6.0",
"nyc": "15.1.0",
"prettier": "2.8.4",
"puppeteer": "19.7.1",
"serve-static": "1.15.0",
"tap-dot": "2.0.0",
"tape": "5.6.3"
}
}

21
package-lock.json generated Normal file
View File

@@ -0,0 +1,21 @@
{
"name": "codex_test",
"lockfileVersion": 3,
"requires": true,
"packages": {
"": {
"dependencies": {
"pngjs": "^7.0.0"
}
},
"node_modules/pngjs": {
"version": "7.0.0",
"resolved": "https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz",
"integrity": "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==",
"license": "MIT",
"engines": {
"node": ">=14.19.0"
}
}
}
}

1
package.json Normal file
View File

@@ -0,0 +1 @@
{"type":"module","dependencies":{"pngjs":"^7.0.0"}}

52
passes/ColFFTPass.js Normal file
View File

@@ -0,0 +1,52 @@
import { RenderPass } from "/framework/RenderPass.js";
import Shader from "/framework/WebGpu.js";
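// Column-stage FFT: consumes the RowFFT output buffers and writes the final
// height field sampled by the render passes.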
export class ColFFTPass extends RenderPass {
async create( ) {
this.shader = new Shader( this.device );
await this.shader.setup( "shaders/fft_col.wgsl" );
const oceanBlock = this.pipeline.getBlockByName( "ocean" );
const rowPass = oceanBlock.getPass( "RowFFT" );
const rowReal = rowPass.shader.getBuffer( "outputReal" );
const rowImag = rowPass.shader.getBuffer( "outputImag" );
await this.shader.setBuffer( "inputReal", rowReal );
await this.shader.setBuffer( "inputImag", rowImag );
this.shader.setVariable( "outputReal", this.pipeline.memory.colReal );
this.shader.setVariable( "outputImag", this.pipeline.memory.colImag );
this.shader.setVariable( "heightField", this.pipeline.memory.heightField );
this.shader.setVariable( "params", this.pipeline.memory.computeParams );
}
bindBuffers( ) {
this.shader.setVariable( "params", this.pipeline.memory.computeParams );
}
async execute( ) {
const groups = this.pipeline.gridSize;
await this.shader.execute( groups, 1, 1 );
}
}

91
passes/OceanRenderPass.js Normal file
View File

@@ -0,0 +1,91 @@
import { RenderPass } from "/framework/RenderPass.js";
import Shader from "/framework/WebGpu.js";
import Matrix4 from "/framework/Matrix4.js";
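// Wireframe ocean surface: renders the shared grid as a line list, displacing
// vertices in the vertex shader with the height field produced by the ColFFT pass.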
export class OceanRenderPass extends RenderPass {
async create( ) {
this.shader = new Shader( this.device );
this.shader.topology = "line-list";
this.shader.setCanvas( this.pipeline.canvas );
this._configureCanvasContext();
await this.shader.setup( "shaders/ocean_render.wgsl" );
this.shader.setAttribute( "position", this.pipeline.memory.positions );
this.shader.setAttribute( "uv", this.pipeline.memory.uvs );
this.shader.setIndices( this.pipeline.memory.lineIndices );
}
async bindBuffers( ) {
const memory = this.pipeline.memory;
const oceanBlock = this.pipeline.getBlockByName( "ocean" );
const colPass = oceanBlock.getPass( "ColFFT" );
const heightBuffer = colPass.shader.getBuffer( "heightField" );
const viewMatrixData = this.pipeline.camera.getViewMatrix();
const projectionMatrixData = Matrix4.createProjectionMatrix( this.pipeline.camera, this.pipeline.canvas );
const viewProjectionMatrix = Matrix4.multiply( projectionMatrixData, viewMatrixData );
const cameraWorldMatrix = Matrix4.invert( viewMatrixData );
const cameraPosition = Matrix4.getColumn( cameraWorldMatrix, 3 );
await this.shader.setBuffer( "heightField", heightBuffer );
this.shader.setVariable( "renderParams", memory.renderParams );
this.shader.setVariable( "viewProjection", viewProjectionMatrix );
this.shader.setVariable( "cameraPosition", cameraPosition );
this.shader.createBindGroups();
}
async execute( ) {
await this.shader.renderToCanvas( this.shader.indexCount, 1 );
}
_configureCanvasContext( ) {
if ( this.pipeline.canvasConfigured ) {
return;
}
const context = this.pipeline.canvas.getContext( "webgpu" );
const format = navigator.gpu.getPreferredCanvasFormat();
context.configure( {
device: this.device,
format: format,
alphaMode: "opaque"
} );
this.pipeline.canvasConfigured = true;
}
}

186
passes/OceanSolidRenderPass.js Normal file
View File

@@ -0,0 +1,186 @@
import { RenderPass } from "/framework/RenderPass.js";
import Shader from "/framework/WebGpu.js";
import Matrix4 from "/framework/Matrix4.js";
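// Solid ocean surface (triangle list): same height-field displacement as the
// wireframe pass, plus sea-bottom color/height textures and a sampler used by
// the "realistic" shading mode.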
export class OceanSolidRenderPass extends RenderPass {
async create( ) {
this.shader = new Shader( this.device );
this.shader.topology = "triangle-list";
this.shader.setCanvas( this.pipeline.canvas );
this._configureCanvasContext();
await this.shader.setup( "shaders/ocean_render.wgsl" );
this.shader.setAttribute( "position", this.pipeline.memory.positions );
this.shader.setAttribute( "uv", this.pipeline.memory.uvs );
this.shader.setIndices( this.pipeline.memory.indices );
await this._loadBottomTexture();
}
async bindBuffers( ) {
const memory = this.pipeline.memory;
const oceanBlock = this.pipeline.getBlockByName( "ocean" );
const colPass = oceanBlock.getPass( "ColFFT" );
const heightBuffer = colPass.shader.getBuffer( "heightField" );
const viewMatrixData = this.pipeline.camera.getViewMatrix();
const projectionMatrixData = Matrix4.createProjectionMatrix( this.pipeline.camera, this.pipeline.canvas );
const viewProjectionMatrix = Matrix4.multiply( projectionMatrixData, viewMatrixData );
const cameraWorldMatrix = Matrix4.invert( viewMatrixData );
const cameraPosition = Matrix4.getColumn( cameraWorldMatrix, 3 );
await this.shader.setBuffer( "heightField", heightBuffer );
this.shader.setVariable( "renderParams", memory.renderParams );
this.shader.setVariable( "viewProjection", viewProjectionMatrix );
this.shader.setVariable( "cameraPosition", cameraPosition );
this.shader.createBindGroups();
}
async execute( ) {
await this.shader.renderToCanvas( this.shader.indexCount, 1 );
}
_configureCanvasContext( ) {
if ( this.pipeline.canvasConfigured ) {
return;
}
const context = this.pipeline.canvas.getContext( "webgpu" );
const format = navigator.gpu.getPreferredCanvasFormat();
context.configure( {
device: this.device,
format: format,
alphaMode: "opaque"
} );
this.pipeline.canvasConfigured = true;
}
async _loadBottomTexture( ) {
if ( this._bottomTextureLoaded ) {
return;
}
if ( typeof document === "undefined" ) {
return;
}
const canvas = document.createElement( "canvas" );
const ctx = canvas.getContext( "2d" );
if ( !ctx ) {
return;
}
const colorImage = await this._loadImage( "resources/textures/ground/Ground093C_2K-PNG_Color.png" );
const heightImage = await this._loadImage( "resources/textures/heightmap/Terrain003_1K_Height512.png" );
// COLOR
canvas.width = colorImage.width;
canvas.height = colorImage.height;
ctx.clearRect( 0, 0, canvas.width, canvas.height );
ctx.drawImage( colorImage, 0, 0 );
let imageData = ctx.getImageData( 0, 0, canvas.width, canvas.height );
let pixels = new Uint8Array( imageData.data );
const colorTex = this.shader.createTextureFromData( canvas.width, canvas.height, pixels );
this.shader.setVariable( "bottomColorTex", colorTex );
// HEIGHTMAP
canvas.width = heightImage.width;
canvas.height = heightImage.height;
ctx.clearRect( 0, 0, canvas.width, canvas.height );
ctx.drawImage( heightImage, 0, 0 );
imageData = ctx.getImageData( 0, 0, canvas.width, canvas.height );
pixels = new Uint8Array( imageData.data );
const heightTex = this.shader.createTextureFromData( canvas.width, canvas.height, pixels );
this.shader.setVariable( "bottomHeightTex", heightTex );
const sampler = this.device.createSampler( {
minFilter: "linear",
magFilter: "linear",
mipmapFilter: "linear",
addressModeU: "repeat",
addressModeV: "repeat"
} );
this.shader.setVariable( "bottomSampler", sampler );
this._bottomTextureLoaded = true;
}
async _loadImage( url ) {
return await new Promise( function( resolve, reject ) {
const img = new Image( );
img.onload = function( ) { resolve( img ); };
img.onerror = function( event ) { reject( event ); };
img.src = url;
} );
}
}

50
passes/RowFFTPass.js Normal file
View File

@@ -0,0 +1,50 @@
import { RenderPass } from "/framework/RenderPass.js";
import Shader from "/framework/WebGpu.js";
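// Row-stage FFT: transforms each row of the complex spectrum written by the
// Spectrum pass into intermediate buffers for the column stage.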
export class RowFFTPass extends RenderPass {
async create( ) {
this.shader = new Shader( this.device );
await this.shader.setup( "shaders/fft_row.wgsl" );
const oceanBlock = this.pipeline.getBlockByName( "ocean" );
const spectrumPass = oceanBlock.getPass( "Spectrum" );
const spectrumReal = spectrumPass.shader.getBuffer( "spectrumReal" );
const spectrumImag = spectrumPass.shader.getBuffer( "spectrumImag" );
await this.shader.setBuffer( "inputReal", spectrumReal );
await this.shader.setBuffer( "inputImag", spectrumImag );
this.shader.setVariable( "outputReal", this.pipeline.memory.rowReal );
this.shader.setVariable( "outputImag", this.pipeline.memory.rowImag );
this.shader.setVariable( "params", this.pipeline.memory.computeParams );
}
bindBuffers( ) {
this.shader.setVariable( "params", this.pipeline.memory.computeParams );
}
async execute( ) {
const groups = this.pipeline.gridSize;
await this.shader.execute( groups, 1, 1 );
}
}

86
passes/SkySpherePass.js Normal file
View File

@@ -0,0 +1,86 @@
import { RenderPass } from "/framework/RenderPass.js";
import Shader from "/framework/WebGpu.js";
import Matrix4 from "/framework/Matrix4.js";
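// Sky dome pass: sets up an inward-facing sphere mesh and camera uniforms for
// the background.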
export class SkySpherePass extends RenderPass {
async create( ) {
this.shader = new Shader( this.device );
this.shader.topology = "triangle-list";
this.shader.setCanvas( this.pipeline.canvas );
this._configureCanvasContext();
await this.shader.setup( "shaders/sky_sphere.wgsl" );
this.shader.setAttribute( "position", this.pipeline.memory.skyPositions );
this.shader.setAttribute( "normal", this.pipeline.memory.skyNormals );
this.shader.setIndices( this.pipeline.memory.skyIndices );
}
async bindBuffers( ) {
const viewMatrixData = this.pipeline.camera.getViewMatrix();
const projectionMatrixData = Matrix4.createProjectionMatrix( this.pipeline.camera, this.pipeline.canvas );
const viewProjectionMatrix = Matrix4.multiply( projectionMatrixData, viewMatrixData );
const cameraWorldMatrix = Matrix4.invert( viewMatrixData );
const cameraPosition = Matrix4.getColumn( cameraWorldMatrix, 3 );
this.shader.setVariable( "viewProjectionMatrix", viewProjectionMatrix );
this.shader.setVariable( "cameraPosition", cameraPosition );
this.shader.createBindGroups();
}
async execute( ) {
// No draw call issued here: the sky sphere is rendered through the scene / RenderSystem, which uses this pass's mesh and bindings instead.
}
_configureCanvasContext( ) {
if ( this.pipeline.canvasConfigured ) {
return;
}
const context = this.pipeline.canvas.getContext( "webgpu" );
const format = navigator.gpu.getPreferredCanvasFormat();
context.configure( {
device: this.device,
format: format,
alphaMode: "opaque"
} );
this.pipeline.canvasConfigured = true;
}
}

44
passes/SpectrumPass.js Normal file
View File

@@ -0,0 +1,44 @@
import { RenderPass } from "/framework/RenderPass.js";
import Shader from "/framework/WebGpu.js";
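// Evolves the seeded h0 coefficients over time and writes the per-frame complex
// spectrum consumed by the FFT passes.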
export class SpectrumPass extends RenderPass {
async create( ) {
this.shader = new Shader( this.device );
await this.shader.setup( "shaders/ocean_spectrum_new.wgsl" );
this.shader.setVariable( "h0Real", this.pipeline.memory.h0Real );
this.shader.setVariable( "h0Imag", this.pipeline.memory.h0Imag );
this.shader.setVariable( "spectrumReal", this.pipeline.memory.spectrumReal );
this.shader.setVariable( "spectrumImag", this.pipeline.memory.spectrumImag );
this.shader.setVariable( "params", this.pipeline.memory.computeParams );
}
bindBuffers( ) {
const memory = this.pipeline.memory;
this.shader.setVariable( "params", memory.computeParams );
}
async execute( ) {
const groups = Math.ceil( this.pipeline.gridSize / 8 );
await this.shader.execute( groups, groups, 1 );
}
}

770
pipelines/OceanPipeline.js Normal file
View File

@@ -0,0 +1,770 @@
import { RenderPipeline } from "/framework/RenderPipeline.js";
import { Block } from "/framework/Block.js";
import Camera from "/framework/Camera.js";
import EventManager from "/framework/eventManager.js";
import { SpectrumPass } from "../passes/SpectrumPass.js";
import { RowFFTPass } from "../passes/RowFFTPass.js";
import { ColFFTPass } from "../passes/ColFFTPass.js";
import { OceanRenderPass } from "../passes/OceanRenderPass.js";
import { OceanSolidRenderPass } from "../passes/OceanSolidRenderPass.js";
import { SkySpherePass } from "../passes/SkySpherePass.js";
export class OceanPipeline extends RenderPipeline {
constructor( engine, canvas ) {
super( engine );
this.canvas = canvas;
this.canvasConfigured = false;
this.gridSize = 64;
this.meshResolution = 64;
this.offsetX = 0;
this.offsetZ = 0;
this.wavelengthScale = 1.0;
this.tiling = 1;
this.handleInput = true;
this.renderMode = "solid";
this.shadingMode = "lighting";
this.isPaused = false;
this.elapsedTime = 0;
}
async create( ) {
this.patchSize = 80;
this.heightScale = 54.0;
this.timeScale = 0.35;
this.startTime = performance.now();
const simSize = this.gridSize;
this.memory.set( "h0Real", new Float32Array( simSize * simSize ) );
this.memory.set( "h0Imag", new Float32Array( simSize * simSize ) );
this.memory.set( "spectrumReal", new Float32Array( simSize * simSize ) );
this.memory.set( "spectrumImag", new Float32Array( simSize * simSize ) );
this.memory.set( "rowReal", new Float32Array( simSize * simSize ) );
this.memory.set( "rowImag", new Float32Array( simSize * simSize ) );
this.memory.set( "colReal", new Float32Array( simSize * simSize ) );
this.memory.set( "colImag", new Float32Array( simSize * simSize ) );
this.memory.set( "heightField", new Float32Array( simSize * simSize ) );
this.memory.set( "computeParams", new Float32Array( 4 ) );
this.memory.set( "renderParams", new Float32Array( [ simSize, this.patchSize, this.heightScale, 0, this.meshResolution, this.tiling, this.wavelengthScale ] ) );
this._seedSpectrum();
const geometry = this._buildGridGeometry( this.meshResolution, this.patchSize );
this.memory.set( "positions", geometry.positions );
this.memory.set( "uvs", geometry.uvs );
this.memory.set( "indices", geometry.indices );
this.memory.set( "lineIndices", geometry.lineIndices );
const skySphere = this._buildSkySphereGeometry( 220, 32, 64 );
this.memory.set( "skyPositions", skySphere.positions );
this.memory.set( "skyNormals", skySphere.normals );
this.memory.set( "skyIndices", skySphere.indices );
const cubeGeometry = this._buildCubeGeometry();
this.memory.set( "cubePositions", cubeGeometry.positions );
this.memory.set( "cubeColors", cubeGeometry.colors );
this.memory.set( "cubeIndices", cubeGeometry.indices );
const block = new Block( "ocean", this );
const spectrumPass = new SpectrumPass( );
const rowFFTPass = new RowFFTPass( );
const colFFTPass = new ColFFTPass( );
const renderWirePass = new OceanRenderPass( );
const renderSolidPass = new OceanSolidRenderPass( );
const skySpherePass = new SkySpherePass( );
block.addPass( "Spectrum", spectrumPass );
block.addPass( "RowFFT", rowFFTPass );
block.addPass( "ColFFT", colFFTPass );
block.addPass( "RenderWire", renderWirePass );
block.addPass( "RenderSolid", renderSolidPass );
block.addPass( "SkySphere", skySpherePass );
this.addBlock( block );
this.camera = new Camera( [ 0, 90, 120 ], [ 0, 0, 0 ], [ 0, 1, 0 ] );
this.camera.far = 6000.0;
this.camera.pitch = Math.PI / 3;
this.camera.update();
this.eventManager = new EventManager( );
if ( this.canvas && this.handleInput ) {
this.eventManager.setup( this.canvas, this.camera );
this.eventManager.registerEventListeners();
}
await super.create();
}
setHeightScale( value ) {
this.heightScale = value;
this.memory.computeParams[ 2 ] = value;
this.memory.renderParams[ 2 ] = value;
}
setWavelengthScale( value ) {
if ( value <= 0 ) {
value = 0.25;
}
this.wavelengthScale = value;
if ( this.memory && this.memory.renderParams ) {
this.memory.renderParams[ 6 ] = this.wavelengthScale;
}
this._seedSpectrum( );
const block = this.getBlockByName( "ocean" );
if ( block ) {
const spectrumPass = block.getPass( "Spectrum" );
if ( spectrumPass && spectrumPass.shader ) {
spectrumPass.shader.setVariable( "h0Real", this.memory.h0Real );
spectrumPass.shader.setVariable( "h0Imag", this.memory.h0Imag );
}
}
}
setRenderMode( mode ) {
if ( mode !== "wireframe" && mode !== "solid" ) {
return;
}
this.renderMode = mode;
}
setShadingMode( mode ) {
let code = 0;
if ( mode === "normals" ) {
code = 1;
} else if ( mode === "solid" ) {
code = 2;
} else if ( mode === "height" ) {
code = 3;
} else if ( mode === "realistic" ) {
code = 4;
} else {
mode = "lighting";
code = 0;
}
this.shadingMode = mode;
this.memory.renderParams[ 3 ] = code;
}
setTiling( value ) {
if ( value < 1 ) {
value = 1;
}
this.tiling = value;
this.memory.renderParams[ 5 ] = value;
}
setOffset( x, z ) {
this.offsetX = x;
this.offsetZ = z;
// Offsets are now handled in the vertex shader via instancing.
}
setPaused( paused ) {
if ( paused === this.isPaused ) {
return;
}
this.isPaused = paused;
if ( !paused ) {
this.startTime = performance.now() - this.elapsedTime * 1000.0;
}
}
async stepOnce( stepSeconds = 1 / 60 ) {
if ( !this.isPaused ) {
return;
}
this.elapsedTime += stepSeconds;
this.memory.computeParams[ 0 ] = this.elapsedTime * this.timeScale;
this.memory.computeParams[ 1 ] = this.gridSize;
await this.bindBuffers( );
await this.execute( );
}
async bindBuffers( ) {
const nowSeconds = ( performance.now() - this.startTime ) / 1000;
if ( !this.isPaused ) {
this.elapsedTime = nowSeconds;
}
this.memory.computeParams[ 0 ] = this.elapsedTime * this.timeScale;
this.memory.computeParams[ 1 ] = this.gridSize;
const block = this.getBlockByName( "ocean" );
const spectrumPass = block.getPass( "Spectrum" );
const rowFFTPass = block.getPass( "RowFFT" );
const colFFTPass = block.getPass( "ColFFT" );
const renderWirePass = block.getPass( "RenderWire" );
const renderSolidPass = block.getPass( "RenderSolid" );
const skyPass = block.getPass( "SkySphere" );
await spectrumPass.bindBuffers( );
await rowFFTPass.bindBuffers( );
await colFFTPass.bindBuffers( );
if ( this.renderMode === "solid" ) {
await renderSolidPass.bindBuffers( );
} else {
await renderWirePass.bindBuffers( );
}
if ( skyPass ) {
await skyPass.bindBuffers( );
}
}
async execute( ) {
const block = this.getBlockByName( "ocean" );
const spectrumPass = block.getPass( "Spectrum" );
const rowFFTPass = block.getPass( "RowFFT" );
const colFFTPass = block.getPass( "ColFFT" );
await spectrumPass.execute( );
await rowFFTPass.execute( );
await colFFTPass.execute( );
}
_seedSpectrum( ) {
const size = this.gridSize;
const phillipsA = 0.0006;
const windSpeed = 24.0;
const windDir = { x: 0.8, y: 0.6 };
const baseL = windSpeed * windSpeed / 9.81;
let wlScale = this.wavelengthScale;
if ( wlScale <= 0 ) {
wlScale = 0.25;
}
const L = baseL;
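// Phillips-style spectrum: P(k) ~ A * exp( -1 / (kL)^2 ) / k^4 * (k_hat . w_hat)^2,
// with L = V^2 / g; the wavelength slider rescales k (kScaled) before evaluation.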
for ( let y = 0; y < size; y++ ) {
for ( let x = 0; x < size; x++ ) {
const kx = ( x - size / 2 ) * ( Math.PI * 2 / this.patchSize );
const ky = ( y - size / 2 ) * ( Math.PI * 2 / this.patchSize );
const kLength = Math.sqrt( kx * kx + ky * ky );
if ( kLength === 0 ) {
continue;
}
const kxNorm = kx / kLength;
const kyNorm = ky / kLength;
const windDotK = kxNorm * windDir.x + kyNorm * windDir.y;
const kScaled = kLength / wlScale;
const phillips = phillipsA * Math.exp( -1 / ( kScaled * kScaled * L * L ) ) / Math.pow( kScaled, 4 ) * ( windDotK * windDotK );
const gaussianR = this._gaussianRandom();
const gaussianI = this._gaussianRandom();
const amplitude = Math.sqrt( phillips ) * Math.SQRT1_2;
const idx = y * size + x;
this.memory.h0Real[ idx ] = gaussianR * amplitude;
this.memory.h0Imag[ idx ] = gaussianI * amplitude;
}
}
// enforce conjugate symmetry: h0(-k) = conj( h0(k) ) for a real height field
for ( let y = 0; y < size; y++ ) {
for ( let x = 0; x < size; x++ ) {
const idx = y * size + x;
const mx = ( size - x ) % size;
const my = ( size - y ) % size;
const mirrorIdx = my * size + mx;
this.memory.h0Real[ mirrorIdx ] = this.memory.h0Real[ idx ];
this.memory.h0Imag[ mirrorIdx ] = -this.memory.h0Imag[ idx ];
}
}
this.memory.computeParams[ 0 ] = 0;
this.memory.computeParams[ 1 ] = this.gridSize;
this.memory.computeParams[ 2 ] = this.heightScale;
this.memory.computeParams[ 3 ] = this.patchSize;
this.memory.renderParams[ 2 ] = this.heightScale;
}
_gaussianRandom( ) {
let u = 0, v = 0;
while ( u === 0 ) u = Math.random();
while ( v === 0 ) v = Math.random();
return Math.sqrt( -2.0 * Math.log( u ) ) * Math.cos( 2.0 * Math.PI * v );
}
_buildGridGeometry( size, span ) {
const vertexCount = size * size;
const positions = new Float32Array( vertexCount * 3 );
const uvs = new Float32Array( vertexCount * 2 );
const quadCount = ( size - 1 ) * ( size - 1 );
const indices = new Uint32Array( quadCount * 6 );
const horizontalLines = size * ( size - 1 );
const verticalLines = ( size - 1 ) * size;
const lineIndices = new Uint32Array( ( horizontalLines + verticalLines ) * 2 );
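// Note: the "uvs" hold integer grid coordinates (0..size-1); the vertex shader
// normalises them using the mesh resolution carried in renderParams.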
let pi = 0;
let ui = 0;
for ( let y = 0; y < size; y++ ) {
for ( let x = 0; x < size; x++ ) {
const fx = x / ( size - 1 );
const fy = y / ( size - 1 );
const worldX = ( fx - 0.5 ) * span;
const worldZ = ( fy - 0.5 ) * span;
positions[ pi++ ] = worldX;
positions[ pi++ ] = 0;
positions[ pi++ ] = worldZ;
uvs[ ui++ ] = x;
uvs[ ui++ ] = y;
}
}
let ii = 0;
let li = 0;
for ( let y = 0; y < size - 1; y++ ) {
for ( let x = 0; x < size - 1; x++ ) {
const topLeft = y * size + x;
const topRight = topLeft + 1;
const bottomLeft = topLeft + size;
const bottomRight = bottomLeft + 1;
indices[ ii++ ] = topLeft;
indices[ ii++ ] = bottomLeft;
indices[ ii++ ] = topRight;
indices[ ii++ ] = topRight;
indices[ ii++ ] = bottomLeft;
indices[ ii++ ] = bottomRight;
}
// horizontal line for row y at each segment
for ( let x = 0; x < size - 1; x++ ) {
const a = y * size + x;
const b = a + 1;
lineIndices[ li++ ] = a;
lineIndices[ li++ ] = b;
}
}
// vertical lines
for ( let x = 0; x < size; x++ ) {
for ( let y = 0; y < size - 1; y++ ) {
const a = y * size + x;
const b = a + size;
lineIndices[ li++ ] = a;
lineIndices[ li++ ] = b;
}
}
return {
positions,
uvs,
indices,
lineIndices
};
}
_buildSkySphereGeometry( radius, latSegments, lonSegments ) {
const latCount = latSegments;
const lonCount = lonSegments;
const vertexCount = ( latCount + 1 ) * ( lonCount + 1 );
const positions = new Float32Array( vertexCount * 3 );
const normals = new Float32Array( vertexCount * 3 );
const uvs = new Float32Array( vertexCount * 2 );
const indices = new Uint32Array( latCount * lonCount * 6 );
let pi = 0;
let ni = 0;
let ui = 0;
for ( let lat = 0; lat <= latCount; lat++ ) {
const theta = lat * Math.PI / latCount;
const sinTheta = Math.sin( theta );
const cosTheta = Math.cos( theta );
for ( let lon = 0; lon <= lonCount; lon++ ) {
const phi = lon * Math.PI * 2.0 / lonCount;
const sinPhi = Math.sin( phi );
const cosPhi = Math.cos( phi );
const x = sinTheta * cosPhi;
const y = cosTheta;
const z = sinTheta * sinPhi;
positions[ pi++ ] = x * radius;
positions[ pi++ ] = y * radius;
positions[ pi++ ] = z * radius;
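// flip the normal so it points inward: the dome is always viewed from inside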
const nx = -x;
const ny = -y;
const nz = -z;
normals[ ni++ ] = nx;
normals[ ni++ ] = ny;
normals[ ni++ ] = nz;
uvs[ ui++ ] = lon / lonCount;
uvs[ ui++ ] = lat / latCount;
}
}
let ii = 0;
for ( let y = 0; y < latCount; y++ ) {
for ( let x = 0; x < lonCount; x++ ) {
const i0 = y * ( lonCount + 1 ) + x;
const i1 = i0 + 1;
const i2 = ( y + 1 ) * ( lonCount + 1 ) + x;
const i3 = i2 + 1;
indices[ ii++ ] = i0;
indices[ ii++ ] = i2;
indices[ ii++ ] = i1;
indices[ ii++ ] = i1;
indices[ ii++ ] = i2;
indices[ ii++ ] = i3;
}
}
return {
positions,
normals,
uvs,
indices
};
}
_buildCubeGeometry( ) {
const positions = new Float32Array( [
// Front
-1, -1, 1, 1, -1, 1, 1, 1, 1, -1, 1, 1,
// Back
-1, -1, -1, -1, 1, -1, 1, 1, -1, 1, -1, -1,
// Top
-1, 1, -1, -1, 1, 1, 1, 1, 1, 1, 1, -1,
// Bottom
-1, -1, -1, 1, -1, -1, 1, -1, 1, -1, -1, 1,
// Right
1, -1, -1, 1, 1, -1, 1, 1, 1, 1, -1, 1,
// Left
-1, -1, -1, -1, -1, 1, -1, 1, 1, -1, 1, -1
] );
const colors = new Float32Array( [
// Front
0.1, 0.6, 1.0, 0.1, 0.6, 1.0, 0.1, 0.6, 1.0, 0.1, 0.6, 1.0,
// Back
0.1, 0.2, 0.8, 0.1, 0.2, 0.8, 0.1, 0.2, 0.8, 0.1, 0.2, 0.8,
// Top
0.2, 0.8, 0.5, 0.2, 0.8, 0.5, 0.2, 0.8, 0.5, 0.2, 0.8, 0.5,
// Bottom
0.9, 0.7, 0.2, 0.9, 0.7, 0.2, 0.9, 0.7, 0.2, 0.9, 0.7, 0.2,
// Right
0.8, 0.3, 0.4, 0.8, 0.3, 0.4, 0.8, 0.3, 0.4, 0.8, 0.3, 0.4,
// Left
0.4, 0.9, 0.7, 0.4, 0.9, 0.7, 0.4, 0.9, 0.7, 0.4, 0.9, 0.7
] );
const indices = new Uint32Array( [
0, 1, 2, 0, 2, 3, // Front
4, 5, 6, 4, 6, 7, // Back
8, 9, 10, 8, 10, 11, // Top
12, 13, 14, 12, 14, 15, // Bottom
16, 17, 18, 16, 18, 19, // Right
20, 21, 22, 20, 22, 23 // Left
] );
return {
positions,
colors,
indices
};
}
}

166
server.js Normal file
View File

@@ -0,0 +1,166 @@
import http from "http";
import { readdir } from "fs/promises";
import { stat } from "fs/promises";
import { readFile } from "fs/promises";
import { join } from "path";
import { dirname } from "path";
import { fileURLToPath } from "url";
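// Minimal static file server for local development: serves the repository root
// on port 3003, falls back to a directory listing when no index.html is present,
// and maps common extensions to MIME types (WGSL is served as text/plain).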
class App
{
constructor( )
{
const selfPath = fileURLToPath( import.meta.url );
this.rootPath = dirname( selfPath );
this.httpServer = null;
}
async start( )
{
this.httpServer = http.createServer( this.handleRequest.bind( this ) );
this.httpServer.listen( 3003 );
}
async handleRequest( req, res )
{
const requestedPath = decodeURI( req.url );
const fullPath = join( this.rootPath, requestedPath );
const exists = await this.checkFileExists( fullPath );
if ( !exists )
{
res.statusCode = 404;
res.end( "Not Found" );
return;
}
const stats = await stat( fullPath );
if ( stats.isDirectory( ) )
{
const indexPath = join( fullPath, "index.html" );
const indexExists = await this.checkFileExists( indexPath );
if ( indexExists )
{
await this.sendFile( indexPath, res );
return;
}
await this.sendDirectoryListing( fullPath, requestedPath, res );
return;
}
await this.sendFile( fullPath, res );
}
async sendFile( path, res )
{
const contentType = this.getContentType( path );
const fileData = await readFile( path );
res.setHeader( "Content-Type", contentType );
res.statusCode = 200;
res.end( fileData );
}
async sendDirectoryListing( dirPath, urlPath, res )
{
const entries = await readdir( dirPath, { withFileTypes : true } );
let html = "<html><body><h1>Index of " + urlPath + "</h1><ul>";
let i = 0;
while ( i < entries.length )
{
const e = entries[ i ].name;
const link = urlPath.endsWith( "/" )
? urlPath + e
: urlPath + "/" + e;
html = html + "<li><a href=\"" + link + "\">" + e + "</a></li>";
i = i + 1;
}
html = html + "</ul></body></html>";
res.setHeader( "Content-Type", "text/html" );
res.statusCode = 200;
res.end( html );
}
async checkFileExists( path )
{
const exists = await stat( path )
.then( function( ) { return true; } )
.catch( function( ) { return false; } );
return exists;
}
getContentType( path )
{
const lower = path.toLowerCase( );
if ( lower.endsWith( ".html" ) ) return "text/html";
if ( lower.endsWith( ".css" ) ) return "text/css";
if ( lower.endsWith( ".js" ) ) return "text/javascript";
if ( lower.endsWith( ".json" ) ) return "application/json";
if ( lower.endsWith( ".wasm" ) ) return "application/wasm";
if ( lower.endsWith( ".png" ) ) return "image/png";
if ( lower.endsWith( ".jpg" ) ) return "image/jpeg";
if ( lower.endsWith( ".jpeg" ) ) return "image/jpeg";
if ( lower.endsWith( ".gif" ) ) return "image/gif";
if ( lower.endsWith( ".svg" ) ) return "image/svg+xml";
if ( lower.endsWith( ".wgsl" ) ) return "text/plain";
if ( lower.endsWith( ".txt" ) ) return "text/plain";
return "application/octet-stream";
}
}
const app = new App( );
await app.start( );

42
shaders/cube_render.wgsl Normal file
View File

@@ -0,0 +1,42 @@
@group(0) @binding(0) var<storage, read> viewProjection : array<f32>;
struct VertexOutput {
@builtin(position) position : vec4<f32>,
@location(0) color : vec3<f32>,
};
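// The mat4x4 constructor takes four column vectors, so the flat viewProjection
// storage array is read back four floats at a time as columns.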
fn loadViewProjection( ) -> mat4x4<f32> {
return mat4x4<f32>(
vec4<f32>( viewProjection[ 0 ], viewProjection[ 1 ], viewProjection[ 2 ], viewProjection[ 3 ] ),
vec4<f32>( viewProjection[ 4 ], viewProjection[ 5 ], viewProjection[ 6 ], viewProjection[ 7 ] ),
vec4<f32>( viewProjection[ 8 ], viewProjection[ 9 ], viewProjection[ 10 ], viewProjection[ 11 ] ),
vec4<f32>( viewProjection[ 12 ], viewProjection[ 13 ], viewProjection[ 14 ], viewProjection[ 15 ] )
);
}
@vertex
fn v_main(
@location(0) position : vec3<f32>,
@location(1) color : vec3<f32>
) -> VertexOutput {
var output : VertexOutput;
let vp : mat4x4<f32> = loadViewProjection();
output.position = vp * vec4<f32>( position, 1.0 );
output.color = color;
return output;
}
@fragment
fn f_main( input : VertexOutput ) -> @location(0) vec4<f32> {
return vec4<f32>( input.color, 1.0 );
}

94
shaders/fft_col.wgsl Normal file
View File

@@ -0,0 +1,94 @@
const PI2 : f32 = 6.28318530718;
@group(0) @binding(0) var<storage, read> inputReal : array<f32>;
@group(0) @binding(1) var<storage, read> inputImag : array<f32>;
@group(0) @binding(2) var<storage, read_write> outputReal : array<f32>;
@group(0) @binding(3) var<storage, read_write> outputImag : array<f32>;
@group(0) @binding(4) var<storage, read_write> heightField : array<f32>;
@group(0) @binding(5) var<storage, read> params : array<f32>;
var<workgroup> wr : array<f32, 64>;
var<workgroup> wi : array<f32, 64>;
fn bitReverse( x : u32, bits : u32 ) -> u32 {
var n : u32 = x;
var r : u32 = 0u;
for ( var i : u32 = 0u; i < bits; i = i + 1u ) {
r = ( r << 1u ) | ( n & 1u );
n = n >> 1u;
}
return r;
}
@compute @workgroup_size( 64 )
fn main( @builtin(local_invocation_id) lid : vec3<u32>,
@builtin(workgroup_id) gid : vec3<u32> ) {
let N : u32 = u32( params[ 1u ] );
if ( N != 64u ) {
return;
}
let col : u32 = gid.x;
let row : u32 = lid.x;
let idx : u32 = row * N + col;
let bits : u32 = 6u;
let rev : u32 = bitReverse( row, bits );
let srcIdx : u32 = rev * N + col;
wr[ row ] = inputReal[ srcIdx ];
wi[ row ] = inputImag[ srcIdx ];
workgroupBarrier();
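// Iterative radix-2 (Cooley-Tukey) butterflies on the workgroup-shared arrays
// after the bit-reversal load; each invocation owns one element of this column.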
var step : u32 = 1u;
while ( step < N ) {
let jump : u32 = step << 1u;
let twiddleAngle : f32 = -PI2 / f32( jump );
let pairIndex : u32 = ( row / jump ) * jump + ( row % step );
let matchIndex : u32 = pairIndex + step;
let k : u32 = row % step;
let angle : f32 = twiddleAngle * f32( k );
let c : f32 = cos( angle );
let s : f32 = sin( angle );
let er : f32 = wr[ pairIndex ];
let ei : f32 = wi[ pairIndex ];
let or : f32 = wr[ matchIndex ];
let oi : f32 = wi[ matchIndex ];
let tr : f32 = c * or - s * oi;
let ti : f32 = s * or + c * oi;
if ( row % jump < step ) {
wr[ pairIndex ] = er + tr;
wi[ pairIndex ] = ei + ti;
} else {
wr[ matchIndex ] = er - tr;
wi[ matchIndex ] = ei - ti;
}
workgroupBarrier();
step = jump;
}
let invScale : f32 = 1.0 / f32( N );
let realVal : f32 = wr[ row ] * invScale;
let imagVal : f32 = wi[ row ] * invScale;
outputReal[ idx ] = realVal;
outputImag[ idx ] = imagVal;
// final height is real part
heightField[ idx ] = realVal;
}

86
shaders/fft_row.wgsl Normal file
View File

@@ -0,0 +1,86 @@
const PI2 : f32 = 6.28318530718;
@group(0) @binding(0) var<storage, read> inputReal : array<f32>;
@group(0) @binding(1) var<storage, read> inputImag : array<f32>;
@group(0) @binding(2) var<storage, read_write> outputReal : array<f32>;
@group(0) @binding(3) var<storage, read_write> outputImag : array<f32>;
@group(0) @binding(4) var<storage, read> params : array<f32>;
var<workgroup> wr : array<f32, 64>;
var<workgroup> wi : array<f32, 64>;
fn bitReverse( x : u32, bits : u32 ) -> u32 {
var n : u32 = x;
var r : u32 = 0u;
for ( var i : u32 = 0u; i < bits; i = i + 1u ) {
r = ( r << 1u ) | ( n & 1u );
n = n >> 1u;
}
return r;
}
@compute @workgroup_size( 64 )
fn main( @builtin(local_invocation_id) lid : vec3<u32>,
@builtin(workgroup_id) gid : vec3<u32> ) {
let N : u32 = u32( params[ 1u ] );
if ( N != 64u ) {
return;
}
let row : u32 = gid.x;
let col : u32 = lid.x;
let idx : u32 = row * N + col;
let bits : u32 = 6u;
let rev : u32 = bitReverse( col, bits );
let srcIdx : u32 = row * N + rev;
wr[ col ] = inputReal[ srcIdx ];
wi[ col ] = inputImag[ srcIdx ];
workgroupBarrier();
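// Same radix-2 butterfly scheme as fft_col.wgsl, applied along rows; each
// invocation owns one element of this row.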
var step : u32 = 1u;
while ( step < N ) {
let jump : u32 = step << 1u;
let twiddleAngle : f32 = -PI2 / f32( jump );
let pairIndex : u32 = ( col / jump ) * jump + ( col % step );
let matchIndex : u32 = pairIndex + step;
let k : u32 = col % step;
let angle : f32 = twiddleAngle * f32( k );
let c : f32 = cos( angle );
let s : f32 = sin( angle );
let er : f32 = wr[ pairIndex ];
let ei : f32 = wi[ pairIndex ];
let or : f32 = wr[ matchIndex ];
let oi : f32 = wi[ matchIndex ];
let tr : f32 = c * or - s * oi;
let ti : f32 = s * or + c * oi;
if ( col % jump < step ) {
wr[ pairIndex ] = er + tr;
wi[ pairIndex ] = ei + ti;
} else {
wr[ matchIndex ] = er - tr;
wi[ matchIndex ] = ei - ti;
}
workgroupBarrier();
step = jump;
}
let invScale : f32 = 1.0 / f32( N );
outputReal[ idx ] = wr[ col ] * invScale;
outputImag[ idx ] = wi[ col ] * invScale;
}

304
shaders/ocean_render.wgsl Normal file
View File

@@ -0,0 +1,304 @@
@group(0) @binding(0) var<storage, read> heightField : array<f32>;
@group(0) @binding(1) var<storage, read> renderParams : array<f32>;
@group(0) @binding(2) var<storage, read> viewProjection : array<f32>;
@group(0) @binding(3) var<storage, read> cameraPosition : array<f32>;
@group(0) @binding(4) var bottomColorTex : texture_2d<f32>;
@group(0) @binding(5) var bottomHeightTex : texture_2d<f32>;
@group(0) @binding(6) var bottomSampler : sampler;
struct VertexOutput {
@builtin(position) position : vec4<f32>,
@location(0) worldPosition : vec3<f32>,
@location(1) normal : vec3<f32>,
@location(2) heightValue : f32,
};
fn indexFromCoord( x : u32, y : u32, size : u32 ) -> u32 {
return y * size + x;
}
fn loadViewProjection( ) -> mat4x4<f32> {
return mat4x4<f32>(
vec4<f32>( viewProjection[ 0 ], viewProjection[ 1 ], viewProjection[ 2 ], viewProjection[ 3 ] ),
vec4<f32>( viewProjection[ 4 ], viewProjection[ 5 ], viewProjection[ 6 ], viewProjection[ 7 ] ),
vec4<f32>( viewProjection[ 8 ], viewProjection[ 9 ], viewProjection[ 10 ], viewProjection[ 11 ] ),
vec4<f32>( viewProjection[ 12 ], viewProjection[ 13 ], viewProjection[ 14 ], viewProjection[ 15 ] )
);
}
fn sampleHeight( fx : f32, fy : f32, simSize : u32, heightAmp : f32 ) -> f32 {
let sx : f32 = clamp( fx, 0.0, f32( simSize - 1u ) );
let sy : f32 = clamp( fy, 0.0, f32( simSize - 1u ) );
let ix0 : u32 = u32( sx );
let iy0 : u32 = u32( sy );
let ix1 : u32 = min( simSize - 1u, ix0 + 1u );
let iy1 : u32 = min( simSize - 1u, iy0 + 1u );
let tx : f32 = sx - f32( ix0 );
let ty : f32 = sy - f32( iy0 );
let idx00 : u32 = indexFromCoord( ix0, iy0, simSize );
let idx10 : u32 = indexFromCoord( ix1, iy0, simSize );
let idx01 : u32 = indexFromCoord( ix0, iy1, simSize );
let idx11 : u32 = indexFromCoord( ix1, iy1, simSize );
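// The (-1)^(x+y) checkerboard below undoes the k-centering used when the
// spectrum was seeded: a shift of N/2 in frequency corresponds to this sign
// flip in space.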
let parity00 : u32 = ( ix0 + iy0 ) & 1u;
let parity10 : u32 = ( ix1 + iy0 ) & 1u;
let parity01 : u32 = ( ix0 + iy1 ) & 1u;
let parity11 : u32 = ( ix1 + iy1 ) & 1u;
var sign00 : f32 = 1.0;
var sign10 : f32 = 1.0;
var sign01 : f32 = 1.0;
var sign11 : f32 = 1.0;
if ( parity00 == 1u ) { sign00 = -1.0; }
if ( parity10 == 1u ) { sign10 = -1.0; }
if ( parity01 == 1u ) { sign01 = -1.0; }
if ( parity11 == 1u ) { sign11 = -1.0; }
let h00 : f32 = heightField[ idx00 ] * heightAmp * sign00;
let h10 : f32 = heightField[ idx10 ] * heightAmp * sign10;
let h01 : f32 = heightField[ idx01 ] * heightAmp * sign01;
let h11 : f32 = heightField[ idx11 ] * heightAmp * sign11;
let hx0 : f32 = mix( h00, h10, tx );
let hx1 : f32 = mix( h01, h11, tx );
return mix( hx0, hx1, ty );
}
@vertex
fn v_main(
@location(0) position : vec3<f32>,
@location(1) uv : vec2<f32>,
@builtin(instance_index) instanceIndex : u32
) -> VertexOutput {
let simSize : u32 = u32( renderParams[ 0 ] );
let meshSize : f32 = renderParams[ 1 ];
let heightAmp : f32 = renderParams[ 2 ];
let meshRes : u32 = u32( renderParams[ 4 ] );
// Tiling amount (slider 1..N). We clamp to at least 1.
let tilingF : f32 = max( 1.0, renderParams[ 5 ] );
let tiling : u32 = max( 1u, u32( tilingF + 0.5 ) );
let tileRange : u32 = tiling - 1u;
let tilesPerRow : u32 = tileRange * 2u + 1u;
let tileIndex : u32 = instanceIndex;
let tileXIndex : u32 = tileIndex % tilesPerRow;
let tileZIndex : u32 = tileIndex / tilesPerRow;
let offsetGridX : i32 = i32( tileXIndex ) - i32( tileRange );
let offsetGridZ : i32 = i32( tileZIndex ) - i32( tileRange );
let offsetX : f32 = f32( offsetGridX ) * meshSize;
let offsetZ : f32 = f32( offsetGridZ ) * meshSize;
let denom : f32 = max( 1.0, f32( meshRes - 1u ) );
let fxNorm : f32 = clamp( uv.x / denom, 0.0, 1.0 );
let fyNorm : f32 = clamp( uv.y / denom, 0.0, 1.0 );
let fx : f32 = fxNorm * f32( simSize - 1u );
let fy : f32 = fyNorm * f32( simSize - 1u );
let hCenter : f32 = sampleHeight( fx, fy, simSize, heightAmp );
let worldX : f32 = ( fxNorm - 0.5 ) * meshSize + offsetX;
let worldZ : f32 = ( fyNorm - 0.5 ) * meshSize + offsetZ;
// Derivatives via central differences on the sampled height field
let fxStep : f32 = 1.0;
let fyStep : f32 = 1.0;
let hXp : f32 = sampleHeight( fx + fxStep, fy, simSize, heightAmp );
let hXm : f32 = sampleHeight( fx - fxStep, fy, simSize, heightAmp );
let hYp : f32 = sampleHeight( fx, fy + fyStep, simSize, heightAmp );
let hYm : f32 = sampleHeight( fx, fy - fyStep, simSize, heightAmp );
let h : f32 = hCenter;
let dx : f32 = hXp - hXm;
let dz : f32 = hYp - hYm;
let normal : vec3<f32> = normalize( vec3<f32>( -dx, 2.0, -dz ) );
let vp : mat4x4<f32> = loadViewProjection();
var output : VertexOutput;
output.position = vp * vec4<f32>( worldX, h, worldZ, 1.0 );
output.worldPosition = vec3<f32>( worldX, h, worldZ );
output.normal = normal;
output.heightValue = h;
return output;
}
@fragment
fn f_main( input : VertexOutput ) -> @location(0) vec4<f32> {
let mode : u32 = u32( renderParams[ 3 ] );
let N : vec3<f32> = normalize( input.normal );
// Debug: visualize normals
if ( mode == 1u ) {
let normalColor : vec3<f32> = 0.5 * ( N + vec3<f32>( 1.0, 1.0, 1.0 ) );
return vec4<f32>( normalColor, 1.0 );
}
// Debug: true solid color (no height variation)
if ( mode == 2u ) {
let flatColor : vec3<f32> = vec3<f32>( 0.05, 0.4, 0.8 );
return vec4<f32>( flatColor, 1.0 );
}
// Debug: visualize height field as grayscale
if ( mode == 3u ) {
// Scale and bias height into a visible range
let h : f32 = input.heightValue;
let hNorm : f32 = clamp( h * 0.05 + 0.5, 0.0, 1.0 );
let c : vec3<f32> = vec3<f32>( hNorm, hNorm, hNorm );
return vec4<f32>( c, 1.0 );
}
// Sun / light setup
let lightDir : vec3<f32> = normalize( vec3<f32>( 0.2, 0.85, 0.35 ) );
let sunColor : vec3<f32> = vec3<f32>( 1.0, 0.97, 0.9 );
// Camera + view
let camPos : vec3<f32> = vec3<f32>( cameraPosition[ 0 ], cameraPosition[ 1 ], cameraPosition[ 2 ] );
let viewDir : vec3<f32> = normalize( camPos - input.worldPosition );
// Normal and basic terms
let NdotL : f32 = max( dot( N, lightDir ), 0.0 );
let NdotV : f32 = max( dot( N, viewDir ), 0.0 );
// Water body base color
let waterBase : vec3<f32> = vec3<f32>( 0.01, 0.18, 0.55 );
// Realistic mode: reuse simple lighting but with sand/depth-based underwater color
if ( mode == 4u ) {
let meshSize : f32 = renderParams[ 1 ];
let uvBase : vec2<f32> = input.worldPosition.xz / meshSize + vec2<f32>( 0.5, 0.5 );
let noise : vec2<f32> = vec2<f32>(
sin( input.worldPosition.x * 0.05 + input.worldPosition.z * 0.12 ),
cos( input.worldPosition.x * 0.04 - input.worldPosition.z * 0.09 )
) * 0.03;
let uvMain : vec2<f32> = uvBase + noise;
let uvAlt : vec2<f32> = uvBase + vec2<f32>( 0.25, 0.43 ) + noise * 0.8;
let colorSample : vec4<f32> = textureSample( bottomColorTex, bottomSampler, uvMain );
let colorAlt : vec4<f32> = textureSample( bottomColorTex, bottomSampler, uvAlt );
let finalColor : vec3<f32> = mix( colorSample.rgb, colorAlt.rgb, 0.38 );
let heightSample: vec4<f32> = textureSample( bottomHeightTex, bottomSampler, uvMain );
let heightAlt : vec4<f32> = textureSample( bottomHeightTex, bottomSampler, uvAlt );
let heightVal : f32 = mix( heightSample.r, heightAlt.r, 0.4 );
let baseColor : vec3<f32> = finalColor;
let poolY : f32 = -6.0;
let depthGeom : f32 = clamp( poolY - input.worldPosition.y, 0.0, 20.0 );
let shallowT : f32 = clamp( ( heightVal - 0.4 ) / 0.6, 0.0, 1.0 );
let depthT : f32 = clamp( depthGeom / 8.0, 0.0, 1.0 );
let sandWeight : f32 = shallowT * ( 1.0 - depthT );
let waterWeight : f32 = 1.0 - sandWeight;
let sandColor : vec3<f32> = baseColor;
let wetColor : vec3<f32> = mix( baseColor, waterBase, 0.6 );
let bottomColor : vec3<f32> = sandColor * sandWeight + wetColor * waterWeight;
let absorbCoeff : vec3<f32> = vec3<f32>( 0.04, 0.09, 0.16 );
let absorb : vec3<f32> = exp( -absorbCoeff * depthGeom );
let bodyColor : vec3<f32> = bottomColor * absorb;
let up : vec3<f32> = vec3<f32>( 0.0, 1.0, 0.0 );
let reflDirEnv : vec3<f32> = normalize( reflect( -viewDir, N ) );
let horizonT : f32 = clamp( pow( 1.0 - max( dot( reflDirEnv, up ), 0.0 ), 1.5 ), 0.0, 1.0 );
let skyZenith : vec3<f32> = vec3<f32>( 0.02, 0.12, 0.28 );
let skyHorizon : vec3<f32> = vec3<f32>( 0.30, 0.45, 0.70 );
let skyAnalytic : vec3<f32> = mix( skyZenith, skyHorizon, horizonT );
let skyMask : vec3<f32> = vec3<f32>( 0.08, 0.18, 0.32 );
let skyColor : vec3<f32> = mix( skyAnalytic, skyMask, 0.4 );
let halfDir : vec3<f32> = normalize( lightDir + viewDir );
let NdotH : f32 = max( dot( N, halfDir ), 0.0 );
let spec : f32 = pow( NdotH, 140.0 ) * NdotL;
let specTerm : vec3<f32> = spec * vec3<f32>( 1.0, 1.0, 1.0 ) * 1.3;
let F0 : vec3<f32> = vec3<f32>( 0.02, 0.025, 0.03 );
let oneMinus : f32 = 1.0 - NdotV;
let fresnel : vec3<f32> = F0 + ( vec3<f32>( 1.0, 1.0, 1.0 ) - F0 ) * pow( oneMinus, 5.0 );
let diffuse : vec3<f32> = bodyColor * ( 0.15 + 0.85 * NdotL ) * sunColor;
let envBlend : vec3<f32> = mix( skyColor, vec3<f32>( 0.08, 0.25, 0.55 ), 0.35 );
let reflection : vec3<f32> = envBlend + specTerm;
let color : vec3<f32> = diffuse * ( vec3<f32>( 1.0, 1.0, 1.0 ) - fresnel ) + reflection * fresnel;
let colorGamma : vec3<f32> = pow( clamp( color, vec3<f32>( 0.0, 0.0, 0.0 ), vec3<f32>( 1.0, 1.0, 1.0 ) ), vec3<f32>( 1.0 / 2.2, 1.0 / 2.2, 1.0 / 2.2 ) );
return vec4<f32>( colorGamma, 1.0 );
}
// Simple sky / environment tint (towards horizon)
let skyZenith : vec3<f32> = vec3<f32>( 0.02, 0.12, 0.28 );
let skyHorizon : vec3<f32> = vec3<f32>( 0.30, 0.45, 0.70 );
// Reflection direction for environment lookup
let reflDirEnv : vec3<f32> = normalize( reflect( -viewDir, N ) );
let up : vec3<f32> = vec3<f32>( 0.0, 1.0, 0.0 );
let horizonT : f32 = clamp( pow( 1.0 - max( dot( reflDirEnv, up ), 0.0 ), 1.5 ), 0.0, 1.0 );
let skyAnalytic : vec3<f32> = mix( skyZenith, skyHorizon, horizonT );
let skyMask : vec3<f32> = vec3<f32>( 0.08, 0.18, 0.32 );
let skyColor : vec3<f32> = mix( skyAnalytic, skyMask, 0.4 );
// Specular highlight using Blinn-Phong
let halfDir : vec3<f32> = normalize( lightDir + viewDir );
let NdotH : f32 = max( dot( N, halfDir ), 0.0 );
let spec : f32 = pow( NdotH, 140.0 ) * NdotL;
let specTerm : vec3<f32> = spec * vec3<f32>( 1.0, 1.0, 1.0 ) * 1.3;
// Fresnel using Schlick's approximation (F0 ~ 0.02 for water)
let F0 : vec3<f32> = vec3<f32>( 0.02, 0.025, 0.03 );
let oneMinus : f32 = 1.0 - NdotV;
let fresnel : vec3<f32> = F0 + ( vec3<f32>( 1.0, 1.0, 1.0 ) - F0 ) * pow( oneMinus, 5.0 );
// Diffuse-ish underwater term (absorbed light)
let diffuse : vec3<f32> = waterBase * ( 0.15 + 0.85 * NdotL ) * sunColor;
// Reflected environment (sky + sun highlight)
let envBlend : vec3<f32> = mix( skyColor, vec3<f32>( 0.08, 0.25, 0.55 ), 0.35 );
let reflection : vec3<f32> = envBlend + specTerm;
// Mix refraction (underwater) and reflection via Fresnel
let color : vec3<f32> = diffuse * ( vec3<f32>( 1.0, 1.0, 1.0 ) - fresnel ) + reflection * fresnel;
// Mild contrast to keep it punchy
let colorGamma : vec3<f32> = pow( clamp( color, vec3<f32>( 0.0, 0.0, 0.0 ), vec3<f32>( 1.0, 1.0, 1.0 ) ), vec3<f32>( 1.0 / 2.2, 1.0 / 2.2, 1.0 / 2.2 ) );
return vec4<f32>( colorGamma, 1.0 );
}

View File

@@ -0,0 +1,93 @@
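// Brute-force ocean height evaluation: for every output texel, sums the time-evolved
// spectrum h0(k)*e^{i*phi} + conj(h0(-k))*e^{-i*phi}, with phi = omega*t + k.x, over all
// wave vectors (O(N^2) per texel, no separate FFT pass needed).
// params: [0] = time, [1] = grid size N, [2] = output scale.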
const TWO_PI : f32 = 6.28318530718;
const GRAVITY : f32 = 9.81;
@group(0) @binding(0) var<storage, read> h0Real : array<f32>;
@group(0) @binding(1) var<storage, read> h0Imag : array<f32>;
@group(0) @binding(2) var<storage, read_write> spectrumReal : array<f32>;
@group(0) @binding(3) var<storage, read_write> spectrumImag : array<f32>;
@group(0) @binding(4) var<storage, read> params : array<f32>;
fn indexFromCoord( x : u32, y : u32, size : u32 ) -> u32 {
return y * size + x;
}
fn mirrorIndex( x : u32, y : u32, size : u32 ) -> u32 {
let mx : u32 = ( size - x ) % size;
let my : u32 = ( size - y ) % size;
return indexFromCoord( mx, my, size );
}
@compute @workgroup_size( 8, 8 )
fn main( @builtin(global_invocation_id) gid : vec3<u32> ) {
let size : u32 = u32( params[ 1u ] );
if ( gid.x >= size || gid.y >= size ) {
return;
}
let nFloat : f32 = f32( size );
let xPos : f32 = f32( gid.x );
let yPos : f32 = f32( gid.y );
var realValue : f32 = 0.0;
var imagValue : f32 = 0.0;
for ( var ky : u32 = 0u; ky < size; ky = ky + 1u ) {
let kyShift : f32 = f32( i32( ky ) - i32( size ) / 2 );
for ( var kx : u32 = 0u; kx < size; kx = kx + 1u ) {
let kxShift : f32 = f32( i32( kx ) - i32( size ) / 2 );
let idx : u32 = indexFromCoord( kx, ky, size );
let mirrorIdx : u32 = mirrorIndex( kx, ky, size );
let kLength : f32 = sqrt( kxShift * kxShift + kyShift * kyShift );
if ( kLength == 0.0 ) {
continue;
}
let h0R : f32 = h0Real[ idx ];
let h0I : f32 = h0Imag[ idx ];
let h0mR : f32 = h0Real[ mirrorIdx ];
let h0mI : f32 = -h0Imag[ mirrorIdx ]; // conjugate
let omega : f32 = sqrt( GRAVITY * kLength );
let phase : f32 =
omega * params[ 0u ] +
( kxShift * TWO_PI * xPos / nFloat ) +
( kyShift * TWO_PI * yPos / nFloat );
let sinP : f32 = sin( phase );
let cosP : f32 = cos( phase );
let hPos : f32 = h0R * cosP - h0I * sinP;
let hNeg : f32 = h0mR * cosP + h0mI * sinP;
let hPosI : f32 = h0R * sinP + h0I * cosP;
let hNegI : f32 = h0mR * sinP - h0mI * cosP;
realValue = realValue + hPos + hNeg;
imagValue = imagValue + hPosI + hNegI;
}
}
let idxOut : u32 = indexFromCoord( gid.x, gid.y, size );
spectrumReal[ idxOut ] = realValue * params[ 2u ];
spectrumImag[ idxOut ] = imagValue * params[ 2u ];
}

View File

@@ -0,0 +1,80 @@
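// Per-texel spectrum update for the FFT path: writes
// h(k, t) = ( h0(k)*e^{i*omega*t} + h0(-k)*e^{-i*omega*t} ) * heightScale
// into spectrumReal / spectrumImag; the mirror term is used unconjugated here,
// matching the CPU reference in tests/OceanTests.js.
// params: [0] = time, [1] = grid size N, [2] = height scale.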
const GRAVITY : f32 = 9.81;
@group(0) @binding(0) var<storage, read> h0Real : array<f32>;
@group(0) @binding(1) var<storage, read> h0Imag : array<f32>;
@group(0) @binding(2) var<storage, read_write> spectrumReal : array<f32>;
@group(0) @binding(3) var<storage, read_write> spectrumImag : array<f32>;
@group(0) @binding(4) var<storage, read> params : array<f32>;
fn indexFromCoord( x : u32, y : u32, size : u32 ) -> u32 {
return y * size + x;
}
fn mirrorIndex( x : u32, y : u32, size : u32 ) -> u32 {
let mx : u32 = ( size - x ) % size;
let my : u32 = ( size - y ) % size;
return indexFromCoord( mx, my, size );
}
@compute @workgroup_size( 8, 8 )
fn main( @builtin(global_invocation_id) gid : vec3<u32> ) {
let size : u32 = u32( params[ 1u ] );
if ( gid.x >= size || gid.y >= size ) {
return;
}
let kx : u32 = gid.x;
let ky : u32 = gid.y;
let kxShift : f32 = f32( i32( kx ) - i32( size ) / 2 );
let kyShift : f32 = f32( i32( ky ) - i32( size ) / 2 );
let kLength : f32 = sqrt( kxShift * kxShift + kyShift * kyShift );
let idx : u32 = indexFromCoord( kx, ky, size );
if ( kLength == 0.0 ) {
spectrumReal[ idx ] = 0.0;
spectrumImag[ idx ] = 0.0;
return;
}
let mirrorIdx : u32 = mirrorIndex( kx, ky, size );
let h0R : f32 = h0Real[ idx ];
let h0I : f32 = h0Imag[ idx ];
let h0mR : f32 = h0Real[ mirrorIdx ];
let h0mI : f32 = h0Imag[ mirrorIdx ];
let omega : f32 = sqrt( GRAVITY * kLength );
let time : f32 = params[ 0u ];
let heightA : f32 = params[ 2u ];
let theta : f32 = omega * time;
let cosT : f32 = cos( theta );
let sinT : f32 = sin( theta );
// h0(k) * e^{ i * omega * t }
let hPosR : f32 = h0R * cosT - h0I * sinT;
let hPosI : f32 = h0R * sinT + h0I * cosT;
// h0(-k) * e^{ -i * omega * t }
let hNegR : f32 = h0mR * cosT + h0mI * sinT;
let hNegI : f32 = h0mI * cosT - h0mR * sinT;
let outR : f32 = ( hPosR + hNegR ) * heightA;
let outI : f32 = ( hPosI + hNegI ) * heightA;
spectrumReal[ idx ] = outR;
spectrumImag[ idx ] = outI;
}

74
shaders/sky_sphere.wgsl Normal file
View File

@@ -0,0 +1,74 @@
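// Sky dome shader: analytic zenith/horizon gradient plus a small sun disk.
// frag_depth is forced to 0.999 so the sky stays behind all scene geometry.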
@group(0) @binding(0) var<uniform> viewProjectionMatrix : mat4x4<f32>;
@group(0) @binding(1) var<uniform> cameraPosition : vec3<f32>;
struct VertexOutput {
@builtin(position) position : vec4<f32>,
@location(0) worldPosition : vec3<f32>,
@location(1) worldNormal : vec3<f32>,
};
@vertex
fn v_main(
@location(0) position : vec3<f32>,
@location(1) normal : vec3<f32>
) -> VertexOutput {
var output : VertexOutput;
output.position = viewProjectionMatrix * vec4<f32>( position, 1.0 );
output.worldPosition = position;
output.worldNormal = normalize( normal );
return output;
}
struct FragmentOutput {
@location(0) color : vec4<f32>,
@builtin(frag_depth) depth : f32,
};
@fragment
fn f_main( input : VertexOutput ) -> FragmentOutput {
let surfaceDir : vec3<f32> = normalize( input.worldPosition );
let up : vec3<f32> = vec3<f32>( 0.0, 1.0, 0.0 );
let cosAngle : f32 = clamp( dot( surfaceDir, up ), 0.0, 1.0 );
let skyZenith : vec3<f32> = vec3<f32>( 0.45, 0.78, 0.98 );
let skyHorizon : vec3<f32> = vec3<f32>( 0.12, 0.28, 0.52 );
let sunDir : vec3<f32> = normalize( vec3<f32>( -0.1, 0.97, 0.2 ) );
let sunAngle : f32 = clamp( dot( surfaceDir, sunDir ), 0.0, 1.0 );
let sunExposure : f32 = smoothstep( 0.94, 0.999, sunAngle );
let skyGradient : vec3<f32> = mix( skyHorizon, skyZenith, pow( cosAngle, 1.1 ) );
let airglow : vec3<f32> = vec3<f32>( 0.09, 0.16, 0.28 ) * pow( 1.0 - cosAngle, 3.0 );
let scattering : vec3<f32> = skyGradient + airglow;
let sunDisk : vec3<f32> = vec3<f32>( 1.0, 0.94, 0.76 ) * sunExposure * sunExposure * 1.6;
let skyColor : vec3<f32> = mix( scattering, scattering + sunDisk, sunExposure );
let horizonBlend : f32 = smoothstep( 0.0, 0.6, 1.0 - cosAngle );
let baseColor : vec3<f32> = mix( skyHorizon, skyZenith, max( cosAngle, 0.1 ) );
let color : vec3<f32> = mix( baseColor * 1.1 + vec3<f32>( 0.02, 0.03, 0.04 ), skyColor * 0.9, 1.0 - horizonBlend );
return FragmentOutput(
vec4<f32>( color, 1.0 ),
0.999
);
}

311
tests/OceanTests.js Normal file
View File

@@ -0,0 +1,311 @@
export class OceanTests {
constructor( getPrimaryPipeline ) {
this.getPrimaryPipeline = getPrimaryPipeline;
}
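// Compares the GPU Row/Col FFT passes against a CPU reference DFT, using a known
// sinusoidal input written into pipeline memory.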
async testFft( ) {
try {
const pipeline = this.getPrimaryPipeline( );
if ( !pipeline ) {
console.warn( "No primary pipeline for testFft()" );
return;
}
const N = pipeline.gridSize;
const block = pipeline.getBlockByName( "ocean" );
const rowPass = block.getPass( "RowFFT" );
const colPass = block.getPass( "ColFFT" );
const inputReal = new Float32Array( N * N );
const inputImag = new Float32Array( N * N );
for ( let y = 0; y < N; y++ ) {
for ( let x = 0; x < N; x++ ) {
const idx = y * N + x;
inputReal[ idx ] = Math.sin( 2 * Math.PI * x / N ) + 0.5 * Math.cos( 2 * Math.PI * y / N );
inputImag[ idx ] = 0;
}
}
pipeline.memory.spectrumReal.set( inputReal );
pipeline.memory.spectrumImag.set( inputImag );
const spectrumPass = block.getPass( "Spectrum" );
spectrumPass.shader.setVariable( "spectrumReal", pipeline.memory.spectrumReal );
spectrumPass.shader.setVariable( "spectrumImag", pipeline.memory.spectrumImag );
await rowPass.execute( );
await colPass.execute( );
const gpuHeight = await colPass.shader.debugBuffer( "heightField" );
const cpuResult = this.cpuFft2D( inputReal, inputImag, N );
let maxDiff = 0;
let rms = 0;
for ( let i = 0; i < gpuHeight.length; i++ ) {
const diff = gpuHeight[ i ] - cpuResult.real[ i ];
maxDiff = Math.max( maxDiff, Math.abs( diff ) );
rms += diff * diff;
}
rms = Math.sqrt( rms / gpuHeight.length );
console.log( "FFT test completed. maxDiff:", maxDiff, "rms:", rms );
} catch ( e ) {
console.error( "FFT test failed:", e );
}
}
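// Compares the GPU Spectrum pass at t = 0 against the CPU reference cpuSpectrum2D().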
async testSpectrum( ) {
try {
const pipeline = this.getPrimaryPipeline( );
if ( !pipeline ) {
console.warn( "No primary pipeline for testSpectrum()" );
return;
}
const N = pipeline.gridSize;
const block = pipeline.getBlockByName( "ocean" );
const spectrumPass = block.getPass( "Spectrum" );
const h0Real = pipeline.memory.h0Real;
const h0Imag = pipeline.memory.h0Imag;
const time = 0;
const heightScale = pipeline.memory.computeParams[ 2 ];
const cpuSpec = this.cpuSpectrum2D( h0Real, h0Imag, N, time, heightScale );
pipeline.memory.computeParams[ 0 ] = time;
pipeline.memory.computeParams[ 1 ] = N;
pipeline.memory.computeParams[ 2 ] = heightScale;
spectrumPass.shader.setVariable( "params", pipeline.memory.computeParams );
await spectrumPass.execute( );
const gpuReal = await spectrumPass.shader.debugBuffer( "spectrumReal" );
const gpuImag = await spectrumPass.shader.debugBuffer( "spectrumImag" );
let maxDiff = 0;
let rms = 0;
for ( let i = 0; i < gpuReal.length; i++ ) {
const dr = gpuReal[ i ] - cpuSpec.real[ i ];
const di = gpuImag[ i ] - cpuSpec.imag[ i ];
const magDiff = Math.sqrt( dr * dr + di * di );
if ( magDiff > maxDiff ) {
maxDiff = magDiff;
}
rms += dr * dr + di * di;
}
rms = Math.sqrt( rms / gpuReal.length );
console.log( "Spectrum test completed. maxDiff:", maxDiff, "rms:", rms );
} catch ( e ) {
console.error( "Spectrum test failed:", e );
}
}
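// Reference 1D transform: a plain O(N^2) DFT with 1/N normalisation, used only for validation.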
cpuFft1D( realIn, imagIn, N ) {
const outR = new Float32Array( N );
const outI = new Float32Array( N );
for ( let k = 0; k < N; k++ ) {
let sumR = 0;
let sumI = 0;
for ( let n = 0; n < N; n++ ) {
const angle = -2 * Math.PI * k * n / N;
const c = Math.cos( angle );
const s = Math.sin( angle );
const xr = realIn[ n ];
const xi = imagIn[ n ];
sumR += xr * c - xi * s;
sumI += xr * s + xi * c;
}
const invScale = 1 / N;
outR[ k ] = sumR * invScale;
outI[ k ] = sumI * invScale;
}
return { real: outR, imag: outI };
}
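// Reference 2D transform: applies the 1D DFT to every row, then to every column.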
cpuFft2D( realIn, imagIn, N ) {
const realTmp = new Float32Array( realIn );
const imagTmp = new Float32Array( imagIn );
for ( let row = 0; row < N; row++ ) {
const rowOffset = row * N;
const rSlice = realTmp.subarray( rowOffset, rowOffset + N );
const iSlice = imagTmp.subarray( rowOffset, rowOffset + N );
const res = this.cpuFft1D( rSlice, iSlice, N );
for ( let x = 0; x < N; x++ ) {
realTmp[ rowOffset + x ] = res.real[ x ];
imagTmp[ rowOffset + x ] = res.imag[ x ];
}
}
const realOut = new Float32Array( N * N );
const imagOut = new Float32Array( N * N );
for ( let col = 0; col < N; col++ ) {
const rCol = new Float32Array( N );
const iCol = new Float32Array( N );
for ( let row = 0; row < N; row++ ) {
const idx = row * N + col;
rCol[ row ] = realTmp[ idx ];
iCol[ row ] = imagTmp[ idx ];
}
const res = this.cpuFft1D( rCol, iCol, N );
for ( let row = 0; row < N; row++ ) {
const idx = row * N + col;
realOut[ idx ] = res.real[ row ];
imagOut[ idx ] = res.imag[ row ];
}
}
return { real: realOut, imag: imagOut };
}
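// CPU mirror of the per-texel spectrum shader (same phase term and the same unconjugated mirror term).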
cpuSpectrum2D( h0Real, h0Imag, size, time, heightScale ) {
const GRAVITY = 9.81;
const realOut = new Float32Array( size * size );
const imagOut = new Float32Array( size * size );
for ( let ky = 0; ky < size; ky++ ) {
for ( let kx = 0; kx < size; kx++ ) {
const kxShift = kx - size / 2;
const kyShift = ky - size / 2;
const kLength = Math.sqrt( kxShift * kxShift + kyShift * kyShift );
const idx = ky * size + kx;
if ( kLength === 0 ) {
continue;
}
const mx = ( size - kx ) % size;
const my = ( size - ky ) % size;
const mirrorIdx = my * size + mx;
const h0R = h0Real[ idx ];
const h0I = h0Imag[ idx ];
const h0mR = h0Real[ mirrorIdx ];
const h0mI = h0Imag[ mirrorIdx ];
const omega = Math.sqrt( GRAVITY * kLength );
const theta = omega * time;
const cosT = Math.cos( theta );
const sinT = Math.sin( theta );
const hPosR = h0R * cosT - h0I * sinT;
const hPosI = h0R * sinT + h0I * cosT;
const hNegR = h0mR * cosT + h0mI * sinT;
const hNegI = h0mI * cosT - h0mR * sinT;
realOut[ idx ] = ( hPosR + hNegR ) * heightScale;
imagOut[ idx ] = ( hPosI + hNegI ) * heightScale;
}
}
return { real: realOut, imag: imagOut };
}
}

280
tools/generate_terrain_heightmap.js Normal file
View File

@@ -0,0 +1,280 @@
// Simple Node.js (ESM) script to bake a grayscale heightmap PNG
// from the OBJ terrain model: resources/models/Terrain003_1K.obj
//
// Usage (from repo root, after installing pngjs):
// npm install pngjs
// node tools/generate_terrain_heightmap.js
//
// It will write a PNG into: resources/textures/heightmap/Terrain003_1K_Height512.png
import fs from "node:fs";
import path from "node:path";
import { fileURLToPath } from "node:url";
import { PNG } from "pngjs";
// Resolve the repo root from this module's file URL (fileURLToPath also works on Windows).
const ROOT_DIR = path.dirname( path.dirname( fileURLToPath( import.meta.url ) ) );
const OBJ_PATH = path.join( ROOT_DIR, "resources", "models", "Terrain003_1K.obj" );
const OUT_PATH = path.join( ROOT_DIR, "resources", "textures", "heightmap", "Terrain003_1K_Height512.png" );
const RESOLUTION = 512;
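// Collects only "v x y z" position lines from the OBJ; faces, normals and UVs are ignored.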
function parseObjVertices( text ) {
const lines = text.split( /\r?\n/ );
const vertices = [];
for ( let i = 0; i < lines.length; i++ ) {
const line = lines[ i ].trim();
if ( line.length === 0 || line.charAt( 0 ) === "#" ) {
continue;
}
const parts = line.split( /\s+/ );
if ( parts[ 0 ] === "v" && parts.length >= 4 ) {
const x = parseFloat( parts[ 1 ] );
const y = parseFloat( parts[ 2 ] );
const z = parseFloat( parts[ 3 ] );
if ( !isNaN( x ) && !isNaN( y ) && !isNaN( z ) ) {
vertices.push( { x, y, z } );
}
}
}
return vertices;
}
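// Bins vertex heights onto an XZ grid, averages per cell, fills empty cells with the
// minimum height, smooths with two 3x3 box-blur passes, then normalises to [0, 1].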
function buildHeightmapFromVertices( vertices, resolution ) {
if ( vertices.length === 0 ) {
throw new Error( "No vertices parsed from OBJ." );
}
let minX = Infinity, maxX = -Infinity;
let minY = Infinity, maxY = -Infinity;
let minZ = Infinity, maxZ = -Infinity;
for ( const v of vertices ) {
if ( v.x < minX ) minX = v.x;
if ( v.x > maxX ) maxX = v.x;
if ( v.y < minY ) minY = v.y;
if ( v.y > maxY ) maxY = v.y;
if ( v.z < minZ ) minZ = v.z;
if ( v.z > maxZ ) maxZ = v.z;
}
const spanX = maxX - minX || 1;
const spanZ = maxZ - minZ || 1;
const width = resolution;
const height = resolution;
const heights = new Float32Array( width * height );
const counts = new Uint32Array( width * height );
for ( const v of vertices ) {
const uNorm = ( v.x - minX ) / spanX;
const vNorm = ( v.z - minZ ) / spanZ;
let ix = Math.round( uNorm * ( width - 1 ) );
let iy = Math.round( vNorm * ( height - 1 ) );
if ( ix < 0 ) ix = 0;
if ( ix >= width ) ix = width - 1;
if ( iy < 0 ) iy = 0;
if ( iy >= height ) iy = height - 1;
const idx = iy * width + ix;
heights[ idx ] += v.y;
counts[ idx ]++;
}
for ( let i = 0; i < heights.length; i++ ) {
if ( counts[ i ] > 0 ) {
heights[ i ] /= counts[ i ];
} else {
heights[ i ] = minY;
}
}
const tmp = new Float32Array( width * height );
for ( let pass = 0; pass < 2; pass++ ) {
for ( let y = 0; y < height; y++ ) {
for ( let x = 0; x < width; x++ ) {
let sum = 0;
let weight = 0;
for ( let dy = -1; dy <= 1; dy++ ) {
for ( let dx = -1; dx <= 1; dx++ ) {
const nx = x + dx;
const ny = y + dy;
if ( nx < 0 || ny < 0 || nx >= width || ny >= height ) {
continue;
}
const nIdx = ny * width + nx;
sum += heights[ nIdx ];
weight += 1;
}
}
const idx = y * width + x;
tmp[ idx ] = weight > 0 ? sum / weight : heights[ idx ];
}
}
for ( let i = 0; i < heights.length; i++ ) {
heights[ i ] = tmp[ i ];
}
}
let outMin = Infinity;
let outMax = -Infinity;
for ( let i = 0; i < heights.length; i++ ) {
const h = heights[ i ];
if ( h < outMin ) outMin = h;
if ( h > outMax ) outMax = h;
}
const range = outMax - outMin || 1;
const normalized = new Float32Array( width * height );
for ( let i = 0; i < heights.length; i++ ) {
normalized[ i ] = ( heights[ i ] - outMin ) / range;
}
return { width, height, data: normalized };
}
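// Writes the normalised heights as an 8-bit grayscale PNG (identical R, G, B; alpha 255).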
function writeHeightmapPng( heightmap, outPath ) {
const { width, height, data } = heightmap;
const png = new PNG( { width, height } );
for ( let y = 0; y < height; y++ ) {
for ( let x = 0; x < width; x++ ) {
const idx = y * width + x;
const v = Math.max( 0, Math.min( 1, data[ idx ] ) );
const c = Math.round( v * 255 );
const o = idx * 4;
png.data[ o + 0 ] = c;
png.data[ o + 1 ] = c;
png.data[ o + 2 ] = c;
png.data[ o + 3 ] = 255;
}
}
return new Promise( function( resolve, reject ) {
const stream = fs.createWriteStream( outPath );
stream.on( "finish", resolve );
stream.on( "error", reject );
png.pack().pipe( stream );
} );
}
async function main( ) {
try {
console.log( "Reading OBJ:", OBJ_PATH );
const text = fs.readFileSync( OBJ_PATH, "utf8" );
const vertices = parseObjVertices( text );
console.log( "Parsed vertices:", vertices.length );
const heightmap = buildHeightmapFromVertices( vertices, RESOLUTION );
console.log( "Heightmap built:", heightmap.width, "x", heightmap.height );
await writeHeightmapPng( heightmap, OUT_PATH );
console.log( "Heightmap written to:", OUT_PATH );
} catch ( err ) {
console.error( "Failed to generate heightmap:", err );
process.exitCode = 1;
}
}
if ( process.argv[ 1 ] && fileURLToPath( import.meta.url ) === path.resolve( process.argv[ 1 ] ) ) {
main( );
}