From 6f586ae91b8c947fcac27bd3adddff11e0c7a1f8 Mon Sep 17 00:00:00 2001 From: kaj dijkstra Date: Tue, 18 Nov 2025 13:16:41 +0100 Subject: [PATCH] First Commit --- Matrix.js | 49 +++++ README.md | 123 +++++++++++- Vector.js | 47 +++++ index.html | 261 ++++++++++++++++++++++++++ real.js | 535 +++++++++++++++++++++++++++++++++++++++++++++++++++++ server.mjs | 171 +++++++++++++++++ 6 files changed, 1184 insertions(+), 2 deletions(-) create mode 100644 Matrix.js create mode 100644 Vector.js create mode 100644 index.html create mode 100644 real.js create mode 100644 server.mjs diff --git a/Matrix.js b/Matrix.js new file mode 100644 index 0000000..5176221 --- /dev/null +++ b/Matrix.js @@ -0,0 +1,49 @@ + +import Vector from "./Vector.js" + +function random() { + let seed = 12345; // fixed seed + + return function() { + seed = ( seed * 16807 ) % 2147483647; + return ( seed - 1 ) / 2147483646; + }; +} + +const randomFix = random(); + +export default class Matrix { + constructor( rows, cols, fillValue = 0 ) { + this.rows = rows; + this.cols = cols; + this.data = new Array( rows ); + for ( let i = 0 ; i < rows ; i++ ) { + this.data[ i ] = new Array( cols ); + for ( let j = 0 ; j < cols ; j++ ) { + this.data[ i ][ j ] = fillValue; + } + } + } + + static random( rows, cols, scale = 0.1 ) { + const mat = new Matrix( rows, cols ); + for ( let i = 0 ; i < rows ; i++ ) { + for ( let j = 0 ; j < cols ; j++ ) { + mat.data[ i ][ j ] = ( randomFix() - 0.5 ) * scale; + } + } + return mat; + } + + static matVecMul( mat, vec ) { + const result = new Vector( mat.rows ); + for ( let i = 0 ; i < mat.rows ; i++ ) { + let sum = 0; + for ( let j = 0 ; j < mat.cols ; j++ ) { + sum += mat.data[ i ][ j ] * vec.data[ j ]; + } + result.data[ i ] = sum; + } + return result; + } +} \ No newline at end of file diff --git a/README.md b/README.md index e35aa96..88a3bff 100644 --- a/README.md +++ b/README.md @@ -1,3 +1,122 @@ -# Simple-Transformer-JS +# Minimal Attention Model Demo (Browser-Only) + +This 
project is a small in-browser demonstration of key components of a transformer-style attention mechanism. It runs entirely in JavaScript using ES modules. + +It includes: + +• Word embeddings +• Positional encoding +• Scaled dot-product attention +• Softmax scoring +• Simple training loop (cross-entropy loss) +• Prediction of next token based on input context + +No third-party machine learning libraries are used. + +--- + +## Files + +| File | Purpose | +| ------------ | ------------------------------------ | +| `index.html` | Basic UI output + script inclusion | +| `real.js` | Full attention model implementation | +| `Vector.js` | Basic vector operations | +| `Matrix.js` | Basic dense matrix operations | +| `server.js` | Minimal static HTTP server (Node.js) | + +--- + +## Vocabulary + +The demo uses a tiny fixed vocabulary: + +``` +The, Cat, Sat, On, Mat, Bench, Book, Great, Is +``` + +Tokens are mapped to integer indices. + +--- + +## Training + +Training data sequences: + +``` +["The Book Is Great"] +["The Cat Sat On The Mat"] +["The Cat Sat On The Bench"] +… +``` + +Each epoch loops over all sequences and performs: + +1. Embedding lookup +2. Positional encoding added to embeddings +3. Query / Key / Value projections +4. Scaled dot-product attention +5. Weighted sum → logits → softmax probabilities +6. Cross-entropy loss + weight updates on: + • Output projection matrix + • Token embeddings + +The system prints intermediate progress into DOM elements. + +--- + +## Output + +Once trained, the model prints predictions: + +``` +Next word after 'The Book Is': ... +Next word after 'The Cat Sat': ... +Next word after 'The Cat': ... +... +``` + +Predictions are appended to `.prediction` container in the page. 
+ +--- + +## How to Run + +### 1 — Start the server + +From the folder containing `server.js` and the HTML/JS files: + +```bash +node server.js +``` + +Server will listen on: + +``` +http://localhost:1234 +``` + +### 2 — Open the demo in a browser + +Navigate to: + +``` +http://localhost:1234 +``` + +The demo will: + +• Load embeddings +• Run training loop +• Display loss progression +• Show final predictions + +--- + +## Notes + +• This is a simplified demonstration intended for clarity, not accuracy +• No batching, dropout, layer-norm, or multi-head attention +• Update rules only modify embeddings + output projection (queries/keys/values not updated) + -This project is a small in-browser demonstration of key components of a transformer-style attention mechanism. It runs entirely in JavaScript using ES modules. \ No newline at end of file diff --git a/Vector.js b/Vector.js new file mode 100644 index 0000000..7466001 --- /dev/null +++ b/Vector.js @@ -0,0 +1,47 @@ +function random() { + let seed = 12345; // fixed seed + + return function() { + seed = ( seed * 16807 ) % 2147483647; + return ( seed - 1 ) / 2147483646; + }; +} + +const randomFix = random(); + +export default class Vector { + + constructor( length, fillValue = 0 ) { + this.data = new Array( length ); + for ( let i = 0 ; i < length ; i++ ) { + this.data[ i ] = fillValue; + } + } + + get length() { + return this.data.length; + } + + static dot( a, b ) { + let sum = 0; + for ( let i = 0 ; i < a.length ; i++ ) { + sum += a.data[ i ] * b.data[ i ]; + } + return sum; + } + + static softmax( vec ) { + const max = Math.max( ...vec.data ); + const exps = new Array( vec.length ); + let sum = 0; + for ( let i = 0 ; i < vec.length ; i++ ) { + exps[ i ] = Math.exp( vec.data[ i ] - max ); + sum += exps[ i ]; + } + const result = new Vector( vec.length ); + for ( let i = 0 ; i < vec.length ; i++ ) { + result.data[ i ] = exps[ i ] / sum; + } + return result; + } +} diff --git a/index.html b/index.html new file mode 
100644 index 0000000..ab439d9 --- /dev/null +++ b/index.html @@ -0,0 +1,261 @@ + + + + + + +WebGPU Attention Example + + + + +
+ +
+

Simplified transformer-style attention mechanism in JavaScript

+

This JavaScript implementation demonstrates a simplified transformer-style attention mechanism built from basic linear algebra and embedding operations. The code was developed with AI assistance, using natural-language instructions and automated code generation to build and organize the components.

+ +
+ +
+

Training

+
+ +
+
+ + + +
+ +
+ + + +
+ +
+ + + +
+
+ + + + \ No newline at end of file diff --git a/real.js b/real.js new file mode 100644 index 0000000..7fbde90 --- /dev/null +++ b/real.js @@ -0,0 +1,535 @@ +import Vector from "./Vector.js" + +import Matrix from "./Matrix.js" + + +function createRandomNumberGenerator() { + + let seed = 12345; + + function generateNextRandomNumber() { + + seed = ( seed * 16807 ) % 2147483647; + + return ( seed - 1 ) / 2147483646; + + } + + return generateNextRandomNumber; + +} + +const randomNumberGenerator = createRandomNumberGenerator(); + + +class SimpleEmbedding { + + constructor( vocabularySize, embeddingDimension ) { + + this.vocabularySize = vocabularySize; + + this.embeddingDimension = embeddingDimension; + + this.embeddingVectors = new Array( vocabularySize ); + + this.initializeEmbeddings(); + + } + + + initializeEmbeddings() { + + for ( let index = 0 ; index < this.vocabularySize ; index++ ) { + + const vectorInstance = new Vector( this.embeddingDimension ); + + this.initializeVectorRandomly( vectorInstance ); + + this.embeddingVectors[ index ] = vectorInstance; + + } + + } + + + initializeVectorRandomly( vectorInstance ) { + + for ( let elementIndex = 0 ; elementIndex < vectorInstance.data.length ; elementIndex++ ) { + + vectorInstance.data[ elementIndex ] = ( randomNumberGenerator() - 0.5 ) * 0.1; + + } + + } + + + lookupEmbedding( tokenIdentifier ) { + + return this.embeddingVectors[ tokenIdentifier ]; + + } + + + updateEmbedding( tokenIdentifier, gradientVector, learningRate ) { + + const vectorInstance = this.embeddingVectors[ tokenIdentifier ]; + + for ( let elementIndex = 0 ; elementIndex < this.embeddingDimension ; elementIndex++ ) { + + vectorInstance.data[ elementIndex ] -= learningRate * gradientVector.data[ elementIndex ]; + + } + + } + +} + + +class PositionalEncoding { + + static calculateValue( position, index ) { + + const modelDimension = 8; + + const angleRate = 1 / Math.pow( 10000, 2 * Math.floor( index / 2 ) / modelDimension ); + + if ( index % 2 === 
0 ) { + + return Math.sin( position * angleRate ); + + } + + return Math.cos( position * angleRate ); + + } + +} + + +function calculateCrossEntropyLoss( probabilities, targetIndex ) { + + return -Math.log( probabilities.data[ targetIndex ] + 1e-9 ); + +} + + +function calculateCrossEntropyGradient( probabilities, targetIndex ) { + + const gradientVector = new Vector( probabilities.length ); + + for ( let index = 0 ; index < probabilities.length ; index++ ) { + + gradientVector.data[ index ] = probabilities.data[ index ]; + + } + + gradientVector.data[ targetIndex ] -= 1; + + return gradientVector; + +} + + +const vocabulary = { + The: 0, + Cat: 1, + Sat: 2, + On: 3, + Mat: 4, + Bench: 5, + Book: 6, + Great: 7, + Is: 8 +}; + + +const vocabularySize = Object.keys( vocabulary ).length; + +const embeddingDimension = 8; + +const embeddingsInstance = new SimpleEmbedding( vocabularySize, embeddingDimension ); + +const matrixQuery = Matrix.random( embeddingDimension, embeddingDimension ); + +const matrixKey = Matrix.random( embeddingDimension, embeddingDimension ); + +const matrixValue = Matrix.random( embeddingDimension, embeddingDimension ); + +const matrixOutput = Matrix.random( vocabularySize, embeddingDimension ); + + +function applyPositionalEncodingToInputEmbeddings( inputEmbeddingVectors ) { + + for ( let positionIndex = 0 ; positionIndex < inputEmbeddingVectors.length ; positionIndex++ ) { + + for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) { + + inputEmbeddingVectors[ positionIndex ][ dimensionIndex ] += PositionalEncoding.calculateValue( positionIndex, dimensionIndex ); + + } + + } + +} + + +function computeAttentionScoresVector( queryVector, keyVectors ) { + + const scoresVector = new Vector( keyVectors.length ); + + for ( let index = 0 ; index < keyVectors.length ; index++ ) { + + scoresVector.data[ index ] = Vector.dot( queryVector, keyVectors[ index ] ) / Math.sqrt( embeddingDimension ); + + } + + return scoresVector; 
+ +} + + +function computeWeightedSumVector( attentionWeightsVector, valueVectors ) { + + const weightedSumVector = new Vector( embeddingDimension ); + + for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) { + + let sum = 0; + + for ( let index = 0 ; index < valueVectors.length ; index++ ) { + + sum += attentionWeightsVector.data[ index ] * valueVectors[ index ].data[ dimensionIndex ]; + + } + + weightedSumVector.data[ dimensionIndex ] = sum; + + } + + return weightedSumVector; + +} + + +function computeLogitsVector( weightedSumVector ) { + + const logitsVector = new Vector( vocabularySize ); + + for ( let vocabIndex = 0 ; vocabIndex < vocabularySize ; vocabIndex++ ) { + + let sum = 0; + + for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) { + + sum += matrixOutput.data[ vocabIndex ][ dimensionIndex ] * weightedSumVector.data[ dimensionIndex ]; + + } + + logitsVector.data[ vocabIndex ] = sum; + + } + + return logitsVector; + +} + + +function forwardPass( inputTokenIdentifiers ) { + + const inputEmbeddingVectors = new Array( inputTokenIdentifiers.length ); + + for ( let index = 0 ; index < inputTokenIdentifiers.length ; index++ ) { + + inputEmbeddingVectors[ index ] = embeddingsInstance.lookupEmbedding( inputTokenIdentifiers[ index ] ); + + } + + applyPositionalEncodingToInputEmbeddings( inputEmbeddingVectors ); + + const queryVectors = new Array( inputEmbeddingVectors.length ); + + const keyVectors = new Array( inputEmbeddingVectors.length ); + + const valueVectors = new Array( inputEmbeddingVectors.length ); + + for ( let index = 0 ; index < inputEmbeddingVectors.length ; index++ ) { + + queryVectors[ index ] = Matrix.matVecMul( matrixQuery, inputEmbeddingVectors[ index ] ); + + keyVectors[ index ] = Matrix.matVecMul( matrixKey, inputEmbeddingVectors[ index ] ); + + valueVectors[ index ] = Matrix.matVecMul( matrixValue, inputEmbeddingVectors[ index ] ); + + } + + const lastQueryVector = 
queryVectors[ queryVectors.length - 1 ]; + + const attentionScores = computeAttentionScoresVector( lastQueryVector, keyVectors ); + + const attentionWeights = Vector.softmax( attentionScores ); + + const weightedSumVector = computeWeightedSumVector( attentionWeights, valueVectors ); + + const logitsVector = computeLogitsVector( weightedSumVector ); + + const probabilities = Vector.softmax( logitsVector ); + + const resultObject = { + probabilities: probabilities, + attentionWeights: attentionWeights, + weightedSumVector: weightedSumVector, + queryVectors: queryVectors, + keyVectors: keyVectors, + valueVectors: valueVectors, + inputEmbeddingVectors: inputEmbeddingVectors, + lastQueryVector: lastQueryVector + }; + + return resultObject; + +} + + +function updateOutputLayerWeights( probabilities, targetIndex, weightedSumVector, learningRate ) { + + const gradientLossVector = calculateCrossEntropyGradient( probabilities, targetIndex ); + + for ( let vocabIndex = 0 ; vocabIndex < vocabularySize ; vocabIndex++ ) { + + for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) { + + matrixOutput.data[ vocabIndex ][ dimensionIndex ] -= learningRate * gradientLossVector.data[ vocabIndex ] * weightedSumVector.data[ dimensionIndex ]; + + } + + } + +} + + +function trainModel( inputTokenIdentifiers, targetIndex, learningRate ) { + + const { + probabilities, + weightedSumVector, + queryVectors, + keyVectors, + valueVectors, + inputEmbeddingVectors, + lastQueryVector + } = forwardPass( inputTokenIdentifiers ); + + const lossValue = calculateCrossEntropyLoss( probabilities, targetIndex ); + + updateOutputLayerWeights( probabilities, targetIndex, weightedSumVector, learningRate ); + + // Calculate gradient of loss with respect to weightedSumVector: dLoss/dWeightedSum = W_out^T * gradLoss + const gradientLossVector = calculateCrossEntropyGradient( probabilities, targetIndex ); + + const gradientWeightedSumVector = new Vector( embeddingDimension ); + + 
for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) { + + let sum = 0; + + for ( let vocabIndex = 0 ; vocabIndex < vocabularySize ; vocabIndex++ ) { + + sum += matrixOutput.data[ vocabIndex ][ dimensionIndex ] * gradientLossVector.data[ vocabIndex ]; + + } + + gradientWeightedSumVector.data[ dimensionIndex ] = sum; + + } + + // Backpropagate to valueVectors weighted by attention weights + + for ( let index = 0 ; index < valueVectors.length ; index++ ) { + + const gradientVector = new Vector( embeddingDimension ); + + for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) { + + // gradVec = gradWeightedSum * attentionWeights[index] + + gradientVector.data[ dimensionIndex ] = gradientWeightedSumVector.data[ dimensionIndex ] * Vector.softmax( computeAttentionScoresVector( lastQueryVector, keyVectors ) ).data[ index ]; + + } + + // Update embedding corresponding to inputTokenIdentifiers[index] + + embeddingsInstance.updateEmbedding( inputTokenIdentifiers[ index ], gradientVector, learningRate ); + + } + + // TODO: implement updates for matrixQuery, matrixKey, matrixValue similarly + + return lossValue; + +} + +function trainOnMultipleSequences( sequenceArray, numberOfEpochs, learningRateValue ) { + + for ( let currentEpoch = 0 ; currentEpoch < numberOfEpochs ; currentEpoch++ ) { + + let totalLossValue = 0; + + let totalStepsCount = 0; + + for ( let sequenceIndex = 0 ; sequenceIndex < sequenceArray.length ; sequenceIndex++ ) { + + const currentSequence = sequenceArray[ sequenceIndex ]; + + // For each token in sequence except the first (start from index 1) + + for ( let tokenIndex = 1 ; tokenIndex < currentSequence.length ; tokenIndex++ ) { + + const inputTokens = currentSequence.slice( 0, tokenIndex ); + + const targetTokenIndex = currentSequence[ tokenIndex ]; + + const lossValue = trainModel( inputTokens, targetTokenIndex, learningRateValue ); + + totalLossValue += lossValue; + + totalStepsCount++; 
+ + } + + } + + if ( currentEpoch % 100 === 0 ) { + + writeToPage(".training", "p", "Epoch " + currentEpoch + " Average Loss: " + ( totalLossValue / totalStepsCount ).toFixed( 4 ) ); + //console.log( "Epoch " + currentEpoch + " Average Loss: " + ( totalLossValue / totalStepsCount ).toFixed( 4 ) ); + scrollToBottom(".training"); + } + + } + +} + +function getTokenIdentifier( word ) { + + const capitalizedWord = word.charAt( 0 ).toUpperCase() + word.slice( 1 ); + + if ( vocabulary[ capitalizedWord ] !== undefined ) { + + return vocabulary[ capitalizedWord ]; + + } + + throw new Error( "Word not in vocabulary: " + word ); + +} + +function predictNextWordGivenInput( inputWordArray ) { + + const tokenIdentifierArray = inputWordArray.map( getTokenIdentifier ); + + const { + probabilities + } = forwardPass( tokenIdentifierArray ); + + let maximumProbability = -Infinity; + + let maximumProbabilityIndex = -1; + + for ( let index = 0 ; index < probabilities.length ; index++ ) { + + if ( probabilities.data[ index ] > maximumProbability ) { + + maximumProbability = probabilities.data[ index ]; + + maximumProbabilityIndex = index; + + } + + } + + return Object.keys( vocabulary ).find( function( key ) { return vocabulary[ key ] === maximumProbabilityIndex; } ); + +} + +const trainingSequences = [ + + new Array( vocabulary.The, vocabulary.Book, vocabulary.Is, vocabulary.Great ), + new Array( vocabulary.The, vocabulary.Book, vocabulary.Is, vocabulary.Great ), + new Array( vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Mat ), + new Array( vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Bench ), + new Array( vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Bench ), + new Array( vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Bench ), + new Array( vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, 
vocabulary.The, vocabulary.Mat ), + +]; + +// Create reverse mapping from index to word +const indexToWord = {}; + +for (const word in vocabulary) { + if (vocabulary.hasOwnProperty(word)) { + const index = vocabulary[word]; + indexToWord[index] = word; + } +} +console.log(document.querySelector(".trainingsData")); +writeToPage(".trainingsData", "h2", "Trainings data"); + + +// Convert and write each sequence +for (const sequence of trainingSequences) { + const sequenceWords = sequence.map( index => indexToWord[index] ); + writeToPage(".trainingsData", "p",sequenceWords); +} + +const totalEpochsCount = 6000; + +const learningRateValue = 0.01; + +//writeToPage(".training", "h2", "Training " ); +trainOnMultipleSequences( trainingSequences, totalEpochsCount, learningRateValue ); + + + +function scrollToBottom( selector ) { + + var element = document.querySelector( selector ); + + element.scrollTop = element.scrollHeight; + +} + +function writeToPage( selector, elementType, ...args) { + + var p = document.createElement( elementType ); + + for(var i = 0; i < args.length; i++) { + + var argument = args[i] + + p.innerText += argument + " "; + + } + + document.querySelector( selector ).appendChild( p ); + +} + + + +writeToPage(".prediction", "h2", "Prediction"); + +writeToPage( ".prediction", "p", "Next word after 'The Book is':", predictNextWordGivenInput( [ "The", "Book", "Is" ] ) ); + +writeToPage( ".prediction", "p", "Next word after 'The Cat Sat':", predictNextWordGivenInput( [ "The", "Cat", "Sat" ] ) ); + +writeToPage( ".prediction", "p", "Next word after 'The Cat':", predictNextWordGivenInput( [ "The", "Cat" ] ) ); + +writeToPage( ".prediction", "p", "Next word after 'The':", predictNextWordGivenInput( [ "The" ] ) ); + +writeToPage( ".prediction", "p", "Next word after 'The Cat Sat On':", predictNextWordGivenInput( [ "The", "Cat", "Sat", "On" ] ) ); + +writeToPage( ".prediction", "p", "Next word after 'The Cat Sat On the':", predictNextWordGivenInput( [ "The", "Cat", 
"Sat", "On", "The" ] ) ); + +writeToPage( ".prediction", "p", "Next word after 'The Book':", predictNextWordGivenInput( [ "The", "Book" ] ) ); + diff --git a/server.mjs b/server.mjs new file mode 100644 index 0000000..ddfe511 --- /dev/null +++ b/server.mjs @@ -0,0 +1,171 @@ +import http from "http"; + +import { readdir } from "fs/promises"; +import { stat } from "fs/promises"; +import { readFile } from "fs/promises"; +import { join } from "path"; +import { dirname } from "path"; +import { fileURLToPath } from "url"; + + +class App +{ + + + constructor( ) + { + + const selfPath = fileURLToPath( import.meta.url ); + + this.rootPath = dirname( selfPath ); + this.httpServer = null; + + } + + + async start( ) + { + + this.httpServer = http.createServer( this.handleRequest.bind( this ) ); + + this.httpServer.listen( 1234 ); + + console.log("server started on port 1234"); + + } + + + async handleRequest( req, res ) + { + + res.setHeader("Cross-Origin-Opener-Policy", "same-origin") +res.setHeader("Cross-Origin-Embedder-Policy", "require-corp") + + const requestedPath = decodeURI( req.url ); + const fullPath = join( this.rootPath, requestedPath ); + + const exists = await this.checkFileExists( fullPath ); + + if ( !exists ) + { + + res.statusCode = 404; + res.end( "Not Found" ); + return; + + } + + const stats = await stat( fullPath ); + + if ( stats.isDirectory( ) ) + { + + const indexPath = join( fullPath, "index.html" ); + const indexExists = await this.checkFileExists( indexPath ); + + if ( indexExists ) + { + + await this.sendFile( indexPath, res ); + return; + + } + + await this.sendDirectoryListing( fullPath, requestedPath, res ); + return; + + } + + await this.sendFile( fullPath, res ); + + } + + + async sendFile( path, res ) + { + + const contentType = this.getContentType( path ); + const fileData = await readFile( path ); + + res.setHeader( "Content-Type", contentType ); + res.statusCode = 200; + + res.end( fileData ); + + } + + + async sendDirectoryListing( 
dirPath, urlPath, res ) + { + + const entries = await readdir( dirPath, { withFileTypes : true } ); + + let html = "

Index of " + urlPath + "

"; + + res.setHeader( "Content-Type", "text/html" ); + res.statusCode = 200; + + res.end( html ); + + } + + + async checkFileExists( path ) + { + + const exists = await stat( path ) + .then( function( ) { return true; } ) + .catch( function( ) { return false; } ); + + return exists; + + } + + + getContentType( path ) + { + + const lower = path.toLowerCase( ); + + if ( lower.endsWith( ".html" ) ) return "text/html"; + if ( lower.endsWith( ".css" ) ) return "text/css"; + if ( lower.endsWith( ".js" ) ) return "text/javascript"; + if ( lower.endsWith( ".json" ) ) return "application/json"; + if ( lower.endsWith( ".wasm" ) ) return "application/wasm"; + if ( lower.endsWith( ".png" ) ) return "image/png"; + if ( lower.endsWith( ".jpg" ) ) return "image/jpeg"; + if ( lower.endsWith( ".jpeg" ) ) return "image/jpeg"; + if ( lower.endsWith( ".gif" ) ) return "image/gif"; + if ( lower.endsWith( ".svg" ) ) return "image/svg+xml"; + if ( lower.endsWith( ".wgsl" ) ) return "text/plain"; + if ( lower.endsWith( ".txt" ) ) return "text/plain"; + + return "application/octet-stream"; + + } + + +} + + +const app = new App( ); + +await app.start( );