First Commit

This commit is contained in:
2025-11-18 13:16:41 +01:00
parent d3331c63b7
commit 6f586ae91b
6 changed files with 1184 additions and 2 deletions

49
Matrix.js Normal file
View File

@@ -0,0 +1,49 @@
import Vector from "./Vector.js"
/**
 * Deterministic Park–Miller (minstd) pseudo-random number source.
 * Returns a closure yielding floats in [0, 1); the fixed seed makes
 * weight initialization reproducible across page loads.
 */
function random() {
    let state = 12345; // fixed seed for reproducibility
    return () => {
        state = ( state * 16807 ) % 2147483647;
        return ( state - 1 ) / 2147483646;
    };
}
const randomFix = random();
/**
 * Dense row-major matrix backed by nested JavaScript arrays.
 */
export default class Matrix {
    /**
     * @param {number} rows - number of rows
     * @param {number} cols - number of columns
     * @param {number} [fillValue=0] - initial value for every entry
     */
    constructor( rows, cols, fillValue = 0 ) {
        this.rows = rows;
        this.cols = cols;
        this.data = Array.from( { length: rows }, () => new Array( cols ).fill( fillValue ) );
    }
    /**
     * Build a matrix whose entries are drawn from the shared seeded RNG,
     * centered on zero and scaled to (-scale/2, scale/2).
     */
    static random( rows, cols, scale = 0.1 ) {
        const result = new Matrix( rows, cols );
        for ( let row = 0 ; row < rows ; row++ ) {
            for ( let col = 0 ; col < cols ; col++ ) {
                result.data[ row ][ col ] = ( randomFix() - 0.5 ) * scale;
            }
        }
        return result;
    }
    /** Matrix–vector product; returns a new Vector of length mat.rows. */
    static matVecMul( mat, vec ) {
        const product = new Vector( mat.rows );
        for ( let row = 0 ; row < mat.rows ; row++ ) {
            let accumulator = 0;
            for ( let col = 0 ; col < mat.cols ; col++ ) {
                accumulator += mat.data[ row ][ col ] * vec.data[ col ];
            }
            product.data[ row ] = accumulator;
        }
        return product;
    }
}

123
README.md
View File

@@ -1,3 +1,122 @@
# Simple-Transformer-JS — Minimal Attention Model Demo (Browser-Only)
This project is a small in-browser demonstration of key components of a transformer-style attention mechanism. It runs entirely in JavaScript using ES modules.
It includes:
• Word embeddings
• Positional encoding
• Scaled dot-product attention
• Softmax scoring
• Simple training loop (cross-entropy loss)
• Prediction of next token based on input context
No third-party machine learning libraries are used.
---
## Files
| File | Purpose |
| ------------ | ------------------------------------ |
| `index.html` | Basic UI output + script inclusion |
| `real.js` | Full attention model implementation |
| `Vector.js` | Basic vector operations |
| `Matrix.js` | Basic dense matrix operations |
| `server.mjs` | Minimal static HTTP server (Node.js) |
---
## Vocabulary
The demo uses a tiny fixed vocabulary:
```
The, Cat, Sat, On, Mat, Bench, Book, Great, Is
```
Tokens are mapped to integer indices.
---
## Training
Training data sequences (several of them are repeated in the code so that common patterns are weighted more heavily):
```
["The Book Is Great"]
["The Cat Sat On The Mat"]
["The Cat Sat On The Bench"]
```
Each epoch loops over all sequences and performs:
1. Embedding lookup
2. Positional encoding added to embeddings
3. Query / Key / Value projections
4. Scaled dot-product attention
5. Weighted sum → logits → softmax probabilities
6. Cross-entropy loss + weight updates on:
• Output projection matrix
• Token embeddings
The system prints intermediate progress into DOM elements.
---
## Output
Once trained, the model prints predictions:
```
Next word after 'The Book Is': ...
Next word after 'The Cat Sat': ...
Next word after 'The Cat': ...
...
```
Predictions are appended to `.prediction` container in the page.
---
## How to Run
### 1 — Start the server
From the folder containing `server.mjs` and the HTML/JS files:
```bash
node server.mjs
```
Server will listen on:
```
http://localhost:1234
```
### 2 — Open the demo in a browser
Navigate to:
```
http://localhost:1234
```
The demo will:
• Load embeddings
• Run training loop
• Display loss progression
• Show final predictions
---
## Notes
• This is a simplified demonstration intended for clarity, not accuracy
• No batching, dropout, layer-norm, or multi-head attention
• Update rules only modify embeddings + output projection (queries/keys/values not updated)
This project is a small in-browser demonstration of key components of a transformer-style attention mechanism. It runs entirely in JavaScript using ES modules.

47
Vector.js Normal file
View File

@@ -0,0 +1,47 @@
/**
 * Deterministic Park–Miller (minstd) pseudo-random number source.
 * Returns a closure yielding floats in [0, 1); the fixed seed makes
 * vector initialization reproducible across page loads.
 */
function random() {
    let state = 12345; // fixed seed for reproducibility
    return () => {
        state = ( state * 16807 ) % 2147483647;
        return ( state - 1 ) / 2147483646;
    };
}
const randomFix = random();
/**
 * Fixed-length numeric vector with dot product and softmax helpers.
 */
export default class Vector {
    /**
     * @param {number} length - number of components
     * @param {number} [fillValue=0] - initial value for every component
     */
    constructor( length, fillValue = 0 ) {
        this.data = new Array( length ).fill( fillValue );
    }
    /** Number of components. */
    get length() {
        return this.data.length;
    }
    /** Dot product of two vectors of equal length. */
    static dot( a, b ) {
        let total = 0;
        a.data.forEach( ( value, index ) => {
            total += value * b.data[ index ];
        } );
        return total;
    }
    /**
     * Numerically stable softmax: subtracts the maximum before
     * exponentiating so large inputs do not overflow.
     */
    static softmax( vec ) {
        const maximum = Math.max( ...vec.data );
        const exponentials = vec.data.map( ( value ) => Math.exp( value - maximum ) );
        const total = exponentials.reduce( ( sum, value ) => sum + value, 0 );
        const result = new Vector( vec.length );
        result.data = exponentials.map( ( value ) => value / total );
        return result;
    }
}

261
index.html Normal file
View File

@@ -0,0 +1,261 @@
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="UTF-8" />
<link rel="icon" href="data:;base64,iVBORw0KGgo=" />
<meta name="viewport" content="width=device-width, initial-scale=1" />
<title>Transformer Attention Example</title>
<base href="./">
<style>
html, body {
margin: 0;
padding: 0;
height: 100%;
}
.panel{
color: #d9d9d9;
margin:0 auto;
width: 400px;
padding: 10px;
text-align: center;
}
button {
top: 10px;
left: 10px;
z-index: 10;
padding: 10px 20px;
font-size: 16px;
font-weight: 600;
color: #f0f0f5;
background: rgba(28, 28, 30, 0.8);
border: none;
border-radius: 12px;
box-shadow:
0 1px 3px rgba(0, 0, 0, 0.4),
0 0 8px rgba(28, 28, 30, 0.7);
backdrop-filter: blur(12px);
-webkit-backdrop-filter: blur(12px);
cursor: pointer;
transition: background-color 0.3s ease, box-shadow 0.3s ease;
}
button:hover {
background: rgba(28, 28, 30, 0.95);
box-shadow:
0 4px 12px rgba(0, 0, 0, 0.6),
0 0 15px rgba(28, 28, 30, 0.9);
}
button:active {
background: rgba(28, 28, 30, 1);
box-shadow:
0 2px 6px rgba(0, 0, 0, 0.8),
0 0 10px rgba(28, 28, 30, 1);
}
#controlPanel {
position: absolute;
top: 10px;
left: 10px;
display: flex;
flex-direction: column;
gap: 12px;
padding: 20px;
background: rgba(30, 30, 30, 0.6);
backdrop-filter: blur(18px);
-webkit-backdrop-filter: blur(18px);
border-radius: 14px;
box-shadow:
0 4px 12px rgba(0, 0, 0, 0.5),
0 0 0 1px rgba(255, 255, 255, 0.05);
z-index: 1000;
box-sizing: border-box;
}
.inputRow {
display: flex;
align-items: center;
justify-content: space-between;
gap: 12px;
width: 100%;
}
.inputRow label {
color: #ccc;
font-size: 14px;
white-space: nowrap;
width: 70px;
}
.inputRow button,
.inputRow input {
flex-grow: 1;
font-size: 14px;
font-weight: 600;
color: #f0f0f5;
background: rgba(28, 28, 30, 0.9);
border: none;
border-radius: 8px;
padding: 8px 10px;
cursor: pointer;
box-shadow:
0 1px 3px rgba(0, 0, 0, 0.4),
0 0 6px rgba(28, 28, 30, 0.5);
transition: background-color 0.2s ease, box-shadow 0.2s ease;
}
.inputRow button:hover {
background: rgba(40, 40, 44, 0.95);
}
.inputRow input {
background: rgba(20, 20, 20, 0.8);
border: 1px solid rgba(255, 255, 255, 0.1);
outline: none;
box-shadow: inset 0 1px 2px rgba(0, 0, 0, 0.6);
}
.inputRow select {
flex-grow: 1;
font-size: 14px;
font-weight: 600;
color: #f0f0f5;
background: rgba(28, 28, 30, 0.9);
border: none;
border-radius: 8px;
padding: 8px 10px;
cursor: pointer;
box-shadow:
0 1px 3px rgba(0, 0, 0, 0.4),
0 0 6px rgba(28, 28, 30, 0.5);
transition: background-color 0.2s ease, box-shadow 0.2s ease;
appearance: none; /* Remove default arrow */
-webkit-appearance: none;
-moz-appearance: none;
background-image:
linear-gradient(45deg, transparent 50%, #f0f0f5 50%),
linear-gradient(135deg, #f0f0f5 50%, transparent 50%);
background-position:
calc(100% - 20px) calc(50% - 3px),
calc(100% - 15px) calc(50% - 3px);
background-size: 5px 5px;
background-repeat: no-repeat;
}
.inputRow select:hover {
background: rgba(40, 40, 44, 0.95);
}
.inputRow option {
background: rgba(28, 28, 30, 0.95);
color: #f0f0f5;
}
.top-panel{
justify-content: flex-start;
border-bottom: 1px solid #292929;
display: flex;
gap: 2.5rem;
padding: 1.75rem 2rem;
background: rgb(20 20 20 / 95%);
backdrop-filter: blur(35px);
-webkit-backdrop-filter: blur(35px);
color: white;
box-shadow: 0 4px 30px rgba(0, 0, 0, 0.7);
font-weight: 600;
font-size: 0.95rem;
user-select: none;
width: calc(100vw);
z-index: 10;
}
.top-panel button{
cursor:pointer;
}
body {
background: #111111;
font-family: "Inter", sans-serif;
}
.vertical{
display: flex;
}
.training{
overflow: auto;
height: 50vh;
}
.scrollable {
scrollbar-width: thin;
scrollbar-color: #444444 #111111; /* thumb and track colors */
overflow: auto;
}
/* WebKit-based browsers */
.scrollable::-webkit-scrollbar {
width: 6px;
height: 6px;
}
.scrollable::-webkit-scrollbar-track {
background: #111111;
}
.scrollable::-webkit-scrollbar-thumb {
background-color: #444444;
border-radius: 3px;
}
.scrollable::-webkit-scrollbar-thumb:hover {
background-color: #777777;
}
</style>
</head>
<body>
<div class="top-panel"></div>
<div class="description panel">
<h2>Simplified transformer-style attention mechanism in Javascript</h2>
<p>This JavaScript implementation demonstrates a simplified transformer-style attention mechanism using basic linear algebra and embedding concepts. The code was developed with the assistance of AI, leveraging natural language instructions and automated code generation to build and organize the components efficiently.</p>
</div>
<div class="panel">
<h2>Training</h2>
</div>
<div class="vertical">
<div class="trainingsData panel">
</div>
<div class="training panel scrollable">
</div>
<div class="prediction panel">
</div>
</div>
<canvas width="1000" height="1000"></canvas>
<script type="module" src="./real.js"></script>
</body>
</html>

535
real.js Normal file
View File

@@ -0,0 +1,535 @@
import Vector from "./Vector.js"
import Matrix from "./Matrix.js"
/**
 * Deterministic Park–Miller (minstd) pseudo-random number source.
 * The returned closure yields floats in [0, 1); the fixed seed makes
 * every training run reproducible.
 */
function createRandomNumberGenerator() {
    let state = 12345; // fixed seed for reproducibility
    return () => {
        state = ( state * 16807 ) % 2147483647;
        return ( state - 1 ) / 2147483646;
    };
}
const randomNumberGenerator = createRandomNumberGenerator();
/**
 * Learnable token-embedding table: one Vector of size embeddingDimension
 * per vocabulary entry, initialized with small seeded-random values.
 */
class SimpleEmbedding {
    constructor( vocabularySize, embeddingDimension ) {
        this.vocabularySize = vocabularySize;
        this.embeddingDimension = embeddingDimension;
        this.embeddingVectors = new Array( vocabularySize );
        this.initializeEmbeddings();
    }
    /** Populate the table with freshly initialized random vectors. */
    initializeEmbeddings() {
        for ( let tokenIndex = 0 ; tokenIndex < this.vocabularySize ; tokenIndex++ ) {
            const embedding = new Vector( this.embeddingDimension );
            this.initializeVectorRandomly( embedding );
            this.embeddingVectors[ tokenIndex ] = embedding;
        }
    }
    /** Overwrite each component with a small value in (-0.05, 0.05) from the shared seeded RNG. */
    initializeVectorRandomly( vectorInstance ) {
        for ( let component = 0 ; component < vectorInstance.data.length ; component++ ) {
            vectorInstance.data[ component ] = ( randomNumberGenerator() - 0.5 ) * 0.1;
        }
    }
    /** Return the stored embedding for a token id (a live reference, not a copy). */
    lookupEmbedding( tokenIdentifier ) {
        return this.embeddingVectors[ tokenIdentifier ];
    }
    /** SGD step: embedding -= learningRate * gradient, in place. */
    updateEmbedding( tokenIdentifier, gradientVector, learningRate ) {
        const embedding = this.embeddingVectors[ tokenIdentifier ];
        for ( let component = 0 ; component < this.embeddingDimension ; component++ ) {
            embedding.data[ component ] -= learningRate * gradientVector.data[ component ];
        }
    }
}
class PositionalEncoding {
    /**
     * Sinusoidal positional-encoding value: even dimension indices use sine,
     * odd indices cosine, with geometrically decreasing angular rates.
     * The model dimension was previously hard-coded to 8; it is now a
     * defaulted parameter (default 8 preserves existing call sites).
     * @param {number} position - token position in the sequence (0-based)
     * @param {number} index - embedding dimension index (0-based)
     * @param {number} [modelDimension=8] - embedding size used for the angle rate
     * @returns {number} encoding value in [-1, 1]
     */
    static calculateValue( position, index, modelDimension = 8 ) {
        const angleRate = 1 / Math.pow( 10000, 2 * Math.floor( index / 2 ) / modelDimension );
        if ( index % 2 === 0 ) {
            return Math.sin( position * angleRate );
        }
        return Math.cos( position * angleRate );
    }
}
/** Cross-entropy loss for one target token: -log(p[target]); the epsilon avoids log(0). */
function calculateCrossEntropyLoss( probabilities, targetIndex ) {
    const targetProbability = probabilities.data[ targetIndex ];
    return -Math.log( targetProbability + 1e-9 );
}
/**
 * Gradient of cross-entropy w.r.t. the logits for a softmax output:
 * grad = probabilities, with 1 subtracted at the target index.
 */
function calculateCrossEntropyGradient( probabilities, targetIndex ) {
    const gradientVector = new Vector( probabilities.length );
    gradientVector.data = probabilities.data.slice();
    gradientVector.data[ targetIndex ] -= 1;
    return gradientVector;
}
// Token -> index mapping for the demo's fixed 9-word vocabulary.
const vocabulary = {
The: 0,
Cat: 1,
Sat: 2,
On: 3,
Mat: 4,
Bench: 5,
Book: 6,
Great: 7,
Is: 8
};
// Number of distinct tokens (9).
const vocabularySize = Object.keys( vocabulary ).length;
// Size of every embedding / hidden vector in this demo.
const embeddingDimension = 8;
// Learnable token-embedding table (seeded random initialization).
const embeddingsInstance = new SimpleEmbedding( vocabularySize, embeddingDimension );
// Square projection matrices for queries, keys, and values (dim x dim).
// NOTE(review): these are initialized but never updated during training
// (see the TODO in trainModel) — confirm that is intentional.
const matrixQuery = Matrix.random( embeddingDimension, embeddingDimension );
const matrixKey = Matrix.random( embeddingDimension, embeddingDimension );
const matrixValue = Matrix.random( embeddingDimension, embeddingDimension );
// Output projection: hidden vector -> vocabulary logits (vocab x dim).
const matrixOutput = Matrix.random( vocabularySize, embeddingDimension );
/**
 * Replace each entry of inputEmbeddingVectors (in place, by index) with a new
 * Vector equal to the embedding plus its sinusoidal positional encoding.
 *
 * Fixes two defects in the original:
 * 1. It wrote to inputEmbeddingVectors[p][d] instead of .data[d], so the
 *    encoding never reached the vector data (it only created stray NaN
 *    index properties on the Vector objects).
 * 2. Writing into fresh copies avoids mutating the shared vectors returned
 *    by the embedding table, which would otherwise accumulate positional
 *    offsets across forward passes.
 * @param {Vector[]} inputEmbeddingVectors - mutated in place (entries replaced)
 */
function applyPositionalEncodingToInputEmbeddings( inputEmbeddingVectors ) {
    for ( let positionIndex = 0 ; positionIndex < inputEmbeddingVectors.length ; positionIndex++ ) {
        const encoded = new Vector( embeddingDimension );
        for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) {
            encoded.data[ dimensionIndex ] = inputEmbeddingVectors[ positionIndex ].data[ dimensionIndex ]
                + PositionalEncoding.calculateValue( positionIndex, dimensionIndex );
        }
        inputEmbeddingVectors[ positionIndex ] = encoded;
    }
}
/**
 * Scaled dot-product attention scores: score[i] = (q . k_i) / sqrt(d).
 * @returns {Vector} one score per key vector
 */
function computeAttentionScoresVector( queryVector, keyVectors ) {
    const scalingFactor = Math.sqrt( embeddingDimension );
    const scoresVector = new Vector( keyVectors.length );
    keyVectors.forEach( ( keyVector, position ) => {
        scoresVector.data[ position ] = Vector.dot( queryVector, keyVector ) / scalingFactor;
    } );
    return scoresVector;
}
/**
 * Attention-weighted sum of the value vectors:
 * out[d] = sum_i weights[i] * values[i][d].
 */
function computeWeightedSumVector( attentionWeightsVector, valueVectors ) {
    const weightedSumVector = new Vector( embeddingDimension );
    for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) {
        let accumulated = 0;
        valueVectors.forEach( ( valueVector, position ) => {
            accumulated += attentionWeightsVector.data[ position ] * valueVector.data[ dimensionIndex ];
        } );
        weightedSumVector.data[ dimensionIndex ] = accumulated;
    }
    return weightedSumVector;
}
/**
 * Project the attention output through matrixOutput to raw vocabulary logits:
 * logits[v] = row_v(matrixOutput) . weightedSum.
 */
function computeLogitsVector( weightedSumVector ) {
    const logitsVector = new Vector( vocabularySize );
    for ( let vocabIndex = 0 ; vocabIndex < vocabularySize ; vocabIndex++ ) {
        const outputRow = matrixOutput.data[ vocabIndex ];
        let dotProduct = 0;
        for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) {
            dotProduct += outputRow[ dimensionIndex ] * weightedSumVector.data[ dimensionIndex ];
        }
        logitsVector.data[ vocabIndex ] = dotProduct;
    }
    return logitsVector;
}
/**
 * Single forward pass of the attention model over a token sequence.
 * Looks up embeddings, applies positional encoding, projects each position
 * to query/key/value space, attends from the LAST position's query over all
 * keys, and maps the attention-weighted value sum to vocabulary probabilities.
 * @param {number[]} inputTokenIdentifiers - token ids of the context
 * @returns {object} probabilities plus the intermediates needed for backprop
 */
function forwardPass( inputTokenIdentifiers ) {
// Embedding lookup. NOTE(review): lookupEmbedding returns live references
// into the embedding table, not copies — verify downstream mutation of
// these vectors is intended.
const inputEmbeddingVectors = new Array( inputTokenIdentifiers.length );
for ( let index = 0 ; index < inputTokenIdentifiers.length ; index++ ) {
inputEmbeddingVectors[ index ] = embeddingsInstance.lookupEmbedding( inputTokenIdentifiers[ index ] );
}
// Add position-dependent sin/cos offsets to the embeddings.
applyPositionalEncodingToInputEmbeddings( inputEmbeddingVectors );
// Project every position into query/key/value space.
const queryVectors = new Array( inputEmbeddingVectors.length );
const keyVectors = new Array( inputEmbeddingVectors.length );
const valueVectors = new Array( inputEmbeddingVectors.length );
for ( let index = 0 ; index < inputEmbeddingVectors.length ; index++ ) {
queryVectors[ index ] = Matrix.matVecMul( matrixQuery, inputEmbeddingVectors[ index ] );
keyVectors[ index ] = Matrix.matVecMul( matrixKey, inputEmbeddingVectors[ index ] );
valueVectors[ index ] = Matrix.matVecMul( matrixValue, inputEmbeddingVectors[ index ] );
}
// Only the final position attends: its query scores against all keys.
const lastQueryVector = queryVectors[ queryVectors.length - 1 ];
const attentionScores = computeAttentionScoresVector( lastQueryVector, keyVectors );
const attentionWeights = Vector.softmax( attentionScores );
// Attention-weighted sum of values -> logits -> next-token distribution.
const weightedSumVector = computeWeightedSumVector( attentionWeights, valueVectors );
const logitsVector = computeLogitsVector( weightedSumVector );
const probabilities = Vector.softmax( logitsVector );
// Return all intermediates so training can backpropagate without recomputation.
const resultObject = {
probabilities: probabilities,
attentionWeights: attentionWeights,
weightedSumVector: weightedSumVector,
queryVectors: queryVectors,
keyVectors: keyVectors,
valueVectors: valueVectors,
inputEmbeddingVectors: inputEmbeddingVectors,
lastQueryVector: lastQueryVector
};
return resultObject;
}
/**
 * SGD update of the output projection:
 * W_out[v][d] -= learningRate * gradLoss[v] * weightedSum[d].
 */
function updateOutputLayerWeights( probabilities, targetIndex, weightedSumVector, learningRate ) {
    const gradientLossVector = calculateCrossEntropyGradient( probabilities, targetIndex );
    for ( let vocabIndex = 0 ; vocabIndex < vocabularySize ; vocabIndex++ ) {
        const scaledRowGradient = learningRate * gradientLossVector.data[ vocabIndex ];
        for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) {
            matrixOutput.data[ vocabIndex ][ dimensionIndex ] -= scaledRowGradient * weightedSumVector.data[ dimensionIndex ];
        }
    }
}
/**
 * One training step: forward pass, cross-entropy loss, then SGD updates of
 * the output projection and the context-token embeddings.
 * Query/key/value matrices are NOT updated (see TODO below).
 * @param {number[]} inputTokenIdentifiers - context token ids
 * @param {number} targetIndex - id of the token to predict
 * @param {number} learningRate - SGD step size
 * @returns {number} the cross-entropy loss for this step
 */
function trainModel( inputTokenIdentifiers, targetIndex, learningRate ) {
    const {
        probabilities,
        attentionWeights,
        weightedSumVector,
        valueVectors
    } = forwardPass( inputTokenIdentifiers );
    const lossValue = calculateCrossEntropyLoss( probabilities, targetIndex );
    updateOutputLayerWeights( probabilities, targetIndex, weightedSumVector, learningRate );
    // dLoss/dWeightedSum = W_out^T * gradLoss.
    // NOTE(review): matrixOutput is read AFTER updateOutputLayerWeights has
    // modified it, matching the original implementation's ordering — confirm
    // this is the intended (slightly stale) gradient.
    const gradientLossVector = calculateCrossEntropyGradient( probabilities, targetIndex );
    const gradientWeightedSumVector = new Vector( embeddingDimension );
    for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) {
        let sum = 0;
        for ( let vocabIndex = 0 ; vocabIndex < vocabularySize ; vocabIndex++ ) {
            sum += matrixOutput.data[ vocabIndex ][ dimensionIndex ] * gradientLossVector.data[ vocabIndex ];
        }
        gradientWeightedSumVector.data[ dimensionIndex ] = sum;
    }
    // Backpropagate through the attention-weighted sum to each input embedding.
    // Reuses attentionWeights from the forward pass instead of recomputing
    // softmax(scores) once per (token, dimension) pair as before — the
    // recomputation was redundant O(n^2 * d) work producing identical values
    // (the cached key/query vectors it read never change in this loop).
    for ( let index = 0 ; index < valueVectors.length ; index++ ) {
        const attentionWeight = attentionWeights.data[ index ];
        const gradientVector = new Vector( embeddingDimension );
        for ( let dimensionIndex = 0 ; dimensionIndex < embeddingDimension ; dimensionIndex++ ) {
            gradientVector.data[ dimensionIndex ] = gradientWeightedSumVector.data[ dimensionIndex ] * attentionWeight;
        }
        // Update the embedding of the token at this context position.
        embeddingsInstance.updateEmbedding( inputTokenIdentifiers[ index ], gradientVector, learningRate );
    }
    // TODO: implement updates for matrixQuery, matrixKey, matrixValue similarly
    return lossValue;
}
/**
 * Train for a number of epochs over all sequences. Each prefix of a sequence
 * is used to predict the following token (next-token prediction). Logs the
 * average loss to the page every 100 epochs.
 */
function trainOnMultipleSequences( sequenceArray, numberOfEpochs, learningRateValue ) {
    for ( let epoch = 0 ; epoch < numberOfEpochs ; epoch++ ) {
        let accumulatedLoss = 0;
        let stepCount = 0;
        for ( const sequence of sequenceArray ) {
            // Skip position 0: the first token has no preceding context.
            for ( let position = 1 ; position < sequence.length ; position++ ) {
                const contextTokens = sequence.slice( 0, position );
                const expectedToken = sequence[ position ];
                accumulatedLoss += trainModel( contextTokens, expectedToken, learningRateValue );
                stepCount++;
            }
        }
        if ( epoch % 100 === 0 ) {
            writeToPage(".training", "p", "Epoch " + epoch + " Average Loss: " + ( accumulatedLoss / stepCount ).toFixed( 4 ) );
            scrollToBottom(".training");
        }
    }
}
/**
 * Map a word to its vocabulary index; the first letter is capitalized so the
 * lookup tolerates lower-case input.
 * @throws {Error} when the (capitalized) word is not in the vocabulary
 */
function getTokenIdentifier( word ) {
    const normalizedWord = word.charAt( 0 ).toUpperCase() + word.slice( 1 );
    const tokenIdentifier = vocabulary[ normalizedWord ];
    if ( tokenIdentifier === undefined ) {
        throw new Error( "Word not in vocabulary: " + word );
    }
    return tokenIdentifier;
}
/**
 * Greedy next-token prediction: run a forward pass over the context words
 * and return the vocabulary word with the highest probability.
 * @param {string[]} inputWordArray - context words (must be in the vocabulary)
 * @returns {string|undefined} the predicted word
 */
function predictNextWordGivenInput( inputWordArray ) {
    const tokenIdentifierArray = inputWordArray.map( getTokenIdentifier );
    const { probabilities } = forwardPass( tokenIdentifierArray );
    let bestIndex = -1;
    let bestProbability = -Infinity;
    probabilities.data.forEach( ( probability, index ) => {
        if ( probability > bestProbability ) {
            bestProbability = probability;
            bestIndex = index;
        }
    } );
    return Object.keys( vocabulary ).find( ( key ) => vocabulary[ key ] === bestIndex );
}
// Training corpus. Array literals replace the original `new Array(a, b, ...)`
// calls — same elements, but idiomatic and immune to the single-numeric-argument
// Array constructor pitfall. Duplicated sequences weight common patterns more
// heavily during training.
const trainingSequences = [
    [ vocabulary.The, vocabulary.Book, vocabulary.Is, vocabulary.Great ],
    [ vocabulary.The, vocabulary.Book, vocabulary.Is, vocabulary.Great ],
    [ vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Mat ],
    [ vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Bench ],
    [ vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Bench ],
    [ vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Bench ],
    [ vocabulary.The, vocabulary.Cat, vocabulary.Sat, vocabulary.On, vocabulary.The, vocabulary.Mat ],
];
// Reverse mapping (token index -> word), used to render the training data.
const indexToWord = {};
for ( const [ word, index ] of Object.entries( vocabulary ) ) {
    indexToWord[ index ] = word;
}
writeToPage(".trainingsData", "h2", "Trainings data");
// Render each training sequence as its words.
for ( const sequence of trainingSequences ) {
    const sequenceWords = sequence.map( ( index ) => indexToWord[ index ] );
    writeToPage(".trainingsData", "p", sequenceWords );
}
const totalEpochsCount = 6000;
const learningRateValue = 0.01;
trainOnMultipleSequences( trainingSequences, totalEpochsCount, learningRateValue );
/** Scroll the container matched by `selector` to its bottom edge. */
function scrollToBottom( selector ) {
    const container = document.querySelector( selector );
    container.scrollTop = container.scrollHeight;
}
/**
 * Create an element of `elementType`, fill its text with each argument
 * followed by a space, and append it to the container matched by `selector`.
 */
function writeToPage( selector, elementType, ...args) {
    const element = document.createElement( elementType );
    for ( const value of args ) {
        element.innerText += value + " ";
    }
    document.querySelector( selector ).appendChild( element );
}
// Report the model's next-token prediction for a few example contexts.
writeToPage(".prediction", "h2", "Prediction");
const predictionQueries = [
    [ "Next word after 'The Book is':", [ "The", "Book", "Is" ] ],
    [ "Next word after 'The Cat Sat':", [ "The", "Cat", "Sat" ] ],
    [ "Next word after 'The Cat':", [ "The", "Cat" ] ],
    [ "Next word after 'The':", [ "The" ] ],
    [ "Next word after 'The Cat Sat On':", [ "The", "Cat", "Sat", "On" ] ],
    [ "Next word after 'The Cat Sat On the':", [ "The", "Cat", "Sat", "On", "The" ] ],
    [ "Next word after 'The Book':", [ "The", "Book" ] ],
];
for ( const [ label, contextWords ] of predictionQueries ) {
    writeToPage( ".prediction", "p", label, predictNextWordGivenInput( contextWords ) );
}

171
server.mjs Normal file
View File

@@ -0,0 +1,171 @@
import http from "http";
import { readdir } from "fs/promises";
import { stat } from "fs/promises";
import { readFile } from "fs/promises";
import { join } from "path";
import { dirname } from "path";
import { fileURLToPath } from "url";
/**
 * Minimal static-file HTTP server rooted at this script's directory.
 * Serves files with best-effort content types, resolves index.html for
 * directories, and otherwise renders a directory listing.
 */
class App
{
    constructor( )
    {
        const selfPath = fileURLToPath( import.meta.url );
        this.rootPath = dirname( selfPath );
        this.httpServer = null;
    }
    /** Create the HTTP server and begin listening on port 1234. */
    async start( )
    {
        this.httpServer = http.createServer( this.handleRequest.bind( this ) );
        this.httpServer.listen( 1234 );
        console.log("server started on port 1234");
    }
    /**
     * Handle one request: serve the file at the URL path, a directory's
     * index.html, or a generated listing. COOP/COEP headers are set on every
     * response (they enable cross-origin isolation in the browser).
     */
    async handleRequest( req, res )
    {
        res.setHeader("Cross-Origin-Opener-Policy", "same-origin");
        res.setHeader("Cross-Origin-Embedder-Policy", "require-corp");
        // Strip the query string before touching the filesystem; the original
        // stat()'ed the raw URL, so "/index.html?x=1" produced a spurious 404.
        const requestedPath = decodeURI( req.url.split( "?" )[ 0 ] );
        const fullPath = join( this.rootPath, requestedPath );
        // Security: join() normalizes "..", so a traversal attempt resolves to
        // a path outside rootPath — reject it instead of serving arbitrary files.
        if ( !fullPath.startsWith( this.rootPath ) )
        {
            res.statusCode = 403;
            res.end( "Forbidden" );
            return;
        }
        const exists = await this.checkFileExists( fullPath );
        if ( !exists )
        {
            res.statusCode = 404;
            res.end( "Not Found" );
            return;
        }
        const stats = await stat( fullPath );
        if ( stats.isDirectory( ) )
        {
            const indexPath = join( fullPath, "index.html" );
            const indexExists = await this.checkFileExists( indexPath );
            if ( indexExists )
            {
                await this.sendFile( indexPath, res );
                return;
            }
            await this.sendDirectoryListing( fullPath, requestedPath, res );
            return;
        }
        await this.sendFile( fullPath, res );
    }
    /** Read the file and send it with a content type inferred from its extension. */
    async sendFile( path, res )
    {
        const contentType = this.getContentType( path );
        const fileData = await readFile( path );
        res.setHeader( "Content-Type", contentType );
        res.statusCode = 200;
        res.end( fileData );
    }
    /** Render a simple HTML listing of the directory's entries as links. */
    async sendDirectoryListing( dirPath, urlPath, res )
    {
        const entries = await readdir( dirPath, { withFileTypes : true } );
        let html = "<html><body><h1>Index of " + urlPath + "</h1><ul>";
        for ( const entry of entries )
        {
            const name = entry.name;
            const link = urlPath.endsWith( "/" )
                ? urlPath + name
                : urlPath + "/" + name;
            html = html + "<li><a href=\"" + link + "\">" + name + "</a></li>";
        }
        html = html + "</ul></body></html>";
        res.setHeader( "Content-Type", "text/html" );
        res.statusCode = 200;
        res.end( html );
    }
    /** True when stat() succeeds for the path, false on any error (e.g. ENOENT). */
    async checkFileExists( path )
    {
        return stat( path )
            .then( function( ) { return true; } )
            .catch( function( ) { return false; } );
    }
    /** Map a file extension to a MIME type; unknown extensions fall back to octet-stream. */
    getContentType( path )
    {
        const lower = path.toLowerCase( );
        if ( lower.endsWith( ".html" ) ) return "text/html";
        if ( lower.endsWith( ".css" ) ) return "text/css";
        if ( lower.endsWith( ".js" ) ) return "text/javascript";
        if ( lower.endsWith( ".json" ) ) return "application/json";
        if ( lower.endsWith( ".wasm" ) ) return "application/wasm";
        if ( lower.endsWith( ".png" ) ) return "image/png";
        if ( lower.endsWith( ".jpg" ) ) return "image/jpeg";
        if ( lower.endsWith( ".jpeg" ) ) return "image/jpeg";
        if ( lower.endsWith( ".gif" ) ) return "image/gif";
        if ( lower.endsWith( ".svg" ) ) return "image/svg+xml";
        if ( lower.endsWith( ".wgsl" ) ) return "text/plain";
        if ( lower.endsWith( ".txt" ) ) return "text/plain";
        return "application/octet-stream";
    }
}
// Instantiate the application and start the HTTP server.
// Top-level await is valid here because this is an ES module (.mjs).
const app = new App( );
await app.start( );