first commit
node_modules/.package-lock.json (new file, generated, vendored, 16 lines)
@@ -0,0 +1,16 @@
{
  "name": "codex_test",
  "lockfileVersion": 3,
  "requires": true,
  "packages": {
    "node_modules/pngjs": {
      "version": "7.0.0",
      "resolved": "https://registry.npmjs.org/pngjs/-/pngjs-7.0.0.tgz",
      "integrity": "sha512-LKWqWJRhstyYo9pGvgor/ivk2w94eSjE3RGVuzLGlr3NmD8bf7RcYGze1mNdEHRP6TRP6rMuDHk5t44hnTRyow==",
      "license": "MIT",
      "engines": {
        "node": ">=14.19.0"
      }
    }
  }
}
node_modules/pngjs/CHANGELOG.md (new file, generated, vendored, 128 lines)
@@ -0,0 +1,128 @@
# Changelog

### 7.0.0 - 19/02/2023

- BREAKING - Drop support for node 12 (Though nothing incompatible in this release yet)
- Switch to a pngjs organisation

### 6.0.0 - 24/10/2020

- BREAKING - Sync version now throws if there is unexpected content at the end of the stream.
- BREAKING - Drop support for node 10 (Though nothing incompatible in this release yet)
- Reduce the number of files included in the package

### 5.1.0 - 13/09/2020

- Add option to skip rescaling

### 5.0.0 - 15/04/2020

- Drop support for Node 8
- Browserified bundle may now contain ES20(15-20) code if the supported node version supports it. Please run the browserified version through babel if you need to support older browsers.

### 4.0.1 - 15/04/2020

- Fix to possible null reference in nextTick of async method

### 4.0.0 - 09/04/2020

- Fix issue in newer nodes with using Buffer
- Fix async issue with some png files
- Drop support for Node 4 & 6

### 3.4.0 - 09/03/2019

- Include whether the png has alpha in the meta data
- emit an error if the image is truncated instead of hanging
- Add a browserified version
- speed up some mapping functions

### 3.3.3 - 19/04/2018

- Real fix for node 9

### 3.3.2 - 16/02/2018

- Fix for node 9

### 3.3.1 - 15/11/2017

- Bugfixes and removal of es6

### 3.3.0

- Add writing 16 bit channels and support for grayscale input

### 3.2.0 - 30/04/2017

- Support for encoding 8-bit grayscale images

### 3.1.0 - 30/04/2017

- Support for pngs with zlib chunks that are malformed after valid data

### 3.0.1 - 16/02/2017

- Fix single pixel pngs

### 3.0.0 - 03/08/2016

- Drop support for node below v4 and iojs. Pin to 2.3.0 to use with old, unsupported or patched node versions.

### 2.3.0 - 22/04/2016

- Support for sync in node 0.10

### 2.2.0 - 04/12/2015

- Add sync write api
- Fix newfile example
- Correct comparison table

### 2.1.0 - 28/10/2015

- rename package to pngjs
- added 'bgColor' option

### 2.0.0 - 08/10/2015

- fixes to readme
- _breaking change_ - bitblt on the png prototype now doesn't take a unused, unnecessary src first argument

### 1.2.0 - 13/09/2015

- support passing colorType to write PNG's and writing bitmaps without alpha information

### 1.1.0 - 07/09/2015

- support passing a deflate factory for controlled compression

### 1.0.2 - 22/08/2015

- Expose all PNG creation info

### 1.0.1 - 21/08/2015

- Fix non square interlaced files

### 1.0.0 - 08/08/2015

- More tests
- source linted
- maintainability refactorings
- async API - exceptions in reading now emit warnings
- documentation improvement - sync api now documented, adjustGamma documented
- breaking change - gamma chunk is now written. previously a read then write would destroy gamma information, now it is persisted.

### 0.0.3 - 03/08/2015

- Error handling fixes
- ignore files for smaller npm footprint

### 0.0.2 - 02/08/2015

- Bugfixes to interlacing, support for transparent colours

### 0.0.1 - 02/08/2015

- Initial release, see pngjs for older changelog.
node_modules/pngjs/LICENSE (new file, generated, vendored, 20 lines)
@@ -0,0 +1,20 @@
pngjs original work Copyright (c) 2015 Luke Page & Original Contributors
pngjs derived work Copyright (c) 2012 Kuba Niegowski

Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:

The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.

THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
node_modules/pngjs/README.md (new file, generated, vendored, 287 lines)
@@ -0,0 +1,287 @@
 [](https://codecov.io/gh/pngjs/pngjs) [](http://badge.fury.io/js/pngjs)

# pngjs

Simple PNG encoder/decoder for Node.js with no dependencies.

Based on the original [pngjs](https://github.com/niegowski/node-pngjs) with the following enhancements:

- Support for reading 1, 2, 4 & 16 bit files
- Support for reading interlaced files
- Support for reading `tRNS` transparent colours
- Support for writing colortype 0 (grayscale), colortype 2 (RGB), colortype 4 (grayscale alpha) and colortype 6 (RGBA)
- Sync interface as well as async
- API compatible with pngjs and node-pngjs

Known lack of support for:

- Extended PNG e.g. Animation
- Writing in colortype 3 (indexed color)

# Table of Contents

- [Requirements](#requirements)
- [Comparison Table](#comparison-table)
- [Tests](#tests)
- [Installation](#installation)
- [Browser](#browser)
- [Example](#example)
- [Async API](#async-api)
- [Sync API](#sync-api)
- [Changelog](#changelog)

# Comparison Table

| Name          | Forked From | Sync | Async | 16 Bit | 1/2/4 Bit | Interlace | Gamma  | Encodes | Tested |
| ------------- | ----------- | ---- | ----- | ------ | --------- | --------- | ------ | ------- | ------ |
| pngjs         |             | Yes  | Yes   | Yes    | Yes       | Yes       | Yes    | Yes     | Yes    |
| node-png      | pngjs       | No   | Yes   | No     | No        | No        | Hidden | Yes     | Manual |
| png-coder     | pngjs       | No   | Yes   | Yes    | No        | No        | Hidden | Yes     | Manual |
| pngparse      |             | No   | Yes   | No     | Yes       | No        | No     | No      | Yes    |
| pngparse-sync | pngparse    | Yes  | No    | No     | Yes       | No        | No     | No      | Yes    |
| png-async     |             | No   | Yes   | No     | No        | No        | No     | Yes     | Yes    |
| png-js        |             | No   | Yes   | No     | No        | No        | No     | No      | No     |

Native C++ node decoders:

- png
- png-sync (sync version of above)
- pixel-png
- png-img

# Tests

Tested using [PNG Suite](http://www.schaik.com/pngsuite/). We read every file into pngjs, output it in standard 8bit colour, synchronously and asynchronously, then compare the original with the newly saved images.

To run the tests, fetch the repo (tests are not distributed via npm), install with `npm i`, then run `npm test`.

The only thing not converted is gamma correction - this is because multiple vendors will do gamma correction differently, so the tests will have different results on different browsers.

# Installation

```
$ npm install pngjs --save
```

# Browser

The package has been built with a [Browserify](browserify.org) bundle (`npm run browserify`), and you can use the browser version by including it in your code:

```
import { PNG } from 'pngjs/browser';
```

# Example

```js
var fs = require("fs"),
  PNG = require("pngjs").PNG;

fs.createReadStream("in.png")
  .pipe(
    new PNG({
      filterType: 4,
    })
  )
  .on("parsed", function () {
    for (var y = 0; y < this.height; y++) {
      for (var x = 0; x < this.width; x++) {
        var idx = (this.width * y + x) << 2;

        // invert color
        this.data[idx] = 255 - this.data[idx];
        this.data[idx + 1] = 255 - this.data[idx + 1];
        this.data[idx + 2] = 255 - this.data[idx + 2];

        // and reduce opacity
        this.data[idx + 3] = this.data[idx + 3] >> 1;
      }
    }

    this.pack().pipe(fs.createWriteStream("out.png"));
  });
```

For more examples see the `examples` folder.

# Async API

As input any color type is accepted (grayscale, rgb, palette, grayscale with alpha, rgb with alpha) but 8 bit per sample (channel) is the only supported bit depth. Interlaced mode is not supported.

## Class: PNG

`PNG` is a readable and writable `Stream`.
### Options

- `width` - use this with `height` if you want to create a png from scratch
- `height` - as above
- `checkCRC` - whether the parser should be strict about checksums in the source stream (default: `true`)
- `deflateChunkSize` - chunk size used for deflating data chunks; this should be a power of 2 between 256 and 32\*1024 (default: 32 kB)
- `deflateLevel` - compression level for deflate (default: 9)
- `deflateStrategy` - compression strategy for deflate (default: 3)
- `deflateFactory` - deflate stream factory (default: `zlib.createDeflate`)
- `filterType` - png filtering method for scanlines (default: -1 => auto, accepts array of numbers 0-4)
- `colorType` - the output colorType - see constants. 0 = grayscale, no alpha; 2 = color, no alpha; 4 = grayscale & alpha; 6 = color & alpha. Default is currently 6, but in the future it may calculate the best mode.
- `inputColorType` - the input colorType - see constants. Default is 6 (RGBA)
- `bitDepth` - the bitDepth of the output, 8 or 16 bits. Input data is expected to have this bit depth; 16 bit data is expected in the system endianness (default: 8)
- `inputHasAlpha` - whether the input bitmap has 4 bytes per pixel (rgb and alpha) or 3 (rgb - no alpha)
- `bgColor` - an object containing red, green, and blue values between 0 and 255 that is used when packing a PNG if alpha is not to be included (default: 255,255,255)

A minimal sketch of the `width`/`height` options in use follows below (this sketch is not part of the upstream README, and the output file name is only illustrative).
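The sketch creates a 2x2 image from scratch, fills the RGBA buffer by hand, and packs it to disk:

```js
var fs = require("fs"),
  PNG = require("pngjs").PNG;

// width/height allocate a zeroed RGBA buffer in png.data
var png = new PNG({ width: 2, height: 2 });

for (var y = 0; y < png.height; y++) {
  for (var x = 0; x < png.width; x++) {
    var idx = (png.width * y + x) << 2; // 4 bytes per pixel
    png.data[idx] = 255; // red
    png.data[idx + 1] = 0; // green
    png.data[idx + 2] = 0; // blue
    png.data[idx + 3] = 255; // fully opaque
  }
}

png.pack().pipe(fs.createWriteStream("solid.png")); // file name is illustrative
```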
### Event: "metadata"

`function(metadata) { }`

Emitted when the image header has been parsed; `metadata` contains this information:

- `width` image size in pixels
- `height` image size in pixels
- `palette` image is paletted
- `color` image is not grayscale
- `alpha` image contains an alpha channel
- `interlace` image is interlaced

### Event: "parsed"

`function(data) { }`

Emitted when the input image has been completely parsed; `data` is complete and ready for modification.

### Event: "error"

`function(error) { }`

A short sketch of wiring up these three events appears below (it is not part of the upstream README; `in.png` is a placeholder path).
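```js
var fs = require("fs"),
  PNG = require("pngjs").PNG;

fs.createReadStream("in.png")
  .pipe(new PNG())
  .on("metadata", function (metadata) {
    // emitted once the header has been decoded
    console.log(metadata.width, metadata.height, metadata.alpha);
  })
  .on("error", function (err) {
    // fires for truncated or corrupt input instead of hanging
    console.error("invalid png:", err.message);
  })
  .on("parsed", function () {
    // this.data is now ready for modification
  });
```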
### png.parse(data, [callback])

Parses PNG file data, which can be a `String` or `Buffer`. Alternatively, you can stream data to the instance of PNG.

The optional `callback` is called once, on `error` or `parsed`. The callback gets two arguments `(err, data)`.

Returns `this` for method chaining.

#### Example

```js
new PNG({ filterType: 4 }).parse(imageData, function (error, data) {
  console.log(error, data);
});
```

### png.pack()

Starts converting data to a PNG file stream.

Returns `this` for method chaining.

### png.bitblt(dst, sx, sy, w, h, dx, dy)

Helper for image manipulation, copies a rectangle of pixels from the current (i.e. the source) image (`sx`, `sy`, `w`, `h`) to the `dst` image (at `dx`, `dy`).

Returns `this` for method chaining.

For example, the following code copies the top-left 100x50 px of `in.png` into dst and writes it to `out.png`:

```js
var dst = new PNG({ width: 100, height: 50 });
fs.createReadStream("in.png")
  .pipe(new PNG())
  .on("parsed", function () {
    this.bitblt(dst, 0, 0, 100, 50, 0, 0);
    dst.pack().pipe(fs.createWriteStream("out.png"));
  });
```

### Method: adjustGamma()

Helper that takes data and adjusts it to be gamma corrected. Note that it is not 100% reliable with transparent colours because that requires knowing the background colour the bitmap is rendered on to.

In tests against the PNG suite it compared 100% with Chrome on all 8 bit and below images. On IE there were some differences.

The following example reads a file, adjusts the gamma (which sets the gamma to 0) and writes it out again, effectively removing any gamma correction from the image.

```js
fs.createReadStream("in.png")
  .pipe(new PNG())
  .on("parsed", function () {
    this.adjustGamma();
    this.pack().pipe(fs.createWriteStream("out.png"));
  });
```

### Property: width

Width of image in pixels

### Property: height

Height of image in pixels

### Property: data

Buffer of image pixel data. Every pixel consists of 4 bytes: R, G, B, A (opacity).

### Property: gamma

Gamma of image (0 if not specified)

## Packing a PNG and removing alpha (RGBA to RGB)

When removing the alpha channel from an image, there needs to be a background color to correctly convert each pixel's transparency to the appropriate RGB value. By default, pngjs will flatten the image against a white background. You can override this in the options:

```js
var fs = require("fs"),
  PNG = require("pngjs").PNG;

fs.createReadStream("in.png")
  .pipe(
    new PNG({
      colorType: 2,
      bgColor: {
        red: 0,
        green: 255,
        blue: 0,
      },
    })
  )
  .on("parsed", function () {
    this.pack().pipe(fs.createWriteStream("out.png"));
  });
```

# Sync API

## PNG.sync

### PNG.sync.read(buffer)

Takes a buffer and returns a PNG image. The properties on the image include the meta data and `data` as per the async API above.

```
var data = fs.readFileSync('in.png');
var png = PNG.sync.read(data);
```

### PNG.sync.write(png)

Takes a PNG image and returns a buffer. The properties on the image include the meta data and `data` as per the async API above.

```
var data = fs.readFileSync('in.png');
var png = PNG.sync.read(data);
var options = { colorType: 6 };
var buffer = PNG.sync.write(png, options);
fs.writeFileSync('out.png', buffer);
```

### PNG.adjustGamma(src)

Adjusts the gamma of a sync image. See the async adjustGamma.

```
var data = fs.readFileSync('in.png');
var png = PNG.sync.read(data);
PNG.adjustGamma(png);
```
node_modules/pngjs/browser.js (new file, generated, vendored, 18985 lines)
File diff suppressed because it is too large.
node_modules/pngjs/lib/bitmapper.js (new file, generated, vendored, 267 lines)
@@ -0,0 +1,267 @@
"use strict";

let interlaceUtils = require("./interlace");

let pixelBppMapper = [
  // 0 - dummy entry
  function () {},

  // 1 - L
  // 0: 0, 1: 0, 2: 0, 3: 0xff
  function (pxData, data, pxPos, rawPos) {
    if (rawPos === data.length) {
      throw new Error("Ran out of data");
    }

    let pixel = data[rawPos];
    pxData[pxPos] = pixel;
    pxData[pxPos + 1] = pixel;
    pxData[pxPos + 2] = pixel;
    pxData[pxPos + 3] = 0xff;
  },

  // 2 - LA
  // 0: 0, 1: 0, 2: 0, 3: 1
  function (pxData, data, pxPos, rawPos) {
    if (rawPos + 1 >= data.length) {
      throw new Error("Ran out of data");
    }

    let pixel = data[rawPos];
    pxData[pxPos] = pixel;
    pxData[pxPos + 1] = pixel;
    pxData[pxPos + 2] = pixel;
    pxData[pxPos + 3] = data[rawPos + 1];
  },

  // 3 - RGB
  // 0: 0, 1: 1, 2: 2, 3: 0xff
  function (pxData, data, pxPos, rawPos) {
    if (rawPos + 2 >= data.length) {
      throw new Error("Ran out of data");
    }

    pxData[pxPos] = data[rawPos];
    pxData[pxPos + 1] = data[rawPos + 1];
    pxData[pxPos + 2] = data[rawPos + 2];
    pxData[pxPos + 3] = 0xff;
  },

  // 4 - RGBA
  // 0: 0, 1: 1, 2: 2, 3: 3
  function (pxData, data, pxPos, rawPos) {
    if (rawPos + 3 >= data.length) {
      throw new Error("Ran out of data");
    }

    pxData[pxPos] = data[rawPos];
    pxData[pxPos + 1] = data[rawPos + 1];
    pxData[pxPos + 2] = data[rawPos + 2];
    pxData[pxPos + 3] = data[rawPos + 3];
  },
];

let pixelBppCustomMapper = [
  // 0 - dummy entry
  function () {},

  // 1 - L
  // 0: 0, 1: 0, 2: 0, 3: 0xff
  function (pxData, pixelData, pxPos, maxBit) {
    let pixel = pixelData[0];
    pxData[pxPos] = pixel;
    pxData[pxPos + 1] = pixel;
    pxData[pxPos + 2] = pixel;
    pxData[pxPos + 3] = maxBit;
  },

  // 2 - LA
  // 0: 0, 1: 0, 2: 0, 3: 1
  function (pxData, pixelData, pxPos) {
    let pixel = pixelData[0];
    pxData[pxPos] = pixel;
    pxData[pxPos + 1] = pixel;
    pxData[pxPos + 2] = pixel;
    pxData[pxPos + 3] = pixelData[1];
  },

  // 3 - RGB
  // 0: 0, 1: 1, 2: 2, 3: 0xff
  function (pxData, pixelData, pxPos, maxBit) {
    pxData[pxPos] = pixelData[0];
    pxData[pxPos + 1] = pixelData[1];
    pxData[pxPos + 2] = pixelData[2];
    pxData[pxPos + 3] = maxBit;
  },

  // 4 - RGBA
  // 0: 0, 1: 1, 2: 2, 3: 3
  function (pxData, pixelData, pxPos) {
    pxData[pxPos] = pixelData[0];
    pxData[pxPos + 1] = pixelData[1];
    pxData[pxPos + 2] = pixelData[2];
    pxData[pxPos + 3] = pixelData[3];
  },
];

function bitRetriever(data, depth) {
  let leftOver = [];
  let i = 0;

  function split() {
    if (i === data.length) {
      throw new Error("Ran out of data");
    }
    let byte = data[i];
    i++;
    let byte8, byte7, byte6, byte5, byte4, byte3, byte2, byte1;
    switch (depth) {
      default:
        throw new Error("unrecognised depth");
      case 16:
        byte2 = data[i];
        i++;
        leftOver.push((byte << 8) + byte2);
        break;
      case 4:
        byte2 = byte & 0x0f;
        byte1 = byte >> 4;
        leftOver.push(byte1, byte2);
        break;
      case 2:
        byte4 = byte & 3;
        byte3 = (byte >> 2) & 3;
        byte2 = (byte >> 4) & 3;
        byte1 = (byte >> 6) & 3;
        leftOver.push(byte1, byte2, byte3, byte4);
        break;
      case 1:
        byte8 = byte & 1;
        byte7 = (byte >> 1) & 1;
        byte6 = (byte >> 2) & 1;
        byte5 = (byte >> 3) & 1;
        byte4 = (byte >> 4) & 1;
        byte3 = (byte >> 5) & 1;
        byte2 = (byte >> 6) & 1;
        byte1 = (byte >> 7) & 1;
        leftOver.push(byte1, byte2, byte3, byte4, byte5, byte6, byte7, byte8);
        break;
    }
  }

  return {
    get: function (count) {
      while (leftOver.length < count) {
        split();
      }
      let returner = leftOver.slice(0, count);
      leftOver = leftOver.slice(count);
      return returner;
    },
    resetAfterLine: function () {
      leftOver.length = 0;
    },
    end: function () {
      if (i !== data.length) {
        throw new Error("extra data found");
      }
    },
  };
}

function mapImage8Bit(image, pxData, getPxPos, bpp, data, rawPos) {
  // eslint-disable-line max-params
  let imageWidth = image.width;
  let imageHeight = image.height;
  let imagePass = image.index;
  for (let y = 0; y < imageHeight; y++) {
    for (let x = 0; x < imageWidth; x++) {
      let pxPos = getPxPos(x, y, imagePass);
      pixelBppMapper[bpp](pxData, data, pxPos, rawPos);
      rawPos += bpp; //eslint-disable-line no-param-reassign
    }
  }
  return rawPos;
}

function mapImageCustomBit(image, pxData, getPxPos, bpp, bits, maxBit) {
  // eslint-disable-line max-params
  let imageWidth = image.width;
  let imageHeight = image.height;
  let imagePass = image.index;
  for (let y = 0; y < imageHeight; y++) {
    for (let x = 0; x < imageWidth; x++) {
      let pixelData = bits.get(bpp);
      let pxPos = getPxPos(x, y, imagePass);
      pixelBppCustomMapper[bpp](pxData, pixelData, pxPos, maxBit);
    }
    bits.resetAfterLine();
  }
}

exports.dataToBitMap = function (data, bitmapInfo) {
  let width = bitmapInfo.width;
  let height = bitmapInfo.height;
  let depth = bitmapInfo.depth;
  let bpp = bitmapInfo.bpp;
  let interlace = bitmapInfo.interlace;
  let bits;

  if (depth !== 8) {
    bits = bitRetriever(data, depth);
  }
  let pxData;
  if (depth <= 8) {
    pxData = Buffer.alloc(width * height * 4);
  } else {
    pxData = new Uint16Array(width * height * 4);
  }
  let maxBit = Math.pow(2, depth) - 1;
  let rawPos = 0;
  let images;
  let getPxPos;

  if (interlace) {
    images = interlaceUtils.getImagePasses(width, height);
    getPxPos = interlaceUtils.getInterlaceIterator(width, height);
  } else {
    let nonInterlacedPxPos = 0;
    getPxPos = function () {
      let returner = nonInterlacedPxPos;
      nonInterlacedPxPos += 4;
      return returner;
    };
    images = [{ width: width, height: height }];
  }

  for (let imageIndex = 0; imageIndex < images.length; imageIndex++) {
    if (depth === 8) {
      rawPos = mapImage8Bit(
        images[imageIndex],
        pxData,
        getPxPos,
        bpp,
        data,
        rawPos
      );
    } else {
      mapImageCustomBit(
        images[imageIndex],
        pxData,
        getPxPos,
        bpp,
        bits,
        maxBit
      );
    }
  }
  if (depth === 8) {
    if (rawPos !== data.length) {
      throw new Error("extra data found");
    }
  } else {
    bits.end();
  }

  return pxData;
};
node_modules/pngjs/lib/bitpacker.js (new file, generated, vendored, 158 lines)
@@ -0,0 +1,158 @@
"use strict";

let constants = require("./constants");

module.exports = function (dataIn, width, height, options) {
  let outHasAlpha =
    [constants.COLORTYPE_COLOR_ALPHA, constants.COLORTYPE_ALPHA].indexOf(
      options.colorType
    ) !== -1;
  if (options.colorType === options.inputColorType) {
    let bigEndian = (function () {
      let buffer = new ArrayBuffer(2);
      new DataView(buffer).setInt16(0, 256, true /* littleEndian */);
      // Int16Array uses the platform's endianness.
      return new Int16Array(buffer)[0] !== 256;
    })();
    // If no need to convert to grayscale and alpha is present/absent in both, take a fast route
    if (options.bitDepth === 8 || (options.bitDepth === 16 && bigEndian)) {
      return dataIn;
    }
  }

  // map to a UInt16 array if data is 16bit, fix endianness below
  let data = options.bitDepth !== 16 ? dataIn : new Uint16Array(dataIn.buffer);

  let maxValue = 255;
  let inBpp = constants.COLORTYPE_TO_BPP_MAP[options.inputColorType];
  if (inBpp === 4 && !options.inputHasAlpha) {
    inBpp = 3;
  }
  let outBpp = constants.COLORTYPE_TO_BPP_MAP[options.colorType];
  if (options.bitDepth === 16) {
    maxValue = 65535;
    outBpp *= 2;
  }
  let outData = Buffer.alloc(width * height * outBpp);

  let inIndex = 0;
  let outIndex = 0;

  let bgColor = options.bgColor || {};
  if (bgColor.red === undefined) {
    bgColor.red = maxValue;
  }
  if (bgColor.green === undefined) {
    bgColor.green = maxValue;
  }
  if (bgColor.blue === undefined) {
    bgColor.blue = maxValue;
  }

  function getRGBA() {
    let red;
    let green;
    let blue;
    let alpha = maxValue;
    switch (options.inputColorType) {
      case constants.COLORTYPE_COLOR_ALPHA:
        alpha = data[inIndex + 3];
        red = data[inIndex];
        green = data[inIndex + 1];
        blue = data[inIndex + 2];
        break;
      case constants.COLORTYPE_COLOR:
        red = data[inIndex];
        green = data[inIndex + 1];
        blue = data[inIndex + 2];
        break;
      case constants.COLORTYPE_ALPHA:
        alpha = data[inIndex + 1];
        red = data[inIndex];
        green = red;
        blue = red;
        break;
      case constants.COLORTYPE_GRAYSCALE:
        red = data[inIndex];
        green = red;
        blue = red;
        break;
      default:
        throw new Error(
          "input color type:" +
            options.inputColorType +
            " is not supported at present"
        );
    }

    if (options.inputHasAlpha) {
      if (!outHasAlpha) {
        alpha /= maxValue;
        red = Math.min(
          Math.max(Math.round((1 - alpha) * bgColor.red + alpha * red), 0),
          maxValue
        );
        green = Math.min(
          Math.max(Math.round((1 - alpha) * bgColor.green + alpha * green), 0),
          maxValue
        );
        blue = Math.min(
          Math.max(Math.round((1 - alpha) * bgColor.blue + alpha * blue), 0),
          maxValue
        );
      }
    }
    return { red: red, green: green, blue: blue, alpha: alpha };
  }

  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      let rgba = getRGBA(data, inIndex);

      switch (options.colorType) {
        case constants.COLORTYPE_COLOR_ALPHA:
        case constants.COLORTYPE_COLOR:
          if (options.bitDepth === 8) {
            outData[outIndex] = rgba.red;
            outData[outIndex + 1] = rgba.green;
            outData[outIndex + 2] = rgba.blue;
            if (outHasAlpha) {
              outData[outIndex + 3] = rgba.alpha;
            }
          } else {
            outData.writeUInt16BE(rgba.red, outIndex);
            outData.writeUInt16BE(rgba.green, outIndex + 2);
            outData.writeUInt16BE(rgba.blue, outIndex + 4);
            if (outHasAlpha) {
              outData.writeUInt16BE(rgba.alpha, outIndex + 6);
            }
          }
          break;
        case constants.COLORTYPE_ALPHA:
        case constants.COLORTYPE_GRAYSCALE: {
          // Convert to grayscale and alpha
          let grayscale = (rgba.red + rgba.green + rgba.blue) / 3;
          if (options.bitDepth === 8) {
            outData[outIndex] = grayscale;
            if (outHasAlpha) {
              outData[outIndex + 1] = rgba.alpha;
            }
          } else {
            outData.writeUInt16BE(grayscale, outIndex);
            if (outHasAlpha) {
              outData.writeUInt16BE(rgba.alpha, outIndex + 2);
            }
          }
          break;
        }
        default:
          throw new Error("unrecognised color Type " + options.colorType);
      }

      inIndex += inBpp;
      outIndex += outBpp;
    }
  }

  return outData;
};
node_modules/pngjs/lib/chunkstream.js (new file, generated, vendored, 189 lines)
@@ -0,0 +1,189 @@
"use strict";

let util = require("util");
let Stream = require("stream");

let ChunkStream = (module.exports = function () {
  Stream.call(this);

  this._buffers = [];
  this._buffered = 0;

  this._reads = [];
  this._paused = false;

  this._encoding = "utf8";
  this.writable = true;
});
util.inherits(ChunkStream, Stream);

ChunkStream.prototype.read = function (length, callback) {
  this._reads.push({
    length: Math.abs(length), // if length < 0 then at most this length
    allowLess: length < 0,
    func: callback,
  });

  process.nextTick(
    function () {
      this._process();

      // if it's paused and there is not enough data, then ask for more
      if (this._paused && this._reads && this._reads.length > 0) {
        this._paused = false;

        this.emit("drain");
      }
    }.bind(this)
  );
};

ChunkStream.prototype.write = function (data, encoding) {
  if (!this.writable) {
    this.emit("error", new Error("Stream not writable"));
    return false;
  }

  let dataBuffer;
  if (Buffer.isBuffer(data)) {
    dataBuffer = data;
  } else {
    dataBuffer = Buffer.from(data, encoding || this._encoding);
  }

  this._buffers.push(dataBuffer);
  this._buffered += dataBuffer.length;

  this._process();

  // ok if there are no more read requests
  if (this._reads && this._reads.length === 0) {
    this._paused = true;
  }

  return this.writable && !this._paused;
};

ChunkStream.prototype.end = function (data, encoding) {
  if (data) {
    this.write(data, encoding);
  }

  this.writable = false;

  // already destroyed
  if (!this._buffers) {
    return;
  }

  // enqueue or handle end
  if (this._buffers.length === 0) {
    this._end();
  } else {
    this._buffers.push(null);
    this._process();
  }
};

ChunkStream.prototype.destroySoon = ChunkStream.prototype.end;

ChunkStream.prototype._end = function () {
  if (this._reads.length > 0) {
    this.emit("error", new Error("Unexpected end of input"));
  }

  this.destroy();
};

ChunkStream.prototype.destroy = function () {
  if (!this._buffers) {
    return;
  }

  this.writable = false;
  this._reads = null;
  this._buffers = null;

  this.emit("close");
};

ChunkStream.prototype._processReadAllowingLess = function (read) {
  // ok, there is data available, so we can satisfy this request
  this._reads.shift(); // == read

  // first we need to peek into the first buffer
  let smallerBuf = this._buffers[0];

  // ok, there is more data than we need
  if (smallerBuf.length > read.length) {
    this._buffered -= read.length;
    this._buffers[0] = smallerBuf.slice(read.length);

    read.func.call(this, smallerBuf.slice(0, read.length));
  } else {
    // ok, this is less than the maximum length so use it all
    this._buffered -= smallerBuf.length;
    this._buffers.shift(); // == smallerBuf

    read.func.call(this, smallerBuf);
  }
};

ChunkStream.prototype._processRead = function (read) {
  this._reads.shift(); // == read

  let pos = 0;
  let count = 0;
  let data = Buffer.alloc(read.length);

  // create buffer for all data
  while (pos < read.length) {
    let buf = this._buffers[count++];
    let len = Math.min(buf.length, read.length - pos);

    buf.copy(data, pos, 0, len);
    pos += len;

    // last buffer wasn't used in full, so just slice it and leave
    if (len !== buf.length) {
      this._buffers[--count] = buf.slice(len);
    }
  }

  // remove all used buffers
  if (count > 0) {
    this._buffers.splice(0, count);
  }

  this._buffered -= read.length;

  read.func.call(this, data);
};

ChunkStream.prototype._process = function () {
  try {
    // as long as there is any data and read requests
    while (this._buffered > 0 && this._reads && this._reads.length > 0) {
      let read = this._reads[0];

      // read any data (but no more than length)
      if (read.allowLess) {
        this._processReadAllowingLess(read);
      } else if (this._buffered >= read.length) {
        // ok we can meet some expectations

        this._processRead(read);
      } else {
        // not enough data to satisfy the first request in the queue,
        // so we need to wait for more
        break;
      }
    }

    if (this._buffers && !this.writable) {
      this._end();
    }
  } catch (ex) {
    this.emit("error", ex);
  }
};
node_modules/pngjs/lib/constants.js (new file, generated, vendored, 32 lines)
@@ -0,0 +1,32 @@
"use strict";

module.exports = {
  PNG_SIGNATURE: [0x89, 0x50, 0x4e, 0x47, 0x0d, 0x0a, 0x1a, 0x0a],

  TYPE_IHDR: 0x49484452,
  TYPE_IEND: 0x49454e44,
  TYPE_IDAT: 0x49444154,
  TYPE_PLTE: 0x504c5445,
  TYPE_tRNS: 0x74524e53, // eslint-disable-line camelcase
  TYPE_gAMA: 0x67414d41, // eslint-disable-line camelcase

  // color-type bits
  COLORTYPE_GRAYSCALE: 0,
  COLORTYPE_PALETTE: 1,
  COLORTYPE_COLOR: 2,
  COLORTYPE_ALPHA: 4, // e.g. grayscale and alpha

  // color-type combinations
  COLORTYPE_PALETTE_COLOR: 3,
  COLORTYPE_COLOR_ALPHA: 6,

  COLORTYPE_TO_BPP_MAP: {
    0: 1,
    2: 3,
    3: 1,
    4: 2,
    6: 4,
  },

  GAMMA_DIVISION: 100000,
};
node_modules/pngjs/lib/crc.js (new file, generated, vendored, 40 lines)
@@ -0,0 +1,40 @@
"use strict";

let crcTable = [];

(function () {
  for (let i = 0; i < 256; i++) {
    let currentCrc = i;
    for (let j = 0; j < 8; j++) {
      if (currentCrc & 1) {
        currentCrc = 0xedb88320 ^ (currentCrc >>> 1);
      } else {
        currentCrc = currentCrc >>> 1;
      }
    }
    crcTable[i] = currentCrc;
  }
})();

let CrcCalculator = (module.exports = function () {
  this._crc = -1;
});

CrcCalculator.prototype.write = function (data) {
  for (let i = 0; i < data.length; i++) {
    this._crc = crcTable[(this._crc ^ data[i]) & 0xff] ^ (this._crc >>> 8);
  }
  return true;
};

CrcCalculator.prototype.crc32 = function () {
  return this._crc ^ -1;
};

CrcCalculator.crc32 = function (buf) {
  let crc = -1;
  for (let i = 0; i < buf.length; i++) {
    crc = crcTable[(crc ^ buf[i]) & 0xff] ^ (crc >>> 8);
  }
  return crc ^ -1;
};
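Editor's note (not part of the vendored file): the module above implements the table-driven CRC-32 that PNG uses for chunk checksums. A minimal sketch of how it can be consumed, assuming it is required straight from the vendored path (the path is illustrative):

```js
let CrcCalculator = require("./node_modules/pngjs/lib/crc");

// one-shot helper over a whole buffer
let crcOfType = CrcCalculator.crc32(Buffer.from("IEND", "ascii"));

// incremental use, as the chunk parser does while streaming chunk data
let calc = new CrcCalculator();
calc.write(Buffer.from("IHDR", "ascii"));
calc.write(Buffer.alloc(13)); // chunk payload bytes
let crc = calc.crc32(); // compare against the CRC field read from the stream

console.log(crcOfType, crc); // values are signed 32-bit integers
```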
node_modules/pngjs/lib/filter-pack.js (new file, generated, vendored, 171 lines)
@@ -0,0 +1,171 @@
"use strict";

let paethPredictor = require("./paeth-predictor");

function filterNone(pxData, pxPos, byteWidth, rawData, rawPos) {
  for (let x = 0; x < byteWidth; x++) {
    rawData[rawPos + x] = pxData[pxPos + x];
  }
}

function filterSumNone(pxData, pxPos, byteWidth) {
  let sum = 0;
  let length = pxPos + byteWidth;

  for (let i = pxPos; i < length; i++) {
    sum += Math.abs(pxData[i]);
  }
  return sum;
}

function filterSub(pxData, pxPos, byteWidth, rawData, rawPos, bpp) {
  for (let x = 0; x < byteWidth; x++) {
    let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
    let val = pxData[pxPos + x] - left;

    rawData[rawPos + x] = val;
  }
}

function filterSumSub(pxData, pxPos, byteWidth, bpp) {
  let sum = 0;
  for (let x = 0; x < byteWidth; x++) {
    let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
    let val = pxData[pxPos + x] - left;

    sum += Math.abs(val);
  }

  return sum;
}

function filterUp(pxData, pxPos, byteWidth, rawData, rawPos) {
  for (let x = 0; x < byteWidth; x++) {
    let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
    let val = pxData[pxPos + x] - up;

    rawData[rawPos + x] = val;
  }
}

function filterSumUp(pxData, pxPos, byteWidth) {
  let sum = 0;
  let length = pxPos + byteWidth;
  for (let x = pxPos; x < length; x++) {
    let up = pxPos > 0 ? pxData[x - byteWidth] : 0;
    let val = pxData[x] - up;

    sum += Math.abs(val);
  }

  return sum;
}

function filterAvg(pxData, pxPos, byteWidth, rawData, rawPos, bpp) {
  for (let x = 0; x < byteWidth; x++) {
    let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
    let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
    let val = pxData[pxPos + x] - ((left + up) >> 1);

    rawData[rawPos + x] = val;
  }
}

function filterSumAvg(pxData, pxPos, byteWidth, bpp) {
  let sum = 0;
  for (let x = 0; x < byteWidth; x++) {
    let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
    let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
    let val = pxData[pxPos + x] - ((left + up) >> 1);

    sum += Math.abs(val);
  }

  return sum;
}

function filterPaeth(pxData, pxPos, byteWidth, rawData, rawPos, bpp) {
  for (let x = 0; x < byteWidth; x++) {
    let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
    let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
    let upleft =
      pxPos > 0 && x >= bpp ? pxData[pxPos + x - (byteWidth + bpp)] : 0;
    let val = pxData[pxPos + x] - paethPredictor(left, up, upleft);

    rawData[rawPos + x] = val;
  }
}

function filterSumPaeth(pxData, pxPos, byteWidth, bpp) {
  let sum = 0;
  for (let x = 0; x < byteWidth; x++) {
    let left = x >= bpp ? pxData[pxPos + x - bpp] : 0;
    let up = pxPos > 0 ? pxData[pxPos + x - byteWidth] : 0;
    let upleft =
      pxPos > 0 && x >= bpp ? pxData[pxPos + x - (byteWidth + bpp)] : 0;
    let val = pxData[pxPos + x] - paethPredictor(left, up, upleft);

    sum += Math.abs(val);
  }

  return sum;
}

let filters = {
  0: filterNone,
  1: filterSub,
  2: filterUp,
  3: filterAvg,
  4: filterPaeth,
};

let filterSums = {
  0: filterSumNone,
  1: filterSumSub,
  2: filterSumUp,
  3: filterSumAvg,
  4: filterSumPaeth,
};

module.exports = function (pxData, width, height, options, bpp) {
  let filterTypes;
  if (!("filterType" in options) || options.filterType === -1) {
    filterTypes = [0, 1, 2, 3, 4];
  } else if (typeof options.filterType === "number") {
    filterTypes = [options.filterType];
  } else {
    throw new Error("unrecognised filter types");
  }

  if (options.bitDepth === 16) {
    bpp *= 2;
  }
  let byteWidth = width * bpp;
  let rawPos = 0;
  let pxPos = 0;
  let rawData = Buffer.alloc((byteWidth + 1) * height);

  let sel = filterTypes[0];

  for (let y = 0; y < height; y++) {
    if (filterTypes.length > 1) {
      // find best filter for this line (with lowest sum of values)
      let min = Infinity;

      for (let i = 0; i < filterTypes.length; i++) {
        let sum = filterSums[filterTypes[i]](pxData, pxPos, byteWidth, bpp);
        if (sum < min) {
          sel = filterTypes[i];
          min = sum;
        }
      }
    }

    rawData[rawPos] = sel;
    rawPos++;
    filters[sel](pxData, pxPos, byteWidth, rawData, rawPos, bpp);
    rawPos += byteWidth;
    pxPos += byteWidth;
  }
  return rawData;
};
node_modules/pngjs/lib/filter-parse-async.js (new file, generated, vendored, 24 lines)
@@ -0,0 +1,24 @@
"use strict";

let util = require("util");
let ChunkStream = require("./chunkstream");
let Filter = require("./filter-parse");

let FilterAsync = (module.exports = function (bitmapInfo) {
  ChunkStream.call(this);

  let buffers = [];
  let that = this;
  this._filter = new Filter(bitmapInfo, {
    read: this.read.bind(this),
    write: function (buffer) {
      buffers.push(buffer);
    },
    complete: function () {
      that.emit("complete", Buffer.concat(buffers));
    },
  });

  this._filter.start();
});
util.inherits(FilterAsync, ChunkStream);
node_modules/pngjs/lib/filter-parse-sync.js (new file, generated, vendored, 21 lines)
@@ -0,0 +1,21 @@
"use strict";

let SyncReader = require("./sync-reader");
let Filter = require("./filter-parse");

exports.process = function (inBuffer, bitmapInfo) {
  let outBuffers = [];
  let reader = new SyncReader(inBuffer);
  let filter = new Filter(bitmapInfo, {
    read: reader.read.bind(reader),
    write: function (bufferPart) {
      outBuffers.push(bufferPart);
    },
    complete: function () {},
  });

  filter.start();
  reader.process();

  return Buffer.concat(outBuffers);
};
node_modules/pngjs/lib/filter-parse.js (new file, generated, vendored, 177 lines)
@@ -0,0 +1,177 @@
"use strict";

let interlaceUtils = require("./interlace");
let paethPredictor = require("./paeth-predictor");

function getByteWidth(width, bpp, depth) {
  let byteWidth = width * bpp;
  if (depth !== 8) {
    byteWidth = Math.ceil(byteWidth / (8 / depth));
  }
  return byteWidth;
}

let Filter = (module.exports = function (bitmapInfo, dependencies) {
  let width = bitmapInfo.width;
  let height = bitmapInfo.height;
  let interlace = bitmapInfo.interlace;
  let bpp = bitmapInfo.bpp;
  let depth = bitmapInfo.depth;

  this.read = dependencies.read;
  this.write = dependencies.write;
  this.complete = dependencies.complete;

  this._imageIndex = 0;
  this._images = [];
  if (interlace) {
    let passes = interlaceUtils.getImagePasses(width, height);
    for (let i = 0; i < passes.length; i++) {
      this._images.push({
        byteWidth: getByteWidth(passes[i].width, bpp, depth),
        height: passes[i].height,
        lineIndex: 0,
      });
    }
  } else {
    this._images.push({
      byteWidth: getByteWidth(width, bpp, depth),
      height: height,
      lineIndex: 0,
    });
  }

  // when filtering the line we look at the pixel to the left
  // the spec also says it is done on a byte level regardless of the number of pixels
  // so if the depth is byte compatible (8 or 16) we subtract the bpp in order to compare back
  // a pixel rather than just a different byte part. However if we are sub byte, we ignore.
  if (depth === 8) {
    this._xComparison = bpp;
  } else if (depth === 16) {
    this._xComparison = bpp * 2;
  } else {
    this._xComparison = 1;
  }
});

Filter.prototype.start = function () {
  this.read(
    this._images[this._imageIndex].byteWidth + 1,
    this._reverseFilterLine.bind(this)
  );
};

Filter.prototype._unFilterType1 = function (
  rawData,
  unfilteredLine,
  byteWidth
) {
  let xComparison = this._xComparison;
  let xBiggerThan = xComparison - 1;

  for (let x = 0; x < byteWidth; x++) {
    let rawByte = rawData[1 + x];
    let f1Left = x > xBiggerThan ? unfilteredLine[x - xComparison] : 0;
    unfilteredLine[x] = rawByte + f1Left;
  }
};

Filter.prototype._unFilterType2 = function (
  rawData,
  unfilteredLine,
  byteWidth
) {
  let lastLine = this._lastLine;

  for (let x = 0; x < byteWidth; x++) {
    let rawByte = rawData[1 + x];
    let f2Up = lastLine ? lastLine[x] : 0;
    unfilteredLine[x] = rawByte + f2Up;
  }
};

Filter.prototype._unFilterType3 = function (
  rawData,
  unfilteredLine,
  byteWidth
) {
  let xComparison = this._xComparison;
  let xBiggerThan = xComparison - 1;
  let lastLine = this._lastLine;

  for (let x = 0; x < byteWidth; x++) {
    let rawByte = rawData[1 + x];
    let f3Up = lastLine ? lastLine[x] : 0;
    let f3Left = x > xBiggerThan ? unfilteredLine[x - xComparison] : 0;
    let f3Add = Math.floor((f3Left + f3Up) / 2);
    unfilteredLine[x] = rawByte + f3Add;
  }
};

Filter.prototype._unFilterType4 = function (
  rawData,
  unfilteredLine,
  byteWidth
) {
  let xComparison = this._xComparison;
  let xBiggerThan = xComparison - 1;
  let lastLine = this._lastLine;

  for (let x = 0; x < byteWidth; x++) {
    let rawByte = rawData[1 + x];
    let f4Up = lastLine ? lastLine[x] : 0;
    let f4Left = x > xBiggerThan ? unfilteredLine[x - xComparison] : 0;
    let f4UpLeft = x > xBiggerThan && lastLine ? lastLine[x - xComparison] : 0;
    let f4Add = paethPredictor(f4Left, f4Up, f4UpLeft);
    unfilteredLine[x] = rawByte + f4Add;
  }
};

Filter.prototype._reverseFilterLine = function (rawData) {
  let filter = rawData[0];
  let unfilteredLine;
  let currentImage = this._images[this._imageIndex];
  let byteWidth = currentImage.byteWidth;

  if (filter === 0) {
    unfilteredLine = rawData.slice(1, byteWidth + 1);
  } else {
    unfilteredLine = Buffer.alloc(byteWidth);

    switch (filter) {
      case 1:
        this._unFilterType1(rawData, unfilteredLine, byteWidth);
        break;
      case 2:
        this._unFilterType2(rawData, unfilteredLine, byteWidth);
        break;
      case 3:
        this._unFilterType3(rawData, unfilteredLine, byteWidth);
        break;
      case 4:
        this._unFilterType4(rawData, unfilteredLine, byteWidth);
        break;
      default:
        throw new Error("Unrecognised filter type - " + filter);
    }
  }

  this.write(unfilteredLine);

  currentImage.lineIndex++;
  if (currentImage.lineIndex >= currentImage.height) {
    this._lastLine = null;
    this._imageIndex++;
    currentImage = this._images[this._imageIndex];
  } else {
    this._lastLine = unfilteredLine;
  }

  if (currentImage) {
    // read, using the byte width that may be from the new current image
    this.read(currentImage.byteWidth + 1, this._reverseFilterLine.bind(this));
  } else {
    this._lastLine = null;
    this.complete();
  }
};
node_modules/pngjs/lib/format-normaliser.js (new file, generated, vendored, 93 lines)
@@ -0,0 +1,93 @@
"use strict";

function dePalette(indata, outdata, width, height, palette) {
  let pxPos = 0;
  // use values from palette
  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      let color = palette[indata[pxPos]];

      if (!color) {
        throw new Error("index " + indata[pxPos] + " not in palette");
      }

      for (let i = 0; i < 4; i++) {
        outdata[pxPos + i] = color[i];
      }
      pxPos += 4;
    }
  }
}

function replaceTransparentColor(indata, outdata, width, height, transColor) {
  let pxPos = 0;
  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      let makeTrans = false;

      if (transColor.length === 1) {
        if (transColor[0] === indata[pxPos]) {
          makeTrans = true;
        }
      } else if (
        transColor[0] === indata[pxPos] &&
        transColor[1] === indata[pxPos + 1] &&
        transColor[2] === indata[pxPos + 2]
      ) {
        makeTrans = true;
      }
      if (makeTrans) {
        for (let i = 0; i < 4; i++) {
          outdata[pxPos + i] = 0;
        }
      }
      pxPos += 4;
    }
  }
}

function scaleDepth(indata, outdata, width, height, depth) {
  let maxOutSample = 255;
  let maxInSample = Math.pow(2, depth) - 1;
  let pxPos = 0;

  for (let y = 0; y < height; y++) {
    for (let x = 0; x < width; x++) {
      for (let i = 0; i < 4; i++) {
        outdata[pxPos + i] = Math.floor(
          (indata[pxPos + i] * maxOutSample) / maxInSample + 0.5
        );
      }
      pxPos += 4;
    }
  }
}

module.exports = function (indata, imageData, skipRescale = false) {
  let depth = imageData.depth;
  let width = imageData.width;
  let height = imageData.height;
  let colorType = imageData.colorType;
  let transColor = imageData.transColor;
  let palette = imageData.palette;

  let outdata = indata; // only different for 16 bits

  if (colorType === 3) {
    // paletted
    dePalette(indata, outdata, width, height, palette);
  } else {
    if (transColor) {
      replaceTransparentColor(indata, outdata, width, height, transColor);
    }
    // if it needs scaling
    if (depth !== 8 && !skipRescale) {
      // if we need to change the buffer size
      if (depth === 16) {
        outdata = Buffer.alloc(width * height * 4);
      }
      scaleDepth(indata, outdata, width, height, depth);
    }
  }
  return outdata;
};
node_modules/pngjs/lib/interlace.js (new file, generated, vendored, 95 lines)
@@ -0,0 +1,95 @@
"use strict";

// Adam 7
//   0 1 2 3 4 5 6 7
// 0 x 6 4 6 x 6 4 6
// 1 7 7 7 7 7 7 7 7
// 2 5 6 5 6 5 6 5 6
// 3 7 7 7 7 7 7 7 7
// 4 3 6 4 6 3 6 4 6
// 5 7 7 7 7 7 7 7 7
// 6 5 6 5 6 5 6 5 6
// 7 7 7 7 7 7 7 7 7

let imagePasses = [
  {
    // pass 1 - 1px
    x: [0],
    y: [0],
  },
  {
    // pass 2 - 1px
    x: [4],
    y: [0],
  },
  {
    // pass 3 - 2px
    x: [0, 4],
    y: [4],
  },
  {
    // pass 4 - 4px
    x: [2, 6],
    y: [0, 4],
  },
  {
    // pass 5 - 8px
    x: [0, 2, 4, 6],
    y: [2, 6],
  },
  {
    // pass 6 - 16px
    x: [1, 3, 5, 7],
    y: [0, 2, 4, 6],
  },
  {
    // pass 7 - 32px
    x: [0, 1, 2, 3, 4, 5, 6, 7],
    y: [1, 3, 5, 7],
  },
];

exports.getImagePasses = function (width, height) {
  let images = [];
  let xLeftOver = width % 8;
  let yLeftOver = height % 8;
  let xRepeats = (width - xLeftOver) / 8;
  let yRepeats = (height - yLeftOver) / 8;
  for (let i = 0; i < imagePasses.length; i++) {
    let pass = imagePasses[i];
    let passWidth = xRepeats * pass.x.length;
    let passHeight = yRepeats * pass.y.length;
    for (let j = 0; j < pass.x.length; j++) {
      if (pass.x[j] < xLeftOver) {
        passWidth++;
      } else {
        break;
      }
    }
    for (let j = 0; j < pass.y.length; j++) {
      if (pass.y[j] < yLeftOver) {
        passHeight++;
      } else {
        break;
      }
    }
    if (passWidth > 0 && passHeight > 0) {
      images.push({ width: passWidth, height: passHeight, index: i });
    }
  }
  return images;
};

exports.getInterlaceIterator = function (width) {
  return function (x, y, pass) {
    let outerXLeftOver = x % imagePasses[pass].x.length;
    let outerX =
      ((x - outerXLeftOver) / imagePasses[pass].x.length) * 8 +
      imagePasses[pass].x[outerXLeftOver];
    let outerYLeftOver = y % imagePasses[pass].y.length;
    let outerY =
      ((y - outerYLeftOver) / imagePasses[pass].y.length) * 8 +
      imagePasses[pass].y[outerYLeftOver];
    return outerX * 4 + outerY * width * 4;
  };
};
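Editor's note (not part of the vendored file): a small sketch of the Adam7 bookkeeping above. For an 8x8 image the seven passes cover 1, 1, 2, 4, 8, 16 and 32 pixels respectively, i.e. every pixel exactly once; the require path below is only illustrative:

```js
let interlace = require("./node_modules/pngjs/lib/interlace");

let passes = interlace.getImagePasses(8, 8);
// pass dimensions (width x height): 1x1, 1x1, 2x1, 2x2, 4x2, 4x4, 8x4
let total = passes.reduce((sum, p) => sum + p.width * p.height, 0);
console.log(total); // 64 - the full 8x8 image, visited exactly once
```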
node_modules/pngjs/lib/packer-async.js (new file, generated, vendored, 50 lines)
@@ -0,0 +1,50 @@
"use strict";

let util = require("util");
let Stream = require("stream");
let constants = require("./constants");
let Packer = require("./packer");

let PackerAsync = (module.exports = function (opt) {
  Stream.call(this);

  let options = opt || {};

  this._packer = new Packer(options);
  this._deflate = this._packer.createDeflate();

  this.readable = true;
});
util.inherits(PackerAsync, Stream);

PackerAsync.prototype.pack = function (data, width, height, gamma) {
  // Signature
  this.emit("data", Buffer.from(constants.PNG_SIGNATURE));
  this.emit("data", this._packer.packIHDR(width, height));

  if (gamma) {
    this.emit("data", this._packer.packGAMA(gamma));
  }

  let filteredData = this._packer.filterData(data, width, height);

  // compress it
  this._deflate.on("error", this.emit.bind(this, "error"));

  this._deflate.on(
    "data",
    function (compressedData) {
      this.emit("data", this._packer.packIDAT(compressedData));
    }.bind(this)
  );

  this._deflate.on(
    "end",
    function () {
      this.emit("data", this._packer.packIEND());
      this.emit("end");
    }.bind(this)
  );

  this._deflate.end(filteredData);
};
node_modules/pngjs/lib/packer-sync.js (new file, generated, vendored, 56 lines)
@@ -0,0 +1,56 @@
"use strict";

let hasSyncZlib = true;
let zlib = require("zlib");
if (!zlib.deflateSync) {
  hasSyncZlib = false;
}
let constants = require("./constants");
let Packer = require("./packer");

module.exports = function (metaData, opt) {
  if (!hasSyncZlib) {
    throw new Error(
      "To use the sync capability of this library in old node versions, please pin pngjs to v2.3.0"
    );
  }

  let options = opt || {};

  let packer = new Packer(options);

  let chunks = [];

  // Signature
  chunks.push(Buffer.from(constants.PNG_SIGNATURE));

  // Header
  chunks.push(packer.packIHDR(metaData.width, metaData.height));

  if (metaData.gamma) {
    chunks.push(packer.packGAMA(metaData.gamma));
  }

  let filteredData = packer.filterData(
    metaData.data,
    metaData.width,
    metaData.height
  );

  // compress it
  let compressedData = zlib.deflateSync(
    filteredData,
    packer.getDeflateOptions()
  );
  filteredData = null;

  if (!compressedData || !compressedData.length) {
    throw new Error("bad png - invalid compressed data response");
  }
  chunks.push(packer.packIDAT(compressedData));

  // End
  chunks.push(packer.packIEND());

  return Buffer.concat(chunks);
};
129
node_modules/pngjs/lib/packer.js
generated
vendored
Normal file
@@ -0,0 +1,129 @@
"use strict";

let constants = require("./constants");
let CrcStream = require("./crc");
let bitPacker = require("./bitpacker");
let filter = require("./filter-pack");
let zlib = require("zlib");

let Packer = (module.exports = function (options) {
  this._options = options;

  options.deflateChunkSize = options.deflateChunkSize || 32 * 1024;
  options.deflateLevel =
    options.deflateLevel != null ? options.deflateLevel : 9;
  options.deflateStrategy =
    options.deflateStrategy != null ? options.deflateStrategy : 3;
  options.inputHasAlpha =
    options.inputHasAlpha != null ? options.inputHasAlpha : true;
  options.deflateFactory = options.deflateFactory || zlib.createDeflate;
  options.bitDepth = options.bitDepth || 8;
  // This is outputColorType
  options.colorType =
    typeof options.colorType === "number"
      ? options.colorType
      : constants.COLORTYPE_COLOR_ALPHA;
  options.inputColorType =
    typeof options.inputColorType === "number"
      ? options.inputColorType
      : constants.COLORTYPE_COLOR_ALPHA;

  if (
    [
      constants.COLORTYPE_GRAYSCALE,
      constants.COLORTYPE_COLOR,
      constants.COLORTYPE_COLOR_ALPHA,
      constants.COLORTYPE_ALPHA,
    ].indexOf(options.colorType) === -1
  ) {
    throw new Error(
      "option color type:" + options.colorType + " is not supported at present"
    );
  }
  if (
    [
      constants.COLORTYPE_GRAYSCALE,
      constants.COLORTYPE_COLOR,
      constants.COLORTYPE_COLOR_ALPHA,
      constants.COLORTYPE_ALPHA,
    ].indexOf(options.inputColorType) === -1
  ) {
    throw new Error(
      "option input color type:" +
        options.inputColorType +
        " is not supported at present"
    );
  }
  if (options.bitDepth !== 8 && options.bitDepth !== 16) {
    throw new Error(
      "option bit depth:" + options.bitDepth + " is not supported at present"
    );
  }
});

Packer.prototype.getDeflateOptions = function () {
  return {
    chunkSize: this._options.deflateChunkSize,
    level: this._options.deflateLevel,
    strategy: this._options.deflateStrategy,
  };
};

Packer.prototype.createDeflate = function () {
  return this._options.deflateFactory(this.getDeflateOptions());
};

Packer.prototype.filterData = function (data, width, height) {
  // convert to correct format for filtering (e.g. right bpp and bit depth)
  let packedData = bitPacker(data, width, height, this._options);

  // filter pixel data
  let bpp = constants.COLORTYPE_TO_BPP_MAP[this._options.colorType];
  let filteredData = filter(packedData, width, height, this._options, bpp);
  return filteredData;
};

Packer.prototype._packChunk = function (type, data) {
  let len = data ? data.length : 0;
  let buf = Buffer.alloc(len + 12);

  buf.writeUInt32BE(len, 0);
  buf.writeUInt32BE(type, 4);

  if (data) {
    data.copy(buf, 8);
  }

  buf.writeInt32BE(
    CrcStream.crc32(buf.slice(4, buf.length - 4)),
    buf.length - 4
  );
  return buf;
};

Packer.prototype.packGAMA = function (gamma) {
  let buf = Buffer.alloc(4);
  buf.writeUInt32BE(Math.floor(gamma * constants.GAMMA_DIVISION), 0);
  return this._packChunk(constants.TYPE_gAMA, buf);
};

Packer.prototype.packIHDR = function (width, height) {
  let buf = Buffer.alloc(13);
  buf.writeUInt32BE(width, 0);
  buf.writeUInt32BE(height, 4);
  buf[8] = this._options.bitDepth; // Bit depth
  buf[9] = this._options.colorType; // colorType
  buf[10] = 0; // compression
  buf[11] = 0; // filter
  buf[12] = 0; // interlace

  return this._packChunk(constants.TYPE_IHDR, buf);
};

Packer.prototype.packIDAT = function (data) {
  return this._packChunk(constants.TYPE_IDAT, data);
};

Packer.prototype.packIEND = function () {
  return this._packChunk(constants.TYPE_IEND, null);
};
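Each chunk writer above goes through _packChunk, which lays a chunk out as a 4-byte big-endian length, a 4-byte type, the payload, and a 4-byte CRC computed over the type and payload. An illustrative sketch that reads those fields back out of a packed IHDR chunk (require path and option values are assumptions, not part of this commit):

// Illustrative only: inspect the chunk layout produced by Packer.packIHDR.
const Packer = require("pngjs/lib/packer"); // path assumed from this commit

const packer = new Packer({}); // defaults: 8-bit RGBA output
const ihdr = packer.packIHDR(4, 2); // 4x2 image

console.log(ihdr.readUInt32BE(0)); // 13, the IHDR payload length
console.log(ihdr.toString("ascii", 4, 8)); // "IHDR", the chunk type
console.log(ihdr.readUInt32BE(8), ihdr.readUInt32BE(12)); // 4 2, width and height
// The final 4 bytes hold the CRC over the type and payload bytes.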
16
node_modules/pngjs/lib/paeth-predictor.js
generated
vendored
Normal file
@@ -0,0 +1,16 @@
"use strict";

module.exports = function paethPredictor(left, above, upLeft) {
  let paeth = left + above - upLeft;
  let pLeft = Math.abs(paeth - left);
  let pAbove = Math.abs(paeth - above);
  let pUpLeft = Math.abs(paeth - upLeft);

  if (pLeft <= pAbove && pLeft <= pUpLeft) {
    return left;
  }
  if (pAbove <= pUpLeft) {
    return above;
  }
  return upLeft;
};
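As a worked example of the predictor above: with left = 100, above = 200 and upLeft = 90, the estimate is 100 + 200 - 90 = 210; the distances to the three neighbours are 110, 10 and 120, so the closest one, above (200), is returned. An illustrative sketch (require path assumed from this commit):

// Illustrative only: exercising the Paeth predictor used by the PNG filters.
const paethPredictor = require("pngjs/lib/paeth-predictor"); // path assumed

console.log(paethPredictor(100, 200, 90)); // 200 - "above" is closest to the estimate 210
console.log(paethPredictor(10, 20, 30)); // 10 - "left" is closest to the estimate 0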
169
node_modules/pngjs/lib/parser-async.js
generated
vendored
Normal file
@@ -0,0 +1,169 @@
"use strict";

let util = require("util");
let zlib = require("zlib");
let ChunkStream = require("./chunkstream");
let FilterAsync = require("./filter-parse-async");
let Parser = require("./parser");
let bitmapper = require("./bitmapper");
let formatNormaliser = require("./format-normaliser");

let ParserAsync = (module.exports = function (options) {
  ChunkStream.call(this);

  this._parser = new Parser(options, {
    read: this.read.bind(this),
    error: this._handleError.bind(this),
    metadata: this._handleMetaData.bind(this),
    gamma: this.emit.bind(this, "gamma"),
    palette: this._handlePalette.bind(this),
    transColor: this._handleTransColor.bind(this),
    finished: this._finished.bind(this),
    inflateData: this._inflateData.bind(this),
    simpleTransparency: this._simpleTransparency.bind(this),
    headersFinished: this._headersFinished.bind(this),
  });
  this._options = options;
  this.writable = true;

  this._parser.start();
});
util.inherits(ParserAsync, ChunkStream);

ParserAsync.prototype._handleError = function (err) {
  this.emit("error", err);

  this.writable = false;

  this.destroy();

  if (this._inflate && this._inflate.destroy) {
    this._inflate.destroy();
  }

  if (this._filter) {
    this._filter.destroy();
    // For backward compatibility with Node 7 and below.
    // Suppress errors due to _inflate calling write() even after
    // it's destroy()'ed.
    this._filter.on("error", function () {});
  }

  this.errord = true;
};

ParserAsync.prototype._inflateData = function (data) {
  if (!this._inflate) {
    if (this._bitmapInfo.interlace) {
      this._inflate = zlib.createInflate();

      this._inflate.on("error", this.emit.bind(this, "error"));
      this._filter.on("complete", this._complete.bind(this));

      this._inflate.pipe(this._filter);
    } else {
      let rowSize =
        ((this._bitmapInfo.width *
          this._bitmapInfo.bpp *
          this._bitmapInfo.depth +
          7) >>
          3) +
        1;
      let imageSize = rowSize * this._bitmapInfo.height;
      let chunkSize = Math.max(imageSize, zlib.Z_MIN_CHUNK);

      this._inflate = zlib.createInflate({ chunkSize: chunkSize });
      let leftToInflate = imageSize;

      let emitError = this.emit.bind(this, "error");
      this._inflate.on("error", function (err) {
        if (!leftToInflate) {
          return;
        }

        emitError(err);
      });
      this._filter.on("complete", this._complete.bind(this));

      let filterWrite = this._filter.write.bind(this._filter);
      this._inflate.on("data", function (chunk) {
        if (!leftToInflate) {
          return;
        }

        if (chunk.length > leftToInflate) {
          chunk = chunk.slice(0, leftToInflate);
        }

        leftToInflate -= chunk.length;

        filterWrite(chunk);
      });

      this._inflate.on("end", this._filter.end.bind(this._filter));
    }
  }
  this._inflate.write(data);
};

ParserAsync.prototype._handleMetaData = function (metaData) {
  this._metaData = metaData;
  this._bitmapInfo = Object.create(metaData);

  this._filter = new FilterAsync(this._bitmapInfo);
};

ParserAsync.prototype._handleTransColor = function (transColor) {
  this._bitmapInfo.transColor = transColor;
};

ParserAsync.prototype._handlePalette = function (palette) {
  this._bitmapInfo.palette = palette;
};

ParserAsync.prototype._simpleTransparency = function () {
  this._metaData.alpha = true;
};

ParserAsync.prototype._headersFinished = function () {
  // Up until this point, we don't know if we have a tRNS chunk (alpha)
  // so we can't emit metadata any earlier
  this.emit("metadata", this._metaData);
};

ParserAsync.prototype._finished = function () {
  if (this.errord) {
    return;
  }

  if (!this._inflate) {
    this.emit("error", "No Inflate block");
  } else {
    // no more data to inflate
    this._inflate.end();
  }
};

ParserAsync.prototype._complete = function (filteredData) {
  if (this.errord) {
    return;
  }

  let normalisedBitmapData;

  try {
    let bitmapData = bitmapper.dataToBitMap(filteredData, this._bitmapInfo);

    normalisedBitmapData = formatNormaliser(
      bitmapData,
      this._bitmapInfo,
      this._options.skipRescale
    );
    bitmapData = null;
  } catch (ex) {
    this._handleError(ex);
    return;
  }

  this.emit("parsed", normalisedBitmapData);
};
112
node_modules/pngjs/lib/parser-sync.js
generated
vendored
Normal file
@@ -0,0 +1,112 @@
"use strict";

let hasSyncZlib = true;
let zlib = require("zlib");
let inflateSync = require("./sync-inflate");
if (!zlib.deflateSync) {
  hasSyncZlib = false;
}
let SyncReader = require("./sync-reader");
let FilterSync = require("./filter-parse-sync");
let Parser = require("./parser");
let bitmapper = require("./bitmapper");
let formatNormaliser = require("./format-normaliser");

module.exports = function (buffer, options) {
  if (!hasSyncZlib) {
    throw new Error(
      "To use the sync capability of this library in old node versions, please pin pngjs to v2.3.0"
    );
  }

  let err;
  function handleError(_err_) {
    err = _err_;
  }

  let metaData;
  function handleMetaData(_metaData_) {
    metaData = _metaData_;
  }

  function handleTransColor(transColor) {
    metaData.transColor = transColor;
  }

  function handlePalette(palette) {
    metaData.palette = palette;
  }

  function handleSimpleTransparency() {
    metaData.alpha = true;
  }

  let gamma;
  function handleGamma(_gamma_) {
    gamma = _gamma_;
  }

  let inflateDataList = [];
  function handleInflateData(inflatedData) {
    inflateDataList.push(inflatedData);
  }

  let reader = new SyncReader(buffer);

  let parser = new Parser(options, {
    read: reader.read.bind(reader),
    error: handleError,
    metadata: handleMetaData,
    gamma: handleGamma,
    palette: handlePalette,
    transColor: handleTransColor,
    inflateData: handleInflateData,
    simpleTransparency: handleSimpleTransparency,
  });

  parser.start();
  reader.process();

  if (err) {
    throw err;
  }

  //join together the inflate datas
  let inflateData = Buffer.concat(inflateDataList);
  inflateDataList.length = 0;

  let inflatedData;
  if (metaData.interlace) {
    inflatedData = zlib.inflateSync(inflateData);
  } else {
    let rowSize =
      ((metaData.width * metaData.bpp * metaData.depth + 7) >> 3) + 1;
    let imageSize = rowSize * metaData.height;
    inflatedData = inflateSync(inflateData, {
      chunkSize: imageSize,
      maxLength: imageSize,
    });
  }
  inflateData = null;

  if (!inflatedData || !inflatedData.length) {
    throw new Error("bad png - invalid inflate data response");
  }

  let unfilteredData = FilterSync.process(inflatedData, metaData);
  inflateData = null;

  let bitmapData = bitmapper.dataToBitMap(unfilteredData, metaData);
  unfilteredData = null;

  let normalisedBitmapData = formatNormaliser(
    bitmapData,
    metaData,
    options.skipRescale
  );

  metaData.data = normalisedBitmapData;
  metaData.gamma = gamma || 0;

  return metaData;
};
290
node_modules/pngjs/lib/parser.js
generated
vendored
Normal file
@@ -0,0 +1,290 @@
"use strict";

let constants = require("./constants");
let CrcCalculator = require("./crc");

let Parser = (module.exports = function (options, dependencies) {
  this._options = options;
  options.checkCRC = options.checkCRC !== false;

  this._hasIHDR = false;
  this._hasIEND = false;
  this._emittedHeadersFinished = false;

  // input flags/metadata
  this._palette = [];
  this._colorType = 0;

  this._chunks = {};
  this._chunks[constants.TYPE_IHDR] = this._handleIHDR.bind(this);
  this._chunks[constants.TYPE_IEND] = this._handleIEND.bind(this);
  this._chunks[constants.TYPE_IDAT] = this._handleIDAT.bind(this);
  this._chunks[constants.TYPE_PLTE] = this._handlePLTE.bind(this);
  this._chunks[constants.TYPE_tRNS] = this._handleTRNS.bind(this);
  this._chunks[constants.TYPE_gAMA] = this._handleGAMA.bind(this);

  this.read = dependencies.read;
  this.error = dependencies.error;
  this.metadata = dependencies.metadata;
  this.gamma = dependencies.gamma;
  this.transColor = dependencies.transColor;
  this.palette = dependencies.palette;
  this.parsed = dependencies.parsed;
  this.inflateData = dependencies.inflateData;
  this.finished = dependencies.finished;
  this.simpleTransparency = dependencies.simpleTransparency;
  this.headersFinished = dependencies.headersFinished || function () {};
});

Parser.prototype.start = function () {
  this.read(constants.PNG_SIGNATURE.length, this._parseSignature.bind(this));
};

Parser.prototype._parseSignature = function (data) {
  let signature = constants.PNG_SIGNATURE;

  for (let i = 0; i < signature.length; i++) {
    if (data[i] !== signature[i]) {
      this.error(new Error("Invalid file signature"));
      return;
    }
  }
  this.read(8, this._parseChunkBegin.bind(this));
};

Parser.prototype._parseChunkBegin = function (data) {
  // chunk content length
  let length = data.readUInt32BE(0);

  // chunk type
  let type = data.readUInt32BE(4);
  let name = "";
  for (let i = 4; i < 8; i++) {
    name += String.fromCharCode(data[i]);
  }

  //console.log('chunk ', name, length);

  // chunk flags
  let ancillary = Boolean(data[4] & 0x20); // or critical
  // priv = Boolean(data[5] & 0x20), // or public
  // safeToCopy = Boolean(data[7] & 0x20); // or unsafe

  if (!this._hasIHDR && type !== constants.TYPE_IHDR) {
    this.error(new Error("Expected IHDR on beggining"));
    return;
  }

  this._crc = new CrcCalculator();
  this._crc.write(Buffer.from(name));

  if (this._chunks[type]) {
    return this._chunks[type](length);
  }

  if (!ancillary) {
    this.error(new Error("Unsupported critical chunk type " + name));
    return;
  }

  this.read(length + 4, this._skipChunk.bind(this));
};

Parser.prototype._skipChunk = function (/*data*/) {
  this.read(8, this._parseChunkBegin.bind(this));
};

Parser.prototype._handleChunkEnd = function () {
  this.read(4, this._parseChunkEnd.bind(this));
};

Parser.prototype._parseChunkEnd = function (data) {
  let fileCrc = data.readInt32BE(0);
  let calcCrc = this._crc.crc32();

  // check CRC
  if (this._options.checkCRC && calcCrc !== fileCrc) {
    this.error(new Error("Crc error - " + fileCrc + " - " + calcCrc));
    return;
  }

  if (!this._hasIEND) {
    this.read(8, this._parseChunkBegin.bind(this));
  }
};

Parser.prototype._handleIHDR = function (length) {
  this.read(length, this._parseIHDR.bind(this));
};
Parser.prototype._parseIHDR = function (data) {
  this._crc.write(data);

  let width = data.readUInt32BE(0);
  let height = data.readUInt32BE(4);
  let depth = data[8];
  let colorType = data[9]; // bits: 1 palette, 2 color, 4 alpha
  let compr = data[10];
  let filter = data[11];
  let interlace = data[12];

  // console.log('    width', width, 'height', height,
  //   'depth', depth, 'colorType', colorType,
  //   'compr', compr, 'filter', filter, 'interlace', interlace
  // );

  if (
    depth !== 8 &&
    depth !== 4 &&
    depth !== 2 &&
    depth !== 1 &&
    depth !== 16
  ) {
    this.error(new Error("Unsupported bit depth " + depth));
    return;
  }
  if (!(colorType in constants.COLORTYPE_TO_BPP_MAP)) {
    this.error(new Error("Unsupported color type"));
    return;
  }
  if (compr !== 0) {
    this.error(new Error("Unsupported compression method"));
    return;
  }
  if (filter !== 0) {
    this.error(new Error("Unsupported filter method"));
    return;
  }
  if (interlace !== 0 && interlace !== 1) {
    this.error(new Error("Unsupported interlace method"));
    return;
  }

  this._colorType = colorType;

  let bpp = constants.COLORTYPE_TO_BPP_MAP[this._colorType];

  this._hasIHDR = true;

  this.metadata({
    width: width,
    height: height,
    depth: depth,
    interlace: Boolean(interlace),
    palette: Boolean(colorType & constants.COLORTYPE_PALETTE),
    color: Boolean(colorType & constants.COLORTYPE_COLOR),
    alpha: Boolean(colorType & constants.COLORTYPE_ALPHA),
    bpp: bpp,
    colorType: colorType,
  });

  this._handleChunkEnd();
};

Parser.prototype._handlePLTE = function (length) {
  this.read(length, this._parsePLTE.bind(this));
};
Parser.prototype._parsePLTE = function (data) {
  this._crc.write(data);

  let entries = Math.floor(data.length / 3);
  // console.log('Palette:', entries);

  for (let i = 0; i < entries; i++) {
    this._palette.push([data[i * 3], data[i * 3 + 1], data[i * 3 + 2], 0xff]);
  }

  this.palette(this._palette);

  this._handleChunkEnd();
};

Parser.prototype._handleTRNS = function (length) {
  this.simpleTransparency();
  this.read(length, this._parseTRNS.bind(this));
};
Parser.prototype._parseTRNS = function (data) {
  this._crc.write(data);

  // palette
  if (this._colorType === constants.COLORTYPE_PALETTE_COLOR) {
    if (this._palette.length === 0) {
      this.error(new Error("Transparency chunk must be after palette"));
      return;
    }
    if (data.length > this._palette.length) {
      this.error(new Error("More transparent colors than palette size"));
      return;
    }
    for (let i = 0; i < data.length; i++) {
      this._palette[i][3] = data[i];
    }
    this.palette(this._palette);
  }

  // for colorType 0 (grayscale) and 2 (rgb)
  // there might be one gray/color defined as transparent
  if (this._colorType === constants.COLORTYPE_GRAYSCALE) {
    // grey, 2 bytes
    this.transColor([data.readUInt16BE(0)]);
  }
  if (this._colorType === constants.COLORTYPE_COLOR) {
    this.transColor([
      data.readUInt16BE(0),
      data.readUInt16BE(2),
      data.readUInt16BE(4),
    ]);
  }

  this._handleChunkEnd();
};

Parser.prototype._handleGAMA = function (length) {
  this.read(length, this._parseGAMA.bind(this));
};
Parser.prototype._parseGAMA = function (data) {
  this._crc.write(data);
  this.gamma(data.readUInt32BE(0) / constants.GAMMA_DIVISION);

  this._handleChunkEnd();
};

Parser.prototype._handleIDAT = function (length) {
  if (!this._emittedHeadersFinished) {
    this._emittedHeadersFinished = true;
    this.headersFinished();
  }
  this.read(-length, this._parseIDAT.bind(this, length));
};
Parser.prototype._parseIDAT = function (length, data) {
  this._crc.write(data);

  if (
    this._colorType === constants.COLORTYPE_PALETTE_COLOR &&
    this._palette.length === 0
  ) {
    throw new Error("Expected palette not found");
  }

  this.inflateData(data);
  let leftOverLength = length - data.length;

  if (leftOverLength > 0) {
    this._handleIDAT(leftOverLength);
  } else {
    this._handleChunkEnd();
  }
};

Parser.prototype._handleIEND = function (length) {
  this.read(length, this._parseIEND.bind(this));
};
Parser.prototype._parseIEND = function (data) {
  this._crc.write(data);

  this._hasIEND = true;
  this._handleChunkEnd();

  if (this.finished) {
    this.finished();
  }
};
12
node_modules/pngjs/lib/png-sync.js
generated
vendored
Normal file
@@ -0,0 +1,12 @@
"use strict";

let parse = require("./parser-sync");
let pack = require("./packer-sync");

exports.read = function (buffer, options) {
  return parse(buffer, options || {});
};

exports.write = function (png, options) {
  return pack(png, options);
};
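These two exports back the PNG.sync.read / PNG.sync.write API. A minimal usage sketch, assuming the package is consumed as vendored in this commit; the file names are placeholders:

// Illustrative only: synchronous decode, edit, re-encode.
const fs = require("fs");
const PNG = require("pngjs").PNG;

const png = PNG.sync.read(fs.readFileSync("in.png")); // RGBA buffer in png.data
for (let i = 0; i < png.data.length; i += 4) {
  png.data[i] = 255 - png.data[i]; // invert the red channel
}
fs.writeFileSync("out.png", PNG.sync.write(png));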
194
node_modules/pngjs/lib/png.js
generated
vendored
Normal file
@@ -0,0 +1,194 @@
"use strict";

let util = require("util");
let Stream = require("stream");
let Parser = require("./parser-async");
let Packer = require("./packer-async");
let PNGSync = require("./png-sync");

let PNG = (exports.PNG = function (options) {
  Stream.call(this);

  options = options || {}; // eslint-disable-line no-param-reassign

  // coerce pixel dimensions to integers (also coerces undefined -> 0):
  this.width = options.width | 0;
  this.height = options.height | 0;

  this.data =
    this.width > 0 && this.height > 0
      ? Buffer.alloc(4 * this.width * this.height)
      : null;

  if (options.fill && this.data) {
    this.data.fill(0);
  }

  this.gamma = 0;
  this.readable = this.writable = true;

  this._parser = new Parser(options);

  this._parser.on("error", this.emit.bind(this, "error"));
  this._parser.on("close", this._handleClose.bind(this));
  this._parser.on("metadata", this._metadata.bind(this));
  this._parser.on("gamma", this._gamma.bind(this));
  this._parser.on(
    "parsed",
    function (data) {
      this.data = data;
      this.emit("parsed", data);
    }.bind(this)
  );

  this._packer = new Packer(options);
  this._packer.on("data", this.emit.bind(this, "data"));
  this._packer.on("end", this.emit.bind(this, "end"));
  this._parser.on("close", this._handleClose.bind(this));
  this._packer.on("error", this.emit.bind(this, "error"));
});
util.inherits(PNG, Stream);

PNG.sync = PNGSync;

PNG.prototype.pack = function () {
  if (!this.data || !this.data.length) {
    this.emit("error", "No data provided");
    return this;
  }

  process.nextTick(
    function () {
      this._packer.pack(this.data, this.width, this.height, this.gamma);
    }.bind(this)
  );

  return this;
};

PNG.prototype.parse = function (data, callback) {
  if (callback) {
    let onParsed, onError;

    onParsed = function (parsedData) {
      this.removeListener("error", onError);

      this.data = parsedData;
      callback(null, this);
    }.bind(this);

    onError = function (err) {
      this.removeListener("parsed", onParsed);

      callback(err, null);
    }.bind(this);

    this.once("parsed", onParsed);
    this.once("error", onError);
  }

  this.end(data);
  return this;
};

PNG.prototype.write = function (data) {
  this._parser.write(data);
  return true;
};

PNG.prototype.end = function (data) {
  this._parser.end(data);
};

PNG.prototype._metadata = function (metadata) {
  this.width = metadata.width;
  this.height = metadata.height;

  this.emit("metadata", metadata);
};

PNG.prototype._gamma = function (gamma) {
  this.gamma = gamma;
};

PNG.prototype._handleClose = function () {
  if (!this._parser.writable && !this._packer.readable) {
    this.emit("close");
  }
};

PNG.bitblt = function (src, dst, srcX, srcY, width, height, deltaX, deltaY) {
  // eslint-disable-line max-params
  // coerce pixel dimensions to integers (also coerces undefined -> 0):
  /* eslint-disable no-param-reassign */
  srcX |= 0;
  srcY |= 0;
  width |= 0;
  height |= 0;
  deltaX |= 0;
  deltaY |= 0;
  /* eslint-enable no-param-reassign */

  if (
    srcX > src.width ||
    srcY > src.height ||
    srcX + width > src.width ||
    srcY + height > src.height
  ) {
    throw new Error("bitblt reading outside image");
  }

  if (
    deltaX > dst.width ||
    deltaY > dst.height ||
    deltaX + width > dst.width ||
    deltaY + height > dst.height
  ) {
    throw new Error("bitblt writing outside image");
  }

  for (let y = 0; y < height; y++) {
    src.data.copy(
      dst.data,
      ((deltaY + y) * dst.width + deltaX) << 2,
      ((srcY + y) * src.width + srcX) << 2,
      ((srcY + y) * src.width + srcX + width) << 2
    );
  }
};

PNG.prototype.bitblt = function (
  dst,
  srcX,
  srcY,
  width,
  height,
  deltaX,
  deltaY
) {
  // eslint-disable-line max-params

  PNG.bitblt(this, dst, srcX, srcY, width, height, deltaX, deltaY);
  return this;
};

PNG.adjustGamma = function (src) {
  if (src.gamma) {
    for (let y = 0; y < src.height; y++) {
      for (let x = 0; x < src.width; x++) {
        let idx = (src.width * y + x) << 2;

        for (let i = 0; i < 3; i++) {
          let sample = src.data[idx + i] / 255;
          sample = Math.pow(sample, 1 / 2.2 / src.gamma);
          src.data[idx + i] = Math.round(sample * 255);
        }
      }
    }
    src.gamma = 0;
  }
};

PNG.prototype.adjustGamma = function () {
  PNG.adjustGamma(this);
};
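PNG above is a duplex stream: data written to it feeds the async parser, and pack() drives the async packer. A minimal streaming sketch, assuming the vendored package and placeholder file names:

// Illustrative only: stream-decode a PNG, modify it, stream-encode it again.
const fs = require("fs");
const PNG = require("pngjs").PNG;

fs.createReadStream("in.png")
  .pipe(new PNG())
  .on("parsed", function () {
    // this.data is an RGBA buffer of width * height * 4 bytes
    for (let i = 0; i < this.data.length; i += 4) {
      this.data[i + 3] = 255; // force full opacity
    }
    this.pack().pipe(fs.createWriteStream("out.png"));
  });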
168
node_modules/pngjs/lib/sync-inflate.js
generated
vendored
Normal file
@@ -0,0 +1,168 @@
"use strict";

let assert = require("assert").ok;
let zlib = require("zlib");
let util = require("util");

let kMaxLength = require("buffer").kMaxLength;

function Inflate(opts) {
  if (!(this instanceof Inflate)) {
    return new Inflate(opts);
  }

  if (opts && opts.chunkSize < zlib.Z_MIN_CHUNK) {
    opts.chunkSize = zlib.Z_MIN_CHUNK;
  }

  zlib.Inflate.call(this, opts);

  // Node 8 --> 9 compatibility check
  this._offset = this._offset === undefined ? this._outOffset : this._offset;
  this._buffer = this._buffer || this._outBuffer;

  if (opts && opts.maxLength != null) {
    this._maxLength = opts.maxLength;
  }
}

function createInflate(opts) {
  return new Inflate(opts);
}

function _close(engine, callback) {
  if (callback) {
    process.nextTick(callback);
  }

  // Caller may invoke .close after a zlib error (which will null _handle).
  if (!engine._handle) {
    return;
  }

  engine._handle.close();
  engine._handle = null;
}

Inflate.prototype._processChunk = function (chunk, flushFlag, asyncCb) {
  if (typeof asyncCb === "function") {
    return zlib.Inflate._processChunk.call(this, chunk, flushFlag, asyncCb);
  }

  let self = this;

  let availInBefore = chunk && chunk.length;
  let availOutBefore = this._chunkSize - this._offset;
  let leftToInflate = this._maxLength;
  let inOff = 0;

  let buffers = [];
  let nread = 0;

  let error;
  this.on("error", function (err) {
    error = err;
  });

  function handleChunk(availInAfter, availOutAfter) {
    if (self._hadError) {
      return;
    }

    let have = availOutBefore - availOutAfter;
    assert(have >= 0, "have should not go down");

    if (have > 0) {
      let out = self._buffer.slice(self._offset, self._offset + have);
      self._offset += have;

      if (out.length > leftToInflate) {
        out = out.slice(0, leftToInflate);
      }

      buffers.push(out);
      nread += out.length;
      leftToInflate -= out.length;

      if (leftToInflate === 0) {
        return false;
      }
    }

    if (availOutAfter === 0 || self._offset >= self._chunkSize) {
      availOutBefore = self._chunkSize;
      self._offset = 0;
      self._buffer = Buffer.allocUnsafe(self._chunkSize);
    }

    if (availOutAfter === 0) {
      inOff += availInBefore - availInAfter;
      availInBefore = availInAfter;

      return true;
    }

    return false;
  }

  assert(this._handle, "zlib binding closed");
  let res;
  do {
    res = this._handle.writeSync(
      flushFlag,
      chunk, // in
      inOff, // in_off
      availInBefore, // in_len
      this._buffer, // out
      this._offset, //out_off
      availOutBefore
    ); // out_len
    // Node 8 --> 9 compatibility check
    res = res || this._writeState;
  } while (!this._hadError && handleChunk(res[0], res[1]));

  if (this._hadError) {
    throw error;
  }

  if (nread >= kMaxLength) {
    _close(this);
    throw new RangeError(
      "Cannot create final Buffer. It would be larger than 0x" +
        kMaxLength.toString(16) +
        " bytes"
    );
  }

  let buf = Buffer.concat(buffers, nread);
  _close(this);

  return buf;
};

util.inherits(Inflate, zlib.Inflate);

function zlibBufferSync(engine, buffer) {
  if (typeof buffer === "string") {
    buffer = Buffer.from(buffer);
  }
  if (!(buffer instanceof Buffer)) {
    throw new TypeError("Not a string or buffer");
  }

  let flushFlag = engine._finishFlushFlag;
  if (flushFlag == null) {
    flushFlag = zlib.Z_FINISH;
  }

  return engine._processChunk(buffer, flushFlag);
}

function inflateSync(buffer, opts) {
  return zlibBufferSync(new Inflate(opts), buffer);
}

module.exports = exports = inflateSync;
exports.Inflate = Inflate;
exports.createInflate = createInflate;
exports.inflateSync = inflateSync;
45
node_modules/pngjs/lib/sync-reader.js
generated
vendored
Normal file
@@ -0,0 +1,45 @@
"use strict";

let SyncReader = (module.exports = function (buffer) {
  this._buffer = buffer;
  this._reads = [];
});

SyncReader.prototype.read = function (length, callback) {
  this._reads.push({
    length: Math.abs(length), // if length < 0 then at most this length
    allowLess: length < 0,
    func: callback,
  });
};

SyncReader.prototype.process = function () {
  // as long as there is any data and read requests
  while (this._reads.length > 0 && this._buffer.length) {
    let read = this._reads[0];

    if (
      this._buffer.length &&
      (this._buffer.length >= read.length || read.allowLess)
    ) {
      // ok there is any data so that we can satisfy this request
      this._reads.shift(); // == read

      let buf = this._buffer;

      this._buffer = buf.slice(read.length);

      read.func.call(this, buf.slice(0, read.length));
    } else {
      break;
    }
  }

  if (this._reads.length > 0) {
    throw new Error("There are some read requests waitng on finished stream");
  }

  if (this._buffer.length > 0) {
    throw new Error("unrecognised content at end of stream");
  }
};
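SyncReader queues read requests and then serves them all in order from one in-memory buffer, which is how the sync parser above consumes its input. An illustrative sketch (require path assumed from this commit):

// Illustrative only: queued reads are satisfied in order by process().
const SyncReader = require("pngjs/lib/sync-reader"); // path assumed

const reader = new SyncReader(Buffer.from([1, 2, 3, 4, 5]));
reader.read(2, (buf) => console.log(buf)); // <Buffer 01 02>
reader.read(3, (buf) => console.log(buf)); // <Buffer 03 04 05>
reader.process(); // throws if requests remain or unread bytes are left over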
76
node_modules/pngjs/package.json
generated
vendored
Normal file
@@ -0,0 +1,76 @@
{
  "name": "pngjs",
  "version": "7.0.0",
  "description": "PNG encoder/decoder in pure JS, supporting any bit size & interlace, async & sync with full test suite.",
  "contributors": [
    "Alexandre Paré",
    "Gaurav Mali",
    "Gusts Kaksis",
    "Kuba Niegowski",
    "Luke Page",
    "Pietajan De Potter",
    "Steven Sojka",
    "liangzeng",
    "Michael Vogt",
    "Xin-Xin Wang",
    "toriningen",
    "Eugene Kulabuhov"
  ],
  "homepage": "https://github.com/lukeapage/pngjs",
  "keywords": [
    "PNG",
    "decoder",
    "encoder",
    "js-png",
    "node-png",
    "parser",
    "png",
    "png-js",
    "png-parse",
    "pngjs"
  ],
  "engines": {
    "node": ">=14.19.0"
  },
  "main": "./lib/png.js",
  "directories": {
    "lib": "lib",
    "example": "examples",
    "test": "test"
  },
  "files": [
    "browser.js",
    "lib/"
  ],
  "scripts": {
    "build": "yarn prepublish",
    "prepublish": "yarn browserify",
    "browserify": "browserify lib/png.js --standalone png > browser.js",
    "coverage": "nyc --reporter=lcov --reporter=text-summary tape test/*-spec.js",
    "test": "yarn lint && yarn prettier:check && tape test/*-spec.js | tap-dot && node test/run-compare",
    "lint": "eslint .",
    "prettier:write": "prettier --write .",
    "prettier:check": "prettier --check ."
  },
  "repository": {
    "type": "git",
    "url": "git://github.com/pngjs/pngjs.git"
  },
  "license": "MIT",
  "bugs": {
    "url": "https://github.com/pngjs/pngjs/issues"
  },
  "devDependencies": {
    "browserify": "17.0.0",
    "buffer-equal": "1.0.1",
    "connect": "3.7.0",
    "eslint": "8.34.0",
    "eslint-config-prettier": "8.6.0",
    "nyc": "15.1.0",
    "prettier": "2.8.4",
    "puppeteer": "19.7.1",
    "serve-static": "1.15.0",
    "tap-dot": "2.0.0",
    "tape": "5.6.3"
  }
}