Commit

fix js errors

ZJONSSON committed May 12, 2024
1 parent 9ef3ade commit d5622f8
Showing 13 changed files with 52 additions and 40 deletions.
1 change: 1 addition & 0 deletions .github/workflows/test.yml
@@ -19,6 +19,7 @@ jobs:
node-version: 18.x
- run: npm install
- run: npx eslint .
- run: npx tsc -p jsconfig.json

test:
runs-on: ubuntu-latest
1 change: 1 addition & 0 deletions .gitignore
@@ -1,6 +1,5 @@
/.idea
/node_modules
/test.js
/.nyc_output/
/coverage/
.tap/
6 changes: 3 additions & 3 deletions jsconfig.json
@@ -3,11 +3,11 @@
"checkJs": true,
"target": "ES2022",

Check failure on line 4 in jsconfig.json (GitHub Actions / lint): Argument for '--target' option must be: 'es3', 'es5', 'es6', 'es2015', 'es2016', 'es2017', 'es2018', 'es2019', 'es2020', 'esnext'.
"moduleResolution":"node",
"types": ["node"]
"types": ["node"],
"maxNodeModuleJsDepth": 0
},
"exclude": [
"node_modules",
"test",
"node_modules/",
"coverage"
]
}
7 changes: 5 additions & 2 deletions lib/Decrypt.js
@@ -21,7 +21,7 @@ function crc(ch, crc) {

if (ch.charCodeAt)
ch = ch.charCodeAt(0);

//@ts-ignore
return (bigInt(crc).shiftRight(8).and(0xffffff)).xor(table[bigInt(crc).xor(ch).and(0xff)]).value;
}

@@ -36,21 +36,24 @@ function Decrypt() {

Decrypt.prototype.update = function(h) {
this.key0 = crc(h, this.key0);
//@ts-ignore
this.key1 = bigInt(this.key0).and(255).and(4294967295).add(this.key1);
//@ts-ignore
this.key1 = bigInt(this.key1).multiply(134775813).add(1).and(4294967295).value;
this.key2 = crc(bigInt(this.key1).shiftRight(24).and(255), this.key2);
};


Decrypt.prototype.decryptByte = function(c) {
const k = bigInt(this.key2).or(2);
//@ts-ignore
c = c ^ bigInt(k).multiply(bigInt(k^1)).shiftRight(8).and(255);
this.update(c);
return c;
};

Decrypt.prototype.stream = function() {
const stream = Stream.Transform(),
const stream = new Stream.Transform(),
self = this;

stream._transform = function(d, e, cb) {
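Note: a recurring change in this commit is replacing factory-style calls like Stream.Transform() with explicit construction via new. A minimal illustrative sketch (not from the repository), assuming the goal is to satisfy tsc under checkJs, where the stream type declarations are classes and are not callable without new:

// Illustrative sketch, not part of the commit: constructing Node stream
// instances with `new` so the checkJs type check accepts the calls.
const Stream = require('stream');

const pass = new Stream.PassThrough();
const transform = new Stream.Transform({
  // identity transform: pass each chunk through unchanged
  transform(chunk, encoding, callback) { callback(null, chunk); }
});

pass.pipe(transform);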
18 changes: 8 additions & 10 deletions lib/NoopStream.js
@@ -1,14 +1,12 @@
const Stream = require('stream');
const util = require('util');
function NoopStream() {
if (!(this instanceof NoopStream)) {
return new NoopStream();
}
Stream.Transform.call(this);
class NoopStream extends Stream.Transform {
_transform(d, e, cb) { cb() ;};
promise() {
return new Promise((resolve, reject) => {
this.on('finish', resolve);
this.on('error', reject);
});
};
}

util.inherits(NoopStream, Stream.Transform);

NoopStream.prototype._transform = function(d, e, cb) { cb() ;};

module.exports = NoopStream;
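NoopStream is now a class with a built-in promise() helper, so callers no longer need to wire up their own 'finish'/'error' listeners. A hedged usage sketch (the file path and function name are hypothetical, not part of the commit):

// Hypothetical usage of the refactored NoopStream: drain a readable stream
// and wait for it to finish via the new promise() method.
const fs = require('fs');
const NoopStream = require('./lib/NoopStream');

async function drainFile(filePath) {
  const noop = new NoopStream();   // class form, so it must be called with `new`
  fs.createReadStream(filePath).pipe(noop);
  await noop.promise();            // resolves on 'finish', rejects on 'error'
}

drainFile('some-file.bin').then(() => console.log('drained'));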
1 change: 1 addition & 0 deletions lib/extract.js
@@ -1,3 +1,4 @@
//@ts-nocheck
module.exports = Extract;

const Parse = require('./parse');
30 changes: 20 additions & 10 deletions lib/parse.js
@@ -11,7 +11,23 @@ const parseBuffer = require('./parseBuffer');
const endDirectorySignature = Buffer.alloc(4);
endDirectorySignature.writeUInt32LE(0x06054b50, 0);

class Entry extends Stream.PassThrough{
autodrain;
buffer;
path;
props;
CSSFontFeatureValuesRule;
type;
extra;
size;
vars;
__autodraining;
}


class Parse extends PullStream {
reachedCD;
crxHeader;
constructor(opts) {
super(opts || { verbose: false });
const self = this;
@@ -105,18 +121,12 @@ class Parse extends PullStream {

return self.pull(vars.fileNameLength).then(function(fileNameBuffer) {
const fileName = fileNameBuffer.toString('utf8');
const entry = Stream.PassThrough();
const entry = new Entry();
let __autodraining = false;

entry.autodrain = function() {
__autodraining = true;
const draining = entry.pipe(NoopStream());
draining.promise = function() {
return new Promise(function(resolve, reject) {
draining.on('finish', resolve);
draining.on('error', reject);
});
};
const draining = entry.pipe(new NoopStream());
return draining;
};

@@ -155,7 +165,7 @@
self.push(entry);
} else {
self.emit('entry', entry);

//@ts-ignore
if (self._readableState.pipesCount || (self._readableState.pipes && self._readableState.pipes.length))
self.push(entry);
}
@@ -171,7 +181,7 @@
let eof;

entry.__autodraining = __autodraining; // expose __autodraining for test purposes
const inflater = (vars.compressionMethod && !__autodraining) ? zlib.createInflateRaw() : Stream.PassThrough();
const inflater = (vars.compressionMethod && !__autodraining) ? zlib.createInflateRaw() : new Stream.PassThrough();

if (fileSizeKnown) {
entry.size = vars.uncompressedSize;
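With this change, entry.autodrain() pipes into a NoopStream instance and simply returns it, so the promise() method defined on NoopStream replaces the hand-rolled promise that was removed above. A usage sketch (archive and entry names are made up for illustration):

// Hypothetical consumer of Parse: keep one entry, autodrain the rest,
// and await each drain via the promise() now provided by NoopStream.
const fs = require('fs');
const unzipper = require('unzipper');

fs.createReadStream('archive.zip')
  .pipe(unzipper.Parse())
  .on('entry', function(entry) {
    if (entry.path === 'wanted.txt') {
      entry.pipe(fs.createWriteStream('wanted.txt'));
    } else {
      entry.autodrain().promise().catch(console.error);
    }
  });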
4 changes: 1 addition & 3 deletions lib/parseExtraField.js
@@ -1,7 +1,7 @@
const parseBuffer = require('./parseBuffer');

module.exports = function(extraField, vars) {
let extra;
let extra = {};
// Find the ZIP64 header, if present.
while(!extra && extraField && extraField.length) {
const candidateExtra = parseBuffer.parse(extraField, [
@@ -22,8 +22,6 @@ module.exports = function(extraField, vars) {
}
}

extra = extra || {};

if (vars.compressedSize === 0xffffffff)
vars.compressedSize = extra.compressedSize;

6 changes: 3 additions & 3 deletions lib/parseOne.js
@@ -4,9 +4,9 @@ const duplexer2 = require('duplexer2');
const BufferStream = require('./BufferStream');

function parseOne(match, opts) {
const inStream = Stream.PassThrough({objectMode:true});
const outStream = Stream.PassThrough();
const transform = Stream.Transform({objectMode:true});
const inStream = new Stream.PassThrough({objectMode:true});
const outStream = new Stream.PassThrough();
const transform = new Stream.Transform({objectMode:true});
const re = match instanceof RegExp ? match : (match && new RegExp(match));
let found;

8 changes: 5 additions & 3 deletions test/compressed-crx.js
@@ -42,17 +42,19 @@ test('open methods', async function(t) {
const s3 = new AWS.S3({region: 'us-east-1'});

// We have to modify the `getObject` and `headObject` to use makeUnauthenticated
//@ts-ignore
s3.getObject = function(params, cb) {
return s3.makeUnauthenticatedRequest('getObject', params, cb);
};

//@ts-ignore
s3.headObject = function(params, cb) {
return s3.makeUnauthenticatedRequest('headObject', params, cb);
};

const tests = [
{name: 'buffer', args: [buffer]},
{name: 'file', args: [archive]},
{name: 'buffer', args: [buffer, {crx: true}]},
{name: 'file', args: [archive, {crx: true}]},
// {name: 'url', args: [request, 'https://s3.amazonaws.com/unzipper/archive.crx']},
// {name: 's3', args: [s3, { Bucket: 'unzipper', Key: 'archive.crx'}]}
];
@@ -61,7 +63,7 @@ test('open methods', async function(t) {
t.test(test.name, async function(t) {
t.test('opening with crx option', function(t) {
const method = unzip.Open[test.name];
method.apply(method, test.args.concat({crx:true}))
method.apply(method, test.args)
.then(function(d) {
return d.files[1].buffer();
})
4 changes: 2 additions & 2 deletions test/office-files.js
@@ -19,14 +19,14 @@ test("get content a xlsx file without errors", async function () {

test("stream retries when the local file header indicates bigger size than central directory", async function (t) {
const archive = path.join(__dirname, '../testData/office/testfile.xlsx');
let retries = 0, size;
let retries = 0, size = 0;
const directory = await unzip.Open.file(archive, {padding: 10});
const stream = directory.files[0].stream();
stream.on('streamRetry', _size => {
retries += 1;
size = _size;
});
await new Promise(resolve => stream.pipe(NoopStream()).on('finish', resolve));
await new Promise(resolve => stream.pipe(new NoopStream()).on('finish', resolve));
t.ok(retries === 1, 'retries once');
t.ok(size > 0, 'size is set');
});
2 changes: 1 addition & 1 deletion test/streamSingleEntry.js
@@ -6,7 +6,7 @@ const unzip = require('../');
const Stream = require('stream');

test("pipe a single file entry out of a zip", function (t) {
const receiver = Stream.Transform({objectMode:true});
const receiver = new Stream.Transform({objectMode:true});
receiver._transform = function(entry, e, cb) {
if (entry.path === 'file.txt') {
const writableStream = new streamBuffers.WritableStreamBuffer();
4 changes: 2 additions & 2 deletions test/uncompressed.js
@@ -63,7 +63,7 @@ test("do not extract zip slip archive", function (t) {
fs.createReadStream(archive).pipe(unzipExtractor);

function testNoSlip() {
const mode = fs.F_OK | (fs.constants && fs.constants.F_OK);
const mode = fs.constants.F_OK | (fs.constants && fs.constants.F_OK);
return fs.access(path.join(os.tmpdir(), 'evil.txt'), mode, evilFileCallback);
}

@@ -105,7 +105,7 @@ function testZipSlipArchive(t, slipFileName, attackPathFactory){

function CheckForSlip(path, resultCallback) {
const fsCallback = function(err){ return resultCallback(!err); };
const mode = fs.F_OK | (fs.constants && fs.constants.F_OK);
const mode = fs.constants.F_OK | (fs.constants && fs.constants.F_OK);
return fs.access(path, mode, fsCallback);
}

