diff --git a/README.md b/README.md index 6bb2647..e082ff0 100644 --- a/README.md +++ b/README.md @@ -24,6 +24,7 @@ If vulnerabilities are found by Trivy, it creates the following GitHub Issue. |issue_title|False|Security Alert|Issue title| |issue_label|False|trivy,vulnerability|Issue label (separated by commma)| |issue_assignee|False|N/A|Issue assignee (separated by commma)| +|fail_on_vulnerabilities|False|false|Whether the action should fail if any vulnerabilities were found.| ### Outputs diff --git a/action.yml b/action.yml index a702767..50904bc 100644 --- a/action.yml +++ b/action.yml @@ -39,6 +39,10 @@ inputs: issue_assignee: description: 'Issue assignee (separated by commma)' required: false + fail_on_vulnerabilities: + description: Whether the action should fail if a vulnerability was found + default: 'false' + required: false outputs: issue_number: diff --git a/dist/index.js b/dist/index.js index 2e24cc1..bb16b23 100644 --- a/dist/index.js +++ b/dist/index.js @@ -1310,1970 +1310,1424 @@ module.exports = require("os"); /***/ }), -/***/ 106: +/***/ 118: /***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; -const EE = __webpack_require__(614) -const Stream = __webpack_require__(413) -const Yallist = __webpack_require__(612) -const SD = __webpack_require__(304).StringDecoder - -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') +const os = __webpack_require__(87); -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') +const nameMap = new Map([ + [19, 'Catalina'], + [18, 'Mojave'], + [17, 'High Sierra'], + [16, 'Sierra'], + [15, 'El Capitan'], + [14, 'Yosemite'], + [13, 'Mavericks'], + [12, 'Mountain Lion'], + [11, 'Lion'], + [10, 'Snow Leopard'], + [9, 'Leopard'], + [8, 'Tiger'], + [7, 'Panther'], + [6, 'Jaguar'], + [5, 'Puma'] +]); -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' +const macosRelease = release => { + release = Number((release || os.release()).split('.')[0]); + return { + name: nameMap.get(release), + version: '10.' 
+ (release - 4) + }; +}; -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 +module.exports = macosRelease; +// TODO: remove this in the next major version +module.exports.default = macosRelease; -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = new Yallist() - this.buffer = new Yallist() - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - } +/***/ }), - get bufferLength () { return this[BUFFERLENGTH] } +/***/ 126: +/***/ (function(module) { - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') +/** + * lodash (Custom Build) + * Build: `lodash modularize exports="npm" -o ./` + * Copyright jQuery Foundation and other contributors + * Released under MIT license + * Based on Underscore.js 1.8.3 + * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + */ - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') +/** Used as the size to enable large array optimizations. */ +var LARGE_ARRAY_SIZE = 200; - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } +/** Used to stand-in for `undefined` hash values. */ +var HASH_UNDEFINED = '__lodash_hash_undefined__'; - this[ENCODING] = enc - } +/** Used as references for various `Number` constants. */ +var INFINITY = 1 / 0; - setEncoding (enc) { - this.encoding = enc - } +/** `Object#toString` result references. */ +var funcTag = '[object Function]', + genTag = '[object GeneratorFunction]'; - get objectMode () { return this[OBJECTMODE] } - set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ } +/** + * Used to match `RegExp` + * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + */ +var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') +/** Used to detect host constructors (Safari). */ +var reIsHostCtor = /^\[object .+?Constructor\]$/; - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } +/** Detect free variable `global` from Node.js. */ +var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' +/** Detect free variable `self`. */ +var freeSelf = typeof self == 'object' && self && self.Object === Object && self; - if (!encoding) - encoding = 'utf8' +/** Used as a reference to the global object. 
*/ +var root = freeGlobal || freeSelf || Function('return this')(); - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! - // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } +/** + * A specialized version of `_.includes` for arrays without support for + * specifying an index to search from. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ +function arrayIncludes(array, value) { + var length = array ? array.length : 0; + return !!length && baseIndexOf(array, value, 0) > -1; +} - // this ensures at this point that the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!this.objectMode && !chunk.length) { - const ret = this.flowing - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - cb() - return ret - } +/** + * This function is like `arrayIncludes` except that it accepts a comparator. + * + * @private + * @param {Array} [array] The array to inspect. + * @param {*} target The value to search for. + * @param {Function} comparator The comparator invoked per element. + * @returns {boolean} Returns `true` if `target` is found, else `false`. + */ +function arrayIncludesWith(array, value, comparator) { + var index = -1, + length = array ? array.length : 0; - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && !this[OBJECTMODE] && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) + while (++index < length) { + if (comparator(value, array[index])) { + return true; } + } + return false; +} - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) +/** + * The base implementation of `_.findIndex` and `_.findLastIndex` without + * support for iteratee shorthands. + * + * @private + * @param {Array} array The array to inspect. + * @param {Function} predicate The function invoked per iteration. + * @param {number} fromIndex The index to search from. + * @param {boolean} [fromRight] Specify iterating from right to left. + * @returns {number} Returns the index of the matched value, else `-1`. + */ +function baseFindIndex(array, predicate, fromIndex, fromRight) { + var length = array.length, + index = fromIndex + (fromRight ? 1 : -1); - try { - return this.flowing - ? (this.emit('data', chunk), this.flowing) - : (this[BUFFERPUSH](chunk), false) - } finally { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - cb() + while ((fromRight ? 
index-- : ++index < length)) { + if (predicate(array[index], index, array)) { + return index; } } + return -1; +} - read (n) { - if (this[DESTROYED]) - return null - - try { - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) - return null - - if (this[OBJECTMODE]) - n = null - - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = new Yallist([ - Array.from(this.buffer).join('') - ]) - else - this.buffer = new Yallist([ - Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH]) - ]) - } +/** + * The base implementation of `_.indexOf` without `fromIndex` bounds checks. + * + * @private + * @param {Array} array The array to inspect. + * @param {*} value The value to search for. + * @param {number} fromIndex The index to search from. + * @returns {number} Returns the index of the matched value, else `-1`. + */ +function baseIndexOf(array, value, fromIndex) { + if (value !== value) { + return baseFindIndex(array, baseIsNaN, fromIndex); + } + var index = fromIndex - 1, + length = array.length; - return this[READ](n || null, this.buffer.head.value) - } finally { - this[MAYBE_EMIT_END]() + while (++index < length) { + if (array[index] === value) { + return index; } } + return -1; +} - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer.head.value = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } +/** + * The base implementation of `_.isNaN` without support for number objects. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + */ +function baseIsNaN(value) { + return value !== value; +} - this.emit('data', chunk) +/** + * Checks if a cache value for `key` exists. + * + * @private + * @param {Object} cache The cache to query. + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function cacheHas(cache, key) { + return cache.has(key); +} - if (!this.buffer.length && !this[EOF]) - this.emit('drain') +/** + * Gets the value at `key` of `object`. + * + * @private + * @param {Object} [object] The object to query. + * @param {string} key The key of the property to get. + * @returns {*} Returns the property value. + */ +function getValue(object, key) { + return object == null ? undefined : object[key]; +} - return chunk +/** + * Checks if `value` is a host object in IE < 9. + * + * @private + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a host object, else `false`. + */ +function isHostObject(value) { + // Many host objects are `Object` objects that can coerce to strings + // despite having improperly defined `toString` methods. + var result = false; + if (value != null && typeof value.toString != 'function') { + try { + result = !!(value + ''); + } catch (e) {} } + return result; +} - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false - - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. 
- if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } +/** + * Converts `set` to an array of its values. + * + * @private + * @param {Object} set The set to convert. + * @returns {Array} Returns the values. + */ +function setToArray(set) { + var index = -1, + result = Array(set.size); - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return + set.forEach(function(value) { + result[++index] = value; + }); + return result; +} - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } +/** Used for built-in method references. */ +var arrayProto = Array.prototype, + funcProto = Function.prototype, + objectProto = Object.prototype; - resume () { - return this[RESUME]() - } +/** Used to detect overreaching core-js shims. */ +var coreJsData = root['__core-js_shared__']; - pause () { - this[FLOWING] = false - this[PAUSED] = true - } +/** Used to detect methods masquerading as native. */ +var maskSrcKey = (function() { + var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); + return uid ? ('Symbol(src)_1.' + uid) : ''; +}()); - get destroyed () { - return this[DESTROYED] - } +/** Used to resolve the decompiled source of functions. */ +var funcToString = funcProto.toString; - get flowing () { - return this[FLOWING] - } +/** Used to check objects for own properties. */ +var hasOwnProperty = objectProto.hasOwnProperty; - get paused () { - return this[PAUSED] - } +/** + * Used to resolve the + * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) + * of values. + */ +var objectToString = objectProto.toString; - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - return this.buffer.push(chunk) - } +/** Used to detect if a method is native. */ +var reIsNative = RegExp('^' + + funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') + .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' +); - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer.head.value.length - } - return this.buffer.shift() - } +/** Built-in value references. */ +var splice = arrayProto.splice; - [FLUSH] () { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) +/* Built-in method references that are verified to be native. */ +var Map = getNative(root, 'Map'), + Set = getNative(root, 'Set'), + nativeCreate = getNative(Object, 'create'); - if (!this.buffer.length && !this[EOF]) - this.emit('drain') - } +/** + * Creates a hash object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ +function Hash(entries) { + var index = -1, + length = entries ? entries.length : 0; - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); } +} - pipe (dest, opts) { - if (this[DESTROYED]) - return +/** + * Removes all key-value entries from the hash. + * + * @private + * @name clear + * @memberOf Hash + */ +function hashClear() { + this.__data__ = nativeCreate ? 
nativeCreate(null) : {}; +} - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === process.stdout || dest === process.stderr) - opts.end = false - else - opts.end = opts.end !== false - - const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } - this.pipes.push(p) +/** + * Removes `key` and its value from the hash. + * + * @private + * @name delete + * @memberOf Hash + * @param {Object} hash The hash to modify. + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ +function hashDelete(key) { + return this.has(key) && delete this.__data__[key]; +} - dest.on('drain', p.ondrain) - this[RESUME]() - // piping an ended stream ends immediately - if (ended && p.opts.end) - p.dest.end() - return dest +/** + * Gets the hash value for `key`. + * + * @private + * @name get + * @memberOf Hash + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ +function hashGet(key) { + var data = this.__data__; + if (nativeCreate) { + var result = data[key]; + return result === HASH_UNDEFINED ? undefined : result; } + return hasOwnProperty.call(data, key) ? data[key] : undefined; +} - addListener (ev, fn) { - return this.on(ev, fn) - } +/** + * Checks if a hash value for `key` exists. + * + * @private + * @name has + * @memberOf Hash + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function hashHas(key) { + var data = this.__data__; + return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); +} - on (ev, fn) { - try { - return super.on(ev, fn) - } finally { - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } - } - } +/** + * Sets the hash `key` to `value`. + * + * @private + * @name set + * @memberOf Hash + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the hash instance. + */ +function hashSet(key, value) { + var data = this.__data__; + data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; + return this; +} - get emittedEnd () { - return this[EMITTED_END] - } +// Add methods to `Hash`. +Hash.prototype.clear = hashClear; +Hash.prototype['delete'] = hashDelete; +Hash.prototype.get = hashGet; +Hash.prototype.has = hashHas; +Hash.prototype.set = hashSet; - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false - } +/** + * Creates an list cache object. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ +function ListCache(entries) { + var index = -1, + length = entries ? entries.length : 0; + + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); } +} - emit (ev, data) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - if (!data) - return +/** + * Removes all key-value entries from the list cache. 
+ * + * @private + * @name clear + * @memberOf ListCache + */ +function listCacheClear() { + this.__data__ = []; +} - if (this.pipes.length) - this.pipes.forEach(p => - p.dest.write(data) === false && this.pause()) - } else if (ev === 'end') { - // only actual end gets this treatment - if (this[EMITTED_END] === true) - return +/** + * Removes `key` and its value from the list cache. + * + * @private + * @name delete + * @memberOf ListCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. + */ +function listCacheDelete(key) { + var data = this.__data__, + index = assocIndexOf(data, key); - this[EMITTED_END] = true - this.readable = false + if (index < 0) { + return false; + } + var lastIndex = data.length - 1; + if (index == lastIndex) { + data.pop(); + } else { + splice.call(data, index, 1); + } + return true; +} - if (this[DECODER]) { - data = this[DECODER].end() - if (data) { - this.pipes.forEach(p => p.dest.write(data)) - super.emit('data', data) - } - } +/** + * Gets the list cache value for `key`. + * + * @private + * @name get + * @memberOf ListCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. + */ +function listCacheGet(key) { + var data = this.__data__, + index = assocIndexOf(data, key); - this.pipes.forEach(p => { - p.dest.removeListener('drain', p.ondrain) - if (p.opts.end) - p.dest.end() - }) - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - } + return index < 0 ? undefined : data[index][1]; +} - // TODO: replace with a spread operator when Node v4 support drops - const args = new Array(arguments.length) - args[0] = ev - args[1] = data - if (arguments.length > 2) { - for (let i = 2; i < arguments.length; i++) { - args[i] = arguments[i] - } - } +/** + * Checks if a list cache value for `key` exists. + * + * @private + * @name has + * @memberOf ListCache + * @param {string} key The key of the entry to check. + * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function listCacheHas(key) { + return assocIndexOf(this.__data__, key) > -1; +} - try { - return super.emit.apply(this, args) - } finally { - if (!isEndish(ev)) - this[MAYBE_EMIT_END]() - else - this.removeAllListeners(ev) - } +/** + * Sets the list cache `key` to `value`. + * + * @private + * @name set + * @memberOf ListCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the list cache instance. + */ +function listCacheSet(key, value) { + var data = this.__data__, + index = assocIndexOf(data, key); + + if (index < 0) { + data.push([key, value]); + } else { + data[index][1] = value; } + return this; +} - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) - } - - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? 
buf.join('') : Buffer.concat(buf, buf.dataLength)) - } - - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('end', () => resolve()) - this.on('error', er => reject(er)) - }) - } - - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) - - if (this[EOF]) - return Promise.resolve({ done: true }) - - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) - } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } - - return { next } - } - - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } - } - return { next } - } - - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer = new Yallist() - this[BUFFERLENGTH] = 0 - - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() - - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) +// Add methods to `ListCache`. +ListCache.prototype.clear = listCacheClear; +ListCache.prototype['delete'] = listCacheDelete; +ListCache.prototype.get = listCacheGet; +ListCache.prototype.has = listCacheHas; +ListCache.prototype.set = listCacheSet; - return this - } +/** + * Creates a map cache object to store key-value pairs. + * + * @private + * @constructor + * @param {Array} [entries] The key-value pairs to cache. + */ +function MapCache(entries) { + var index = -1, + length = entries ? entries.length : 0; - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) + this.clear(); + while (++index < length) { + var entry = entries[index]; + this.set(entry[0], entry[1]); } } - -/***/ }), - -/***/ 118: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const os = __webpack_require__(87); - -const nameMap = new Map([ - [19, 'Catalina'], - [18, 'Mojave'], - [17, 'High Sierra'], - [16, 'Sierra'], - [15, 'El Capitan'], - [14, 'Yosemite'], - [13, 'Mavericks'], - [12, 'Mountain Lion'], - [11, 'Lion'], - [10, 'Snow Leopard'], - [9, 'Leopard'], - [8, 'Tiger'], - [7, 'Panther'], - [6, 'Jaguar'], - [5, 'Puma'] -]); - -const macosRelease = release => { - release = Number((release || os.release()).split('.')[0]); - return { - name: nameMap.get(release), - version: '10.' 
+ (release - 4) - }; -}; - -module.exports = macosRelease; -// TODO: remove this in the next major version -module.exports.default = macosRelease; - - -/***/ }), - -/***/ 126: -/***/ (function(module) { - /** - * lodash (Custom Build) - * Build: `lodash modularize exports="npm" -o ./` - * Copyright jQuery Foundation and other contributors - * Released under MIT license - * Based on Underscore.js 1.8.3 - * Copyright Jeremy Ashkenas, DocumentCloud and Investigative Reporters & Editors + * Removes all key-value entries from the map. + * + * @private + * @name clear + * @memberOf MapCache */ - -/** Used as the size to enable large array optimizations. */ -var LARGE_ARRAY_SIZE = 200; - -/** Used to stand-in for `undefined` hash values. */ -var HASH_UNDEFINED = '__lodash_hash_undefined__'; - -/** Used as references for various `Number` constants. */ -var INFINITY = 1 / 0; - -/** `Object#toString` result references. */ -var funcTag = '[object Function]', - genTag = '[object GeneratorFunction]'; +function mapCacheClear() { + this.__data__ = { + 'hash': new Hash, + 'map': new (Map || ListCache), + 'string': new Hash + }; +} /** - * Used to match `RegExp` - * [syntax characters](http://ecma-international.org/ecma-262/7.0/#sec-patterns). + * Removes `key` and its value from the map. + * + * @private + * @name delete + * @memberOf MapCache + * @param {string} key The key of the value to remove. + * @returns {boolean} Returns `true` if the entry was removed, else `false`. */ -var reRegExpChar = /[\\^$.*+?()[\]{}|]/g; - -/** Used to detect host constructors (Safari). */ -var reIsHostCtor = /^\[object .+?Constructor\]$/; - -/** Detect free variable `global` from Node.js. */ -var freeGlobal = typeof global == 'object' && global && global.Object === Object && global; - -/** Detect free variable `self`. */ -var freeSelf = typeof self == 'object' && self && self.Object === Object && self; - -/** Used as a reference to the global object. */ -var root = freeGlobal || freeSelf || Function('return this')(); +function mapCacheDelete(key) { + return getMapData(this, key)['delete'](key); +} /** - * A specialized version of `_.includes` for arrays without support for - * specifying an index to search from. + * Gets the map value for `key`. * * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @returns {boolean} Returns `true` if `target` is found, else `false`. + * @name get + * @memberOf MapCache + * @param {string} key The key of the value to get. + * @returns {*} Returns the entry value. */ -function arrayIncludes(array, value) { - var length = array ? array.length : 0; - return !!length && baseIndexOf(array, value, 0) > -1; +function mapCacheGet(key) { + return getMapData(this, key).get(key); } /** - * This function is like `arrayIncludes` except that it accepts a comparator. + * Checks if a map value for `key` exists. * * @private - * @param {Array} [array] The array to inspect. - * @param {*} target The value to search for. - * @param {Function} comparator The comparator invoked per element. - * @returns {boolean} Returns `true` if `target` is found, else `false`. - */ -function arrayIncludesWith(array, value, comparator) { - var index = -1, - length = array ? array.length : 0; - - while (++index < length) { - if (comparator(value, array[index])) { - return true; - } - } - return false; + * @name has + * @memberOf MapCache + * @param {string} key The key of the entry to check. 
+ * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + */ +function mapCacheHas(key) { + return getMapData(this, key).has(key); } /** - * The base implementation of `_.findIndex` and `_.findLastIndex` without - * support for iteratee shorthands. + * Sets the map `key` to `value`. * * @private - * @param {Array} array The array to inspect. - * @param {Function} predicate The function invoked per iteration. - * @param {number} fromIndex The index to search from. - * @param {boolean} [fromRight] Specify iterating from right to left. - * @returns {number} Returns the index of the matched value, else `-1`. + * @name set + * @memberOf MapCache + * @param {string} key The key of the value to set. + * @param {*} value The value to set. + * @returns {Object} Returns the map cache instance. */ -function baseFindIndex(array, predicate, fromIndex, fromRight) { - var length = array.length, - index = fromIndex + (fromRight ? 1 : -1); - - while ((fromRight ? index-- : ++index < length)) { - if (predicate(array[index], index, array)) { - return index; - } - } - return -1; +function mapCacheSet(key, value) { + getMapData(this, key).set(key, value); + return this; } +// Add methods to `MapCache`. +MapCache.prototype.clear = mapCacheClear; +MapCache.prototype['delete'] = mapCacheDelete; +MapCache.prototype.get = mapCacheGet; +MapCache.prototype.has = mapCacheHas; +MapCache.prototype.set = mapCacheSet; + /** - * The base implementation of `_.indexOf` without `fromIndex` bounds checks. + * + * Creates an array cache object to store unique values. * * @private - * @param {Array} array The array to inspect. - * @param {*} value The value to search for. - * @param {number} fromIndex The index to search from. - * @returns {number} Returns the index of the matched value, else `-1`. + * @constructor + * @param {Array} [values] The values to cache. */ -function baseIndexOf(array, value, fromIndex) { - if (value !== value) { - return baseFindIndex(array, baseIsNaN, fromIndex); - } - var index = fromIndex - 1, - length = array.length; +function SetCache(values) { + var index = -1, + length = values ? values.length : 0; + this.__data__ = new MapCache; while (++index < length) { - if (array[index] === value) { - return index; - } + this.add(values[index]); } - return -1; } /** - * The base implementation of `_.isNaN` without support for number objects. + * Adds `value` to the array cache. * * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is `NaN`, else `false`. + * @name add + * @memberOf SetCache + * @alias push + * @param {*} value The value to cache. + * @returns {Object} Returns the cache instance. */ -function baseIsNaN(value) { - return value !== value; +function setCacheAdd(value) { + this.__data__.set(value, HASH_UNDEFINED); + return this; } /** - * Checks if a cache value for `key` exists. + * Checks if `value` is in the array cache. * * @private - * @param {Object} cache The cache to query. - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + * @name has + * @memberOf SetCache + * @param {*} value The value to search for. + * @returns {number} Returns `true` if `value` is found, else `false`. */ -function cacheHas(cache, key) { - return cache.has(key); +function setCacheHas(value) { + return this.__data__.has(value); } +// Add methods to `SetCache`. 
+SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; +SetCache.prototype.has = setCacheHas; + /** - * Gets the value at `key` of `object`. + * Gets the index at which the `key` is found in `array` of key-value pairs. * * @private - * @param {Object} [object] The object to query. - * @param {string} key The key of the property to get. - * @returns {*} Returns the property value. + * @param {Array} array The array to inspect. + * @param {*} key The key to search for. + * @returns {number} Returns the index of the matched value, else `-1`. */ -function getValue(object, key) { - return object == null ? undefined : object[key]; +function assocIndexOf(array, key) { + var length = array.length; + while (length--) { + if (eq(array[length][0], key)) { + return length; + } + } + return -1; } /** - * Checks if `value` is a host object in IE < 9. + * The base implementation of `_.isNative` without bad shim checks. * * @private * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a host object, else `false`. + * @returns {boolean} Returns `true` if `value` is a native function, + * else `false`. */ -function isHostObject(value) { - // Many host objects are `Object` objects that can coerce to strings - // despite having improperly defined `toString` methods. - var result = false; - if (value != null && typeof value.toString != 'function') { - try { - result = !!(value + ''); - } catch (e) {} +function baseIsNative(value) { + if (!isObject(value) || isMasked(value)) { + return false; } - return result; + var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; + return pattern.test(toSource(value)); } /** - * Converts `set` to an array of its values. + * The base implementation of `_.uniqBy` without support for iteratee shorthands. * * @private - * @param {Object} set The set to convert. - * @returns {Array} Returns the values. + * @param {Array} array The array to inspect. + * @param {Function} [iteratee] The iteratee invoked per element. + * @param {Function} [comparator] The comparator invoked per element. + * @returns {Array} Returns the new duplicate free array. */ -function setToArray(set) { +function baseUniq(array, iteratee, comparator) { var index = -1, - result = Array(set.size); + includes = arrayIncludes, + length = array.length, + isCommon = true, + result = [], + seen = result; - set.forEach(function(value) { - result[++index] = value; - }); + if (comparator) { + isCommon = false; + includes = arrayIncludesWith; + } + else if (length >= LARGE_ARRAY_SIZE) { + var set = iteratee ? null : createSet(array); + if (set) { + return setToArray(set); + } + isCommon = false; + includes = cacheHas; + seen = new SetCache; + } + else { + seen = iteratee ? [] : result; + } + outer: + while (++index < length) { + var value = array[index], + computed = iteratee ? iteratee(value) : value; + + value = (comparator || value !== 0) ? value : 0; + if (isCommon && computed === computed) { + var seenIndex = seen.length; + while (seenIndex--) { + if (seen[seenIndex] === computed) { + continue outer; + } + } + if (iteratee) { + seen.push(computed); + } + result.push(value); + } + else if (!includes(seen, computed, comparator)) { + if (seen !== result) { + seen.push(computed); + } + result.push(value); + } + } return result; } -/** Used for built-in method references. */ -var arrayProto = Array.prototype, - funcProto = Function.prototype, - objectProto = Object.prototype; - -/** Used to detect overreaching core-js shims. 
*/ -var coreJsData = root['__core-js_shared__']; - -/** Used to detect methods masquerading as native. */ -var maskSrcKey = (function() { - var uid = /[^.]+$/.exec(coreJsData && coreJsData.keys && coreJsData.keys.IE_PROTO || ''); - return uid ? ('Symbol(src)_1.' + uid) : ''; -}()); - -/** Used to resolve the decompiled source of functions. */ -var funcToString = funcProto.toString; - -/** Used to check objects for own properties. */ -var hasOwnProperty = objectProto.hasOwnProperty; - /** - * Used to resolve the - * [`toStringTag`](http://ecma-international.org/ecma-262/7.0/#sec-object.prototype.tostring) - * of values. + * Creates a set object of `values`. + * + * @private + * @param {Array} values The values to add to the set. + * @returns {Object} Returns the new set. */ -var objectToString = objectProto.toString; - -/** Used to detect if a method is native. */ -var reIsNative = RegExp('^' + - funcToString.call(hasOwnProperty).replace(reRegExpChar, '\\$&') - .replace(/hasOwnProperty|(function).*?(?=\\\()| for .+?(?=\\\])/g, '$1.*?') + '$' -); - -/** Built-in value references. */ -var splice = arrayProto.splice; - -/* Built-in method references that are verified to be native. */ -var Map = getNative(root, 'Map'), - Set = getNative(root, 'Set'), - nativeCreate = getNative(Object, 'create'); +var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { + return new Set(values); +}; /** - * Creates a hash object. + * Gets the data for `map`. * * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. + * @param {Object} map The map to query. + * @param {string} key The reference key. + * @returns {*} Returns the map data. */ -function Hash(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } +function getMapData(map, key) { + var data = map.__data__; + return isKeyable(key) + ? data[typeof key == 'string' ? 'string' : 'hash'] + : data.map; } /** - * Removes all key-value entries from the hash. + * Gets the native function at `key` of `object`. * * @private - * @name clear - * @memberOf Hash + * @param {Object} object The object to query. + * @param {string} key The key of the method to get. + * @returns {*} Returns the function if it's native, else `undefined`. */ -function hashClear() { - this.__data__ = nativeCreate ? nativeCreate(null) : {}; +function getNative(object, key) { + var value = getValue(object, key); + return baseIsNative(value) ? value : undefined; } /** - * Removes `key` and its value from the hash. + * Checks if `value` is suitable for use as unique object key. * * @private - * @name delete - * @memberOf Hash - * @param {Object} hash The hash to modify. - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is suitable, else `false`. */ -function hashDelete(key) { - return this.has(key) && delete this.__data__[key]; +function isKeyable(value) { + var type = typeof value; + return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') + ? (value !== '__proto__') + : (value === null); } /** - * Gets the hash value for `key`. + * Checks if `func` has its source masked. * * @private - * @name get - * @memberOf Hash - * @param {string} key The key of the value to get. 
- * @returns {*} Returns the entry value. + * @param {Function} func The function to check. + * @returns {boolean} Returns `true` if `func` is masked, else `false`. */ -function hashGet(key) { - var data = this.__data__; - if (nativeCreate) { - var result = data[key]; - return result === HASH_UNDEFINED ? undefined : result; - } - return hasOwnProperty.call(data, key) ? data[key] : undefined; +function isMasked(func) { + return !!maskSrcKey && (maskSrcKey in func); } /** - * Checks if a hash value for `key` exists. + * Converts `func` to its source code. * * @private - * @name has - * @memberOf Hash - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. + * @param {Function} func The function to process. + * @returns {string} Returns the source code. */ -function hashHas(key) { - var data = this.__data__; - return nativeCreate ? data[key] !== undefined : hasOwnProperty.call(data, key); +function toSource(func) { + if (func != null) { + try { + return funcToString.call(func); + } catch (e) {} + try { + return (func + ''); + } catch (e) {} + } + return ''; } /** - * Sets the hash `key` to `value`. + * Creates a duplicate-free version of an array, using + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * for equality comparisons, in which only the first occurrence of each + * element is kept. * - * @private - * @name set - * @memberOf Hash - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the hash instance. + * @static + * @memberOf _ + * @since 0.1.0 + * @category Array + * @param {Array} array The array to inspect. + * @returns {Array} Returns the new duplicate free array. + * @example + * + * _.uniq([2, 1, 2]); + * // => [2, 1] */ -function hashSet(key, value) { - var data = this.__data__; - data[key] = (nativeCreate && value === undefined) ? HASH_UNDEFINED : value; - return this; +function uniq(array) { + return (array && array.length) + ? baseUniq(array) + : []; } -// Add methods to `Hash`. -Hash.prototype.clear = hashClear; -Hash.prototype['delete'] = hashDelete; -Hash.prototype.get = hashGet; -Hash.prototype.has = hashHas; -Hash.prototype.set = hashSet; - /** - * Creates an list cache object. + * Performs a + * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) + * comparison between two values to determine if they are equivalent. * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. + * @static + * @memberOf _ + * @since 4.0.0 + * @category Lang + * @param {*} value The value to compare. + * @param {*} other The other value to compare. + * @returns {boolean} Returns `true` if the values are equivalent, else `false`. + * @example + * + * var object = { 'a': 1 }; + * var other = { 'a': 1 }; + * + * _.eq(object, object); + * // => true + * + * _.eq(object, other); + * // => false + * + * _.eq('a', 'a'); + * // => true + * + * _.eq('a', Object('a')); + * // => false + * + * _.eq(NaN, NaN); + * // => true */ -function ListCache(entries) { - var index = -1, - length = entries ? entries.length : 0; - - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } +function eq(value, other) { + return value === other || (value !== value && other !== other); } /** - * Removes all key-value entries from the list cache. + * Checks if `value` is classified as a `Function` object. 
* - * @private - * @name clear - * @memberOf ListCache + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is a function, else `false`. + * @example + * + * _.isFunction(_); + * // => true + * + * _.isFunction(/abc/); + * // => false */ -function listCacheClear() { - this.__data__ = []; +function isFunction(value) { + // The use of `Object#toString` avoids issues with the `typeof` operator + // in Safari 8-9 which returns 'object' for typed array and other constructors. + var tag = isObject(value) ? objectToString.call(value) : ''; + return tag == funcTag || tag == genTag; } /** - * Removes `key` and its value from the list cache. + * Checks if `value` is the + * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) + * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) * - * @private - * @name delete - * @memberOf ListCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. + * @static + * @memberOf _ + * @since 0.1.0 + * @category Lang + * @param {*} value The value to check. + * @returns {boolean} Returns `true` if `value` is an object, else `false`. + * @example + * + * _.isObject({}); + * // => true + * + * _.isObject([1, 2, 3]); + * // => true + * + * _.isObject(_.noop); + * // => true + * + * _.isObject(null); + * // => false */ -function listCacheDelete(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - if (index < 0) { - return false; - } - var lastIndex = data.length - 1; - if (index == lastIndex) { - data.pop(); - } else { - splice.call(data, index, 1); - } - return true; +function isObject(value) { + var type = typeof value; + return !!value && (type == 'object' || type == 'function'); } /** - * Gets the list cache value for `key`. + * This method returns `undefined`. * - * @private - * @name get - * @memberOf ListCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. + * @static + * @memberOf _ + * @since 2.3.0 + * @category Util + * @example + * + * _.times(2, _.noop); + * // => [undefined, undefined] */ -function listCacheGet(key) { - var data = this.__data__, - index = assocIndexOf(data, key); - - return index < 0 ? undefined : data[index][1]; +function noop() { + // No operation performed. } -/** - * Checks if a list cache value for `key` exists. - * - * @private - * @name has - * @memberOf ListCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function listCacheHas(key) { - return assocIndexOf(this.__data__, key) > -1; -} +module.exports = uniq; -/** - * Sets the list cache `key` to `value`. - * - * @private - * @name set - * @memberOf ListCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the list cache instance. - */ -function listCacheSet(key, value) { - var data = this.__data__, - index = assocIndexOf(data, key); - if (index < 0) { - data.push([key, value]); - } else { - data[index][1] = value; - } - return this; -} +/***/ }), -// Add methods to `ListCache`. 
-ListCache.prototype.clear = listCacheClear; -ListCache.prototype['delete'] = listCacheDelete; -ListCache.prototype.get = listCacheGet; -ListCache.prototype.has = listCacheHas; -ListCache.prototype.set = listCacheSet; +/***/ 129: +/***/ (function(module) { -/** - * Creates a map cache object to store key-value pairs. - * - * @private - * @constructor - * @param {Array} [entries] The key-value pairs to cache. - */ -function MapCache(entries) { - var index = -1, - length = entries ? entries.length : 0; +module.exports = require("child_process"); - this.clear(); - while (++index < length) { - var entry = entries[index]; - this.set(entry[0], entry[1]); - } -} +/***/ }), -/** - * Removes all key-value entries from the map. - * - * @private - * @name clear - * @memberOf MapCache - */ -function mapCacheClear() { - this.__data__ = { - 'hash': new Hash, - 'map': new (Map || ListCache), - 'string': new Hash - }; -} +/***/ 133: +/***/ (function(__unusedmodule, exports, __webpack_require__) { -/** - * Removes `key` and its value from the map. - * - * @private - * @name delete - * @memberOf MapCache - * @param {string} key The key of the value to remove. - * @returns {boolean} Returns `true` if the entry was removed, else `false`. - */ -function mapCacheDelete(key) { - return getMapData(this, key)['delete'](key); -} +"use strict"; -/** - * Gets the map value for `key`. - * - * @private - * @name get - * @memberOf MapCache - * @param {string} key The key of the value to get. - * @returns {*} Returns the entry value. - */ -function mapCacheGet(key) { - return getMapData(this, key).get(key); -} -/** - * Checks if a map value for `key` exists. - * - * @private - * @name has - * @memberOf MapCache - * @param {string} key The key of the entry to check. - * @returns {boolean} Returns `true` if an entry for `key` exists, else `false`. - */ -function mapCacheHas(key) { - return getMapData(this, key).has(key); -} +const assert = __webpack_require__(357) +const Buffer = __webpack_require__(293).Buffer +const realZlib = __webpack_require__(761) -/** - * Sets the map `key` to `value`. - * - * @private - * @name set - * @memberOf MapCache - * @param {string} key The key of the value to set. - * @param {*} value The value to set. - * @returns {Object} Returns the map cache instance. - */ -function mapCacheSet(key, value) { - getMapData(this, key).set(key, value); - return this; -} +const constants = exports.constants = __webpack_require__(60) +const Minipass = __webpack_require__(720) -// Add methods to `MapCache`. -MapCache.prototype.clear = mapCacheClear; -MapCache.prototype['delete'] = mapCacheDelete; -MapCache.prototype.get = mapCacheGet; -MapCache.prototype.has = mapCacheHas; -MapCache.prototype.set = mapCacheSet; +const OriginalBufferConcat = Buffer.concat -/** - * - * Creates an array cache object to store unique values. - * - * @private - * @constructor - * @param {Array} [values] The values to cache. - */ -function SetCache(values) { - var index = -1, - length = values ? values.length : 0; +const _superWrite = Symbol('_superWrite') +class ZlibError extends Error { + constructor (err) { + super('zlib: ' + err.message) + this.code = err.code + this.errno = err.errno + /* istanbul ignore if */ + if (!this.code) + this.code = 'ZLIB_ERROR' - this.__data__ = new MapCache; - while (++index < length) { - this.add(values[index]); + this.message = 'zlib: ' + err.message + Error.captureStackTrace(this, this.constructor) } -} -/** - * Adds `value` to the array cache. 
- * - * @private - * @name add - * @memberOf SetCache - * @alias push - * @param {*} value The value to cache. - * @returns {Object} Returns the cache instance. - */ -function setCacheAdd(value) { - this.__data__.set(value, HASH_UNDEFINED); - return this; + get name () { + return 'ZlibError' + } } -/** - * Checks if `value` is in the array cache. - * - * @private - * @name has - * @memberOf SetCache - * @param {*} value The value to search for. - * @returns {number} Returns `true` if `value` is found, else `false`. - */ -function setCacheHas(value) { - return this.__data__.has(value); -} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _opts = Symbol('opts') +const _flushFlag = Symbol('flushFlag') +const _finishFlushFlag = Symbol('finishFlushFlag') +const _fullFlushFlag = Symbol('fullFlushFlag') +const _handle = Symbol('handle') +const _onError = Symbol('onError') +const _sawError = Symbol('sawError') +const _level = Symbol('level') +const _strategy = Symbol('strategy') +const _ended = Symbol('ended') +const _defaultFullFlush = Symbol('_defaultFullFlush') -// Add methods to `SetCache`. -SetCache.prototype.add = SetCache.prototype.push = setCacheAdd; -SetCache.prototype.has = setCacheHas; +class ZlibBase extends Minipass { + constructor (opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor') -/** - * Gets the index at which the `key` is found in `array` of key-value pairs. - * - * @private - * @param {Array} array The array to inspect. - * @param {*} key The key to search for. - * @returns {number} Returns the index of the matched value, else `-1`. - */ -function assocIndexOf(array, key) { - var length = array.length; - while (length--) { - if (eq(array[length][0], key)) { - return length; + super(opts) + this[_ended] = false + this[_opts] = opts + + this[_flushFlag] = opts.flush + this[_finishFlushFlag] = opts.finishFlush + // this will throw if any options are invalid for the class selected + try { + this[_handle] = new realZlib[mode](opts) + } catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er) + } + + this[_onError] = (err) => { + this[_sawError] = true + // there is no way to cleanly recover. + // continuing only obscures problems. + this.close() + this.emit('error', err) } + + this[_handle].on('error', er => this[_onError](new ZlibError(er))) + this.once('end', () => this.close) } - return -1; -} -/** - * The base implementation of `_.isNative` without bad shim checks. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a native function, - * else `false`. - */ -function baseIsNative(value) { - if (!isObject(value) || isMasked(value)) { - return false; + close () { + if (this[_handle]) { + this[_handle].close() + this[_handle] = null + this.emit('close') + } } - var pattern = (isFunction(value) || isHostObject(value)) ? reIsNative : reIsHostCtor; - return pattern.test(toSource(value)); -} -/** - * The base implementation of `_.uniqBy` without support for iteratee shorthands. - * - * @private - * @param {Array} array The array to inspect. - * @param {Function} [iteratee] The iteratee invoked per element. - * @param {Function} [comparator] The comparator invoked per element. - * @returns {Array} Returns the new duplicate free array. 
- */ -function baseUniq(array, iteratee, comparator) { - var index = -1, - includes = arrayIncludes, - length = array.length, - isCommon = true, - result = [], - seen = result; + reset () { + if (!this[_sawError]) { + assert(this[_handle], 'zlib binding closed') + return this[_handle].reset() + } + } - if (comparator) { - isCommon = false; - includes = arrayIncludesWith; + flush (flushFlag) { + if (this.ended) + return + + if (typeof flushFlag !== 'number') + flushFlag = this[_fullFlushFlag] + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })) } - else if (length >= LARGE_ARRAY_SIZE) { - var set = iteratee ? null : createSet(array); - if (set) { - return setToArray(set); - } - isCommon = false; - includes = cacheHas; - seen = new SetCache; + + end (chunk, encoding, cb) { + if (chunk) + this.write(chunk, encoding) + this.flush(this[_finishFlushFlag]) + this[_ended] = true + return super.end(null, null, cb) } - else { - seen = iteratee ? [] : result; + + get ended () { + return this[_ended] } - outer: - while (++index < length) { - var value = array[index], - computed = iteratee ? iteratee(value) : value; - value = (comparator || value !== 0) ? value : 0; - if (isCommon && computed === computed) { - var seenIndex = seen.length; - while (seenIndex--) { - if (seen[seenIndex] === computed) { - continue outer; - } - } - if (iteratee) { - seen.push(computed); + write (chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding) + + if (this[_sawError]) + return + assert(this[_handle], 'zlib binding closed') + + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + const nativeHandle = this[_handle]._handle + const originalNativeClose = nativeHandle.close + nativeHandle.close = () => {} + const originalClose = this[_handle].close + this[_handle].close = () => {} + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = (args) => args + let result + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] : this[_flushFlag] + result = this[_handle]._processChunk(chunk, flushFlag) + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat + } catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat + this[_onError](new ZlibError(err)) + } finally { + if (this[_handle]) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + this[_handle]._handle = nativeHandle + nativeHandle.close = originalNativeClose + this[_handle].close = originalClose + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this[_handle].removeAllListeners('error') } - result.push(value); } - else if (!includes(seen, computed, comparator)) { - if (seen !== result) { - seen.push(computed); + + let writeReturn + if (result) { + if (Array.isArray(result) && result.length > 0) { + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(result[0])) + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]) + } + } else { + writeReturn = this[_superWrite](Buffer.from(result)) } - result.push(value); } + + if (cb) + cb() + return writeReturn + } + + [_superWrite] (data) { + return super.write(data) } - return result; } -/** - * Creates a set object of `values`. - * - * @private - * @param {Array} values The values to add to the set. - * @returns {Object} Returns the new set. - */ -var createSet = !(Set && (1 / setToArray(new Set([,-0]))[1]) == INFINITY) ? noop : function(values) { - return new Set(values); -}; +class Zlib extends ZlibBase { + constructor (opts, mode) { + opts = opts || {} -/** - * Gets the data for `map`. - * - * @private - * @param {Object} map The map to query. - * @param {string} key The reference key. - * @returns {*} Returns the map data. - */ -function getMapData(map, key) { - var data = map.__data__; - return isKeyable(key) - ? data[typeof key == 'string' ? 'string' : 'hash'] - : data.map; -} + opts.flush = opts.flush || constants.Z_NO_FLUSH + opts.finishFlush = opts.finishFlush || constants.Z_FINISH + super(opts, mode) -/** - * Gets the native function at `key` of `object`. - * - * @private - * @param {Object} object The object to query. - * @param {string} key The key of the method to get. - * @returns {*} Returns the function if it's native, else `undefined`. - */ -function getNative(object, key) { - var value = getValue(object, key); - return baseIsNative(value) ? value : undefined; -} + this[_fullFlushFlag] = constants.Z_FULL_FLUSH + this[_level] = opts.level + this[_strategy] = opts.strategy + } -/** - * Checks if `value` is suitable for use as unique object key. - * - * @private - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is suitable, else `false`. - */ -function isKeyable(value) { - var type = typeof value; - return (type == 'string' || type == 'number' || type == 'symbol' || type == 'boolean') - ? (value !== '__proto__') - : (value === null); + params (level, strategy) { + if (this[_sawError]) + return + + if (!this[_handle]) + throw new Error('cannot switch params when binding is closed') + + // no way to test this without also not supporting params at all + /* istanbul ignore if */ + if (!this[_handle].params) + throw new Error('not supported in this implementation') + + if (this[_level] !== level || this[_strategy] !== strategy) { + this.flush(constants.Z_SYNC_FLUSH) + assert(this[_handle], 'zlib binding closed') + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this[_handle].flush + this[_handle].flush = (flushFlag, cb) => { + this.flush(flushFlag) + cb() + } + try { + this[_handle].params(level, strategy) + } finally { + this[_handle].flush = origFlush + } + /* istanbul ignore else */ + if (this[_handle]) { + this[_level] = level + this[_strategy] = strategy + } + } + } } -/** - * Checks if `func` has its source masked. 
- * - * @private - * @param {Function} func The function to check. - * @returns {boolean} Returns `true` if `func` is masked, else `false`. - */ -function isMasked(func) { - return !!maskSrcKey && (maskSrcKey in func); +// minimal 2-byte header +class Deflate extends Zlib { + constructor (opts) { + super(opts, 'Deflate') + } } -/** - * Converts `func` to its source code. - * - * @private - * @param {Function} func The function to process. - * @returns {string} Returns the source code. - */ -function toSource(func) { - if (func != null) { - try { - return funcToString.call(func); - } catch (e) {} - try { - return (func + ''); - } catch (e) {} +class Inflate extends Zlib { + constructor (opts) { + super(opts, 'Inflate') } - return ''; } -/** - * Creates a duplicate-free version of an array, using - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * for equality comparisons, in which only the first occurrence of each - * element is kept. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Array - * @param {Array} array The array to inspect. - * @returns {Array} Returns the new duplicate free array. - * @example - * - * _.uniq([2, 1, 2]); - * // => [2, 1] - */ -function uniq(array) { - return (array && array.length) - ? baseUniq(array) - : []; +// gzip - bigger header, same deflate compression +const _portable = Symbol('_portable') +class Gzip extends Zlib { + constructor (opts) { + super(opts, 'Gzip') + this[_portable] = opts && !!opts.portable + } + + [_superWrite] (data) { + if (!this[_portable]) + return super[_superWrite](data) + + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this[_portable] = false + data[9] = 255 + return super[_superWrite](data) + } } -/** - * Performs a - * [`SameValueZero`](http://ecma-international.org/ecma-262/7.0/#sec-samevaluezero) - * comparison between two values to determine if they are equivalent. - * - * @static - * @memberOf _ - * @since 4.0.0 - * @category Lang - * @param {*} value The value to compare. - * @param {*} other The other value to compare. - * @returns {boolean} Returns `true` if the values are equivalent, else `false`. - * @example - * - * var object = { 'a': 1 }; - * var other = { 'a': 1 }; - * - * _.eq(object, object); - * // => true - * - * _.eq(object, other); - * // => false - * - * _.eq('a', 'a'); - * // => true - * - * _.eq('a', Object('a')); - * // => false - * - * _.eq(NaN, NaN); - * // => true - */ -function eq(value, other) { - return value === other || (value !== value && other !== other); +class Gunzip extends Zlib { + constructor (opts) { + super(opts, 'Gunzip') + } } -/** - * Checks if `value` is classified as a `Function` object. - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is a function, else `false`. - * @example - * - * _.isFunction(_); - * // => true - * - * _.isFunction(/abc/); - * // => false - */ -function isFunction(value) { - // The use of `Object#toString` avoids issues with the `typeof` operator - // in Safari 8-9 which returns 'object' for typed array and other constructors. - var tag = isObject(value) ? 
objectToString.call(value) : ''; - return tag == funcTag || tag == genTag; +// raw - no header +class DeflateRaw extends Zlib { + constructor (opts) { + super(opts, 'DeflateRaw') + } } -/** - * Checks if `value` is the - * [language type](http://www.ecma-international.org/ecma-262/7.0/#sec-ecmascript-language-types) - * of `Object`. (e.g. arrays, functions, objects, regexes, `new Number(0)`, and `new String('')`) - * - * @static - * @memberOf _ - * @since 0.1.0 - * @category Lang - * @param {*} value The value to check. - * @returns {boolean} Returns `true` if `value` is an object, else `false`. - * @example - * - * _.isObject({}); - * // => true - * - * _.isObject([1, 2, 3]); - * // => true - * - * _.isObject(_.noop); - * // => true - * - * _.isObject(null); - * // => false - */ -function isObject(value) { - var type = typeof value; - return !!value && (type == 'object' || type == 'function'); +class InflateRaw extends Zlib { + constructor (opts) { + super(opts, 'InflateRaw') + } } -/** - * This method returns `undefined`. - * - * @static - * @memberOf _ - * @since 2.3.0 - * @category Util - * @example - * - * _.times(2, _.noop); - * // => [undefined, undefined] - */ -function noop() { - // No operation performed. +// auto-detect header. +class Unzip extends Zlib { + constructor (opts) { + super(opts, 'Unzip') + } } -module.exports = uniq; +class Brotli extends ZlibBase { + constructor (opts, mode) { + opts = opts || {} + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS + opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH -/***/ }), + super(opts, mode) -/***/ 129: -/***/ (function(module) { + this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH + } +} -module.exports = require("child_process"); +class BrotliCompress extends Brotli { + constructor (opts) { + super(opts, 'BrotliCompress') + } +} -/***/ }), +class BrotliDecompress extends Brotli { + constructor (opts) { + super(opts, 'BrotliDecompress') + } +} -/***/ 133: -/***/ (function(__unusedmodule, exports, __webpack_require__) { +exports.Deflate = Deflate +exports.Inflate = Inflate +exports.Gzip = Gzip +exports.Gunzip = Gunzip +exports.DeflateRaw = DeflateRaw +exports.InflateRaw = InflateRaw +exports.Unzip = Unzip +/* istanbul ignore else */ +if (typeof realZlib.BrotliCompress === 'function') { + exports.BrotliCompress = BrotliCompress + exports.BrotliDecompress = BrotliDecompress +} else { + exports.BrotliCompress = exports.BrotliDecompress = class { + constructor () { + throw new Error('Brotli is not supported in this version of Node.js') + } + } +} -"use strict"; +/***/ }), -const assert = __webpack_require__(357) -const Buffer = __webpack_require__(293).Buffer -const realZlib = __webpack_require__(761) +/***/ 143: +/***/ (function(module, __unusedexports, __webpack_require__) { -const constants = exports.constants = __webpack_require__(60) -const Minipass = __webpack_require__(106) +module.exports = withAuthorizationPrefix; -const OriginalBufferConcat = Buffer.concat +const atob = __webpack_require__(368); -const _superWrite = Symbol('_superWrite') -class ZlibError extends Error { - constructor (err) { - super('zlib: ' + err.message) - this.code = err.code - this.errno = err.errno - /* istanbul ignore if */ - if (!this.code) - this.code = 'ZLIB_ERROR' +const REGEX_IS_BASIC_AUTH = /^[\w-]+:/; - this.message = 'zlib: ' + err.message - Error.captureStackTrace(this, this.constructor) +function withAuthorizationPrefix(authorization) { + if (/^(basic|bearer|token) /i.test(authorization)) { + return 
authorization; } - get name () { - return 'ZlibError' + try { + if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) { + return `basic ${authorization}`; + } + } catch (error) {} + + if (authorization.split(/\./).length === 3) { + return `bearer ${authorization}`; } + + return `token ${authorization}`; } -// the Zlib class they all inherit from -// This thing manages the queue of requests, and returns -// true or false if there is anything in the queue when -// you call the .write() method. -const _opts = Symbol('opts') -const _flushFlag = Symbol('flushFlag') -const _finishFlushFlag = Symbol('finishFlushFlag') -const _fullFlushFlag = Symbol('fullFlushFlag') -const _handle = Symbol('handle') -const _onError = Symbol('onError') -const _sawError = Symbol('sawError') -const _level = Symbol('level') -const _strategy = Symbol('strategy') -const _ended = Symbol('ended') -const _defaultFullFlush = Symbol('_defaultFullFlush') -class ZlibBase extends Minipass { - constructor (opts, mode) { - if (!opts || typeof opts !== 'object') - throw new TypeError('invalid options for ZlibBase constructor') +/***/ }), - super(opts) - this[_ended] = false - this[_opts] = opts +/***/ 145: +/***/ (function(module, __unusedexports, __webpack_require__) { - this[_flushFlag] = opts.flush - this[_finishFlushFlag] = opts.finishFlush - // this will throw if any options are invalid for the class selected - try { - this[_handle] = new realZlib[mode](opts) - } catch (er) { - // make sure that all errors get decorated properly - throw new ZlibError(er) - } +"use strict"; - this[_onError] = (err) => { - this[_sawError] = true - // there is no way to cleanly recover. - // continuing only obscures problems. - this.close() - this.emit('error', err) - } +const pump = __webpack_require__(453); +const bufferStream = __webpack_require__(158); - this[_handle].on('error', er => this[_onError](new ZlibError(er))) - this.once('end', () => this.close) - } +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} - close () { - if (this[_handle]) { - this[_handle].close() - this[_handle] = null - this.emit('close') - } - } +function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } - reset () { - if (!this[_sawError]) { - assert(this[_handle], 'zlib binding closed') - return this[_handle].reset() - } - } + options = Object.assign({maxBuffer: Infinity}, options); - flush (flushFlag) { - if (this.ended) - return + const {maxBuffer} = options; - if (typeof flushFlag !== 'number') - flushFlag = this[_fullFlushFlag] - this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })) - } + let stream; + return new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); + } + reject(error); + }; - end (chunk, encoding, cb) { - if (chunk) - this.write(chunk, encoding) - this.flush(this[_finishFlushFlag]) - this[_ended] = true - return super.end(null, null, cb) - } + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } - get ended () { - return this[_ended] - } + resolve(); + }); - write (chunk, encoding, cb) { - // process the chunk using the sync process - // then super.write() all the outputted chunks - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + 
rejectPromise(new MaxBufferError()); + } + }); + }).then(() => stream.getBufferedValue()); +} - if (typeof chunk === 'string') - chunk = Buffer.from(chunk, encoding) +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); +module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); +module.exports.MaxBufferError = MaxBufferError; - if (this[_sawError]) - return - assert(this[_handle], 'zlib binding closed') - // _processChunk tries to .close() the native handle after it's done, so we - // intercept that by temporarily making it a no-op. - const nativeHandle = this[_handle]._handle - const originalNativeClose = nativeHandle.close - nativeHandle.close = () => {} - const originalClose = this[_handle].close - this[_handle].close = () => {} - // It also calls `Buffer.concat()` at the end, which may be convenient - // for some, but which we are not interested in as it slows us down. - Buffer.concat = (args) => args - let result - try { - const flushFlag = typeof chunk[_flushFlag] === 'number' - ? chunk[_flushFlag] : this[_flushFlag] - result = this[_handle]._processChunk(chunk, flushFlag) - // if we don't throw, reset it back how it was - Buffer.concat = OriginalBufferConcat - } catch (err) { - // or if we do, put Buffer.concat() back before we emit error - // Error events call into user code, which may call Buffer.concat() - Buffer.concat = OriginalBufferConcat - this[_onError](new ZlibError(err)) - } finally { - if (this[_handle]) { - // Core zlib resets `_handle` to null after attempting to close the - // native handle. Our no-op handler prevented actual closure, but we - // need to restore the `._handle` property. - this[_handle]._handle = nativeHandle - nativeHandle.close = originalNativeClose - this[_handle].close = originalClose - // `_processChunk()` adds an 'error' listener. If we don't remove it - // after each call, these handlers start piling up. - this[_handle].removeAllListeners('error') - } - } +/***/ }), - let writeReturn - if (result) { - if (Array.isArray(result) && result.length > 0) { - // The first buffer is always `handle._outBuffer`, which would be - // re-used for later invocations; so, we always have to copy that one. 
- writeReturn = this[_superWrite](Buffer.from(result[0])) - for (let i = 1; i < result.length; i++) { - writeReturn = this[_superWrite](result[i]) - } - } else { - writeReturn = this[_superWrite](Buffer.from(result)) - } - } +/***/ 148: +/***/ (function(module, __unusedexports, __webpack_require__) { - if (cb) - cb() - return writeReturn - } +module.exports = paginatePlugin; - [_superWrite] (data) { - return super.write(data) - } +const { paginateRest } = __webpack_require__(299); + +function paginatePlugin(octokit) { + Object.assign(octokit, paginateRest(octokit)); } -class Zlib extends ZlibBase { - constructor (opts, mode) { - opts = opts || {} - opts.flush = opts.flush || constants.Z_NO_FLUSH - opts.finishFlush = opts.finishFlush || constants.Z_FINISH - super(opts, mode) +/***/ }), - this[_fullFlushFlag] = constants.Z_FULL_FLUSH - this[_level] = opts.level - this[_strategy] = opts.strategy - } +/***/ 158: +/***/ (function(module, __unusedexports, __webpack_require__) { - params (level, strategy) { - if (this[_sawError]) - return +"use strict"; - if (!this[_handle]) - throw new Error('cannot switch params when binding is closed') +const {PassThrough} = __webpack_require__(413); - // no way to test this without also not supporting params at all - /* istanbul ignore if */ - if (!this[_handle].params) - throw new Error('not supported in this implementation') +module.exports = options => { + options = Object.assign({}, options); - if (this[_level] !== level || this[_strategy] !== strategy) { - this.flush(constants.Z_SYNC_FLUSH) - assert(this[_handle], 'zlib binding closed') - // .params() calls .flush(), but the latter is always async in the - // core zlib. We override .flush() temporarily to intercept that and - // flush synchronously. - const origFlush = this[_handle].flush - this[_handle].flush = (flushFlag, cb) => { - this.flush(flushFlag) - cb() - } - try { - this[_handle].params(level, strategy) - } finally { - this[_handle].flush = origFlush - } - /* istanbul ignore else */ - if (this[_handle]) { - this[_level] = level - this[_strategy] = strategy - } - } - } -} + const {array} = options; + let {encoding} = options; + const buffer = encoding === 'buffer'; + let objectMode = false; -// minimal 2-byte header -class Deflate extends Zlib { - constructor (opts) { - super(opts, 'Deflate') - } -} - -class Inflate extends Zlib { - constructor (opts) { - super(opts, 'Inflate') - } -} - -// gzip - bigger header, same deflate compression -const _portable = Symbol('_portable') -class Gzip extends Zlib { - constructor (opts) { - super(opts, 'Gzip') - this[_portable] = opts && !!opts.portable - } - - [_superWrite] (data) { - if (!this[_portable]) - return super[_superWrite](data) - - // we'll always get the header emitted in one first chunk - // overwrite the OS indicator byte with 0xFF - this[_portable] = false - data[9] = 255 - return super[_superWrite](data) - } -} - -class Gunzip extends Zlib { - constructor (opts) { - super(opts, 'Gunzip') - } -} - -// raw - no header -class DeflateRaw extends Zlib { - constructor (opts) { - super(opts, 'DeflateRaw') - } -} - -class InflateRaw extends Zlib { - constructor (opts) { - super(opts, 'InflateRaw') - } -} - -// auto-detect header. 
-class Unzip extends Zlib { - constructor (opts) { - super(opts, 'Unzip') - } -} - -class Brotli extends ZlibBase { - constructor (opts, mode) { - opts = opts || {} - - opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS - opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH - - super(opts, mode) - - this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH - } -} - -class BrotliCompress extends Brotli { - constructor (opts) { - super(opts, 'BrotliCompress') - } -} - -class BrotliDecompress extends Brotli { - constructor (opts) { - super(opts, 'BrotliDecompress') - } -} - -exports.Deflate = Deflate -exports.Inflate = Inflate -exports.Gzip = Gzip -exports.Gunzip = Gunzip -exports.DeflateRaw = DeflateRaw -exports.InflateRaw = InflateRaw -exports.Unzip = Unzip -/* istanbul ignore else */ -if (typeof realZlib.BrotliCompress === 'function') { - exports.BrotliCompress = BrotliCompress - exports.BrotliDecompress = BrotliDecompress -} else { - exports.BrotliCompress = exports.BrotliDecompress = class { - constructor () { - throw new Error('Brotli is not supported in this version of Node.js') - } - } -} - - -/***/ }), - -/***/ 143: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = withAuthorizationPrefix; - -const atob = __webpack_require__(368); - -const REGEX_IS_BASIC_AUTH = /^[\w-]+:/; - -function withAuthorizationPrefix(authorization) { - if (/^(basic|bearer|token) /i.test(authorization)) { - return authorization; - } - - try { - if (REGEX_IS_BASIC_AUTH.test(atob(authorization))) { - return `basic ${authorization}`; - } - } catch (error) {} - - if (authorization.split(/\./).length === 3) { - return `bearer ${authorization}`; - } - - return `token ${authorization}`; -} - - -/***/ }), - -/***/ 145: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const pump = __webpack_require__(453); -const bufferStream = __webpack_require__(158); - -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); - } - - options = Object.assign({maxBuffer: Infinity}, options); - - const {maxBuffer} = options; - - let stream; - return new Promise((resolve, reject) => { - const rejectPromise = error => { - if (error) { // A null check - error.bufferedData = stream.getBufferedValue(); - } - reject(error); - }; - - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } - - resolve(); - }); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }).then(() => stream.getBufferedValue()); -} - -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); -module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); -module.exports.MaxBufferError = MaxBufferError; - - -/***/ }), - -/***/ 148: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = paginatePlugin; - -const { paginateRest } = __webpack_require__(299); - -function paginatePlugin(octokit) { - Object.assign(octokit, paginateRest(octokit)); -} - - -/***/ }), - -/***/ 158: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const {PassThrough} = 
__webpack_require__(413); - -module.exports = options => { - options = Object.assign({}, options); - - const {array} = options; - let {encoding} = options; - const buffer = encoding === 'buffer'; - let objectMode = false; - - if (array) { - objectMode = !(encoding || buffer); - } else { - encoding = encoding || 'utf8'; - } + if (array) { + objectMode = !(encoding || buffer); + } else { + encoding = encoding || 'utf8'; + } if (buffer) { encoding = null; @@ -6584,7 +6038,7 @@ exports.paginateRest = paginateRest; "use strict"; -const MiniPass = __webpack_require__(635) +const MiniPass = __webpack_require__(720) const Pax = __webpack_require__(582) const Header = __webpack_require__(232) const ReadEntry = __webpack_require__(662) @@ -7136,9 +6590,12 @@ function run() { .split(','), }; const token = core.getInput('token', { required: true }); - const output = yield issue_1.createIssue(token, issueOption); + const output = yield issue_1.createOrUpdateIssue(token, image, issueOption); core.setOutput('html_url', output.htmlUrl); core.setOutput('issue_number', output.issueNumber.toString()); + if (core.getInput("fail_on_vulnerabilities") === 'true') { + core.setFailed(`Vulnerabilities found.\n${issueContent}`); + } } catch (error) { core.error(error.stack); @@ -10494,19 +9951,52 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const rest_1 = __webpack_require__(0); +const core = __importStar(__webpack_require__(470)); const github = __importStar(__webpack_require__(469)); -function createIssue(token, options) { +function getTrivyIssues(client, image, labels) { + return __awaiter(this, void 0, void 0, function* () { + if (labels == null) { + return []; + } + let { data: trivyIssues, } = yield client.issues.listForRepo(Object.assign(Object.assign({}, github.context.repo), { state: "open", labels: labels.join(",") })); + return trivyIssues.filter(issue => issue.body.includes(image)); + }); +} +function createIssue(client, options) { return __awaiter(this, void 0, void 0, function* () { - const client = new rest_1.Octokit({ auth: token }); const { data: issue, } = yield client.issues.create(Object.assign(Object.assign({}, github.context.repo), options)); - const result = { - issueNumber: issue.number, - htmlUrl: issue.html_url, - }; - return result; + return issue; + }); +} +function updateIssue(issueNumber, client, options) { + return __awaiter(this, void 0, void 0, function* () { + yield client.issues.update(Object.assign(Object.assign({}, github.context.repo), { issue_number: issueNumber, body: options.body })); + }); +} +function createOrUpdateIssue(token, image, options) { + return __awaiter(this, void 0, void 0, function* () { + const client = new rest_1.Octokit({ auth: token }); + const trivyIssues = yield getTrivyIssues(client, image, options.labels); + if (trivyIssues.length > 0) { + core.info("Found existing issue. 
Updating existing issue."); + const existingIssue = trivyIssues[0]; + yield updateIssue(existingIssue.number, client, options); + return { + issueNumber: existingIssue.number, + htmlUrl: existingIssue.html_url, + }; + } + else { + core.info("Create new issue"); + const newIssue = yield createIssue(client, options); + return { + issueNumber: newIssue.number, + htmlUrl: newIssue.html_url, + }; + } }); } -exports.createIssue = createIssue; +exports.createOrUpdateIssue = createOrUpdateIssue; /***/ }), @@ -12290,3185 +11780,2639 @@ const addFilesAsync = (p, files) => { /***/ }), -/***/ 635: +/***/ 649: /***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; +module.exports = getLastPage -const EE = __webpack_require__(614) -const Stream = __webpack_require__(413) -const Yallist = __webpack_require__(612) -const SD = __webpack_require__(304).StringDecoder +const getPage = __webpack_require__(265) -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') +function getLastPage (octokit, link, headers) { + return getPage(octokit, link, 'last', headers) +} -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. -const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' +/***/ }), -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 +/***/ 650: +/***/ (function(module, __unusedexports, __webpack_require__) { -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) +module.exports = getUserAgentNode -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = new Yallist() - this.buffer = new Yallist() - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[DECODER] = this[ENCODING] ? 
new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false - } +const osName = __webpack_require__(2) - get bufferLength () { return this[BUFFERLENGTH] } +function getUserAgentNode () { + try { + return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})` + } catch (error) { + if (/wmic os get Caption/.test(error.message)) { + return 'Windows ' + } - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') + throw error + } +} - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } +/***/ }), - this[ENCODING] = enc - } +/***/ 654: +/***/ (function(module) { - setEncoding (enc) { - this.encoding = enc - } +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] - get objectMode () { return this[OBJECTMODE] } - set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ } +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) +} - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' +/***/ }), - if (!encoding) - encoding = 'utf8' +/***/ 656: +/***/ (function(module, __unusedexports, __webpack_require__) { - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! 
- // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } +"use strict"; - // this ensures at this point that the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!this.objectMode && !chunk.length) { - const ret = this.flowing - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - cb() - return ret - } - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && !this[OBJECTMODE] && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) - } +// tar -x +const hlo = __webpack_require__(891) +const Unpack = __webpack_require__(63) +const fs = __webpack_require__(747) +const fsm = __webpack_require__(827) +const path = __webpack_require__(622) - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) +const x = module.exports = (opt_, files, cb) => { + if (typeof opt_ === 'function') + cb = opt_, files = null, opt_ = {} + else if (Array.isArray(opt_)) + files = opt_, opt_ = {} - try { - return this.flowing - ? (this.emit('data', chunk), this.flowing) - : (this[BUFFERPUSH](chunk), false) - } finally { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - cb() - } - } + if (typeof files === 'function') + cb = files, files = null - read (n) { - if (this[DESTROYED]) - return null + if (!files) + files = [] + else + files = Array.from(files) - try { - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) - return null + const opt = hlo(opt_) - if (this[OBJECTMODE]) - n = null + if (opt.sync && typeof cb === 'function') + throw new TypeError('callback not supported for sync tar functions') - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = new Yallist([ - Array.from(this.buffer).join('') - ]) - else - this.buffer = new Yallist([ - Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH]) - ]) - } + if (!opt.file && typeof cb === 'function') + throw new TypeError('callback only supported with file option') - return this[READ](n || null, this.buffer.head.value) - } finally { - this[MAYBE_EMIT_END]() - } - } + if (files.length) + filesFilter(opt, files) - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer.head.value = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } + return opt.file && opt.sync ? extractFileSync(opt) + : opt.file ? extractFile(opt, cb) + : opt.sync ? extractSync(opt) + : extract(opt) +} - this.emit('data', chunk) +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true])) + const filter = opt.filter - if (!this.buffer.length && !this[EOF]) - this.emit('drain') + const mapHas = (file, r) => { + const root = r || path.parse(file).root || '.' + const ret = file === root ? false + : map.has(file) ? 
map.get(file) + : mapHas(path.dirname(file), root) - return chunk + map.set(file, ret) + return ret } - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false + opt.filter = filter + ? (file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, '')) + : file => mapHas(file.replace(/\/+$/, '')) +} - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. - if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } +const extractFileSync = opt => { + const u = new Unpack.Sync(opt) - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return + const file = opt.file + let threw = true + let fd + const stat = fs.statSync(file) + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16*1024*1024 + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size + }) + stream.pipe(u) +} - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() - else - this.emit('drain') - } +const extractFile = (opt, cb) => { + const u = new Unpack(opt) + const readSize = opt.maxReadSize || 16*1024*1024 - resume () { - return this[RESUME]() - } + const file = opt.file + const p = new Promise((resolve, reject) => { + u.on('error', reject) + u.on('close', resolve) - pause () { - this[FLOWING] = false - this[PAUSED] = true - } + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + fs.stat(file, (er, stat) => { + if (er) + reject(er) + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size + }) + stream.on('error', reject) + stream.pipe(u) + } + }) + }) + return cb ? p.then(cb, cb) : p +} - get destroyed () { - return this[DESTROYED] - } +const extractSync = opt => { + return new Unpack.Sync(opt) +} - get flowing () { - return this[FLOWING] - } +const extract = opt => { + return new Unpack(opt) +} - get paused () { - return this[PAUSED] - } - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - return this.buffer.push(chunk) - } +/***/ }), - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer.head.value.length - } - return this.buffer.shift() - } +/***/ 662: +/***/ (function(module, __unusedexports, __webpack_require__) { - [FLUSH] () { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) +"use strict"; - if (!this.buffer.length && !this[EOF]) - this.emit('drain') - } +const types = __webpack_require__(554) +const MiniPass = __webpack_require__(720) - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false - } +const SLURP = Symbol('slurp') +module.exports = class ReadEntry extends MiniPass { + constructor (header, ex, gex) { + super() + // read entries always start life paused. 
this is to avoid the + // situation where Minipass's auto-ending empty streams results + // in an entry ending before we're ready for it. + this.pause() + this.extended = ex + this.globalExtended = gex + this.header = header + this.startBlockSize = 512 * Math.ceil(header.size / 512) + this.blockRemain = this.startBlockSize + this.remain = header.size + this.type = header.type + this.meta = false + this.ignore = false + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break - pipe (dest, opts) { - if (this[DESTROYED]) - return + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true + break - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === process.stdout || dest === process.stderr) - opts.end = false - else - opts.end = opts.end !== false + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true + } - const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } - this.pipes.push(p) + this.path = header.path + this.mode = header.mode + if (this.mode) + this.mode = this.mode & 0o7777 + this.uid = header.uid + this.gid = header.gid + this.uname = header.uname + this.gname = header.gname + this.size = header.size + this.mtime = header.mtime + this.atime = header.atime + this.ctime = header.ctime + this.linkpath = header.linkpath + this.uname = header.uname + this.gname = header.gname - dest.on('drain', p.ondrain) - this[RESUME]() - // piping an ended stream ends immediately - if (ended && p.opts.end) - p.dest.end() - return dest + if (ex) this[SLURP](ex) + if (gex) this[SLURP](gex, true) } - addListener (ev, fn) { - return this.on(ev, fn) - } + write (data) { + const writeLen = data.length + if (writeLen > this.blockRemain) + throw new Error('writing more to entry than is appropriate') - on (ev, fn) { - try { - return super.on(ev, fn) - } finally { - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } - } - } + const r = this.remain + const br = this.blockRemain + this.remain = Math.max(0, r - writeLen) + this.blockRemain = Math.max(0, br - writeLen) + if (this.ignore) + return true - get emittedEnd () { - return this[EMITTED_END] + if (r >= writeLen) + return super.write(data) + + // r < writeLen + return super.write(data.slice(0, r)) } - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false + [SLURP] (ex, global) { + for (let k in ex) { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. 
+ if (ex[k] !== null && ex[k] !== undefined && + !(global && k === 'path')) + this[k] = ex[k] } } +} - emit (ev, data) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - if (!data) - return - - if (this.pipes.length) - this.pipes.forEach(p => - p.dest.write(data) === false && this.pause()) - } else if (ev === 'end') { - // only actual end gets this treatment - if (this[EMITTED_END] === true) - return - this[EMITTED_END] = true - this.readable = false +/***/ }), - if (this[DECODER]) { - data = this[DECODER].end() - if (data) { - this.pipes.forEach(p => p.dest.write(data)) - super.emit('data', data) - } - } +/***/ 669: +/***/ (function(module) { - this.pipes.forEach(p => { - p.dest.removeListener('drain', p.ondrain) - if (p.opts.end) - p.dest.end() - }) - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - } +module.exports = require("util"); - // TODO: replace with a spread operator when Node v4 support drops - const args = new Array(arguments.length) - args[0] = ev - args[1] = data - if (arguments.length > 2) { - for (let i = 2; i < arguments.length; i++) { - args[i] = arguments[i] - } - } +/***/ }), - try { - return super.emit.apply(this, args) - } finally { - if (!isEndish(ev)) - this[MAYBE_EMIT_END]() - else - this.removeAllListeners(ev) - } - } +/***/ 674: +/***/ (function(module, __unusedexports, __webpack_require__) { - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) - } +module.exports = authenticate; - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength)) - } +const { Deprecation } = __webpack_require__(692); +const once = __webpack_require__(969); - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('end', () => resolve()) - this.on('error', er => reject(er)) - }) - } +const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) +function authenticate(state, options) { + deprecateAuthenticate( + state.octokit.log, + new Deprecation( + '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.' 
+ ) + ); - if (this[EOF]) - return Promise.resolve({ done: true }) + if (!options) { + state.auth = false; + return; + } - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) + switch (options.type) { + case "basic": + if (!options.username || !options.password) { + throw new Error( + "Basic authentication requires both a username and password to be set" + ); } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) + break; + + case "oauth": + if (!options.token && !(options.key && options.secret)) { + throw new Error( + "OAuth2 authentication requires a token or key & secret to be set" + ); } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } + break; - return { next } - } + case "token": + case "app": + if (!options.token) { + throw new Error("Token authentication requires a token to be set"); + } + break; - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } - } - return { next } + default: + throw new Error( + "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'" + ); } - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this - } - - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer = new Yallist() - this[BUFFERLENGTH] = 0 + state.auth = options; +} - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) +/***/ }), - return this - } +/***/ 675: +/***/ (function(module) { - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) - } +module.exports = function btoa(str) { + return new Buffer(str).toString('base64') } /***/ }), -/***/ 649: -/***/ (function(module, __unusedexports, __webpack_require__) { +/***/ 692: +/***/ (function(__unusedmodule, exports) { -module.exports = getLastPage +"use strict"; -const getPage = __webpack_require__(265) -function getLastPage (octokit, link, headers) { - return getPage(octokit, link, 'last', headers) -} +Object.defineProperty(exports, '__esModule', { value: true }); +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) -/***/ }), + /* istanbul ignore next */ -/***/ 650: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = getUserAgentNode - -const osName = __webpack_require__(2) - -function getUserAgentNode () { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})` - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return 'Windows ' + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } 
- throw error + this.name = 'Deprecation'; } + } +exports.Deprecation = Deprecation; + /***/ }), -/***/ 654: +/***/ 697: /***/ (function(module) { -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. -// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] +"use strict"; -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. - // see #21 - // 'SIGPROF' - ) -} +module.exports = (promise, onFinally) => { + onFinally = onFinally || (() => {}); -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) -} + return promise.then( + val => new Promise(resolve => { + resolve(onFinally()); + }).then(() => val), + err => new Promise(resolve => { + resolve(onFinally()); + }).then(() => { + throw err; + }) + ); +}; /***/ }), -/***/ 656: +/***/ 720: /***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; +const EE = __webpack_require__(614) +const Stream = __webpack_require__(413) +const Yallist = __webpack_require__(612) +const SD = __webpack_require__(304).StringDecoder -// tar -x -const hlo = __webpack_require__(891) -const Unpack = __webpack_require__(63) -const fs = __webpack_require__(747) -const fsm = __webpack_require__(827) -const path = __webpack_require__(622) +const EOF = Symbol('EOF') +const MAYBE_EMIT_END = Symbol('maybeEmitEnd') +const EMITTED_END = Symbol('emittedEnd') +const EMITTING_END = Symbol('emittingEnd') +const CLOSED = Symbol('closed') +const READ = Symbol('read') +const FLUSH = Symbol('flush') +const FLUSHCHUNK = Symbol('flushChunk') +const ENCODING = Symbol('encoding') +const DECODER = Symbol('decoder') +const FLOWING = Symbol('flowing') +const PAUSED = Symbol('paused') +const RESUME = Symbol('resume') +const BUFFERLENGTH = Symbol('bufferLength') +const BUFFERPUSH = Symbol('bufferPush') +const BUFFERSHIFT = Symbol('bufferShift') +const OBJECTMODE = Symbol('objectMode') +const DESTROYED = Symbol('destroyed') -const x = module.exports = (opt_, files, cb) => { - if (typeof opt_ === 'function') - cb = opt_, files = null, opt_ = {} - else if (Array.isArray(opt_)) - files = opt_, opt_ = {} +// TODO remove when Node v8 support drops +const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' +const ASYNCITERATOR = doIter && Symbol.asyncIterator + || Symbol('asyncIterator not implemented') +const ITERATOR = doIter && Symbol.iterator + || Symbol('iterator not implemented') - if (typeof files === 'function') - cb = files, files = null +// events that mean 'the stream is over' +// these are treated specially, and re-emitted +// if they are listened for after 
emitting. +const isEndish = ev => + ev === 'end' || + ev === 'finish' || + ev === 'prefinish' - if (!files) - files = [] - else - files = Array.from(files) +const isArrayBuffer = b => b instanceof ArrayBuffer || + typeof b === 'object' && + b.constructor && + b.constructor.name === 'ArrayBuffer' && + b.byteLength >= 0 - const opt = hlo(opt_) +const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) - if (opt.sync && typeof cb === 'function') - throw new TypeError('callback not supported for sync tar functions') +module.exports = class Minipass extends Stream { + constructor (options) { + super() + this[FLOWING] = false + // whether we're explicitly paused + this[PAUSED] = false + this.pipes = new Yallist() + this.buffer = new Yallist() + this[OBJECTMODE] = options && options.objectMode || false + if (this[OBJECTMODE]) + this[ENCODING] = null + else + this[ENCODING] = options && options.encoding || null + if (this[ENCODING] === 'buffer') + this[ENCODING] = null + this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null + this[EOF] = false + this[EMITTED_END] = false + this[EMITTING_END] = false + this[CLOSED] = false + this.writable = true + this.readable = true + this[BUFFERLENGTH] = 0 + this[DESTROYED] = false + } - if (!opt.file && typeof cb === 'function') - throw new TypeError('callback only supported with file option') + get bufferLength () { return this[BUFFERLENGTH] } - if (files.length) - filesFilter(opt, files) + get encoding () { return this[ENCODING] } + set encoding (enc) { + if (this[OBJECTMODE]) + throw new Error('cannot set encoding in objectMode') - return opt.file && opt.sync ? extractFileSync(opt) - : opt.file ? extractFile(opt, cb) - : opt.sync ? extractSync(opt) - : extract(opt) -} + if (this[ENCODING] && enc !== this[ENCODING] && + (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) + throw new Error('cannot change encoding') -// construct a filter that limits the file entries listed -// include child entries if a dir is included -const filesFilter = (opt, files) => { - const map = new Map(files.map(f => [f.replace(/\/+$/, ''), true])) - const filter = opt.filter + if (this[ENCODING] !== enc) { + this[DECODER] = enc ? new SD(enc) : null + if (this.buffer.length) + this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) + } - const mapHas = (file, r) => { - const root = r || path.parse(file).root || '.' - const ret = file === root ? false - : map.has(file) ? map.get(file) - : mapHas(path.dirname(file), root) + this[ENCODING] = enc + } - map.set(file, ret) - return ret + setEncoding (enc) { + this.encoding = enc } - opt.filter = filter - ? 
(file, entry) => filter(file, entry) && mapHas(file.replace(/\/+$/, '')) - : file => mapHas(file.replace(/\/+$/, '')) -} + get objectMode () { return this[OBJECTMODE] } + set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ } -const extractFileSync = opt => { - const u = new Unpack.Sync(opt) + write (chunk, encoding, cb) { + if (this[EOF]) + throw new Error('write after end') - const file = opt.file - let threw = true - let fd - const stat = fs.statSync(file) - // This trades a zero-byte read() syscall for a stat - // However, it will usually result in less memory allocation - const readSize = opt.maxReadSize || 16*1024*1024 - const stream = new fsm.ReadStreamSync(file, { - readSize: readSize, - size: stat.size - }) - stream.pipe(u) -} + if (this[DESTROYED]) { + this.emit('error', Object.assign( + new Error('Cannot call write after a stream was destroyed'), + { code: 'ERR_STREAM_DESTROYED' } + )) + return true + } -const extractFile = (opt, cb) => { - const u = new Unpack(opt) - const readSize = opt.maxReadSize || 16*1024*1024 + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' - const file = opt.file - const p = new Promise((resolve, reject) => { - u.on('error', reject) - u.on('close', resolve) + if (!encoding) + encoding = 'utf8' - // This trades a zero-byte read() syscall for a stat - // However, it will usually result in less memory allocation - fs.stat(file, (er, stat) => { - if (er) - reject(er) - else { - const stream = new fsm.ReadStream(file, { - readSize: readSize, - size: stat.size - }) - stream.on('error', reject) - stream.pipe(u) - } - }) - }) - return cb ? p.then(cb, cb) : p -} - -const extractSync = opt => { - return new Unpack.Sync(opt) -} - -const extract = opt => { - return new Unpack(opt) -} - - -/***/ }), - -/***/ 662: -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const types = __webpack_require__(554) -const MiniPass = __webpack_require__(635) - -const SLURP = Symbol('slurp') -module.exports = class ReadEntry extends MiniPass { - constructor (header, ex, gex) { - super() - // read entries always start life paused. this is to avoid the - // situation where Minipass's auto-ending empty streams results - // in an entry ending before we're ready for it. - this.pause() - this.extended = ex - this.globalExtended = gex - this.header = header - this.startBlockSize = 512 * Math.ceil(header.size / 512) - this.blockRemain = this.startBlockSize - this.remain = header.size - this.type = header.type - this.meta = false - this.ignore = false - switch (this.type) { - case 'File': - case 'OldFile': - case 'Link': - case 'SymbolicLink': - case 'CharacterDevice': - case 'BlockDevice': - case 'Directory': - case 'FIFO': - case 'ContiguousFile': - case 'GNUDumpDir': - break + // convert array buffers and typed array views into buffers + // at some point in the future, we may want to do the opposite! 
+ // leave strings and buffers as-is + // anything else switches us into object mode + if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { + if (isArrayBufferView(chunk)) + chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) + else if (isArrayBuffer(chunk)) + chunk = Buffer.from(chunk) + else if (typeof chunk !== 'string') + // use the setter so we throw if we have encoding set + this.objectMode = true + } - case 'NextFileHasLongLinkpath': - case 'NextFileHasLongPath': - case 'OldGnuLongPath': - case 'GlobalExtendedHeader': - case 'ExtendedHeader': - case 'OldExtendedHeader': - this.meta = true - break + // this ensures at this point that the chunk is a buffer or string + // don't buffer it up or send it to the decoder + if (!this.objectMode && !chunk.length) { + const ret = this.flowing + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + cb() + return ret + } - // NOTE: gnutar and bsdtar treat unrecognized types as 'File' - // it may be worth doing the same, but with a warning. - default: - this.ignore = true + // fast-path writing strings of same encoding to a stream with + // an empty buffer, skipping the buffer/decoder dance + if (typeof chunk === 'string' && !this[OBJECTMODE] && + // unless it is a string already ready for us to use + !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { + chunk = Buffer.from(chunk, encoding) } - this.path = header.path - this.mode = header.mode - if (this.mode) - this.mode = this.mode & 0o7777 - this.uid = header.uid - this.gid = header.gid - this.uname = header.uname - this.gname = header.gname - this.size = header.size - this.mtime = header.mtime - this.atime = header.atime - this.ctime = header.ctime - this.linkpath = header.linkpath - this.uname = header.uname - this.gname = header.gname + if (Buffer.isBuffer(chunk) && this[ENCODING]) + chunk = this[DECODER].write(chunk) - if (ex) this[SLURP](ex) - if (gex) this[SLURP](gex, true) + try { + return this.flowing + ? (this.emit('data', chunk), this.flowing) + : (this[BUFFERPUSH](chunk), false) + } finally { + if (this[BUFFERLENGTH] !== 0) + this.emit('readable') + if (cb) + cb() + } } - write (data) { - const writeLen = data.length - if (writeLen > this.blockRemain) - throw new Error('writing more to entry than is appropriate') + read (n) { + if (this[DESTROYED]) + return null - const r = this.remain - const br = this.blockRemain - this.remain = Math.max(0, r - writeLen) - this.blockRemain = Math.max(0, br - writeLen) - if (this.ignore) - return true + try { + if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) + return null - if (r >= writeLen) - return super.write(data) + if (this[OBJECTMODE]) + n = null - // r < writeLen - return super.write(data.slice(0, r)) - } + if (this.buffer.length > 1 && !this[OBJECTMODE]) { + if (this.encoding) + this.buffer = new Yallist([ + Array.from(this.buffer).join('') + ]) + else + this.buffer = new Yallist([ + Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH]) + ]) + } - [SLURP] (ex, global) { - for (let k in ex) { - // we slurp in everything except for the path attribute in - // a global extended header, because that's weird. 
- if (ex[k] !== null && ex[k] !== undefined && - !(global && k === 'path')) - this[k] = ex[k] + return this[READ](n || null, this.buffer.head.value) + } finally { + this[MAYBE_EMIT_END]() } } -} + [READ] (n, chunk) { + if (n === chunk.length || n === null) + this[BUFFERSHIFT]() + else { + this.buffer.head.value = chunk.slice(n) + chunk = chunk.slice(0, n) + this[BUFFERLENGTH] -= n + } -/***/ }), - -/***/ 669: -/***/ (function(module) { - -module.exports = require("util"); - -/***/ }), - -/***/ 674: -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = authenticate; + this.emit('data', chunk) -const { Deprecation } = __webpack_require__(692); -const once = __webpack_require__(969); + if (!this.buffer.length && !this[EOF]) + this.emit('drain') -const deprecateAuthenticate = once((log, deprecation) => log.warn(deprecation)); + return chunk + } -function authenticate(state, options) { - deprecateAuthenticate( - state.octokit.log, - new Deprecation( - '[@octokit/rest] octokit.authenticate() is deprecated. Use "auth" constructor option instead.' - ) - ); + end (chunk, encoding, cb) { + if (typeof chunk === 'function') + cb = chunk, chunk = null + if (typeof encoding === 'function') + cb = encoding, encoding = 'utf8' + if (chunk) + this.write(chunk, encoding) + if (cb) + this.once('end', cb) + this[EOF] = true + this.writable = false - if (!options) { - state.auth = false; - return; + // if we haven't written anything, then go ahead and emit, + // even if we're not reading. + // we'll re-emit if a new 'end' listener is added anyway. + // This makes MP more suitable to write-only use cases. + if (this.flowing || !this[PAUSED]) + this[MAYBE_EMIT_END]() + return this } - switch (options.type) { - case "basic": - if (!options.username || !options.password) { - throw new Error( - "Basic authentication requires both a username and password to be set" - ); - } - break; - - case "oauth": - if (!options.token && !(options.key && options.secret)) { - throw new Error( - "OAuth2 authentication requires a token or key & secret to be set" - ); - } - break; + // don't let the internal resume be overwritten + [RESUME] () { + if (this[DESTROYED]) + return - case "token": - case "app": - if (!options.token) { - throw new Error("Token authentication requires a token to be set"); - } - break; - - default: - throw new Error( - "Invalid authentication type, must be 'basic', 'oauth', 'token' or 'app'" - ); - } - - state.auth = options; -} - - -/***/ }), - -/***/ 675: -/***/ (function(module) { - -module.exports = function btoa(str) { - return new Buffer(str).toString('base64') -} - - -/***/ }), - -/***/ 692: -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = 'Deprecation'; - } - -} - -exports.Deprecation = Deprecation; - - -/***/ }), - -/***/ 697: -/***/ (function(module) { - -"use strict"; - -module.exports = (promise, onFinally) => { - onFinally = onFinally || (() => {}); - - return promise.then( - val => new Promise(resolve => { - resolve(onFinally()); - }).then(() => val), - err => new Promise(resolve => { - resolve(onFinally()); - }).then(() => { - throw err; - }) - ); -}; - - -/***/ }), - -/***/ 726: -/***/ (function(module, 
__unusedexports, __webpack_require__) { - -"use strict"; - - -// A readable tar stream creator -// Technically, this is a transform stream that you write paths into, -// and tar format comes out of. -// The `add()` method is like `write()` but returns this, -// and end() return `this` as well, so you can -// do `new Pack(opt).add('files').add('dir').end().pipe(output) -// You could also do something like: -// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) - -class PackJob { - constructor (path, absolute) { - this.path = path || './' - this.absolute = absolute - this.entry = null - this.stat = null - this.readdir = null - this.pending = false - this.ignore = false - this.piped = false - } -} - -const MiniPass = __webpack_require__(635) -const zlib = __webpack_require__(133) -const ReadEntry = __webpack_require__(662) -const WriteEntry = __webpack_require__(303) -const WriteEntrySync = WriteEntry.Sync -const WriteEntryTar = WriteEntry.Tar -const Yallist = __webpack_require__(612) -const EOF = Buffer.alloc(1024) -const ONSTAT = Symbol('onStat') -const ENDED = Symbol('ended') -const QUEUE = Symbol('queue') -const CURRENT = Symbol('current') -const PROCESS = Symbol('process') -const PROCESSING = Symbol('processing') -const PROCESSJOB = Symbol('processJob') -const JOBS = Symbol('jobs') -const JOBDONE = Symbol('jobDone') -const ADDFSENTRY = Symbol('addFSEntry') -const ADDTARENTRY = Symbol('addTarEntry') -const STAT = Symbol('stat') -const READDIR = Symbol('readdir') -const ONREADDIR = Symbol('onreaddir') -const PIPE = Symbol('pipe') -const ENTRY = Symbol('entry') -const ENTRYOPT = Symbol('entryOpt') -const WRITEENTRYCLASS = Symbol('writeEntryClass') -const WRITE = Symbol('write') -const ONDRAIN = Symbol('ondrain') - -const fs = __webpack_require__(747) -const path = __webpack_require__(622) -const warner = __webpack_require__(796) - -const Pack = warner(class Pack extends MiniPass { - constructor (opt) { - super(opt) - opt = opt || Object.create(null) - this.opt = opt - this.file = opt.file || '' - this.cwd = opt.cwd || process.cwd() - this.maxReadSize = opt.maxReadSize - this.preservePaths = !!opt.preservePaths - this.strict = !!opt.strict - this.noPax = !!opt.noPax - this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '') - this.linkCache = opt.linkCache || new Map() - this.statCache = opt.statCache || new Map() - this.readdirCache = opt.readdirCache || new Map() - - this[WRITEENTRYCLASS] = WriteEntry - if (typeof opt.onwarn === 'function') - this.on('warn', opt.onwarn) - - this.portable = !!opt.portable - this.zip = null - if (opt.gzip) { - if (typeof opt.gzip !== 'object') - opt.gzip = {} - if (this.portable) - opt.gzip.portable = true - this.zip = new zlib.Gzip(opt.gzip) - this.zip.on('data', chunk => super.write(chunk)) - this.zip.on('end', _ => super.end()) - this.zip.on('drain', _ => this[ONDRAIN]()) - this.on('resume', _ => this.zip.resume()) - } else - this.on('drain', this[ONDRAIN]) - - this.noDirRecurse = !!opt.noDirRecurse - this.follow = !!opt.follow - this.noMtime = !!opt.noMtime - this.mtime = opt.mtime || null - - this.filter = typeof opt.filter === 'function' ? 
opt.filter : _ => true - - this[QUEUE] = new Yallist - this[JOBS] = 0 - this.jobs = +opt.jobs || 4 - this[PROCESSING] = false - this[ENDED] = false - } - - [WRITE] (chunk) { - return super.write(chunk) - } - - add (path) { - this.write(path) - return this - } - - end (path) { - if (path) - this.write(path) - this[ENDED] = true - this[PROCESS]() - return this - } - - write (path) { - if (this[ENDED]) - throw new Error('write after end') - - if (path instanceof ReadEntry) - this[ADDTARENTRY](path) - else - this[ADDFSENTRY](path) - return this.flowing - } - - [ADDTARENTRY] (p) { - const absolute = path.resolve(this.cwd, p.path) - if (this.prefix) - p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '') - - // in this case, we don't have to wait for the stat - if (!this.filter(p.path, p)) - p.resume() - else { - const job = new PackJob(p.path, absolute, false) - job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) - job.entry.on('end', _ => this[JOBDONE](job)) - this[JOBS] += 1 - this[QUEUE].push(job) - } - - this[PROCESS]() - } - - [ADDFSENTRY] (p) { - const absolute = path.resolve(this.cwd, p) - if (this.prefix) - p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '') - - this[QUEUE].push(new PackJob(p, absolute)) - this[PROCESS]() - } - - [STAT] (job) { - job.pending = true - this[JOBS] += 1 - const stat = this.follow ? 'stat' : 'lstat' - fs[stat](job.absolute, (er, stat) => { - job.pending = false - this[JOBS] -= 1 - if (er) - this.emit('error', er) - else - this[ONSTAT](job, stat) - }) - } - - [ONSTAT] (job, stat) { - this.statCache.set(job.absolute, stat) - job.stat = stat - - // now we have the stat, we can filter it. - if (!this.filter(job.path, stat)) - job.ignore = true - - this[PROCESS]() - } - - [READDIR] (job) { - job.pending = true - this[JOBS] += 1 - fs.readdir(job.absolute, (er, entries) => { - job.pending = false - this[JOBS] -= 1 - if (er) - return this.emit('error', er) - this[ONREADDIR](job, entries) - }) - } - - [ONREADDIR] (job, entries) { - this.readdirCache.set(job.absolute, entries) - job.readdir = entries - this[PROCESS]() - } - - [PROCESS] () { - if (this[PROCESSING]) - return - - this[PROCESSING] = true - for (let w = this[QUEUE].head; - w !== null && this[JOBS] < this.jobs; - w = w.next) { - this[PROCESSJOB](w.value) - if (w.value.ignore) { - const p = w.next - this[QUEUE].removeNode(w) - w.next = p - } - } - - this[PROCESSING] = false - - if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { - if (this.zip) - this.zip.end(EOF) - else { - super.write(EOF) - super.end() - } - } - } - - get [CURRENT] () { - return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value - } - - [JOBDONE] (job) { - this[QUEUE].shift() - this[JOBS] -= 1 - this[PROCESS]() - } - - [PROCESSJOB] (job) { - if (job.pending) - return - - if (job.entry) { - if (job === this[CURRENT] && !job.piped) - this[PIPE](job) - return - } - - if (!job.stat) { - if (this.statCache.has(job.absolute)) - this[ONSTAT](job, this.statCache.get(job.absolute)) - else - this[STAT](job) - } - if (!job.stat) - return - - // filtered out! 
- if (job.ignore) - return - - if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { - if (this.readdirCache.has(job.absolute)) - this[ONREADDIR](job, this.readdirCache.get(job.absolute)) - else - this[READDIR](job) - if (!job.readdir) - return - } - - // we know it doesn't have an entry, because that got checked above - job.entry = this[ENTRY](job) - if (!job.entry) { - job.ignore = true - return - } - - if (job === this[CURRENT] && !job.piped) - this[PIPE](job) - } - - [ENTRYOPT] (job) { - return { - onwarn: (code, msg, data) => this.warn(code, msg, data), - noPax: this.noPax, - cwd: this.cwd, - absolute: job.absolute, - preservePaths: this.preservePaths, - maxReadSize: this.maxReadSize, - strict: this.strict, - portable: this.portable, - linkCache: this.linkCache, - statCache: this.statCache, - noMtime: this.noMtime, - mtime: this.mtime - } - } - - [ENTRY] (job) { - this[JOBS] += 1 - try { - return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) - .on('end', () => this[JOBDONE](job)) - .on('error', er => this.emit('error', er)) - } catch (er) { - this.emit('error', er) - } - } - - [ONDRAIN] () { - if (this[CURRENT] && this[CURRENT].entry) - this[CURRENT].entry.resume() - } - - // like .pipe() but using super, because our write() is special - [PIPE] (job) { - job.piped = true - - if (job.readdir) - job.readdir.forEach(entry => { - const p = this.prefix ? - job.path.slice(this.prefix.length + 1) || './' - : job.path - - const base = p === './' ? '' : p.replace(/\/*$/, '/') - this[ADDFSENTRY](base + entry) - }) - - const source = job.entry - const zip = this.zip - - if (zip) - source.on('data', chunk => { - if (!zip.write(chunk)) - source.pause() - }) - else - source.on('data', chunk => { - if (!super.write(chunk)) - source.pause() - }) - } - - pause () { - if (this.zip) - this.zip.pause() - return super.pause() - } -}) - -class PackSync extends Pack { - constructor (opt) { - super(opt) - this[WRITEENTRYCLASS] = WriteEntrySync - } - - // pause/resume are no-ops in sync streams. - pause () {} - resume () {} - - [STAT] (job) { - const stat = this.follow ? 'statSync' : 'lstatSync' - this[ONSTAT](job, fs[stat](job.absolute)) - } - - [READDIR] (job, stat) { - this[ONREADDIR](job, fs.readdirSync(job.absolute)) - } - - // gotta get it all in this tick - [PIPE] (job) { - const source = job.entry - const zip = this.zip - - if (job.readdir) - job.readdir.forEach(entry => { - const p = this.prefix ? - job.path.slice(this.prefix.length + 1) || './' - : job.path - - const base = p === './' ? '' : p.replace(/\/*$/, '/') - this[ADDFSENTRY](base + entry) - }) - - if (zip) - source.on('data', chunk => { - zip.write(chunk) - }) - else - source.on('data', chunk => { - super[WRITE](chunk) - }) - } -} - -Pack.Sync = PackSync - -module.exports = Pack - - -/***/ }), - -/***/ 737: -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs_1 = __importDefault(__webpack_require__(747)); -const zlib_1 = __importDefault(__webpack_require__(761)); -const tar_1 = __importDefault(__webpack_require__(885)); -const rest_1 = __webpack_require__(0); -const node_fetch_1 = __importDefault(__webpack_require__(454)); -const child_process_1 = __webpack_require__(129); -const utils_1 = __webpack_require__(163); -class Downloader { - constructor() { - this.githubClient = new rest_1.Octokit(); - } - download(version, trivyCmdDir = __dirname) { - return __awaiter(this, void 0, void 0, function* () { - const os = this.checkPlatform(process.platform); - const downloadUrl = yield this.getDownloadUrl(version, os); - console.debug(`Download URL: ${downloadUrl}`); - const trivyCmdBaseDir = process.env.GITHUB_WORKSPACE || trivyCmdDir; - const trivyCmdPath = yield this.downloadTrivyCmd(downloadUrl, trivyCmdBaseDir); - console.debug(`Trivy Command Path: ${trivyCmdPath}`); - return trivyCmdPath; - }); - } - checkPlatform(platform) { - switch (platform) { - case 'linux': - return 'Linux'; - case 'darwin': - return 'macOS'; - default: - const errorMsg = `Sorry, ${platform} is not supported. - Trivy support Linux, MacOS, FreeBSD and OpenBSD.`; - throw new Error(errorMsg); - } - } - getDownloadUrl(version, os) { - return __awaiter(this, void 0, void 0, function* () { - try { - const response = yield this.getAssets(version); - const filename = `trivy_${response.version}_${os}-64bit.tar.gz`; - for (const asset of response.assets) { - if (asset.name === filename) { - return asset.browser_download_url; - } - } - throw new Error(); - } - catch (error) { - const errorMsg = ` - Cloud not be found a Trivy asset that you specified. 
- Version: ${version} - OS: ${os} - `; - throw new Error(errorMsg); - } - }); - } - getAssets(version) { - return __awaiter(this, void 0, void 0, function* () { - let response; - if (version === 'latest') { - response = yield this.githubClient.repos.getLatestRelease(Object.assign({}, Downloader.trivyRepository)); - version = response.data.tag_name.replace(/v/, ''); - } - else { - response = yield this.githubClient.repos.getReleaseByTag(Object.assign(Object.assign({}, Downloader.trivyRepository), { tag: `v${version}` })); - } - return { assets: response.data.assets, version }; - }); - } - downloadTrivyCmd(downloadUrl, savedPath = '.') { - return __awaiter(this, void 0, void 0, function* () { - const response = yield node_fetch_1.default(downloadUrl); - return new Promise((resolve, reject) => { - const gunzip = zlib_1.default.createGunzip(); - const extract = tar_1.default.extract({ C: savedPath }, ['trivy']); - response.body - .on('error', reject) - .pipe(gunzip) - .on('error', reject) - .pipe(extract) - .on('error', reject) - .on('finish', () => { - if (!this.trivyExists(savedPath)) { - reject('Failed to extract Trivy command file.'); - } - resolve(`${savedPath}/trivy`); - }); - }); - }); - } - trivyExists(targetDir) { - const trivyCmdPaths = fs_1.default - .readdirSync(targetDir) - .filter(f => f === 'trivy'); - return trivyCmdPaths.length === 1; - } -} -exports.Downloader = Downloader; -Downloader.trivyRepository = { - owner: 'aquasecurity', - repo: 'trivy', -}; -class Trivy { - scan(trivyPath, image, option) { - this.validateOption(option); - const args = [ - '--severity', - option.severity, - '--vuln-type', - option.vulnType, - '--format', - option.format, - '--quiet', - '--no-progress', - ]; - if (option.ignoreUnfixed) - args.push('--ignore-unfixed'); - args.push(image); - const result = child_process_1.spawnSync(trivyPath, args, { - encoding: 'utf-8', - }); - if (result.stdout && result.stdout.length > 0) { - const vulnerabilities = option.format === 'json' ? JSON.parse(result.stdout) : result.stdout; - if (vulnerabilities.length > 0) { - return vulnerabilities; - } - } - throw new Error(`Failed vulnerability scan using Trivy. - stdout: ${result.stdout} - stderr: ${result.stderr} - erorr: ${result.error} - `); - } - parse(vulnerabilities) { - let issueContent = ''; - for (const vuln of vulnerabilities) { - if (vuln.Vulnerabilities === null) - continue; - issueContent += `## ${vuln.Target}\n`; - let vulnTable = '|Title|Severity|CVE|Package Name|'; - vulnTable += 'Installed Version|Fixed Version|References|\n'; - vulnTable += '|:--:|:--:|:--:|:--:|:--:|:--:|:--|\n'; - for (const cve of vuln.Vulnerabilities) { - vulnTable += `|${cve.Title || 'N/A'}|${cve.Severity || 'N/A'}`; - vulnTable += `|${cve.VulnerabilityID || 'N/A'}|${cve.PkgName || 'N/A'}`; - vulnTable += `|${cve.InstalledVersion || 'N/A'}|${cve.FixedVersion || - 'N/A'}|`; - const references = cve.References; - if (!utils_1.isIterable(references)) - continue; - for (const reference of references) { - vulnTable += `${reference || 'N/A'}
`; - } - vulnTable.replace(/
$/, '|\n'); - } - issueContent += `${vulnTable}\n\n`; - } - return issueContent; - } - validateOption(option) { - this.validateSeverity(option.severity.split(',')); - this.validateVulnType(option.vulnType.split(',')); - } - validateSeverity(severities) { - const allowedSeverities = /UNKNOWN|LOW|MEDIUM|HIGH|CRITICAL/; - if (!validateArrayOption(allowedSeverities, severities)) { - throw new Error(`Trivy option error: ${severities.join(',')} is unknown severity. - Trivy supports UNKNOWN, LOW, MEDIUM, HIGH and CRITICAL.`); - } - return true; - } - validateVulnType(vulnTypes) { - const allowedVulnTypes = /os|library/; - if (!validateArrayOption(allowedVulnTypes, vulnTypes)) { - throw new Error(`Trivy option error: ${vulnTypes.join(',')} is unknown vuln-type. - Trivy supports os and library.`); - } - return true; - } -} -exports.Trivy = Trivy; -function validateArrayOption(allowedValue, options) { - for (const option of options) { - if (!allowedValue.test(option)) { - return false; - } - } - return true; -} + this[PAUSED] = false + this[FLOWING] = true + this.emit('resume') + if (this.buffer.length) + this[FLUSH]() + else if (this[EOF]) + this[MAYBE_EMIT_END]() + else + this.emit('drain') + } + resume () { + return this[RESUME]() + } -/***/ }), + pause () { + this[FLOWING] = false + this[PAUSED] = true + } -/***/ 742: -/***/ (function(module, __unusedexports, __webpack_require__) { + get destroyed () { + return this[DESTROYED] + } -var fs = __webpack_require__(747) -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = __webpack_require__(818) -} else { - core = __webpack_require__(197) -} + get flowing () { + return this[FLOWING] + } -module.exports = isexe -isexe.sync = sync + get paused () { + return this[PAUSED] + } -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} + [BUFFERPUSH] (chunk) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] += 1 + else + this[BUFFERLENGTH] += chunk.length + return this.buffer.push(chunk) } - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') + [BUFFERSHIFT] () { + if (this.buffer.length) { + if (this[OBJECTMODE]) + this[BUFFERLENGTH] -= 1 + else + this[BUFFERLENGTH] -= this.buffer.head.value.length } + return this.buffer.shift() + } - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) + [FLUSH] () { + do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) + + if (!this.buffer.length && !this[EOF]) + this.emit('drain') } - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false + [FLUSHCHUNK] (chunk) { + return chunk ? 
(this.emit('data', chunk), this.flowing) : false + } + + pipe (dest, opts) { + if (this[DESTROYED]) + return + + const ended = this[EMITTED_END] + opts = opts || {} + if (dest === process.stdout || dest === process.stderr) + opts.end = false + else + opts.end = opts.end !== false + + const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } + this.pipes.push(p) + + dest.on('drain', p.ondrain) + this[RESUME]() + // piping an ended stream ends immediately + if (ended && p.opts.end) + p.dest.end() + return dest + } + + addListener (ev, fn) { + return this.on(ev, fn) + } + + on (ev, fn) { + try { + return super.on(ev, fn) + } finally { + if (ev === 'data' && !this.pipes.length && !this.flowing) + this[RESUME]() + else if (isEndish(ev) && this[EMITTED_END]) { + super.emit(ev) + this.removeAllListeners(ev) } } - cb(er, is) - }) -} + } -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er + get emittedEnd () { + return this[EMITTED_END] + } + + [MAYBE_EMIT_END] () { + if (!this[EMITTING_END] && + !this[EMITTED_END] && + !this[DESTROYED] && + this.buffer.length === 0 && + this[EOF]) { + this[EMITTING_END] = true + this.emit('end') + this.emit('prefinish') + this.emit('finish') + if (this[CLOSED]) + this.emit('close') + this[EMITTING_END] = false } } -} + emit (ev, data) { + // error and close are only events allowed after calling destroy() + if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) + return + else if (ev === 'data') { + if (!data) + return -/***/ }), + if (this.pipes.length) + this.pipes.forEach(p => + p.dest.write(data) === false && this.pause()) + } else if (ev === 'end') { + // only actual end gets this treatment + if (this[EMITTED_END] === true) + return -/***/ 747: -/***/ (function(module) { + this[EMITTED_END] = true + this.readable = false -module.exports = require("fs"); + if (this[DECODER]) { + data = this[DECODER].end() + if (data) { + this.pipes.forEach(p => p.dest.write(data)) + super.emit('data', data) + } + } -/***/ }), + this.pipes.forEach(p => { + p.dest.removeListener('drain', p.ondrain) + if (p.opts.end) + p.dest.end() + }) + } else if (ev === 'close') { + this[CLOSED] = true + // don't emit close before 'end' and 'finish' + if (!this[EMITTED_END] && !this[DESTROYED]) + return + } -/***/ 753: -/***/ (function(__unusedmodule, exports, __webpack_require__) { + // TODO: replace with a spread operator when Node v4 support drops + const args = new Array(arguments.length) + args[0] = ev + args[1] = data + if (arguments.length > 2) { + for (let i = 2; i < arguments.length; i++) { + args[i] = arguments[i] + } + } -"use strict"; + try { + return super.emit.apply(this, args) + } finally { + if (!isEndish(ev)) + this[MAYBE_EMIT_END]() + else + this.removeAllListeners(ev) + } + } + // const all = await stream.collect() + collect () { + const buf = [] + if (!this[OBJECTMODE]) + buf.dataLength = 0 + // set the promise first, in case an error is raised + // by triggering the flow here. + const p = this.promise() + this.on('data', c => { + buf.push(c) + if (!this[OBJECTMODE]) + buf.dataLength += c.length + }) + return p.then(() => buf) + } -Object.defineProperty(exports, '__esModule', { value: true }); + // const data = await stream.concat() + concat () { + return this[OBJECTMODE] + ? 
Promise.reject(new Error('cannot concat in objectMode')) + : this.collect().then(buf => + this[OBJECTMODE] + ? Promise.reject(new Error('cannot concat in objectMode')) + : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength)) + } -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + // stream.promise().then(() => done, er => emitted error) + promise () { + return new Promise((resolve, reject) => { + this.on(DESTROYED, () => reject(new Error('stream destroyed'))) + this.on('end', () => resolve()) + this.on('error', er => reject(er)) + }) + } -var endpoint = __webpack_require__(385); -var universalUserAgent = __webpack_require__(526); -var isPlainObject = _interopDefault(__webpack_require__(548)); -var nodeFetch = _interopDefault(__webpack_require__(454)); -var requestError = __webpack_require__(463); + // for await (let chunk of stream) + [ASYNCITERATOR] () { + const next = () => { + const res = this.read() + if (res !== null) + return Promise.resolve({ done: false, value: res }) -const VERSION = "5.3.1"; + if (this[EOF]) + return Promise.resolve({ done: true }) -function getBufferResponse(response) { - return response.arrayBuffer(); -} + let resolve = null + let reject = null + const onerr = er => { + this.removeListener('data', ondata) + this.removeListener('end', onend) + reject(er) + } + const ondata = value => { + this.removeListener('error', onerr) + this.removeListener('end', onend) + this.pause() + resolve({ value: value, done: !!this[EOF] }) + } + const onend = () => { + this.removeListener('error', onerr) + this.removeListener('data', ondata) + resolve({ done: true }) + } + const ondestroy = () => onerr(new Error('stream destroyed')) + return new Promise((res, rej) => { + reject = rej + resolve = res + this.once(DESTROYED, ondestroy) + this.once('error', onerr) + this.once('end', onend) + this.once('data', ondata) + }) + } -function fetchWrapper(requestOptions) { - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); + return { next } } - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { - url = response.url; - status = response.status; - - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; + // for (let chunk of stream) + [ITERATOR] () { + const next = () => { + const value = this.read() + const done = value === null + return { value, done } } + return { next } + } - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requsets + destroy (er) { + if (this[DESTROYED]) { + if (er) + this.emit('error', er) + else + this.emit(DESTROYED) + return this + } + this[DESTROYED] = true - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } + // throw away all buffered data, it's never coming out + this.buffer = new Yallist() + this[BUFFERLENGTH] = 0 - throw new requestError.RequestError(response.statusText, status, { - headers, - request: requestOptions - }); - } + if (typeof this.close === 'function' && !this[CLOSED]) + this.close() - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - 
headers, - request: requestOptions - }); - } + if (er) + this.emit('error', er) + else // if no error to emit, still reject pending promises + this.emit(DESTROYED) - if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { - headers, - request: requestOptions - }); + return this + } - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array Fotmat + static isStream (s) { + return !!s && (s instanceof Minipass || s instanceof Stream || + s instanceof EE && ( + typeof s.pipe === 'function' || // readable + (typeof s.write === 'function' && typeof s.end === 'function') // writable + )) + } +} - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } - throw error; - }); - } +/***/ }), - const contentType = response.headers.get("content-type"); +/***/ 726: +/***/ (function(module, __unusedexports, __webpack_require__) { - if (/application\/json/.test(contentType)) { - return response.json(); - } +"use strict"; - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - return getBufferResponse(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; - } +// A readable tar stream creator +// Technically, this is a transform stream that you write paths into, +// and tar format comes out of. +// The `add()` method is like `write()` but returns this, +// and end() return `this` as well, so you can +// do `new Pack(opt).add('files').add('dir').end().pipe(output) +// You could also do something like: +// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) - throw new requestError.RequestError(error.message, 500, { - headers, - request: requestOptions - }); - }); +class PackJob { + constructor (path, absolute) { + this.path = path || './' + this.absolute = absolute + this.entry = null + this.stat = null + this.readdir = null + this.pending = false + this.ignore = false + this.piped = false + } } -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); - - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); +const MiniPass = __webpack_require__(720) +const zlib = __webpack_require__(133) +const ReadEntry = __webpack_require__(662) +const WriteEntry = __webpack_require__(303) +const WriteEntrySync = WriteEntry.Sync +const WriteEntryTar = WriteEntry.Tar +const Yallist = __webpack_require__(612) +const EOF = Buffer.alloc(1024) +const ONSTAT = Symbol('onStat') +const ENDED = Symbol('ended') +const QUEUE = Symbol('queue') +const CURRENT = Symbol('current') +const PROCESS = Symbol('process') +const PROCESSING = Symbol('processing') +const PROCESSJOB = Symbol('processJob') +const JOBS = Symbol('jobs') +const JOBDONE = Symbol('jobDone') +const ADDFSENTRY = Symbol('addFSEntry') +const ADDTARENTRY = Symbol('addTarEntry') +const STAT = Symbol('stat') +const READDIR = Symbol('readdir') +const ONREADDIR = Symbol('onreaddir') +const PIPE = Symbol('pipe') +const ENTRY = Symbol('entry') +const ENTRYOPT = Symbol('entryOpt') +const WRITEENTRYCLASS = Symbol('writeEntryClass') +const WRITE = Symbol('write') +const ONDRAIN = Symbol('ondrain') - if (!endpointOptions.request || 
!endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } +const fs = __webpack_require__(747) +const path = __webpack_require__(622) +const warner = __webpack_require__(796) - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; +const Pack = warner(class Pack extends MiniPass { + constructor (opt) { + super(opt) + opt = opt || Object.create(null) + this.opt = opt + this.file = opt.file || '' + this.cwd = opt.cwd || process.cwd() + this.maxReadSize = opt.maxReadSize + this.preservePaths = !!opt.preservePaths + this.strict = !!opt.strict + this.noPax = !!opt.noPax + this.prefix = (opt.prefix || '').replace(/(\\|\/)+$/, '') + this.linkCache = opt.linkCache || new Map() + this.statCache = opt.statCache || new Map() + this.readdirCache = opt.readdirCache || new Map() - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; + this[WRITEENTRYCLASS] = WriteEntry + if (typeof opt.onwarn === 'function') + this.on('warn', opt.onwarn) - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); -} + this.portable = !!opt.portable + this.zip = null + if (opt.gzip) { + if (typeof opt.gzip !== 'object') + opt.gzip = {} + if (this.portable) + opt.gzip.portable = true + this.zip = new zlib.Gzip(opt.gzip) + this.zip.on('data', chunk => super.write(chunk)) + this.zip.on('end', _ => super.end()) + this.zip.on('drain', _ => this[ONDRAIN]()) + this.on('resume', _ => this.zip.resume()) + } else + this.on('drain', this[ONDRAIN]) -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` - } -}); + this.noDirRecurse = !!opt.noDirRecurse + this.follow = !!opt.follow + this.noMtime = !!opt.noMtime + this.mtime = opt.mtime || null -exports.request = request; -//# sourceMappingURL=index.js.map + this.filter = typeof opt.filter === 'function' ? 
opt.filter : _ => true + this[QUEUE] = new Yallist + this[JOBS] = 0 + this.jobs = +opt.jobs || 4 + this[PROCESSING] = false + this[ENDED] = false + } -/***/ }), + [WRITE] (chunk) { + return super.write(chunk) + } -/***/ 761: -/***/ (function(module) { + add (path) { + this.write(path) + return this + } -module.exports = require("zlib"); + end (path) { + if (path) + this.write(path) + this[ENDED] = true + this[PROCESS]() + return this + } -/***/ }), + write (path) { + if (this[ENDED]) + throw new Error('write after end') -/***/ 763: -/***/ (function(module) { + if (path instanceof ReadEntry) + this[ADDTARENTRY](path) + else + this[ADDFSENTRY](path) + return this.flowing + } -module.exports = removeHook + [ADDTARENTRY] (p) { + const absolute = path.resolve(this.cwd, p.path) + if (this.prefix) + p.path = this.prefix + '/' + p.path.replace(/^\.(\/+|$)/, '') -function removeHook (state, name, method) { - if (!state.registry[name]) { - return + // in this case, we don't have to wait for the stat + if (!this.filter(p.path, p)) + p.resume() + else { + const job = new PackJob(p.path, absolute, false) + job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) + job.entry.on('end', _ => this[JOBDONE](job)) + this[JOBS] += 1 + this[QUEUE].push(job) + } + + this[PROCESS]() } - var index = state.registry[name] - .map(function (registered) { return registered.orig }) - .indexOf(method) + [ADDFSENTRY] (p) { + const absolute = path.resolve(this.cwd, p) + if (this.prefix) + p = this.prefix + '/' + p.replace(/^\.(\/+|$)/, '') - if (index === -1) { - return + this[QUEUE].push(new PackJob(p, absolute)) + this[PROCESS]() } - state.registry[name].splice(index, 1) -} - + [STAT] (job) { + job.pending = true + this[JOBS] += 1 + const stat = this.follow ? 'stat' : 'lstat' + fs[stat](job.absolute, (er, stat) => { + job.pending = false + this[JOBS] -= 1 + if (er) + this.emit('error', er) + else + this[ONSTAT](job, stat) + }) + } -/***/ }), + [ONSTAT] (job, stat) { + this.statCache.set(job.absolute, stat) + job.stat = stat -/***/ 768: -/***/ (function(module) { + // now we have the stat, we can filter it. + if (!this.filter(job.path, stat)) + job.ignore = true -"use strict"; + this[PROCESS]() + } -module.exports = function (x) { - var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); - var cr = typeof x === 'string' ? 
'\r' : '\r'.charCodeAt(); + [READDIR] (job) { + job.pending = true + this[JOBS] += 1 + fs.readdir(job.absolute, (er, entries) => { + job.pending = false + this[JOBS] -= 1 + if (er) + return this.emit('error', er) + this[ONREADDIR](job, entries) + }) + } - if (x[x.length - 1] === lf) { - x = x.slice(0, x.length - 1); - } + [ONREADDIR] (job, entries) { + this.readdirCache.set(job.absolute, entries) + job.readdir = entries + this[PROCESS]() + } - if (x[x.length - 1] === cr) { - x = x.slice(0, x.length - 1); - } + [PROCESS] () { + if (this[PROCESSING]) + return - return x; -}; + this[PROCESSING] = true + for (let w = this[QUEUE].head; + w !== null && this[JOBS] < this.jobs; + w = w.next) { + this[PROCESSJOB](w.value) + if (w.value.ignore) { + const p = w.next + this[QUEUE].removeNode(w) + w.next = p + } + } + this[PROCESSING] = false -/***/ }), + if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { + if (this.zip) + this.zip.end(EOF) + else { + super.write(EOF) + super.end() + } + } + } -/***/ 777: -/***/ (function(module, __unusedexports, __webpack_require__) { + get [CURRENT] () { + return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value + } -module.exports = getFirstPage + [JOBDONE] (job) { + this[QUEUE].shift() + this[JOBS] -= 1 + this[PROCESS]() + } -const getPage = __webpack_require__(265) + [PROCESSJOB] (job) { + if (job.pending) + return -function getFirstPage (octokit, link, headers) { - return getPage(octokit, link, 'first', headers) -} + if (job.entry) { + if (job === this[CURRENT] && !job.piped) + this[PIPE](job) + return + } + if (!job.stat) { + if (this.statCache.has(job.absolute)) + this[ONSTAT](job, this.statCache.get(job.absolute)) + else + this[STAT](job) + } + if (!job.stat) + return -/***/ }), + // filtered out! + if (job.ignore) + return -/***/ 796: -/***/ (function(module) { + if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { + if (this.readdirCache.has(job.absolute)) + this[ONREADDIR](job, this.readdirCache.get(job.absolute)) + else + this[READDIR](job) + if (!job.readdir) + return + } -"use strict"; + // we know it doesn't have an entry, because that got checked above + job.entry = this[ENTRY](job) + if (!job.entry) { + job.ignore = true + return + } -module.exports = Base => class extends Base { - warn (code, message, data = {}) { - if (this.file) - data.file = this.file - if (this.cwd) - data.cwd = this.cwd - data.code = message instanceof Error && message.code || code - data.tarCode = code - if (!this.strict && data.recoverable !== false) { - if (message instanceof Error) { - data = Object.assign(message, data) - message = message.message - } - this.emit('warn', data.tarCode, message, data) - } else if (message instanceof Error) { - this.emit('error', Object.assign(message, data)) - } else - this.emit('error', Object.assign(new Error(`${code}: ${message}`), data)) + if (job === this[CURRENT] && !job.piped) + this[PIPE](job) } -} + [ENTRYOPT] (job) { + return { + onwarn: (code, msg, data) => this.warn(code, msg, data), + noPax: this.noPax, + cwd: this.cwd, + absolute: job.absolute, + preservePaths: this.preservePaths, + maxReadSize: this.maxReadSize, + strict: this.strict, + portable: this.portable, + linkCache: this.linkCache, + statCache: this.statCache, + noMtime: this.noMtime, + mtime: this.mtime + } + } -/***/ }), + [ENTRY] (job) { + this[JOBS] += 1 + try { + return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) + .on('end', () => this[JOBDONE](job)) + .on('error', er => this.emit('error', er)) + } catch (er) { + 
this.emit('error', er) + } + } -/***/ 813: -/***/ (function(__unusedmodule, exports) { + [ONDRAIN] () { + if (this[CURRENT] && this[CURRENT].entry) + this[CURRENT].entry.resume() + } -"use strict"; + // like .pipe() but using super, because our write() is special + [PIPE] (job) { + job.piped = true + if (job.readdir) + job.readdir.forEach(entry => { + const p = this.prefix ? + job.path.slice(this.prefix.length + 1) || './' + : job.path -Object.defineProperty(exports, '__esModule', { value: true }); + const base = p === './' ? '' : p.replace(/\/*$/, '/') + this[ADDFSENTRY](base + entry) + }) -async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} + const source = job.entry + const zip = this.zip -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; + if (zip) + source.on('data', chunk => { + if (!zip.write(chunk)) + source.pause() + }) + else + source.on('data', chunk => { + if (!super.write(chunk)) + source.pause() + }) } - return `token ${token}`; -} - -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} - -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + pause () { + if (this.zip) + this.zip.pause() + return super.pause() } +}) - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); +class PackSync extends Pack { + constructor (opt) { + super(opt) + this[WRITEENTRYCLASS] = WriteEntrySync } - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map - - -/***/ }), + // pause/resume are no-ops in sync streams. + pause () {} + resume () {} -/***/ 814: -/***/ (function(module, __unusedexports, __webpack_require__) { + [STAT] (job) { + const stat = this.follow ? 'statSync' : 'lstatSync' + this[ONSTAT](job, fs[stat](job.absolute)) + } -module.exports = which -which.sync = whichSync + [READDIR] (job, stat) { + this[ONREADDIR](job, fs.readdirSync(job.absolute)) + } -var isWindows = process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys' + // gotta get it all in this tick + [PIPE] (job) { + const source = job.entry + const zip = this.zip -var path = __webpack_require__(622) -var COLON = isWindows ? ';' : ':' -var isexe = __webpack_require__(742) + if (job.readdir) + job.readdir.forEach(entry => { + const p = this.prefix ? + job.path.slice(this.prefix.length + 1) || './' + : job.path -function getNotFoundError (cmd) { - var er = new Error('not found: ' + cmd) - er.code = 'ENOENT' + const base = p === './' ? 
'' : p.replace(/\/*$/, '/') + this[ADDFSENTRY](base + entry) + }) - return er + if (zip) + source.on('data', chunk => { + zip.write(chunk) + }) + else + source.on('data', chunk => { + super[WRITE](chunk) + }) + } } -function getPathInfo (cmd, opt) { - var colon = opt.colon || COLON - var pathEnv = opt.path || process.env.PATH || '' - var pathExt = [''] +Pack.Sync = PackSync - pathEnv = pathEnv.split(colon) +module.exports = Pack - var pathExtExe = '' - if (isWindows) { - pathEnv.unshift(process.cwd()) - pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') - pathExt = pathExtExe.split(colon) +/***/ }), - // Always test the cmd itself first. isexe will check to make sure - // it's found in the pathExt set. - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') - pathExt.unshift('') - } +/***/ 737: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) - pathEnv = [''] +"use strict"; - return { - env: pathEnv, - ext: pathExt, - extExe: pathExtExe - } +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs_1 = __importDefault(__webpack_require__(747)); +const zlib_1 = __importDefault(__webpack_require__(761)); +const tar_1 = __importDefault(__webpack_require__(885)); +const rest_1 = __webpack_require__(0); +const node_fetch_1 = __importDefault(__webpack_require__(454)); +const child_process_1 = __webpack_require__(129); +const utils_1 = __webpack_require__(163); +class Downloader { + constructor() { + this.githubClient = new rest_1.Octokit(); + } + download(version, trivyCmdDir = __dirname) { + return __awaiter(this, void 0, void 0, function* () { + const os = this.checkPlatform(process.platform); + const downloadUrl = yield this.getDownloadUrl(version, os); + console.debug(`Download URL: ${downloadUrl}`); + const trivyCmdBaseDir = process.env.GITHUB_WORKSPACE || trivyCmdDir; + const trivyCmdPath = yield this.downloadTrivyCmd(downloadUrl, trivyCmdBaseDir); + console.debug(`Trivy Command Path: ${trivyCmdPath}`); + return trivyCmdPath; + }); + } + checkPlatform(platform) { + switch (platform) { + case 'linux': + return 'Linux'; + case 'darwin': + return 'macOS'; + default: + const errorMsg = `Sorry, ${platform} is not supported. 
+ Trivy supports Linux, macOS, FreeBSD and OpenBSD.`;
+                throw new Error(errorMsg);
+        }
+    }
+    getDownloadUrl(version, os) {
+        return __awaiter(this, void 0, void 0, function* () {
+            try {
+                const response = yield this.getAssets(version);
+                const filename = `trivy_${response.version}_${os}-64bit.tar.gz`;
+                for (const asset of response.assets) {
+                    if (asset.name === filename) {
+                        return asset.browser_download_url;
+                    }
+                }
+                throw new Error();
+            }
+            catch (error) {
+                const errorMsg = `
+      Could not find the Trivy asset that you specified.
+      Version: ${version}
+      OS: ${os}
+    `;
+                throw new Error(errorMsg);
+            }
+        });
+    }
+    getAssets(version) {
+        return __awaiter(this, void 0, void 0, function* () {
+            let response;
+            if (version === 'latest') {
+                response = yield this.githubClient.repos.getLatestRelease(Object.assign({}, Downloader.trivyRepository));
+                version = response.data.tag_name.replace(/v/, '');
+            }
+            else {
+                response = yield this.githubClient.repos.getReleaseByTag(Object.assign(Object.assign({}, Downloader.trivyRepository), { tag: `v${version}` }));
+            }
+            return { assets: response.data.assets, version };
+        });
+    }
+    downloadTrivyCmd(downloadUrl, savedPath = '.') {
+        return __awaiter(this, void 0, void 0, function* () {
+            const response = yield node_fetch_1.default(downloadUrl);
+            return new Promise((resolve, reject) => {
+                const gunzip = zlib_1.default.createGunzip();
+                const extract = tar_1.default.extract({ C: savedPath }, ['trivy']);
+                response.body
+                    .on('error', reject)
+                    .pipe(gunzip)
+                    .on('error', reject)
+                    .pipe(extract)
+                    .on('error', reject)
+                    .on('finish', () => {
+                    if (!this.trivyExists(savedPath)) {
+                        reject('Failed to extract Trivy command file.');
+                    }
+                    resolve(`${savedPath}/trivy`);
+                });
+            });
+        });
+    }
+    trivyExists(targetDir) {
+        const trivyCmdPaths = fs_1.default
+            .readdirSync(targetDir)
+            .filter(f => f === 'trivy');
+        return trivyCmdPaths.length === 1;
+    }
+}
+exports.Downloader = Downloader;
+Downloader.trivyRepository = {
+    owner: 'aquasecurity',
+    repo: 'trivy',
+};
+class Trivy {
+    scan(trivyPath, image, option) {
+        this.validateOption(option);
+        const args = [
+            '--severity',
+            option.severity,
+            '--vuln-type',
+            option.vulnType,
+            '--format',
+            option.format,
+            '--quiet',
+            '--no-progress',
+        ];
+        if (option.ignoreUnfixed)
+            args.push('--ignore-unfixed');
+        args.push(image);
+        const result = child_process_1.spawnSync(trivyPath, args, {
+            encoding: 'utf-8',
+        });
+        if (result.stdout && result.stdout.length > 0) {
+            const vulnerabilities = option.format === 'json' ? JSON.parse(result.stdout) : result.stdout;
+            if (vulnerabilities.length > 0) {
+                return vulnerabilities;
+            }
+        }
+        throw new Error(`Failed vulnerability scan using Trivy. 
+ stdout: ${result.stdout} + stderr: ${result.stderr} + error: ${result.error} + `); } - - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - - var p = path.join(pathPart, cmd) - if (!pathPart && (/^\.[\\\/]/).test(cmd)) { - p = cmd.slice(0, 2) + p + parse(vulnerabilities) { + let issueContent = ''; + for (const vuln of vulnerabilities) { + if (vuln.Vulnerabilities === null) + continue; + issueContent += `## ${vuln.Target}\n`; + let vulnTable = '|Title|Severity|CVE|Package Name|'; + vulnTable += 'Installed Version|Fixed Version|References|\n'; + vulnTable += '|:--:|:--:|:--:|:--:|:--:|:--:|:--|\n'; + for (const cve of vuln.Vulnerabilities) { + vulnTable += `|${cve.Title || 'N/A'}|${cve.Severity || 'N/A'}`; + vulnTable += `|${cve.VulnerabilityID || 'N/A'}|${cve.PkgName || 'N/A'}`; + vulnTable += `|${cve.InstalledVersion || 'N/A'}|${cve.FixedVersion || + 'N/A'}|`; + const references = cve.References; + if (!utils_1.isIterable(references)) + continue; + for (const reference of references) { + vulnTable += `${reference || 'N/A'}
`;
+                }
+                vulnTable = vulnTable.replace(/
$/, '|\n'); + } + issueContent += `${vulnTable}\n\n`; + } + return issueContent; } - ;(function E (ii, ll) { - if (ii === ll) return F(i + 1, l) - var ext = pathExt[ii] - isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { - if (!er && is) { - if (opt.all) - found.push(p + ext) - else - return cb(null, p + ext) + validateOption(option) { + this.validateSeverity(option.severity.split(',')); + this.validateVulnType(option.vulnType.split(',')); + } + validateSeverity(severities) { + const allowedSeverities = /UNKNOWN|LOW|MEDIUM|HIGH|CRITICAL/; + if (!validateArrayOption(allowedSeverities, severities)) { + throw new Error(`Trivy option error: ${severities.join(',')} is unknown severity. + Trivy supports UNKNOWN, LOW, MEDIUM, HIGH and CRITICAL.`); } - return E(ii + 1, ll) - }) - })(0, pathExt.length) - })(0, pathEnv.length) -} - -function whichSync (cmd, opt) { - opt = opt || {} - - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] - - for (var i = 0, l = pathEnv.length; i < l; i ++) { - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) - - var p = path.join(pathPart, cmd) - if (!pathPart && /^\.[\\\/]/.test(cmd)) { - p = cmd.slice(0, 2) + p + return true; } - for (var j = 0, ll = pathExt.length; j < ll; j ++) { - var cur = p + pathExt[j] - var is - try { - is = isexe.sync(cur, { pathExt: pathExtExe }) - if (is) { - if (opt.all) - found.push(cur) - else - return cur + validateVulnType(vulnTypes) { + const allowedVulnTypes = /os|library/; + if (!validateArrayOption(allowedVulnTypes, vulnTypes)) { + throw new Error(`Trivy option error: ${vulnTypes.join(',')} is unknown vuln-type. + Trivy supports os and library.`); } - } catch (ex) {} + return true; } - } - - if (opt.all && found.length) - return found - - if (opt.nothrow) - return null - - throw getNotFoundError(cmd) +} +exports.Trivy = Trivy; +function validateArrayOption(allowedValue, options) { + for (const option of options) { + if (!allowedValue.test(option)) { + return false; + } + } + return true; } /***/ }), -/***/ 816: -/***/ (function(module) { - -"use strict"; - -module.exports = /^#!.*/; - - -/***/ }), - -/***/ 818: +/***/ 742: /***/ (function(module, __unusedexports, __webpack_require__) { -module.exports = isexe -isexe.sync = sync - var fs = __webpack_require__(747) +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = __webpack_require__(818) +} else { + core = __webpack_require__(197) +} -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? 
- options.pathExt : process.env.PATHEXT +module.exports = isexe +isexe.sync = sync - if (!pathext) { - return true +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} } - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') } - } - return false -} -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) } - return checkPathExt(path, options) -} -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, path, options)) + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false + } + } + cb(er, is) }) } function sync (path, options) { - return checkStat(fs.statSync(path), path, options) + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } } /***/ }), -/***/ 827: +/***/ 747: +/***/ (function(module) { + +module.exports = require("fs"); + +/***/ }), + +/***/ 753: /***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; -const MiniPass = __webpack_require__(841) -const EE = __webpack_require__(614).EventEmitter -const fs = __webpack_require__(747) -let writev = fs.writev -/* istanbul ignore next */ -if (!writev) { - // This entire block can be removed if support for earlier than Node.js - // 12.9.0 is not needed. - const binding = process.binding('fs') - const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback +Object.defineProperty(exports, '__esModule', { value: true }); - writev = (fd, iovec, pos, cb) => { - const done = (er, bw) => cb(er, bw, iovec) - const req = new FSReqWrap() - req.oncomplete = done - binding.writeBuffers(fd, iovec, pos, req) - } +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var endpoint = __webpack_require__(385); +var universalUserAgent = __webpack_require__(526); +var isPlainObject = _interopDefault(__webpack_require__(548)); +var nodeFetch = _interopDefault(__webpack_require__(454)); +var requestError = __webpack_require__(463); + +const VERSION = "5.3.1"; + +function getBufferResponse(response) { + return response.arrayBuffer(); } -const _autoClose = Symbol('_autoClose') -const _close = Symbol('_close') -const _ended = Symbol('_ended') -const _fd = Symbol('_fd') -const _finished = Symbol('_finished') -const _flags = Symbol('_flags') -const _flush = Symbol('_flush') -const _handleChunk = Symbol('_handleChunk') -const _makeBuf = Symbol('_makeBuf') -const _mode = Symbol('_mode') -const _needDrain = Symbol('_needDrain') -const _onerror = Symbol('_onerror') -const _onopen = Symbol('_onopen') -const _onread = Symbol('_onread') -const _onwrite = Symbol('_onwrite') -const _open = Symbol('_open') -const _path = Symbol('_path') -const _pos = Symbol('_pos') -const _queue = Symbol('_queue') -const _read = Symbol('_read') -const _readSize = Symbol('_readSize') -const _reading = Symbol('_reading') -const _remain = Symbol('_remain') -const _size = Symbol('_size') -const _write = Symbol('_write') -const _writing = Symbol('_writing') -const _defaultFlag = Symbol('_defaultFlag') -const _errored = Symbol('_errored') +function fetchWrapper(requestOptions) { + if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } -class ReadStream extends MiniPass { - constructor (path, opt) { - opt = opt || {} - super(opt) + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; - this.readable = true - this.writable = false + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } - if (typeof path !== 'string') - throw new TypeError('path must be a string') + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requsets - this[_errored] = false - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_path] = path - this[_readSize] = opt.readSize || 16*1024*1024 - this[_reading] = false - this[_size] = typeof opt.size === 'number' ? opt.size : Infinity - this[_remain] = this[_size] - this[_autoClose] = typeof opt.autoClose === 'boolean' ? 
- opt.autoClose : true - if (typeof this[_fd] === 'number') - this[_read]() - else - this[_open]() - } + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; + } - get fd () { return this[_fd] } - get path () { return this[_path] } + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } - write () { - throw new TypeError('this is a readable stream') - } + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); + } - end () { - throw new TypeError('this is a readable stream') - } + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions + }); - [_open] () { - fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) - } + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array Fotmat - [_onopen] (er, fd) { - if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_read]() + error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 + } + + throw error; + }); } - } - [_makeBuf] () { - return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) - } + const contentType = response.headers.get("content-type"); - [_read] () { - if (!this[_reading]) { - this[_reading] = true - const buf = this[_makeBuf]() - /* istanbul ignore if */ - if (buf.length === 0) - return process.nextTick(() => this[_onread](null, 0, buf)) - fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) => - this[_onread](er, br, buf)) + if (/application\/json/.test(contentType)) { + return response.json(); } - } - [_onread] (er, br, buf) { - this[_reading] = false - if (er) - this[_onerror](er) - else if (this[_handleChunk](br, buf)) - this[_read]() - } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; } - } - [_onerror] (er) { - this[_reading] = true - this[_close]() - this.emit('error', er) - } + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} - [_handleChunk] (br, buf) { - let ret = false - // no effect if infinite - this[_remain] -= br - if (br > 0) - ret = super.write(br < buf.length ? 
buf.slice(0, br) : buf) +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); - if (br === 0 || this[_remain] <= 0) { - ret = false - this[_close]() - super.end() + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); } - return ret + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); +} + +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` } +}); + +exports.request = request; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 761: +/***/ (function(module) { + +module.exports = require("zlib"); - emit (ev, data) { - switch (ev) { - case 'prefinish': - case 'finish': - break +/***/ }), - case 'drain': - if (typeof this[_fd] === 'number') - this[_read]() - break +/***/ 763: +/***/ (function(module) { - case 'error': - if (this[_errored]) - return - this[_errored] = true - return super.emit(ev, data) +module.exports = removeHook - default: - return super.emit(ev, data) - } +function removeHook (state, name, method) { + if (!state.registry[name]) { + return } -} -class ReadStreamSync extends ReadStream { - [_open] () { - let threw = true - try { - this[_onopen](null, fs.openSync(this[_path], 'r')) - threw = false - } finally { - if (threw) - this[_close]() - } - } + var index = state.registry[name] + .map(function (registered) { return registered.orig }) + .indexOf(method) - [_read] () { - let threw = true - try { - if (!this[_reading]) { - this[_reading] = true - do { - const buf = this[_makeBuf]() - /* istanbul ignore next */ - const br = buf.length === 0 ? 0 - : fs.readSync(this[_fd], buf, 0, buf.length, null) - if (!this[_handleChunk](br, buf)) - break - } while (true) - this[_reading] = false - } - threw = false - } finally { - if (threw) - this[_close]() - } + if (index === -1) { + return } - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } - } + state.registry[name].splice(index, 1) } -class WriteStream extends EE { - constructor (path, opt) { - opt = opt || {} - super(opt) - this.readable = false - this.writable = true - this[_errored] = false - this[_writing] = false - this[_ended] = false - this[_needDrain] = false - this[_queue] = [] - this[_path] = path - this[_fd] = typeof opt.fd === 'number' ? opt.fd : null - this[_mode] = opt.mode === undefined ? 0o666 : opt.mode - this[_pos] = typeof opt.start === 'number' ? opt.start : null - this[_autoClose] = typeof opt.autoClose === 'boolean' ? - opt.autoClose : true - // truncating makes no sense when writing into the middle - const defaultFlag = this[_pos] !== null ? 'r+' : 'w' - this[_defaultFlag] = opt.flags === undefined - this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags +/***/ }), - if (this[_fd] === null) - this[_open]() - } +/***/ 768: +/***/ (function(module) { - emit (ev, data) { - if (ev === 'error') { - if (this[_errored]) - return - this[_errored] = true - } - return super.emit(ev, data) - } +"use strict"; +module.exports = function (x) { + var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); + var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); - get fd () { return this[_fd] } - get path () { return this[_path] } + if (x[x.length - 1] === lf) { + x = x.slice(0, x.length - 1); + } - [_onerror] (er) { - this[_close]() - this[_writing] = true - this.emit('error', er) - } + if (x[x.length - 1] === cr) { + x = x.slice(0, x.length - 1); + } - [_open] () { - fs.open(this[_path], this[_flags], this[_mode], - (er, fd) => this[_onopen](er, fd)) - } + return x; +}; - [_onopen] (er, fd) { - if (this[_defaultFlag] && - this[_flags] === 'r+' && - er && er.code === 'ENOENT') { - this[_flags] = 'w' - this[_open]() - } else if (er) - this[_onerror](er) - else { - this[_fd] = fd - this.emit('open', fd) - this[_flush]() - } - } - end (buf, enc) { - if (buf) - this.write(buf, enc) +/***/ }), - this[_ended] = true +/***/ 777: +/***/ (function(module, __unusedexports, __webpack_require__) { - // synthetic after-write logic, where drain/finish live - if (!this[_writing] && !this[_queue].length && - typeof this[_fd] === 'number') - this[_onwrite](null, 0) - return this - } +module.exports = getFirstPage - write (buf, enc) { - if (typeof buf === 'string') - buf = Buffer.from(buf, enc) +const getPage = __webpack_require__(265) - if (this[_ended]) { - this.emit('error', new Error('write() after end()')) - return false - } +function getFirstPage (octokit, link, headers) { + return getPage(octokit, link, 'first', headers) +} - if (this[_fd] === null || this[_writing] || this[_queue].length) { - this[_queue].push(buf) - this[_needDrain] = true - return false - } - this[_writing] = true - this[_write](buf) - return true - } +/***/ }), - [_write] (buf) { - fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => - this[_onwrite](er, bw)) - } +/***/ 796: +/***/ (function(module) { - [_onwrite] (er, bw) { - if (er) - this[_onerror](er) - else { - if (this[_pos] !== null) - this[_pos] += bw - if (this[_queue].length) - this[_flush]() - else { - this[_writing] = false +"use strict"; - if (this[_ended] && !this[_finished]) { - this[_finished] = true - this[_close]() - this.emit('finish') - } else if (this[_needDrain]) { - this[_needDrain] = false - this.emit('drain') - } +module.exports = Base => class extends Base { + warn (code, message, data = {}) { + if (this.file) + data.file = this.file + if (this.cwd) + data.cwd = this.cwd + data.code = message instanceof Error && message.code || code + data.tarCode = code + if (!this.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data) + message = message.message } - } + this.emit('warn', data.tarCode, message, data) + } else if (message instanceof Error) { + this.emit('error', Object.assign(message, data)) + } else + this.emit('error', Object.assign(new Error(`${code}: ${message}`), data)) } +} - [_flush] () { - if (this[_queue].length === 0) { - if (this[_ended]) - this[_onwrite](null, 0) - } else if (this[_queue].length === 1) - this[_write](this[_queue].pop()) - else { - const iovec = this[_queue] - this[_queue] = [] - writev(this[_fd], iovec, this[_pos], - (er, bw) => this[_onwrite](er, bw)) - } + +/***/ }), + +/***/ 813: +/***/ 
(function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} + +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; } - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) - } - } + return `token ${token}`; } -class WriteStreamSync extends WriteStream { - [_open] () { - let fd - // only wrap in a try{} block if we know we'll retry, to avoid - // the rethrow obscuring the error's source frame in most cases. - if (this[_defaultFlag] && this[_flags] === 'r+') { - try { - fd = fs.openSync(this[_path], this[_flags], this[_mode]) - } catch (er) { - if (er.code === 'ENOENT') { - this[_flags] = 'w' - return this[_open]() - } else - throw er - } - } else - fd = fs.openSync(this[_path], this[_flags], this[_mode]) +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} - this[_onopen](null, fd) +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); } - [_close] () { - if (this[_autoClose] && typeof this[_fd] === 'number') { - const fd = this[_fd] - this[_fd] = null - fs.closeSync(fd) - this.emit('close') - } + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); } - [_write] (buf) { - // throw the original, but try to close if it fails - let threw = true - try { - this[_onwrite](null, - fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) - threw = false - } finally { - if (threw) - try { this[_close]() } catch (_) {} - } - } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; + +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map + + +/***/ }), + +/***/ 814: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = which +which.sync = whichSync + +var isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' + +var path = __webpack_require__(622) +var COLON = isWindows ? 
';' : ':' +var isexe = __webpack_require__(742) + +function getNotFoundError (cmd) { + var er = new Error('not found: ' + cmd) + er.code = 'ENOENT' + + return er } -exports.ReadStream = ReadStream -exports.ReadStreamSync = ReadStreamSync +function getPathInfo (cmd, opt) { + var colon = opt.colon || COLON + var pathEnv = opt.path || process.env.PATH || '' + var pathExt = [''] -exports.WriteStream = WriteStream -exports.WriteStreamSync = WriteStreamSync + pathEnv = pathEnv.split(colon) + var pathExtExe = '' + if (isWindows) { + pathEnv.unshift(process.cwd()) + pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') + pathExt = pathExtExe.split(colon) -/***/ }), -/***/ 835: -/***/ (function(module) { + // Always test the cmd itself first. isexe will check to make sure + // it's found in the pathExt set. + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') + } -module.exports = require("url"); + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) + pathEnv = [''] -/***/ }), + return { + env: pathEnv, + ext: pathExt, + extExe: pathExtExe + } +} -/***/ 841: -/***/ (function(module, __unusedexports, __webpack_require__) { +function which (cmd, opt, cb) { + if (typeof opt === 'function') { + cb = opt + opt = {} + } -"use strict"; + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] -const EE = __webpack_require__(614) -const Stream = __webpack_require__(413) -const Yallist = __webpack_require__(612) -const SD = __webpack_require__(304).StringDecoder + ;(function F (i, l) { + if (i === l) { + if (opt.all && found.length) + return cb(null, found) + else + return cb(getNotFoundError(cmd)) + } -const EOF = Symbol('EOF') -const MAYBE_EMIT_END = Symbol('maybeEmitEnd') -const EMITTED_END = Symbol('emittedEnd') -const EMITTING_END = Symbol('emittingEnd') -const CLOSED = Symbol('closed') -const READ = Symbol('read') -const FLUSH = Symbol('flush') -const FLUSHCHUNK = Symbol('flushChunk') -const ENCODING = Symbol('encoding') -const DECODER = Symbol('decoder') -const FLOWING = Symbol('flowing') -const PAUSED = Symbol('paused') -const RESUME = Symbol('resume') -const BUFFERLENGTH = Symbol('bufferLength') -const BUFFERPUSH = Symbol('bufferPush') -const BUFFERSHIFT = Symbol('bufferShift') -const OBJECTMODE = Symbol('objectMode') -const DESTROYED = Symbol('destroyed') + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) -// TODO remove when Node v8 support drops -const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1' -const ASYNCITERATOR = doIter && Symbol.asyncIterator - || Symbol('asyncIterator not implemented') -const ITERATOR = doIter && Symbol.iterator - || Symbol('iterator not implemented') + var p = path.join(pathPart, cmd) + if (!pathPart && (/^\.[\\\/]/).test(cmd)) { + p = cmd.slice(0, 2) + p + } + ;(function E (ii, ll) { + if (ii === ll) return F(i + 1, l) + var ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return cb(null, p + ext) + } + return E(ii + 1, ll) + }) + })(0, pathExt.length) + })(0, pathEnv.length) +} -// events that mean 'the stream is over' -// these are treated specially, and re-emitted -// if they are listened for after emitting. 
-const isEndish = ev => - ev === 'end' || - ev === 'finish' || - ev === 'prefinish' +function whichSync (cmd, opt) { + opt = opt || {} -const isArrayBuffer = b => b instanceof ArrayBuffer || - typeof b === 'object' && - b.constructor && - b.constructor.name === 'ArrayBuffer' && - b.byteLength >= 0 + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] -const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b) + for (var i = 0, l = pathEnv.length; i < l; i ++) { + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) -module.exports = class Minipass extends Stream { - constructor (options) { - super() - this[FLOWING] = false - // whether we're explicitly paused - this[PAUSED] = false - this.pipes = new Yallist() - this.buffer = new Yallist() - this[OBJECTMODE] = options && options.objectMode || false - if (this[OBJECTMODE]) - this[ENCODING] = null - else - this[ENCODING] = options && options.encoding || null - if (this[ENCODING] === 'buffer') - this[ENCODING] = null - this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null - this[EOF] = false - this[EMITTED_END] = false - this[EMITTING_END] = false - this[CLOSED] = false - this.writable = true - this.readable = true - this[BUFFERLENGTH] = 0 - this[DESTROYED] = false + var p = path.join(pathPart, cmd) + if (!pathPart && /^\.[\\\/]/.test(cmd)) { + p = cmd.slice(0, 2) + p + } + for (var j = 0, ll = pathExt.length; j < ll; j ++) { + var cur = p + pathExt[j] + var is + try { + is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } } - get bufferLength () { return this[BUFFERLENGTH] } + if (opt.all && found.length) + return found - get encoding () { return this[ENCODING] } - set encoding (enc) { - if (this[OBJECTMODE]) - throw new Error('cannot set encoding in objectMode') + if (opt.nothrow) + return null - if (this[ENCODING] && enc !== this[ENCODING] && - (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH])) - throw new Error('cannot change encoding') + throw getNotFoundError(cmd) +} - if (this[ENCODING] !== enc) { - this[DECODER] = enc ? new SD(enc) : null - if (this.buffer.length) - this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) - } - this[ENCODING] = enc - } +/***/ }), - setEncoding (enc) { - this.encoding = enc - } +/***/ 816: +/***/ (function(module) { - get objectMode () { return this[OBJECTMODE] } - set objectMode (ॐ ) { this[OBJECTMODE] = this[OBJECTMODE] || !!ॐ } +"use strict"; - write (chunk, encoding, cb) { - if (this[EOF]) - throw new Error('write after end') +module.exports = /^#!.*/; - if (this[DESTROYED]) { - this.emit('error', Object.assign( - new Error('Cannot call write after a stream was destroyed'), - { code: 'ERR_STREAM_DESTROYED' } - )) - return true - } - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' +/***/ }), - if (!encoding) - encoding = 'utf8' +/***/ 818: +/***/ (function(module, __unusedexports, __webpack_require__) { - // convert array buffers and typed array views into buffers - // at some point in the future, we may want to do the opposite! 
- // leave strings and buffers as-is - // anything else switches us into object mode - if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) { - if (isArrayBufferView(chunk)) - chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength) - else if (isArrayBuffer(chunk)) - chunk = Buffer.from(chunk) - else if (typeof chunk !== 'string') - // use the setter so we throw if we have encoding set - this.objectMode = true - } +module.exports = isexe +isexe.sync = sync - // this ensures at this point that the chunk is a buffer or string - // don't buffer it up or send it to the decoder - if (!this.objectMode && !chunk.length) { - const ret = this.flowing - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - cb() - return ret - } +var fs = __webpack_require__(747) - // fast-path writing strings of same encoding to a stream with - // an empty buffer, skipping the buffer/decoder dance - if (typeof chunk === 'string' && !this[OBJECTMODE] && - // unless it is a string already ready for us to use - !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) { - chunk = Buffer.from(chunk, encoding) - } +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? + options.pathExt : process.env.PATHEXT - if (Buffer.isBuffer(chunk) && this[ENCODING]) - chunk = this[DECODER].write(chunk) + if (!pathext) { + return true + } - try { - return this.flowing - ? (this.emit('data', chunk), this.flowing) - : (this[BUFFERPUSH](chunk), false) - } finally { - if (this[BUFFERLENGTH] !== 0) - this.emit('readable') - if (cb) - cb() + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true } } + return false +} - read (n) { - if (this[DESTROYED]) - return null +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) +} - try { - if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) - return null +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, path, options)) + }) +} - if (this[OBJECTMODE]) - n = null +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} - if (this.buffer.length > 1 && !this[OBJECTMODE]) { - if (this.encoding) - this.buffer = new Yallist([ - Array.from(this.buffer).join('') - ]) - else - this.buffer = new Yallist([ - Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH]) - ]) - } - return this[READ](n || null, this.buffer.head.value) - } finally { - this[MAYBE_EMIT_END]() - } - } +/***/ }), - [READ] (n, chunk) { - if (n === chunk.length || n === null) - this[BUFFERSHIFT]() - else { - this.buffer.head.value = chunk.slice(n) - chunk = chunk.slice(0, n) - this[BUFFERLENGTH] -= n - } +/***/ 827: +/***/ (function(__unusedmodule, exports, __webpack_require__) { - this.emit('data', chunk) +"use strict"; - if (!this.buffer.length && !this[EOF]) - this.emit('drain') +const MiniPass = __webpack_require__(720) +const EE = __webpack_require__(614).EventEmitter +const fs = __webpack_require__(747) - return chunk +let writev = fs.writev +/* istanbul ignore next */ +if (!writev) { + // This entire block can be removed if support for earlier than Node.js + // 12.9.0 is not needed. 
+ const binding = process.binding('fs') + const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback + + writev = (fd, iovec, pos, cb) => { + const done = (er, bw) => cb(er, bw, iovec) + const req = new FSReqWrap() + req.oncomplete = done + binding.writeBuffers(fd, iovec, pos, req) } +} - end (chunk, encoding, cb) { - if (typeof chunk === 'function') - cb = chunk, chunk = null - if (typeof encoding === 'function') - cb = encoding, encoding = 'utf8' - if (chunk) - this.write(chunk, encoding) - if (cb) - this.once('end', cb) - this[EOF] = true - this.writable = false +const _autoClose = Symbol('_autoClose') +const _close = Symbol('_close') +const _ended = Symbol('_ended') +const _fd = Symbol('_fd') +const _finished = Symbol('_finished') +const _flags = Symbol('_flags') +const _flush = Symbol('_flush') +const _handleChunk = Symbol('_handleChunk') +const _makeBuf = Symbol('_makeBuf') +const _mode = Symbol('_mode') +const _needDrain = Symbol('_needDrain') +const _onerror = Symbol('_onerror') +const _onopen = Symbol('_onopen') +const _onread = Symbol('_onread') +const _onwrite = Symbol('_onwrite') +const _open = Symbol('_open') +const _path = Symbol('_path') +const _pos = Symbol('_pos') +const _queue = Symbol('_queue') +const _read = Symbol('_read') +const _readSize = Symbol('_readSize') +const _reading = Symbol('_reading') +const _remain = Symbol('_remain') +const _size = Symbol('_size') +const _write = Symbol('_write') +const _writing = Symbol('_writing') +const _defaultFlag = Symbol('_defaultFlag') +const _errored = Symbol('_errored') - // if we haven't written anything, then go ahead and emit, - // even if we're not reading. - // we'll re-emit if a new 'end' listener is added anyway. - // This makes MP more suitable to write-only use cases. - if (this.flowing || !this[PAUSED]) - this[MAYBE_EMIT_END]() - return this - } +class ReadStream extends MiniPass { + constructor (path, opt) { + opt = opt || {} + super(opt) - // don't let the internal resume be overwritten - [RESUME] () { - if (this[DESTROYED]) - return + this.readable = true + this.writable = false - this[PAUSED] = false - this[FLOWING] = true - this.emit('resume') - if (this.buffer.length) - this[FLUSH]() - else if (this[EOF]) - this[MAYBE_EMIT_END]() + if (typeof path !== 'string') + throw new TypeError('path must be a string') + + this[_errored] = false + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_path] = path + this[_readSize] = opt.readSize || 16*1024*1024 + this[_reading] = false + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity + this[_remain] = this[_size] + this[_autoClose] = typeof opt.autoClose === 'boolean' ? 
+ opt.autoClose : true + + if (typeof this[_fd] === 'number') + this[_read]() else - this.emit('drain') + this[_open]() } - resume () { - return this[RESUME]() - } + get fd () { return this[_fd] } + get path () { return this[_path] } - pause () { - this[FLOWING] = false - this[PAUSED] = true + write () { + throw new TypeError('this is a readable stream') } - get destroyed () { - return this[DESTROYED] + end () { + throw new TypeError('this is a readable stream') } - get flowing () { - return this[FLOWING] + [_open] () { + fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) } - get paused () { - return this[PAUSED] + [_onopen] (er, fd) { + if (er) + this[_onerror](er) + else { + this[_fd] = fd + this.emit('open', fd) + this[_read]() + } } - [BUFFERPUSH] (chunk) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] += 1 - else - this[BUFFERLENGTH] += chunk.length - return this.buffer.push(chunk) + [_makeBuf] () { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) } - [BUFFERSHIFT] () { - if (this.buffer.length) { - if (this[OBJECTMODE]) - this[BUFFERLENGTH] -= 1 - else - this[BUFFERLENGTH] -= this.buffer.head.value.length + [_read] () { + if (!this[_reading]) { + this[_reading] = true + const buf = this[_makeBuf]() + /* istanbul ignore if */ + if (buf.length === 0) + return process.nextTick(() => this[_onread](null, 0, buf)) + fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) => + this[_onread](er, br, buf)) } - return this.buffer.shift() } - [FLUSH] () { - do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) - - if (!this.buffer.length && !this[EOF]) - this.emit('drain') + [_onread] (er, br, buf) { + this[_reading] = false + if (er) + this[_onerror](er) + else if (this[_handleChunk](br, buf)) + this[_read]() } - [FLUSHCHUNK] (chunk) { - return chunk ? (this.emit('data', chunk), this.flowing) : false + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } } - pipe (dest, opts) { - if (this[DESTROYED]) - return + [_onerror] (er) { + this[_reading] = true + this[_close]() + this.emit('error', er) + } - const ended = this[EMITTED_END] - opts = opts || {} - if (dest === process.stdout || dest === process.stderr) - opts.end = false - else - opts.end = opts.end !== false + [_handleChunk] (br, buf) { + let ret = false + // no effect if infinite + this[_remain] -= br + if (br > 0) + ret = super.write(br < buf.length ? 
buf.slice(0, br) : buf) - const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } - this.pipes.push(p) + if (br === 0 || this[_remain] <= 0) { + ret = false + this[_close]() + super.end() + } - dest.on('drain', p.ondrain) - this[RESUME]() - // piping an ended stream ends immediately - if (ended && p.opts.end) - p.dest.end() - return dest + return ret } - addListener (ev, fn) { - return this.on(ev, fn) + emit (ev, data) { + switch (ev) { + case 'prefinish': + case 'finish': + break + + case 'drain': + if (typeof this[_fd] === 'number') + this[_read]() + break + + case 'error': + if (this[_errored]) + return + this[_errored] = true + return super.emit(ev, data) + + default: + return super.emit(ev, data) + } } +} - on (ev, fn) { +class ReadStreamSync extends ReadStream { + [_open] () { + let threw = true try { - return super.on(ev, fn) + this[_onopen](null, fs.openSync(this[_path], 'r')) + threw = false } finally { - if (ev === 'data' && !this.pipes.length && !this.flowing) - this[RESUME]() - else if (isEndish(ev) && this[EMITTED_END]) { - super.emit(ev) - this.removeAllListeners(ev) - } + if (threw) + this[_close]() } } - get emittedEnd () { - return this[EMITTED_END] + [_read] () { + let threw = true + try { + if (!this[_reading]) { + this[_reading] = true + do { + const buf = this[_makeBuf]() + /* istanbul ignore next */ + const br = buf.length === 0 ? 0 + : fs.readSync(this[_fd], buf, 0, buf.length, null) + if (!this[_handleChunk](br, buf)) + break + } while (true) + this[_reading] = false + } + threw = false + } finally { + if (threw) + this[_close]() + } } - [MAYBE_EMIT_END] () { - if (!this[EMITTING_END] && - !this[EMITTED_END] && - !this[DESTROYED] && - this.buffer.length === 0 && - this[EOF]) { - this[EMITTING_END] = true - this.emit('end') - this.emit('prefinish') - this.emit('finish') - if (this[CLOSED]) - this.emit('close') - this[EMITTING_END] = false + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.closeSync(fd) + this.emit('close') } } +} + +class WriteStream extends EE { + constructor (path, opt) { + opt = opt || {} + super(opt) + this.readable = false + this.writable = true + this[_errored] = false + this[_writing] = false + this[_ended] = false + this[_needDrain] = false + this[_queue] = [] + this[_path] = path + this[_fd] = typeof opt.fd === 'number' ? opt.fd : null + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode + this[_pos] = typeof opt.start === 'number' ? opt.start : null + this[_autoClose] = typeof opt.autoClose === 'boolean' ? + opt.autoClose : true + + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== null ? 'r+' : 'w' + this[_defaultFlag] = opt.flags === undefined + this[_flags] = this[_defaultFlag] ? 
defaultFlag : opt.flags + + if (this[_fd] === null) + this[_open]() + } emit (ev, data) { - // error and close are only events allowed after calling destroy() - if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED]) - return - else if (ev === 'data') { - if (!data) + if (ev === 'error') { + if (this[_errored]) return + this[_errored] = true + } + return super.emit(ev, data) + } + + + get fd () { return this[_fd] } + get path () { return this[_path] } + + [_onerror] (er) { + this[_close]() + this[_writing] = true + this.emit('error', er) + } + + [_open] () { + fs.open(this[_path], this[_flags], this[_mode], + (er, fd) => this[_onopen](er, fd)) + } + + [_onopen] (er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && er.code === 'ENOENT') { + this[_flags] = 'w' + this[_open]() + } else if (er) + this[_onerror](er) + else { + this[_fd] = fd + this.emit('open', fd) + this[_flush]() + } + } - if (this.pipes.length) - this.pipes.forEach(p => - p.dest.write(data) === false && this.pause()) - } else if (ev === 'end') { - // only actual end gets this treatment - if (this[EMITTED_END] === true) - return + end (buf, enc) { + if (buf) + this.write(buf, enc) - this[EMITTED_END] = true - this.readable = false + this[_ended] = true - if (this[DECODER]) { - data = this[DECODER].end() - if (data) { - this.pipes.forEach(p => p.dest.write(data)) - super.emit('data', data) - } - } + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && !this[_queue].length && + typeof this[_fd] === 'number') + this[_onwrite](null, 0) + return this + } - this.pipes.forEach(p => { - p.dest.removeListener('drain', p.ondrain) - if (p.opts.end) - p.dest.end() - }) - } else if (ev === 'close') { - this[CLOSED] = true - // don't emit close before 'end' and 'finish' - if (!this[EMITTED_END] && !this[DESTROYED]) - return - } + write (buf, enc) { + if (typeof buf === 'string') + buf = Buffer.from(buf, enc) - // TODO: replace with a spread operator when Node v4 support drops - const args = new Array(arguments.length) - args[0] = ev - args[1] = data - if (arguments.length > 2) { - for (let i = 2; i < arguments.length; i++) { - args[i] = arguments[i] - } + if (this[_ended]) { + this.emit('error', new Error('write() after end()')) + return false } - try { - return super.emit.apply(this, args) - } finally { - if (!isEndish(ev)) - this[MAYBE_EMIT_END]() - else - this.removeAllListeners(ev) + if (this[_fd] === null || this[_writing] || this[_queue].length) { + this[_queue].push(buf) + this[_needDrain] = true + return false } - } - // const all = await stream.collect() - collect () { - const buf = [] - if (!this[OBJECTMODE]) - buf.dataLength = 0 - // set the promise first, in case an error is raised - // by triggering the flow here. - const p = this.promise() - this.on('data', c => { - buf.push(c) - if (!this[OBJECTMODE]) - buf.dataLength += c.length - }) - return p.then(() => buf) + this[_writing] = true + this[_write](buf) + return true } - // const data = await stream.concat() - concat () { - return this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this.collect().then(buf => - this[OBJECTMODE] - ? Promise.reject(new Error('cannot concat in objectMode')) - : this[ENCODING] ? 
buf.join('') : Buffer.concat(buf, buf.dataLength)) + [_write] (buf) { + fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => + this[_onwrite](er, bw)) } - // stream.promise().then(() => done, er => emitted error) - promise () { - return new Promise((resolve, reject) => { - this.on(DESTROYED, () => reject(new Error('stream destroyed'))) - this.on('end', () => resolve()) - this.on('error', er => reject(er)) - }) + [_onwrite] (er, bw) { + if (er) + this[_onerror](er) + else { + if (this[_pos] !== null) + this[_pos] += bw + if (this[_queue].length) + this[_flush]() + else { + this[_writing] = false + + if (this[_ended] && !this[_finished]) { + this[_finished] = true + this[_close]() + this.emit('finish') + } else if (this[_needDrain]) { + this[_needDrain] = false + this.emit('drain') + } + } + } } - // for await (let chunk of stream) - [ASYNCITERATOR] () { - const next = () => { - const res = this.read() - if (res !== null) - return Promise.resolve({ done: false, value: res }) + [_flush] () { + if (this[_queue].length === 0) { + if (this[_ended]) + this[_onwrite](null, 0) + } else if (this[_queue].length === 1) + this[_write](this[_queue].pop()) + else { + const iovec = this[_queue] + this[_queue] = [] + writev(this[_fd], iovec, this[_pos], + (er, bw) => this[_onwrite](er, bw)) + } + } - if (this[EOF]) - return Promise.resolve({ done: true }) + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) + } + } +} - let resolve = null - let reject = null - const onerr = er => { - this.removeListener('data', ondata) - this.removeListener('end', onend) - reject(er) - } - const ondata = value => { - this.removeListener('error', onerr) - this.removeListener('end', onend) - this.pause() - resolve({ value: value, done: !!this[EOF] }) - } - const onend = () => { - this.removeListener('error', onerr) - this.removeListener('data', ondata) - resolve({ done: true }) +class WriteStreamSync extends WriteStream { + [_open] () { + let fd + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
+ if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs.openSync(this[_path], this[_flags], this[_mode]) + } catch (er) { + if (er.code === 'ENOENT') { + this[_flags] = 'w' + return this[_open]() + } else + throw er } - const ondestroy = () => onerr(new Error('stream destroyed')) - return new Promise((res, rej) => { - reject = rej - resolve = res - this.once(DESTROYED, ondestroy) - this.once('error', onerr) - this.once('end', onend) - this.once('data', ondata) - }) - } + } else + fd = fs.openSync(this[_path], this[_flags], this[_mode]) - return { next } + this[_onopen](null, fd) } - // for (let chunk of stream) - [ITERATOR] () { - const next = () => { - const value = this.read() - const done = value === null - return { value, done } + [_close] () { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd] + this[_fd] = null + fs.closeSync(fd) + this.emit('close') } - return { next } } - destroy (er) { - if (this[DESTROYED]) { - if (er) - this.emit('error', er) - else - this.emit(DESTROYED) - return this + [_write] (buf) { + // throw the original, but try to close if it fails + let threw = true + try { + this[_onwrite](null, + fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) + threw = false + } finally { + if (threw) + try { this[_close]() } catch (_) {} } + } +} - this[DESTROYED] = true - - // throw away all buffered data, it's never coming out - this.buffer = new Yallist() - this[BUFFERLENGTH] = 0 +exports.ReadStream = ReadStream +exports.ReadStreamSync = ReadStreamSync - if (typeof this.close === 'function' && !this[CLOSED]) - this.close() +exports.WriteStream = WriteStream +exports.WriteStreamSync = WriteStreamSync - if (er) - this.emit('error', er) - else // if no error to emit, still reject pending promises - this.emit(DESTROYED) - return this - } +/***/ }), - static isStream (s) { - return !!s && (s instanceof Minipass || s instanceof Stream || - s instanceof EE && ( - typeof s.pipe === 'function' || // readable - (typeof s.write === 'function' && typeof s.end === 'function') // writable - )) - } -} +/***/ 835: +/***/ (function(module) { +module.exports = require("url"); /***/ }), diff --git a/src/index.ts b/src/index.ts index ed6dfaa..c8cfcd4 100644 --- a/src/index.ts +++ b/src/index.ts @@ -1,6 +1,6 @@ import * as core from '@actions/core'; import { Trivy, Downloader } from './trivy'; -import { createIssue } from './issue'; +import { createOrUpdateIssue } from './issue'; import { TrivyOption, IssueOption, @@ -69,9 +69,13 @@ async function run() { .split(','), }; const token: string = core.getInput('token', { required: true }); - const output: IssueResponse = await createIssue(token, issueOption); + const output: IssueResponse = await createOrUpdateIssue(token, image, issueOption); core.setOutput('html_url', output.htmlUrl); core.setOutput('issue_number', output.issueNumber.toString()); + + if (core.getInput("fail_on_vulnerabilities") === 'true') { + core.setFailed(`Vulnerabilities found.\n${issueContent}`) + } } catch (error) { core.error(error.stack); core.setFailed(error.message); diff --git a/src/issue.ts b/src/issue.ts index 1d38f1e..641b8c6 100644 --- a/src/issue.ts +++ b/src/issue.ts @@ -1,12 +1,26 @@ import { Octokit } from '@octokit/rest'; +import * as core from '@actions/core'; import * as github from '@actions/github'; import { IssueOption, IssueResponse } from './interface'; -export async function createIssue( - token: string, +async function getTrivyIssues(client: Octokit, image: string, labels: string[] | undefined) { + if 
(labels == null) {
+    return []
+  }
+
+  let {
+    data: trivyIssues,
+  } = await client.issues.listForRepo(
+    { ...github.context.repo, state: "open", labels: labels.join(",") }
+  );
+
+  return trivyIssues.filter(issue => issue.body.includes(image))
+}
+
+async function createIssue(
+  client: Octokit,
   options: IssueOption
-): Promise<IssueResponse> {
-  const client: Octokit = new Octokit({ auth: token });
+): Promise<Octokit.IssuesCreateResponse> {
   const {
     data: issue,
   }: Octokit.Response<Octokit.IssuesCreateResponse> = await client.issues.create(
@@ -15,9 +29,48 @@ export async function createIssue(
       ...options,
     }
   );
-  const result: IssueResponse = {
-    issueNumber: issue.number,
-    htmlUrl: issue.html_url,
-  };
-  return result;
+  return issue
+}
+
+async function updateIssue(
+  issueNumber: number,
+  client: Octokit,
+  options: IssueOption
+): Promise<void> {
+  await client.issues.update(
+    {
+      ...github.context.repo,
+      issue_number: issueNumber,
+      body: options.body
+    }
+  );
+}
+
+export async function createOrUpdateIssue(
+  token: string,
+  image: string,
+  options: IssueOption
+): Promise<IssueResponse> {
+  const client: Octokit = new Octokit({ auth: token });
+
+  const trivyIssues = await getTrivyIssues(client, image, options.labels)
+
+  if (trivyIssues.length > 0) {
+    core.info("Found existing issue. Updating existing issue.")
+
+    const existingIssue = trivyIssues[0]
+    await updateIssue(existingIssue.number, client, options)
+    return {
+      issueNumber: existingIssue.number,
+      htmlUrl: existingIssue.html_url,
+    }
+  } else {
+    core.info("Create new issue")
+
+    const newIssue = await createIssue(client, options)
+    return {
+      issueNumber: newIssue.number,
+      htmlUrl: newIssue.html_url,
+    }
+  }
 }
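
Reviewer note (not part of the patch): a minimal sketch of how the new createOrUpdateIssue entry point could be exercised from a local script, to make the deduplication behavior easier to check. The IssueOption fields used here (title, body, labels, assignees) and the environment variable handling are assumptions based on the surrounding code, not the actual definitions in src/interface.ts; only options.body, options.labels, createOrUpdateIssue(token, image, options), and the IssueResponse shape are taken from this diff. The key point is that the scanned image name must appear in the issue body, since getTrivyIssues matches existing issues on that string.

// sketch.ts - hypothetical local usage; adjust fields to src/interface.ts
import { createOrUpdateIssue } from './issue';
import { IssueOption, IssueResponse } from './interface';

async function main(): Promise<void> {
  // @actions/github derives the target repository from GITHUB_REPOSITORY ("owner/repo").
  process.env.GITHUB_REPOSITORY = process.env.GITHUB_REPOSITORY || 'owner/repo';

  // Sample image name; it must be contained in the issue body for dedup to find it later.
  const image = 'alpine:3.10';
  const options: IssueOption = {
    title: 'Security Alert',
    body: `Trivy scan result for ${image}\n\n(vulnerability table goes here)`,
    labels: ['trivy', 'vulnerability'],
    assignees: [],
  };

  // Reuses an open issue that carries the labels above and mentions the image,
  // otherwise opens a new one; either way an IssueResponse comes back.
  const result: IssueResponse = await createOrUpdateIssue(
    process.env.GITHUB_TOKEN as string,
    image,
    options
  );
  console.log(`issue #${result.issueNumber}: ${result.htmlUrl}`);
}

main().catch(error => {
  console.error(error);
  process.exit(1);
});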