diff --git a/.eslint-ratchet-high-water-mark b/.eslint-ratchet-high-water-mark
index 98102958a72..c7410a5b0a0 100644
--- a/.eslint-ratchet-high-water-mark
+++ b/.eslint-ratchet-high-water-mark
@@ -1 +1 @@
-1874
+1675
diff --git a/cumulus/tasks/delete-ingest-tracking-data/test/filter-payload-spec.js b/cumulus/tasks/delete-ingest-tracking-data/test/filter-payload-spec.js
index e00a57f7b1a..44ca014f5be 100644
--- a/cumulus/tasks/delete-ingest-tracking-data/test/filter-payload-spec.js
+++ b/cumulus/tasks/delete-ingest-tracking-data/test/filter-payload-spec.js
@@ -3,6 +3,6 @@ const test = require('ava');
 const filter = require('../index');
 
 // Nothing really to test here - just a placeholder for future
-test('Dummy test', t => {
+test('Dummy test', (t) => {
   t.is(1, 1);
 });
diff --git a/cumulus/tasks/delete-pdr-ftp/test/delete-pdr-ftp-spec.js b/cumulus/tasks/delete-pdr-ftp/test/delete-pdr-ftp-spec.js
index 67373cdd636..73c823e96ce 100644
--- a/cumulus/tasks/delete-pdr-ftp/test/delete-pdr-ftp-spec.js
+++ b/cumulus/tasks/delete-pdr-ftp/test/delete-pdr-ftp-spec.js
@@ -1,6 +1,6 @@
 'use strict';
 
 const test = require('ava');
-test('TODO - add test', t => {
+test('TODO - add test', (t) => {
   t.is(1, 1);
 });
diff --git a/cumulus/tasks/discover-cmr-granules/index.js b/cumulus/tasks/discover-cmr-granules/index.js
index 6fe98035bd5..d533686f2f2 100644
--- a/cumulus/tasks/discover-cmr-granules/index.js
+++ b/cumulus/tasks/discover-cmr-granules/index.js
@@ -62,7 +62,7 @@ module.exports = class DiscoverCmrGranulesTask extends Task {
     const filtered = this.excludeFiltered(messages, this.config.filtered_granule_keys);
 
     // Write the messages to a DynamoDB table so we can track ingest failures
-    const messagePromises = filtered.map(msg => {
+    const messagePromises = filtered.map((msg) => {
       const { granuleId, version, collection } = msg.meta;
       const params = {
         TableName: this.config.ingest_tracking_table,
diff --git a/cumulus/tasks/discover-http-tiles/index.js b/cumulus/tasks/discover-http-tiles/index.js
index f2d0fe9904b..1dbffb819ab 100644
--- a/cumulus/tasks/discover-http-tiles/index.js
+++ b/cumulus/tasks/discover-http-tiles/index.js
@@ -48,12 +48,12 @@ module.exports = class DiscoverHttpTilesTask extends Task {
     if (granuleFilter) {
       if (granuleFilter.filtered_granule_keys) {
         const keySet = new Set(granuleFilter.filtered_granule_keys);
-        filterFn = msg => keySet.has(msg.meta.key);
+        filterFn = (msg) => keySet.has(msg.meta.key);
       }
       else if (granuleFilter.filtered_granule_key_start) {
         const start = granuleFilter.filtered_granule_key_start;
         const end = granuleFilter.filtered_granule_key_end;
-        filterFn = msg => msg.meta.key >= start && msg.meta.key <= end;
+        filterFn = (msg) => msg.meta.key >= start && msg.meta.key <= end;
       }
     }
     return messages.filter(filterFn);
diff --git a/cumulus/tasks/discover-pdr/index.js b/cumulus/tasks/discover-pdr/index.js
index 31a72c873e6..00668c43b57 100644
--- a/cumulus/tasks/discover-pdr/index.js
+++ b/cumulus/tasks/discover-pdr/index.js
@@ -53,7 +53,7 @@ module.exports = class DiscoverPdr extends Task {
     // Get the list of PDRs
     const pdrList = await pdrMod.getPdrList(client, folder, bucket, keyPrefix);
 
-    const S3UploadPromises = pdrList.map(async pdrEntry => {
+    const S3UploadPromises = pdrList.map(async (pdrEntry) => {
       const fileName = pdrEntry.name;
       log.info(`FILE: ${fileName}`);
       // Get the file contents
diff --git a/cumulus/tasks/discover-pdr/pdr.js b/cumulus/tasks/discover-pdr/pdr.js
index bbf7eae59f2..8b4945a5c4b 100644
--- a/cumulus/tasks/discover-pdr/pdr.js
+++ b/cumulus/tasks/discover-pdr/pdr.js
@@ -17,7 +17,7 @@ exports.getPdrList = async (client, folder, bucket, keyPrefix) => {
   const pdrs = await listSync(folder);
 
   // Check to see which files we already have in S3
-  const fileExistsPromises = pdrs.map(async pdr => {
+  const fileExistsPromises = pdrs.map(async (pdr) => {
     const fileName = pdr.name;
     return S3.fileExists(bucket, `${keyPrefix}/${fileName}`);
   });
diff --git a/cumulus/tasks/discover-pdr/test/discover-pdr-spec.js b/cumulus/tasks/discover-pdr/test/discover-pdr-spec.js
index 67373cdd636..73c823e96ce 100644
--- a/cumulus/tasks/discover-pdr/test/discover-pdr-spec.js
+++ b/cumulus/tasks/discover-pdr/test/discover-pdr-spec.js
@@ -1,6 +1,6 @@
 'use strict';
 
 const test = require('ava');
-test('TODO - add test', t => {
+test('TODO - add test', (t) => {
   t.is(1, 1);
 });
diff --git a/cumulus/tasks/download-activity-mock/test/download-activity-mock-spec.js b/cumulus/tasks/download-activity-mock/test/download-activity-mock-spec.js
index 67373cdd636..73c823e96ce 100644
--- a/cumulus/tasks/download-activity-mock/test/download-activity-mock-spec.js
+++ b/cumulus/tasks/download-activity-mock/test/download-activity-mock-spec.js
@@ -1,6 +1,6 @@
 'use strict';
 
 const test = require('ava');
-test('TODO - add test', t => {
+test('TODO - add test', (t) => {
   t.is(1, 1);
 });
diff --git a/cumulus/tasks/filter-payload/test/filter-payload-spec.js b/cumulus/tasks/filter-payload/test/filter-payload-spec.js
index e00a57f7b1a..44ca014f5be 100644
--- a/cumulus/tasks/filter-payload/test/filter-payload-spec.js
+++ b/cumulus/tasks/filter-payload/test/filter-payload-spec.js
@@ -3,6 +3,6 @@ const test = require('ava');
 const filter = require('../index');
 
 // Nothing really to test here - just a placeholder for future
-test('Dummy test', t => {
+test('Dummy test', (t) => {
   t.is(1, 1);
 });
diff --git a/cumulus/tasks/generate-pan/pan.js b/cumulus/tasks/generate-pan/pan.js
index f3e1c4a97fd..efb31f1c06d 100644
--- a/cumulus/tasks/generate-pan/pan.js
+++ b/cumulus/tasks/generate-pan/pan.js
@@ -22,7 +22,7 @@ exports.generatePan = (files, timeStamp) => {
 
     pan += `NO_OF_FILES = ${files.length};\n`;
 
-    files.forEach(file => {
+    files.forEach((file) => {
      const fileName = file.source.url.substring(file.source.url.lastIndexOf('/') + 1);
      const filePath = file.source.url.substring(file.source.url.lastIndexOf(':') + 3);
      const fileDirectory = path.dirname(filePath);
diff --git a/cumulus/tasks/generate-pan/test/generate-pan-spec.js b/cumulus/tasks/generate-pan/test/generate-pan-spec.js
index c63e5311f91..78c78eb731d 100644
--- a/cumulus/tasks/generate-pan/test/generate-pan-spec.js
+++ b/cumulus/tasks/generate-pan/test/generate-pan-spec.js
@@ -11,7 +11,7 @@ const shortPan = (dateTime) =>
 DISPOSITION = "SUCCESSFUL";
 TIME_STAMP = ${timeStamp(dateTime)};`;
 
-test('generates a short PAN if all files succeed', t => {
+test('generates a short PAN if all files succeed', (t) => {
   const input = allSuccessFixture.input;
   const now = new Date();
   const timeStampStr = timeStamp(now);
@@ -19,7 +19,7 @@ test('generates a short PAN if all files succeed', t => {
   t.is(result, shortPan(now));
 });
 
-test('generates a long pan with an entry for the number of files (NO_OF_FILES)', t => {
+test('generates a long pan with an entry for the number of files (NO_OF_FILES)', (t) => {
   const input = missingFileFixture.input;
   const now = new Date();
   const timeStampStr = timeStamp(now);
@@ -28,7 +28,7 @@ test('generates a long pan with an entry for the number of files (NO_OF_FILES)',
   t.is(parseInt(numFilesEntry, 10), input.length);
 });
 
-test('generates a disposition message for each file in a long PAN', t => {
+test('generates a disposition message for each file in a long PAN', (t) => {
   const input = missingFileFixture.input;
   const now = new Date();
   const timeStampStr = timeStamp(now);
@@ -37,7 +37,7 @@ test('generates a disposition message for each file in a long PAN', t => {
   t.is(dispositions.length, 2);
 });
 
-test('generates a timestamp for each file entry', t => {
+test('generates a timestamp for each file entry', (t) => {
   const input = missingFileFixture.input;
   const now = new Date();
   const timeStampStr = timeStamp(now);
@@ -48,7 +48,7 @@ test('generates a timestamp for each file entry', t => {
   t.is(timeStampCount, input.length);
 });
 
-test('generates an error message for each missing file', t => {
+test('generates an error message for each missing file', (t) => {
   const input = missingFileFixture.input;
   const now = new Date();
   const timeStampStr = timeStamp(now);
diff --git a/cumulus/tasks/generate-pdr-file-list/pdr.js b/cumulus/tasks/generate-pdr-file-list/pdr.js
index fa0b94325a6..cff940cf4dc 100644
--- a/cumulus/tasks/generate-pdr-file-list/pdr.js
+++ b/cumulus/tasks/generate-pdr-file-list/pdr.js
@@ -11,7 +11,7 @@ const fileSpecFields =
  * @return {PVLRoot} An object representing a PDR
  * @throws {Error} Throws an Error if parsing fails
  */
-exports.parsePdr = pdr => pvl.pvlToJS(pdr);
+exports.parsePdr = (pdr) => pvl.pvlToJS(pdr);
 
 /**
  * Convert a PVL FILE_SPEC entry into an object with enough information to download the
diff --git a/cumulus/tasks/generate-pdr-file-list/test/generate-pdr-file-list-spec.js b/cumulus/tasks/generate-pdr-file-list/test/generate-pdr-file-list-spec.js
index 335252fa0ba..44dce28deec 100644
--- a/cumulus/tasks/generate-pdr-file-list/test/generate-pdr-file-list-spec.js
+++ b/cumulus/tasks/generate-pdr-file-list/test/generate-pdr-file-list-spec.js
@@ -4,12 +4,12 @@ const pdr = require('../pdr');
 
 const goodFileFixture = require('./fixtures/good-pdr-fixture');
 
-test('pdrToFileList() - generates an entry for each file', t => {
+test('pdrToFileList() - generates an entry for each file', (t) => {
   const files = pdr.pdrToFileList(goodFileFixture.input, 'localhost', 21);
   t.is(files.length, 3);
 });
 
-test('fileSpecToFileEntry() - generates proper fields', t => {
+test('fileSpecToFileEntry() - generates proper fields', (t) => {
   const pdrObj = pdr.parsePdr(goodFileFixture.input);
   const fileGroups = pdrObj.objects('FILE_GROUP');
   const host = 'localhost';
diff --git a/cumulus/tasks/generate-pdrd/pdrd.js b/cumulus/tasks/generate-pdrd/pdrd.js
index 92b550786b0..86d8f3f4145 100644
--- a/cumulus/tasks/generate-pdrd/pdrd.js
+++ b/cumulus/tasks/generate-pdrd/pdrd.js
@@ -12,7 +12,7 @@ ${topLevelErrors[0]}`;
     pdrd = 'MESSAGE_TYPE = LONGPDRD;\n';
     pdrd += `NO_FILE_GRPS = ${fileGroupErrors.length}\n`;
 
-    fileGroupErrors.forEach(errors => {
+    fileGroupErrors.forEach((errors) => {
       if (errors.length > 0) {
         pdrd += errors[0];
       }
diff --git a/cumulus/tasks/generate-pdrd/test/generate-pdrd-spec.js b/cumulus/tasks/generate-pdrd/test/generate-pdrd-spec.js
index c1087395e22..8fba176fd2a 100644
--- a/cumulus/tasks/generate-pdrd/test/generate-pdrd-spec.js
+++ b/cumulus/tasks/generate-pdrd/test/generate-pdrd-spec.js
@@ -7,7 +7,7 @@ const badFileEntryFixture = require('./fixtures/bad-file-entry-fixture');
 const invalidPvlFixture = require('./fixtures/invalid-pvl-fixture');
 const missingFieldsFixture = require('./fixtures/missing-fields-fixture');
 
-test('generatePdrd() - invalid PVL gets a short PDRD', t => {
+test('generatePdrd() - invalid PVL gets a short PDRD', (t) => {
   const pdrdStr = pdrd.generatePdrd(
     invalidPvlFixture.topLevelErrors,
     invalidPvlFixture.fileGroupErrors
@@ -20,7 +20,7 @@ test('generatePdrd() - invalid PVL gets a short PDRD', t => {
   t.is(errMsg, 'INVALID PVL STATEMENT');
 });
 
-test('generatePdrd() - missing TOTAL_FILE_COUNT gets a short PDRD', t => {
+test('generatePdrd() - missing TOTAL_FILE_COUNT gets a short PDRD', (t) => {
   const pdrdStr = pdrd.generatePdrd(
     missingFieldsFixture.invalidFileCount.input.topLevelErrors,
     missingFieldsFixture.invalidFileCount.input.fileGroupErrors
@@ -52,7 +52,7 @@ const testMacro = (t, fixture) => {
   t.is(errMsg, fixture.error);
 };
 
-test('generatePdrd() - missing file fields gets a long PDRD', t => {
-  badFileEntryFixture.fixtures.forEach(fixture => testMacro(t, fixture));
+test('generatePdrd() - missing file fields gets a long PDRD', (t) => {
+  badFileEntryFixture.fixtures.forEach((fixture) => testMacro(t, fixture));
 });
diff --git a/cumulus/tasks/trigger-process-pdrs/test/trigger-process-pdrs-spec.js b/cumulus/tasks/trigger-process-pdrs/test/trigger-process-pdrs-spec.js
index be9b414fffc..8de72c9c3d4 100644
--- a/cumulus/tasks/trigger-process-pdrs/test/trigger-process-pdrs-spec.js
+++ b/cumulus/tasks/trigger-process-pdrs/test/trigger-process-pdrs-spec.js
@@ -3,7 +3,7 @@ const test = require('ava');
 const helpers = require('@cumulus/common/test-helpers');
 const TriggerProcessPdrs = require('../index');
 
-test('trigger process PDRs', async t => {
+test('trigger process PDRs', async (t) => {
   const payload = [
     { s3_key: '123' },
     { s3_key: 'ABC' }
diff --git a/cumulus/tasks/validate-archives/archive-validations.js b/cumulus/tasks/validate-archives/archive-validations.js
index f26806ed859..bcc5609fcde 100644
--- a/cumulus/tasks/validate-archives/archive-validations.js
+++ b/cumulus/tasks/validate-archives/archive-validations.js
@@ -39,14 +39,14 @@ exports.validateArchiveContents = (archiveDirPath) => {
   // out here.
   const unarchivedFiles = fs
     .readdirSync(archiveDirPath)
-    .filter(fileName => !fileName.startsWith('._'));
+    .filter((fileName) => !fileName.startsWith('._'));
 
   log.debug(`UNARCHIVED FILES: ${JSON.stringify(unarchivedFiles)}`);
 
   let hasImage = false;
   let hasWorldFile = false;
   let hasMetadata = false;
-  unarchivedFiles.forEach(filePath => {
+  unarchivedFiles.forEach((filePath) => {
     log.debug(filePath);
     const ext = path.extname(filePath).toUpperCase();
     if (ext === '.JPG' || ext === '.PNG') hasImage = true;
diff --git a/cumulus/tasks/validate-archives/index.js b/cumulus/tasks/validate-archives/index.js
index e90342f5bdc..d2d7b40191a 100644
--- a/cumulus/tasks/validate-archives/index.js
+++ b/cumulus/tasks/validate-archives/index.js
@@ -20,7 +20,7 @@ const decompress = promisify(tarGz.decompress);
  * @param {string} archiveFilePath
  * @return {string} The un-archive directory
  */
-const archiveDir = archiveFilePath => {
+const archiveDir = (archiveFilePath) => {
   // archive files must be .tgz or .tar.gz files
   const segments = archiveFilePath.match(/(.*?)(\.tar\.gz|\.tgz)/i);
   return segments[1];
@@ -33,7 +33,7 @@
  * @param {Object} fileAttrs An object that contains attributes about the archive file
  */
 const uploadArchiveFilesToS3 = async (unarchivedFiles, archiveDirPath, fileAttrs) => {
-  const fullFilePaths = unarchivedFiles.map(fileName => path.join(archiveDirPath, fileName));
+  const fullFilePaths = unarchivedFiles.map((fileName) => path.join(archiveDirPath, fileName));
   const s3DirKey = archiveDir(fileAttrs.target.key);
   return aws.uploadS3Files(fullFilePaths, fileAttrs.target.bucket, s3DirKey);
 };
@@ -59,7 +59,7 @@ const extractArchive = async (tmpDir, archiveFilePath) => {
  * @param {string} archiveDirPath The path where the files were extracted
 */
 const deleteExpandedFiles = async (unarchivedFiles, archiveDirPath) => {
-  unarchivedFiles.forEach(fileName => {
+  unarchivedFiles.forEach((fileName) => {
     const fullPath = path.join(archiveDirPath, fileName);
     fs.unlinkSync(fullPath);
   });
@@ -88,8 +88,8 @@ module.exports = class ValidateArchives extends Task {
 
     // Only files that were successfully downloaded by the provider gateway will be processed
     const archiveFiles = files
-      .filter(file => file.success)
-      .map(file => [file.target.bucket, file.target.key]);
+      .filter((file) => file.success)
+      .map((file) => [file.target.bucket, file.target.key]);
 
     const downloadRequest = archiveFiles.map(([s3Bucket, s3Key]) => ({
       Bucket: s3Bucket,
@@ -107,7 +107,7 @@ module.exports = class ValidateArchives extends Task {
 
     // Compute the dispositions (status) for each file downloaded successfully by
     // the provider gateway
-    const dispositionPromises = files.map(async fileAttrs => {
+    const dispositionPromises = files.map(async (fileAttrs) => {
       // Only process archives that were downloaded successfully by the provider gateway
       if (fileAttrs.success) {
         const archiveFileName = path.basename(fileAttrs.target.key);
@@ -152,7 +152,7 @@ module.exports = class ValidateArchives extends Task {
         log.info('S3 FILES:');
         log.info(JSON.stringify(s3Files));
 
-        const imgFiles = s3Files.map(s3File => ({ Bucket: s3File.bucket, Key: s3File.key }));
+        const imgFiles = s3Files.map((s3File) => ({ Bucket: s3File.bucket, Key: s3File.key }));
 
         if (imgFiles.length > 0) {
           imageSources.push({ archive: archiveFileName, images: imgFiles });
diff --git a/cumulus/tasks/validate-pdr/index.js b/cumulus/tasks/validate-pdr/index.js
index bbb7990539b..26730925404 100644
--- a/cumulus/tasks/validate-pdr/index.js
+++ b/cumulus/tasks/validate-pdr/index.js
@@ -14,7 +14,7 @@ function fetchPdr(bucket, key) {
 }
 
 function isPdrValid(topLevelErrors, fileGroupErrors) {
-  return topLevelErrors.length > 0 || fileGroupErrors.some(errors => errors.length > 0);
+  return topLevelErrors.length > 0 || fileGroupErrors.some((errors) => errors.length > 0);
 }
 
 async function handler(event, context, callback) {
diff --git a/cumulus/tasks/validate-pdr/pdr-validations.js b/cumulus/tasks/validate-pdr/pdr-validations.js
index c3dad2f63ee..5509f95d460 100644
--- a/cumulus/tasks/validate-pdr/pdr-validations.js
+++ b/cumulus/tasks/validate-pdr/pdr-validations.js
@@ -16,7 +16,7 @@ const pdrMod = require('./pdr');
  * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const directoryIdValidation = fileSpec => {
+const directoryIdValidation = (fileSpec) => {
   const directoryId = fileSpec.get('DIRECTORY_ID');
   return !directoryId || directoryId.value === ''
     ? 'INVALID DIRECTORY' : null;
@@ -27,7 +27,7 @@
 * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const fileSizeValidation = fileSpec => {
+const fileSizeValidation = (fileSpec) => {
   const fileSize = fileSpec.get('FILE_SIZE');
   return !fileSize || fileSize.value < 1
     ? 'INVALID FILE SIZE' : null;
@@ -38,7 +38,7 @@
 * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const fileIdValidation = fileSpec => {
+const fileIdValidation = (fileSpec) => {
   const fileId = fileSpec.get('FILE_ID');
   return !fileId || fileId.value === ''
     ? 'INVALID FILE ID' : null;
@@ -49,7 +49,7 @@
 * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const fileTypeValidation = fileSpec => {
+const fileTypeValidation = (fileSpec) => {
   const fileType = fileSpec.get('FILE_TYPE');
   return !fileType || fileType.value === ''
     ? 'INVALID FILE TYPE' : null;
@@ -60,7 +60,7 @@
 * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const fileCksumTypeMissingValidation = fileSpec => {
+const fileCksumTypeMissingValidation = (fileSpec) => {
   const cksumType = fileSpec.get('FILE_CKSUM_TYPE');
   return !cksumType || cksumType.value === ''
     ? 'MISSING FILE_CKSUM_TYPE PARAMETER' : null;
@@ -71,7 +71,7 @@
 * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const fileCksumTypeValidation = fileSpec => {
+const fileCksumTypeValidation = (fileSpec) => {
   const cksumTypeEntry = fileSpec.get('FILE_CKSUM_TYPE');
   const cksumType = cksumTypeEntry ? cksumTypeEntry.value : null;
   return cksumType === 'MD5' || cksumType === 'SHA1' ? null : 'UNSUPPORTED CHECKSUM TYPE';
@@ -82,7 +82,7 @@
 * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const fileCksumValueMissingValidation = fileSpec => {
+const fileCksumValueMissingValidation = (fileSpec) => {
   const cksum = fileSpec.get('FILE_CKSUM_VALUE');
   return (!cksum || cksum.value === '')
     ? 'MISSING FILE_CKSUM_VALUE PARAMETER' : null;
@@ -93,7 +93,7 @@
 * @param {PVLObject} fileSpec
 * @return {string} An error string or null
 */
-const fileCksumValueValidation = fileSpec => {
+const fileCksumValueValidation = (fileSpec) => {
   const cksumEntry = fileSpec.get('FILE_CKSUM_VALUE');
   const cksum = cksumEntry ? cksumEntry.value : '';
   const cksumType = fileSpec.get('FILE_CKSUM_TYPE');
@@ -121,8 +121,8 @@ const fileSpecValidations = [
 * @param {PVLObject} fileGroup A `PVLObject` object representing a file group entry
 * @return {Array} An (possibly empty) array of error strings.
 */
-const validateFileSpec = fileSpec =>
-  fileSpecValidations.map(validationFn => validationFn(fileSpec)).filter(err => err);
+const validateFileSpec = (fileSpec) =>
+  fileSpecValidations.map((validationFn) => validationFn(fileSpec)).filter((err) => err);
 
 /**
 * File group validations
@@ -133,7 +133,7 @@
 * @param {PVLObject} fileGroup A `PVLObject` object representing a file group entry
 * @return {string} An error string or null
 */
-const dataTypeValidation = fileGroup => {
+const dataTypeValidation = (fileGroup) => {
   const dataType = fileGroup.get('DATA_TYPE');
 
   let rval = null;
@@ -149,7 +149,7 @@
 * @param {PVLObject} fileGroup A `PVLObject` object representing a file group entry
 * @return {string} An error string or null
 */
-const versionIdValidation = fileGroup => {
+const versionIdValidation = (fileGroup) => {
   const versionId = fileGroup.get('VERSION_ID') || fileGroup.get('DATA_VERSION');
 
   let rval = null;
@@ -168,16 +168,16 @@ const fileGroupValidations = [dataTypeValidation, versionIdValidation];
 * @param {PVLObject} fileGroup A `PVLObject` object representing a file group entry
 * @return {Array} An (possibly empty) array of error strings.
 */
-const validateFileGroup = fileGroup => {
-  const fileGroupErrors = fileGroupValidations.map(validationFn => validationFn(fileGroup))
-    .filter(err => err);
+const validateFileGroup = (fileGroup) => {
+  const fileGroupErrors = fileGroupValidations.map((validationFn) => validationFn(fileGroup))
+    .filter((err) => err);
   if (fileGroupErrors.length > 0) {
     return fileGroupErrors;
   }
   // No errors in file group parameters, so validate each FILE_SPEC in the FILE_GROUP
   const fileSpecs = fileGroup.objects('FILE_SPEC');
   const fileSpecErrors = [];
-  fileSpecs.forEach(fileSpec => {
+  fileSpecs.forEach((fileSpec) => {
     const fileErrors = validateFileSpec(fileSpec);
     if (fileErrors.length > 0) {
       // Only need one error
@@ -197,7 +197,7 @@
 * @param {PVLRoot} pdr The `PVLRoot` object for the PDR
 * @return An error string or null
 */
-const fileCountValidation = pdr => {
+const fileCountValidation = (pdr) => {
   let rval = null;
   if (!pdr.get('TOTAL_FILE_COUNT') || pdr.get('TOTAL_FILE_COUNT').value < 1) {
     rval = 'INVALID FILE COUNT';
@@ -211,11 +211,11 @@ const pdrTopLevelValidations = [fileCountValidation];
 
 /**
 * Performs a series of top-level validations on a PDR
 */
-const validateTopLevelPdr = pdr =>
-  pdrTopLevelValidations.map(validationFn => validationFn(pdr)).filter(err => err);
+const validateTopLevelPdr = (pdr) =>
+  pdrTopLevelValidations.map((validationFn) => validationFn(pdr)).filter((err) => err);
 
-exports.validatePdr = pdr => {
+exports.validatePdr = (pdr) => {
   // Parse the PDR and do a preliminary validation
   let pdrObj;
   let topLevelErrors = [];
diff --git a/cumulus/tasks/validate-pdr/pdr.js b/cumulus/tasks/validate-pdr/pdr.js
index 03f0a8a6ec3..0ffbcf510ae 100644
--- a/cumulus/tasks/validate-pdr/pdr.js
+++ b/cumulus/tasks/validate-pdr/pdr.js
@@ -8,4 +8,4 @@ const pvl = require('@cumulus/pvl/t');
 * @return {PVLRoot} An object representing a PDR
 * @throws {Error} Throws an Error if parsing fails
 */
-exports.parsePdr = pdr => pvl.pvlToJS(pdr);
+exports.parsePdr = (pdr) => pvl.pvlToJS(pdr);
diff --git a/cumulus/tasks/validate-pdr/test/pdr-validations.test.js b/cumulus/tasks/validate-pdr/test/pdr-validations.test.js
index b347e712670..d831ae83244 100644
--- a/cumulus/tasks/validate-pdr/test/pdr-validations.test.js
+++ b/cumulus/tasks/validate-pdr/test/pdr-validations.test.js
@@ -19,21 +19,21 @@ const testMacro = (t, fixture) => {
 };
 
 // Good PDR gets no errors
-test('validatePdr() - success', t => {
+test('validatePdr() - success', (t) => {
   testMacro(t, successFixture);
 });
 
 // Bad PVL in PDR
-test('validatePdr() - invalid PVL', t => {
+test('validatePdr() - invalid PVL', (t) => {
   testMacro(t, invalidPvlFixture);
 });
 
 // High level missing field
-test('validatePdr() - top level missing field', t => {
+test('validatePdr() - top level missing field', (t) => {
   testMacro(t, missingFieldsFixture);
 });
 
 // File group and file spec errors
-test('validatePdr() - file group / file spec errors', t => {
+test('validatePdr() - file group / file spec errors', (t) => {
   testMacro(t, fileErrorsFixture);
 });
diff --git a/packages/api/endpoints/collections.js b/packages/api/endpoints/collections.js
index c9161be8ae5..f676aa0e705 100644
--- a/packages/api/endpoints/collections.js
+++ b/packages/api/endpoints/collections.js
@@ -18,7 +18,7 @@ const examplePayload = require('../tests/data/collections_post.json');
 */
 function list(event, cb) {
   const collection = new Collection(event);
-  collection.query().then(res => cb(null, res)).catch(cb);
+  collection.query().then((res) => cb(null, res)).catch(cb);
 }
 
 /**
@@ -36,7 +36,7 @@ function get(event, cb) {
       const collection = new Collection(event);
       return collection.getStats([res], [res.name]);
     })
-    .then(res => cb(null, res[0]))
+    .then((res) => cb(null, res[0]))
     .catch(cb);
 }
 
@@ -123,7 +123,7 @@ function handler(event, context) {
     return context.fail('HttpMethod is missing');
   }
 
-  return handle(event, context, !inTestMode() /* authCheck */, cb => {
+  return handle(event, context, !inTestMode() /* authCheck */, (cb) => {
     if (event.httpMethod === 'GET' && event.pathParameters) {
       get(event, cb);
     }
@@ -146,7 +146,7 @@ module.exports = handler;
 
 justLocalRun(() => {
   handler(examplePayload, {
-    succeed: r => log.error(r),
-    failed: e => log.error(e)
+    succeed: (r) => log.error(r),
+    failed: (e) => log.error(e)
   }, (e, r) => log.error(e, r));
 });
diff --git a/packages/api/endpoints/distribution.js b/packages/api/endpoints/distribution.js
index 4f51abd19e4..e1aa8916d0b 100644
--- a/packages/api/endpoints/distribution.js
+++ b/packages/api/endpoints/distribution.js
@@ -100,7 +100,7 @@ function handler(event, context, cb) {
         'Strict-Transport-Security': 'max-age=31536000'
       }
     });
-  }).catch(e => cb(e));
+  }).catch((e) => cb(e));
 }
 
 // ending up here means that user was not login
diff --git a/packages/api/endpoints/executions.js b/packages/api/endpoints/executions.js
index f3908b47ec7..1094c84a5cb 100644
--- a/packages/api/endpoints/executions.js
+++ b/packages/api/endpoints/executions.js
@@ -13,7 +13,7 @@ const Search = require('../es/search').Search;
 */
 function list(event, cb) {
   const search = new Search(event, 'execution');
-  search.query().then(response => cb(null, response)).catch((e) => {
+  search.query().then((response) => cb(null, response)).catch((e) => {
     cb(e);
   });
 }
diff --git a/packages/api/endpoints/granules.js b/packages/api/endpoints/granules.js
index fcd9a3ef5e5..1656a2d97d4 100644
--- a/packages/api/endpoints/granules.js
+++ b/packages/api/endpoints/granules.js
@@ -37,7 +37,7 @@ async function removeGranuleFromCmr(granuleId, collectionId) {
 */
 function list(event, cb) {
   const search = new Search(event, 'granule');
-  search.query().then(response => cb(null, response)).catch((e) => {
+  search.query().then((response) => cb(null, response)).catch((e) => {
     cb(e);
   });
 }
@@ -131,10 +131,10 @@ function handler(event, context) {
       get(event, cb);
     }
     else if (event.httpMethod === 'PUT' && event.pathParameters) {
-      put(event).then(r => cb(null, r)).catch(e => cb(e));
+      put(event).then((r) => cb(null, r)).catch((e) => cb(e));
     }
     else if (event.httpMethod === 'DELETE' && event.pathParameters) {
-      del(event).then(r => cb(null, r)).catch(e => cb(e));
+      del(event).then((r) => cb(null, r)).catch((e) => cb(e));
     }
     else {
       list(event, cb);
diff --git a/packages/api/endpoints/pdrs.js b/packages/api/endpoints/pdrs.js
index 2b00183d9c2..c7e0c55e4ea 100644
--- a/packages/api/endpoints/pdrs.js
+++ b/packages/api/endpoints/pdrs.js
@@ -15,7 +15,7 @@ const Search = require('../es/search').Search;
 */
 function list(event, cb) {
   const search = new Search(event, 'pdr');
-  search.query().then(response => cb(null, response)).catch((e) => {
+  search.query().then((response) => cb(null, response)).catch((e) => {
     cb(e);
   });
 }
@@ -61,7 +61,7 @@ function handler(event, context) {
      get(event, cb);
     }
     else if (event.httpMethod === 'DELETE' && event.pathParameters) {
-      del(event).then(r => cb(null, r)).catch(e => cb(e));
+      del(event).then((r) => cb(null, r)).catch((e) => cb(e));
     }
     else {
       list(event, cb);
diff --git a/packages/api/endpoints/providers.js b/packages/api/endpoints/providers.js
index 46e8bf3b4b9..7cb7ad0b926 100644
--- a/packages/api/endpoints/providers.js
+++ b/packages/api/endpoints/providers.js
@@ -16,7 +16,7 @@ const { Search } = require('../es/search');
 */
 function list(event, cb) {
   const search = new Search(event, 'provider');
-  search.query().then(response => cb(null, response)).catch(cb);
+  search.query().then((response) => cb(null, response)).catch(cb);
 }
 
 /**
@@ -57,8 +57,8 @@ function post(event, cb) {
     .catch((e) => {
       if (e instanceof RecordDoesNotExist) {
         return p.create(data)
-          .then(data => cb(null, { message: 'Record saved', record: data }))
-          .catch(err => cb(err));
+          .then((data) => cb(null, { message: 'Record saved', record: data }))
+          .catch((err) => cb(err));
       }
       return cb(e);
     });
@@ -87,7 +87,7 @@ function put(event, cb) {
       originalData = d;
       return p.update({ id }, data);
     })
-    .then(data => cb(null, data))
+    .then((data) => cb(null, data))
     .catch((err) => {
       if (err instanceof RecordDoesNotExist) cb({ message: 'Record does not exist' });
       return cb(err);
diff --git a/packages/api/endpoints/rules.js b/packages/api/endpoints/rules.js
index 394f93d4d23..46a5c87960c 100644
--- a/packages/api/endpoints/rules.js
+++ b/packages/api/endpoints/rules.js
@@ -17,7 +17,7 @@ const { Search } = require('../es/search');
 */
 function list(event, cb) {
   const search = new Search(event, 'rule');
-  search.query().then(response => cb(null, response)).catch(cb);
+  search.query().then((response) => cb(null, response)).catch(cb);
 }
 
 /**
@@ -55,7 +55,7 @@ function post(event, cb) {
     .catch((e) => {
       if (e instanceof RecordDoesNotExist) {
         return model.create(data)
-          .then(r => cb(null, { message: 'Record saved', record: r }))
+          .then((r) => cb(null, { message: 'Record saved', record: r }))
           .catch(cb);
       }
       return cb(e);
@@ -79,9 +79,9 @@ async function put(event) {
   // if the data includes any fields other than state and rule.value
   // throw error
   if (action && action !== 'rerun') {
-    let check = Object.keys(data).filter(f => (f !== 'state' && f !== 'rule'));
+    let check = Object.keys(data).filter((f) => (f !== 'state' && f !== 'rule'));
     if (data.rule) {
-      check = check.concat(Object.keys(data.rule).filter(f => f !== 'value'));
+      check = check.concat(Object.keys(data.rule).filter((f) => f !== 'value'));
     }
     if (check.length > 0) {
       throw new Error('Only state and rule.value values can be changed');
@@ -114,12 +114,12 @@ async function del(event) {
 
   name = name.replace(/%20/g, ' ');
 
-  await model.get({ name }).then(record => model.delete(record));
+  await model.get({ name }).then((record) => model.delete(record));
   return { message: 'Record deleted' };
 }
 
 function handler(event, context) {
-  return handle(event, context, !inTestMode() /* authCheck */, cb => {
+  return handle(event, context, !inTestMode() /* authCheck */, (cb) => {
     if (event.httpMethod === 'GET' && event.pathParameters) {
       get(event, cb);
     }
@@ -127,10 +127,10 @@ function handler(event, context) {
       post(event, cb);
     }
     else if (event.httpMethod === 'PUT' && event.pathParameters) {
-      put(event).then(r => cb(null, r)).catch(e => cb(JSON.stringify(e)));
+      put(event).then((r) => cb(null, r)).catch((e) => cb(JSON.stringify(e)));
     }
     else if (event.httpMethod === 'DELETE' && event.pathParameters) {
-      del(event).then(r => cb(null, r)).catch(e => cb(JSON.stringify(e)));
+      del(event).then((r) => cb(null, r)).catch((e) => cb(JSON.stringify(e)));
     }
     else {
       list(event, cb);
diff --git a/packages/api/endpoints/stats.js b/packages/api/endpoints/stats.js
index 18b5bf96119..6dafb37b255 100644
--- a/packages/api/endpoints/stats.js
+++ b/packages/api/endpoints/stats.js
@@ -37,28 +37,28 @@ function summary(event, cb) {
   params.timestamp__to = _get(params, 'timestamp__to', Date.now());
 
   const stats = new Stats({ queryStringParameters: params });
-  stats.query().then(r => cb(null, r)).catch(e => cb(e));
+  stats.query().then((r) => cb(null, r)).catch((e) => cb(e));
 }
 
 function histogram(event, cb) {
   const type = getType(event);
   const stats = new Stats(event, type.type, type.index);
 
-  stats.histogram().then(r => cb(null, r)).catch(e => cb(e));
+  stats.histogram().then((r) => cb(null, r)).catch((e) => cb(e));
 }
 
 function count(event, cb) {
   const type = getType(event);
   const stats = new Stats(event, type.type, type.index);
 
-  stats.count().then(r => cb(null, r)).catch(e => cb(e));
+  stats.count().then((r) => cb(null, r)).catch((e) => cb(e));
 }
 
 function average(event, cb) {
   const type = getType(event);
   const stats = new Stats(event, type.type, type.index);
 
-  stats.avg().then(r => cb(null, r)).catch(e => cb(e));
+  stats.avg().then((r) => cb(null, r)).catch((e) => cb(e));
 }
 
 function handler(event, context) {
diff --git a/packages/api/endpoints/token.js b/packages/api/endpoints/token.js
index ad4e37e2ea1..b76ee906912 100644
--- a/packages/api/endpoints/token.js
+++ b/packages/api/endpoints/token.js
@@ -57,14 +57,14 @@ function token(event, context) {
         });
       }
       return resp(context, null, JSON.stringify({ token: accessToken }), 200);
-    }).catch(e => {
+    }).catch((e) => {
       log.error('User is not authorized', e);
      if (e.message.includes('No record found for')) {
        return resp(context, new Error('User is not authorized to access this site'));
      }
      return resp(context, e);
     });
-  }).catch(e => {
+  }).catch((e) => {
     log.error('Error caught when checking code:', e);
     resp(context, e);
   });
diff --git a/packages/api/endpoints/workflows.js b/packages/api/endpoints/workflows.js
index f334e8dc7ab..1c28bfa2f30 100644
--- a/packages/api/endpoints/workflows.js
+++ b/packages/api/endpoints/workflows.js
@@ -13,10 +13,10 @@ const handle = require('../lib/response').handle;
 */
 function list(event, cb) {
   const key = `${process.env.stackName}/workflows/list.json`;
-  S3.get(process.env.bucket, key).then(file => {
+  S3.get(process.env.bucket, key).then((file) => {
     const workflows = JSON.parse(file.Body.toString());
     return cb(null, workflows);
-  }).catch(e => cb(e));
+  }).catch((e) => cb(e));
 }
 
 /**
@@ -29,7 +29,7 @@ function get(event, cb) {
   const name = _get(event.pathParameters, 'name');
   const key = `${process.env.stackName}/workflows/list.json`;
 
-  S3.get(process.env.bucket, key).then(file => {
+  S3.get(process.env.bucket, key).then((file) => {
     const workflows = JSON.parse(file.Body.toString());
     for (const w of workflows) {
       if (w.name === name) {
@@ -37,7 +37,7 @@
       }
     }
     return cb({ message: `A record already exists for ${name}` });
-  }).catch(e => cb(e));
+  }).catch((e) => cb(e));
 }
 
 function handler(event, context) {
diff --git a/packages/api/es/collections.js b/packages/api/es/collections.js
index 4baa87b26f1..efb9b2cc56f 100644
--- a/packages/api/es/collections.js
+++ b/packages/api/es/collections.js
@@ -53,7 +53,7 @@ class Collection extends BaseSearch {
     });
 
     // add aggs to res
-    records = records.map(r => {
+    records = records.map((r) => {
       r.stats = {
         running: 0,
         completed: 0,
@@ -63,7 +63,7 @@ class Collection extends BaseSearch {
       for (const b of aggs.aggregations.hashes.buckets) {
        if (b.key === r.name) {
          r.stats.total = b.stats.doc_count;
-          b.stats.count.buckets.forEach(s => {
+          b.stats.count.buckets.forEach((s) => {
            r.stats[s.key] = s.doc_count;
          });
          return r;
diff --git a/packages/api/es/queries.js b/packages/api/es/queries.js
index d70f8f329f5..9120511d71d 100644
--- a/packages/api/es/queries.js
+++ b/packages/api/es/queries.js
@@ -17,7 +17,7 @@ const regexes = {
 };
 
 const build = {
-  general: params => ({
+  general: (params) => ({
     query_string: {
       query: params.q
     }
@@ -34,7 +34,7 @@ const build = {
       'name'
     ];
 
-    terms = terms.map(f => f.name);
+    terms = terms.map((f) => f.name);
 
     // remove fields that are included in the termFields
     fields = fields.filter((field) => {
@@ -44,7 +44,7 @@ const build = {
       return false;
     });
 
-    const results = fields.map(f => ({
+    const results = fields.map((f) => ({
       prefix: {
         [`${f}`]: _prefix
       }
@@ -98,7 +98,7 @@ const build = {
 
     // because elasticsearch doesn't support multiple
     // fields in range query, make it an erray
-    const results = Object.keys(fields).map(k => ({
+    const results = Object.keys(fields).map((k) => ({
       range: { [k]: fields[k] }
     }
     ));
@@ -202,7 +202,7 @@ module.exports = function(params) {
 
   // determine which search strategy should be applied
   // options are term, terms, range, exists and not in
-  const fields = Object.keys(params).map(k => ({ name: k, value: params[k] }));
+  const fields = Object.keys(params).map((k) => ({ name: k, value: params[k] }));
 
   Object.keys(regexes).forEach((k) => {
     const f = selectParams(fields, regexes[k]);
diff --git a/packages/api/es/search.js b/packages/api/es/search.js
index 7f576c414e9..4878ac029a0 100644
--- a/packages/api/es/search.js
+++ b/packages/api/es/search.js
@@ -266,7 +266,7 @@ class BaseSearch {
     }
 
     const result = await this.client.search(searchParams);
-    const response = result.hits.hits.map(s => s._source);
+    const response = result.hits.hits.map((s) => s._source);
 
     const meta = this._metaTemplate();
     meta.limit = this.size;
diff --git a/packages/api/es/stats.js b/packages/api/es/stats.js
index 86171486a21..252553e23fd 100644
--- a/packages/api/es/stats.js
+++ b/packages/api/es/stats.js
@@ -107,7 +107,7 @@ class Stats extends BaseSearch {
         count: hist.hits.total,
         criteria
       },
-      histogram: hist.aggregations.histogram.buckets.map(b => ({
+      histogram: hist.aggregations.histogram.buckets.map((b) => ({
         date: b.key_as_string,
         count: b.doc_count
       }))
@@ -137,7 +137,7 @@ class Stats extends BaseSearch {
         count: count.hits.total,
         field: field
       },
-      count: count.aggregations.count.buckets.map(b => ({
+      count: count.aggregations.count.buckets.map((b) => ({
         key: b.key,
         count: b.doc_count
       }))
diff --git a/packages/api/lib/response.js b/packages/api/lib/response.js
index ca7e975db41..a061d8651e5 100644
--- a/packages/api/lib/response.js
+++ b/packages/api/lib/response.js
@@ -55,7 +55,7 @@ function resp(context, err, body, status = null, headers = null) {
   const res = new proxy.Response({ cors: true, statusCode: status });
   res.set('Strict-Transport-Security', 'max-age=31536000');
   if (headers) {
-    Object.keys(headers).forEach(h => res.set(h, headers[h]));
+    Object.keys(headers).forEach((h) => res.set(h, headers[h]));
   }
   return context.succeed(res.send(body));
 }
@@ -93,7 +93,7 @@ function handle(event, context, authCheck, func) {
         return cb('Session expired');
       }
       return func(cb);
-    }).catch(e => cb('Invalid Authorization token', e));
+    }).catch((e) => cb('Invalid Authorization token', e));
   }
   return func(cb);
 }
diff --git a/packages/api/models/base.js b/packages/api/models/base.js
index 29f9ab1eb16..d7b072920d7 100644
--- a/packages/api/models/base.js
+++ b/packages/api/models/base.js
@@ -131,7 +131,7 @@ class Manager {
   async batchWrite(_deletes, _puts) {
     let deletes = _deletes;
     let puts = _puts;
-    deletes = deletes ? deletes.map(d => ({ DeleteRequest: { Key: d } })) : [];
+    deletes = deletes ? deletes.map((d) => ({ DeleteRequest: { Key: d } })) : [];
     puts = puts ? puts.map((_d) => {
       const d = _d;
       d.updatedAt = Date.now();
diff --git a/packages/api/models/collections.js b/packages/api/models/collections.js
index 64821a24255..dade1517021 100644
--- a/packages/api/models/collections.js
+++ b/packages/api/models/collections.js
@@ -36,7 +36,7 @@ class Collection extends Manager {
     checkRegex(item.granuleId, match[1]);
 
     // then check all the files
-    item.files.forEach(i => checkRegex(i.regex, i.sampleFileName));
+    item.files.forEach((i) => checkRegex(i.regex, i.sampleFileName));
   }
 
   constructor() {
diff --git a/packages/api/tests/test-db-indexer.js b/packages/api/tests/test-db-indexer.js
index ce82353b773..fdf56aff166 100644
--- a/packages/api/tests/test-db-indexer.js
+++ b/packages/api/tests/test-db-indexer.js
@@ -96,7 +96,7 @@ if (process.env.LOCALSTACK_HOST === 'localhost') {
     await new Promise((resolve, reject) => {
       aws.dynamodbstreams().listStreams({TableName: process.env.CollectionsTable}, (err, data) => {
         if (err) reject(err);
-        const collectionsTableStreamArn = data.Streams.find(s => s.TableName === 'test-stack-CollectionsTable').StreamArn;
+        const collectionsTableStreamArn = data.Streams.find((s) => s.TableName === 'test-stack-CollectionsTable').StreamArn;
         const eventSourceMappingParams = {
           EventSourceArn: collectionsTableStreamArn,
           FunctionName: dbIndexerFnName,
@@ -119,7 +119,7 @@ if (process.env.LOCALSTACK_HOST === 'localhost') {
     await aws.recursivelyDeleteS3Bucket(process.env.internal);
   });
 
-  test.skip('creates a collection in dynamodb and es', async t => {
+  test.skip('creates a collection in dynamodb and es', async (t) => {
     const { name } = testCollection;
     await collections.create(testCollection)
       .then(() => {
@@ -134,7 +134,7 @@ if (process.env.LOCALSTACK_HOST === 'localhost') {
       .catch(console.log);
   });
 
-  test.skip('thrown error is caught', async t => {
+  test.skip('thrown error is caught', async (t) => {
     const { name } = collectionOnlyInDynamo;
     await collections.delete({ name })
       .then((result) => {
@@ -144,7 +144,7 @@ if (process.env.LOCALSTACK_HOST === 'localhost') {
       .catch(console.log);
   });
 } else {
-  test('db-indexer TODO test', t => {
+  test('db-indexer TODO test', (t) => {
     t.is(1+1, 2);
   });
 }
diff --git a/packages/api/tests/test-endpoints-collections.js b/packages/api/tests/test-endpoints-collections.js
index 49604510ccf..572a6cf04cc 100755
--- a/packages/api/tests/test-endpoints-collections.js
+++ b/packages/api/tests/test-endpoints-collections.js
@@ -47,7 +47,7 @@ test.after.always(async () => teardown());
 
 // TODO(aimee): Debug why this is _passing_ - we don't expect to already have a
 // collection in ES.
-test('default returns list of collections', t => {
+test('default returns list of collections', (t) => {
   const listEvent = { httpMethod: 'list' };
   return testEndpoint(collectionsEndpoint, listEvent, (response) => {
     const { results } = JSON.parse(response.body);
@@ -55,7 +55,7 @@ test('default returns list of collections', t => {
   });
 });
 
-test('GET returns an existing collection', t => {
+test('GET returns an existing collection', (t) => {
   const getEvent = {
     httpMethod: 'GET',
     pathParameters: {
@@ -69,7 +69,7 @@ test('GET returns an existing collection', t => {
   });
 });
 
-test('POST creates a new collection', t => {
+test('POST creates a new collection', (t) => {
   const newCollection = Object.assign({}, testCollection, {name: 'collection-post'});
   const postEvent = {
     httpMethod: 'POST',
@@ -82,7 +82,7 @@ test('POST creates a new collection', t => {
   });
 });
 
-test('PUT updates an existing collection', t => {
+test('PUT updates an existing collection', (t) => {
   const newPath = '/new_path';
   const updateEvent = {
     body: JSON.stringify({
@@ -102,7 +102,7 @@ test('PUT updates an existing collection', t => {
   });
 });
 
-test('DELETE deletes an existing collection', t => {
+test('DELETE deletes an existing collection', (t) => {
   const deleteEvent = {
     httpMethod: 'DELETE',
     pathParameters: {
diff --git a/packages/api/tests/test-endpoints-providers.js b/packages/api/tests/test-endpoints-providers.js
index a404199a773..188e0077b8e 100644
--- a/packages/api/tests/test-endpoints-providers.js
+++ b/packages/api/tests/test-endpoints-providers.js
@@ -38,7 +38,7 @@ test.before(async () => setup());
 test.after.always(async () => teardown());
 
 // TODO(aimee): Add a provider to ES. List uses ES and we don't have any providers in ES.
-test('default returns list of providers', t => {
+test('default returns list of providers', (t) => {
   const listEvent = { httpMethod: 'list' };
   return testEndpoint(providerEndpoint, listEvent, (response) => {
     const { results } = JSON.parse(response.body);
@@ -46,7 +46,7 @@ test('default returns list of providers', t => {
   });
 });
 
-test('GET returns an existing provider', t => {
+test('GET returns an existing provider', (t) => {
   const getEvent = {
     httpMethod: 'GET',
     pathParameters: { id: testProvider.id }
@@ -56,7 +56,7 @@ test('GET returns an existing provider', t => {
   });
 });
 
-test('POST creates a new provider', t => {
+test('POST creates a new provider', (t) => {
   const newProviderId = 'AQUA';
   const newProvider = Object.assign({}, testProvider, { id: newProviderId });
   const postEvent = {
@@ -70,7 +70,7 @@ test('POST creates a new provider', t => {
   });
 });
 
-test('PUT updates an existing provider', t => {
+test('PUT updates an existing provider', (t) => {
   const updatedLimit = 2;
   const putEvent = {
     httpMethod: 'PUT',
@@ -83,7 +83,7 @@ test('PUT updates an existing provider', t => {
   });
 });
 
-test('DELETE deletes an existing provider', t => {
+test('DELETE deletes an existing provider', (t) => {
   const deleteEvent = {
     httpMethod: 'DELETE',
     pathParameters: { id: testProvider.id }
diff --git a/packages/api/tests/test-endpoints-rules.js b/packages/api/tests/test-endpoints-rules.js
index 21d7a849cf5..84ee86b11a0 100644
--- a/packages/api/tests/test-endpoints-rules.js
+++ b/packages/api/tests/test-endpoints-rules.js
@@ -52,7 +52,7 @@ test.before(async () => setup());
 test.after.always(async () => teardown());
 
 // TODO(aimee): Add a rule to ES. List uses ES and we don't have any rules in ES.
-test('default returns list of rules', t => {
+test('default returns list of rules', (t) => {
   const listEvent = { httpMethod: 'list ' };
   return testEndpoint(rulesEndpoint, listEvent, (response) => {
     const { results } = JSON.parse(response.body);
@@ -60,7 +60,7 @@ test('default returns list of rules', t => {
   });
 });
 
-test('GET gets a rule', t => {
+test('GET gets a rule', (t) => {
   const getEvent = {
     pathParameters: {
       name: testRule.name
@@ -73,7 +73,7 @@ test('GET gets a rule', t => {
   });
 });
 
-test('POST creates a rule', t => {
+test('POST creates a rule', (t) => {
   const newRule = Object.assign({}, testRule, {name: 'make_waffles'});
   const postEvent = {
     httpMethod: 'POST',
@@ -86,7 +86,7 @@ test('POST creates a rule', t => {
   });
 });
 
-test('PUT updates a rule', t => {
+test('PUT updates a rule', (t) => {
   const updateEvent = {
     body: JSON.stringify({state: 'ENABLED'}),
     pathParameters: {
@@ -100,7 +100,7 @@ test('PUT updates a rule', t => {
   });
 });
 
-test('DELETE deletes a rule', t => {
+test('DELETE deletes a rule', (t) => {
   const deleteEvent = {
     pathParameters: {
       name: testRule.name
diff --git a/packages/api/tests/testUtils.js b/packages/api/tests/testUtils.js
index 17b1dfb2810..b857a454061 100644
--- a/packages/api/tests/testUtils.js
+++ b/packages/api/tests/testUtils.js
@@ -3,8 +3,8 @@ function testEndpoint(endpoint, event, testCallback) {
   return new Promise((resolve, reject) => {
     endpoint(event, {
-      succeed: response => resolve(testCallback(response)),
-      fail: e => reject(e)
+      succeed: (response) => resolve(testCallback(response)),
+      fail: (e) => reject(e)
     });
   });
 }
 
diff --git a/packages/common/aws.js b/packages/common/aws.js
index 702bdb84343..5fb2dca9846 100644
--- a/packages/common/aws.js
+++ b/packages/common/aws.js
@@ -202,7 +202,7 @@ exports.fileExists = async (bucket, key) => {
 
 exports.downloadS3Files = (s3Objs, dir, s3opts = {}) => {
   // Scrub s3Ojbs to avoid errors from the AWS SDK
-  const scrubbedS3Objs = s3Objs.map(s3Obj => ({
+  const scrubbedS3Objs = s3Objs.map((s3Obj) => ({
     Bucket: s3Obj.Bucket,
     Key: s3Obj.Key
   }));
diff --git a/packages/common/errors.js b/packages/common/errors.js
index 0afcefa204a..caeca59cef6 100644
--- a/packages/common/errors.js
+++ b/packages/common/errors.js
@@ -24,7 +24,7 @@ const WorkflowError = createErrorType('WorkflowError');
 /**
  * Returns true if the error is a resource error.
  * This is used because for some reason instanceof WorkflowError is not working when deployed.
 */
-const isWorkflowError = error => error.name.includes('WorkflowError');
+const isWorkflowError = (error) => error.name.includes('WorkflowError');
 
 
 module.exports = {
diff --git a/packages/ingest/consumer.js b/packages/ingest/consumer.js
index e9c11214d0c..930000b3692 100644
--- a/packages/ingest/consumer.js
+++ b/packages/ingest/consumer.js
@@ -29,7 +29,7 @@ class Consume {
       counter += messages.length;
 
       if (messages.length > 0) {
-        const processes = messages.map(message => this.processMessage(message, fn));
+        const processes = messages.map((message) => this.processMessage(message, fn));
         await Promise.all(processes);
       }
 
diff --git a/packages/ingest/ftp.js b/packages/ingest/ftp.js
index 7822963794e..f45d03cc7dd 100644
--- a/packages/ingest/ftp.js
+++ b/packages/ingest/ftp.js
@@ -107,7 +107,7 @@ module.exports.ftpMixin = (superclass) => class extends superclass {
           return this._list(path, counter).then((r) => {
             log.info(`${counter} retry suceeded`);
             return resolve(r);
-          }).catch(e => reject(e));
+          }).catch((e) => reject(e));
         }
         return reject(err);
       }
diff --git a/packages/ingest/granule.js b/packages/ingest/granule.js
index fa9635834da..9d2a1e65b1f 100644
--- a/packages/ingest/granule.js
+++ b/packages/ingest/granule.js
@@ -39,7 +39,7 @@ class Discover {
 
     // create hash with file regex as key
     this.regexes = {};
-    this.collection.files.forEach(f => {
+    this.collection.files.forEach((f) => {
       this.regexes[f.regex] = {
         collection: this.collection.name,
         bucket: this.buckets[f.bucket]
@@ -109,13 +109,13 @@ class Discover {
   }
 
   async findNewGranules(files) {
-    const checkFiles = files.map(f => this.fileIsNew(f));
+    const checkFiles = files.map((f) => this.fileIsNew(f));
     const t = await Promise.all(checkFiles);
-    const newFiles = t.filter(f => f);
+    const newFiles = t.filter((f) => f);
 
     // reorganize by granule
     const granules = {};
-    newFiles.forEach(_f => {
+    newFiles.forEach((_f) => {
       const f = _f;
       const granuleId = f.granuleId;
       delete f.granuleId;
@@ -130,7 +130,7 @@ class Discover {
       }
     });
 
-    return Object.keys(granules).map(k => granules[k]);
+    return Object.keys(granules).map((k) => granules[k]);
   }
 }
 
@@ -172,9 +172,9 @@ class Granule {
 
     // download / verify checksum / upload
     const downloadFiles = granule.files
-      .map(f => this.getBucket(f))
-      .filter(f => this.filterChecksumFiles(f))
-      .map(f => this.ingestFile(f, this.collection.duplicateHandling));
+      .map((f) => this.getBucket(f))
+      .filter((f) => this.filterChecksumFiles(f))
+      .map((f) => this.ingestFile(f, this.collection.duplicateHandling));
 
     const files = await Promise.all(downloadFiles);
 
diff --git a/packages/ingest/pdr.js b/packages/ingest/pdr.js
index 97481633096..06231f25c63 100644
--- a/packages/ingest/pdr.js
+++ b/packages/ingest/pdr.js
@@ -84,10 +84,10 @@ class Discover {
    * @private
    */
   async findNewPdrs(pdrs) {
-    const checkPdrs = pdrs.map(pdr => this.pdrIsNew(pdr));
+    const checkPdrs = pdrs.map((pdr) => this.pdrIsNew(pdr));
     const _pdrs = await Promise.all(checkPdrs);
-    const newPdrs = _pdrs.filter(p => p);
+    const newPdrs = _pdrs.filter((p) => p);
     return newPdrs;
   }
 }
 
diff --git a/packages/ingest/recursion.js b/packages/ingest/recursion.js
index d9e31c052e4..09d6c86c177 100644
--- a/packages/ingest/recursion.js
+++ b/packages/ingest/recursion.js
@@ -18,8 +18,8 @@ const log = require('@cumulus/common/log');
 async function recursion(fn, originalPath, currentPath = null, position = 0) {
   // build the recursion path object
   const regex = /(\(.*?\))/g;
-  const rules = originalPath.split(regex).map(i => i.replace(/\\\\/g, '\\'));
-  const map = rules.map(r => (r.match(regex) !== null));
+  const rules = originalPath.split(regex).map((i) => i.replace(/\\\\/g, '\\'));
+  const map = rules.map((r) => (r.match(regex) !== null));
 
   let files = [];
   let path = currentPath;
diff --git a/packages/pvl/lib/models.js b/packages/pvl/lib/models.js
index b11705ab0b1..b0a87bcc710 100644
--- a/packages/pvl/lib/models.js
+++ b/packages/pvl/lib/models.js
@@ -11,17 +11,17 @@ class PVLAggregate {
     this.store.push([key, value]);
     return this;
   }
 
-  get (key) { return this.store.find(item => item[0] === key) ? this.store.find(item => item[0] === key)[1] : null; }
-  getAll (key) { return this.store.filter(item => item[0] === key).map(item => item[1]); }
-  removeAll (key) { this.store = this.store.filter(item => item[0] !== key); }
+  get (key) { return this.store.find((item) => item[0] === key) ? this.store.find((item) => item[0] === key)[1] : null; }
+  getAll (key) { return this.store.filter((item) => item[0] === key).map((item) => item[1]); }
+  removeAll (key) { this.store = this.store.filter((item) => item[0] !== key); }
 
   // Since OBJECT and GROUP are reserved keywords, this won't collide with attribute keys
   addAggregate (aggregate) { this.store.push([aggregate.type, aggregate]); return this; }
 
-  objects (key) { return this.getAll('OBJECT').filter(o => key ? areIDsSame(o.identifier, key) : true); }
-  groups (key) { return this.getAll('GROUP').filter(g => key ? areIDsSame(g.identifier, key) : true); }
+  objects (key) { return this.getAll('OBJECT').filter((o) => key ? areIDsSame(o.identifier, key) : true); }
+  groups (key) { return this.getAll('GROUP').filter((g) => key ? areIDsSame(g.identifier, key) : true); }
 
   aggregates (key) { return this.objects(key).concat(this.groups(key)); }
 
   toPVL () {
diff --git a/packages/pvl/t.js b/packages/pvl/t.js
index 3386e4e92fe..a8b6ebd1bd4 100644
--- a/packages/pvl/t.js
+++ b/packages/pvl/t.js
@@ -35,15 +35,15 @@ function pvlToJS (pvlString) {
   // Currently assumes single-line statements, not allowing multi-line values
   let pvlStatements = pvlString
     .split('\n')
-    .map(s => s.trim())
+    .map((s) => s.trim())
     // Strip statement-ending semicolons
-    .map(s => s.replace(/;$/, ''))
+    .map((s) => s.replace(/;$/, ''))
     // Ignore blank lines
-    .filter(s => s !== '')
+    .filter((s) => s !== '')
     // Ignore full-line comments
-    .filter(s => !(s.startsWith('/*') && s.endsWith('*/')));
+    .filter((s) => !(s.startsWith('/*') && s.endsWith('*/')));
 
-  pvlStatements.forEach(s => {
+  pvlStatements.forEach((s) => {
     if (s === 'END') {
       return result;
     }
@@ -75,7 +75,7 @@ function jsToPVL (pvlObject) {
 
   // Spec doesn't require indentation, but does highly recommended it
   let depth = 0;
-  const indented = stringified.split('\n').map(s => {
+  const indented = stringified.split('\n').map((s) => {
     if (s.match(/^END_(GROUP|OBJECT)( = .+)?$/)) { depth -= 1; }
     const thisLine = `${' '.repeat(depth * INDENTATION_WIDTH)}${s}`;
     if (s.match(/^(BEGIN_)?(GROUP|OBJECT) = .+$/)) { depth += 1; }
diff --git a/packages/pvl/test/parsing.js b/packages/pvl/test/parsing.js
index 98067378970..e22e5883595 100644
--- a/packages/pvl/test/parsing.js
+++ b/packages/pvl/test/parsing.js
@@ -8,11 +8,11 @@ const PVLNumeric = require('../lib/models').PVLNumeric;
 const PVLDateTime = require('../lib/models').PVLDateTime;
 const PVLTextString = require('../lib/models').PVLTextString;
 
-test('parsing empty string returns empty object', t => {
+test('parsing empty string returns empty object', (t) => {
   t.deepEqual(pvlToJS(''), new PVLRoot());
 });
 
-test('parsing non-nested items', t => {
+test('parsing non-nested items', (t) => {
   const input =
     'THIS = THAT;\n' +
     'HERE = THERE;';
@@ -22,7 +22,7 @@ test('parsing non-nested items', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('ignore full-line comment when parsing', t => {
+test('ignore full-line comment when parsing', (t) => {
   const input =
     '/*Comment*/\n' +
     'THIS = THAT;\n' +
@@ -33,7 +33,7 @@ test('ignore full-line comment when parsing', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('ignore leading white space when parsing', t => {
+test('ignore leading white space when parsing', (t) => {
   const input =
     ' THIS = THAT;\n' +
     ' HERE = THERE;';
@@ -43,7 +43,7 @@ test('ignore leading white space when parsing', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('ignore trailing white space when parsing', t => {
+test('ignore trailing white space when parsing', (t) => {
   const input =
     'THIS = THAT; \n' +
     'HERE = THERE; ';
@@ -53,7 +53,7 @@ test('ignore trailing white space when parsing', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('allow duplicate keys when parsing', t => {
+test('allow duplicate keys when parsing', (t) => {
   const input =
     'THIS = THAT;\n' +
     'THIS = THERE;';
@@ -61,7 +61,7 @@ test('allow duplicate keys when parsing', t => {
   t.deepEqual(pvlToJS(input).store, expectedStore);
 });
 
-test('parsing a singly-nested item', t => {
+test('parsing a singly-nested item', (t) => {
   const input =
     'GROUP = THAT;\n' +
     '  HERE = THERE;\n' +
@@ -73,7 +73,7 @@ test('parsing a singly-nested item', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('parsing a singly-nested item with a named end-aggregate', t => {
+test('parsing a singly-nested item with a named end-aggregate', (t) => {
   const input =
     'OBJECT = THAT;\n' +
     '  HERE = THERE;\n' +
@@ -85,7 +85,7 @@ test('parsing a singly-nested item with a named end-aggregate', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('parsing a doubly-nested item', t => {
+test('parsing a doubly-nested item', (t) => {
   const input =
     'GROUP = THAT;\n' +
     '  GROUP = THOSE;\n' +
@@ -101,7 +101,7 @@ test('parsing a doubly-nested item', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('parsing Objects within a Group', t => {
+test('parsing Objects within a Group', (t) => {
   const input =
     'GROUP = THAT;\n' +
     '  OBJECT = THOSE;\n' +
@@ -123,7 +123,7 @@ test('parsing Objects within a Group', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('parsing nested item with attribute', t => {
+test('parsing nested item with attribute', (t) => {
   const input =
     'GROUP = THAT;\n' +
     '  PROP = YEAH_IT_EXISTS;\n' +
@@ -141,7 +141,7 @@ test('parsing nested item with attribute', t => {
   t.deepEqual(pvlToJS(input), expected);
 });
 
-test('parsing an aggregate name wrapped in quotes', t => {
+test('parsing an aggregate name wrapped in quotes', (t) => {
   const inputSimple =
     "OBJECT = 'THAT';\n" +
     '  FOO = BAR;\n' +
@@ -169,18 +169,18 @@ test('parsing an aggregate name wrapped in quotes', t => {
   t.deepEqual(pvlToJS(inputComplex), expectedComplex);
 });
 
-test('parsing Numeric value', t => {
+test('parsing Numeric value', (t) => {
   t.deepEqual(parseValue('12345'), new PVLNumeric('12345'));
   t.deepEqual(parseValue('12345'), new PVLNumeric(12345));
   t.is(parseValue('12345').value, 12345);
 });
 
-test('parsing DateTime value', t => {
+test('parsing DateTime value', (t) => {
   t.deepEqual(parseValue('1990-07-04T12:00'), new PVLDateTime('1990-07-04T12:00'));
   t.deepEqual(parseValue('1990-07-04T12:00').value, new Date('1990-07-04T12:00'));
 });
 
-test('parsing quoted TextString value', t => {
+test('parsing quoted TextString value', (t) => {
   t.deepEqual(parseValue('foobar'), new PVLTextString('foobar'));
   t.is(parseValue('foobar').value, 'foobar');
 
@@ -193,7 +193,7 @@ test('parsing quoted TextString value', t => {
   t.deepEqual(parseValue('"FO\'obaR\'"'), new PVLTextString("FO'obaR'"));
 });
 
-test('parsing unquoted TextString value', t => {
+test('parsing unquoted TextString value', (t) => {
   t.deepEqual(parseValue('FOOBAR'), new PVLTextString('FOOBAR'));
   t.deepEqual(parseValue('foobAR'), new PVLTextString('foobAR'));
 });
diff --git a/packages/pvl/test/serializing.js b/packages/pvl/test/serializing.js
index 901a04d274f..ad46727da0d 100644
--- a/packages/pvl/test/serializing.js
+++ b/packages/pvl/test/serializing.js
@@ -7,14 +7,14 @@ const PVLNumeric = require('../lib/models').PVLNumeric;
 const PVLDateTime = require('../lib/models').PVLDateTime;
 const PVLTextString = require('../lib/models').PVLTextString;
 
-test('write one attribute', t => {
+test('write one attribute', (t) => {
   const input = new PVLRoot()
     .add('FOO', new PVLTextString('BAR'));
   const expected = 'FOO = "BAR";\n';
   t.deepEqual(jsToPVL(input), expected);
 });
 
-test('write multiple attributes', t => {
+test('write multiple attributes', (t) => {
   const input = new PVLRoot()
     .add('FOO', new PVLTextString('BAR'))
     .add('BAZ', new PVLTextString('QUX'))
@@ -26,7 +26,7 @@ test('write multiple attributes', t => {
   t.deepEqual(jsToPVL(input), expected);
 });
 
-test('write one group', t => {
+test('write one group', (t) => {
   const input = new PVLRoot()
     .addAggregate(new PVLGroup('FOO')
       .add('BAR', new PVLTextString('BAZ'))
@@ -38,7 +38,7 @@ test('write one group', t => {
   t.deepEqual(jsToPVL(input), expected);
 });
 
-test('write multiple groups', t => {
+test('write multiple groups', (t) => {
   const input = new PVLRoot()
     .addAggregate(new PVLGroup('FOO')
      .add('BAR', new PVLTextString('BAZ'))
@@ -56,7 +56,7 @@ test('write multiple groups', t => {
   t.deepEqual(jsToPVL(input), expected);
 });
 
-test('write nested groups', t => {
+test('write nested groups', (t) => {
   const input = new PVLRoot()
     .addAggregate(new PVLGroup('FOO')
       .addAggregate(new PVLObject('QUX')
@@ -72,28 +72,28 @@ test('write nested groups', t => {
   t.deepEqual(jsToPVL(input), expected);
 });
 
-test('write Numeric', t => {
+test('write Numeric', (t) => {
   const input = new PVLRoot()
     .add('FOO', new PVLNumeric(12345));
   const expected = 'FOO = 12345;\n';
   t.is(jsToPVL(input), expected);
 });
 
-test('write DateTime', t => {
+test('write DateTime', (t) => {
   const input = new PVLRoot()
     .add('FOO', new PVLDateTime('2016-12-05T23:24Z'));
   const expected = 'FOO = 2016-12-05T23:24:00.000Z;\n';
   t.is(jsToPVL(input), expected);
 });
 
-test('write TextString', t => {
+test('write TextString', (t) => {
   const input = new PVLRoot()
     .add('FOO', new PVLTextString('201612-BAZ'));
   const expected = 'FOO = "201612-BAZ";\n';
   t.is(jsToPVL(input), expected);
 });
 
-test('write TextString with embedded double-quote', t => {
+test('write TextString with embedded double-quote', (t) => {
   const input = new PVLRoot()
     .add('FOO', new PVLTextString('Dwayne "The Rock" Johnson'));
   const expected = "FOO = 'Dwayne \"The Rock\" Johnson';\n";
diff --git a/packages/pvl/test/utils.js b/packages/pvl/test/utils.js
index 05dfdeee960..ab732bbaa8e 100644
--- a/packages/pvl/test/utils.js
+++ b/packages/pvl/test/utils.js
@@ -2,7 +2,7 @@ const test = require('ava').test;
 const pvlToJS = require('../t').pvlToJS;
 const PVLTextString = require('../lib/models').PVLTextString;
 
-test('accessing aggregates', t => {
+test('accessing aggregates', (t) => {
   const input = pvlToJS(
     'GROUP = THAT;\n' +
     '  OBJECT = THOSE;\n' +
@@ -32,7 +32,7 @@ test('accessing aggregates', t => {
   t.is(input.objects("'thOse'").length, 0);
 });
 
-test('parsing non-nested items', t => {
+test('parsing non-nested items', (t) => {
   const input = pvlToJS(
     'THIS = THAT;\n' +
     'HERE = THERE;'
diff --git a/packages/task-debug/src/workflow.js b/packages/task-debug/src/workflow.js
index a13795a7710..09052cbc2de 100644
--- a/packages/task-debug/src/workflow.js
+++ b/packages/task-debug/src/workflow.js
@@ -11,7 +11,7 @@ const taskMap = {
 * @param {string} taskName The name of the task
 * @return {string} The path to the module
 */
-const requirePathForTask = taskName => {
+const requirePathForTask = (taskName) => {
   const moduleName = taskMap[taskName];
   return `../../../cumulus/tasks/${moduleName}`;
 };
@@ -22,7 +22,7 @@ const requirePathForTask = taskName => {
 * @param {Function} invocation Function that returns the message for a task
 * @return {*} The result from the execution
 */
-exports.runTask = (handler, invocation) => handler(invocation(), {}, result => result);
+exports.runTask = (handler, invocation) => handler(invocation(), {}, (result) => result);
 
 /**
 * Returns a function that provides the message for a task
@@ -37,7 +37,7 @@ exports.genMessage = (collectionId, taskName, resources = {}, payload = null, co
     local.collectionMessageInput(
       collectionId,
       taskName,
-      o =>
+      (o) =>
         Object.assign({}, o, {
           resources: resources,
          payload: payload