diff --git a/.eslint-ratchet-high-water-mark b/.eslint-ratchet-high-water-mark index a4ea6e7a770..65bb5e1ec8c 100644 --- a/.eslint-ratchet-high-water-mark +++ b/.eslint-ratchet-high-water-mark @@ -1 +1 @@ -2059 +2051 diff --git a/CHANGELOG.md b/CHANGELOG.md index 5e447203dec..70340119074 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,6 +7,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0. ## [Unreleased] ### Fixed +- **CUMULUS-331:** Fix aws.downloadS3File to handle non-existent key - Using test ftp provider for discover-granules testing [CUMULUS-427] - **CUMULUS-304: "Add AWS API throttling to pdr-status-check task"** Added concurrency limit on SFN API calls. The default concurrency is 10 and is configurable through Lambda environment variable CONCURRENCY. - **CUMULUS-414: "Schema validation not being performed on many tasks"** revised npm build scripts of tasks that use cumulus-message-adapter to place schema directories into dist directories. diff --git a/packages/common/aws.js b/packages/common/aws.js index 08ef45fcd2f..71a955b92d8 100644 --- a/packages/common/aws.js +++ b/packages/common/aws.js @@ -10,6 +10,7 @@ const log = require('./log'); const string = require('./string'); const { inTestMode, randomString, testAwsClient } = require('./test-utils'); const promiseRetry = require('promise-retry'); +const pump = require('pump'); const region = exports.region = process.env.AWS_DEFAULT_REGION || 'us-east-1'; if (region) { @@ -151,18 +152,22 @@ exports.promiseS3Upload = (params) => { /** * Downloads the given s3Obj to the given filename in a streaming manner - * @param s3Obj The parameters to send to S3 getObject call - * @param filename The output filename + * + * @param {Object} s3Obj - The parameters to send to S3 getObject call + * @param {string} filepath - The filepath of the file that is downloaded + * @returns {Promise} - returns filename if successful */ -exports.downloadS3File = (s3Obj, filename) => { 
+exports.downloadS3File = (s3Obj, filepath) => {
   const s3 = exports.s3();
-  const file = fs.createWriteStream(filename);
+  const fileWriteStream = fs.createWriteStream(filepath);
+
   return new Promise((resolve, reject) => {
-    s3.getObject(s3Obj)
-      .createReadStream()
-      .pipe(file)
-      .on('finish', () => resolve(filename))
-      .on('error', reject);
+    const objectReadStream = s3.getObject(s3Obj).createReadStream();
+
+    pump(objectReadStream, fileWriteStream, (err) => {
+      if (err) reject(err);
+      else resolve(filepath);
+    });
   });
 };
diff --git a/packages/common/tests/aws.js b/packages/common/tests/aws.js
index 6bdee2a6838..98eb01fe6f4 100644
--- a/packages/common/tests/aws.js
+++ b/packages/common/tests/aws.js
@@ -1,5 +1,8 @@
 'use strict';
 
+const fs = require('fs');
+const path = require('path');
+const { tmpdir } = require('os');
 const test = require('ava');
 const aws = require('../aws');
 const { randomString } = require('../test-utils');
@@ -43,3 +46,37 @@ test('listS3ObjectsV2 handles truncated case', async (t) => {
 
   return aws.recursivelyDeleteS3Bucket(Bucket);
 });
+
+test('downloadS3File rejects promise if key not found', async (t) => {
+  const Bucket = randomString();
+  await aws.s3().createBucket({ Bucket }).promise();
+
+  t.plan(1);
+  try {
+    await aws.downloadS3File({ Bucket, Key: 'not-gonna-find-it' }, '/tmp/wut');
+  }
+  catch (err) {
+    t.is(err.message, 'The specified key does not exist.');
+  }
+});
+
+test('downloadS3File resolves filepath if key is found', async (t) => {
+  const Bucket = randomString();
+  const Key = 'example';
+  const Body = 'example';
+
+  await aws.s3().createBucket({ Bucket }).promise();
+  await aws.s3().putObject({ Bucket, Key: Key, Body: Body }).promise();
+
+  const params = { Bucket, Key: Key };
+  const filepath = await aws.downloadS3File(params, path.join(tmpdir(), 'example'));
+
+  const result = await new Promise((resolve, reject) => {
+    fs.readFile(filepath, 'utf-8', (err, data) => {
+      if (err) reject(err);
+      else resolve(data);
+    });
+  });
+
+  t.is(result, Body);
+});