Merge pull request #416 from cumulus-nasa/fix_for_502_bad_gateway
run tests in serial to ease the load on LocalStack
Alireza committed Jul 3, 2018
2 parents 3be89f9 + 440d15b commit 7467eb3
Showing 17 changed files with 99 additions and 98 deletions.
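The change applies a single pattern across every test file below: AVA test declarations move from the default concurrent test(...) to test.serial(...), so the tests in each file run one at a time instead of opening simultaneous connections to LocalStack. A minimal sketch of the pattern (the test names and bodies here are hypothetical, for illustration only):

const test = require('ava');

// Before: AVA runs these concurrently, so both tests hit LocalStack
// at the same time and can provoke 502 Bad Gateway responses under load.
test('creates a bucket', async (t) => { /* ... */ });
test('lists buckets', async (t) => { /* ... */ });

// After: test.serial() runs each test to completion before the
// next one starts, easing the load on LocalStack.
test.serial('creates a bucket', async (t) => { /* ... */ });
test.serial('lists buckets', async (t) => { /* ... */ });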
2 changes: 1 addition & 1 deletion package.json
@@ -4,7 +4,7 @@
"description": "Cumulus Framework for ingesting and processing Nasa Earth data streams",
"scripts": {
"e2e": "ava tests/*.js --serial",
"test": "nyc lerna run test --concurrency 2",
"test": "nyc lerna run test",
"bootstrap": "lerna bootstrap --npm-client=npm",
"ybootstrap": "lerna bootstrap",
"bootstrap-no-build": "lerna bootstrap --ignore-scripts",
10 changes: 5 additions & 5 deletions packages/api/tests/lambdas/create-reconciliation-report.js
@@ -134,7 +134,7 @@ test.afterEach.always((t) =>
aws.dynamodb().deleteTable({ TableName }).promise())
])));

test('A valid reconciliation report is generated for no buckets', async (t) => {
test.serial('A valid reconciliation report is generated for no buckets', async (t) => {
// Write the buckets config to S3
await storeBucketsConfigToS3(
[],
@@ -162,7 +162,7 @@ test('A valid reconciliation report is generated for no buckets', async (t) => {
t.true(reportStartTime <= reportEndTime);
});

test('A valid reconciliation report is generated when everything is in sync', async (t) => {
test.serial('A valid reconciliation report is generated when everything is in sync', async (t) => {
const dataBuckets = range(2).map(() => randomString());
await Promise.all(dataBuckets.map((bucket) =>
createBucket(bucket)
@@ -208,7 +208,7 @@ test('A valid reconciliation report is generated when everything is in sync', as
t.true(reportStartTime <= reportEndTime);
});

test('A valid reconciliation report is generated when there are extra S3 objects', async (t) => {
test.serial('A valid reconciliation report is generated when there are extra S3 objects', async (t) => {
const dataBuckets = range(2).map(() => randomString());
await Promise.all(dataBuckets.map((bucket) =>
createBucket(bucket)
@@ -262,7 +262,7 @@ test('A valid reconciliation report is generated when there are extra S3 objects
t.true(reportStartTime <= reportEndTime);
});

test('A valid reconciliation report is generated when there are extra DynamoDB objects', async (t) => {
test.serial('A valid reconciliation report is generated when there are extra DynamoDB objects', async (t) => {
const dataBuckets = range(2).map(() => randomString());
await Promise.all(dataBuckets.map((bucket) =>
createBucket(bucket)
@@ -319,7 +319,7 @@ test('A valid reconciliation report is generated when there are extra DynamoDB o
t.true(reportStartTime <= reportEndTime);
});

test('A valid reconciliation report is generated when there are both extra DynamoDB and extra S3 files', async (t) => {
test.serial('A valid reconciliation report is generated when there are both extra DynamoDB and extra S3 files', async (t) => {
const dataBuckets = range(2).map(() => randomString());
await Promise.all(dataBuckets.map((bucket) =>
createBucket(bucket)
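Each of these tests provisions its own randomly named buckets before generating a report; running them serially keeps those setup-and-teardown bursts from hitting LocalStack all at once. The isolation pattern repeated throughout the file, for reference:

// Every test creates its own randomly named buckets, so test data
// never collides even though all tests talk to the same LocalStack.
const dataBuckets = range(2).map(() => randomString());
await Promise.all(dataBuckets.map((bucket) => createBucket(bucket)));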
45 changes: 23 additions & 22 deletions packages/api/tests/test-back-restore.js
@@ -12,65 +12,66 @@ const restore = require('../bin/restore');
const backup = require('../bin/backup');

let tempFolder;
const tableName = randomString();

/**
* small helper for populating DynamoDB with fake records
*
* @param {string} tableName - DynamoDB table anme
* @param {string} table - DynamoDB table name
* @param {integer} limit - number of granule records to generate
* @returns {Promise<Object>} an array of DynamoDB responses
* @returns {Promise<Array>} an array of objects with granuleIds
*/
function populateDynamoDB(tableName, limit) {
async function populateDynamoDB(table, limit) {
const granules = [];
const granuleIds = [];
const model = new models.Granule();
model.tableName = tableName;
model.tableName = table;

for (let i = 0; i < limit; i += 1) {
granules.push(fakeGranuleFactory());
const g = fakeGranuleFactory();
granules.push(g);
granuleIds.push({ granuleId: g.granuleId });
}

const chunked = chunk(granules, 25);
return Promise.all(chunked.map((c) => model.batchWrite(null, c)));
await Promise.all(chunked.map((c) => model.batchWrite(null, c)));
return granuleIds;
}

test.before(async () => {
tempFolder = fs.mkdtempSync(`${os.tmpdir()}${path.sep}`);
await models.Manager.createTable(tableName, { name: 'granuleId', type: 'S' });
});

test.after.always(async () => {
await fs.remove(tempFolder);
});

test.beforeEach(async (t) => {
t.context.tableName = randomString();
await models.Manager.createTable(t.context.tableName, { name: 'granuleId', type: 'S' });
});

test.afterEach.always(async (t) => {
await models.Manager.deleteTable(t.context.tableName);
await models.Manager.deleteTable(tableName);
});

test.serial('backup records from DynamoDB', async (t) => {
const limit = 12;
const tempBackupFile = path.join(tempFolder, `${t.context.tableName}.json`);
const tempBackupFile = path.join(tempFolder, `${tableName}.json`);

await populateDynamoDB(t.context.tableName, limit);
const granuleIds = await populateDynamoDB(tableName, limit);

process.env.GranulesTable = t.context.tableName;
process.env.GranulesTable = tableName;
const gModel = new models.Granule();
const resp = await gModel.scan(null, null, 0, 'COUNT');
t.is(resp.Count, limit);

await backup(t.context.tableName, 'us-east-1', tempFolder);
await backup(tableName, 'us-east-1', tempFolder);

const stats = fs.statSync(tempBackupFile);
t.truthy(stats);

// delete records
await gModel.batchWrite(granuleIds);
});

test.serial('restore records to DynamoDB', async (t) => {
const limit = 55;
const granuleIds = [];
const tempRestoreFile = path.join(tempFolder, `${t.context.tableName}.json`);
const tempRestoreFile = path.join(tempFolder, `restore_${tableName}.json`);

// create a backup file with 200 records
let fileContent = '';
@@ -81,11 +81,11 @@ test.serial('restore records to DynamoDB', async (t) => {
}
fs.writeFileSync(tempRestoreFile, fileContent);

await restore(tempRestoreFile, t.context.tableName, 2);
await restore(tempRestoreFile, tableName, 2);

// count the records
const gModel = new models.Manager();
gModel.tableName = t.context.tableName;
gModel.tableName = tableName;
const resp = await gModel.scan(null, null, 0, 'COUNT');
t.is(resp.Count, limit);
});
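The refactor above also moves this file from one DynamoDB table per test (created in beforeEach) to a single shared table, which is safe because every test here is already test.serial. To keep the shared table clean, populateDynamoDB now returns the generated ids so the backup test can delete its records when it finishes. A condensed sketch of that cleanup, assuming the models.Granule API used above (where batchWrite's first argument carries deletes and its second carries puts):

// populate the shared table, run the backup, then remove the records
const granuleIds = await populateDynamoDB(tableName, limit);
await backup(tableName, 'us-east-1', tempFolder);
await gModel.batchWrite(granuleIds); // deletes the records written above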
8 changes: 4 additions & 4 deletions packages/api/tests/test-endpoints-reconciliation-reports.js
@@ -25,7 +25,7 @@ test.beforeEach(async () => {

test.afterEach.always(() => aws.recursivelyDeleteS3Bucket(process.env.system_bucket));

test('default returns list of reports', (t) => {
test.serial('default returns list of reports', (t) => {
const event = { httpMethod: 'GET' };
return testEndpoint(reconciliationReportEndpoint, event, (response) => {
const results = JSON.parse(response.body);
@@ -34,7 +34,7 @@ test('default returns list of reports', (t) => {
});
});

test('get a report', async (t) => {
test.serial('get a report', async (t) => {
await Promise.all(reportNames.map((reportName) => {
const event = {
pathParameters: {
@@ -49,7 +49,7 @@
}));
});

test('delete a report', async (t) => {
test.serial('delete a report', async (t) => {
await Promise.all(reportNames.map((reportName) => {
const event = {
pathParameters: {
@@ -64,7 +64,7 @@
}));
});

test('create a report', (t) => {
test.serial('create a report', (t) => {
const event = { httpMethod: 'POST' };
return testEndpoint(reconciliationReportEndpoint, event, (response) => {
const content = JSON.parse(response.body);
12 changes: 6 additions & 6 deletions packages/api/tests/test-kinesis-consumer.js
@@ -135,15 +135,15 @@ test.afterEach(async (t) => {

// getKinesisRule tests
// eslint-disable-next-line max-len
test('it should look up kinesis-type rules which are associated with the collection, but not those that are disabled', async (t) => {
test.serial('it should look up kinesis-type rules which are associated with the collection, but not those that are disabled', async (t) => {
await getKinesisRules(JSON.parse(eventData))
.then((result) => {
t.is(result.length, 2);
});
});

// handler tests
test('it should enqueue a message for each associated workflow', async (t) => {
test.serial('it should enqueue a message for each associated workflow', async (t) => {
await handler(event, {}, testCallback);
const actualQueueUrl = sfSchedulerSpy.getCall(0).args[0];
t.is(actualQueueUrl, stubQueueUrl);
@@ -166,7 +166,7 @@ test('it should enqueue a message for each associated workflow', async (t) => {
t.deepEqual(actualMessage.payload, expectedMessage.payload);
});

test('it should throw an error if message does not include a collection', async (t) => {
test.serial('it should throw an error if message does not include a collection', async (t) => {
const invalidMessage = JSON.stringify({});
const kinesisEvent = {
Records: [{ kinesis: { data: Buffer.from(invalidMessage).toString('base64') } }]
@@ -177,7 +177,7 @@ test('it should throw an error if message does not include a collection', async
t.is(errors[0].errors[0].message, 'should have required property \'collection\'');
});

test('it should throw an error if message collection has wrong data type', async (t) => {
test.serial('it should throw an error if message collection has wrong data type', async (t) => {
const invalidMessage = JSON.stringify({ collection: {} });
const kinesisEvent = {
Records: [{ kinesis: { data: Buffer.from(invalidMessage).toString('base64') } }]
@@ -188,7 +188,7 @@ test('it should throw an error if message collection has wrong data type', async
t.is(errors[0].errors[0].message, 'should be string');
});

test('it should throw an error if message is invalid json', async(t) => {
test.serial('it should throw an error if message is invalid json', async(t) => {
const invalidMessage = '{';
const kinesisEvent = {
Records: [{ kinesis: { data: Buffer.from(invalidMessage).toString('base64') } }]
@@ -197,7 +197,7 @@ test('it should throw an error if message is invalid json', async(t) => {
t.is(errors[0].message, 'Unexpected end of JSON input');
});

test('it should not throw if message is valid', (t) => {
test.serial('it should not throw if message is valid', (t) => {
const validMessage = JSON.stringify({ collection: 'confection-collection' });
const kinesisEvent = {
Records: [{ kinesis: { data: Buffer.from(validMessage).toString('base64') } }]
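A detail worth noting in these fixtures: Kinesis delivers record payloads base64-encoded, which is why every fabricated event wraps its message in Buffer.from(...).toString('base64'). A small helper like the following (hypothetical, not part of this commit) captures the pattern:

// Build a fake Kinesis event; the consumer is expected to base64-decode
// record.kinesis.data before parsing it as JSON.
function buildKinesisEvent(message) {
  return {
    Records: [{ kinesis: { data: Buffer.from(message).toString('base64') } }]
  };
}

const kinesisEvent = buildKinesisEvent(JSON.stringify({ collection: 'confection-collection' }));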
6 changes: 3 additions & 3 deletions packages/common/tests/aws/DynamoDbScanQueue.js
@@ -32,7 +32,7 @@

test.afterEach.always((t) => dynamodb().deleteTable({ TableName: t.context.tableName }).promise());

test('DynamoDbScanQueue.peek() returns the next item but does not remove it from the queue', async (t) => { // eslint-disable-line max-len
test.serial('DynamoDbScanQueue.peek() returns the next item but does not remove it from the queue', async (t) => { // eslint-disable-line max-len
const bucket = randomString();
const key = randomString();

@@ -50,7 +50,7 @@ test('DynamoDbScanQueue.peek() returns the next item but does not remove it from
t.is((await queue.peek()).bucket.S, bucket);
});

test('DynamoDbScanQueue.shift() returns the next object and removes it from the queue', async (t) => { // eslint-disable-line max-len
test.serial('DynamoDbScanQueue.shift() returns the next object and removes it from the queue', async (t) => { // eslint-disable-line max-len
const bucket = randomString();
const key = randomString();

@@ -69,7 +69,7 @@ test('DynamoDbScanQueue.shift() returns the next object and removes it from the
t.is(await queue.peek(), null);
});

test('DynamoDbScanQueue can handle paging', async (t) => {
test.serial('DynamoDbScanQueue can handle paging', async (t) => {
await Promise.all(range(11).map(() =>
dynamodb().putItem({
TableName: t.context.tableName,
6 changes: 3 additions & 3 deletions packages/common/tests/aws/S3ListObjectsV2Queue.js
@@ -16,7 +16,7 @@

test.afterEach.always((t) => recursivelyDeleteS3Bucket(t.context.bucketName));

test('S3ListObjectsV2Queue.peek() returns the next object but does not remove it from the queue', async (t) => { // eslint-disable-line max-len
test.serial('S3ListObjectsV2Queue.peek() returns the next object but does not remove it from the queue', async (t) => { // eslint-disable-line max-len
const key = randomString();
await s3().putObject({ Bucket: t.context.bucketName, Key: key, Body: 'body' }).promise();

@@ -26,7 +26,7 @@ test('S3ListObjectsV2Queue.peek() returns the next object but does not remove it
t.is((await queue.peek()).Key, key);
});

test('S3ListObjectsV2Queue.shift() returns the next object and removes it from the queue', async (t) => { // eslint-disable-line max-len
test.serial('S3ListObjectsV2Queue.shift() returns the next object and removes it from the queue', async (t) => { // eslint-disable-line max-len
const key = randomString();
await s3().putObject({ Bucket: t.context.bucketName, Key: key, Body: 'body' }).promise();

@@ -37,7 +37,7 @@ test('S3ListObjectsV2Queue.shift() returns the next object and removes it from t
t.is(await queue.peek(), null);
});

test('S3ListObjectsV2Queue can handle paging', async (t) => {
test.serial('S3ListObjectsV2Queue can handle paging', async (t) => {
await Promise.all(range(11).map(() =>
s3().putObject({
Bucket: t.context.bucketName,
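Both queue classes in this package share the same contract, which these tests exercise: peek() resolves to the next item without consuming it, shift() consumes it, and both transparently fetch further pages of results (the paging tests write 11 items and iterate through them all). A hypothetical consumption loop, assuming shift(), like peek(), resolves to null once the underlying scan or listing is exhausted:

// Drain a DynamoDbScanQueue or S3ListObjectsV2Queue into an array.
async function drainQueue(queue) {
  const items = [];
  let item = await queue.shift();
  while (item !== null) {
    items.push(item);
    item = await queue.shift();
  }
  return items;
}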
16 changes: 8 additions & 8 deletions packages/common/tests/collection-config-store.js
@@ -26,7 +26,7 @@ test.afterEach((t) =>
if (err.code !== 'NoSuchBucket') throw err;
}));

test('get() fetches a collection config from S3', async (t) => {
test.serial('get() fetches a collection config from S3', async (t) => {
await s3().putObject({
Bucket: t.context.bucket,
Key: t.context.collectionConfigKey(t.context.dataType),
@@ -39,7 +39,7 @@ test('get() fetches a collection config from S3', async (t) => {
t.deepEqual(fetchedCollectionConfig, t.context.collectionConfig);
});

test('get() does not hit S3 for a cached collection config', async (t) => {
test.serial('get() does not hit S3 for a cached collection config', async (t) => {
await s3().putObject({
Bucket: t.context.bucket,
Key: t.context.collectionConfigKey(t.context.dataType),
@@ -60,7 +60,7 @@ test('get() does not hit S3 for a cached collection config', async (t) => {
t.deepEqual(fetchedCollectionConfig, t.context.collectionConfig);
});

test('get() throws an exception if the collection config could not be found', async (t) => {
test.serial('get() throws an exception if the collection config could not be found', async (t) => {
const invalidDataType = randomString();
const collectionConfigStore = new CollectionConfigStore(t.context.bucket, t.context.stackName);

@@ -73,7 +73,7 @@ test('get() throws an exception if the collection config could not be found', as
}
});

test('get() throws an exception if the bucket does not exist', async (t) => {
test.serial('get() throws an exception if the bucket does not exist', async (t) => {
const invalidBucket = randomString();
const collectionConfigStore = new CollectionConfigStore(invalidBucket, t.context.stackName);

@@ -86,7 +86,7 @@ test('get() throws an exception if the bucket does not exist', async (t) => {
}
});

test('put() stores a collection config to S3', async (t) => {
test.serial('put() stores a collection config to S3', async (t) => {
const collectionConfigStore = new CollectionConfigStore(t.context.bucket, t.context.stackName);
await collectionConfigStore.put(t.context.dataType, t.context.collectionConfig);

@@ -99,7 +99,7 @@ test('put() stores a collection config to S3', async (t) => {
t.deepEqual(storedCollectionConfig, t.context.collectionConfig);
});

test('put() updates the cache with the new collection config', async (t) => {
test.serial('put() updates the cache with the new collection config', async (t) => {
const collectionConfigStore = new CollectionConfigStore(t.context.bucket, t.context.stackName);
await collectionConfigStore.put(t.context.dataType, t.context.collectionConfig);

@@ -112,7 +112,7 @@ test('put() updates the cache with the new collection config', async (t) => {
t.deepEqual(fetchedCollectionConfig, t.context.collectionConfig);
});

test('delete() removes the collection config from S3', async (t) => {
test.serial('delete() removes the collection config from S3', async (t) => {
const bucket = t.context.bucket;
const collectionConfigKey = t.context.collectionConfigKey(t.context.dataType);

@@ -131,7 +131,7 @@ test('delete() removes the collection config from S3', async (t) => {
t.false(await s3ObjectExists({ Bucket: bucket, Key: collectionConfigKey }));
});

test('delete() the collection config from the cache', async (t) => {
test.serial('delete() the collection config from the cache', async (t) => {
const collectionConfigStore = new CollectionConfigStore(t.context.bucket, t.context.stackName);

// Store the collection config to S3, which will also cache it
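Taken together, these tests pin down CollectionConfigStore's caching contract: get() reaches S3 only on a cache miss, put() writes through to S3 and refreshes the cache, and delete() must evict the cached entry as well as removing the S3 object. A short usage sketch, with a hypothetical data type and config (the API signatures follow the calls above):

const store = new CollectionConfigStore(bucket, stackName);

await store.put('MOD09GQ', { name: 'MOD09GQ', version: '006' }); // stores to S3 and caches
const config = await store.get('MOD09GQ'); // served from the cache, no S3 round trip
await store.delete('MOD09GQ'); // removes the S3 object and evicts the cache entry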
(Diff truncated here; the remaining changed files are not shown.)