Skip to content

Commit

Permalink
Merge pull request #279 from cumulus-nasa/CUMULUS-301
Browse files Browse the repository at this point in the history
Update all tests to use test-data package
  • Loading branch information
Alireza committed Mar 29, 2018
2 parents 578b942 + 0a1b0a9 commit c70cc42
Show file tree
Hide file tree
Showing 10 changed files with 21 additions and 116 deletions.
2 changes: 1 addition & 1 deletion .circleci/config.yml
Original file line number Diff line number Diff line change
Expand Up @@ -46,7 +46,7 @@ jobs:
# start http service
sudo rm -rf /var/www/html
sudo ln -s /home/circleci/project/.tmp-test-data /var/www/html
sudo ln -s /home/circleci/project/packages/test-data /var/www/html
sudo service apache2 start
# start sftp service
Expand Down
2 changes: 1 addition & 1 deletion .eslint-ratchet-high-water-mark
Original file line number Diff line number Diff line change
@@ -1 +1 @@
1349
1348
3 changes: 0 additions & 3 deletions .tmp-test-data/.gitignore

This file was deleted.

6 changes: 0 additions & 6 deletions .tmp-test-data/README

This file was deleted.

1 change: 1 addition & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
Expand Up @@ -13,6 +13,7 @@ and this project adheres to [Semantic Versioning](http://semver.org/spec/v2.0.0.
- Using test ftp provider for discover-granules testing [CUMULUS-427]
- **CUMULUS-304: "Add AWS API throttling to pdr-status-check task"** Added concurrency limit on SFN API calls. The default concurrency is 10 and is configurable through Lambda environment variable CONCURRENCY.
- **CUMULUS-414: "Schema validation not being performed on many tasks"** revised npm build scripts of tasks that use cumulus-message-adapter to place schema directories into dist directories.
- **CUMULUS-301:** Update all tests to use test-data package for testing data.
- **CUMULUS-271: "Empty response body from rules PUT endpoint"** Added the updated rule to response body.
- Increased memory allotment for `CustomBootstrap` lambda function. Resolves failed deployments where `CustomBootstrap` lambda function was failing with error `Process exited before completing request`. This was causing deployments to stall, fail to update and fail to rollback. This error is thrown when the lambda function tries to use more memory than it is allotted.
- Cumulus repository folders structure updated:
Expand Down
4 changes: 2 additions & 2 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -5,7 +5,7 @@ services:
command: start
volumes:
- "./packages/test-data:/home/vsftpd:ro"
- "./.tmp-test-data:/var/www/html:ro"
- "./packages/test-data:/var/www/html:ro"
- "./packages/test-data:/home/user"
ports:
- "20:20"
Expand All @@ -24,7 +24,7 @@ services:
volumes:
- ".:/home/circleci/project"
- "./packages/test-data:/home/vsftpd:ro"
- "./.tmp-test-data:/var/www/html:ro"
- "./packages/test-data:/var/html:ro"
- "./packages/test-data:/home/user"
ports:
- "20:20"
Expand Down
40 changes: 5 additions & 35 deletions tasks/discover-granules/tests/index.js
Original file line number Diff line number Diff line change
@@ -1,13 +1,10 @@
'use strict';

const fs = require('fs-extra');
const path = require('path');
const test = require('ava');
const mur = require('./fixtures/mur.json');
const { cloneDeep } = require('lodash');
const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
const {
findTmpTestDataDirectory,
randomString,
validateConfig,
validateOutput
Expand Down Expand Up @@ -88,36 +85,17 @@ test('discover granules using SFTP', async (t) => {
});

test('discover granules using HTTP', async (t) => {
const internalBucketName = randomString();
const providerPath = randomString();

// Figure out the directory paths that we're working with
const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);

// Create providerPathDirectory and internal bucket
await Promise.all([
fs.ensureDir(providerPathDirectory),
s3().createBucket({ Bucket: internalBucketName }).promise()
]);

// State sample files
const files = [
'granule-1.nc', 'granule-1.nc.md5',
'granule-2.nc', 'granule-2.nc.md5',
'granule-3.nc', 'granule-3.nc.md5'
];
await Promise.all(files.map((file) =>
fs.outputFile(path.join(providerPathDirectory, file), `This is ${file}`)));

const event = cloneDeep(mur);
event.config.collection.provider_path = providerPath;
event.config.bucket = randomString();
event.config.collection.provider_path = '/granules/fake_granules';
event.config.provider = {
id: 'MODAPS',
protocol: 'http',
host: 'http://localhost:3030'
};

await validateConfig(t, event.config);
await s3().createBucket({ Bucket: event.config.bucket }).promise();

try {
const output = await discoverGranules(event);
Expand All @@ -133,10 +111,7 @@ test('discover granules using HTTP', async (t) => {
}
finally {
// Clean up
await Promise.all([
recursivelyDeleteS3Bucket(internalBucketName),
fs.remove(providerPathDirectory)
]);
await recursivelyDeleteS3Bucket(event.config.bucket);
}
});

Expand All @@ -145,12 +120,8 @@ test('discover granules using S3', async (t) => {
const sourceBucketName = randomString();
const providerPath = randomString();

// Figure out the directory paths that we're working with
const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);

// Create providerPathDirectory and internal bucket
await Promise.all([
fs.ensureDir(providerPathDirectory),
s3().createBucket({ Bucket: internalBucketName }).promise(),
s3().createBucket({ Bucket: sourceBucketName }).promise()
]);
Expand Down Expand Up @@ -188,8 +159,7 @@ test('discover granules using S3', async (t) => {
// Clean up
await Promise.all([
recursivelyDeleteS3Bucket(internalBucketName),
recursivelyDeleteS3Bucket(sourceBucketName),
fs.remove(providerPathDirectory)
recursivelyDeleteS3Bucket(sourceBucketName)
]);
}
});
27 changes: 4 additions & 23 deletions tasks/discover-pdrs/tests/index.js
Original file line number Diff line number Diff line change
Expand Up @@ -12,7 +12,6 @@ const input = require('./fixtures/input.json');
const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
const {
findTestDataDirectory,
findTmpTestDataDirectory,
randomString,
validateConfig,
validateOutput
Expand Down Expand Up @@ -153,29 +152,14 @@ test('test pdr discovery with FTP assuming some PDRs are new', async (t) => {

test('test pdr discovery with HTTP assuming some PDRs are new', async (t) => {
const internalBucketName = randomString();
const providerPath = randomString();

// Figure out the directory paths that we're working with
const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);

// Create providerPathDirectory and internal bucket
await Promise.all([
fs.ensureDir(providerPathDirectory),
s3().createBucket({ Bucket: internalBucketName }).promise()
]);

try {
// Copy the PDRs to the HTTP directory
await s3().createBucket({ Bucket: internalBucketName }).promise();
const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
const pdrFilenames = await fs.readdir(testDataDirectory);

const oldPdr = pdrFilenames[0];
const newPdrs = pdrFilenames.slice(1);

await Promise.all(pdrFilenames.map((pdrFilename) => fs.copy(
path.join(testDataDirectory, pdrFilename),
path.join(providerPathDirectory, pdrFilename))));

// Build the event
const event = cloneDeep(input);
event.config.bucket = internalBucketName;
Expand All @@ -184,7 +168,7 @@ test('test pdr discovery with HTTP assuming some PDRs are new', async (t) => {
protocol: 'http',
host: 'http://localhost:3030'
};
event.config.collection.provider_path = providerPath;
event.config.collection.provider_path = '/pdrs';
event.input = {};

// Mark one of the PDRs as not new
Expand Down Expand Up @@ -215,10 +199,7 @@ test('test pdr discovery with HTTP assuming some PDRs are new', async (t) => {
}
finally {
// Clean up
await Promise.all([
recursivelyDeleteS3Bucket(internalBucketName),
fs.remove(providerPathDirectory)
]);
await recursivelyDeleteS3Bucket(internalBucketName);
}
});

Expand Down
19 changes: 3 additions & 16 deletions tasks/parse-pdr/tests/parse_pdrs_test.js
Original file line number Diff line number Diff line change
Expand Up @@ -10,7 +10,6 @@ const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
const { cloneDeep } = require('lodash');
const {
findTestDataDirectory,
findTmpTestDataDirectory,
randomString,
validateConfig,
validateInput,
Expand Down Expand Up @@ -57,24 +56,15 @@ test('parse PDR from FTP endpoint', async (t) => {

test('parse PDR from HTTP endpoint', async (t) => {
const internalBucketName = randomString();
const providerPath = randomString();

// Figure out the directory paths that we're working with
const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);

// Create providerPathDirectory and internal bucket
await Promise.all([
fs.ensureDir(providerPathDirectory),
s3().createBucket({ Bucket: internalBucketName }).promise()
]);
await s3().createBucket({ Bucket: internalBucketName }).promise();

const pdrName = 'MOD09GQ.PDR';

await fs.copy(
path.join(testDataDirectory, pdrName),
path.join(providerPathDirectory, pdrName));

const newPayload = cloneDeep(modis);
newPayload.config.bucket = internalBucketName;
newPayload.config.provider = {
Expand All @@ -85,7 +75,7 @@ test('parse PDR from HTTP endpoint', async (t) => {
newPayload.input = {
pdr: {
name: pdrName,
path: `/${providerPath}`
path: '/pdrs'
}
};

Expand All @@ -107,10 +97,7 @@ test('parse PDR from HTTP endpoint', async (t) => {
}
finally {
// Clean up
await Promise.all([
recursivelyDeleteS3Bucket(internalBucketName),
fs.remove(providerPathDirectory)
]);
await recursivelyDeleteS3Bucket(internalBucketName);
}
});

Expand Down
33 changes: 4 additions & 29 deletions tasks/sync-granule/tests/sync_granule_test.js
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,6 @@ const { recursivelyDeleteS3Bucket, s3 } = require('@cumulus/common/aws');
const { cloneDeep } = require('lodash');
const {
findTestDataDirectory,
findTmpTestDataDirectory,
randomString,
validateConfig,
validateInput,
Expand Down Expand Up @@ -90,29 +89,20 @@ test('download Granule from FTP endpoint', async (t) => {
});

test('download Granule from HTTP endpoint', async (t) => {
const granulePath = randomString();
const localGranulePath = path.join(await findTmpTestDataDirectory(), granulePath);

t.context.event.config.provider = {
id: 'MODAPS',
protocol: 'http',
host: 'http://localhost:3030'
};
t.context.event.input.granules[0].files[0].path = `/${granulePath}`;
t.context.event.input.granules[0].files[0].path = '/granules';

validateConfig(t, t.context.event.config);
validateInput(t, t.context.event.input);

await fs.mkdir(localGranulePath);
try {
const granuleFilename = t.context.event.input.granules[0].files[0].name;

// Stage the file to be downloaded
await fs.copy(
path.join(await findTestDataDirectory(), 'granules', granuleFilename),
path.join(localGranulePath, granuleFilename)
);

const output = await syncGranule(t.context.event);

validateOutput(t, output);
Expand All @@ -130,9 +120,6 @@ test('download Granule from HTTP endpoint', async (t) => {
}
else throw e;
}
finally {
fs.remove(localGranulePath);
}
});

test('download Granule from SFTP endpoint', async (t) => {
Expand Down Expand Up @@ -227,25 +214,17 @@ test('download granule with checksum in file from an HTTP endpoint', async (t) =
};

event.input.granules[0].files[0].path = `/${granulePath}`;
event.input.granules[0].files[1].path = `/${granulePath}`;
event.input.granules[0].files[0].path = '/granules';
event.input.granules[0].files[1].path = '/granules';

validateConfig(t, event.config);
validateInput(t, event.input);

const localGranulePath = path.join(await findTmpTestDataDirectory(), granulePath);
await fs.mkdir(localGranulePath);
try {
// Stage the files to be downloaded
const sourceDir = path.join(await findTestDataDirectory(), 'granules');
const granuleFilename = event.input.granules[0].files[0].name;
const checksumFilename = event.input.granules[0].files[1].name;
await Promise.all([
fs.copy(path.join(sourceDir, granuleFilename),
path.join(localGranulePath, granuleFilename)),
fs.copy(path.join(sourceDir, checksumFilename),
path.join(localGranulePath, checksumFilename))
]);

const output = await syncGranule(event);

Expand All @@ -262,10 +241,6 @@ test('download granule with checksum in file from an HTTP endpoint', async (t) =
}
else throw e;
}
finally {
// Clean up
fs.remove(localGranulePath);
}
});

// TODO Fix this test as part of https://bugs.earthdata.nasa.gov/browse/CUMULUS-272
Expand Down

0 comments on commit c70cc42

Please sign in to comment.