diff --git a/.circleci/config.yml b/.circleci/config.yml
index 1c7c14d7fd9..82024144bfc 100644
--- a/.circleci/config.yml
+++ b/.circleci/config.yml
@@ -3,7 +3,7 @@ version: 2
 jobs:
   build_and_test:
     docker:
-      - image: circleci/node:6.10
+      - image: cumuluss/circleci:node-6.10
       - name: localstack
         image: localstack/localstack
     working_directory: ~/project
@@ -18,6 +18,20 @@ jobs:
       - run:
           name: Installing Dependencies
           command: |
+            # start ftp
+            sudo rm -rf /home/vsftpd
+            sudo ln -s /home/circleci/project/packages/test-data /home/vsftpd
+            sudo service vsftpd start || true
+
+            # start http service
+            sudo rm -rf /var/www/html
+            sudo ln -s /home/circleci/project/.tmp-test-data /var/www/html
+            sudo service apache2 start
+
+            # start sftp service
+            sudo bash /usr/sbin/sftp.sh user:password
+            sudo cp -r /home/circleci/project/packages/test-data/* /home/user/
+
             yarn install
             yarn bootstrap-no-build

diff --git a/README.md b/README.md
index 1ad58cae238..7314ebad3a6 100644
--- a/README.md
+++ b/README.md
@@ -57,9 +57,7 @@ Build and watch packages:

 The LocalStack repository has [installation instructions](https://github.com/localstack/localstack#installing).

-Before running tests, start the LocalStack servers:
-
-    $ localstack start
+LocalStack is included in the docker-compose file. You only need to run the docker-compose command in the next section to use it with your tests.

 ### Docker containers

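The README change above points readers at docker-compose instead of a manually started LocalStack. A minimal sketch of that workflow, assuming the `local` and `test` services defined in the docker-compose.yml changes later in this diff; the `yarn test` entry point is an assumption and is not shown anywhere in this diff:

    $ docker-compose up -d local          # FTP/HTTP/SFTP fixture services plus LocalStack
    $ docker-compose run test             # opens /bin/bash in the test container
    $ cd /home/circleci/project           # the repository is mounted here
    $ yarn install && yarn bootstrap-no-build
    $ yarn test                           # assumed entry point; substitute the project's real test script
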
diff --git a/cumulus/tasks/discover-granules/tests/index.js b/cumulus/tasks/discover-granules/tests/index.js
index 3ebd4f4d4a6..fd6f16100ab 100644
--- a/cumulus/tasks/discover-granules/tests/index.js
+++ b/cumulus/tasks/discover-granules/tests/index.js
@@ -36,30 +36,12 @@ test('discover granules using FTP', async (t) => {

 test('discover granules using SFTP', async (t) => {
   const internalBucketName = randomString();
-  const providerPath = randomString();
-
-  // Figure out the directory paths that we're working with
-  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);

   // Create providerPathDirectory and internal bucket
-  await Promise.all([
-    fs.ensureDir(providerPathDirectory),
-    s3().createBucket({ Bucket: internalBucketName }).promise()
-  ]);
-
-  // State sample files
-  const files = [
-    'granule-1.nc', 'granule-1.nc.md5',
-    'granule-2.nc', 'granule-2.nc.md5',
-    'granule-3.nc', 'granule-3.nc.md5'
-  ];
-  await Promise.all(files.map((file) =>
-    fs.outputFile(path.join(providerPathDirectory, file), `This is ${file}`)));
+  await s3().createBucket({ Bucket: internalBucketName }).promise();

   const event = cloneDeep(mur);
-  // The test-data prefix is required in the provider_path because of the way
-  // that the sftp container is configured in docker-compose.yml.
-  event.config.collection.provider_path = `test-data/${providerPath}`;
+  event.config.collection.provider_path = 'granules/fake_granules';
   event.config.provider = {
     id: 'MODAPS',
     protocol: 'sftp',
@@ -85,10 +67,7 @@ test('discover granules using SFTP', async (t) => {
   }
   finally {
     // Clean up
-    await Promise.all([
-      recursivelyDeleteS3Bucket(internalBucketName),
-      fs.remove(providerPathDirectory)
-    ]);
+    await recursivelyDeleteS3Bucket(internalBucketName);
   }
 });

@@ -119,7 +98,7 @@ test('discover granules using HTTP', async (t) => {
   event.config.provider = {
     id: 'MODAPS',
     protocol: 'http',
-    host: 'http://localhost:8080'
+    host: 'http://localhost:3030'
   };

   await validateConfig(t, event.config);
diff --git a/cumulus/tasks/discover-pdrs/tests/index.js b/cumulus/tasks/discover-pdrs/tests/index.js
index d12d9a91527..6c5bd31bd50 100644
--- a/cumulus/tasks/discover-pdrs/tests/index.js
+++ b/cumulus/tasks/discover-pdrs/tests/index.js
@@ -182,7 +182,7 @@ test('test pdr discovery with HTTP assuming some PDRs are new', async (t) => {
   event.config.provider = {
     id: 'MODAPS',
     protocol: 'http',
-    host: 'http://localhost:8080'
+    host: 'http://localhost:3030'
   };
   event.config.collection.provider_path = providerPath;
   event.input = {};
@@ -224,17 +224,12 @@ test('test pdr discovery with SFTP assuming some PDRs are new', async (t) => {
   const internalBucketName = randomString();
-  const providerPath = randomString();

   // Figure out the directory paths that we're working with
   const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
-  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);

   // Create providerPathDirectory and internal bucket
-  await Promise.all([
-    fs.ensureDir(providerPathDirectory),
-    s3().createBucket({ Bucket: internalBucketName }).promise()
-  ]);
+  await s3().createBucket({ Bucket: internalBucketName }).promise();

   try {
     // Copy the PDRs to the SFTP directory
@@ -243,10 +238,6 @@ test('test pdr discovery with SFTP assuming some PDRs are new', async (t) => {
     const oldPdr = pdrFilenames[0];
     const newPdrs = pdrFilenames.slice(1);

-    await Promise.all(pdrFilenames.map((pdrFilename) => fs.copy(
-      path.join(testDataDirectory, pdrFilename),
-      path.join(providerPathDirectory, pdrFilename))));
-
     // Build the event
     const event = cloneDeep(input);
     event.config.bucket = internalBucketName;
@@ -258,15 +249,13 @@ test('test pdr discovery with SFTP assuming some PDRs are new', async (t) => {
       username: 'user',
       password: 'password'
     };
-    // The test-data prefix is required because of the way that the sftp
-    // container is configured in docker-compose.yml.
-    event.config.collection.provider_path = `test-data/${providerPath}`;
+    event.config.collection.provider_path = 'pdrs';
     event.input = {};

     // Mark one of the PDRs as not new
     await s3().putObject({
       Bucket: internalBucketName,
-      // 'pdrs' is the default 'folder' value in the Discover contructor
+      // 'pdrs' is the default 'folder' value in the Discover constructor
       Key: `${event.config.stack}/pdrs/${oldPdr}`,
       Body: 'Pretend this is a PDR'
     }).promise();
@@ -291,9 +280,6 @@ test('test pdr discovery with SFTP assuming some PDRs are new', async (t) => {
   }
   finally {
     // Clean up
-    await Promise.all([
-      recursivelyDeleteS3Bucket(internalBucketName),
-      fs.remove(providerPathDirectory)
-    ]);
+    await recursivelyDeleteS3Bucket(internalBucketName);
   }
 });
diff --git a/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js b/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js
index faa41c50ecd..a15399750cf 100644
--- a/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js
+++ b/cumulus/tasks/parse-pdr/tests/parse_pdrs_test.js
@@ -79,7 +79,7 @@ test('parse PDR from HTTP endpoint', async (t) => {
   newPayload.config.provider = {
     id: 'MODAPS',
     protocol: 'http',
-    host: 'http://localhost:8080'
+    host: 'http://localhost:3030'
   };
   newPayload.input = {
     pdr: {
@@ -115,14 +115,9 @@ test('parse PDR from SFTP endpoint', async (t) => {
   const internalBucketName = randomString();
-  const providerPath = randomString();
-  const providerPathDirectory = path.join(await findTmpTestDataDirectory(), providerPath);

   // Create providerPathDirectory and internal bucket
-  await Promise.all([
-    fs.ensureDir(providerPathDirectory),
-    s3().createBucket({ Bucket: internalBucketName }).promise()
-  ]);
+  await s3().createBucket({ Bucket: internalBucketName }).promise();

   const pdrName = 'MOD09GQ.PDR';
@@ -139,9 +134,7 @@ test('parse PDR from SFTP endpoint', async (t) => {
   newPayload.input = {
     pdr: {
       name: pdrName,
-      // The test-data prefix is required because of the way that the sftp
-      // container is configured in docker-compose.yml.
-      path: `/test-data/${providerPath}`
+      path: 'pdrs'
     }
   };

@@ -149,12 +142,6 @@ test('parse PDR from SFTP endpoint', async (t) => {
   await validateConfig(t, newPayload.config);

   try {
-    // Stage the file to be downloaded
-    const testDataDirectory = path.join(await findTestDataDirectory(), 'pdrs');
-    await fs.copy(
-      path.join(testDataDirectory, pdrName),
-      path.join(providerPathDirectory, pdrName));
-
     const output = await parsePdr(newPayload);

     await validateOutput(t, output);
@@ -170,10 +157,7 @@ test('parse PDR from SFTP endpoint', async (t) => {
   }
   finally {
     // Clean up
-    await Promise.all([
-      recursivelyDeleteS3Bucket(internalBucketName),
-      fs.remove(providerPathDirectory)
-    ]);
+    await recursivelyDeleteS3Bucket(internalBucketName);
   }
 });

diff --git a/cumulus/tasks/sync-granule/tests/sync_granule_test.js b/cumulus/tasks/sync-granule/tests/sync_granule_test.js
index 82c651a8c5e..d4050ec6110 100644
--- a/cumulus/tasks/sync-granule/tests/sync_granule_test.js
+++ b/cumulus/tasks/sync-granule/tests/sync_granule_test.js
@@ -96,7 +96,7 @@ test('download Granule from HTTP endpoint', async (t) => {
   t.context.event.config.provider = {
     id: 'MODAPS',
     protocol: 'http',
-    host: 'http://localhost:8080'
+    host: 'http://localhost:3030'
   };

   t.context.event.input.granules[0].files[0].path = `/${granulePath}`;
@@ -136,9 +136,6 @@ test('download Granule from HTTP endpoint', async (t) => {
 });

 test('download Granule from SFTP endpoint', async (t) => {
-  const granulePath = randomString();
-  const localGranulePath = path.join(await findTmpTestDataDirectory(), granulePath);
-
   t.context.event.config.provider = t.context.event.config.provider = {
     id: 'MODAPS',
     protocol: 'sftp',
@@ -148,23 +145,14 @@ test('download Granule from SFTP endpoint', async (t) => {
     username: 'user',
     password: 'password'
   };

-  // The test-data prefix is required because of the way that the sftp
-  // container is configured in docker-compose.yml.
-  t.context.event.input.granules[0].files[0].path = `test-data/${granulePath}`;
+  t.context.event.input.granules[0].files[0].path = '/granules';

   validateConfig(t, t.context.event.config);
   validateInput(t, t.context.event.input);
-  await fs.mkdir(localGranulePath);

   try {
     const granuleFilename = t.context.event.input.granules[0].files[0].name;

-    // Stage the file to be downloaded
-    await fs.copy(
-      path.join(await findTestDataDirectory(), 'granules', granuleFilename),
-      path.join(localGranulePath, granuleFilename)
-    );
-
     const output = await syncGranule(t.context.event);

     validateOutput(t, output);
@@ -182,9 +170,6 @@ test('download Granule from SFTP endpoint', async (t) => {
     }
     else throw e;
   }
-  finally {
-    fs.remove(localGranulePath);
-  }
 });

 test('download granule from S3 provider', async (t) => {
@@ -238,7 +223,7 @@ test('download granule with checksum in file from an HTTP endpoint', async (t) =
   event.config.provider = {
     id: 'MODAPS',
     protocol: 'http',
-    host: 'http://localhost:8080'
+    host: 'http://localhost:3030'
   };

   const granulePath = randomString();
@@ -288,7 +273,7 @@ test('download granule with checksum in file from an HTTP endpoint', async (t) =
 // const provider = {
 //   id: 'MODAPS',
 //   protocol: 'http',
-//   host: 'http://localhost:8080'
+//   host: 'http://localhost:3030'
 // };
 // sinon.stub(S3, 'fileExists').callsFake(() => true);
 // const uploaded = sinon.stub(S3, 'upload').callsFake(() => '/test/test.hd');
diff --git a/docker-compose.yml b/docker-compose.yml
index dafe9723fcf..fa52f30adfb 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -1,32 +1,44 @@
-version: '2'
+version: '3'
 services:
   local:
-    image: bogem/ftp
+    image: cumuluss/circleci:node-6.10
+    command: start
     volumes:
-      - "./packages/test-data:/home/vsftpd"
+      - "./packages/test-data:/home/vsftpd:ro"
+      - "./.tmp-test-data:/var/www/html:ro"
+      - "./packages/test-data:/home/user"
     ports:
       - "20:20"
       - "21:21"
       - "47400-47470:47400-47470"
-    environment:
-      - FTP_USER=testuser
-      - FTP_PASS=testpass
-      - PASV_ADDRESS=127.0.0.1
+      - "3030:3030"
+      - "2222:2222"
+    links:
+      - localstack
     depends_on:
-      - http
-      - sftp
+      - localstack

-  http:
-    image: centos/httpd
-    ports:
-      - "8080:80"
+  test:
+    image: cumuluss/circleci:node-6.10
+    command: /bin/bash
     volumes:
+      - ".:/home/circleci/project"
+      - "./packages/test-data:/home/vsftpd:ro"
       - "./.tmp-test-data:/var/www/html:ro"
+      - "./packages/test-data:/home/user"
+    ports:
+      - "20:20"
+      - "21:21"
+      - "47400-47470:47400-47470"
+      - "3030:3030"
+      - "2222:2222"
+    links:
+      - localstack
+    depends_on:
+      - localstack

-  sftp:
-    image: atmoz/sftp
+  localstack:
+    image: localstack/localstack
     ports:
-      - "2222:22"
-    volumes:
-      - "./.tmp-test-data:/home/user/test-data:ro"
-    command: user:password
+      - 4567-4582:4567-4582
+      - 8080:8080
\ No newline at end of file
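For checking the compose stack above by hand, a quick smoke test of the published ports — assuming the services are already up and that standard curl, sftp, and ftp clients are available on the host; the FTP credentials come from the cumuluss/circleci image and are not shown in this diff:

    $ curl -s http://localhost:3030/      # Apache, serving ./.tmp-test-data
    $ sftp -P 2222 user@localhost         # password: password; test data copied under /home/user
    $ ftp localhost                       # vsftpd, serving ./packages/test-data
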
diff --git a/packages/test-data/granules/fake_granules/granule-1.nc b/packages/test-data/granules/fake_granules/granule-1.nc
new file mode 100644
index 00000000000..5825481ecb4
--- /dev/null
+++ b/packages/test-data/granules/fake_granules/granule-1.nc
@@ -0,0 +1 @@
+This is granule-1.nc
\ No newline at end of file
diff --git a/packages/test-data/granules/fake_granules/granule-1.nc.md5 b/packages/test-data/granules/fake_granules/granule-1.nc.md5
new file mode 100644
index 00000000000..ab1f7294701
--- /dev/null
+++ b/packages/test-data/granules/fake_granules/granule-1.nc.md5
@@ -0,0 +1 @@
+This is granule-1.nc.md5
\ No newline at end of file
diff --git a/packages/test-data/granules/fake_granules/granule-2.nc b/packages/test-data/granules/fake_granules/granule-2.nc
new file mode 100644
index 00000000000..b2e6b3b122d
--- /dev/null
+++ b/packages/test-data/granules/fake_granules/granule-2.nc
@@ -0,0 +1 @@
+This is granule-2.nc
\ No newline at end of file
diff --git a/packages/test-data/granules/fake_granules/granule-2.nc.md5 b/packages/test-data/granules/fake_granules/granule-2.nc.md5
new file mode 100644
index 00000000000..bdbdaa2a6bb
--- /dev/null
+++ b/packages/test-data/granules/fake_granules/granule-2.nc.md5
@@ -0,0 +1 @@
+This is granule-2.nc.md5
\ No newline at end of file
diff --git a/packages/test-data/granules/fake_granules/granule-3.nc b/packages/test-data/granules/fake_granules/granule-3.nc
new file mode 100644
index 00000000000..c3fcb07ba8d
--- /dev/null
+++ b/packages/test-data/granules/fake_granules/granule-3.nc
@@ -0,0 +1 @@
+This is granule-3.nc
\ No newline at end of file
diff --git a/packages/test-data/granules/fake_granules/granule-3.nc.md5 b/packages/test-data/granules/fake_granules/granule-3.nc.md5
new file mode 100644
index 00000000000..abf43b928a8
--- /dev/null
+++ b/packages/test-data/granules/fake_granules/granule-3.nc.md5
@@ -0,0 +1 @@
+This is granule-3.nc.md5
\ No newline at end of file
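Taken together, the provider settings used by the updated tests map onto the mounts above roughly as follows. This is only a reviewer's summary of the changes in this diff; the in-container wiring of vsftpd, Apache, and the sftp service comes from the cumuluss/circleci image, which is not shown here:

    http://localhost:3030        -> /var/www/html   -> ./.tmp-test-data
    sftp localhost:2222 (user)   -> /home/user      -> ./packages/test-data
        provider paths: 'granules/fake_granules', 'pdrs', '/granules'
    ftp localhost                -> /home/vsftpd    -> ./packages/test-data
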