Skip to content

Commit

Permalink
Uploading file to AWS S3 bucket (#211)
Browse files Browse the repository at this point in the history
https://eaflood.atlassian.net/browse/WATER-3966

This pull request is just a small part of a larger project that involves exporting all our database schemas, converting them into CSV files, and uploading them to our Amazon S3 bucket. This PR's primary focus is connecting to the Amazon S3 bucket and uploading the file. To expedite the export process and see the output sooner, we are using a vertical slicing approach, rather than a horizontal one, which means exporting a single table at a time from each schema.
  • Loading branch information
Beckyrose200 authored May 12, 2023
1 parent d749ffe commit 1225599
Show file tree
Hide file tree
Showing 8 changed files with 7,044 additions and 4,428 deletions.
5 changes: 3 additions & 2 deletions .labrc.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,9 +13,10 @@ module.exports = {
'__extends', '__assign', '__rest', '__decorate', '__param', '__metadata', '__awaiter', '__generator',
'__exportStar', '__createBinding', '__values', '__read', '__spread', '__spreadArrays', '__spreadArray', '__await',
'__asyncGenerator', '__asyncDelegator', '__asyncValues', '__makeTemplateObject', '__importStar', '__importDefault',
'__classPrivateFieldGet', '__classPrivateFieldSet',
'__classPrivateFieldGet', '__classPrivateFieldSet', '__esDecorate', '__runInitializers', '__propKey',
'__setFunctionName', '__classPrivateFieldIn',
// We also ignore globals exposed by global-agent:
'GLOBAL_AGENT','ROARR',
'GLOBAL_AGENT', 'ROARR',
// GlobalNotifier is added by us as a global in a server plugin. It's how we make logging available anywhere in the app
// whilst avoiding having to pass it around
'GlobalNotifier',
Expand Down
2 changes: 1 addition & 1 deletion app/services/db-export/compress-files.service.js
Original file line number Diff line number Diff line change
Expand Up @@ -27,7 +27,7 @@ async function go (filePath) {
await _compressFile(filePath)
global.GlobalNotifier.omg(`${filePath} successfully compressed to gzip.`)

return true
return `${filePath}.gz`
}

async function _compressFile (filePath) {
Expand Down
57 changes: 57 additions & 0 deletions app/services/db-export/send-to-s3-bucket.service.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,57 @@
'use strict'

/**
* Sends a file to our AWS S3 bucket
* @module SendToS3BucketService
*/

const fs = require('fs')
const path = require('path')
const { PutObjectCommand, S3Client } = require('@aws-sdk/client-s3')

const S3Config = require('../../../config/s3.config')
/**
 * Sends a file to our AWS S3 Bucket using the filePath that it receives
 *
 * @param {String} filePath A string containing the path of the file to send to the S3 bucket
 *
 * @returns {Boolean} True if the file is uploaded successfully and false if not
 */
async function go (filePath) {
  const bucketName = S3Config.s3.bucket
  const fileName = path.basename(filePath)
  // Read asynchronously so the event loop is not blocked while a potentially large export file is loaded.
  // (Previously used fs.readFileSync; in an async function a sync throw becomes a rejection anyway, so
  // error behaviour for callers is unchanged.)
  const fileContent = await fs.promises.readFile(filePath)

  const params = {
    Bucket: bucketName,
    // All exports are grouped under the `export/` prefix within the bucket
    Key: `export/${fileName}`,
    Body: fileContent
  }

  return _uploadToBucket(params, fileName)
}
/**
 * Performs the actual S3 upload for the given `PutObject` parameters
 *
 * @param {Object} params The parameters to use when uploading the file.
 * @param {String} fileName The name of the file to upload
 *
 * @returns {Boolean} True if the file is uploaded successfully and false if not
 */
async function _uploadToBucket (params, fileName) {
  // A fresh client per upload keeps the service stateless; credentials/region are resolved
  // by the SDK from the environment
  const client = new S3Client()

  try {
    await client.send(new PutObjectCommand(params))
    global.GlobalNotifier.omg(`The file ${fileName} was uploaded successfully`)

    return true
  } catch (error) {
    // Log and signal failure rather than throwing; callers only need a pass/fail result
    global.GlobalNotifier.omfg(`ERROR uploading file: ${error.message}`)

    return false
  }
}

// Only the `go` entry point is public; `_uploadToBucket` stays private to this module
module.exports = {
  go
}
18 changes: 18 additions & 0 deletions config/s3.config.js
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
'use strict'

/**
 * Config values used for the Amazon S3 bucket
 * @module S3Config
 */

// dotenv is required directly in each config file so that unit tests which rely on just this subset of config
// still get their environment loaded. Requiring dotenv in multiple places has no effect on the app when
// running for real.
require('dotenv').config()

// The bucket name comes from the environment so each deployment can target its own bucket
const s3 = {
  bucket: process.env.AWS_MAINTENANCE_BUCKET
}

module.exports = { s3 }
Loading

0 comments on commit 1225599

Please sign in to comment.