diff --git a/README.md b/README.md index a5c05bd49a..e44047eda9 100644 --- a/README.md +++ b/README.md @@ -230,6 +230,23 @@ To update the SAF CLI on Windows, uninstall any existing version from your syste ## Usage --- +### File Input/Output +The SAF CLI can take local files, an HTTP(s) URL, or a file within an S3 bucket as input. + +For example, to view an HDF file from an S3 bucket: + +``saf view heimdall -f s3://HDF/rhel7-scan_02032022A.json`` + +Or to take a URL as input: + +``saf convert hdf2csv -i https://raw.githubusercontent.com/mitre/saf/main/test/sample_data/HDF/input/red_hat_good.json -o red_hat_good.csv`` + +The SAF CLI supports writing its output to the local filesystem or to an S3 bucket. + +For example, to convert an HDF file into ASFF using remote buckets: + +``saf convert hdf2asff -i s3://HDF/rhel7-scan_02032022A.json -a 123456789 -r us-east-1 -t rhel7_example_host -o s3://ASFF/rhel7.asff`` + ### Attest Attest to 'Not Reviewed' controls: sometimes requirements can’t be tested automatically by security tools and hence require manual review, whereby someone interviews people and/or examines a system to confirm (i.e., attest as to) whether the control requirements have been satisfied. @@ -307,7 +324,7 @@ convert hdf2asff Translate a Heimdall Data Format JSON file into -t, --target= (required) Unique name for target to track findings across time -u, --upload Upload findings to AWS Security Hub EXAMPLES - $ saf convert hdf2asff -i rhel7-scan_02032022A.json -a 123456789 -r us-east-1 -t rhel7_example_host -o rhel7.asff + $ saf convert hdf2asff -i s3://HDF/rhel7-scan_02032022A.json -a 123456789 -r us-east-1 -t rhel7_example_host -o s3://ASFF/rhel7.asff $ saf convert hdf2asff -i rds_mysql_i123456789scan_03042022A.json -a 987654321 -r us-west-1 -t Instance_i123456789 -u $ saf convert hdf2asff -i snyk_acme_project5_hdf_04052022A.json -a 2143658798 -r us-east-1 -t acme_project5 -o snyk_acme_project5 -u ``` @@ -339,6 +356,7 @@ convert hdf2splunk Translate and upload a Heimdall Data Format JSON f EXAMPLES $ saf convert hdf2splunk -i rhel7-results.json -H 127.0.0.1 -u admin -p Valid_password! 
-I hdf $ saf convert hdf2splunk -i rhel7-results.json -H 127.0.0.1 -t your.splunk.token -I hdf + $ saf convert hdf2splunk -i s3://HDF/rhel7-results.json -H 127.0.0.1 -t your.splunk.token -I hdf ``` For HDF Splunk Schema documentation visit 👉 [Heimdall converter schemas](https://github.com/mitre/heimdall2/blob/master/libs/hdf-converters/src/converters-from-hdf/splunk/Schemas.md#schemas) @@ -379,6 +397,7 @@ convert hdf2xccdf Translate an HDF file into an XCCDF XML EXAMPLES $ saf convert hdf2xccdf -i hdf_input.json -o xccdf-results.xml + $ saf convert hdf2xccdf -i s3://hdf/hdf_input.json -o xccdf-results.xml ``` [top](#convert-hdf-to-other-formats) #### HDF to Checklist diff --git a/package-lock.json b/package-lock.json index 9684ec2d72..11f47313d8 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1941,6 +1941,15 @@ "yaml": "^2.1.1" } }, + "node_modules/@mitre/hdf-converters/node_modules/axios": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz", + "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==", + "dependencies": { + "follow-redirects": "^1.14.9", + "form-data": "^4.0.0" + } + }, "node_modules/@mitre/hdf-converters/node_modules/dom-serializer": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", @@ -20985,6 +20994,15 @@ "yaml": "^2.1.1" }, "dependencies": { + "axios": { + "version": "0.27.2", + "resolved": "https://registry.npmjs.org/axios/-/axios-0.27.2.tgz", + "integrity": "sha512-t+yRIyySRTp/wua5xEr+z1q60QmLq8ABsS5O9Me1AsE5dfKqgnCFzwiCZZ/cGNd1lq4/7akDWMxdhVlucjmnOQ==", + "requires": { + "follow-redirects": "^1.14.9", + "form-data": "^4.0.0" + } + }, "dom-serializer": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/dom-serializer/-/dom-serializer-1.4.1.tgz", diff --git a/src/commands/convert/asff2hdf.ts b/src/commands/convert/asff2hdf.ts index e670b98ff6..e13e699556 100644 --- a/src/commands/convert/asff2hdf.ts +++ b/src/commands/convert/asff2hdf.ts @@ -1,5 +1,4 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {ASFFResults as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' import _ from 'lodash' @@ -8,6 +7,7 @@ import AWS from 'aws-sdk' import https from 'https' import {AwsSecurityFindingFilters} from 'aws-sdk/clients/securityhub' import {createWinstonLogger} from '../../utils/logging' +import {createFolderIfNotExists, folderExistsURI, readFileURI, writeFileURI} from '../../utils/io' // Should be no more than 100 const API_MAX_RESULTS = 100 @@ -40,17 +40,17 @@ export default class ASFF2HDF extends Command { async run() { const {flags} = await this.parse(ASFF2HDF) const logger = createWinstonLogger('asff2hdf', flags.logLevel) - let securityhub + let securityhub: string[] | undefined // Check if output folder already exists - if (fs.existsSync(flags.output)) { + if (await folderExistsURI(flags.output)) { throw new Error(`Output folder ${flags.output} already exists`) } const findings: string[] = [] // If we've been passed an input file if (flags.input) { - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') // Attempt to convert to one finding per line try { const convertedJson = JSON.parse(data) @@ -81,9 +81,9 @@ export default class ASFF2HDF extends Command { // If we've been passed any Security Standards JSONs if (flags.securityhub) { - securityhub = flags.securityhub.map((file: string) 
=> - fs.readFileSync(file, 'utf8'), - ) + securityhub = await Promise.all(flags.securityhub.map((file: string) => + readFileURI(file, 'utf8'), + )) } } else if (flags.aws) { // Flag to pull findings from AWS Security Hub AWS.config.update({ @@ -92,7 +92,7 @@ export default class ASFF2HDF extends Command { // Disable HTTPS verification if requested rejectUnauthorized: !flags.insecure, // Pass an SSL certificate to trust - ca: flags.certificate ? fs.readFileSync(flags.certificate, 'utf8') : undefined, + ca: flags.certificate ? await readFileURI(flags.certificate, 'utf8') : undefined, }), }, }) @@ -176,9 +176,9 @@ export default class ASFF2HDF extends Command { const results = converter.toHdf() - fs.mkdirSync(flags.output) - _.forOwn(results, (result, filename) => { - fs.writeFileSync( + createFolderIfNotExists(flags.output) + _.forOwn(results, async (result, filename) => { + await writeFileURI( path.join(flags.output, checkSuffix(filename)), JSON.stringify(result), ) diff --git a/src/commands/convert/aws_config2hdf.ts b/src/commands/convert/aws_config2hdf.ts index 27dc8ff687..cf5ad1d13e 100644 --- a/src/commands/convert/aws_config2hdf.ts +++ b/src/commands/convert/aws_config2hdf.ts @@ -1,8 +1,8 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {AwsConfigMapper as Mapper} from '@mitre/hdf-converters' import {ExecJSON} from 'inspecjs' import {checkSuffix} from '../../utils/global' +import {writeFileURI} from '../../utils/io' export default class AWSConfig2HDF extends Command { static usage = 'convert aws_config2hdf -r -o [-h] [-a ] [-s ] [-t ] [-i]' @@ -56,6 +56,6 @@ export default class AWSConfig2HDF extends Command { region: flags.region, }, !flags.insecure) : new Mapper({region: flags.region}, !flags.insecure) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(this.ensureRefs(await converter.toHdf()))) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(this.ensureRefs(await converter.toHdf()))) } } diff --git a/src/commands/convert/burpsuite2hdf.ts b/src/commands/convert/burpsuite2hdf.ts index b9d45b5868..82a9b1f2df 100644 --- a/src/commands/convert/burpsuite2hdf.ts +++ b/src/commands/convert/burpsuite2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {BurpSuiteMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Burpsuite2HDF extends Command { static usage = 'convert burpsuite2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class Burpsuite2HDF extends Command { const {flags} = await this.parse(Burpsuite2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'burp', 'BurpSuite Pro XML') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/ckl2POAM.ts b/src/commands/convert/ckl2POAM.ts index ad867532cf..e0b0d49fc5 100644 --- a/src/commands/convert/ckl2POAM.ts +++ b/src/commands/convert/ckl2POAM.ts @@ -1,5 +1,4 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import path from 'path' import _ from 'lodash' import {createLogger, format, transports} from 'winston' @@ -11,6 +10,7 @@ import moment from 'moment' import 
{cci2nist, cklSeverityToImpact, cklSeverityToLikelihood, cklSeverityToPOAMSeverity, cklSeverityToRelevanceOfThreat, cklSeverityToResidualRiskLevel, cleanStatus, combineComments, convertToRawSeverity, createCVD, extractSolution, extractSTIGUrl, replaceSpecialCharacters} from '../../utils/ckl2poam' import {default as files} from '../../resources/files.json' import {convertFullPathToFilename, dataURLtoU8Array} from '../../utils/global' +import {createFolderIfNotExists, readFileURI} from '../../utils/io' const prompt = promptSync() const {printf} = format @@ -53,11 +53,9 @@ export default class CKL2POAM extends Command { async run() { const {flags} = await this.parse(CKL2POAM) // Create output folder if it doesn't exist already - if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output) - } + await createFolderIfNotExists(flags.output) - flags.input.forEach((fileName: string) => { + for (const fileName of flags.input) { // Ignore files that start with . (e.g .gitignore) if (fileName.startsWith('.')) { return @@ -69,145 +67,137 @@ export default class CKL2POAM extends Command { message: 'Opening file', }) const parser = new xml2js.Parser() - fs.readFile(fileName, function (readFileError, data) { - if (readFileError) { + + const inputFileData = await readFileURI(fileName, 'utf8') + + parser.parseString(inputFileData, function (parseFileError: any, result: STIG) { + if (parseFileError) { logger.log({ level: 'error', file: fileName, - message: `An error occurred opening the file ${fileName}: ${readFileError}`, + message: `An error occurred parsing the file: ${parseFileError}`, }) - } - - // Parse the XML to a javascript object - parser.parseString(data, function (parseFileError: any, result: STIG) { - if (parseFileError) { - logger.log({ - level: 'error', - file: fileName, - message: `An error occurred parsing the file: ${readFileError}`, - }) - } else { - const infos: Record = {} - let vulnerabilities: Vulnerability[] = [] - const iStigs: STIGHolder[] = [] - const stigs = result.CHECKLIST.STIGS - logger.log({ - level: 'info', - file: fileName, - message: `Found ${stigs?.length} STIGs`, + } else { + const infos: Record = {} + let vulnerabilities: Vulnerability[] = [] + const iStigs: STIGHolder[] = [] + const stigs = result.CHECKLIST.STIGS + logger.log({ + level: 'info', + file: fileName, + message: `Found ${stigs?.length} STIGs`, + }) + // Get nested iSTIGs + stigs?.forEach(stig => { + stig.iSTIG?.forEach(iStig => { + iStigs.push(iStig) }) - // Get nested iSTIGs - stigs?.forEach(stig => { - stig.iSTIG?.forEach(iStig => { - iStigs.push(iStig) + }) + logger.log({ + level: 'info', + file: fileName, + message: `Found ${iStigs.length} iSTIGs`, + }) + // Get the controls/vulnerabilities from each stig + iStigs.forEach(iSTIG => { + iSTIG.STIG_INFO?.forEach(info => { + info.SI_DATA?.forEach(data => { + if (data.SID_DATA) { + infos[data.SID_NAME[0]] = data.SID_DATA[0] + } }) }) - logger.log({ - level: 'info', - file: fileName, - message: `Found ${iStigs.length} iSTIGs`, - }) - // Get the controls/vulnerabilities from each stig - iStigs.forEach(iSTIG => { - iSTIG.STIG_INFO?.forEach(info => { - info.SI_DATA?.forEach(data => { - if (data.SID_DATA) { - infos[data.SID_NAME[0]] = data.SID_DATA[0] - } - }) - }) - if (iSTIG.VULN) { - vulnerabilities = [ - ...vulnerabilities, - ...iSTIG.VULN.map(vulnerability => { - const values: Record = {} - // Extract STIG_DATA - vulnerability.STIG_DATA?.reverse().forEach(data => { - values[data.VULN_ATTRIBUTE[0]] = data.ATTRIBUTE_DATA[0] - }) - // Extract remaining 
fields (status, finding details, comments, security override, and severity justification) - Object.entries(vulnerability).forEach(([key, value]) => { - values[key] = value[0] - }) - return values - }), - ] - } - }) - logger.log({ - level: 'info', - file: fileName, - message: `Found ${vulnerabilities.length} vulnerabilities`, - }) - const officeOrg = flags.officeOrg || prompt('What should the default value be for Office/org? ') - const host = flags.deviceName || prompt('What is the device name? ') - // Read our template - XlsxPopulate.fromDataAsync(dataURLtoU8Array(files.POAMTemplate.data)).then((workBook: any) => { - // eMASS reads the first sheet in the notebook - const sheet = workBook.sheet(0) - // The current row we are on - let currentRow = STARTING_ROW - // The scheduled completion date, default of one year from today - const aYearFromNow = moment(new Date(new Date().setFullYear(new Date().getFullYear() + 1))).format('M/DD/YYYY') - // For each vulnerability - vulnerabilities.forEach(vulnerability => { - if (vulnerability.STATUS !== 'NotAFinding' && vulnerability.STATUS !== 'Not_Reviewed') { - // Control Vulnerability Description - if (vulnerability.STATUS === 'Not_Applicable') { - sheet.cell(`C${currentRow}`).value('Not Applicable') - } else { - sheet.cell(`C${currentRow}`).value(replaceSpecialCharacters(createCVD(vulnerability))) - } + if (iSTIG.VULN) { + vulnerabilities = [ + ...vulnerabilities, + ...iSTIG.VULN.map(vulnerability => { + const values: Record = {} + // Extract STIG_DATA + vulnerability.STIG_DATA?.reverse().forEach(data => { + values[data.VULN_ATTRIBUTE[0]] = data.ATTRIBUTE_DATA[0] + }) + // Extract remaining fields (status, finding details, comments, security override, and severity justification) + Object.entries(vulnerability).forEach(([key, value]) => { + values[key] = value[0] + }) + return values + }), + ] + } + }) + logger.log({ + level: 'info', + file: fileName, + message: `Found ${vulnerabilities.length} vulnerabilities`, + }) + const officeOrg = flags.officeOrg || prompt('What should the default value be for Office/org? ') + const host = flags.deviceName || prompt('What is the device name? 
') + // Read our template + XlsxPopulate.fromDataAsync(dataURLtoU8Array(files.POAMTemplate.data)).then((workBook: any) => { + // eMASS reads the first sheet in the notebook + const sheet = workBook.sheet(0) + // The current row we are on + let currentRow = STARTING_ROW + // The scheduled completion date, default of one year from today + const aYearFromNow = moment(new Date(new Date().setFullYear(new Date().getFullYear() + 1))).format('M/DD/YYYY') + // For each vulnerability + vulnerabilities.forEach(vulnerability => { + if (vulnerability.STATUS !== 'NotAFinding' && vulnerability.STATUS !== 'Not_Reviewed') { + // Control Vulnerability Description + if (vulnerability.STATUS === 'Not_Applicable') { + sheet.cell(`C${currentRow}`).value('Not Applicable') + } else { + sheet.cell(`C${currentRow}`).value(replaceSpecialCharacters(createCVD(vulnerability))) + } - // Security Control Number - sheet.cell(`D${currentRow}`).value(cci2nist(vulnerability.CCI_REF || '')) - // Office/org - sheet.cell(`E${currentRow}`).value(officeOrg) - // Security Checks - sheet.cell(`F${currentRow}`).value(vulnerability.Rule_ID?.split(',')[0]) - // Resources Required - sheet.cell(`G${currentRow}`).value('NA') - // Scheduled Completion Date - // Default is one year from today - sheet.cell(`H${currentRow}`).value(aYearFromNow) - // Source Identifying Vulnerability - sheet.cell(`K${currentRow}`).value(infos.title || '') - // Status - sheet.cell(`L${currentRow}`).value(cleanStatus(vulnerability.STATUS || '')) - // Comments - if (vulnerability.STATUS === 'Open' || vulnerability.STATUS === 'Not_Applicable') { - if (host.startsWith('Nessus')) { - sheet.cell(`M${currentRow}`).value(combineComments(vulnerability, extractSTIGUrl(vulnerability.FINDING_DETAILS || ''))) - } else { - sheet.cell(`M${currentRow}`).value(combineComments(vulnerability, host)) - } + // Security Control Number + sheet.cell(`D${currentRow}`).value(cci2nist(vulnerability.CCI_REF || '')) + // Office/org + sheet.cell(`E${currentRow}`).value(officeOrg) + // Security Checks + sheet.cell(`F${currentRow}`).value(vulnerability.Rule_ID?.split(',')[0]) + // Resources Required + sheet.cell(`G${currentRow}`).value('NA') + // Scheduled Completion Date + // Default is one year from today + sheet.cell(`H${currentRow}`).value(aYearFromNow) + // Source Identifying Vulnerability + sheet.cell(`K${currentRow}`).value(infos.title || '') + // Status + sheet.cell(`L${currentRow}`).value(cleanStatus(vulnerability.STATUS || '')) + // Comments + if (vulnerability.STATUS === 'Open' || vulnerability.STATUS === 'Not_Applicable') { + if (host.startsWith('Nessus')) { + sheet.cell(`M${currentRow}`).value(combineComments(vulnerability, extractSTIGUrl(vulnerability.FINDING_DETAILS || ''))) + } else { + sheet.cell(`M${currentRow}`).value(combineComments(vulnerability, host)) } - - // Raw Severity - sheet.cell(`N${currentRow}`).value(convertToRawSeverity(vulnerability.Severity || '')) - // Severity - sheet.cell(`P${currentRow}`).value(cklSeverityToPOAMSeverity(vulnerability.Severity || '')) - // Relevance of Threat - sheet.cell(`Q${currentRow}`).value(cklSeverityToRelevanceOfThreat(vulnerability.Severity || '')) - // Likelihood - sheet.cell(`R${currentRow}`).value(cklSeverityToLikelihood(vulnerability.Severity || '')) - // Impact - sheet.cell(`S${currentRow}`).value(cklSeverityToImpact(vulnerability.Severity || '')) - // Residual Risk Level - sheet.cell(`U${currentRow}`).value(cklSeverityToResidualRiskLevel(vulnerability.Severity || '')) - // Impact Description - 
sheet.cell(`T${currentRow}`).value(replaceSpecialCharacters(vulnerability.Vuln_Discuss || '')) - // Recommendations - sheet.cell(`V${currentRow}`).value(replaceSpecialCharacters(vulnerability.Fix_Text || extractSolution(vulnerability.FINDING_DETAILS || '') || '')) - // Go to the next row - currentRow += flags.rowsToSkip + 1 } - }) - return workBook.toFileAsync(path.join(flags.output, `${convertFullPathToFilename(fileName)}-${moment(new Date()).format('YYYY-MM-DD-HHmm')}.xlsm`)) + + // Raw Severity + sheet.cell(`N${currentRow}`).value(convertToRawSeverity(vulnerability.Severity || '')) + // Severity + sheet.cell(`P${currentRow}`).value(cklSeverityToPOAMSeverity(vulnerability.Severity || '')) + // Relevance of Threat + sheet.cell(`Q${currentRow}`).value(cklSeverityToRelevanceOfThreat(vulnerability.Severity || '')) + // Likelihood + sheet.cell(`R${currentRow}`).value(cklSeverityToLikelihood(vulnerability.Severity || '')) + // Impact + sheet.cell(`S${currentRow}`).value(cklSeverityToImpact(vulnerability.Severity || '')) + // Residual Risk Level + sheet.cell(`U${currentRow}`).value(cklSeverityToResidualRiskLevel(vulnerability.Severity || '')) + // Impact Description + sheet.cell(`T${currentRow}`).value(replaceSpecialCharacters(vulnerability.Vuln_Discuss || '')) + // Recommendations + sheet.cell(`V${currentRow}`).value(replaceSpecialCharacters(vulnerability.Fix_Text || extractSolution(vulnerability.FINDING_DETAILS || '') || '')) + // Go to the next row + currentRow += flags.rowsToSkip + 1 + } }) - } - }) + return workBook.toFileAsync(path.join(flags.output, `${convertFullPathToFilename(fileName)}-${moment(new Date()).format('YYYY-MM-DD-HHmm')}.xlsm`)) + }) + } }) - }) + } } } diff --git a/src/commands/convert/dbprotect2hdf.ts b/src/commands/convert/dbprotect2hdf.ts index cc0f84cd96..0491b5f418 100644 --- a/src/commands/convert/dbprotect2hdf.ts +++ b/src/commands/convert/dbprotect2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {DBProtectMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class DBProtect2HDF extends Command { static usage = 'convert dbprotect2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class DBProtect2HDF extends Command { const {flags} = await this.parse(DBProtect2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'dbProtect', 'DBProtect report in "Check Results Details" XML format') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/fortify2hdf.ts b/src/commands/convert/fortify2hdf.ts index 8f5576d844..417ed9e2c5 100644 --- a/src/commands/convert/fortify2hdf.ts +++ b/src/commands/convert/fortify2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {FortifyMapper as Mapper} from '@mitre/hdf-converters' import {checkSuffix, checkInput} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Fortify2HDF extends Command { static usage = 'convert fortify2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class Fortify2HDF extends Command { const {flags} = await 
this.parse(Fortify2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'fortify', 'Fortify results FVDL file') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/hdf2asff.ts b/src/commands/convert/hdf2asff.ts index b6e29181d5..ccb9fbd29d 100644 --- a/src/commands/convert/hdf2asff.ts +++ b/src/commands/convert/hdf2asff.ts @@ -1,5 +1,4 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import https from 'https' import {FromHdfToAsffMapper as Mapper} from '@mitre/hdf-converters' import path from 'path' @@ -7,6 +6,7 @@ import AWS from 'aws-sdk' import {checkSuffix, convertFullPathToFilename} from '../../utils/global' import _ from 'lodash' import {BatchImportFindingsRequestFindingList} from 'aws-sdk/clients/securityhub' +import {createFolderIfNotExists, readFileURI, writeFileURI} from '../../utils/io' export default class HDF2ASFF extends Command { static usage = 'convert hdf2asff -a -r -i -t [-h] [-R] (-u [-I -C ] | [-o ])' @@ -31,7 +31,7 @@ export default class HDF2ASFF extends Command { async run() { const {flags} = await this.parse(HDF2ASFF) - const converted = new Mapper(JSON.parse(fs.readFileSync(flags.input, 'utf8')), { + const converted = new Mapper(JSON.parse(await readFileURI(flags.input, 'utf8')), { awsAccountId: flags.accountId, region: flags.region, regionAttribute: flags.specifyRegionAttribute, @@ -42,14 +42,14 @@ export default class HDF2ASFF extends Command { if (flags.output) { const convertedSlices = _.chunk(converted, 100) // AWS doesn't allow uploading more than 100 findings at a time so we need to split them into chunks const outputFolder = flags.output?.replace('.json', '') || 'asff-output' - fs.mkdirSync(outputFolder) + await createFolderIfNotExists(outputFolder) if (convertedSlices.length === 1) { const outfilePath = path.join(outputFolder, convertFullPathToFilename(checkSuffix(flags.output))) - fs.writeFileSync(outfilePath, JSON.stringify(convertedSlices[0])) + await writeFileURI(outfilePath, JSON.stringify(convertedSlices[0])) } else { - convertedSlices.forEach((slice, index) => { + convertedSlices.forEach(async (slice, index) => { const outfilePath = path.join(outputFolder, `${convertFullPathToFilename(checkSuffix(flags.output || '')).replace('.json', '')}.p${index}.json`) - fs.writeFileSync(outfilePath, JSON.stringify(slice)) + await writeFileURI(outfilePath, JSON.stringify(slice)) }) } } @@ -69,7 +69,7 @@ export default class HDF2ASFF extends Command { httpOptions: { agent: new https.Agent({ rejectUnauthorized: !flags.insecure, - ca: flags.certificate ? fs.readFileSync(flags.certificate, 'utf8') : undefined, + ca: flags.certificate ? 
await readFileURI(flags.certificate, 'utf8') : undefined, }), }, }) diff --git a/src/commands/convert/hdf2ckl.ts b/src/commands/convert/hdf2ckl.ts index 5c74fef7e2..7113ba6677 100644 --- a/src/commands/convert/hdf2ckl.ts +++ b/src/commands/convert/hdf2ckl.ts @@ -1,13 +1,13 @@ import {Command, Flags} from '@oclif/core' import {contextualizeEvaluation} from 'inspecjs' import _ from 'lodash' -import fs from 'fs' import {v4} from 'uuid' import {default as files} from '../../resources/files.json' import Mustache from 'mustache' import {CKLMetadata} from '../../types/checklist' import {convertFullPathToFilename, getProfileInfo} from '../../utils/global' import {getDetails} from '../../utils/checklist' +import {readFileURI, writeFileURI} from '../../utils/io' export default class HDF2CKL extends Command { static usage = 'convert hdf2ckl -i -o [-h] [-m ] [-H ] [-F ] [-M ] [-I ]' @@ -29,7 +29,7 @@ export default class HDF2CKL extends Command { async run() { const {flags} = await this.parse(HDF2CKL) - const contextualizedEvaluation = contextualizeEvaluation(JSON.parse(fs.readFileSync(flags.input, 'utf8'))) + const contextualizedEvaluation = contextualizeEvaluation(JSON.parse(await readFileURI(flags.input, 'utf8'))) const profileName = contextualizedEvaluation.data.profiles[0].name const controls = contextualizedEvaluation.contains.flatMap(profile => profile.contains) || [] const rootControls = _.uniqBy(controls, control => @@ -58,7 +58,7 @@ export default class HDF2CKL extends Command { } if (flags.metadata) { - const cklMetadataInput: CKLMetadata = JSON.parse(fs.readFileSync(flags.metadata, 'utf8')) + const cklMetadataInput: CKLMetadata = JSON.parse(await readFileURI(flags.metadata, 'utf8')) for (const field in cklMetadataInput) { if (typeof cklMetadata[field] === 'string' || typeof cklMetadata[field] === 'object') { cklMetadata[field] = cklMetadataInput[field] @@ -73,6 +73,6 @@ export default class HDF2CKL extends Command { uuid: v4(), controls: rootControls.map(control => getDetails(control, profileName)), } - fs.writeFileSync(flags.output, Mustache.render(files['cklExport.ckl'].data, cklData).replaceAll(/[^\x00-\x7F]/g, '')) + await writeFileURI(flags.output, Mustache.render(files['cklExport.ckl'].data, cklData).replaceAll(/[^\x00-\x7F]/gu, '')) } } diff --git a/src/commands/convert/hdf2condensed.ts b/src/commands/convert/hdf2condensed.ts index ddbcd2d201..e133f07748 100644 --- a/src/commands/convert/hdf2condensed.ts +++ b/src/commands/convert/hdf2condensed.ts @@ -1,9 +1,9 @@ import {Command, Flags} from '@oclif/core' import {ContextualizedProfile, convertFileContextual} from 'inspecjs' -import fs from 'fs' import {calculateCompliance, extractControlSummariesBySeverity, extractStatusCounts, renameStatusName, severityTargetsObject} from '../../utils/threshold' import _ from 'lodash' import {checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class HDF2Condensed extends Command { static usage = 'convert hdf2condensed -i -o [-h]' @@ -21,7 +21,7 @@ export default class HDF2Condensed extends Command { async run() { const {flags} = await this.parse(HDF2Condensed) const thresholds: Record> = {} - const parsedExecJSON = convertFileContextual(fs.readFileSync(flags.input, 'utf8')) + const parsedExecJSON = convertFileContextual(await readFileURI(flags.input, 'utf8')) const parsedProfile = parsedExecJSON.contains[0] as ContextualizedProfile const overallStatusCounts = extractStatusCounts(parsedProfile) const overallCompliance = 
calculateCompliance(overallStatusCounts) @@ -51,6 +51,6 @@ export default class HDF2Condensed extends Command { buckets: extractControlSummariesBySeverity(parsedProfile), status: thresholds, } - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(result)) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(result)) } } diff --git a/src/commands/convert/hdf2csv.ts b/src/commands/convert/hdf2csv.ts index 74273e1eac..1678fad19e 100644 --- a/src/commands/convert/hdf2csv.ts +++ b/src/commands/convert/hdf2csv.ts @@ -1,11 +1,11 @@ import {Command, Flags} from '@oclif/core' import {ContextualizedEvaluation, contextualizeEvaluation} from 'inspecjs' import _ from 'lodash' -import fs from 'fs' import ObjectsToCsv from 'objects-to-csv' import {ControlSetRows} from '../../types/csv' import {convertRow, csvExportFields} from '../../utils/csv' import {convertFullPathToFilename} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class HDF2CSV extends Command { static usage = 'convert hdf2csv -i -o [-h] [-f ] [-t]' @@ -29,7 +29,10 @@ export default class HDF2CSV extends Command { async run() { const {flags} = await this.parse(HDF2CSV) - const contextualizedEvaluation = contextualizeEvaluation(JSON.parse(fs.readFileSync(flags.input, 'utf8'))) + + const data = JSON.parse(await readFileURI(flags.input, 'utf8')) + + const contextualizedEvaluation = contextualizeEvaluation(data) // Convert all controls from a file to ControlSetRows let rows: ControlSetRows = this.convertRows(contextualizedEvaluation, convertFullPathToFilename(flags.input), flags.fields.split(',')) @@ -51,6 +54,8 @@ export default class HDF2CSV extends Command { return cleanedRow }) - await new ObjectsToCsv(rows).toDisk(flags.output) + + const output = await new ObjectsToCsv(rows).toString(true) + await writeFileURI(flags.output, output) } } diff --git a/src/commands/convert/hdf2splunk.ts b/src/commands/convert/hdf2splunk.ts index b30530102c..cb9bd401cf 100644 --- a/src/commands/convert/hdf2splunk.ts +++ b/src/commands/convert/hdf2splunk.ts @@ -1,8 +1,8 @@ import {Command, Flags} from '@oclif/core' import {FromHDFToSplunkMapper} from '@mitre/hdf-converters' import {convertFullPathToFilename} from '../../utils/global' -import fs from 'fs' import {createWinstonLogger, getHDFSummary} from '../../utils/logging' +import {readFileURI} from '../../utils/io' export default class HDF2Splunk extends Command { static usage = 'convert hdf2splunk -i -H -I [-h] [-P ] [-s http|https] [-u | -t ] [-p ] [-L info|warn|debug|verbose]' @@ -34,8 +34,9 @@ export default class HDF2Splunk extends Command { } logger.warn('Please ensure the necessary configuration changes for your Splunk server have been configured to prevent data loss. 
See https://github.com/mitre/saf/wiki/Splunk-Configuration') - const inputFile = JSON.parse(fs.readFileSync(flags.input, 'utf8')) + const inputFile = JSON.parse(await readFileURI(flags.input, 'utf8')) logger.info(`Input File "${convertFullPathToFilename(flags.input)}": ${getHDFSummary(inputFile)}`) + await new FromHDFToSplunkMapper(inputFile, logger).toSplunk({ host: flags.host, port: flags.port, diff --git a/src/commands/convert/hdf2xccdf.ts b/src/commands/convert/hdf2xccdf.ts index 488a4c26e0..1c8b1c0d9e 100644 --- a/src/commands/convert/hdf2xccdf.ts +++ b/src/commands/convert/hdf2xccdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {FromHDFToXCCDFMapper as Mapper} from '@mitre/hdf-converters' import {default as files} from '../../resources/files.json' +import {readFileURI, writeFileURI} from '../../utils/io' export default class HDF2XCCDF extends Command { static usage = 'convert hdf2xccdf -i -o [-h]' @@ -19,7 +19,7 @@ export default class HDF2XCCDF extends Command { async run() { const {flags} = await this.parse(HDF2XCCDF) - const converter = new Mapper(fs.readFileSync(flags.input, 'utf8'), files['xccdfTemplate.xml'].data) - fs.writeFileSync(flags.output, converter.toXCCDF()) + const converter = new Mapper(await readFileURI(flags.input, 'utf8'), files['xccdfTemplate.xml'].data) + await writeFileURI(flags.output, converter.toXCCDF()) } } diff --git a/src/commands/convert/ionchannel2hdf.ts b/src/commands/convert/ionchannel2hdf.ts index 813cd59aa3..20b1bb12ea 100644 --- a/src/commands/convert/ionchannel2hdf.ts +++ b/src/commands/convert/ionchannel2hdf.ts @@ -2,8 +2,8 @@ import {IonChannelAPIMapper, IonChannelMapper} from '@mitre/hdf-converters' import {Command, Flags} from '@oclif/core' import {checkInput, checkSuffix, convertFullPathToFilename} from '../../utils/global' import {createWinstonLogger} from '../../utils/logging' -import fs from 'fs' import path from 'path' +import {createFolderIfNotExists, readFileURI, writeFileURI} from '../../utils/io' export default class IonChannel2HDF extends Command { static usage = 'convert ionchannel2hdf -o [-h] (-i | -a -t [--raw ] [-p ] [-A ]) [-L info|warn|debug|verbose]' @@ -72,7 +72,7 @@ export default class IonChannel2HDF extends Command { await apiClient.setTeam(flags.teamName) logger.debug(`Set team to ID ${apiClient.teamId}`) - fs.mkdirSync(flags.output) + await createFolderIfNotExists(flags.output) const availableProjects = await apiClient.getProjects() for (const project of availableProjects) { logger.info(`Pulling findings from ${project.name}`) @@ -88,7 +88,7 @@ export default class IonChannel2HDF extends Command { json = await apiClient.toHdf() } - fs.writeFileSync(path.join(flags.output, filename), JSON.stringify(json)) + await writeFileURI(path.join(flags.output, filename), JSON.stringify(json)) } } else if (flags.apiKey && flags.teamName && Array.isArray(flags.project)) { logger.debug('Creating Ion Channel API Client') @@ -97,7 +97,7 @@ export default class IonChannel2HDF extends Command { await apiClient.setTeam(flags.teamName) logger.debug(`Set team to ID ${apiClient.teamId}`) - fs.mkdirSync(flags.output) + await createFolderIfNotExists(flags.output) for (const projectName of flags.project) { logger.info(`Pulling findings from ${projectName}`) await apiClient.setProject(projectName) @@ -112,18 +112,18 @@ export default class IonChannel2HDF extends Command { json = await apiClient.toHdf() } - fs.writeFileSync(path.join(flags.output, filename), JSON.stringify(json)) + await 
writeFileURI(path.join(flags.output, filename), JSON.stringify(json)) } } else if (Array.isArray(flags.input)) { logger.debug('Processing input files') - fs.mkdirSync(flags.output) + await createFolderIfNotExists(flags.output) for (const filename of flags.input) { // Check for correct input type - const data = fs.readFileSync(filename, 'utf8') + const data = await readFileURI(filename, 'utf8') checkInput({data: data, filename: filename}, 'ionchannel', 'IonChannel JSON') logger.debug(`Processing...${filename}`) - fs.writeFileSync( + await writeFileURI( path.join( flags.output, checkSuffix(convertFullPathToFilename(filename)), diff --git a/src/commands/convert/jfrog_xray2hdf.ts b/src/commands/convert/jfrog_xray2hdf.ts index ac685d4c4d..374aa7fb92 100644 --- a/src/commands/convert/jfrog_xray2hdf.ts +++ b/src/commands/convert/jfrog_xray2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {JfrogXrayMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class JfrogXray2HDF extends Command { static usage = 'convert jfrog_xray2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class JfrogXray2HDF extends Command { const {flags} = await this.parse(JfrogXray2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'jfrog', 'JFrog Xray results JSON') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/nessus2hdf.ts b/src/commands/convert/nessus2hdf.ts index 508e69b840..e14af49103 100644 --- a/src/commands/convert/nessus2hdf.ts +++ b/src/commands/convert/nessus2hdf.ts @@ -1,8 +1,8 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {NessusResults as Mapper} from '@mitre/hdf-converters' import _ from 'lodash' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Nessus2HDF extends Command { static usage = 'convert nessus2hdf -i -o [-h] [-w]' @@ -22,17 +22,17 @@ export default class Nessus2HDF extends Command { const {flags} = await this.parse(Nessus2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'nessus', 'Nessus XML results file') const converter = new Mapper(data, flags['with-raw']) const result = converter.toHdf() if (Array.isArray(result)) { for (const element of result) { - fs.writeFileSync(`${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, JSON.stringify(element)) + await writeFileURI(`${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, JSON.stringify(element)) } } else { - fs.writeFileSync(`${checkSuffix(flags.output)}`, JSON.stringify(result)) + await writeFileURI(`${checkSuffix(flags.output)}`, JSON.stringify(result)) } } } diff --git a/src/commands/convert/netsparker2hdf.ts b/src/commands/convert/netsparker2hdf.ts index 57b76605b1..95671aa511 100644 --- a/src/commands/convert/netsparker2hdf.ts +++ b/src/commands/convert/netsparker2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from 
'@oclif/core' -import fs from 'fs' import {NetsparkerMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Netsparker2HDF extends Command { static usage = 'convert netsparker2hdf -i -o [-h]' @@ -20,10 +20,10 @@ export default class Netsparker2HDF extends Command { const {flags} = await this.parse(Netsparker2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data: data, filename: flags.input}, 'netsparker', 'Netsparker XML results file') const converter = new Mapper(data) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/nikto2hdf.ts b/src/commands/convert/nikto2hdf.ts index f11044c217..57058d33bf 100644 --- a/src/commands/convert/nikto2hdf.ts +++ b/src/commands/convert/nikto2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {NiktoMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Nikto2HDF extends Command { static usage = 'convert nikto2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class Nikto2HDF extends Command { const {flags} = await this.parse(Nikto2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'nikto', 'Nikto results JSON') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/prisma2hdf.ts b/src/commands/convert/prisma2hdf.ts index 99bd5c9cc3..9f291102dd 100644 --- a/src/commands/convert/prisma2hdf.ts +++ b/src/commands/convert/prisma2hdf.ts @@ -1,8 +1,8 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {PrismaMapper as Mapper} from '@mitre/hdf-converters' import path from 'path' import _ from 'lodash' +import {createFolderIfNotExists, readFileURI, writeFileURI} from '../../utils/io' export default class Prisma2HDF extends Command { static usage = 'convert prisma2hdf -i -o [-h]' @@ -21,16 +21,14 @@ export default class Prisma2HDF extends Command { const {flags} = await this.parse(Prisma2HDF) const converter = new Mapper( - fs.readFileSync(flags.input, {encoding: 'utf8'}), + await readFileURI(flags.input, 'utf8'), ) const results = converter.toHdf() - if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output) - } + await createFolderIfNotExists(flags.output) - _.forOwn(results, result => { - fs.writeFileSync( + _.forOwn(results, async result => { + await writeFileURI( path.join(flags.output, `${_.get(result, 'platform.target_id')}.json`), JSON.stringify(result), ) diff --git a/src/commands/convert/prowler2hdf.ts b/src/commands/convert/prowler2hdf.ts index c481147dfd..2551c9f214 100644 --- a/src/commands/convert/prowler2hdf.ts +++ b/src/commands/convert/prowler2hdf.ts @@ -1,9 +1,9 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {ASFFResults as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from 
'../../utils/global' import _ from 'lodash' import path from 'path' +import {createFolderIfNotExists, readFileURI, writeFileURI} from '../../utils/io' export default class Prowler2HDF extends Command { static usage = 'convert prowler2hdf -i -o [-h]' @@ -20,18 +20,16 @@ export default class Prowler2HDF extends Command { async run() { const {flags} = await this.parse(Prowler2HDF) - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data: data, filename: flags.input}, 'asff', 'Prowler-derived AWS Security Finding Format results') const converter = new Mapper(data) const results = converter.toHdf() // Create output folder if not exists - if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output) - } + await createFolderIfNotExists(flags.output) - _.forOwn(results, (result, filename) => { - fs.writeFileSync( + _.forOwn(results, async (result, filename) => { + await writeFileURI( path.join(flags.output, checkSuffix(filename)), JSON.stringify(result), ) diff --git a/src/commands/convert/sarif2hdf.ts b/src/commands/convert/sarif2hdf.ts index 9300c52906..2e401dde50 100644 --- a/src/commands/convert/sarif2hdf.ts +++ b/src/commands/convert/sarif2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {SarifMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Sarif2HDF extends Command { static usage = 'convert sarif2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class Sarif2HDF extends Command { const {flags} = await this.parse(Sarif2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'sarif', 'SARIF JSON') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/scoutsuite2hdf.ts b/src/commands/convert/scoutsuite2hdf.ts index f9feeea99f..94b3bb18df 100644 --- a/src/commands/convert/scoutsuite2hdf.ts +++ b/src/commands/convert/scoutsuite2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {ScoutsuiteMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Scoutsuite2HDF extends Command { static usage = 'convert scoutsuite2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class Scoutsuite2HDF extends Command { const {flags} = await this.parse(Scoutsuite2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'scoutsuite', 'ScoutSuite results from a Javascript object') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/snyk2hdf.ts b/src/commands/convert/snyk2hdf.ts index 496f644d83..4d75c32552 100644 --- a/src/commands/convert/snyk2hdf.ts +++ b/src/commands/convert/snyk2hdf.ts @@ -1,8 +1,8 @@ import {Command, Flags} from 
'@oclif/core' -import fs from 'fs' import {SnykResults as Mapper} from '@mitre/hdf-converters' import _ from 'lodash' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Snyk2HDF extends Command { static usage = 'convert snyk2hdf -i -o [-h]' @@ -21,17 +21,17 @@ export default class Snyk2HDF extends Command { const {flags} = await this.parse(Snyk2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data: data, filename: flags.input}, 'snyk', 'Snyk results JSON') const converter = new Mapper(data) const result = converter.toHdf() if (Array.isArray(result)) { for (const element of result) { - fs.writeFileSync(`${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, JSON.stringify(element)) + await writeFileURI(`${flags.output.replaceAll(/\.json/gi, '')}-${_.get(element, 'platform.target_id')}.json`, JSON.stringify(element)) } } else { - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(result)) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(result)) } } } diff --git a/src/commands/convert/sonarqube2hdf.ts b/src/commands/convert/sonarqube2hdf.ts index 13f33f678b..42b31a40a8 100644 --- a/src/commands/convert/sonarqube2hdf.ts +++ b/src/commands/convert/sonarqube2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {SonarQubeResults as Mapper} from '@mitre/hdf-converters' import {checkSuffix} from '../../utils/global' +import {writeFileURI} from '../../utils/io' export default class Sonarqube2HDF extends Command { static usage = 'convert sonarqube2hdf -n -u -a [ -b | -p ] -o ' @@ -23,6 +23,6 @@ export default class Sonarqube2HDF extends Command { async run() { const {flags} = await this.parse(Sonarqube2HDF) const converter = new Mapper(flags.url, flags.projectKey, flags.auth, flags.branch, flags.pullRequestID) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(await converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(await converter.toHdf())) } } diff --git a/src/commands/convert/splunk2hdf.ts b/src/commands/convert/splunk2hdf.ts index ebff1192f4..22802d54e5 100644 --- a/src/commands/convert/splunk2hdf.ts +++ b/src/commands/convert/splunk2hdf.ts @@ -3,8 +3,8 @@ import {SplunkMapper} from '@mitre/hdf-converters/lib/src/splunk-mapper' import {table} from 'table' import {createWinstonLogger} from '../../utils/logging' import _ from 'lodash' -import fs from 'fs' import path from 'path' +import {createFolderIfNotExists, writeFileURI} from '../../utils/io' export default class Splunk2HDF extends Command { static usage = 'splunk2hdf -H -I [-h] [-P ] [-s http|https] (-u -p | -t ) [-L info|warn|debug|verbose] [-i -o ]' @@ -52,13 +52,13 @@ export default class Splunk2HDF extends Command { if (flags.input && flags.output) { const outputFolder = flags.output?.replace('.json', '') || 'asff-output' - fs.mkdirSync(outputFolder) + await createFolderIfNotExists(outputFolder) flags.input.forEach(async (input: string) => { // If we have a GUID if (/^(\w){30}$/.test(input)) { const hdf = await mapper.toHdf(input) // Rename example.json -> example-p9dwG2kdSoHsYdyF2dMytUmljgOHD5.json and put into the outputFolder - fs.writeFileSync( + await writeFileURI( path.join( outputFolder, _.get(hdf, 'meta.filename', '').replace(/\.json$/, '') + _.get(hdf, 'meta.guid') + '.json', @@ -70,7 +70,7 @@ export 
default class Splunk2HDF extends Command { const executions = await this.searchExecutions(mapper, input) executions.forEach(async execution => { const hdf = await mapper.toHdf(_.get(execution, 'meta.guid')) - fs.writeFileSync( + await writeFileURI( path.join( outputFolder, _.get(hdf, 'meta.filename', '').replace(/\.json$/, '') + _.get(hdf, 'meta.guid') + '.json', diff --git a/src/commands/convert/trivy2hdf.ts b/src/commands/convert/trivy2hdf.ts index b1a0deed47..17b924a60b 100644 --- a/src/commands/convert/trivy2hdf.ts +++ b/src/commands/convert/trivy2hdf.ts @@ -1,9 +1,9 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {ASFFResults as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' import _ from 'lodash' import path from 'path' +import {createFolderIfNotExists, readFileURI, writeFileURI} from '../../utils/io' export default class Trivy2HDF extends Command { static usage = 'convert trivy2hdf -i -o ' @@ -21,22 +21,17 @@ export default class Trivy2HDF extends Command { async run() { const {flags} = await this.parse(Trivy2HDF) // comes as an _asff.json file which is basically the array of findings but without the surrounding object; however, could also be properly formed asff since it depends on the template used - const input = fs.readFileSync(flags.input, 'utf8').trim() - // if (Array.isArray(JSON.parse(input))) { - // input = `{"Findings": ${fs.readFileSync(flags.input, 'utf8').trim()}}` - // } + const input = await readFileURI(flags.input, 'utf8') - checkInput({data: input, filename: flags.input}, 'asff', 'Trivy-derived AWS Security Finding Format results') + checkInput({data: input.trim(), filename: flags.input}, 'asff', 'Trivy-derived AWS Security Finding Format results') - const converter = new Mapper(input) + const converter = new Mapper(input.trim()) const results = converter.toHdf() - if (!fs.existsSync(flags.output)) { - fs.mkdirSync(flags.output) - } + await createFolderIfNotExists(flags.output) - _.forOwn(results, (result, filename) => { - fs.writeFileSync( + _.forOwn(results, async (result, filename) => { + await writeFileURI( path.join(flags.output, checkSuffix(filename)), JSON.stringify(result), ) diff --git a/src/commands/convert/twistlock2hdf.ts b/src/commands/convert/twistlock2hdf.ts index dc15d3dc4d..c1c0b154df 100644 --- a/src/commands/convert/twistlock2hdf.ts +++ b/src/commands/convert/twistlock2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {TwistlockResults as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Twistlock2HDF extends Command { static usage = 'convert twistlock2hdf -i -o [-h] [-w]' @@ -21,11 +21,11 @@ export default class Twistlock2HDF extends Command { const {flags} = await this.parse(Twistlock2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'twistlock', 'Twistlock CLI output file') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/xccdf_results2hdf.ts b/src/commands/convert/xccdf_results2hdf.ts index 51173084fe..1e4fbfe92a 100644 --- a/src/commands/convert/xccdf_results2hdf.ts 
+++ b/src/commands/convert/xccdf_results2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {XCCDFResultsMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class XCCDFResults2HDF extends Command { static usage = 'convert xccdf_results2hdf -i -o [-h] [-w]' @@ -21,10 +21,10 @@ export default class XCCDFResults2HDF extends Command { const {flags} = await this.parse(XCCDFResults2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'xccdf', 'SCAP client XCCDF-Results XML report') const converter = new Mapper(data, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/convert/zap2hdf.ts b/src/commands/convert/zap2hdf.ts index f5c890afee..b8994b697f 100644 --- a/src/commands/convert/zap2hdf.ts +++ b/src/commands/convert/zap2hdf.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import {ZapMapper as Mapper} from '@mitre/hdf-converters' import {checkInput, checkSuffix} from '../../utils/global' +import {readFileURI, writeFileURI} from '../../utils/io' export default class Zap2HDF extends Command { static usage = 'convert zap2hdf -i -n -o [-h] [-w]' @@ -22,10 +22,10 @@ export default class Zap2HDF extends Command { const {flags} = await this.parse(Zap2HDF) // Check for correct input type - const data = fs.readFileSync(flags.input, 'utf8') + const data = await readFileURI(flags.input, 'utf8') checkInput({data, filename: flags.input}, 'zap', 'OWASP ZAP results JSON') - const converter = new Mapper(fs.readFileSync(flags.input, 'utf8'), flags.name, flags['with-raw']) - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) + const converter = new Mapper(await readFileURI(flags.input, 'utf8'), flags.name, flags['with-raw']) + await writeFileURI(checkSuffix(flags.output), JSON.stringify(converter.toHdf())) } } diff --git a/src/commands/generate/ckl_metadata.ts b/src/commands/generate/ckl_metadata.ts index f46044c0d1..c065a8d015 100644 --- a/src/commands/generate/ckl_metadata.ts +++ b/src/commands/generate/ckl_metadata.ts @@ -1,7 +1,7 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import promptSync from 'prompt-sync' import _ from 'lodash' +import {writeFileURI} from '../../utils/io' const prompt = promptSync() @@ -38,6 +38,6 @@ export default class GenerateCKLMetadata extends Command { web_db_site: prompt({ask: 'What is the Web or DB site? '}) || null, web_db_instance: prompt({ask: 'What is the Web or DB instance? 
'}) || null, } - fs.writeFileSync(flags.output, JSON.stringify(cklMetadata)) + await writeFileURI(flags.output, JSON.stringify(cklMetadata)) } } diff --git a/src/commands/generate/delta.ts b/src/commands/generate/delta.ts index 632bc61ca0..4930817235 100644 --- a/src/commands/generate/delta.ts +++ b/src/commands/generate/delta.ts @@ -4,6 +4,7 @@ import {processInSpecProfile, processOVAL, UpdatedProfileReturn, updateProfileUs import path from 'path' import {createWinstonLogger} from '../../utils/logging' import fse from 'fs-extra' +import {writeFileURI} from '../../utils/io' export default class GenerateDelta extends Command { static description = 'Update an existing InSpec profile with updated XCCDF guidance' @@ -187,7 +188,7 @@ export default class GenerateDelta extends Command { if (flags.report) { logger.debug('Writing report markdown file') - fs.writeFileSync(path.join(markDownFile), updatedResult.markdown) + await writeFileURI(path.join(markDownFile), updatedResult.markdown) } } else { if (!existingProfile) { diff --git a/src/commands/generate/inspec_metadata.ts b/src/commands/generate/inspec_metadata.ts index 0e5a61075f..9e152f64be 100644 --- a/src/commands/generate/inspec_metadata.ts +++ b/src/commands/generate/inspec_metadata.ts @@ -1,6 +1,6 @@ import {Command, Flags} from '@oclif/core' -import fs from 'fs' import promptSync from 'prompt-sync' +import {writeFileURI} from '../../utils/io' const prompt = promptSync() @@ -26,6 +26,6 @@ export default class GenerateInSpecMetadata extends Command { license: prompt({ask: 'What is the license of the profile? '}), version: prompt({ask: 'What is the version of the profile? '}), } - fs.writeFileSync(flags.output, JSON.stringify(inspecMetadata)) + await writeFileURI(flags.output, JSON.stringify(inspecMetadata)) } } diff --git a/src/commands/generate/threshold.ts b/src/commands/generate/threshold.ts index 735de70861..df11a7ef3f 100644 --- a/src/commands/generate/threshold.ts +++ b/src/commands/generate/threshold.ts @@ -1,10 +1,10 @@ import {Command, Flags} from '@oclif/core' import {ContextualizedProfile, convertFileContextual} from 'inspecjs' import _ from 'lodash' -import fs from 'fs' import YAML from 'yaml' import {ThresholdValues} from '../../types/threshold' import {calculateCompliance, extractStatusCounts, getControlIdMap, renameStatusName, severityTargetsObject} from '../../utils/threshold' +import {readFileURI, writeFileURI} from '../../utils/io' export default class GenerateThreshold extends Command { static usage = 'generate threshold -i [-o ] [-h] [-e] [-c]' @@ -24,7 +24,7 @@ export default class GenerateThreshold extends Command { async run() { const {flags} = await this.parse(GenerateThreshold) const thresholds: ThresholdValues = {} - const parsedExecJSON = convertFileContextual(fs.readFileSync(flags.input, 'utf8')) + const parsedExecJSON = convertFileContextual(await readFileURI(flags.input, 'utf8')) const parsedProfile = parsedExecJSON.contains[0] as ContextualizedProfile const overallStatusCounts = extractStatusCounts(parsedProfile) const overallCompliance = calculateCompliance(overallStatusCounts) @@ -62,7 +62,7 @@ export default class GenerateThreshold extends Command { } if (flags.output) { - fs.writeFileSync(flags.output, YAML.stringify(thresholds)) + await writeFileURI(flags.output, YAML.stringify(thresholds)) } else { console.log(YAML.stringify(thresholds)) } diff --git a/src/commands/supplement/passthrough/read.ts b/src/commands/supplement/passthrough/read.ts index f663d255c6..85081ac6bf 100644 --- 
--- a/src/commands/supplement/passthrough/read.ts
+++ b/src/commands/supplement/passthrough/read.ts
@@ -1,6 +1,6 @@
 import {Command, Flags} from '@oclif/core'
 import {ExecJSON} from 'inspecjs'
-import fs from 'fs'
+import {readFileURI, writeFileURI} from '../../../utils/io'
 
 export default class ReadPassthrough extends Command {
   static usage = 'supplement passthrough read -i [-o ]'
@@ -18,12 +18,12 @@ export default class ReadPassthrough extends Command {
   async run() {
     const {flags} = await this.parse(ReadPassthrough)
 
-    const input: ExecJSON.Execution & {passthrough?: unknown} = JSON.parse(fs.readFileSync(flags.input, 'utf8'))
+    const input: ExecJSON.Execution & {passthrough?: unknown} = JSON.parse(await readFileURI(flags.input, 'utf8'))
 
     const passthrough = input.passthrough || {}
 
     if (flags.output) {
-      fs.writeFileSync(flags.output, JSON.stringify(passthrough, null, 2))
+      await writeFileURI(flags.output, JSON.stringify(passthrough, null, 2))
     } else {
       process.stdout.write(JSON.stringify(passthrough, null, 2))
     }
diff --git a/src/commands/supplement/passthrough/write.ts b/src/commands/supplement/passthrough/write.ts
index 7d4c1425d9..b682e890a2 100644
--- a/src/commands/supplement/passthrough/write.ts
+++ b/src/commands/supplement/passthrough/write.ts
@@ -1,6 +1,6 @@
 import {Command, Flags} from '@oclif/core'
 import {ExecJSON} from 'inspecjs'
-import fs from 'fs'
+import {readFileURI, writeFileURI} from '../../../utils/io'
 
 export default class WritePassthrough extends Command {
   static usage = 'supplement passthrough write -i (-f | -d ) [-o ]'
@@ -25,13 +25,13 @@ export default class WritePassthrough extends Command {
   async run() {
     const {flags} = await this.parse(WritePassthrough)
-    const input: ExecJSON.Execution & {passthrough?: unknown} = JSON.parse(fs.readFileSync(flags.input, 'utf8'))
+    const input: ExecJSON.Execution & {passthrough?: unknown} = JSON.parse(await readFileURI(flags.input, 'utf8'))
     const output: string = flags.output || flags.input
 
     let passthrough: unknown
 
     if (flags.passthroughFile) {
       try {
-        passthrough = JSON.parse(fs.readFileSync(flags.passthroughFile, 'utf8'))
+        passthrough = JSON.parse(await readFileURI(flags.passthroughFile, 'utf8'))
       } catch (error: unknown) {
         throw new Error(`Couldn't parse passthrough data: ${error}`)
       }
@@ -47,6 +47,6 @@ export default class WritePassthrough extends Command {
 
     input.passthrough = passthrough
 
-    fs.writeFileSync(output, JSON.stringify(input, null, 2))
+    await writeFileURI(output, JSON.stringify(input, null, 2))
   }
 }
diff --git a/src/commands/supplement/target/read.ts b/src/commands/supplement/target/read.ts
index c8edbe0af7..b06cb1a875 100644
--- a/src/commands/supplement/target/read.ts
+++ b/src/commands/supplement/target/read.ts
@@ -1,6 +1,6 @@
 import {Command, Flags} from '@oclif/core'
 import {ExecJSON} from 'inspecjs'
-import fs from 'fs'
+import {readFileURI, writeFileURI} from '../../../utils/io'
 
 export default class ReadTarget extends Command {
   static usage = 'supplement target read -i [-o ]'
@@ -18,12 +18,12 @@ export default class ReadTarget extends Command {
   async run() {
     const {flags} = await this.parse(ReadTarget)
 
-    const input: ExecJSON.Execution & {target?: unknown} = JSON.parse(fs.readFileSync(flags.input, 'utf8'))
+    const input: ExecJSON.Execution & {target?: unknown} = JSON.parse(await readFileURI(flags.input, 'utf8'))
 
     const target = input.target || {}
 
     if (flags.output) {
-      fs.writeFileSync(flags.output, JSON.stringify(target, null, 2))
+      await writeFileURI(flags.output, JSON.stringify(target, null, 2))
     } else {
       process.stdout.write(JSON.stringify(target, null, 2))
     }
diff --git a/src/commands/supplement/target/write.ts b/src/commands/supplement/target/write.ts
index 31dc928187..883738c657 100644
--- a/src/commands/supplement/target/write.ts
+++ b/src/commands/supplement/target/write.ts
@@ -1,6 +1,6 @@
 import {Command, Flags} from '@oclif/core'
 import {ExecJSON} from 'inspecjs'
-import fs from 'fs'
+import {readFileURI, writeFileURI} from '../../../utils/io'
 
 export default class WriteTarget extends Command {
   static usage = 'supplement target write -i (-f | -d ) [-o ]'
@@ -25,13 +25,13 @@ export default class WriteTarget extends Command {
   async run() {
     const {flags} = await this.parse(WriteTarget)
-    const input: ExecJSON.Execution & {target?: unknown} = JSON.parse(fs.readFileSync(flags.input, 'utf8'))
+    const input: ExecJSON.Execution & {target?: unknown} = JSON.parse(await readFileURI(flags.input, 'utf8'))
     const output: string = flags.output || flags.input
 
     let target: unknown
 
     if (flags.targetFile) {
       try {
-        target = JSON.parse(fs.readFileSync(flags.targetFile, 'utf8'))
+        target = JSON.parse(await readFileURI(flags.targetFile, 'utf8'))
       } catch (error: unknown) {
         throw new Error(`Couldn't parse target data: ${error}`)
       }
@@ -47,6 +47,6 @@ export default class WriteTarget extends Command {
 
     input.target = target
 
-    fs.writeFileSync(output, JSON.stringify(input, null, 2))
+    await writeFileURI(output, JSON.stringify(input, null, 2))
   }
 }
diff --git a/src/commands/validate/threshold.ts b/src/commands/validate/threshold.ts
index 34c6a2d9e0..a568a30f97 100644
--- a/src/commands/validate/threshold.ts
+++ b/src/commands/validate/threshold.ts
@@ -1,12 +1,12 @@
 import {Command, Flags} from '@oclif/core'
 import flat from 'flat'
 import YAML from 'yaml'
-import fs from 'fs'
 import {ContextualizedProfile, convertFileContextual} from 'inspecjs'
 import _ from 'lodash'
 import {ThresholdValues} from '../../types/threshold'
 import {calculateCompliance, exitNonZeroIfTrue, extractStatusCounts, getControlIdMap, renameStatusName, severityTargetsObject, statusSeverityPaths, totalMax, totalMin} from '../../utils/threshold'
 import {expect} from 'chai'
+import {readFileURI} from '../../utils/io'
 
 export default class Threshold extends Command {
   static usage = 'validate threshold -i [-h] [-T | -F ]'
@@ -36,7 +36,7 @@ export default class Threshold extends Command {
       thresholds = flat.unflatten(toUnpack)
     } else if (flags.templateFile) {
-      const parsed = YAML.parse(fs.readFileSync(flags.templateFile, 'utf8'))
+      const parsed = YAML.parse(await readFileURI(flags.templateFile, 'utf8'))
       thresholds = Object.values(parsed).every(key => typeof key === 'number') ? flat.unflatten(parsed) : parsed
     } else {
       console.log('Please provide an inline compliance template or a compliance file.')
@@ -44,7 +44,7 @@
       return
     }
 
-    const parsedExecJSON = convertFileContextual(fs.readFileSync(flags.input, 'utf8'))
+    const parsedExecJSON = convertFileContextual(await readFileURI(flags.input, 'utf8'))
     const overallStatusCounts = extractStatusCounts(parsedExecJSON.contains[0] as ContextualizedProfile)
 
     if (thresholds.compliance) {
diff --git a/src/commands/view/heimdall.ts b/src/commands/view/heimdall.ts
index 85398f7165..7e254479fe 100644
--- a/src/commands/view/heimdall.ts
+++ b/src/commands/view/heimdall.ts
@@ -1,9 +1,9 @@
 import {Command, Flags} from '@oclif/core'
 import express from 'express'
-import fs from 'fs'
 import path from 'path'
 import open from 'open'
 import {getInstalledPath} from '../../utils/global'
+import {fileExistsURI, readFileURI} from '../../utils/io'
 
 export default class Heimdall extends Command {
   static aliases = ['heimdall']
@@ -33,13 +33,18 @@ export default class Heimdall extends Command {
     // If we were passed a file, does it exist? Can it convert to JSON correctly?
     if (flags.files && flags.files.length > 0) {
-      if (!flags.files.every((file: string) => fs.statSync(file).isFile())) {
+      const allFilesExist = await Promise.all(flags.files.map(file => fileExistsURI(file)))
+      if (!allFilesExist.every(Boolean)) {
         console.log('An option passed as a file was not a file')
         return
       }
 
-      parsedJSONs = flags.files.map((file: string) => {
-        return {filename: path.parse(file).base, data: fs.readFileSync(file, 'utf8')}
+      const parsedFiles = await Promise.all(flags.files.map(file => {
+        return readFileURI(file, 'utf8')
+      }))
+
+      parsedJSONs = flags.files.map((file: string, index: number) => {
+        return {filename: file.split('/')[file.split('/').length - 1], data: parsedFiles[index]}
       })
     }
diff --git a/src/commands/view/summary.ts b/src/commands/view/summary.ts
index 72ba43471d..95a0d15179 100644
--- a/src/commands/view/summary.ts
+++ b/src/commands/view/summary.ts
@@ -1,11 +1,11 @@
 import {Command, Flags} from '@oclif/core'
 import {ContextualizedEvaluation, ContextualizedProfile, convertFileContextual} from 'inspecjs'
-import fs from 'fs'
 import YAML from 'yaml'
 import {calculateCompliance, extractStatusCounts, renameStatusName, severityTargetsObject} from '../../utils/threshold'
 import _ from 'lodash'
 import flat from 'flat'
 import {convertFullPathToFilename} from '../../utils/global'
+import {readFileURI, writeFileURI} from '../../utils/io'
 
 export default class Summary extends Command {
   static aliases = ['summary']
@@ -29,9 +29,15 @@ export default class Summary extends Command {
     const complianceScores: Record = {}
     const execJSONs: Record<string, ContextualizedEvaluation> = {}
-    flags.input.forEach((file: string) => {
-      execJSONs[file] = convertFileContextual(fs.readFileSync(file, 'utf8')) as ContextualizedEvaluation
+
+    const parsedFiles = await Promise.all(flags.input.map(file => {
+      return readFileURI(file, 'utf8')
+    }))
+
+    // Key results by the original input path (parsedFiles is index-aligned with flags.input)
+    flags.input.forEach((file: string, index) => {
+      execJSONs[file] = convertFileContextual(parsedFiles[index]) as ContextualizedEvaluation
     })
+
     Object.entries(execJSONs).forEach(([, parsedExecJSON]) => {
       const summary: Record<string, Record<string, number>> = {}
       const parsedProfile = parsedExecJSON.contains[0] as ContextualizedProfile
@@ -96,7 +102,7 @@ export default class Summary extends Command {
     })
     console.log(flags.json ? JSON.stringify(printableSummaries) : YAML.stringify(printableSummaries))
     if (flags.output) {
-      fs.writeFileSync(flags.output, flags.json ? JSON.stringify(printableSummaries) : YAML.stringify(printableSummaries))
+      await writeFileURI(flags.output, flags.json ? JSON.stringify(printableSummaries) : YAML.stringify(printableSummaries))
     }
   }
 }
diff --git a/src/utils/io.ts b/src/utils/io.ts
new file mode 100644
index 0000000000..26edab2744
--- /dev/null
+++ b/src/utils/io.ts
@@ -0,0 +1,193 @@
+import fs from 'fs'
+import axios from 'axios'
+import AWS from 'aws-sdk'
+import {createWinstonLogger} from './logging'
+import winston from 'winston'
+
+export async function readFileURI(uri: string, encoding: BufferEncoding, logger?: winston.Logger): Promise<string> {
+  if (!logger) {
+    logger = createWinstonLogger('File IO', 'warn')
+  }
+
+  let parsedURI
+
+  try {
+    parsedURI = new URL(uri)
+    logger.debug(`Parsed URI: ${uri} - Protocol is ${parsedURI.protocol}`)
+  } catch {
+    return fs.readFileSync(uri, encoding)
+  }
+
+  if (parsedURI.protocol === 's3:') {
+    // Read file from S3 Bucket
+    logger.debug('Starting read from S3')
+    const s3 = new AWS.S3()
+    const s3Params = {
+      Bucket: parsedURI.hostname,
+      Key: parsedURI.pathname.slice(1),
+    }
+    const s3Object = await s3.getObject(s3Params).promise()
+    logger.debug('Finished read from S3')
+    if (s3Object.Body) {
+      const bodyString = s3Object.Body.toString('utf8')
+      logger.debug(`Read ${bodyString.length} bytes from S3`)
+      return bodyString
+    }
+
+    throw new Error('S3 Object Body is empty')
+  } else if (parsedURI.protocol === 'http:' || parsedURI.protocol === 'https:') {
+    return axios.get(uri, {
+      responseType: 'text',
+    }).then(({data}) => data)
+  }
+
+  throw new Error(`Unsupported protocol to read file: ${parsedURI.protocol}`)
+}
+
+export async function fileExistsURI(uri: string, logger?: winston.Logger): Promise<boolean> {
+  if (!logger) {
+    logger = createWinstonLogger('io', 'warn')
+  }
+
+  let parsedURI
+
+  try {
+    parsedURI = new URL(uri)
+    logger.debug(`Parsed URI: ${uri} - Protocol is ${parsedURI.protocol}`)
+  } catch {
+    return fs.existsSync(uri)
+  }
+
+  if (parsedURI.protocol === 's3:') {
+    // Check if file exists in S3 Bucket
+    logger.debug('Starting check if file exists in S3')
+    const s3 = new AWS.S3()
+    const s3Params = {
+      Bucket: parsedURI.hostname,
+      Key: parsedURI.pathname.slice(1),
+    }
+
+    try {
+      await s3.headObject(s3Params).promise()
+      logger.debug('Finished check if file exists in S3')
+      return true
+    } catch (error: any) {
+      if (error.code === 'NotFound') {
+        return false
+      }
+
+      throw error
+    }
+  } else if (parsedURI.protocol === 'http:' || parsedURI.protocol === 'https:') {
+    // Check if file exists at URL
+    logger.debug('Starting check if file exists at URL')
+    return axios.get(uri, {
+      responseType: 'text',
+    }).then(() => true).catch(() => {
+      return false
+    })
+  }
+
+  console.error(`Unsupported protocol: ${parsedURI.protocol}`)
+  return false
+}
+
+export async function folderExistsURI(uri: string, logger?: winston.Logger): Promise<boolean> {
+  if (!logger) {
+    logger = createWinstonLogger('io', 'warn')
+  }
+
+  let parsedURI
+
+  try {
+    parsedURI = new URL(uri)
+    logger.debug(`Parsed URI: ${uri} - Protocol is ${parsedURI.protocol}`)
+  } catch {
+    return fs.existsSync(uri)
+  }
+
+  if (parsedURI.protocol === 's3:') {
+    // Check if folder exists in S3 Bucket
+    logger.debug('Starting check if folder exists in S3')
+    const s3 = new AWS.S3()
+    const s3Params = {
+      Bucket: parsedURI.hostname,
+      Prefix: parsedURI.pathname.slice(1),
+    }
+
+    try {
+      const foundObjects = await s3.listObjectsV2(s3Params).promise()
+      // An empty listing means no object carries this prefix, i.e. the "folder" does not exist yet
+      return (foundObjects.Contents || []).length > 0
+    } catch (error: any) {
+      if (error.code === 'NotFound') {
+        return false
+      }
+
+      throw error
+    }
+  }
+
+  console.error(`Unsupported protocol: ${parsedURI.protocol}`)
+  throw new Error(`Unsupported protocol for file: ${uri}`)
+}
+
+export async function createFolderIfNotExists(path: string, logger?: winston.Logger): Promise<void> {
+  if (!logger) {
+    logger = createWinstonLogger('File IO', 'warn')
+  }
+
+  let parsedURI
+
+  try {
+    parsedURI = new URL(path)
+    logger.debug(`Parsed URI: ${path} - Protocol is ${parsedURI.protocol}`)
+  } catch {
+    if (!fs.existsSync(path)) {
+      fs.mkdirSync(path)
+    }
+
+    return
+  }
+
+  if (parsedURI.protocol === 's3:') {
+    // Create folder in S3 Bucket
+    logger.debug('Starting folder creation in S3')
+    const s3 = new AWS.S3()
+    const s3Params = {
+      Bucket: parsedURI.hostname,
+      Key: parsedURI.pathname.slice(1),
+    }
+    await s3.putObject(s3Params).promise()
+    logger.debug('Finished folder creation in S3')
+  }
+}
+
+export async function writeFileURI(uri: string, data: string, logger?: winston.Logger): Promise<void> {
+  if (!logger) {
+    logger = createWinstonLogger('File IO', 'warn')
+  }
+
+  let parsedURI
+
+  try {
+    parsedURI = new URL(uri)
+    logger.debug(`Parsed URI: ${uri} - Protocol is ${parsedURI.protocol}`)
+  } catch {
+    return fs.writeFileSync(uri, data)
+  }
+
+  if (parsedURI.protocol === 's3:') {
+    // Write file to S3 Bucket
+    logger.debug('Starting write to S3')
+    const s3 = new AWS.S3()
+    const s3Params = {
+      Bucket: parsedURI.hostname,
+      Key: parsedURI.pathname.slice(1),
+      Body: data,
+    }
+    await s3.putObject(s3Params).promise()
+    logger.debug('Finished write to S3')
+  } else {
+    throw new Error('Unsupported URI protocol')
+  }
+}
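+
+// Usage sketch (illustrative only; the bucket and object names below are hypothetical
+// and not part of this change). The helpers above are meant to be awaited from oclif
+// command handlers, mirroring the converted commands:
+//
+//   const data = await readFileURI('s3://my-hdf-bucket/rhel7-results.json', 'utf8')
+//   await writeFileURI('s3://my-hdf-bucket/converted/rhel7-results.json', data)
+//
+// Anything that does not parse as a URL falls back to the synchronous fs implementation,
+// so plain local paths keep working. S3 access relies on the AWS SDK's default credential
+// chain (environment variables, shared credentials file, or an attached IAM role).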