From b784a200d1ea6e2745ad15e94f05f1c0d0a43b48 Mon Sep 17 00:00:00 2001
From: andytang99
Date: Wed, 31 Jul 2024 18:42:27 +0000
Subject: [PATCH] Address PR review comments on the trufflehog2hdf converter

---
 README.md                                    |  18 +
 src/commands/convert/index.ts                | 327 ++++++++++---------
 src/commands/convert/trufflehog2hdf.ts       |  36 +-
 test/commands/convert/trufflehog2hdf.test.ts | 290 ++++++++--------
 4 files changed, 349 insertions(+), 322 deletions(-)

diff --git a/README.md b/README.md
index 6df9a1803..e8c05f1d3 100644
--- a/README.md
+++ b/README.md
@@ -62,6 +62,7 @@ The SAF CLI is the successor to [Heimdall Tools](https://github.com/mitre/heimda
   * [SonarQube to HDF](#sonarqube-to-hdf)
   * [Splunk to HDF](#splunk-to-hdf)
   * [Trivy to HDF](#trivy-to-hdf)
+  * [Trufflehog to HDF](#trufflehog-to-hdf)
   * [Twistlock to HDF](#twistlock-to-hdf)
   * [Veracode to HDF](#veracode-to-hdf)
   * [XCCDF Results to HDF](#xccdf-results-to-hdf)
@@ -854,6 +855,23 @@ convert trivy2hdf Translate a Trivy-derived AWS Security Finding
 $ saf convert trivy2hdf -i trivy-asff.json -o output-folder
 ```
 
+[top](#convert-other-formats-to-hdf)
+#### Trufflehog to HDF
+```
+convert trufflehog2hdf Translate a Trufflehog output file into an HDF results set
+
+  USAGE
+    $ saf convert trufflehog2hdf -i <trufflehog-json> -o <hdf-scan-results-json>
+
+  FLAGS
+    -h, --help            Show CLI help.
+    -i, --input=<value>   (required) Input Trufflehog file
+    -o, --output=<value>  (required) Output HDF JSON File
+
+  EXAMPLES
+    $ saf convert trufflehog2hdf -i trufflehog.json -o output-hdf-name.json
+```
+
 [top](#convert-other-formats-to-hdf)
 #### Twistlock to HDF
 ```
diff --git a/src/commands/convert/index.ts b/src/commands/convert/index.ts
index 3166ae758..42f6267dc 100644
--- a/src/commands/convert/index.ts
+++ b/src/commands/convert/index.ts
@@ -18,42 +18,43 @@ import {
+  TrufflehogResults,
   TwistlockResults,
   XCCDFResultsMapper,
   ZapMapper,
-} from "@mitre/hdf-converters";
-import fs from "fs";
-import _ from "lodash";
-import { checkSuffix, convertFullPathToFilename } from "../../utils/global";
-import path from "path";
-import ASFF2HDF from "./asff2hdf";
-import { Command, Flags } from "@oclif/core";
-import Zap2HDF from "./zap2hdf";
+} from '@mitre/hdf-converters'
+import fs from 'fs'
+import _ from 'lodash'
+import {checkSuffix, convertFullPathToFilename} from '../../utils/global'
+import path from 'path'
+import ASFF2HDF from './asff2hdf'
+import {Command, Flags} from '@oclif/core'
+import Zap2HDF from './zap2hdf'
 
 function getInputFilename(): string {
   const inputFileIndex = process.argv.findIndex(
-    (param) => param.toLowerCase() === "-i" || param.toLowerCase() === "--input"
-  );
+    param => param.toLowerCase() === '-i' || param.toLowerCase() === '--input',
+  )
   if (inputFileIndex === -1) {
-    return process.env.INPUT_FILE ?? "";
+    return process.env.INPUT_FILE ?? 
'' } - return process.argv[inputFileIndex + 1]; + return process.argv[inputFileIndex + 1] } export default class Convert extends Command { static description = - "The generic convert command translates any supported file-based security results set into the Heimdall Data Format"; + 'The generic convert command translates any supported file-based security results set into the Heimdall Data Format'; - static examples = ["saf convert -i input -o output"]; + static examples = ['saf convert -i input -o output']; static flags = { input: Flags.string({ - char: "i", + char: 'i', required: true, - description: "Input results set file", + description: 'Input results set file', }), output: Flags.string({ - char: "o", + char: 'o', required: true, - description: "Output results sets", + description: 'Output results sets', }), ...Convert.getFlagsForInputFile(getInputFilename()), }; @@ -61,268 +61,277 @@ export default class Convert extends Command { static getFlagsForInputFile(filePath: string) { if (filePath) { Convert.detectedType = fingerprint({ - data: fs.readFileSync(filePath, "utf8"), + data: fs.readFileSync(filePath, 'utf8'), filename: convertFullPathToFilename(filePath), - }); + }) switch ( Convert.detectedType // skipcq: JS-0047 ) { - case "asff": { - return ASFF2HDF.flags; + case 'asff': { + return ASFF2HDF.flags } - case "zap": { - return Zap2HDF.flags; + case 'zap': { + return Zap2HDF.flags } - case "burp": - case "conveyor": - case "checklist": - case "dbProtect": - case "fortify": - case "jfrog": - case "nessus": - case "netsparker": - case "nikto": - case "prisma": - case "sarif": - case "scoutsuite": - case "snyk": - case "trufflehog": - case "twistlock": - case "xccdf": { - return {}; + case 'burp': + case 'conveyor': + case 'checklist': + case 'dbProtect': + case 'fortify': + case 'jfrog': + case 'nessus': + case 'netsparker': + case 'nikto': + case 'prisma': + case 'sarif': + case 'scoutsuite': + case 'snyk': + case 'trufflehog': + case 'twistlock': + case 'xccdf': { + return {} } } } - return {}; + return {} } static detectedType: string; async run() { // skipcq: JS-0044 - const { flags } = await this.parse(Convert); - let converter; + const {flags} = await this.parse(Convert) + let converter switch (Convert.detectedType) { - case "asff": { - let securityhub = _.get(flags, "securityhub") as string[]; + case 'asff': { + let securityhub = _.get(flags, 'securityhub') as string[] if (securityhub) { - securityhub = securityhub.map((file) => - fs.readFileSync(file, "utf8") - ); + securityhub = securityhub.map(file => + fs.readFileSync(file, 'utf8'), + ) } converter = new ASFFResults( - fs.readFileSync(flags.input, "utf8"), - securityhub - ); - const results = converter.toHdf(); + fs.readFileSync(flags.input, 'utf8'), + securityhub, + ) + const results = converter.toHdf() - fs.mkdirSync(flags.output); + fs.mkdirSync(flags.output) _.forOwn(results, (result, filename) => { fs.writeFileSync( path.join(flags.output, checkSuffix(filename)), - JSON.stringify(result) - ); - }); - break; + JSON.stringify(result), + ) + }) + break } - case "burp": { - converter = new BurpSuiteMapper(fs.readFileSync(flags.input, "utf8")); + case 'burp': { + converter = new BurpSuiteMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "conveyor": { - converter = new ConveyorResults(fs.readFileSync(flags.input, "utf8")); - const results = converter.toHdf(); - 
fs.mkdirSync(flags.output); + case 'conveyor': { + converter = new ConveyorResults(fs.readFileSync(flags.input, 'utf8')) + const results = converter.toHdf() + fs.mkdirSync(flags.output) for (const [filename, result] of Object.entries(results)) { fs.writeFileSync( path.join(flags.output, checkSuffix(filename as string)), - JSON.stringify(result) - ); + JSON.stringify(result), + ) } - break; + break } - case "checklist": { - converter = new ChecklistResults(fs.readFileSync(flags.input, "utf8")); + case 'checklist': { + converter = new ChecklistResults(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "dbProtect": { - converter = new DBProtectMapper(fs.readFileSync(flags.input, "utf8")); + case 'dbProtect': { + converter = new DBProtectMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "fortify": { - converter = new FortifyMapper(fs.readFileSync(flags.input, "utf8")); + case 'fortify': { + converter = new FortifyMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "jfrog": { - converter = new JfrogXrayMapper(fs.readFileSync(flags.input, "utf8")); + case 'jfrog': { + converter = new JfrogXrayMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "nessus": { - converter = new NessusResults(fs.readFileSync(flags.input, "utf8")); - const result = converter.toHdf(); + case 'nessus': { + converter = new NessusResults(fs.readFileSync(flags.input, 'utf8')) + const result = converter.toHdf() if (Array.isArray(result)) { for (const element of result) { fs.writeFileSync( - `${flags.output.replaceAll(/\.json/gi, "")}-${_.get( + `${flags.output.replaceAll(/\.json/gi, '')}-${_.get( element, - "platform.target_id" + 'platform.target_id', )}.json`, - JSON.stringify(element) - ); + JSON.stringify(element), + ) } } else { fs.writeFileSync( `${checkSuffix(flags.output)}`, - JSON.stringify(result) - ); + JSON.stringify(result), + ) } - break; + break } - case "netsparker": { - converter = new NetsparkerMapper(fs.readFileSync(flags.input, "utf8")); + case 'netsparker': { + converter = new NetsparkerMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "nikto": { - converter = new NiktoMapper(fs.readFileSync(flags.input, "utf8")); + case 'nikto': { + converter = new NiktoMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "prisma": { + case 'prisma': { converter = new PrismaMapper( - fs.readFileSync(flags.input, { encoding: "utf8" }) - ); - const results = converter.toHdf(); + fs.readFileSync(flags.input, {encoding: 'utf8'}), + ) + const results = converter.toHdf() - fs.mkdirSync(flags.output); - _.forOwn(results, (result) => { + fs.mkdirSync(flags.output) + _.forOwn(results, result => { fs.writeFileSync( path.join( flags.output, - `${_.get(result, 
"platform.target_id")}.json` + `${_.get(result, 'platform.target_id')}.json`, ), - JSON.stringify(result) - ); - }); - break; + JSON.stringify(result), + ) + }) + break } - case "sarif": { - converter = new SarifMapper(fs.readFileSync(flags.input, "utf8")); + case 'sarif': { + converter = new SarifMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "scoutsuite": { - converter = new ScoutsuiteMapper(fs.readFileSync(flags.input, "utf8")); + case 'scoutsuite': { + converter = new ScoutsuiteMapper(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "snyk": { - converter = new SnykResults(fs.readFileSync(flags.input, "utf8")); - const result = converter.toHdf(); + case 'snyk': { + converter = new SnykResults(fs.readFileSync(flags.input, 'utf8')) + const result = converter.toHdf() if (Array.isArray(result)) { for (const element of result) { fs.writeFileSync( - `${flags.output.replaceAll(/\.json/gi, "")}-${_.get( + `${flags.output.replaceAll(/\.json/gi, '')}-${_.get( element, - "platform.target_id" + 'platform.target_id', )}.json`, - JSON.stringify(element) - ); + JSON.stringify(element), + ) } } else { - fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(result)); + fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(result)) } - break; + break } - case "twistlock": { - converter = new TwistlockResults(fs.readFileSync(flags.input, "utf8")); + case 'trufflehog': { + converter = new TrufflehogResults(fs.readFileSync(flags.input, 'utf8')) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "xccdf": { + case 'twistlock': { + converter = new TwistlockResults(fs.readFileSync(flags.input, 'utf8')) + fs.writeFileSync( + checkSuffix(flags.output), + JSON.stringify(converter.toHdf()), + ) + break + } + + case 'xccdf': { converter = new XCCDFResultsMapper( - fs.readFileSync(flags.input, "utf8") - ); + fs.readFileSync(flags.input, 'utf8'), + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } - case "zap": { + case 'zap': { converter = new ZapMapper( - fs.readFileSync(flags.input, "utf8"), - _.get(flags, "name") as string - ); + fs.readFileSync(flags.input, 'utf8'), + _.get(flags, 'name') as string, + ) fs.writeFileSync( checkSuffix(flags.output), - JSON.stringify(converter.toHdf()) - ); - break; + JSON.stringify(converter.toHdf()), + ) + break } default: { throw new Error(`Unknown filetype provided: ${getInputFilename()} The generic convert command should only be used for taking supported file-based security results and converting into Heimdall Data Format - For more information, run "saf convert --help"`); + For more information, run "saf convert --help"`) } } } diff --git a/src/commands/convert/trufflehog2hdf.ts b/src/commands/convert/trufflehog2hdf.ts index ecb05269e..6a6050254 100644 --- a/src/commands/convert/trufflehog2hdf.ts +++ b/src/commands/convert/trufflehog2hdf.ts @@ -1,42 +1,42 @@ -import { Command, Flags } from "@oclif/core"; -import fs from "fs"; -import { TrufflehogResults as Mapper } from "@mitre/hdf-converters"; -import { checkSuffix } from "../../utils/global"; +import {Command, Flags} from 
'@oclif/core'
+import fs from 'fs'
+import {TrufflehogResults as Mapper} from '@mitre/hdf-converters'
+import {checkSuffix} from '../../utils/global'
 
 export default class Trufflehog2HDF extends Command {
   static usage =
-    "convert Trufflehog2hdf -i <trufflehog-json> -o <hdf-scan-results-json>";
+    'convert trufflehog2hdf -i <trufflehog-json> -o <hdf-scan-results-json>';
 
   static description =
-    "Translate a Trufflehog output file into an HDF results set";
+    'Translate a Trufflehog output file into an HDF results set';
 
   static examples = [
-    "saf convert Trufflehog2hdf -i Trufflehog.json -o output-hdf-name.json",
+    'saf convert trufflehog2hdf -i trufflehog.json -o output-hdf-name.json',
   ];
 
   static flags = {
-    help: Flags.help({ char: "h" }),
+    help: Flags.help({char: 'h'}),
     input: Flags.string({
-      char: "i",
+      char: 'i',
       required: true,
-      description: "Input Trufflehog file",
+      description: 'Input Trufflehog file',
     }),
     output: Flags.string({
-      char: "o",
+      char: 'o',
       required: true,
-      description: "Output HDF file",
+      description: 'Output HDF file',
     }),
-    "with-raw": Flags.boolean({ char: "w", required: false }),
+    'with-raw': Flags.boolean({char: 'w', required: false}),
   };
 
   async run() {
-    const { flags } = await this.parse(Trufflehog2HDF);
-    const input = fs.readFileSync(flags.input, "utf8");
+    const {flags} = await this.parse(Trufflehog2HDF)
+    const input = fs.readFileSync(flags.input, 'utf8')
 
-    const converter = new Mapper(input, flags["with-raw"]);
+    const converter = new Mapper(input, flags['with-raw'])
     fs.writeFileSync(
       checkSuffix(flags.output),
-      JSON.stringify(converter.toHdf())
-    );
+      JSON.stringify(converter.toHdf(), null, 2),
+    )
   }
 }
diff --git a/test/commands/convert/trufflehog2hdf.test.ts b/test/commands/convert/trufflehog2hdf.test.ts
index 62ee7d9d8..57febc27f 100644
--- a/test/commands/convert/trufflehog2hdf.test.ts
+++ b/test/commands/convert/trufflehog2hdf.test.ts
@@ -1,261 +1,261 @@
-import { expect, test } from "@oclif/test";
-import tmp from "tmp";
-import path from "path";
-import fs from "fs";
-import { omitHDFChangingFields } from "../utils";
+import {expect, test} from '@oclif/test'
+import tmp from 'tmp'
+import path from 'path'
+import fs from 'fs'
+import {omitHDFChangingFields} from '../utils'
 
-describe("Test Trufflehog", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog.json',
       ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
     ])
-    .it("hdf-converter output test", () => {
+    .it('hdf-converter output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
-          path.resolve("./test/sample_data/Trufflehog/trufflehog-hdf.json"),
-          "utf8"
-        )
-      );
+          path.resolve('./test/sample_data/Trufflehog/trufflehog-hdf.json'),
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
 
-describe("Test Trufflehog withraw flag", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog withraw flag', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog.json',
      ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
-      "-w",
+      '-w',
     ])
-    .it("hdf-converter withraw output test", () => {
+    .it('hdf-converter withraw output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
          path.resolve(
-            "./test/sample_data/Trufflehog/trufflehog-hdf-withraw.json"
+            './test/sample_data/Trufflehog/trufflehog-hdf-withraw.json',
          ),
-          "utf8"
-        )
-      );
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
 
-describe("Test Trufflehog Docker Example", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog Docker Example', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog_docker_example.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog_docker_example.json',
       ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
     ])
-    .it("hdf-converter output test", () => {
+    .it('hdf-converter output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogdockertest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
           path.resolve(
-            "./test/sample_data/Trufflehog/trufflehog-docker-hdf.json"
+            './test/sample_data/Trufflehog/trufflehog-docker-hdf.json',
          ),
-          "utf8"
-        )
-      );
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
 
-describe("Test Trufflehog docker example withraw flag", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog docker example withraw flag', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog_docker_example.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog_docker_example.json',
      ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
-      "-w",
+      '-w',
     ])
-    .it("hdf-converter withraw output test", () => {
+    .it('hdf-converter withraw output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
           path.resolve(
-            "./test/sample_data/Trufflehog/trufflehog-docker-hdf-withraw.json"
+            './test/sample_data/Trufflehog/trufflehog-docker-hdf-withraw.json',
          ),
-          "utf8"
-        )
-      );
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
 
-describe("Test Trufflehog json object", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog json object', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog-report-example.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog-report-example.json',
       ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
     ])
-    .it("hdf-converter output test", () => {
+    .it('hdf-converter output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
           path.resolve(
-            "./test/sample_data/Trufflehog/trufflehog-report-example-hdf.json"
+            './test/sample_data/Trufflehog/trufflehog-report-example-hdf.json',
          ),
-          "utf8"
-        )
-      );
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
 
-describe("Test Trufflehog json object withraw flag", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog json object withraw flag', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog-report-example.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog-report-example.json',
       ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
-      "-w",
+      '-w',
     ])
-    .it("hdf-converter withraw output test", () => {
+    .it('hdf-converter withraw output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
           path.resolve(
-            "./test/sample_data/Trufflehog/trufflehog-report-example-hdf-withraw.json"
+            './test/sample_data/Trufflehog/trufflehog-report-example-hdf-withraw.json',
          ),
-          "utf8"
-        )
-      );
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
 
-describe("Test Trufflehog saf example", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog saf example', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog_saf_example.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog_saf_example.json',
       ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
     ])
-    .it("hdf-converter output test", () => {
+    .it('hdf-converter output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
-          path.resolve("./test/sample_data/Trufflehog/trufflehog-saf-hdf.json"),
-          "utf8"
-        )
-      );
+          path.resolve('./test/sample_data/Trufflehog/trufflehog-saf-hdf.json'),
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
 
-describe("Test Trufflehog saf example withraw flag", () => {
-  const tmpobj = tmp.dirSync({ unsafeCleanup: true });
+describe('Test Trufflehog saf example withraw flag', () => {
+  const tmpobj = tmp.dirSync({unsafeCleanup: true})
 
   test
     .stdout()
     .command([
-      "convert Trufflehog2hdf",
-      "-i",
+      'convert trufflehog2hdf',
+      '-i',
       path.resolve(
-        "./test/sample_data/Trufflehog/sample_input_report/trufflehog_saf_example.json"
+        './test/sample_data/Trufflehog/sample_input_report/trufflehog_saf_example.json',
       ),
-      "-o",
+      '-o',
       `${tmpobj.name}/Trufflehogtest.json`,
-      "-w",
+      '-w',
     ])
-    .it("hdf-converter withraw output test", () => {
+    .it('hdf-converter withraw output test', () => {
       const converted = JSON.parse(
-        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, "utf8")
-      );
+        fs.readFileSync(`${tmpobj.name}/Trufflehogtest.json`, 'utf8'),
+      )
       const sample = JSON.parse(
         fs.readFileSync(
           path.resolve(
-            "./test/sample_data/Trufflehog/trufflehog-saf-hdf-withraw.json"
+            './test/sample_data/Trufflehog/trufflehog-saf-hdf-withraw.json',
          ),
-          "utf8"
-        )
-      );
+          'utf8',
+        ),
+      )
       expect(omitHDFChangingFields(converted)).to.eql(
-        omitHDFChangingFields(sample)
-      );
-    });
-});
+        omitHDFChangingFields(sample),
+      )
+    })
+})
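
---

Reviewer note (not part of the patch): the sketch below exercises the same `TrufflehogResults` mapper that `trufflehog2hdf.ts` wires up above, but outside the oclif command. It is a minimal illustration only; the input and output paths are hypothetical, and the second constructor argument mirrors the `-w` / `--with-raw` flag exactly as the command passes it in `run()`.

```typescript
import {TrufflehogResults} from '@mitre/hdf-converters'
import fs from 'fs'

// Read a TruffleHog JSON report (hypothetical path).
const input = fs.readFileSync('trufflehog.json', 'utf8')

// The second argument embeds the raw input in the HDF output,
// matching the CLI's -w / --with-raw flag.
const converter = new TrufflehogResults(input, false)

// toHdf() produces the Heimdall Data Format execution object;
// pretty-print with a 2-space indent, as the command now does.
fs.writeFileSync('trufflehog-hdf.json', JSON.stringify(converter.toHdf(), null, 2))
```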