Commit

Merge pull request #597 from mitre/twistlock_fix
swapped twistlock mapper to results
brett-w authored Sep 22, 2022
2 parents 872adeb + 8910135 commit 8bc9ffd
Showing 43 changed files with 178,538 additions and 142,948 deletions.
50 changes: 25 additions & 25 deletions package-lock.json

Some generated files are not rendered by default.

8 changes: 4 additions & 4 deletions package.json
@@ -1,14 +1,14 @@
 {
   "name": "@mitre/saf",
   "description": "The MITRE Security Automation Framework (SAF) Command Line Interface (CLI) brings together applications, techniques, libraries, and tools developed by MITRE and the security community to streamline security automation for systems and DevOps pipelines",
-  "version": "1.1.12",
+  "version": "1.1.13",
   "author": "The MITRE Security Automation Framework",
   "bin": "./bin/run",
   "bugs": "https://github.com/mitre/saf/issues",
   "dependencies": {
     "@aws-sdk/client-config-service": "^3.53.0",
-    "@mitre/hdf-converters": "^2.6.29",
-    "@mitre/heimdall-lite": "^2.6.29",
+    "@mitre/hdf-converters": "^2.6.31",
+    "@mitre/heimdall-lite": "^2.6.31",
     "@mitre/inspec-objects": "^0.0.18",
     "@oclif/core": "^1.6.0",
     "@oclif/plugin-help": "^5",
@@ -41,7 +41,7 @@
     "get-installed-path": "^4.0.8",
     "htmlparser2": "^8.0.1",
     "https": "^1.0.0",
-    "inspecjs": "^2.6.28",
+    "inspecjs": "^2.6.30",
     "lodash": "^4.17.21",
     "moment": "^2.29.1",
     "mustache": "^4.2.0",
10 changes: 5 additions & 5 deletions src/commands/convert/hdf2asff.ts
@@ -4,7 +4,7 @@ import https from 'https'
 import {FromHdfToAsffMapper as Mapper} from '@mitre/hdf-converters'
 import path from 'path'
 import AWS from 'aws-sdk'
-import {checkSuffix, sliceIntoChunks} from '../../utils/global'
+import {checkSuffix, convertFullPathToFilename} from '../../utils/global'
 import _ from 'lodash'
 import {BatchImportFindingsRequestFindingList} from 'aws-sdk/clients/securityhub'

@@ -40,23 +40,23 @@ export default class HDF2ASFF extends Command {
       }).toAsff()

       if (flags.output) {
-        const convertedSlices = sliceIntoChunks(converted, 100)
+        const convertedSlices = _.chunk(converted, 100) // AWS doesn't allow uploading more than 100 findings at a time so we need to split them into chunks
         const outputFolder = flags.output?.replace('.json', '') || 'asff-output'
         fs.mkdirSync(outputFolder)
         if (convertedSlices.length === 1) {
-          const outfilePath = path.join(outputFolder, checkSuffix(flags.output))
+          const outfilePath = path.join(outputFolder, convertFullPathToFilename(checkSuffix(flags.output)))
           fs.writeFileSync(outfilePath, JSON.stringify(convertedSlices[0]))
         } else {
           convertedSlices.forEach((slice, index) => {
-            const outfilePath = path.join(outputFolder, `${checkSuffix(flags.output || '').replace('.json', '')}.p${index}.json`)
+            const outfilePath = path.join(outputFolder, `${convertFullPathToFilename(checkSuffix(flags.output || '')).replace('.json', '')}.p${index}.json`)
             fs.writeFileSync(outfilePath, JSON.stringify(slice))
           })
         }
       }

       if (flags.upload) {
         const profileInfoFinding = converted.pop()
-        const convertedSlices = sliceIntoChunks(converted, 100)
+        const convertedSlices = _.chunk(converted, 100) as BatchImportFindingsRequestFindingList[]

         if (flags.insecure) {
           console.warn('WARNING: Using --insecure will make all connections to AWS open to MITM attacks, if possible pass a certificate file with --certificate')
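Note on the sliceIntoChunks removal: lodash's _.chunk (already a dependency in package.json above) provides the same batching behavior the deleted helper implemented by hand. Below is a minimal sketch of that behavior, using a hypothetical findings array that is not part of this PR, illustrating the 100-finding Security Hub batch limit mentioned in the inline comment.

import _ from 'lodash'

// Hypothetical findings list; Security Hub's BatchImportFindings accepts at
// most 100 findings per call, so the converter splits them into batches.
const findings = Array.from({length: 250}, (_unused, i) => ({Id: `finding-${i}`}))

const batches = _.chunk(findings, 100)
console.log(batches.length)     // 3
console.log(batches[0].length)  // 100
console.log(batches[2].length)  // 50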
4 changes: 2 additions & 2 deletions src/commands/convert/index.ts
@@ -1,4 +1,4 @@
-import {ASFFResults, BurpSuiteMapper, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TwistlockMapper, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
+import {ASFFResults, BurpSuiteMapper, DBProtectMapper, fingerprint, FortifyMapper, JfrogXrayMapper, NessusResults, NetsparkerMapper, NiktoMapper, PrismaMapper, SarifMapper, ScoutsuiteMapper, SnykResults, TwistlockResults, XCCDFResultsMapper, ZapMapper} from '@mitre/hdf-converters'
 import fs from 'fs'
 import _ from 'lodash'
 import {checkSuffix, convertFullPathToFilename} from '../../utils/global'
@@ -178,7 +178,7 @@ export default class Convert extends Command {
       }

       case 'twistlock': {
-        converter = new TwistlockMapper(fs.readFileSync(flags.input, 'utf8'))
+        converter = new TwistlockResults(fs.readFileSync(flags.input, 'utf8'))
         fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
         break
       }
2 changes: 1 addition & 1 deletion src/commands/convert/twistlock2hdf.ts
@@ -1,6 +1,6 @@
 import {Command, Flags} from '@oclif/core'
 import fs from 'fs'
-import {TwistlockMapper as Mapper} from '@mitre/hdf-converters'
+import {TwistlockResults as Mapper} from '@mitre/hdf-converters'
 import {checkInput, checkSuffix} from '../../utils/global'

 export default class Twistlock2HDF extends Command {
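For reference, a minimal usage sketch of the renamed class (the input and output file names here are hypothetical), mirroring the constructor and toHdf() calls visible in the diffs above:

import fs from 'fs'
import {TwistlockResults as Mapper} from '@mitre/hdf-converters'

// Read a Twistlock scan export and write the converted HDF results.
const converter = new Mapper(fs.readFileSync('twistlock-scan.json', 'utf8'))
fs.writeFileSync('twistlock-scan-hdf.json', JSON.stringify(converter.toHdf()))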
12 changes: 0 additions & 12 deletions src/utils/global.ts
@@ -17,18 +17,6 @@ export function checkSuffix(input: string) {
   return `${input}.json`
 }

-export function sliceIntoChunks(
-  arr: any[],
-  chunkSize: number,
-): any[][] {
-  const res = []
-  for (let i = 0; i < arr.length; i += chunkSize) {
-    res.push(arr.slice(i, i + chunkSize))
-  }
-
-  return res
-}
-
 export function convertFullPathToFilename(inputPath: string): string {
   let filePath = inputPath.split('/')
   const relativeFileName = filePath[filePath.length - 1]
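The hdf2asff change above relies on convertFullPathToFilename reducing a user-supplied output path to its final segment, so files land inside the ASFF output folder rather than at an absolute path. A short sketch of the expected behavior, assumed from the visible portion of the helper:

import {convertFullPathToFilename} from './src/utils/global'  // path relative to the repo root

convertFullPathToFilename('/tmp/scans/output.json')  // => 'output.json'
convertFullPathToFilename('output.json')             // => 'output.json'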
2 changes: 1 addition & 1 deletion test/commands/convert/convert.test.ts
@@ -16,7 +16,7 @@ describe('Test (generic) convert', () => {
   )

   const test = JSON.parse(fs.readFileSync(`${tmpobj.name}/asfftest/CIS AWS Foundations Benchmark v1.2.0.json`, 'utf8'))
-  const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/asff/asff-hdf.json'), 'utf8'))
+  const sample = JSON.parse(fs.readFileSync(path.resolve('./test/sample_data/asff/asff-cis_aws-foundations_benchmark_v1.2.0-hdf.json'), 'utf8'))
   expect(omitHDFChangingFields(test)).to.eql(omitHDFChangingFields(sample))
 })