Skip to content

Commit d0fc78a

Browse files
kemley76 and Amndeep7 authored
Checklist metadata validation and checklist mapper severities (#2750)
* input validation for checklist metadata Signed-off-by: kemley76 <kemley@mitre.org> * use hdf-converters in hdf2ckl Signed-off-by: kemley76 <kemley@mitre.org> * updated hdf2ckl tests Signed-off-by: kemley76 <kemley@mitre.org> * update tests based on changes to ckl mapper Signed-off-by: Kaden Emley <kemley@mitre.org> * update ckl metadata validation to use hdf-converters helper function Signed-off-by: Kaden Emley <kemley@mitre.org> * added ability to use local install of inspecjs Signed-off-by: Kaden Emley <kemley@mitre.org> * update checklist commands and tests Signed-off-by: Kaden Emley <kemley@mitre.org> * ensure threshold counts stay based off impact Signed-off-by: Kaden Emley <kemley@mitre.org> * added tests to ensure that converting with invalid metadata display an error message Signed-off-by: Kaden Emley <kemley@mitre.org> * use checklist types from hdf-converters Signed-off-by: Kaden Emley <kemley@mitre.org> * remove redundant code in hdf2ckl command Signed-off-by: Kaden Emley <kemley@mitre.org> * use inspecJS to convert impact to severity Signed-off-by: Kaden Emley <kemley@mitre.org> * use checklist types from hdf-converters Signed-off-by: Kaden Emley <kemley@mitre.org> * fix test data Signed-off-by: Kaden Emley <kemley@mitre.org> * enforce enum matching for user input in generate ckl_metadata command Signed-off-by: Kaden Emley <kemley@mitre.org> * add backwards compatibility for old checklist metadata format Signed-off-by: Kaden Emley <kemley@mitre.org> * remove debug statement Signed-off-by: Kaden Emley <kemley@mitre.org> * fix code smells Signed-off-by: Kaden Emley <kemley@mitre.org> * linting Signed-off-by: Kaden Emley <kemley@mitre.org> * format every output json file with 2 space indent Signed-off-by: Kaden Emley <kemley@mitre.org> * add flags for all metadata fields on hdf2ckl command Signed-off-by: Kaden Emley <kemley@mitre.org> * clarify instructions on ckl metadata generation Signed-off-by: Kaden Emley <kemley@mitre.org> * change formating from 4 
to 2 space indent Signed-off-by: Kaden Emley <kemley@mitre.org> * make version and release number optional in checklist metadata generation Signed-off-by: Kaden Emley <kemley@mitre.org> * update tests to reflect better formatted error messages Signed-off-by: Kaden Emley <kemley@mitre.org> * update markdown summary table to include row for severity: none Signed-off-by: Kaden Emley <kemley@mitre.org> * update code and tests to count N/A controls with severity other than none Signed-off-by: Kaden Emley <kemley@mitre.org> * fix code smells Signed-off-by: Kaden Emley <kemley@mitre.org> * revert addition of severity-none row to markdown summary table Signed-off-by: Kaden Emley <kemley@mitre.org> * remove heimdall version when running checklist tests Signed-off-by: Kaden Emley <kemley@mitre.org> * change return type of string | undefined to string | null Signed-off-by: Kaden Emley <kemley@mitre.org> --------- Signed-off-by: kemley76 <kemley@mitre.org> Signed-off-by: Kaden Emley <kemley@mitre.org> Co-authored-by: Amndeep Singh Mann <amann@mitre.org>
1 parent e867fb1 commit d0fc78a

File tree

71 files changed

+388745
-91346
lines changed

Some content is hidden

Large Commits have some content hidden by default. Use the searchbox below for content that may be hidden.

71 files changed

+388745
-91346
lines changed

README.md

+37-10
Original file line numberDiff line numberDiff line change
@@ -383,20 +383,47 @@ convert hdf2ckl Translate a Heimdall Data Format JSON file into a
383383
DISA checklist file
384384
385385
USAGE
386-
$ saf convert hdf2ckl -i <hdf-scan-results-json> -o <output-ckl> [-h] [-m <metadata>] [-H <hostname>] [-F <fqdn>] [-M <mac-address>] [-I <ip-address>]
386+
$ saf convert hdf2ckl -i <hdf-scan-results-json> -o <output-ckl> [-h] [-m <metadata>] [--profilename <value>] [--profiletitle <value>] [--version <value>] [--releasenumber <value>] [--releasedate <value>] [--marking <value>] [-H <value>] [-I <value>] [-M <value>] [-F <value>] [--targetcomment <value>] [--role Domain Controller|Member Server|None|Workstation] [--assettype Computing|Non-Computing] [--techarea |Application Review|Boundary Security|CDS Admin Review|CDS Technical Review|Database Review|Domain Name System (DNS)|Exchange Server|Host Based System Security (HBSS)|Internal Network|Mobility|Other Review|Releasable Networks (REL)|Releaseable Networks (REL)|Traditional Security|UNIX OS|VVOIP Review|Web Review|Windows OS] [--stigguid <value>] [--targetkey <value>] [--webdbsite <value> --webordatabase] [--webdbinstance <value> ] [--vulidmapping gid|id]
387387
388388
FLAGS
389-
-F, --fqdn=<fqdn> FQDN for CKL metadata
390-
-H, --hostname=<hostname> Hostname for CKL metadata
391-
-I, --ip=<ip-address> IP address for CKL metadata
392-
-M, --mac=<mac-address> MAC address for CKL metadata
393-
-h, --help Show CLI help.
394-
-i, --input=<hdf-scan-results-json> (required) Input HDF file
395-
-m, --metadata=<metadata> Metadata JSON file, generate one with "saf generate ckl_metadata"
396-
-o, --output=<output-ckl> (required) Output CKL file
389+
-h, --help Show CLI help.
390+
-i, --input=<value> (required) Input HDF file
391+
-o, --output=<value> (required) Output CKL file
392+
393+
CHECKLIST METADATA FLAGS
394+
-F, --fqdn=<value> Fully Qualified Domain Name
395+
-H, --hostname=<value> The name assigned to the asset within the network
396+
-I, --ip=<value> IP address
397+
-M, --mac=<value> MAC address
398+
-m, --metadata=<value> Metadata JSON file, generate one with "saf generate ckl_metadata"
399+
--assettype=<option> The category or classification of the asset
400+
<options: Computing|Non-Computing>
401+
--marking=<value> A security classification or designation of the asset, indicating its sensitivity level
402+
--profilename=<value> Profile name
403+
--profiletitle=<value> Profile title
404+
--releasedate=<value> Profile release date
405+
--releasenumber=<value> Profile release number
406+
--role=<option> The primary function or role of the asset within the network or organization
407+
<options: Domain Controller|Member Server|None|Workstation>
408+
--stigguid=<value> A unique identifier associated with the STIG for the asset
409+
--targetcomment=<value> Additional comments or notes about the asset
410+
--targetkey=<value> A unique key or identifier for the asset within the checklist or inventory system
411+
--techarea=<option> The technical area or domain to which the asset belongs
412+
<options: |Application Review|Boundary Security|CDS Admin Review|CDS Technical Review|Database Review|Domain Name System (DNS)|Exchange Server|Host Based System Security (HBSS)|Internal Network|Mobility|Other Review|Releasable Networks (REL)|Releaseable Networks (REL)|Traditional Security|UNIX OS|VVOIP Review|Web Review|Windows OS>
413+
--version=<value> Profile version number
414+
--vulidmapping=<option> Which type of control identifier to map to the checklist ID
415+
<options: gid|id>
416+
--webdbinstance=<value> The specific instance of the web application or database running on the server
417+
--webdbsite=<value> The specific site or application hosted on the web or database server
418+
--webordatabase Indicates whether the STIG is primarily for either a web or database server
419+
420+
DESCRIPTION
421+
Translate a Heimdall Data Format JSON file into a DISA checklist file
397422
398423
EXAMPLES
399-
$ saf convert hdf2ckl -i rhel7-results.json -o rhel7.ckl --fqdn reverseproxy.example.org --hostname reverseproxy --ip 10.0.0.3 --mac 12:34:56:78:90
424+
$ saf convert hdf2ckl -i rhel7-results.json -o rhel7.ckl --fqdn reverseproxy.example.org --hostname reverseproxy --ip 10.0.0.3 --mac 12:34:56:78:90:AB
425+
426+
$ saf convert hdf2ckl -i rhel8-results.json -o rhel8.ckl -m rhel8-metadata.json
400427
```
401428
[top](#convert-hdf-to-other-formats)
402429
#### HDF to CSV

pack-inspecjs.bat

+56
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,56 @@
1+
ECHO OFF
2+
3+
SET CYPRESS_INSTALL_BINARY=0
4+
SET PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true
5+
6+
SET original_dir=%cd%
7+
ECHO %original_dir%
8+
9+
IF DEFINED npm_config_heimdall (
10+
CD %npm_config_heimdall%/libs/inspecjs/
11+
) ELSE (
12+
CD ../heimdall2/libs/inspecjs/
13+
)
14+
15+
IF DEFINED npm_config_branch (
16+
CALL git switch %npm_config_branch% || EXIT /B %ERRORLEVEL%
17+
) ELSE (
18+
CALL git switch master || EXIT /B %ERRORLEVEL%
19+
)
20+
21+
ECHO Executing - git fetch ...
22+
CALL git fetch || EXIT /B %ERRORLEVEL%
23+
24+
ECHO Executing - git pull ...
25+
CALL git pull || EXIT /B %ERRORLEVEL%
26+
27+
ECHO Executing - yarn install ...
28+
CALL yarn install || EXIT /B %ERRORLEVEL%
29+
30+
ECHO Executing - yarn pack ...
31+
CALL yarn pack || EXIT /B %ERRORLEVEL%
32+
33+
ECHO Finished generating the tarball
34+
35+
CD %original_dir%
36+
37+
ECHO Executing - npm install remote ...
38+
CALL npm i || EXIT /B %ERRORLEVEL%
39+
40+
ECHO Executing - npm install local ...
41+
42+
IF DEFINED npm_config_heimdall (
43+
FOR /f "tokens=*" %%a IN ('dir /b %npm_config_heimdall%\libs\inspecjs\inspecjs-v*.tgz') DO (
44+
SET THIS_TAR_ZIP=%npm_config_heimdall%\libs\inspecjs\%%a
45+
)
46+
) ELSE (
47+
FOR /f "tokens=*" %%a IN ('dir /b ..\heimdall2\libs\inspecjs\inspecjs-v*.tgz') DO (
48+
SET THIS_TAR_ZIP=..\heimdall2\libs\inspecjs\%%a
49+
)
50+
)
51+
CALL npm i %THIS_TAR_ZIP% || EXIT /B %ERRORLEVEL%
52+
53+
ECHO Executing - npm run prepack ...
54+
CALL npm run prepack || EXIT /B %ERRORLEVEL%
55+
56+
ECHO Install of local inspecjs complete.

pack-inspecjs.sh

+40
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,40 @@
1+
#!/bin/bash
2+
3+
set -o errexit # abort on nonzero exitstatus
4+
set -o nounset # abort on unbound variable
5+
set -o pipefail # don't hide errors within pipes
6+
7+
ORIGINAL=$PWD
8+
echo $ORIGINAL
9+
10+
cd "${npm_config_heimdall:-../heimdall2}"
11+
cd libs/inspecjs
12+
13+
git switch "${npm_config_branch:-master}"
14+
15+
echo "Executing - git fetch ..."
16+
git fetch
17+
18+
echo "Executing - git pull ..."
19+
git pull
20+
21+
echo "Executing - yarn install ..."
22+
CYPRESS_INSTALL_BINARY=0 PUPPETEER_SKIP_CHROMIUM_DOWNLOAD=true yarn install
23+
24+
echo "Executing - yarn pack ..."
25+
yarn pack
26+
27+
echo "Finished generating the tarball"
28+
29+
cd "$ORIGINAL"
30+
31+
echo "Executing - npm install remote ..."
32+
npm i
33+
34+
echo "Executing - npm install local ..."
35+
npm i "${npm_config_heimdall:-../heimdall2}/libs/inspecjs/inspecjs-v"*".tgz"
36+
37+
echo "Executing - npm run prepack ..."
38+
npm run prepack
39+
40+
echo "Install of local inspecjs complete."

package.json

+4-1
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,10 @@
196196
"prepack:darwin:linux": "rm -rf lib && tsc",
197197
"pack-hdf-converters": "run-script-os",
198198
"pack-hdf-converters:win32": "pack-hdf-converters.bat",
199-
"pack-hdf-converters:darwin:linux": "./pack-hdf-converters.sh"
199+
"pack-hdf-converters:darwin:linux": "./pack-hdf-converters.sh",
200+
"pack-inspecjs": "run-script-os",
201+
"pack-inspecjs:win32": "pack-inspecjs.bat",
202+
"pack-inspecjs:darwin:linux": "./pack-inspecjs.sh"
200203
},
201204
"types": "lib/index.d.ts",
202205
"jest": {

src/commands/convert/asff2hdf.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -196,7 +196,7 @@ export default class ASFF2HDF extends Command {
196196
_.forOwn(results, (result, filename) => {
197197
fs.writeFileSync(
198198
path.join(flags.output, checkSuffix(filename)),
199-
JSON.stringify(result),
199+
JSON.stringify(result, null, 2),
200200
)
201201
})
202202
}

src/commands/convert/aws_config2hdf.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -57,6 +57,6 @@ export default class AWSConfig2HDF extends Command {
5757
region: flags.region,
5858
}, !flags.insecure, flags.certificate ? fs.readFileSync(flags.certificate, 'utf8') : undefined) : new Mapper({region: flags.region}, !flags.insecure, flags.certificate ? fs.readFileSync(flags.certificate, 'utf8') : undefined)
5959

60-
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(this.ensureRefs(await converter.toHdf())))
60+
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(this.ensureRefs(await converter.toHdf()), null, 2))
6161
}
6262
}

src/commands/convert/burpsuite2hdf.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,6 @@ export default class Burpsuite2HDF extends Command {
2525
checkInput({data, filename: flags.input}, 'burp', 'BurpSuite Pro XML')
2626

2727
const converter = new Mapper(data, flags['with-raw'])
28-
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
28+
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
2929
}
3030
}

src/commands/convert/ckl2hdf.ts

+6-2
Original file line numberDiff line numberDiff line change
@@ -23,7 +23,11 @@ export default class CKL2HDF extends Command {
2323
const data = fs.readFileSync(flags.input, 'utf8')
2424
checkInput({data, filename: flags.input}, 'checklist', 'DISA Checklist')
2525

26-
const converter = new Mapper(data, flags['with-raw'])
27-
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
26+
try {
27+
const converter = new Mapper(data, flags['with-raw'])
28+
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
29+
} catch (error) {
30+
console.error(`Error converting to hdf:\n${error}`)
31+
}
2832
}
2933
}

src/commands/convert/conveyor2hdf.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -29,7 +29,7 @@ export default class Conveyor2HDF extends Command {
2929
for (const [filename, result] of Object.entries(results)) {
3030
fs.writeFileSync(
3131
path.join(flags.output, checkSuffix(filename)),
32-
JSON.stringify(result),
32+
JSON.stringify(result, null, 2),
3333
)
3434
}
3535
}

src/commands/convert/dbprotect2hdf.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,6 @@ export default class DBProtect2HDF extends Command {
2525
checkInput({data, filename: flags.input}, 'dbProtect', 'DBProtect report in "Check Results Details" XML format')
2626

2727
const converter = new Mapper(data, flags['with-raw'])
28-
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
28+
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
2929
}
3030
}

src/commands/convert/fortify2hdf.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,6 @@ export default class Fortify2HDF extends Command {
2525
checkInput({data, filename: flags.input}, 'fortify', 'Fortify results FVDL file')
2626

2727
const converter = new Mapper(data, flags['with-raw'])
28-
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
28+
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
2929
}
3030
}

src/commands/convert/gosec2hdf.ts

+1-1
Original file line numberDiff line numberDiff line change
@@ -25,6 +25,6 @@ export default class Gosec2HDF extends Command {
2525
checkInput({data, filename: flags.input}, 'gosec', 'gosec results JSON')
2626

2727
const converter = new Mapper(data, flags['with-raw'])
28-
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf()))
28+
fs.writeFileSync(checkSuffix(flags.output), JSON.stringify(converter.toHdf(), null, 2))
2929
}
3030
}

src/commands/convert/hdf2asff.ts

+2-2
Original file line numberDiff line numberDiff line change
@@ -45,11 +45,11 @@ export default class HDF2ASFF extends Command {
4545
fs.mkdirSync(outputFolder)
4646
if (convertedSlices.length === 1) {
4747
const outfilePath = path.join(outputFolder, convertFullPathToFilename(checkSuffix(flags.output)))
48-
fs.writeFileSync(outfilePath, JSON.stringify(convertedSlices[0]))
48+
fs.writeFileSync(outfilePath, JSON.stringify(convertedSlices[0], null, 2))
4949
} else {
5050
convertedSlices.forEach((slice, index) => {
5151
const outfilePath = path.join(outputFolder, `${convertFullPathToFilename(checkSuffix(flags.output || '')).replace('.json', '')}.p${index}.json`)
52-
fs.writeFileSync(outfilePath, JSON.stringify(slice))
52+
fs.writeFileSync(outfilePath, JSON.stringify(slice, null, 2))
5353
})
5454
}
5555
}

0 commit comments

Comments
 (0)