From 475616a35957d265716d46eb1aefd3f41cd80285 Mon Sep 17 00:00:00 2001 From: csmig <33138761+csmig@users.noreply.github.com> Date: Thu, 9 Mar 2023 20:27:05 +0000 Subject: [PATCH] breaking: MySQL Service and OAS updates for STIG revision data, including rules, checks and fixes (#936) --- api/source/controllers/STIG.js | 8 +- api/source/service/mysql/AssetService.js | 138 +- api/source/service/mysql/CollectionService.js | 163 ++- api/source/service/mysql/ReviewService.js | 50 +- api/source/service/mysql/STIGService.js | 770 +++++------ api/source/service/mysql/migrations/0020.js | 190 +++ api/source/service/mysql/utils.js | 19 +- api/source/specification/stig-manager.yaml | 68 +- api/source/utils/parsers.js | 44 +- client/src/js/SM/Global.js | 8 +- client/src/js/SM/Library.js | 75 +- client/src/js/SM/StigRevision.js | 39 +- client/src/js/collectionReview.js | 2 +- client/src/js/review.js | 2 +- docs/admin-guide/admin-guide.rst | 3 + docs/admin-guide/admin-quickstart.rst | 4 +- docs/reference/index.rst | 4 +- .../U_VPN_SRG-OTHER_V1R1_Manual-xccdf.xml | 43 + test/api/postman_collection.json | 1178 +++++++++++++++-- test/api/postman_environment.json | 4 +- 20 files changed, 1893 insertions(+), 919 deletions(-) create mode 100644 api/source/service/mysql/migrations/0020.js create mode 100644 test/api/form-data-files/U_VPN_SRG-OTHER_V1R1_Manual-xccdf.xml diff --git a/api/source/controllers/STIG.js b/api/source/controllers/STIG.js index 059a3402e..7a77b0a75 100644 --- a/api/source/controllers/STIG.js +++ b/api/source/controllers/STIG.js @@ -8,12 +8,13 @@ const STIG = require(`../service/${config.database.type}/STIGService`) module.exports.importBenchmark = async function importManualBenchmark (req, res, next) { try { const extension = req.file.originalname.substring(req.file.originalname.lastIndexOf(".")+1) + const clobber = req.query.clobber ?? 
false if (extension.toLowerCase() != 'xml') { throw new SmError.ClientError(`File extension .${extension} not supported`) } let benchmark try { - benchmark = await parsers.benchmarkFromXccdf(req.file.buffer) + benchmark = parsers.benchmarkFromXccdf(req.file.buffer) } catch(err){ throw new SmError.ClientError(err.message) @@ -21,9 +22,8 @@ module.exports.importBenchmark = async function importManualBenchmark (req, res, if (benchmark.scap) { throw new SmError.UnprocessableError('SCAP Benchmarks are not imported.') } - const revision = await STIG.insertManualBenchmark(benchmark, res.svcStatus) - const response = await STIG.getRevisionByString(revision.benchmarkId, revision.revisionStr) - res.json(response) + const revision = await STIG.insertManualBenchmark(benchmark, clobber, res.svcStatus) + res.json(revision) } catch(err) { next(err) diff --git a/api/source/service/mysql/AssetService.js b/api/source/service/mysql/AssetService.js index 71d3cb1bc..030b3bf0c 100644 --- a/api/source/service/mysql/AssetService.js +++ b/api/source/service/mysql/AssetService.js @@ -419,12 +419,12 @@ exports.queryChecklist = async function (inProjection, inPredicates, elevate, us try { const columns = [ 'CAST(:assetId as char) as "assetId"', - 'r.ruleId', - 'r.title as "ruleTitle"', - 'r.version', - 'g.groupId', - 'g.title as "groupTitle"', - 'r.severity', + 'rgr.ruleId', + 'rgr.title as "ruleTitle"', + 'rgr.version', + 'rgr.groupId', + 'rgr.groupTitle', + 'rgr.severity', `result.api as "result"`, `CASE WHEN review.resultEngine = 0 THEN NULL ELSE review.resultEngine END as resultEngine`, `review.autoResult`, @@ -432,11 +432,8 @@ exports.queryChecklist = async function (inProjection, inPredicates, elevate, us ] const joins = [ 'current_rev rev', - 'left join rev_group_map rg on rev.revId = rg.revId', - 'left join `group` g on rg.groupId=g.groupId', - 'left join rev_group_rule_map rgr on rg.rgId=rgr.rgId', - 'left join rule r on rgr.ruleId=r.ruleId', - 'left join review on r.ruleId = review.ruleId and review.assetId = :assetId', + 'left join rev_group_rule_map rgr using (revId)', + 'left join review on rgr.ruleId = review.ruleId and review.assetId = :assetId', 'left join result on review.resultId=result.resultId', 'left join status on review.statusId=status.statusId' ] @@ -459,11 +456,7 @@ exports.queryChecklist = async function (inProjection, inPredicates, elevate, us predicates.binds.revId = revId } const groupBy = [ - 'r.ruleId', - 'r.title', - 'g.groupId', - 'g.title', - 'r.severity', + 'rgr.rgrId', 'result.api', 'review.autoResult', 'status.api', @@ -473,7 +466,7 @@ exports.queryChecklist = async function (inProjection, inPredicates, elevate, us 'review.comment' ] const orderBy = [ - 'substring(g.groupId from 3) + 0' + 'substring(rgr.groupId from 3) + 0' ] const sql = dbUtils.makeQueryString({columns, joins, predicates, groupBy, orderBy}) @@ -591,78 +584,54 @@ exports.cklFromAssetStigs = async function cklFromAssetStigs (assetId, stigs, el const sqlGetAsset = "select name, fqdn, ip, mac, noncomputing, metadata from asset where assetId = ?" 
const sqlGetChecklist =`SELECT - g.groupId, - r.severity, - g.title as "groupTitle", - r.ruleId, - r.title as "ruleTitle", - r.weight, - r.version, - r.vulnDiscussion, - r.iaControls, - r.falsePositives, - r.falseNegatives, - r.documentable, - r.mitigations, - r.potentialImpacts, - r.thirdPartyTools, - r.mitigationControl, - r.responsibility, - r.severityOverrideGuidance, + rgr.groupId, + rgr.severity, + rgr.groupTitle, + rgr.ruleId, + rgr.title as "ruleTitle", + rgr.weight, + rgr.version, + rgr.vulnDiscussion, + rgr.iaControls, + rgr.falsePositives, + rgr.falseNegatives, + rgr.documentable, + rgr.mitigations, + rgr.potentialImpacts, + rgr.thirdPartyTools, + rgr.mitigationControl, + rgr.responsibility, + rgr.severityOverrideGuidance, result.ckl as "result", LEFT(review.detail,32767) as "detail", LEFT(review.comment,32767) as "comment", - MAX(cc.content) as "checkContent", - MAX(fix.text) as "fixText", - group_concat(rcc.cci ORDER BY rcc.cci) as "ccis" + cc.content as "checkContent", + ft.text as "fixText", + group_concat(rgrcc.cci ORDER BY rgrcc.cci) as "ccis" FROM revision rev - left join rev_group_map rg on rev.revId = rg.revId - left join \`group\` g on rg.groupId = g.groupId - - left join rev_group_rule_map rgr on rg.rgId = rgr.rgId - left join rule r on rgr.ruleId = r.ruleId - left join severity_cat_map sc on r.severity = sc.severity + left join rev_group_rule_map rgr on rev.revId = rgr.revId + left join severity_cat_map sc on rgr.severity = sc.severity - left join rule_cci_map rcc on rgr.ruleId = rcc.ruleId + left join rev_group_rule_cci_map rgrcc on rgr.rgrId = rgrcc.rgrId - left join rev_group_rule_check_map rgrc on rgr.rgrId = rgrc.rgrId - left join \`check\` c on rgrc.checkId = c.checkId - left join check_content cc on c.ccId = cc.ccId + left join check_content cc on rgr.checkDigest = cc.digest - left join rev_group_rule_fix_map rgrf on rgr.rgrId = rgrf.rgrId - left join fix on rgrf.fixId = fix.fixId + left join fix_text ft on rgr.fixDigest = ft.digest - left join review on r.ruleId = review.ruleId and review.assetId = ? + left join review on rgr.ruleId = review.ruleId and review.assetId = ? left join result on review.resultId = result.resultId left join status on review.statusId = status.statusId WHERE rev.revId = ? GROUP BY - g.groupId, - r.severity, - g.title, - r.ruleId, - r.title, - r.weight, - r.version, - r.vulnDiscussion, - r.iaControls, - r.falsePositives, - r.falseNegatives, - r.documentable, - r.mitigations, - r.potentialImpacts, - r.thirdPartyTools, - r.mitigationControl, - r.responsibility, - r.severityOverrideGuidance, + rgr.rgrId, result.ckl, review.detail, review.comment order by - substring(g.groupId from 3) + 0 asc + substring(rgr.groupId from 3) + 0 asc ` connection = await dbUtils.pool.getConnection() @@ -831,14 +800,14 @@ exports.xccdfFromAssetStig = async function (assetId, benchmarkId, revisionStr = // queries and query methods const sqlGetAsset = "select name, fqdn, ip, mac, noncomputing, metadata from asset where assetId = ?" 
const sqlGetChecklist =`SELECT - g.groupId, - g.title as "groupTitle", - r.ruleId, - r.title as "ruleTitle", - r.severity, - r.weight, - r.version, - c.checkId, + rgr.groupId, + rgr.groupTitle, + rgr.ruleId, + rgr.title as "ruleTitle", + rgr.severity, + rgr.weight, + rgr.version, + rgr.checkSystem, cc.content as "checkContent", result.api as "result", review.ts, @@ -847,20 +816,15 @@ exports.xccdfFromAssetStig = async function (assetId, benchmarkId, revisionStr = review.resultEngine FROM revision rev - left join rev_group_map rg on rev.revId = rg.revId - left join \`group\` g on rg.groupId = g.groupId - left join rev_group_rule_map rgr on rg.rgId = rgr.rgId - left join rule r on rgr.ruleId = r.ruleId - left join rev_group_rule_check_map rgrc on rgr.rgrId = rgrc.rgrId - left join \`check\` c on rgrc.checkId = c.checkId - left join check_content cc on c.ccId = cc.ccId - left join review on r.ruleId = review.ruleId and review.assetId = ? + left join rev_group_rule_map rgr on rev.revId = rgr.revId + left join check_content cc on rgr.checkDigest = cc.digest + left join review on rgr.ruleId = review.ruleId and review.assetId = ? left join result on review.resultId = result.resultId left join status on review.statusId = status.statusId WHERE rev.revId = ? order by - substring(g.groupId from 3) + 0 asc + substring(rgr.groupId from 3) + 0 asc ` async function getBenchmarkRevision(connection, benchmarkId, revisionStr) { let revisionStrResolved diff --git a/api/source/service/mysql/CollectionService.js b/api/source/service/mysql/CollectionService.js index 1b57bf977..05f3d4a18 100644 --- a/api/source/service/mysql/CollectionService.js +++ b/api/source/service/mysql/CollectionService.js @@ -218,31 +218,31 @@ exports.queryFindings = async function (aggregator, inProjection = [], inPredica switch (aggregator) { case 'ruleId': columns = [ - 'ru.ruleId', - 'ru.title', - 'ru.severity', + 'rgr.ruleId', + 'rgr.title', + 'rgr.severity', 'count(distinct a.assetId) as assetCount' ] groupBy = [ - 'ru.ruleId', - 'ru.title', - 'ru.severity' + 'rgr.ruleId', + 'rgr.title', + 'rgr.severity' ] - orderBy = 'ru.ruleId' + orderBy = 'rgr.ruleId' break case 'groupId': columns = [ - 'g.groupId', - 'g.title', - 'g.severity', + 'rgr.groupId', + 'rgr.groupTitle as title', + 'rgr.severity', 'count(distinct a.assetId) as assetCount' ] groupBy = [ - 'g.groupId', - 'g.title', - 'g.severity' + 'rgr.groupId', + 'rgr.groupTitle', + 'rgr.severity' ] - orderBy = 'substring(g.groupId from 3) + 0' + orderBy = 'substring(rgr.groupId from 3) + 0' break case 'cci': columns = [ @@ -265,13 +265,11 @@ exports.queryFindings = async function (aggregator, inProjection = [], inPredica 'left join asset a on c.collectionId = a.collectionId', 'inner join stig_asset_map sa on a.assetId = sa.assetId', 'left join user_stig_asset_map usa on sa.saId = usa.saId', - 'inner join current_group_rule cgr on sa.benchmarkId = cgr.benchmarkId', 'inner join current_rev cr on sa.benchmarkId = cr.benchmarkId', - 'inner join review rv on (cgr.ruleId = rv.ruleId and a.assetId = rv.assetId and rv.resultId = 4)', - 'inner join `group` g on cgr.groupId = g.groupId', - 'inner join rule ru on rv.ruleId = ru.ruleId', - 'left join rule_cci_map rulecci on ru.ruleId = rulecci.ruleId', - 'left join cci on rulecci.cci = cci.cci' + 'inner join rev_group_rule_map rgr using (revId)', + 'inner join rev_group_rule_cci_map rgrcc using (rgrId)', + 'inner join review rv on (rgr.ruleId = rv.ruleId and a.assetId = rv.assetId and rv.resultId = 4)', + 'left join cci on rgrcc.cci = cci.cci' ] 
// PROJECTIONS @@ -279,10 +277,10 @@ exports.queryFindings = async function (aggregator, inProjection = [], inPredica // Not exposed in API, used internally if (inProjection.includes('rulesWithDiscussion')) { columns.push(`cast(concat('[', group_concat(distinct json_object ( - 'ruleId', ru.ruleId, - 'title', ru.title, - 'severity', ru.severity, - 'vulnDiscussion', ru.vulnDiscussion) order by ru.ruleId), ']') as json) as "rules"`) + 'ruleId', rgr.ruleId, + 'title', rgr.title, + 'severity', rgr.severity, + 'vulnDiscussion', rgr.vulnDiscussion) order by rgr.ruleId), ']') as json) as "rules"`) } // Not exposed in API, used internally if (inProjection.includes('stigsInfo')) { @@ -294,16 +292,16 @@ exports.queryFindings = async function (aggregator, inProjection = [], inPredica } if (inProjection.includes('rules')) { columns.push(`cast(concat('[', group_concat(distinct json_object ( - 'ruleId', ru.ruleId, - 'title', ru.title, - 'version', ru.version, - 'severity', ru.severity) order by ru.ruleId), ']') as json) as "rules"`) + 'ruleId', rgr.ruleId, + 'title', rgr.title, + 'version', rgr.version, + 'severity', rgr.severity) order by rgr.ruleId), ']') as json) as "rules"`) } if (inProjection.includes('groups')) { columns.push(`cast(concat('[', group_concat(distinct json_object ( - 'groupId', g.groupId, - 'title', g.title, - 'severity', g.severity) order by g.groupId), ']') as json) as "groups"`) + 'groupId', rgr.groupId, + 'title', rgr.groupTitle, + 'severity', rgr.groupSeverity) order by rgr.groupId), ']') as json) as "groups"`) } if (inProjection.includes('assets')) { columns.push(`cast(concat('[', group_concat(distinct json_object ( @@ -311,13 +309,13 @@ exports.queryFindings = async function (aggregator, inProjection = [], inPredica 'name', a.name) order by a.name), ']') as json) as "assets"`) } if (inProjection.includes('stigs')) { - columns.push(`cast( concat( '[', group_concat(distinct concat('"',cgr.benchmarkId,'"')), ']' ) as json ) as "stigs"`) + columns.push(`cast( concat( '[', group_concat(distinct concat('"',cr.benchmarkId,'"')), ']' ) as json ) as "stigs"`) } if (inProjection.includes('ccis')) { columns.push(`cast(concat('[', group_concat(distinct json_object ( - 'cci', rulecci.cci, + 'cci', cci.cci, 'definition', cci.definition, - 'apAcronym', cci.apAcronym) order by rulecci.cci), ']') as json) as "ccis"`) + 'apAcronym', cci.apAcronym) order by cci.cci), ']') as json) as "ccis"`) } @@ -341,7 +339,7 @@ exports.queryFindings = async function (aggregator, inProjection = [], inPredica predicates.binds.push( 3 ) } if ( inPredicates.benchmarkId ) { - predicates.statements.push('cgr.benchmarkId = ?') + predicates.statements.push('cr.benchmarkId = ?') predicates.binds.push( inPredicates.benchmarkId ) } predicates.statements.push('(cg.userId = ? 
AND CASE WHEN cg.accessLevel = 1 THEN usa.userId = cg.userId ELSE TRUE END)') @@ -713,12 +711,9 @@ exports.getChecklistByCollectionStig = async function (collectionId, benchmarkId try { const joins = [ 'asset a', - 'left join stig_asset_map sa on a.assetId=sa.assetId', - 'left join current_rev rev on sa.benchmarkId=rev.benchmarkId', - 'left join rev_group_map rg on rev.revId=rg.revId', - 'left join `group` g on rg.groupId=g.groupId', - 'left join rev_group_rule_map rgr on rg.rgId=rgr.rgId', - 'left join rule rules on rgr.ruleId=rules.ruleId', + 'left join stig_asset_map sa using (assetId)', + 'left join current_rev rev using (benchmarkId)', + 'left join rev_group_rule_map rgr using (revId)', 'left join review r on (rgr.ruleId=r.ruleId and sa.assetId=r.assetId)' ] @@ -759,35 +754,35 @@ exports.getChecklistByCollectionStig = async function (collectionId, benchmarkId const sql = ` select - r.ruleId - ,r.ruleTitle - ,r.groupId - ,r.groupTitle - ,r.version - ,r.severity + innerR.ruleId + ,innerR.ruleTitle + ,innerR.groupId + ,innerR.groupTitle + ,innerR.version + ,innerR.severity ,json_object( 'results', json_object( - 'pass', sum(CASE WHEN r.resultId = 3 THEN 1 ELSE 0 END), - 'fail', sum(CASE WHEN r.resultId = 4 THEN 1 ELSE 0 END), - 'notapplicable', sum(CASE WHEN r.resultId = 2 THEN 1 ELSE 0 END), - 'other', sum(CASE WHEN r.resultId is null OR (r.resultId != 2 AND r.resultId != 3 AND r.resultId != 4) THEN 1 ELSE 0 END) + 'pass', sum(CASE WHEN innerR.resultId = 3 THEN 1 ELSE 0 END), + 'fail', sum(CASE WHEN innerR.resultId = 4 THEN 1 ELSE 0 END), + 'notapplicable', sum(CASE WHEN innerR.resultId = 2 THEN 1 ELSE 0 END), + 'other', sum(CASE WHEN innerR.resultId is null OR (innerR.resultId != 2 AND innerR.resultId != 3 AND innerR.resultId != 4) THEN 1 ELSE 0 END) ), 'statuses', json_object( - 'saved', sum(CASE WHEN r.statusId = 0 THEN 1 ELSE 0 END), - 'submitted', sum(CASE WHEN r.statusId = 1 THEN 1 ELSE 0 END), - 'rejected', sum(CASE WHEN r.statusId = 2 THEN 1 ELSE 0 END), - 'accepted', sum(CASE WHEN r.statusId = 3 THEN 1 ELSE 0 END) + 'saved', sum(CASE WHEN innerR.statusId = 0 THEN 1 ELSE 0 END), + 'submitted', sum(CASE WHEN innerR.statusId = 1 THEN 1 ELSE 0 END), + 'rejected', sum(CASE WHEN innerR.statusId = 2 THEN 1 ELSE 0 END), + 'accepted', sum(CASE WHEN innerR.statusId = 3 THEN 1 ELSE 0 END) ) ) as counts from ( select a.assetId ,rgr.ruleId - ,rules.title as ruleTitle - ,rules.severity - ,rules.version - ,rg.groupId - ,g.title as groupTitle + ,rgr.title as ruleTitle + ,rgr.severity + ,rgr.version + ,rgr.groupId + ,rgr.groupTitle ,r.resultId ,r.statusId from @@ -796,24 +791,19 @@ exports.getChecklistByCollectionStig = async function (collectionId, benchmarkId ${predicates.statements.join(' and ')} group by a.assetId - ,rgr.ruleId - ,rules.title - ,rules.severity - ,rules.version - ,rg.groupId - ,g.title + ,rgr.rgrId ,r.resultId ,r.statusId - ) r + ) innerR group by - r.ruleId - ,r.ruleTitle - ,r.severity - ,r.groupId - ,r.groupTitle - ,r.version + innerR.ruleId + ,innerR.ruleTitle + ,innerR.severity + ,innerR.groupId + ,innerR.groupTitle + ,innerR.version order by - substring(r.groupId from 3) + 0 + substring(innerR.groupId from 3) + 0 ` // Send query connection = await dbUtils.pool.getConnection() @@ -1545,10 +1535,10 @@ async function queryUnreviewedByCollection ({ `json_arrayagg(json_object( 'result', result.api, 'ruleId', rgr.ruleId, - 'groupId', rg.groupId, - ${projections.includes('ruleTitle') ? "'ruleTitle', rule.title," : ''} - ${projections.includes('groupTitle') ? 
"'groupTitle', `group`.title," : ''} - 'severity', rule.severity, + 'groupId', rgr.groupId, + ${projections.includes('ruleTitle') ? "'ruleTitle', rgr.title," : ''} + ${projections.includes('groupTitle') ? "'groupTitle', rgr.title," : ''} + 'severity', rgr.severity, 'benchmarkId', cr.benchmarkId )) as unreviewed` ] @@ -1561,12 +1551,12 @@ async function queryUnreviewedByCollection ({ ] break case 'rule': - const projectionMap = projections.map( p => `${p === 'groupTitle' ? '`group`' : 'rule'}.${p}`) + const projectionMap = projections.map( p => `${p === 'groupTitle' ? 'rgr.groupTitle' : 'rgr.title'}`) columns = [ 'rgr.ruleId', - 'rg.groupId', + 'rgr.groupId', 'cr.benchmarkId', - 'rule.severity', + 'rgr.severity', ...projectionMap, `json_arrayagg(json_object( 'result', result.api, @@ -1588,9 +1578,9 @@ async function queryUnreviewedByCollection ({ ] groupBy = [ 'rgr.ruleId', - 'rg.groupId', + 'rgr.groupId', 'cr.benchmarkId', - 'rule.severity', + 'rgr.severity', ...projectionMap ] orderBy = [ @@ -1605,10 +1595,7 @@ async function queryUnreviewedByCollection ({ 'left join stig_asset_map sa on a.assetId = sa.assetId', 'left join user_stig_asset_map usa on sa.saId = usa.saId', 'left join current_rev cr on sa.benchmarkId = cr.benchmarkId', - 'left join rev_group_map rg on cr.revId = rg.revId', - 'left join `group` on rg.groupId = `group`.groupId', - 'left join rev_group_rule_map rgr on rg.rgId = rgr.rgId', - 'left join rule on rgr.ruleId = rule.ruleId', + 'left join rev_group_rule_map rgr on cr.revId = rgr.revId', 'left join review r on (a.assetId = r.assetId and rgr.ruleId = r.ruleId)', 'left join result on r.resultId = result.resultId' ] @@ -1642,7 +1629,7 @@ async function queryUnreviewedByCollection ({ predicates.binds.push(ruleId) } if (severities?.length) { - predicates.statements.push('rule.severity IN ?') + predicates.statements.push('rgr.severity IN ?') predicates.binds.push([severities]) } const sql = dbUtils.makeQueryString({columns, joins, predicates, groupBy}) diff --git a/api/source/service/mysql/ReviewService.js b/api/source/service/mysql/ReviewService.js index 245e15d0a..817ce57c3 100644 --- a/api/source/service/mysql/ReviewService.js +++ b/api/source/service/mysql/ReviewService.js @@ -75,7 +75,7 @@ function cteRuleGen({ruleIds, benchmarkIds}) { cte = dbUtils.pool.format(sql,[json]) } else if (benchmarkIds?.length) { - const sql = `select ruleId from current_group_rule where benchmarkId IN ?` + const sql = `select ruleId from v_current_group_rule where benchmarkId IN ?` cte = dbUtils.pool.format(sql,[[benchmarkIds]]) } return `cteRule AS (${cte})` @@ -91,8 +91,7 @@ from left join stig_asset_map sa using (assetId) left join user_stig_asset_map usa on sa.saId = usa.saId left join revision rev using (benchmarkId) - left join rev_group_map rg using (revId) - left join rev_group_rule_map rgr using (rgId) + left join rev_group_rule_map rgr using (revId) where cg.collectionId = @collectionId and a.assetId IN (select assetId from cteAsset) @@ -784,33 +783,18 @@ exports.getReviews = async function (inProjection = [], inPredicates = {}, userO ) as status` ] const groupBy = [ - 'r.assetId', + 'r.reviewId', 'asset.name', - 'r.ruleId', - 'rule.severity', - 'r.resultId', 'result.api', - 'r.resultEngine', - 'r.detail', - 'r.autoResult', - 'r.comment', 'status.api', - 'r.userId', 'ud.username', 'udStatus.username', - 'r.ts', - 'r.statusText', - 'r.statusUserId', - 'r.statusTs', - 'r.reviewId' ] const joins = [ 'review r', 'left join rev_group_rule_map rgr on r.ruleId = rgr.ruleId', - 'left join 
rev_group_map rg on rgr.rgId = rg.rgId', - 'left join revision on rg.revId = revision.revId', - 'left join current_rev on rg.revId = current_rev.revId', - 'left join rule on r.ruleId = rule.ruleId', + 'left join revision on rgr.revId = revision.revId', + 'left join current_rev on rgr.revId = current_rev.revId', 'left join result on r.resultId = result.resultId', 'left join status on r.statusId = status.statusId', 'left join user_data ud on r.userId = ud.userId', @@ -833,11 +817,12 @@ exports.getReviews = async function (inProjection = [], inPredicates = {}, userO } if (inProjection.includes('rule')) { columns.push(`json_object( - 'ruleId' , rule.ruleId, - 'title' , rule.title, - 'version' , rule.version, - 'severity' , rule.severity) as "rule"` + 'ruleId' , rgr.ruleId, + 'title' , rgr.title, + 'version' , rgr.version, + 'severity' , rgr.severity) as "rule"` ) + groupBy.push('rgr.severity','rgr.title','rgr.version','rgr.ruleId') } if (inProjection.includes('history')) { // OVER clauses and subquery needed to order the json_arrayagg @@ -927,17 +912,18 @@ exports.getReviews = async function (inProjection = [], inPredicates = {}, userO predicates.binds.push(inPredicates.ruleId) } if (inPredicates.groupId) { - predicates.statements.push(`rg.groupId = ?`) + predicates.statements.push(`rgr.groupId = ?`) predicates.binds.push(inPredicates.groupId) } if (inPredicates.cci) { predicates.statements.push(`r.ruleId IN ( SELECT - ruleId + distinct rgr.ruleId FROM - rule_cci_map + rev_group_rule_cci_map rgrcc + left join rev_group_rule_map rgr using (rgrId) WHERE - cci = ? + rgrcc.cci = ? )` ) predicates.binds.push(inPredicates.cci) } @@ -1233,8 +1219,7 @@ exports.getRulesByAssetUser = async function ( assetId, userObject ) { left join stig_asset_map sa using (assetId) left join user_stig_asset_map usa on sa.saId = usa.saId left join revision rev using (benchmarkId) - left join rev_group_map rg using (revId) - left join rev_group_rule_map rgr using (rgId) + left join rev_group_rule_map rgr using (revId) where a.assetid = ? and cg.userId = ? @@ -1261,8 +1246,7 @@ exports.checkRuleByAssetUser = async function (ruleId, assetId, userObject) { left join stig_asset_map sa using (assetId) left join user_stig_asset_map usa on sa.saId = usa.saId left join revision rev using (benchmarkId) - left join rev_group_map rg using (revId) - left join rev_group_rule_map rgr using (rgId) + left join rev_group_rule_map rgr using (revId) where a.assetId = ? and rgr.ruleId = ? 
diff --git a/api/source/service/mysql/STIGService.js b/api/source/service/mysql/STIGService.js index ec9b36994..1ea5a48a2 100644 --- a/api/source/service/mysql/STIGService.js +++ b/api/source/service/mysql/STIGService.js @@ -1,5 +1,6 @@ 'use strict'; const dbUtils = require('./utils') +const {createHash} = require('node:crypto') let _this = this @@ -61,8 +62,8 @@ Generalized queries for Groups **/ exports.queryGroups = async function ( inProjection, inPredicates ) { let columns = [ - 'g.groupId as "groupId"', - 'g.title as "title"', + 'rgr.groupId as "groupId"', + 'rgr.groupTitle as "title"', ] let joins @@ -85,23 +86,20 @@ exports.queryGroups = async function ( inProjection, inPredicates ) { joins = ['current_rev r'] } - joins.push('inner join rev_group_map rg on r.revId = rg.revId') - joins.push('inner join `group` g on rg.groupId = g.groupId') + joins.push('inner join rev_group_rule_map rgr on r.revId = rgr.revId') if (inPredicates.groupId) { - predicates.statements.push('g.groupId = ?') + predicates.statements.push('rgr.groupId = ?') predicates.binds.push(inPredicates.groupId) } // PROJECTIONS if (inProjection && inProjection.includes('rules')) { - joins.push('inner join rev_group_rule_map rgr on rg.rgId = rgr.rgId' ) - joins.push('inner join rule rule on rgr.ruleId = rule.ruleId' ) columns.push(`json_arrayagg(json_object( - 'ruleId', rule.ruleId, - 'version', rule.version, - 'title', rule.title, - 'severity', rule.severity)) as "rules"`) + 'ruleId', rgr.ruleId, + 'version', rgr.version, + 'title', rgr.title, + 'severity', rgr.severity)) as "rules"`) } // CONSTRUCT MAIN QUERY @@ -113,9 +111,9 @@ exports.queryGroups = async function ( inProjection, inPredicates ) { sql += "\nWHERE " + predicates.statements.join(" and ") } if (inProjection && inProjection.includes('rules')) { - sql += "\nGROUP BY g.groupId, g.title\n" + sql += "\nGROUP BY rgr.groupId, rgr.groupTitle\n" } - sql += ` order by substring(g.groupId from 3) + 0` + sql += ` order by substring(rgr.groupId from 3) + 0` try { let [rows, fields] = await dbUtils.pool.query(sql, predicates.binds) @@ -133,21 +131,21 @@ For specific Rule, allow for projections with Check and Fixes **/ exports.queryBenchmarkRules = async function ( benchmarkId, revisionStr, inProjection, inPredicates ) { let columns = [ - 'r.ruleId', - 'r.title', - 'g.groupId', - 'g.title as "groupTitle"', - 'r.version', - 'r.severity' + 'rgr.ruleId', + 'rgr.title', + 'rgr.groupId', + 'rgr.groupTitle', + 'rgr.version', + 'rgr.severity' ] let groupBy = [ - 'r.ruleId', - 'r.title', - 'g.groupId', - 'g.title', - 'r.version', - 'r.severity', + 'rgr.ruleId', + 'rgr.title', + 'rgr.groupId', + 'rgr.groupTitle', + 'rgr.version', + 'rgr.severity', 'rgr.rgrId' ] @@ -172,45 +170,42 @@ exports.queryBenchmarkRules = async function ( benchmarkId, revisionStr, inProje joins = ['current_rev rev'] } - if (inPredicates && inPredicates.ruleId) { + if (inPredicates?.ruleId) { predicates.statements.push('rgr.ruleId = ?') predicates.binds.push(inPredicates.ruleId) } - joins.push('left join rev_group_map rg on rev.revId = rg.revId') - joins.push('left join `group` g on rg.groupId = g.groupId') - joins.push('left join rev_group_rule_map rgr on rg.rgId = rgr.rgId' ) - joins.push('left join rule r on rgr.ruleId = r.ruleId' ) + joins.push('left join rev_group_rule_map rgr using (revId)' ) // PROJECTIONS if ( inProjection && inProjection.includes('detail') ) { columns.push(`json_object( - 'weight', r.weight, - 'vulnDiscussion', r.vulnDiscussion, - 'falsePositives', r.falsePositives, - 
'falseNegatives', r.falseNegatives, - 'documentable', r.documentable, - 'mitigations', r.mitigations, - 'severityOverrideGuidance', r.severityOverrideGuidance, - 'potentialImpacts', r.potentialImpacts, - 'thirdPartyTools', r.thirdPartyTools, - 'mitigationControl', r.mitigationControl, - 'responsibility', r.responsibility + 'weight', rgr.weight, + 'vulnDiscussion', rgr.vulnDiscussion, + 'falsePositives', rgr.falsePositives, + 'falseNegatives', rgr.falseNegatives, + 'documentable', rgr.documentable, + 'mitigations', rgr.mitigations, + 'severityOverrideGuidance', rgr.severityOverrideGuidance, + 'potentialImpacts', rgr.potentialImpacts, + 'thirdPartyTools', rgr.thirdPartyTools, + 'mitigationControl', rgr.mitigationControl, + 'responsibility', rgr.responsibility ) as detail`) groupBy.push( - 'r.version', - 'r.weight', - 'r.vulnDiscussion', - 'r.falsePositives', - 'r.falseNegatives', - 'r.documentable', - 'r.mitigations', - 'r.severityOverrideGuidance', - 'r.potentialImpacts', - 'r.thirdPartyTools', - 'r.mitigationControl', - 'r.responsibility', - 'r.iacontrols' + 'rgr.version', + 'rgr.weight', + 'rgr.vulnDiscussion', + 'rgr.falsePositives', + 'rgr.falseNegatives', + 'rgr.documentable', + 'rgr.mitigations', + 'rgr.severityOverrideGuidance', + 'rgr.potentialImpacts', + 'rgr.thirdPartyTools', + 'rgr.mitigationControl', + 'rgr.responsibility', + 'rgr.iacontrols' ) } @@ -218,39 +213,36 @@ exports.queryBenchmarkRules = async function ( benchmarkId, revisionStr, inProje columns.push(`(select coalesce ( - (select json_arrayagg ( + (select json_arrayagg( json_object( - 'cci', rc.cci, + 'cci', rgrcc.cci, 'apAcronym', cci.apAcronym, - 'definition', cci.definition + 'definition', cci.definition, + 'control', cr.indexDisa ) ) from - rule_cci_map rc - left join cci cci on rc.cci = cci.cci - left join cci_reference_map cr on cci.cci = cr.cci + rev_group_rule_cci_map rgrcc + left join cci cci using (cci) + left join cci_reference_map cr using (cci) where - rc.ruleId = r.ruleId + rgrcc.rgrId = rgr.rgrId ), json_array() ) ) as "ccis"`) } - if ( inProjection && inProjection.includes('checks') ) { - columns.push(`(select json_arrayagg(json_object( - 'checkId', rck.checkId, - 'content', cc.content)) - from rev_group_rule_check_map rck - left join \`check\` chk on chk.checkId = rck.checkId - left join check_content cc on chk.ccId = cc.ccId - where rck.rgrId = rgr.rgrId) as "checks"`) - } - if ( inProjection && inProjection.includes('fixes') ) { - columns.push(`(select json_arrayagg(json_object( - 'fixId', rf.fixId, - 'text', fix.text)) - from rev_group_rule_fix_map rf left join fix fix on fix.fixId = rf.fixId - where rf.rgrId = rgr.rgrId) as "fixes"`) + if ( inProjection && inProjection.includes('check') ) { + joins.push('left join check_content cc on rgr.checkDigest = cc.digest' ) + columns.push(`json_object( + 'system', rgr.checkSystem, + 'content', cc.content) as \`check\``) + } + if ( inProjection && inProjection.includes('fix') ) { + joins.push('left join fix_text ft on rgr.fixDigest = ft.digest' ) + columns.push(`json_object( + 'fixref', rgr.fixref, + 'text', ft.text) as fix`) } @@ -265,7 +257,7 @@ exports.queryBenchmarkRules = async function ( benchmarkId, revisionStr, inProje if (inProjection && inProjection.includes('cci')) { sql += "\nGROUP BY " + groupBy.join(", ") + "\n" } - sql += ` order by substring(r.ruleId from 4) + 0` + sql += ` order by substring(rgr.ruleId from 4) + 0` try { let [rows] = await dbUtils.pool.query(sql, predicates.binds) @@ -282,28 +274,20 @@ Generalized queries for a single Rule, 
optionally with Check and Fix **/ exports.queryRules = async function ( ruleId, inProjection ) { let columns = [ - 'r.ruleId', - 'r.version', - 'r.title', - 'r.severity', - 'g.groupId', - 'g.Title as "groupTitle"' + 'rgr.ruleId', + 'rgr.version', + 'rgr.title', + 'rgr.severity', + 'rgr.groupId', + 'rgr.groupTitle' ] let groupBy = [ - 'r.ruleId', - 'r.version', - 'r.title', - 'r.severity', - 'g.groupId', - 'g.Title' + 'rgr.rgrId' ] let joins = [ - 'rule r', - 'left join rev_group_rule_map rgr on r.ruleId = rgr.ruleId', - 'left join rev_group_map rg on rgr.rgId = rg.rgId', - 'left join `group` g on rg.groupId = g.groupId' + 'rev_group_rule_map rgr' ] @@ -313,84 +297,61 @@ exports.queryRules = async function ( ruleId, inProjection ) { } // PREDICATES - predicates.statements.push('r.ruleId = ?') + predicates.statements.push('rgr.ruleId = ?') predicates.binds.push(ruleId) // PROJECTIONS if ( inProjection && inProjection.includes('detail') ) { columns.push(`json_object( - 'weight', r.weight, - 'vulnDiscussion', r.vulnDiscussion, - 'falsePositives', r.falsePositives, - 'falseNegatives', r.falseNegatives, - 'documentable', r.documentable, - 'mitigations', r.mitigations, - 'severityOverrideGuidance', r.severityOverrideGuidance, - 'potentialImpacts', r.potentialImpacts, - 'thirdPartyTools', r.thirdPartyTools, - 'mitigationControl', r.mitigationControl, - 'responsibility', r.responsibility + 'weight', rgr.weight, + 'vulnDiscussion', rgr.vulnDiscussion, + 'falsePositives', rgr.falsePositives, + 'falseNegatives', rgr.falseNegatives, + 'documentable', rgr.documentable, + 'mitigations', rgr.mitigations, + 'severityOverrideGuidance', rgr.severityOverrideGuidance, + 'potentialImpacts', rgr.potentialImpacts, + 'thirdPartyTools', rgr.thirdPartyTools, + 'mitigationControl', rgr.mitigationControl, + 'responsibility', rgr.responsibility ) as detail`) - let detailColumns = [ - 'r.weight', - 'r.vulnDiscussion', - 'r.falsePositives', - 'r.falseNegatives', - 'r.documentable', - 'r.mitigations', - 'r.severityOverrideGuidance', - 'r.potentialImpacts', - 'r.thirdPartyTools', - 'r.mitigationControl', - 'r.responsibility' - ] - groupBy.push(...detailColumns) + // let detailColumns = [ + // 'rgr.weight', + // 'rgr.vulnDiscussion', + // 'rgr.falsePositives', + // 'rgr.falseNegatives', + // 'rgr.documentable', + // 'rgr.mitigations', + // 'rgr.severityOverrideGuidance', + // 'rgr.potentialImpacts', + // 'rgr.thirdPartyTools', + // 'rgr.mitigationControl', + // 'rgr.responsibility' + // ] + // groupBy.push(...detailColumns) } if ( inProjection && inProjection.includes('ccis') ) { - columns.push(`(select - coalesce - ( - (select json_arrayagg ( - json_object( - 'cci', rc.cci, - 'apAcronym', cci.apAcronym, - 'definition', cci.definition - ) - ) - from - rule_cci_map rc - left join cci cci on rc.cci = cci.cci - left join cci_reference_map cr on cci.cci = cr.cci - where - rc.ruleId = r.ruleId - ), - json_array() - ) - ) as "ccis"`) + columns.push(`CASE WHEN count(rgrcc.cci) = 0 + THEN json_array() + ELSE CAST(CONCAT('[', GROUP_CONCAT(distinct json_object('cci', rgrcc.cci,'apAcronym',cci.apAcronym,'definition',cci.definition)), ']') as json) + END as ccis`) + joins.push( + 'left join rev_group_rule_cci_map rgrcc using (rgrId)', + 'left join cci using (cci)' + ) } - if ( inProjection && inProjection.includes('checks') ) { - columns.push(`(select json_arrayagg(json_object( - 'checkId', rck.checkId, - 'content', cc.content)) - from rev_group_rule_check_map rck - left join \`check\` chk on chk.checkId = rck.checkId - left join 
check_content cc on chk.ccId = cc.ccId - left join rev_group_rule_map rgr on rck.rgrId = rgr.rgrId - where rgr.ruleId = r.ruleId) as "checks"`) + if ( inProjection && inProjection.includes('check') ) { + columns.push(`json_object('system', rgr.checkSystem,'content', cc.content) as \`check\``) + joins.push('left join check_content cc on rgr.checkDigest = cc.digest') } - if ( inProjection && inProjection.includes('fixes') ) { - columns.push(`(select json_arrayagg(json_object( - 'fixId', rf.fixId, - 'text', fix.text)) - from rev_group_rule_fix_map rf - left join fix fix on fix.fixId = rf.fixId - left join rev_group_rule_map rgr on rf.rgrId = rgr.rgrId - where rgr.ruleId = r.ruleId) as "fixes"`) - } + if ( inProjection && inProjection.includes('fix') ) { + columns.push(`json_object('fixref', rgr.fixref,'text', ft.text) as fix`) + joins.push('left join fix_text ft on rgr.fixDigest = ft.digest') + } // CONSTRUCT MAIN QUERY @@ -405,7 +366,7 @@ exports.queryRules = async function ( ruleId, inProjection ) { sql += "\nGROUP BY " + groupBy.join(", ") + "\n" - sql += ` ORDER BY substring(r.ruleId from 4) + 0` + sql += ` ORDER BY substring(rgr.ruleId from 4) + 0` try { let [rows, fields] = await dbUtils.pool.query(sql, predicates.binds) @@ -417,12 +378,14 @@ exports.queryRules = async function ( ruleId, inProjection ) { } -exports.insertManualBenchmark = async function (b, svcStatus = {}) { +exports.insertManualBenchmark = async function (b, clobber, svcStatus = {}) { let connection try { + const stats = {} + let totalstart = process.hrtime() - const dml = dmlObjectFromBenchmarkData(b) // defined below + const {ddl, dml} = queriesFromBenchmarkData(b) // defined below connection = await dbUtils.pool.getConnection() connection.config.namedPlaceholders = true @@ -433,51 +396,54 @@ exports.insertManualBenchmark = async function (b, svcStatus = {}) { dml.revision.binds.version, dml.revision.binds.release ]) - const gExistingRevision = revision?.[0]?.revId + if (gExistingRevision && !clobber) { + return { + benchmarkId: dml.revision.binds.benchmarkId, + revisionStr: `V${dml.revision.binds.version}R${dml.revision.binds.release}`, + action: 'preserved' + } + } - // create temporary table(s) outside the transaction - await connection.query(dml.tempCheckImportDrop.sql) - await connection.query(dml.tempCheckImportCreate.sql) + // create temporary tables outside the transaction + for (const tempTable of Object.keys(ddl)) { + await connection.query(ddl[tempTable].drop) + await connection.query(ddl[tempTable].create) + } async function transaction() { - let result + let result, hrstart, hrend, action = 'inserted' await connection.query('START TRANSACTION') // purge any exitsing records for this revision so we can replace if (gExistingRevision) { + hrstart = process.hrtime() await connection.query('DELETE FROM revision WHERE revId = ?', [gExistingRevision]) const cleanupDml = [ - "DELETE FROM `group` WHERE groupId NOT IN (select groupId from rev_group_map)", - "DELETE FROM `rule` WHERE ruleId NOT IN (select ruleId from rev_group_rule_map )", - "DELETE FROM `check` WHERE checkId NOT IN (select checkId from rev_group_rule_check_map)", - "DELETE FROM check_content WHERE ccId NOT IN (select ccId from `check`)", - "DELETE FROM fix WHERE fixId NOT IN (select fixId from rev_group_rule_fix_map)" + "DELETE FROM check_content WHERE digest NOT IN (select checkDigest from rev_group_rule_map)", + "DELETE FROM fix_text WHERE digest NOT IN (select digest from rev_group_rule_map)" ] for (const query of cleanupDml) { await 
connection.query(query) } + hrend = process.hrtime(hrstart) + stats.delRev = `${hrend[0]}s ${hrend[1] / 1000000}ms` + action = 'replaced' } // insert new records for this revision const queryOrder = [ 'stig', 'revision', - 'group', - 'rule', - 'tempCheckImportInsert', + 'tempRuleCci', 'checkContent', - 'check', - 'fix', - 'revGroupMap', + 'fixText', 'revGroupRuleMap', - 'revGroupRuleCheckMap', - 'revGroupRuleFixMap', - 'ruleCciMap', - 'ruleCcId' + 'revGroupRuleCciMap' ] for (const query of queryOrder) { + hrstart = process.hrtime() if (Array.isArray(dml[query].binds)) { if (dml[query].binds.length === 0) { continue } ;[result] = await connection.query(dml[query].sql, [dml[query].binds]) @@ -485,9 +451,12 @@ exports.insertManualBenchmark = async function (b, svcStatus = {}) { else { ;[result] = await connection.query(dml[query].sql, dml[query].binds) } + hrend = process.hrtime(hrstart) + stats[query] = `${result.affectedRows} in ${hrend[0]}s ${hrend[1] / 1000000}ms` } // Update current_rev + hrstart = process.hrtime() let sqlDeleteCurrentRev = 'DELETE from current_rev where benchmarkId = ?' let sqlUpdateCurrentRev = `INSERT INTO current_rev ( revId, @@ -527,36 +496,45 @@ exports.insertManualBenchmark = async function (b, svcStatus = {}) { v_current_rev WHERE v_current_rev.benchmarkId = ?` - ;[result] = await connection.query(sqlDeleteCurrentRev, [dml.stig.binds.benchmarkId]) - ;[result] = await connection.query(sqlUpdateCurrentRev, [dml.stig.binds.benchmarkId]) + ;[result] = await connection.query(sqlDeleteCurrentRev, [dml.stig.binds.benchmarkId]) + ;[result] = await connection.query(sqlUpdateCurrentRev, [dml.stig.binds.benchmarkId]) + hrend = process.hrtime(hrstart) + stats.current_rev = `${hrend[0]}s ${hrend[1] / 1000000}ms` + // update current_group_rule + hrstart = process.hrtime() let sqlDeleteCurrentGroupRule = 'DELETE FROM current_group_rule WHERE benchmarkId = ?' let sqlInsertCurrentGroupRule = `INSERT INTO current_group_rule (groupId, ruleId, benchmarkId) - SELECT rg.groupId, + SELECT rgr.groupId, rgr.ruleId, cr.benchmarkId from current_rev cr - left join rev_group_map rg on rg.revId=cr.revId - left join rev_group_rule_map rgr on rgr.rgId=rg.rgId + left join rev_group_rule_map rgr on cr.revId = rgr.revId where - cr.benchmarkId = ? 
- order by - rg.groupId,rgr.ruleId,cr.benchmarkId` - ;[result] = await connection.query(sqlDeleteCurrentGroupRule, [dml.stig.binds.benchmarkId]) - ;[result] = await connection.query(sqlInsertCurrentGroupRule, [dml.stig.binds.benchmarkId]) + cr.benchmarkId = ?` + ;[result] = await connection.query(sqlDeleteCurrentGroupRule, [dml.stig.binds.benchmarkId]) + ;[result] = await connection.query(sqlInsertCurrentGroupRule, [dml.stig.binds.benchmarkId]) + hrend = process.hrtime(hrstart) + stats.current_rev = `${hrend[0]}s ${hrend[1] / 1000000}ms` // Stats + hrstart = process.hrtime() await dbUtils.updateStatsAssetStig(connection, { benchmarkId: dml.stig.binds.benchmarkId }) + hrend = process.hrtime(hrstart) + stats.statistics = `${hrend[0]}s ${hrend[1] / 1000000}ms` - // await connection.rollback() await connection.commit() + hrend = process.hrtime(totalstart) + stats.totalTime = `Completed in ${hrend[0]}s ${hrend[1] / 1000000}ms` + return { benchmarkId: dml.revision.binds.benchmarkId, - revisionStr: `V${dml.revision.binds.version}R${dml.revision.binds.release}` + revisionStr: `V${dml.revision.binds.version}R${dml.revision.binds.release}`, + action } } return await dbUtils.retryOnDeadlock(transaction, svcStatus) @@ -573,15 +551,21 @@ exports.insertManualBenchmark = async function (b, svcStatus = {}) { } } - function dmlObjectFromBenchmarkData(b) { - let dml = { + function queriesFromBenchmarkData(b) { + const tempFlag = true + const ddl = { + tempRuleCci: { + drop: 'drop table if exists temp_rule_cci', + create: `CREATE${tempFlag ? ' TEMPORARY' : ''} TABLE temp_rule_cci ( + ruleId varchar(255) NOT NULL, + cci varchar(20), + UNIQUE KEY (cci))` + } + } + const dml = { stig: { sql: "insert into stig (title, benchmarkId) VALUES (:title, :benchmarkId) as new on duplicate key update stig.title = new.title" }, - revisionDelete: { - sql: 'delete from revision where revId = ?', - binds: [] - }, revision: { sql: `insert into revision ( revId, @@ -616,187 +600,71 @@ exports.insertManualBenchmark = async function (b, svcStatus = {}) { :mediumCount, :highCount)`, }, - group: { - sql: `INSERT into \`group\` ( - groupId, - title, - severity - ) VALUES ? as new - ON DUPLICATE KEY UPDATE - \`group\`.groupId = new.groupId, - \`group\`.title = new.title, - \`group\`.severity = CASE WHEN \`group\`.severity <> new.severity THEN - 'mixed' ELSE new.severity END`, + checkContent: { + sql: `insert ignore into check_content (content) VALUES ?`, binds: [] }, - fix: { - sql: "insert into fix (fixId, text) VALUES ? 
as new on duplicate key update fix.text=new.text", + fixText: { + sql: `insert ignore into fix_text (\`text\`) VALUES ?`, binds: [] }, - check: { - sql: `insert into \`check\` (checkId, ccId) - select * from (select - tci.checkId, - cc.ccId as selectedCcId - from - temp_check_import tci - left join check_content cc on tci.digest = cc.digest) as dt - ON DUPLICATE KEY UPDATE - ccId = selectedCcId` - }, - checkContent: { - sql: `insert ignore into check_content (content) select content from temp_check_import` - }, - tempCheckImportCreate: { - sql: `CREATE TEMPORARY TABLE temp_check_import ( - checkId varchar(255) NOT NULL, - content TEXT NOT NULL, - digest BINARY(32) GENERATED ALWAYS AS (UNHEX(SHA2(content, 256))) STORED, - INDEX (digest))` - }, - tempCheckImportDrop: { - sql: "drop temporary table if exists temp_check_import" - }, - tempCheckImportInsert: { - sql: "insert into temp_check_import (checkId, content) VALUES ?", + tempGroupRule: { + sql: `insert ignore into temp_group_rule ( + groupId, + ruleId, + \`version\`, + title, + severity, + weight, + vulnDiscussion, + falsePositives, + falseNegatives, + documentable, + mitigations, + severityOverrideGuidance, + potentialImpacts, + thirdPartyTools, + mitigationControl, + responsibility, + iaControls, + checkSystem, + check + ) VALUES ?`, binds: [] }, - rule: { - sql: `insert into rule ( - ruleId, - version, - title, - severity, - weight, - vulnDiscussion, - falsePositives, - falseNegatives, - documentable, - mitigations, - severityOverrideGuidance, - potentialImpacts, - thirdPartyTools, - mitigationControl, - responsibility, - iaControls, - ccId -) VALUES ? as new -on duplicate key update - rule.version = new.version, - rule.title = new.title, - rule.severity = new.severity, - rule.weight = new.weight, - rule.vulnDiscussion = new.vulnDiscussion, - rule.falsePositives = new.falsePositives, - rule.falseNegatives = new.falseNegatives, - rule.documentable = new.documentable, - rule.mitigations = new.mitigations, - rule.severityOverrideGuidance = new.severityOverrideGuidance, - rule.potentialImpacts = new.potentialImpacts, - rule.thirdPartyTools = new.thirdPartyTools, - rule.mitigationControl = new.mitigationControl, - rule.responsibility = new.responsibility, - rule.iaControls = new.iaControls, - rule.ccId = new.ccId`, + tempRuleCheck: { + sql: `insert ignore into temp_rule_check (ruleId, \`system\`, content) VALUES ?`, binds: [] }, - ruleCcId: { - sql: `with cte1 as ( - select - rgr.ruleId, - MAX(c.checkId) as checkId - from - rev_group_map rg - left join rev_group_rule_map rgr on rg.rgId = rgr.rgId - left join rev_group_rule_check_map rgrc on rgr.rgrId = rgrc.rgrId - left join \`check\` c on rgrc.checkId = c.checkId - where - rg.revId = ? 
- group by - rgr.ruleId), - cte2 as ( - select - cte1.ruleId, - c.ccId - from - \`check\` c - inner join cte1 on c.checkId = cte1.checkId) - update rule - inner join cte2 on rule.ruleId = cte2.ruleId - set rule.ccId = cte2.ccId`, + tempRuleFix: { + sql: `insert ignore into temp_rule_fix (ruleId, fixref, \`text\`) VALUES ?`, binds: [] }, - revGroupMap: { - delete: { - sql: 'delete from rev_group_map where revId = ?', - binds: [] - }, - sql: "insert into rev_group_map (revId, groupId, rules) VALUES ?", + tempRuleCci: { + sql: `insert ignore into temp_rule_cci (ruleId, cci) VALUES ?`, binds: [] }, revGroupRuleMap: { - sql: `INSERT INTO rev_group_rule_map - (rgId, ruleId, checks, fixes, ccis) - SELECT - rg.rgId, - tt.ruleId, - tt.checks, - tt.fixes, - tt.ccis - FROM - rev_group_map rg, - JSON_TABLE( - rg.rules, - "$[*]" - COLUMNS( - ruleId VARCHAR(255) PATH "$.ruleId", - checks JSON PATH "$.checks", - fixes JSON PATH "$.fixes", - ccis JSON PATH "$.ccis" - )) AS tt - WHERE rg.revId = :revId` - }, - ruleCciMap: { - sql: `INSERT IGNORE INTO rule_cci_map - (ruleId, cci) - VALUES ?`, + sql: `INSERT INTO rev_group_rule_map ( + revId, + groupId, groupTitle, groupSeverity, + ruleId, \`version\`, title, severity, weight, vulnDiscussion, + falsePositives, falseNegatives, documentable, mitigations, + severityOverrideGuidance, potentialImpacts, thirdPartyTools, mitigationControl, + responsibility, iaControls, checkSystem, checkDigest, fixref, fixDigest) + VALUES ?`, binds: [] }, - revGroupRuleCheckMap: { - sql: `INSERT INTO rev_group_rule_check_map (rgrId, checkId) - SELECT - rgr.rgrId, - tt.checkId - FROM - rev_group_map rg, - rev_group_rule_map rgr, - JSON_TABLE( - rgr.checks, - "$[*]" COLUMNS( - checkId VARCHAR(255) PATH "$" - ) - ) AS tt - WHERE - rg.revId = :revId - AND rg.rgId=rgr.rgId` - }, - revGroupRuleFixMap: { - sql: `INSERT INTO rev_group_rule_fix_map (rgrId, fixId) - SELECT - rgr.rgrId, - tt.fixId - FROM - rev_group_map rg, - rev_group_rule_map rgr, - JSON_TABLE( - rgr.fixes, - "$[*]" COLUMNS( - fixId VARCHAR(255) PATH "$" - ) - ) AS tt - WHERE - rg.revId = :revId - AND rg.rgId=rgr.rgId` + revGroupRuleCciMap: { + sql: `INSERT INTO rev_group_rule_cci_map (rgrId, cci) + SELECT + rgr.rgrId, + tt.cci + FROM + rev_group_rule_map rgr + inner join temp_rule_cci tt using (ruleId) + WHERE + rgr.revId = :revId` }, } @@ -814,18 +682,16 @@ on duplicate key update revisionBinds.lowCount = revisionBinds.mediumCount = revisionBinds.highCount = 0 // QUERY: revision dml.revision.binds = revisionBinds - // QUERY: revisionDelete - dml.revisionDelete.binds.push(revisionBinds.revId) - // QUERY: ruleCcId - dml.ruleCcId.binds.push(revisionBinds.revId) - groups.forEach(group => { + let ruleCount = 0 + let checkCount = 0 + let fixCount = 0 + for (const group of groups) { let { rules, ...groupBinds } = group - let ruleMap = [] - let identsMap = [] let groupSeverity - rules.forEach(rule => { + for (const rule of rules) { + ruleCount++ let { checks, fixes, idents, ...ruleBinds } = rule // Group severity calculation if (!groupSeverity) { @@ -834,8 +700,27 @@ on duplicate key update else if (groupSeverity !== ruleBinds.severity) { groupSeverity = 'mixed' } - // QUERY: rule - dml.rule.binds.push([ + checkCount += checks.length + fixCount += fixes.length + const checkSystem = checks.map( check => check.system).join(',') + const checkContent = checks.map( check => check.content).join('\n\n-----AND-----\n\n') + const checkDigest = createHash('sha256').update(checkContent).digest() + const fixref = fixes.map( fix => 
fix.fixref).join(',') + const fixText = fixes.map( fix => fix.text).join('\n\n-----AND-----\n\n') + const fixDigest = createHash('sha256').update(fixText).digest() + + // QUERY: checkContent + dml.checkContent.binds.push([checkContent]) + + // QUERY: fixText + dml.fixText.binds.push([fixText]) + + // QUERY: revGroupRuleMap + dml.revGroupRuleMap.binds.push([ + revisionBinds.revId, + groupBinds.groupId, + groupBinds.title, + ruleBinds.severity, // groupSeverity hack ruleBinds.ruleId, ruleBinds.version, ruleBinds.title, @@ -852,77 +737,37 @@ on duplicate key update ruleBinds.mitigationControl, ruleBinds.responsibility, ruleBinds.iaControls, - null // ccId + checkSystem, + checkDigest, + fixref, + fixDigest ]) - if (checks) { - checks.forEach(check => { - // QUERY: tempCheckImportInsert - dml.tempCheckImportInsert.binds.push([ - check.checkId, - check.content - ]) - }) + + for (const ident of idents) { + if (ident.system === 'http://iase.disa.mil/cci' || ident.system === 'http://cyber.mil/cci') { + dml.tempRuleCci.binds.push([ + rule.ruleId, + ident.ident.replace('CCI-', '')]) + } } + } - fixes.forEach(fix => { - // QUERY: fix - dml.fix.binds.push([ - fix.fixId, - fix.text - ]) - }) + // QUERY: rev_group_rule_cci_map + dml.revGroupRuleCciMap.binds = { revId: revisionBinds.revId } + } - idents.forEach(ident => { - if (ident.system === 'http://iase.disa.mil/cci' || ident.system === 'http://cyber.mil/cci') { - dml.ruleCciMap.binds.push([rule.ruleId, ident.ident.replace('CCI-', '')]) - } - }) - - // JSON for rev_group_map.rules - ruleMap.push({ - ruleId: rule.ruleId, - checks: checks ? checks.map(c => c.checkId) : null, - fixes: fixes ? fixes.map(f => f.fixId) : null, - ccis: identsMap - }) - - }) // end rules.forEach - - // QUERY: group - dml.group.binds.push([ - groupBinds.groupId, - groupBinds.title, - groupSeverity - ]) - - // QUERY: rev_group_map - dml.revGroupMap.binds.push([ - revisionBinds.revId, - group.groupId, - JSON.stringify(ruleMap) - ]) - dml.revGroupMap.delete.binds.push(revisionBinds.revId) - - // QUERY: rev_group_rule_map - dml.revGroupRuleMap.binds = { revId: revisionBinds.revId } - // QUERY: rev_group_rule_check_map - dml.revGroupRuleCheckMap.binds = { revId: revisionBinds.revId } - // QUERY: rev_group_rule_fix_map - dml.revGroupRuleFixMap.binds = { revId: revisionBinds.revId } - - }) // end groups.forEach - - dml.revision.binds.groupCount = dml.group.binds.length - dml.revision.binds.checkCount = dml.tempCheckImportInsert.binds.length - dml.revision.binds.fixCount = dml.fix.binds.length - // add rule severity counts to the revision binds. rule[3] is the index of the severity value - dml.rule.binds.reduce((binds, rule) => { - const prop = `${rule[3]}Count` + dml.revision.binds.groupCount = groups.length + dml.revision.binds.checkCount = checkCount + dml.revision.binds.fixCount = fixCount + + // add rule severity counts to the revision binds. groupRule[7] is the location of the severity value + dml.revGroupRuleMap.binds.reduce((binds, groupRule) => { + const prop = `${groupRule[7]}Count` binds[prop] = (binds[prop] ?? 
0) + 1 return binds }, dml.revision.binds) - return dml + return {ddl, dml} } } @@ -937,12 +782,9 @@ exports.deleteRevisionByString = async function(benchmarkId, revisionStr, svcSta let dmls = [ "DELETE FROM revision WHERE benchmarkId = :benchmarkId and `version` = :version and `release` = :release", - "DELETE FROM `rule` WHERE ruleId NOT IN (select ruleId from rev_group_rule_map )", - "DELETE FROM `check` WHERE checkId NOT IN (select checkId from rev_group_rule_check_map)", - "DELETE FROM check_content WHERE ccId NOT IN (select ccId from `check`)", - "DELETE FROM fix WHERE fixId NOT IN (select fixId from rev_group_rule_fix_map)", - "DELETE FROM `group` WHERE groupId NOT IN (select groupId from rev_group_map)" - ] + "DELETE FROM check_content WHERE digest NOT IN (select checkDigest from rev_group_rule_map)", + "DELETE FROM fix_text WHERE digest NOT IN (select fixDigest from rev_group_rule_map)" +] let currentRevDmls = [ "DELETE from current_rev where benchmarkId = :benchmarkId", `INSERT INTO current_rev ( @@ -983,19 +825,6 @@ exports.deleteRevisionByString = async function(benchmarkId, revisionStr, svcSta v_current_rev WHERE v_current_rev.benchmarkId = :benchmarkId`, - "DELETE FROM current_group_rule WHERE benchmarkId = :benchmarkId", - `INSERT INTO current_group_rule (groupId, ruleId, benchmarkId) - SELECT rg.groupId, - rgr.ruleId, - cr.benchmarkId - from - current_rev cr - left join rev_group_map rg on rg.revId=cr.revId - left join rev_group_rule_map rgr on rgr.rgId=rg.rgId - where - cr.benchmarkId = :benchmarkId - order by - rg.groupId,rgr.ruleId,cr.benchmarkId`, "DELETE FROM stig WHERE benchmarkId NOT IN (select benchmarkId FROM current_rev)" ] @@ -1060,15 +889,10 @@ exports.deleteRevisionByString = async function(benchmarkId, revisionStr, svcSta exports.deleteStigById = async function(benchmarkId, userObject, svcStatus = {}) { let dmls = [ - "DELETE FROM revision WHERE benchmarkId = :benchmarkId", - "DELETE from current_rev where benchmarkId = :benchmarkId", - "DELETE FROM current_group_rule WHERE benchmarkId = :benchmarkId", "DELETE from stig where benchmarkId = :benchmarkId", - "DELETE FROM `rule` WHERE ruleId NOT IN (select ruleId from rev_group_rule_map )", - "DELETE FROM `check` WHERE checkId NOT IN (select checkId from rev_group_rule_check_map)", - "DELETE FROM check_content WHERE ccId NOT IN (select ccId from `check`)", - "DELETE FROM fix WHERE fixId NOT IN (select fixId from rev_group_rule_fix_map)", - "DELETE FROM `group` WHERE groupId NOT IN (select groupId from rev_group_map)" + "DELETE from current_rev where benchmarkId = :benchmarkId", + "DELETE FROM check_content WHERE digest NOT IN (select checkDigest from rev_group_rule_map)", + "DELETE FROM fix_text WHERE digest NOT IN (select fixDigest from rev_group_rule_map)" ] let connection; @@ -1178,10 +1002,9 @@ exports.getCci = async function(cci, inProjection, userObject) { 'revisionStr', concat('V', rv.version, 'R', rv.release) ) as stig from cci ci - left join rule_cci_map as rcm on rcm.cci = ci.cci - left join rev_group_rule_map as rgrm on rgrm.ruleId = rcm.ruleId - left join rev_group_map as rgm on rgm.rgId = rgrm.rgId - left join revision as rv on rv.revId = rgm.revId + left join rev_group_rule_cci_map rgrcc using (cci) + left join rev_group_rule_map rgr using (rgrId) + left join revision rv using (revId) where ci.cci = c.cci and benchmarkId is not null ) as agg), json_array() @@ -1261,11 +1084,10 @@ exports.getCcisByRevision = async function(benchmarkId, revisionStr, userObject) joins = ['current_rev r'] } - 
joins.push('LEFT JOIN rev_group_map rgm on r.revId = rgm.revId') - joins.push('LEFT JOIN rev_group_rule_map AS rgrm ON rgrm.rgId = rgm.rgId') - joins.push('LEFT JOIN rule_cci_map AS rcm ON rgrm.ruleId = rcm.ruleId') - joins.push('LEFT JOIN cci AS c ON rcm.cci = c.cci') - joins.push('LEFT JOIN cci_reference_map AS crm ON crm.cci = c.cci') + joins.push('LEFT JOIN rev_group_rule_map rgr using (revId)') + joins.push('INNER JOIN rev_group_rule_cci_map rgrcc using (rgrId)') + joins.push('LEFT JOIN cci c using (cci)') + // joins.push('LEFT JOIN cci_reference_map crm using (cci)') // CONSTRUCT MAIN QUERY @@ -1352,7 +1174,6 @@ exports.getRevisionByString = async function(benchmarkId, revisionStr, userObjec date_format(${ro.table_alias}.benchmarkDateSql,'%Y-%m-%d') as "benchmarkDate", ${ro.table_alias}.status, ${ro.table_alias}.statusDate, - ${ro.table_alias}.description, ${ro.table_alias}.ruleCount FROM ${ro.table} ${ro.table_alias} @@ -1392,7 +1213,6 @@ exports.getRevisionsByBenchmarkId = async function(benchmarkId, userObject) { date_format(r.benchmarkDateSql,'%Y-%m-%d') as "benchmarkDate", r.status, r.statusDate, - r.description, r.ruleCount FROM revision r diff --git a/api/source/service/mysql/migrations/0020.js b/api/source/service/mysql/migrations/0020.js new file mode 100644 index 000000000..3a0bbeb3f --- /dev/null +++ b/api/source/service/mysql/migrations/0020.js @@ -0,0 +1,190 @@ +const MigrationHandler = require('./lib/MigrationHandler') + +const upMigration = [ + // if absent here, the query UPDATE rev_group_rule_map runs very slowly + `ALTER TABLE rule DROP FOREIGN KEY fk_rule_1`, + `ALTER TABLE rule DROP COLUMN ccId, DROP INDEX ccId`, + + // temp table for rule_fix + `drop table if exists temp_rule_fix`, + `create temporary table temp_rule_fix( + rgrId INT PRIMARY KEY, + fixref varchar(255), + text TEXT, + digest BINARY(32) GENERATED ALWAYS AS (UNHEX(SHA2(text, 256))) STORED)`, + `insert into temp_rule_fix(rgrId, fixref, text) + select + rgrf.rgrId, + group_concat(fix.fixId) as fixref, + group_concat(fix.text separator '\n\n-----AND-----\n\n') as text + from + rev_group_rule_fix_map rgrf + left join fix using (fixId) + group by rgrf.rgrId`, + + // create and populate fix_text + `CREATE TABLE fix_text ( + ftId INT NOT NULL AUTO_INCREMENT, + digest BINARY(32) GENERATED ALWAYS AS (UNHEX(SHA2(text, 256))) STORED, + text TEXT NOT NULL, + PRIMARY KEY (ftId), + UNIQUE INDEX digest_UNIQUE (digest ASC) VISIBLE)`, + `INSERT INTO fix_text (text) SELECT text from temp_rule_fix ON DUPLICATE KEY UPDATE text=temp_rule_fix.text`, + + // temp table for rule_check + `drop table if exists temp_rule_check`, + `create temporary table temp_rule_check( + rgrId INT PRIMARY KEY, + \`system\` varchar(255), + content TEXT, + digest BINARY(32) GENERATED ALWAYS AS (UNHEX(SHA2(content, 256))) STORED)`, + `insert into temp_rule_check(rgrId, \`system\`, content) + select + rgrc.rgrId, + group_concat(rgrc.checkId) as \`system\`, + group_concat(cc.content separator '\n\n-----AND-----\n\n') as content + from + rev_group_rule_check_map rgrc + left join \`check\` using (checkId) + left join check_content cc using (ccId) + group by rgrc.rgrId`, + + // populate check_content with multi-check content only. 
Migration 0019 populated single-check content + `insert into check_content(content) select content from temp_rule_check where \`system\` like '%,%'`, + + // update rev_group_rule_map + `ALTER TABLE rev_group_rule_map DROP COLUMN checks, DROP COLUMN fixes, DROP COLUMN ccis`, + `ALTER TABLE rev_group_rule_map + ADD COLUMN \`revId\` varchar(255) AFTER rgrId, + ADD COLUMN \`groupId\` varchar(45) AFTER revId, + ADD COLUMN \`groupTitle\` varchar(255) AFTER groupId, + ADD COLUMN \`groupSeverity\` varchar(45) AFTER groupTitle, + ADD COLUMN \`version\` varchar(45), + ADD COLUMN \`title\` varchar(1000), + ADD COLUMN \`severity\` varchar(45), + ADD COLUMN \`weight\` varchar(45), + ADD COLUMN \`vulnDiscussion\` text, + ADD COLUMN \`falsePositives\` text, + ADD COLUMN \`falseNegatives\` text, + ADD COLUMN \`documentable\` varchar(45) , + ADD COLUMN \`mitigations\` text, + ADD COLUMN \`severityOverrideGuidance\` text, + ADD COLUMN \`potentialImpacts\` text, + ADD COLUMN \`thirdPartyTools\` text, + ADD COLUMN \`mitigationControl\` text, + ADD COLUMN \`responsibility\` varchar(255) , + ADD COLUMN \`iaControls\` varchar(255), + ADD COLUMN \`checkSystem\` varchar (255), + ADD COLUMN \`checkDigest\` BINARY(32), + ADD COLUMN \`fixref\` varchar(255), + ADD COLUMN \`fixDigest\` BINARY(32)`, + + `UPDATE rev_group_rule_map rgr + LEFT JOIN rev_group_map rg using (rgId) + LEFT JOIN \`group\` g on rg.groupId = g.groupId + LEFT JOIN rule r using (ruleId) + LEFT JOIN temp_rule_check trc using (rgrId) + LEFT JOIN temp_rule_fix trf using (rgrId) + SET + rgr.revId = rg.revId, + rgr.groupId = rg.groupId, + rgr.groupTitle = g.title, + rgr.groupSeverity = g.severity, + rgr.\`version\` = r.\`version\`, + rgr.title = r.title, + rgr.severity = r.severity, + rgr.weight = r.weight, + rgr.vulnDiscussion = r.vulnDiscussion, + rgr.falsePositives = r.falsePositives, + rgr.falseNegatives = r.falseNegatives, + rgr.documentable = r.documentable, + rgr.mitigations = r.mitigations, + rgr.severityOverrideGuidance = r.severityOverrideGuidance, + rgr.potentialImpacts = r.potentialImpacts, + rgr.thirdPartyTools = r.thirdPartyTools, + rgr.mitigationControl = r.mitigationControl, + rgr.responsibility = r.responsibility, + rgr.iaControls = r.iaControls, + rgr.checkSystem = trc.system, + rgr.checkDigest = trc.digest, + rgr.fixref = trf.fixref, + rgr.fixDigest = trf.digest`, + `ALTER TABLE rev_group_rule_map DROP FOREIGN KEY FK_rev_group_rule_map_rule`, + `ALTER TABLE rev_group_rule_map DROP FOREIGN KEY FK_rev_group_rule_map_rev_group_map`, + `ALTER TABLE rev_group_rule_map DROP INDEX uidx_rgrm_rgId_ruleId`, + `ALTER TABLE rev_group_rule_map DROP COLUMN rgId`, + `ALTER TABLE rev_group_rule_map ADD INDEX index4 (checkDigest ASC) VISIBLE, ADD INDEX index5 (fixDigest ASC) VISIBLE`, + `ALTER TABLE rev_group_rule_map ADD UNIQUE INDEX rev_group_rule_UNIQUE (revId ASC, groupId ASC, ruleId ASC) VISIBLE`, + `ALTER TABLE rev_group_rule_map ADD CONSTRAINT fk_rev_group_rule_map_1 FOREIGN KEY (revId) REFERENCES revision (revId) ON DELETE CASCADE ON UPDATE CASCADE`, + + // rev_group_rule_cci_map + `CREATE TABLE rev_group_rule_cci_map ( + rgrccId INT NOT NULL AUTO_INCREMENT, + rgrId INT NOT NULL, + cci VARCHAR(20) NOT NULL, + PRIMARY KEY (rgrccId), + UNIQUE INDEX index2 (rgrId ASC, cci ASC) VISIBLE, + INDEX index3 (cci ASC) VISIBLE)`, + `INSERT INTO rev_group_rule_cci_map (rgrId, cci) + SELECT + rgrId, + rc.cci + FROM + rev_group_rule_map rgr + left join rule_cci_map rc using (ruleId) + WHERE + rc.cci is not null`, + + // drop legacy tables + `DROP TABLE 
rev_group_map`, + `DROP TABLE \`group\``, + `DROP TABLE rule_cci_map`, + `DROP TABLE rule`, + `DROP table rev_group_rule_check_map`, + `DROP table rev_group_rule_fix_map`, + `DROP table \`check\``, + `DROP table fix`, + `DROP table poam_rar_entry`, + + // VIEW for current_group_rule + `CREATE OR REPLACE VIEW v_current_group_rule AS + SELECT + cr.benchmarkId + ,rgr.groupId + ,rgr.groupTitle + ,rgr.groupSeverity + ,rgr.ruleId + ,rgr.\`version\` + ,rgr.title + ,rgr.severity + ,rgr.weight + ,rgr.vulnDiscussion + ,rgr.falsePositives + ,rgr.falseNegatives + ,rgr.documentable + ,rgr.mitigations + ,rgr.severityOverrideGuidance + ,rgr.potentialImpacts + ,rgr.thirdPartyTools + ,rgr.mitigationControl + ,rgr.responsibility + ,rgr.iaControls + ,rgr.checkSystem + ,rgr.checkDigest + ,rgr.fixref + ,rgr.fixDigest + from current_rev cr left join rev_group_rule_map rgr using(revId)` +] + +const downMigration = [ +] + +const migrationHandler = new MigrationHandler(upMigration, downMigration) +module.exports = { + up: async (pool) => { + await migrationHandler.up(pool, __filename) + }, + down: async (pool) => { + // await migrationHandler.down(pool, __filename) + } +} diff --git a/api/source/service/mysql/utils.js b/api/source/service/mysql/utils.js index 501d9e7c5..17184831b 100644 --- a/api/source/service/mysql/utils.js +++ b/api/source/service/mysql/utils.js @@ -198,8 +198,7 @@ module.exports.scrubReviewsByUser = async function(reviews, elevate, userObject) inner join asset a on cg.collectionId = a.collectionId inner join stig_asset_map sa on a.assetId = sa.assetId inner join revision rev on sa.benchmarkId = rev.benchmarkId - inner join rev_group_map rg on rev.revId = rg.revId - inner join rev_group_rule_map rgr on rg.rgId = rgr.rgId + inner join rev_group_rule_map rgr on rev.revId = rgr.revId WHERE cg.userId = ? and cg.accessLevel != 1 @@ -214,8 +213,7 @@ module.exports.scrubReviewsByUser = async function(reviews, elevate, userObject) inner join stig_asset_map sa on a.assetId = sa.assetId inner join user_stig_asset_map usa on (sa.saId = usa.saId and cg.userId = usa.userId) inner join revision rev on sa.benchmarkId = rev.benchmarkId - inner join rev_group_map rg on rev.revId = rg.revId - inner join rev_group_rule_map rgr on rg.rgId = rgr.rgId + inner join rev_group_rule_map rgr on rev.revId = rgr.revId WHERE cg.userId = ? 
and cg.accessLevel = 1 @@ -255,7 +253,7 @@ module.exports.updateStatsAssetStig = async function(connection, { collectionId, let whereClause = '' if (rules && rules.length > 0) { - predicates.push(`sa.benchmarkId IN (SELECT DISTINCT benchmarkId from current_group_rule where ruleId IN ?)`) + predicates.push(`sa.benchmarkId IN (SELECT DISTINCT benchmarkId from v_current_group_rule where ruleId IN ?)`) binds.push( [rules] ) } @@ -305,9 +303,9 @@ module.exports.updateStatsAssetStig = async function(connection, { collectionId, sum(CASE WHEN review.statusId = 3 THEN 1 ELSE 0 END) as accepted, sum(CASE WHEN review.resultEngine is not null and review.statusId = 3 THEN 1 ELSE 0 END) as acceptedResultEngine, - sum(CASE WHEN review.resultId=4 and r.severity='high' THEN 1 ELSE 0 END) as highCount, - sum(CASE WHEN review.resultId=4 and r.severity='medium' THEN 1 ELSE 0 END) as mediumCount, - sum(CASE WHEN review.resultId=4 and r.severity='low' THEN 1 ELSE 0 END) as lowCount, + sum(CASE WHEN review.resultId=4 and cgr.severity='high' THEN 1 ELSE 0 END) as highCount, + sum(CASE WHEN review.resultId=4 and cgr.severity='medium' THEN 1 ELSE 0 END) as mediumCount, + sum(CASE WHEN review.resultId=4 and cgr.severity='low' THEN 1 ELSE 0 END) as lowCount, sum(CASE WHEN review.resultId = 1 THEN 1 ELSE 0 END) as notchecked, sum(CASE WHEN review.resultEngine is not null and review.resultId = 1 THEN 1 ELSE 0 END) as notcheckedResultEngine, @@ -331,9 +329,8 @@ module.exports.updateStatsAssetStig = async function(connection, { collectionId, from asset a left join stig_asset_map sa using (assetId) - left join current_group_rule cgr using (benchmarkId) - left join rule r using (ruleId) - left join review on (r.ruleId=review.ruleId and review.assetId=sa.assetId) + left join v_current_group_rule cgr using (benchmarkId) + left join review on (cgr.ruleId=review.ruleId and review.assetId=sa.assetId) ${whereClause} group by sa.assetId, diff --git a/api/source/specification/stig-manager.yaml b/api/source/specification/stig-manager.yaml index ca36aa87e..ef43776a1 100644 --- a/api/source/specification/stig-manager.yaml +++ b/api/source/specification/stig-manager.yaml @@ -2854,6 +2854,13 @@ paths: - STIG summary: Import a STIG Benchmark operationId: importBenchmark + parameters: + - name: clobber + in: query + description: Should existing Revision data be overwritten + schema: + type: boolean + default: false requestBody: required: true content: @@ -2870,7 +2877,7 @@ paths: content: application/json: schema: - $ref: '#/components/schemas/Revision' + $ref: '#/components/schemas/RevisionPost' default: description: unexpected error content: @@ -2915,7 +2922,7 @@ paths: get: tags: - STIG - summary: Return the defintion and associated checks and fixes for the specified Rule + summary: Return the defintion and associated check and fix for the specified Rule operationId: getRuleByRuleId parameters: - $ref: '#/components/parameters/RuleProjectionQuery' @@ -3870,6 +3877,10 @@ components: $ref: '#/components/schemas/RevisionBasic' CciBasic: type: object + required: + - cci + - apAcronym + - definition additionalProperties: false properties: cci: @@ -3878,6 +3889,9 @@ components: type: string definition: type: string + control: + type: string + nullable: true CciListItem: type: object additionalProperties: false @@ -3922,7 +3936,7 @@ components: type: object additionalProperties: false properties: - checkId: + system: type: string content: type: string @@ -4689,7 +4703,7 @@ components: type: object additionalProperties: false properties: - 
fixId: + fixref: type: string text: type: string @@ -5815,7 +5829,6 @@ components: - benchmarkDate - status - statusDate - - description - ruleCount additionalProperties: false properties: @@ -5837,8 +5850,6 @@ components: statusDate: type: string format: date - description: - type: string ruleCount: type: integer RevisionBasic: @@ -5852,6 +5863,24 @@ components: $ref: '#/components/schemas/String255' revisionStr: $ref: '#/components/schemas/RevisionStr' + RevisionPost: + type: object + required: + - benchmarkId + - revisionStr + - action + additionalProperties: false + properties: + benchmarkId: + $ref: '#/components/schemas/String255' + revisionStr: + $ref: '#/components/schemas/RevisionStr' + action: + type: string + enum: + - inserted + - preserved + - replaced RevisionStr: type: string pattern: ^(V\d{1,3}R\d{1,3}(\.\d{1,3})?|latest)$ @@ -5901,8 +5930,10 @@ components: properties: weight: type: string + nullable: true vulnDiscussion: type: string + nullable: true falsePositives: type: string nullable: true @@ -5911,6 +5942,7 @@ components: nullable: true documentable: type: string + nullable: true mitigations: type: string nullable: true @@ -5933,14 +5965,10 @@ components: type: array items: $ref: '#/components/schemas/CciBasic' - checks: - type: array - items: - $ref: '#/components/schemas/Check' - fixes: - type: array - items: - $ref: '#/components/schemas/Fix' + check: + $ref: '#/components/schemas/Check' + fix: + $ref: '#/components/schemas/Fix' RuleSeverity: type: string enum: @@ -6401,7 +6429,7 @@ components: ChecklistProjectionQuery: name: projection in: query - description: Optional properties to return. Selecting "checks" or "fixes" will also select "fullrules" + description: Optional properties to return. Selecting "check" or "fix" will also select "fullrules" style: form explode: true schema: @@ -6413,8 +6441,8 @@ components: enum: - cci - fullrules - - checks - - fixes + - check + - fix CklModeQuery: name: mode in: query @@ -6773,8 +6801,8 @@ components: enum: - detail - ccis - - checks - - fixes + - check + - fix RuleSeverityQuery: name: severity in: query diff --git a/api/source/utils/parsers.js b/api/source/utils/parsers.js index ce90f14a6..d3be2df6e 100644 --- a/api/source/utils/parsers.js +++ b/api/source/utils/parsers.js @@ -62,6 +62,7 @@ module.exports.benchmarkFromXccdf = function (xccdfData) { localeRange: "", //To support non english character in tag/attribute values. parseTrueNumberOnly: false, arrayMode: true, //"strict" + alwaysCreateTextNode: true, //"strict" attrValueProcessor: (val) => tagValueProcessor(val, {isAttributeValue: true}), tagValueProcessor: (val) => tagValueProcessor(val) } @@ -91,24 +92,18 @@ module.exports.benchmarkFromXccdf = function (xccdfData) { let groups = bIn.Group.map(group => { let rules = group.Rule.map(rule => { - let checks - // Traditional STIG has no checks, so check for checks - if (rule.check) { - checks = rule.check.map(check => ({ - checkId: check.system, - content: isScap? check['check-content-ref'][0] : check['check-content'] - })) - } - - let fixes = rule.fixtext.map(fix => ({ - fixId: fix.fixref, + let checks = rule.check ? rule.check.map(check => ({ + system: check.system, + content: isScap? check['check-content-ref']?.[0]?._ : check['check-content']?.[0]?._ + })) : [] + let fixes = rule.fixtext ? rule.fixtext.map(fix => ({ + fixref: fix.fixref, text: fix._ - })) + })) : [] let idents = rule.ident ? 
rule.ident.map(ident => ({ ident: ident._, system: ident.system })) : [] - // The description element is often not well-formed XML, so we fallback on extracting content between expected tags function parseRuleDescription (d) { let parsed = {} @@ -138,12 +133,12 @@ module.exports.benchmarkFromXccdf = function (xccdfData) { return parsed } - let desc = parseRuleDescription(rule.description) + let desc = parseRuleDescription(rule.description?.[0]?._) return { ruleId: rule.id, - version: rule.version || null, - title: rule.title || null, + version: rule.version?.[0]._ || null, + title: rule.title?.[0]._ || null, severity: rule.severity || null, weight: rule.weight || null, vulnDiscussion: desc.VulnDiscussion || null, @@ -162,14 +157,16 @@ module.exports.benchmarkFromXccdf = function (xccdfData) { idents: idents } }) - let desc = Parser.parse(group.description, fastparseOptions) - + // let desc + // if (group.description?.[0]?._) { + // desc = Parser.parse(group.description[0]._, fastparseOptions) + // } return { groupId: group.id, - title: group.title || null, - description: desc.GroupDescription || null, + title: group.title[0]._ || null, + // description: desc?.GroupDescription || null, rules: rules } }) @@ -179,11 +176,11 @@ module.exports.benchmarkFromXccdf = function (xccdfData) { return { benchmarkId: bIn.id, - title: bIn.title, + title: bIn.title?.[0]._, scap: isScap, revision: { - revisionStr: `V${bIn.version}R${release}`, - version: isScap ? bIn.version[0]._ : bIn.version, + revisionStr: `V${bIn.version?.[0]._}R${release}`, + version: bIn.version?.[0]._, release: release, releaseInfo: releaseInfo, benchmarkDate: benchmarkDate, @@ -218,6 +215,7 @@ module.exports.benchmarkFromXccdf = function (xccdfData) { 'Aug': '08', 'August': '08', 'Sep': '09', + 'Sept': '09', 'September': '09', 'Oct': '10', 'October': '10', diff --git a/client/src/js/SM/Global.js b/client/src/js/SM/Global.js index 25355ab04..f0c0d0db3 100644 --- a/client/src/js/SM/Global.js +++ b/client/src/js/SM/Global.js @@ -157,16 +157,12 @@ SM.RuleContentTpl = new Ext.XTemplate( '
{[SM.he(values.content?.trim())]}', - '
{[SM.he(values.check?.content?.trim())]}', '
{[SM.he(values.text?.trim())]}', - '
{[SM.he(values.fix?.text?.trim())]}', '