
Commit 5c3ced4
improvements to workaround for issue blocking cisagov#475, integration of sigma rules
mmguero committed Feb 6, 2025
1 parent 4740e57 commit 5c3ced4
Showing 5 changed files with 24 additions and 15 deletions.
23 changes: 16 additions & 7 deletions dashboards/scripts/shared-object-creation.sh
@@ -34,21 +34,30 @@ STARTUP_IMPORT_PERFORMED_FILE=/tmp/shared-objects-created
function DoReplacersInFile() {
# Index pattern and time field name may be specified via environment variable, but need
# to be reflected in dashboards, templates, anomaly detectors, etc.
# This function takes a file and performs that replacement.
# This function takes a file and performs those and other replacements.
REPLFILE="$1"
DATASTORE_TYPE="$2"
FILE_TYPE="$3"
if [[ -n "$REPLFILE" ]] && [[ -f "$REPLFILE" ]]; then
sed -i "s/MALCOLM_NETWORK_INDEX_PATTERN_REPLACER/${INDEX_PATTERN}/g" "${REPLFILE}" || true
sed -i "s/MALCOLM_NETWORK_INDEX_TIME_FIELD_REPLACER/${INDEX_TIME_FIELD}/g" "${REPLFILE}" || true
sed -i "s/MALCOLM_OTHER_INDEX_PATTERN_REPLACER/${OTHER_INDEX_PATTERN}/g" "${REPLFILE}" || true
sed -i "s/MALCOLM_OTHER_INDEX_TIME_FIELD_REPLACER/${OTHER_INDEX_TIME_FIELD}/g" "${REPLFILE}" || true
if [[ "$DATASTORE_TYPE" == "elasticsearch" ]] && [[ "$FILE_TYPE" == "template" ]]; then
# OpenSearch - flat_object - https://opensearch.org/docs/latest/field-types/supported-field-types/flat-object/
# Elasticsearch - flattened - https://www.elastic.co/guide/en/elasticsearch/reference/current/flattened.html
sed -i "s/flat_object/flattened/g" "${REPLFILE}" || true
fi
fi
}

function DoReplacersForDir() {
REPLDIR="$1"
DATASTORE_TYPE="$2"
FILE_TYPE="$3"
if [[ -n "$REPLDIR" ]] && [[ -d "$REPLDIR" ]]; then
while IFS= read -r fname; do
DoReplacersInFile "$fname"
DoReplacersInFile "$fname" "$DATASTORE_TYPE" "$FILE_TYPE"
done < <( find "$REPLDIR"/ -type f 2>/dev/null )
fi
}
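
A minimal usage sketch (not part of the commit) of the new arguments: the second argument names the datastore type and the third the kind of file being imported, and only the elasticsearch/template combination triggers the flat_object-to-flattened substitution. The temporary file below is illustrative only; it assumes the functions above have been sourced and that the replacement environment variables (INDEX_PATTERN, etc.) are set or harmlessly empty.

# illustrative only: exercise the guarded type substitution
TMPFILE="$(mktemp)"
echo '{ "details": { "type": "flat_object" } }' > "$TMPFILE"
DoReplacersInFile "$TMPFILE" opensearch template      # no type substitution performed
DoReplacersInFile "$TMPFILE" elasticsearch dashboard  # still none (not a template)
DoReplacersInFile "$TMPFILE" elasticsearch template   # flat_object -> flattened
cat "$TMPFILE"    # { "details": { "type": "flattened" } }
rm -f "$TMPFILE"
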
@@ -168,7 +177,7 @@ if [[ "${CREATE_OS_ARKIME_SESSION_INDEX:-true}" = "true" ]] ; then
TEMPLATES_IMPORTED=false
TEMPLATES_IMPORT_DIR="$(mktemp -d -t templates-XXXXXX)"
rsync -a "$MALCOLM_TEMPLATES_DIR"/ "$TEMPLATES_IMPORT_DIR"/
DoReplacersForDir "$TEMPLATES_IMPORT_DIR"
DoReplacersForDir "$TEMPLATES_IMPORT_DIR" "$DATASTORE_TYPE" template
MALCOLM_TEMPLATE_FILE_ORIG_TMP="$(echo "$MALCOLM_TEMPLATE_FILE_ORIG" | sed "s@$MALCOLM_TEMPLATES_DIR@$TEMPLATES_IMPORT_DIR@")"

# calculate combined SHA sum of all templates to save as _meta.hash to determine if
@@ -338,7 +347,7 @@ if [[ "${CREATE_OS_ARKIME_SESSION_INDEX:-true}" = "true" ]] ; then

DASHBOARDS_IMPORT_DIR="$(mktemp -d -t dashboards-XXXXXX)"
rsync -a /opt/dashboards/ "$DASHBOARDS_IMPORT_DIR"/
DoReplacersForDir "$DASHBOARDS_IMPORT_DIR"/
DoReplacersForDir "$DASHBOARDS_IMPORT_DIR" "$DATASTORE_TYPE" dashboard
for i in "${DASHBOARDS_IMPORT_DIR}"/*.json; do

# get info about the dashboard to be imported
@@ -378,7 +387,7 @@ if [[ "${CREATE_OS_ARKIME_SESSION_INDEX:-true}" = "true" ]] ; then
# manually load our dashboards in /opt/dashboards/beats as well.
BEATS_DASHBOARDS_IMPORT_DIR="$(mktemp -d -t beats-XXXXXX)"
rsync -a /opt/dashboards/beats/ "$BEATS_DASHBOARDS_IMPORT_DIR"/
DoReplacersForDir "$BEATS_DASHBOARDS_IMPORT_DIR"
DoReplacersForDir "$BEATS_DASHBOARDS_IMPORT_DIR" "$DATASTORE_TYPE" dashboard
for i in "${BEATS_DASHBOARDS_IMPORT_DIR}"/*.json; do

# get info about the dashboard to be imported
@@ -493,7 +502,7 @@ if [[ "${CREATE_OS_ARKIME_SESSION_INDEX:-true}" = "true" ]] ; then
# Create anomaly detectors here
ANOMALY_IMPORT_DIR="$(mktemp -d -t anomaly-XXXXXX)"
rsync -a /opt/anomaly_detectors/ "$ANOMALY_IMPORT_DIR"/
DoReplacersForDir "$ANOMALY_IMPORT_DIR"
DoReplacersForDir "$ANOMALY_IMPORT_DIR" "$DATASTORE_TYPE" anomaly_detector
for i in "${ANOMALY_IMPORT_DIR}"/*.json; do
# identify the name of the anomaly detector, and, if it already exists, its
# ID and previous update time, as well as the update time of the file to import
@@ -579,7 +588,7 @@ if [[ "${CREATE_OS_ARKIME_SESSION_INDEX:-true}" = "true" ]] ; then
# monitors
ALERTING_IMPORT_DIR="$(mktemp -d -t alerting-XXXXXX)"
rsync -a /opt/alerting/monitors/ "$ALERTING_IMPORT_DIR"/
DoReplacersForDir "$ALERTING_IMPORT_DIR"
DoReplacersForDir "$ALERTING_IMPORT_DIR" "$DATASTORE_TYPE" monitor
for i in "${ALERTING_IMPORT_DIR}"/*.json; do
curl "${CURL_CONFIG_PARAMS[@]}" -w "\n" --location --silent --output /dev/null --show-error \
-XPOST "$OPENSEARCH_URL_TO_USE/_plugins/_alerting/monitors" \
8 changes: 4 additions & 4 deletions dashboards/templates/composable/component/malcolm_common.json
@@ -15,7 +15,7 @@
"service": { "type": "keyword" },
"site": { "type": "keyword" },
"url": { "type": "keyword" },
"details": { "type": "nested" }
"details": { "type": "flat_object" }
}
},
"destination.segment": {
@@ -25,7 +25,7 @@
"site": { "type": "keyword" },
"tenant": { "type": "keyword" },
"url": { "type": "keyword" },
"details": { "type": "nested" }
"details": { "type": "flat_object" }
}
},
"event.freq_score_v1": { "type": "float" },
@@ -59,7 +59,7 @@
"service": { "type": "keyword" },
"site": { "type": "keyword" },
"url": { "type": "keyword" },
"details": { "type": "nested" }
"details": { "type": "flat_object" }
}
},
"source.segment": {
@@ -69,7 +69,7 @@
"site": { "type": "keyword" },
"tenant": { "type": "keyword" },
"url": { "type": "keyword" },
"details": { "type": "nested" }
"details": { "type": "flat_object" }
}
},
"tls.client.ja4": { "type": "keyword" },
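
A hedged spot check (assumptions, not from the commit): after shared-object-creation.sh imports this component template, the registered mapping should report flat_object on OpenSearch and flattened on Elasticsearch, since the import script rewrites the type per datastore. The component template name below is a guess; OPENSEARCH_URL_TO_USE and CURL_CONFIG_PARAMS are variables used elsewhere in shared-object-creation.sh.

# count how many fields in the registered component template use each type
# (template name "malcolm_common" is hypothetical)
curl "${CURL_CONFIG_PARAMS[@]}" --silent -XGET \
  "${OPENSEARCH_URL_TO_USE}/_component_template/malcolm_common" | \
  grep -Eo '"type" *: *"(flat_object|flattened)"' | sort | uniq -c
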
5 changes: 2 additions & 3 deletions dashboards/templates/composable/component/miscbeat.json
@@ -28,7 +28,7 @@
"added": { "type": "keyword", "ignore_above": 1024, "fields": { "text": { "type": "text" } } },
"changed": { "type": "keyword", "ignore_above": 1024, "fields": { "text": { "type": "text" } } },
"removed": { "type": "keyword", "ignore_above": 1024, "fields": { "text": { "type": "text" } } },
"databases": { "type": "nested" }
"databases": { "type": "flat_object" }
}
},
"cpu": {
@@ -85,8 +85,7 @@
"drops.tx": { "type": "long" },
"drops.rx": { "type": "long" },
"drops.total": { "type": "long" },
"interface": { "type": "keyword" },
"details": { "type": "nested" }
"interface": { "type": "keyword" }
}
},
"syslog": {
2 changes: 1 addition & 1 deletion dashboards/templates/composable/component/zeek.json
@@ -592,7 +592,7 @@
"zeek.syslog.severity": { "type": "keyword" },
"zeek.tds.command": { "type": "keyword" },
"zeek.tds_rpc.parameter": { "type": "keyword", "ignore_above": 1024, "fields": { "text": { "type": "text", "norms": false } } },
"zeek.tds_rpc.parameters": { "type": "nested" },
"zeek.tds_rpc.parameters": { "type": "flat_object" },
"zeek.tds_rpc.procedure_name": { "type": "keyword", "ignore_above": 1024, "fields": { "text": { "type": "text" } } },
"zeek.tds_sql_batch.header_type": { "type": "keyword" },
"zeek.tds_sql_batch.query": { "type": "keyword", "ignore_above": 16384, "fields": { "text": { "type": "text" } } },
1 change: 1 addition & 0 deletions logstash/pipelines/beats/11_beats_logs.conf
@@ -470,6 +470,7 @@ filter {
event.set('[miscbeat][network][drops][rx]', drop_rx)
event.set('[miscbeat][network][drops][total]', drop_tx+drop_rx)
end"
remove_field => [ "[miscbeat][network][details]" ]
}

if (![event][hash]) {
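
A hedged follow-up check (illustrative, not part of the commit): because the pipeline now drops [miscbeat][network][details] once the per-interface drop counters have been copied out, newly indexed documents should no longer contain that field, consistent with removing its mapping from miscbeat.json above. The index pattern below reuses the OTHER_INDEX_PATTERN variable referenced in shared-object-creation.sh; its resolved value is environment-specific.

# expect a hit count of zero for documents still carrying the removed field
curl "${CURL_CONFIG_PARAMS[@]}" --silent -XGET \
  "${OPENSEARCH_URL_TO_USE}/${OTHER_INDEX_PATTERN}/_search?size=0&q=_exists_:miscbeat.network.details" | \
  grep -o '"total":{"value":[0-9]*'
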
