[Security Solution] Ensure alerts are scheduled when rule times out #128276

Merged (7 commits) on Mar 25, 2022
@@ -50,6 +50,7 @@ export const createSecurityRuleTypeWrapper: CreateSecurityRuleTypeWrapper =
const persistenceRuleType = createPersistenceRuleTypeWrapper({ ruleDataClient, logger });
return persistenceRuleType({
...type,
cancelAlertsOnRuleTimeout: false,
useSavedObjectReferences: {
extractReferences: (params) => extractReferences({ logger, params }),
injectReferences: (params, savedObjectReferences) =>
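The added cancelAlertsOnRuleTimeout: false opts the security rule types out of the alerting framework's default behavior of discarding alerts and skipping action scheduling when an execution is cancelled for exceeding its timeout, which is what lets the rest of this PR keep scheduling notifications after a timeout. Below is a minimal sketch of a rule type carrying that flag; the interface and executor signature are simplified stand-ins for illustration, not the real Kibana alerting types.

// Sketch only: simplified stand-ins for the Kibana alerting types.
interface MinimalRuleType {
  id: string;
  name: string;
  // When false, the framework does not drop alerts or skip action scheduling
  // just because the execution was cancelled for running past its timeout.
  cancelAlertsOnRuleTimeout?: boolean;
  executor: (options: { shouldStopExecution: () => boolean }) => Promise<void>;
}

const exampleSecurityRuleType: MinimalRuleType = {
  id: 'siem.exampleRule', // hypothetical id, for illustration only
  name: 'Example security rule',
  // Keep alerts and notification scheduling alive even if the run times out.
  cancelAlertsOnRuleTimeout: false,
  executor: async ({ shouldStopExecution }) => {
    // The executor can still check for cancellation and wind down gracefully,
    // but the alerts it has already produced are not thrown away on timeout.
    if (shouldStopExecution()) {
      return;
    }
    // ... build and persist alerts, then schedule notifications ...
  },
};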
@@ -304,51 +305,52 @@ export const createSecurityRuleTypeWrapper: CreateSecurityRuleTypeWrapper =
});
}

if (result.success) {
const createdSignalsCount = result.createdSignals.length;

if (actions.length) {
const fromInMs = parseScheduleDates(`now-${interval}`)?.format('x');
const toInMs = parseScheduleDates('now')?.format('x');
const resultsLink = getNotificationResultsLink({
from: fromInMs,
to: toInMs,
const createdSignalsCount = result.createdSignals.length;

if (actions.length) {
const fromInMs = parseScheduleDates(`now-${interval}`)?.format('x');
const toInMs = parseScheduleDates('now')?.format('x');
const resultsLink = getNotificationResultsLink({
from: fromInMs,
to: toInMs,
id: alertId,
kibanaSiemAppUrl: (meta as { kibana_siem_app_url?: string } | undefined)
?.kibana_siem_app_url,
});

logger.debug(
buildRuleMessage(`Found ${createdSignalsCount} signals for notification.`)
);

if (completeRule.ruleConfig.throttle != null) {
// NOTE: Since this is throttled we have to call it even on an error condition, otherwise it will "reset" the throttle and fire early
await scheduleThrottledNotificationActions({
alertInstance: services.alertFactory.create(alertId),
throttle: completeRule.ruleConfig.throttle ?? '',
startedAt,
id: alertId,
kibanaSiemAppUrl: (meta as { kibana_siem_app_url?: string } | undefined)
?.kibana_siem_app_url,
outputIndex: ruleDataClient.indexNameWithNamespace(spaceId),
ruleId,
esClient: services.scopedClusterClient.asCurrentUser,
Contributor:
If we try to schedule throttled notifications after a rule is cancelled, the search executed inside this function will also be cancelled and we won't be able to schedule the actions. We may need the alerting framework to provide a secondary "un-cancellable" client that we can use during the actions scheduling process.

Contributor Author:
Ah, good point!
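To make the concern above concrete: the scoped cluster client handed to a rule executor is tied to the execution's cancellation, so once the run is cancelled a search issued through it is aborted as well. A rough, self-contained sketch of that failure mode using the Elasticsearch JS client's request-level abort signal; the index pattern is a placeholder and uncancellableClient is a hypothetical stand-in for the secondary client the comment asks the framework to provide.

import { Client } from '@elastic/elasticsearch';

async function scheduleNotificationsAfterCancellation(
  cancellableClient: Client,
  uncancellableClient: Client
) {
  const controller = new AbortController();

  // Simulate the framework cancelling the execution (e.g. the rule hit its timeout).
  controller.abort();

  try {
    // This request is tied to the execution's abort signal, so it is cancelled too
    // and the signal data needed for the notification can never be fetched.
    await cancellableClient.search(
      { index: '.alerts-security.alerts-*', size: 0 },
      { signal: controller.signal }
    );
  } catch (e) {
    // A client that is not bound to the execution's abort signal could still run
    // the query, letting throttled notifications be scheduled after cancellation.
    await uncancellableClient.search({ index: '.alerts-security.alerts-*', size: 0 });
  }
}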

notificationRuleParams,
signals: result.createdSignals,
logger,
});
} else if (createdSignalsCount) {
const alertInstance = services.alertFactory.create(alertId);
scheduleNotificationActions({
alertInstance,
signalsCount: createdSignalsCount,
signals: result.createdSignals,
resultsLink,
ruleParams: notificationRuleParams,
});

logger.debug(
buildRuleMessage(`Found ${createdSignalsCount} signals for notification.`)
);

if (completeRule.ruleConfig.throttle != null) {
await scheduleThrottledNotificationActions({
alertInstance: services.alertFactory.create(alertId),
throttle: completeRule.ruleConfig.throttle ?? '',
startedAt,
id: alertId,
kibanaSiemAppUrl: (meta as { kibana_siem_app_url?: string } | undefined)
?.kibana_siem_app_url,
outputIndex: ruleDataClient.indexNameWithNamespace(spaceId),
ruleId,
esClient: services.scopedClusterClient.asCurrentUser,
notificationRuleParams,
signals: result.createdSignals,
logger,
});
} else if (createdSignalsCount) {
const alertInstance = services.alertFactory.create(alertId);
scheduleNotificationActions({
alertInstance,
signalsCount: createdSignalsCount,
signals: result.createdSignals,
resultsLink,
ruleParams: notificationRuleParams,
});
}
}
}

if (result.success) {
logger.debug(buildRuleMessage('[+] Signal Rule execution completed.'));
logger.debug(
buildRuleMessage(
@@ -392,23 +394,6 @@ export const createSecurityRuleTypeWrapper: CreateSecurityRuleTypeWrapper =
indexingDurations: result.bulkCreateTimes,
},
});
// NOTE: Since this is throttled we have to call it even on an error condition, otherwise it will "reset" the throttle and fire early
if (completeRule.ruleConfig.throttle != null) {
await scheduleThrottledNotificationActions({
alertInstance: services.alertFactory.create(alertId),
throttle: completeRule.ruleConfig.throttle ?? '',
startedAt,
id: completeRule.alertId,
kibanaSiemAppUrl: (meta as { kibana_siem_app_url?: string } | undefined)
?.kibana_siem_app_url,
outputIndex: ruleDataClient.indexNameWithNamespace(spaceId),
ruleId,
esClient: services.scopedClusterClient.asCurrentUser,
notificationRuleParams,
signals: result.createdSignals,
logger,
});
}
}
} catch (error) {
const errorMessage = error.message ?? '(no error message given)';
@@ -426,8 +411,9 @@ export const createSecurityRuleTypeWrapper: CreateSecurityRuleTypeWrapper =
indexingDurations: result.bulkCreateTimes,
},
});

// NOTE: Since this is throttled we have to call it even on an error condition, otherwise it will "reset" the throttle and fire early
if (completeRule.ruleConfig.throttle != null) {
if (actions.length && completeRule.ruleConfig.throttle != null) {
await scheduleThrottledNotificationActions({
alertInstance: services.alertFactory.create(alertId),
throttle: completeRule.ruleConfig.throttle ?? '',
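The NOTE comment above explains why the throttled call runs even in this error path: skipping it on a failed run would "reset" the throttle and let the next successful run fire early. The sketch below is a loose, simplified model of that kind of behavior, not the actual scheduleThrottledNotificationActions or alerting-framework implementation; the class and its bookkeeping are invented for illustration.

// Simplified model only (invented names): state survives between runs solely because
// each run "touches" the gate; a skipped run drops the state, so the next run fires
// immediately even if the throttle window has not actually elapsed.
class ThrottleGate {
  private lastFiredAt?: number;
  private touchedThisCycle = false;

  // Call once per rule run, success or failure, so the state is carried forward.
  touch(now: number, throttleMs: number, fire: () => void): void {
    this.touchedThisCycle = true;
    if (this.lastFiredAt === undefined || now - this.lastFiredAt >= throttleMs) {
      fire();
      this.lastFiredAt = now;
    }
  }

  // End-of-cycle cleanup: untouched state is discarded, which is what "resets"
  // the throttle if an errored run never called touch().
  endCycle(): void {
    if (!this.touchedThisCycle) {
      this.lastFiredAt = undefined;
    }
    this.touchedThisCycle = false;
  }
}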