diff --git a/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.test.ts b/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.test.ts
index 387bd0174dbc4..597d6f056fa15 100644
--- a/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.test.ts
+++ b/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.test.ts
@@ -9,6 +9,7 @@ import {
   RuleDataClientMock,
 } from '../rule_data_client/rule_data_client.mock';
 import {
+  ALERT_ACTION_GROUP,
   ALERT_END,
   ALERT_INSTANCE_ID,
   ALERT_RULE_EXECUTION_UUID,
@@ -18,7 +19,7 @@ import {
   EVENT_ACTION,
   TIMESTAMP,
 } from '../../common/technical_rule_data_field_names';
-import { createGetSummarizedAlertsFn } from './create_get_summarized_alerts_fn';
+import { AlertDocument, createGetSummarizedAlertsFn } from './create_get_summarized_alerts_fn';
 
 describe('createGetSummarizedAlertsFn', () => {
   let ruleDataClientMock: RuleDataClientMock;
@@ -1644,6 +1645,344 @@ describe('createGetSummarizedAlertsFn', () => {
     expect(summarizedAlerts.recovered.data).toEqual([]);
   });
 
+  it('creates function that uses a custom format alerts function if defined', async () => {
+    ruleDataClientMock.getReader().search.mockResolvedValueOnce({
+      hits: {
+        total: {
+          value: 6,
+        },
+        hits: [
+          {
+            _id: '1',
+            _index: '.alerts-default-000001',
+            _source: {
+              [TIMESTAMP]: '2020-01-01T12:00:00.000Z',
+              [ALERT_RULE_EXECUTION_UUID]: 'abc',
+              [ALERT_RULE_UUID]: 'rule-id',
+              [ALERT_INSTANCE_ID]: 'TEST_ALERT_3',
+              [ALERT_UUID]: 'uuid1',
+              kibana: {
+                alert: {
+                  instance: {
+                    id: 'TEST_ALERT_3',
+                  },
+                  rule: {
+                    execution: {
+                      uuid: 'abc',
+                    },
+                  },
+                  uuid: 'uuid1',
+                },
+              },
+            },
+          },
+          {
+            _id: '2',
+            _index: '.alerts-default-000001',
+            _source: {
+              [TIMESTAMP]: '2020-01-01T12:00:00.000Z',
+              [ALERT_RULE_EXECUTION_UUID]: 'abc',
+              [ALERT_RULE_UUID]: 'rule-id',
+              [ALERT_INSTANCE_ID]: 'TEST_ALERT_4',
+              [ALERT_UUID]: 'uuid2',
+              kibana: {
+                alert: {
+                  instance: {
+                    id: 'TEST_ALERT_4',
+                  },
+                  rule: {
+                    execution: {
+                      uuid: 'abc',
+                    },
+                  },
+                  uuid: 'uuid2',
+                },
+              },
+            },
+          },
+          {
+            _id: '3',
+            _index: '.alerts-default-000001',
+            _source: {
+              [TIMESTAMP]: '2020-01-01T12:10:00.000Z',
+              [ALERT_RULE_EXECUTION_UUID]: 'abc',
+              [ALERT_RULE_UUID]: 'rule-id',
+              [ALERT_INSTANCE_ID]: 'TEST_ALERT_1',
+              [ALERT_UUID]: 'uuid3',
+              kibana: {
+                alert: {
+                  instance: {
+                    id: 'TEST_ALERT_1',
+                  },
+                  rule: {
+                    execution: {
+                      uuid: 'abc',
+                    },
+                  },
+                  uuid: 'uuid3',
+                },
+              },
+            },
+          },
+          {
+            _id: '4',
+            _index: '.alerts-default-000001',
+            _source: {
+              [TIMESTAMP]: '2020-01-01T12:20:00.000Z',
+              [ALERT_RULE_EXECUTION_UUID]: 'abc',
+              [ALERT_RULE_UUID]: 'rule-id',
+              [ALERT_INSTANCE_ID]: 'TEST_ALERT_2',
+              [ALERT_UUID]: 'uuid4',
+              kibana: {
+                alert: {
+                  instance: {
+                    id: 'TEST_ALERT_2',
+                  },
+                  rule: {
+                    execution: {
+                      uuid: 'abc',
+                    },
+                  },
+                  uuid: 'uuid4',
+                },
+              },
+            },
+          },
+          {
+            _id: '5',
+            _index: '.alerts-default-000001',
+            _source: {
+              [TIMESTAMP]: '2020-01-01T12:00:00.000Z',
+              [ALERT_RULE_EXECUTION_UUID]: 'abc',
+              [ALERT_RULE_UUID]: 'rule-id',
+              [ALERT_INSTANCE_ID]: 'TEST_ALERT_5',
+              [ALERT_UUID]: 'uuid5',
+              kibana: {
+                alert: {
+                  instance: {
+                    id: 'TEST_ALERT_5',
+                  },
+                  rule: {
+                    execution: {
+                      uuid: 'abc',
+                    },
+                  },
+                  uuid: 'uuid5',
+                },
+              },
+            },
+          },
+          {
+            _id: '6',
+            _index: '.alerts-default-000001',
+            _source: {
+              [TIMESTAMP]: '2020-01-01T12:20:00.000Z',
+              [ALERT_RULE_EXECUTION_UUID]: 'abc',
+              [ALERT_RULE_UUID]: 'rule-id',
+              [ALERT_INSTANCE_ID]: 'TEST_ALERT_9',
+              [ALERT_UUID]: 'uuid6',
+              kibana: {
+                alert: {
+                  instance: {
+                    id: 'TEST_ALERT_9',
+                  },
+                  rule: {
+                    execution: {
+                      uuid: 'abc',
+                    },
+                  },
+                  uuid: 'uuid6',
+                },
+              },
+            },
+          },
+        ],
+      },
+    } as any);
+    const getSummarizedAlertsFn = createGetSummarizedAlertsFn({
+      ruleDataClient: ruleDataClientMock,
+      useNamespace: true,
+      isLifecycleAlert: false,
+      formatAlert: (alert: AlertDocument) => {
+        return {
+          ...alert,
+          [ALERT_ACTION_GROUP]: 'boopboopdedoo',
+        };
+      },
+    })();
+
+    const summarizedAlerts = await getSummarizedAlertsFn({
+      start: new Date('2020-01-01T11:00:00.000Z'),
+      end: new Date('2020-01-01T12:25:00.000Z'),
+      ruleId: 'rule-id',
+      spaceId: 'space-id',
+      excludedAlertInstanceIds: ['TEST_ALERT_10'],
+    });
+    expect(ruleDataClientMock.getReader).toHaveBeenCalledWith({ namespace: 'space-id' });
+    expect(ruleDataClientMock.getReader().search).toHaveBeenCalledTimes(1);
+    expect(ruleDataClientMock.getReader().search).toHaveBeenCalledWith({
+      body: {
+        size: 100,
+        track_total_hits: true,
+        query: {
+          bool: {
+            filter: [
+              {
+                range: {
+                  [TIMESTAMP]: {
+                    gte: '2020-01-01T11:00:00.000Z',
+                    lt: '2020-01-01T12:25:00.000Z',
+                  },
+                },
+              },
+              {
+                term: {
+                  [ALERT_RULE_UUID]: 'rule-id',
+                },
+              },
+              {
+                bool: {
+                  must_not: {
+                    terms: {
+                      [ALERT_INSTANCE_ID]: ['TEST_ALERT_10'],
+                    },
+                  },
+                },
+              },
+            ],
+          },
+        },
+      },
+    });
+    expect(summarizedAlerts.new.count).toEqual(6);
+    expect(summarizedAlerts.ongoing.count).toEqual(0);
+    expect(summarizedAlerts.recovered.count).toEqual(0);
+    expect(summarizedAlerts.new.data).toEqual([
+      {
+        _id: '1',
+        _index: '.alerts-default-000001',
+        [TIMESTAMP]: '2020-01-01T12:00:00.000Z',
+        kibana: {
+          alert: {
+            action_group: 'boopboopdedoo',
+            instance: {
+              id: 'TEST_ALERT_3',
+            },
+            rule: {
+              execution: {
+                uuid: 'abc',
+              },
+              uuid: 'rule-id',
+            },
+            uuid: 'uuid1',
+          },
+        },
+      },
+      {
+        _id: '2',
+        _index: '.alerts-default-000001',
+        [TIMESTAMP]: '2020-01-01T12:00:00.000Z',
+        kibana: {
+          alert: {
+            action_group: 'boopboopdedoo',
+            instance: {
+              id: 'TEST_ALERT_4',
+            },
+            rule: {
+              execution: {
+                uuid: 'abc',
+              },
+              uuid: 'rule-id',
+            },
+            uuid: 'uuid2',
+          },
+        },
+      },
+      {
+        _id: '3',
+        _index: '.alerts-default-000001',
+        [TIMESTAMP]: '2020-01-01T12:10:00.000Z',
+        kibana: {
+          alert: {
+            action_group: 'boopboopdedoo',
+            instance: {
+              id: 'TEST_ALERT_1',
+            },
+            rule: {
+              execution: {
+                uuid: 'abc',
+              },
+              uuid: 'rule-id',
+            },
+            uuid: 'uuid3',
+          },
+        },
+      },
+      {
+        _id: '4',
+        _index: '.alerts-default-000001',
+        [TIMESTAMP]: '2020-01-01T12:20:00.000Z',
+        kibana: {
+          alert: {
+            action_group: 'boopboopdedoo',
+            instance: {
+              id: 'TEST_ALERT_2',
+            },
+            rule: {
+              execution: {
+                uuid: 'abc',
+              },
+              uuid: 'rule-id',
+            },
+            uuid: 'uuid4',
+          },
+        },
+      },
+      {
+        _id: '5',
+        _index: '.alerts-default-000001',
+        [TIMESTAMP]: '2020-01-01T12:00:00.000Z',
+        kibana: {
+          alert: {
+            action_group: 'boopboopdedoo',
+            instance: {
+              id: 'TEST_ALERT_5',
+            },
+            rule: {
+              execution: {
+                uuid: 'abc',
+              },
+              uuid: 'rule-id',
+            },
+            uuid: 'uuid5',
+          },
+        },
+      },
+      {
+        _id: '6',
+        _index: '.alerts-default-000001',
+        [TIMESTAMP]: '2020-01-01T12:20:00.000Z',
+        kibana: {
+          alert: {
+            action_group: 'boopboopdedoo',
+            instance: {
+              id: 'TEST_ALERT_9',
+            },
+            rule: {
+              execution: {
+                uuid: 'abc',
+              },
+              uuid: 'rule-id',
+            },
+            uuid: 'uuid6',
+          },
+        },
+      },
+    ]);
+    expect(summarizedAlerts.ongoing.data).toEqual([]);
+    expect(summarizedAlerts.recovered.data).toEqual([]);
+  });
+
   it('throws error if search throws error', async () => {
     ruleDataClientMock.getReader().search.mockImplementation(() => {
       throw new Error('search error');
diff --git a/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.ts b/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.ts
index 4754d47f236e3..95c971230317e 100644
--- a/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.ts
+++ b/x-pack/plugins/rule_registry/server/utils/create_get_summarized_alerts_fn.ts
@@ -27,12 +27,13 @@ import { ParsedExperimentalFields } from '../../common/parse_experimental_fields';
 import { IRuleDataClient, IRuleDataReader } from '../rule_data_client';
 
 const MAX_ALERT_DOCS_TO_RETURN = 100;
-type AlertDocument = Partial<ParsedTechnicalFields & ParsedExperimentalFields>;
+export type AlertDocument = Partial<ParsedTechnicalFields & ParsedExperimentalFields>;
 
 interface CreateGetSummarizedAlertsFnOpts {
   ruleDataClient: PublicContract<IRuleDataClient>;
   useNamespace: boolean;
   isLifecycleAlert: boolean;
+  formatAlert?: (alert: AlertDocument) => AlertDocument;
 }
 
 export const createGetSummarizedAlertsFn =
@@ -73,6 +74,7 @@ export const createGetSummarizedAlertsFn =
         ruleId,
         executionUuid: executionUuid!,
         isLifecycleAlert: opts.isLifecycleAlert,
+        formatAlert: opts.formatAlert,
         excludedAlertInstanceIds,
       });
     }
@@ -83,6 +85,7 @@
       start: start!,
       end: end!,
       isLifecycleAlert: opts.isLifecycleAlert,
+      formatAlert: opts.formatAlert,
       excludedAlertInstanceIds,
     });
   };
@@ -93,6 +96,7 @@ interface GetAlertsByExecutionUuidOpts {
   ruleDataClientReader: IRuleDataReader;
   isLifecycleAlert: boolean;
   excludedAlertInstanceIds: string[];
+  formatAlert?: (alert: AlertDocument) => AlertDocument;
 }
 
 const getAlertsByExecutionUuid = async ({
@@ -101,12 +105,14 @@ const getAlertsByExecutionUuid = async ({
   ruleDataClientReader,
   isLifecycleAlert,
   excludedAlertInstanceIds,
+  formatAlert,
 }: GetAlertsByExecutionUuidOpts) => {
   if (isLifecycleAlert) {
     return getLifecycleAlertsByExecutionUuid({
       executionUuid,
       ruleId,
       ruleDataClientReader,
+      formatAlert,
       excludedAlertInstanceIds,
     });
   }
@@ -115,6 +121,7 @@
     executionUuid,
     ruleId,
     ruleDataClientReader,
+    formatAlert,
     excludedAlertInstanceIds,
   });
 };
@@ -124,6 +131,7 @@ interface GetAlertsByExecutionUuidHelperOpts {
   ruleId: string;
   ruleDataClientReader: IRuleDataReader;
   excludedAlertInstanceIds: string[];
+  formatAlert?: (alert: AlertDocument) => AlertDocument;
 }
 
 const getPersistentAlertsByExecutionUuid = async <TSearchRequest extends ESSearchRequest>({
@@ -131,17 +139,15 @@ const getPersistentAlertsByExecutionUuid = async <TSearchRequest extends ESSearchRequest>({
   ruleId,
   ruleDataClientReader,
   excludedAlertInstanceIds,
+  formatAlert,
 }: GetAlertsByExecutionUuidHelperOpts) => {
   // persistent alerts only create new alerts so query by execution UUID to
   // get all alerts created during an execution
   const request = getQueryByExecutionUuid(executionUuid, ruleId, excludedAlertInstanceIds);
-  const response = (await ruleDataClientReader.search(request)) as ESSearchResponse<
-    AlertDocument,
-    TSearchRequest
-  >;
+  const response = await doSearch(ruleDataClientReader, request, formatAlert);
 
   return {
-    new: getHitsWithCount(response),
+    new: response,
     ongoing: {
       count: 0,
       data: [],
@@ -158,6 +164,7 @@ const getLifecycleAlertsByExecutionUuid = async ({
   ruleId,
   ruleDataClientReader,
   excludedAlertInstanceIds,
+  formatAlert,
 }: GetAlertsByExecutionUuidHelperOpts) => {
   // lifecycle alerts assign a different action to an alert depending
   // on whether it is new/ongoing/recovered. query for each action in order
@@ -170,13 +177,13 @@
   ];
 
   const responses = await Promise.all(
-    requests.map((request) => ruleDataClientReader.search(request))
+    requests.map((request) => doSearch(ruleDataClientReader, request, formatAlert))
   );
 
   return {
-    new: getHitsWithCount(responses[0]),
-    ongoing: getHitsWithCount(responses[1]),
-    recovered: getHitsWithCount(responses[2]),
+    new: responses[0],
+    ongoing: responses[1],
+    recovered: responses[2],
   };
 };
 
@@ -197,24 +204,35 @@ const expandFlattenedAlert = (alert: object) => {
 };
 
 const getHitsWithCount = <TSearchRequest extends ESSearchRequest>(
-  response: ESSearchResponse<AlertDocument, TSearchRequest>
+  response: ESSearchResponse<AlertDocument, TSearchRequest>,
+  formatAlert?: (alert: AlertDocument) => AlertDocument
 ) => {
   return {
     count: (response.hits.total as SearchTotalHits).value,
     data: response.hits.hits.map((hit) => {
       const { _id, _index, _source } = hit;
-      const rawAlert = {
+      const formattedSource = formatAlert ? formatAlert(_source) : _source;
+
+      const expandedSource = expandFlattenedAlert(formattedSource as object);
+      return {
         _id,
         _index,
-        ..._source,
+        ...expandedSource,
       };
-
-      return expandFlattenedAlert(rawAlert as object);
     }),
   };
 };
 
+const doSearch = async (
+  ruleDataClientReader: IRuleDataReader,
+  request: ESSearchRequest,
+  formatAlert?: (alert: AlertDocument) => AlertDocument
+) => {
+  const response = await ruleDataClientReader.search(request);
+  return getHitsWithCount(response, formatAlert);
+};
+
 const getQueryByExecutionUuid = (
   executionUuid: string,
   ruleId: string,
@@ -272,6 +290,7 @@ interface GetAlertsByTimeRangeOpts {
   ruleDataClientReader: IRuleDataReader;
   isLifecycleAlert: boolean;
   excludedAlertInstanceIds: string[];
+  formatAlert?: (alert: AlertDocument) => AlertDocument;
 }
 
 const getAlertsByTimeRange = async ({
@@ -281,6 +300,7 @@ const getAlertsByTimeRange = async ({
   ruleDataClientReader,
   isLifecycleAlert,
   excludedAlertInstanceIds,
+  formatAlert,
 }: GetAlertsByTimeRangeOpts) => {
   if (isLifecycleAlert) {
     return getLifecycleAlertsByTimeRange({
@@ -288,6 +308,7 @@ const getAlertsByTimeRange = async ({
       end,
       ruleId,
       ruleDataClientReader,
+      formatAlert,
      excludedAlertInstanceIds,
     });
   }
@@ -297,6 +318,7 @@ const getAlertsByTimeRange = async ({
     end,
     ruleId,
     ruleDataClientReader,
+    formatAlert,
     excludedAlertInstanceIds,
   });
 };
@@ -306,6 +328,7 @@ interface GetAlertsByTimeRangeHelperOpts {
   end: Date;
   ruleId: string;
   ruleDataClientReader: IRuleDataReader;
+  formatAlert?: (alert: AlertDocument) => AlertDocument;
   excludedAlertInstanceIds: string[];
 }
 
@@ -320,18 +343,16 @@ const getPersistentAlertsByTimeRange = async <TSearchRequest extends ESSearchRequest>({
   end,
   ruleId,
   ruleDataClientReader,
+  formatAlert,
   excludedAlertInstanceIds,
 }: GetAlertsByTimeRangeHelperOpts) => {
   // persistent alerts only create new alerts so query for all alerts within the time
   // range and treat them as NEW
   const request = getQueryByTimeRange(start, end, ruleId, excludedAlertInstanceIds);
-  const response = (await ruleDataClientReader.search(request)) as ESSearchResponse<
-    AlertDocument,
-    TSearchRequest
-  >;
+  const response = await doSearch(ruleDataClientReader, request, formatAlert);
 
   return {
-    new: getHitsWithCount(response),
+    new: response,
     ongoing: {
       count: 0,
       data: [],
@@ -348,6 +369,7 @@ const getLifecycleAlertsByTimeRange = async ({
   end,
   ruleId,
   ruleDataClientReader,
+  formatAlert,
   excludedAlertInstanceIds,
 }: GetAlertsByTimeRangeHelperOpts) => {
   const requests = [
@@ -357,13 +379,13 @@ const getLifecycleAlertsByTimeRange = async ({
   ];
 
   const responses = await Promise.all(
-    requests.map((request) => ruleDataClientReader.search(request))
+    requests.map((request) => doSearch(ruleDataClientReader, request, formatAlert))
   );
 
   return {
-    new: getHitsWithCount(responses[0]),
-    ongoing: getHitsWithCount(responses[1]),
-    recovered: getHitsWithCount(responses[2]),
+    new: responses[0],
+    ongoing: responses[1],
+    recovered: responses[2],
   };
 };
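Not part of the diff: for reviewers, a minimal sketch of how a rule type could opt into the new `formatAlert` hook, mirroring the non-lifecycle usage exercised by the new test above. The `ruleDataClient` wiring, the `declare const`, and the `ALERT_ACTION_GROUP` value are illustrative assumptions rather than code from this change; import paths are written as if the snippet lived alongside `create_get_summarized_alerts_fn.ts`.

```ts
import { ALERT_ACTION_GROUP } from '../../common/technical_rule_data_field_names';
import { IRuleDataClient } from '../rule_data_client';
import { AlertDocument, createGetSummarizedAlertsFn } from './create_get_summarized_alerts_fn';

// Assumed to be provided by the rule type's alerts-as-data registration.
declare const ruleDataClient: IRuleDataClient;

// The optional `formatAlert` hook runs on every matching alert document before it is
// expanded and placed into the new/ongoing/recovered buckets of the summarized result.
const getSummarizedAlerts = createGetSummarizedAlertsFn({
  ruleDataClient,
  useNamespace: true,
  isLifecycleAlert: false,
  formatAlert: (alert: AlertDocument) => ({
    ...alert,
    [ALERT_ACTION_GROUP]: 'default', // illustrative value only
  }),
})();
```

The returned function is then called with either an `executionUuid` or a `start`/`end` range exactly as before; the behavioral change in this diff is that `doSearch`/`getHitsWithCount` apply `formatAlert` to each hit before `expandFlattenedAlert`.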