From b4a863e8dfe8017d96b43fd64ba526dcc62a4137 Mon Sep 17 00:00:00 2001 From: Marius Iversen Date: Thu, 23 Jan 2025 14:44:26 +0100 Subject: [PATCH] [Rule Migration] Add inference connector as supported LLM type (#208032) ## Summary Adds .inference as a supported type, so it can be tested with EIS both with custom providers and the default EIS provider. (cherry picked from commit 811c539fff9fe6ae0b25cd16e28b95eb9127172a) --- .../onboarding_body/cards/common/connectors/constants.ts | 2 +- .../rules/task/util/actions_client_chat.ts | 8 +++++--- 2 files changed, 6 insertions(+), 4 deletions(-) diff --git a/x-pack/solutions/security/plugins/security_solution/public/onboarding/components/onboarding_body/cards/common/connectors/constants.ts b/x-pack/solutions/security/plugins/security_solution/public/onboarding/components/onboarding_body/cards/common/connectors/constants.ts index 5c9c94e369854..1a41d3e5562dc 100644 --- a/x-pack/solutions/security/plugins/security_solution/public/onboarding/components/onboarding_body/cards/common/connectors/constants.ts +++ b/x-pack/solutions/security/plugins/security_solution/public/onboarding/components/onboarding_body/cards/common/connectors/constants.ts @@ -5,4 +5,4 @@ * 2.0. 
*/ -export const AIActionTypeIds = ['.bedrock', '.gen-ai', '.gemini']; +export const AIActionTypeIds = ['.bedrock', '.gen-ai', '.gemini', '.inference']; diff --git a/x-pack/solutions/security/plugins/security_solution/server/lib/siem_migrations/rules/task/util/actions_client_chat.ts b/x-pack/solutions/security/plugins/security_solution/server/lib/siem_migrations/rules/task/util/actions_client_chat.ts index 1659862543078..555662c8312c9 100644 --- a/x-pack/solutions/security/plugins/security_solution/server/lib/siem_migrations/rules/task/util/actions_client_chat.ts +++ b/x-pack/solutions/security/plugins/security_solution/server/lib/siem_migrations/rules/task/util/actions_client_chat.ts @@ -5,16 +5,16 @@ * 2.0. */ +import type { ActionsClient } from '@kbn/actions-plugin/server'; +import type { Logger } from '@kbn/core/server'; import type { ActionsClientSimpleChatModel } from '@kbn/langchain/server'; import { ActionsClientBedrockChatModel, ActionsClientChatOpenAI, ActionsClientChatVertexAI, } from '@kbn/langchain/server'; -import type { Logger } from '@kbn/core/server'; -import type { ActionsClient } from '@kbn/actions-plugin/server'; -import type { ActionsClientChatOpenAIParams } from '@kbn/langchain/server/language_models/chat_openai'; import type { CustomChatModelInput as ActionsClientBedrockChatModelParams } from '@kbn/langchain/server/language_models/bedrock_chat'; +import type { ActionsClientChatOpenAIParams } from '@kbn/langchain/server/language_models/chat_openai'; import type { CustomChatModelInput as ActionsClientChatVertexAIParams } from '@kbn/langchain/server/language_models/gemini_chat'; import type { CustomChatModelInput as ActionsClientSimpleChatModelParams } from '@kbn/langchain/server/language_models/simple_chat_model'; @@ -39,6 +39,7 @@ const llmTypeDictionary: Record = { [`.gen-ai`]: `openai`, [`.bedrock`]: `bedrock`, [`.gemini`]: `gemini`, + [`.inference`]: `inference`, }; export class ActionsClientChat { @@ -83,6 +84,7 @@ export class 
ActionsClientChat { case 'gemini': return ActionsClientChatVertexAI; case 'openai': + case 'inference': default: return ActionsClientChatOpenAI; }