@@ -475,6 +475,8 @@ public class RestMLRAGSearchProcessorIT extends MLCommonsRestTestCase {
     protected ClassLoader classLoader = RestMLRAGSearchProcessorIT.class.getClassLoader();
     private static final String INDEX_NAME = "test";
 
+    private static final String ML_RAG_REMOTE_MODEL_GROUP = "rag_remote_model_group";
+
     // "client" gets initialized by the test framework at the instance level
     // so we perform this per test case, not via @BeforeClass.
     @Before
@@ -528,7 +530,7 @@ public void testBM25WithOpenAI() throws Exception {
         Response response = createConnector(OPENAI_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("openAI-GPT-3.5 completions", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "openAI-GPT-3.5 completions", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -582,7 +584,7 @@ public void testBM25WithOpenAIWithImage() throws Exception {
         Response response = createConnector(OPENAI_4o_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("openAI-GPT-4o-mini completions", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "openAI-GPT-4o-mini completions", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -595,7 +597,7 @@ public void testBM25WithOpenAIWithImage() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithOpenAI";
+        pipelineParameters.tag = "testBM25WithOpenAIWithImage";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -669,7 +671,7 @@ public void testBM25WithBedrock() throws Exception {
         Response response = createConnector(BEDROCK_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("Bedrock Anthropic Claude", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "Bedrock Anthropic Claude", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -682,7 +684,7 @@ public void testBM25WithBedrock() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithOpenAI";
+        pipelineParameters.tag = "testBM25WithBedrock";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -721,7 +723,7 @@ public void testBM25WithBedrockConverse() throws Exception {
         Response response = createConnector(BEDROCK_CONVERSE_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("Bedrock Anthropic Claude", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "Bedrock Anthropic Claude", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -734,7 +736,7 @@ public void testBM25WithBedrockConverse() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithOpenAI";
+        pipelineParameters.tag = "testBM25WithBedrockConverse";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -773,7 +775,7 @@ public void testBM25WithBedrockConverseUsingLlmMessages() throws Exception {
         Response response = createConnector(BEDROCK_CONVERSE_CONNECTOR_BLUEPRINT2);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("Bedrock Anthropic Claude", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "Bedrock Anthropic Claude", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -786,7 +788,7 @@ public void testBM25WithBedrockConverseUsingLlmMessages() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithOpenAI";
+        pipelineParameters.tag = "testBM25WithBedrockConverseUsingLlmMessages";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -833,7 +835,7 @@ public void testBM25WithBedrockConverseUsingLlmMessagesForDocumentChat() throws
         Response response = createConnector(BEDROCK_DOCUMENT_CONVERSE_CONNECTOR_BLUEPRINT2);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("Bedrock Anthropic Claude", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "Bedrock Anthropic Claude", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -846,7 +848,7 @@ public void testBM25WithBedrockConverseUsingLlmMessagesForDocumentChat() throws
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithOpenAI";
+        pipelineParameters.tag = "testBM25WithBedrockConverseUsingLlmMessagesForDocumentChat";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         // pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -892,7 +894,7 @@ public void testBM25WithOpenAIWithConversation() throws Exception {
         Response response = createConnector(OPENAI_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("openAI-GPT-3.5 completions", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "openAI-GPT-3.5 completions", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -905,7 +907,7 @@ public void testBM25WithOpenAIWithConversation() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithOpenAI";
+        pipelineParameters.tag = "testBM25WithOpenAIWithConversation";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -949,7 +951,7 @@ public void testBM25WithOpenAIWithConversationAndImage() throws Exception {
         Response response = createConnector(OPENAI_4o_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("openAI-GPT-4 completions", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "openAI-GPT-4 completions", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -962,7 +964,7 @@ public void testBM25WithOpenAIWithConversationAndImage() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithOpenAI";
+        pipelineParameters.tag = "testBM25WithOpenAIWithConversationAndImage";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -1010,7 +1012,7 @@ public void testBM25WithBedrockWithConversation() throws Exception {
         Response response = createConnector(BEDROCK_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("Bedrock", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "Bedrock", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -1023,7 +1025,7 @@ public void testBM25WithBedrockWithConversation() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithBedrock";
+        pipelineParameters.tag = "testBM25WithBedrockWithConversation";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";
@@ -1067,7 +1069,7 @@ public void testBM25WithCohere() throws Exception {
         Response response = createConnector(COHERE_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("Cohere Chat Completion v1", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "Cohere Chat Completion v1", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -1119,7 +1121,7 @@ public void testBM25WithCohereUsingLlmResponseField() throws Exception {
         Response response = createConnector(COHERE_CONNECTOR_BLUEPRINT);
         Map responseMap = parseResponseToMap(response);
         String connectorId = (String) responseMap.get("connector_id");
-        response = RestMLRemoteInferenceIT.registerRemoteModel("Cohere Chat Completion v1", connectorId);
+        response = RestMLRemoteInferenceIT.registerRemoteModel(ML_RAG_REMOTE_MODEL_GROUP, "Cohere Chat Completion v1", connectorId);
         responseMap = parseResponseToMap(response);
         String taskId = (String) responseMap.get("task_id");
         waitForTask(taskId, MLTaskState.COMPLETED);
@@ -1132,7 +1134,7 @@ public void testBM25WithCohereUsingLlmResponseField() throws Exception {
         waitForTask(taskId, MLTaskState.COMPLETED);
 
         PipelineParameters pipelineParameters = new PipelineParameters();
-        pipelineParameters.tag = "testBM25WithCohereLlmResponseField";
+        pipelineParameters.tag = "testBM25WithCohereUsingLlmResponseField";
         pipelineParameters.description = "desc";
         pipelineParameters.modelId = modelId;
         pipelineParameters.systemPrompt = "You are a helpful assistant";