From a70d37622a95dea5ad411a9b6babd21c35b77518 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 5 Sep 2024 12:23:03 +0100
Subject: [PATCH 01/11] Addressed latest comments
---
.../OllamaTestHelper.cs | 50 -------------------
.../OllamaTextEmbeddingGenerationTests.cs | 3 +-
.../Services/OllamaTextGenerationTests.cs | 2 +-
.../OllamaKernelBuilderExtensions.cs | 12 +++++
.../OllamaServiceCollectionExtensions.cs | 12 +++++
.../OllamaPromptExecutionSettings.cs | 2 +-
6 files changed, 28 insertions(+), 53 deletions(-)
delete mode 100644 dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaTestHelper.cs
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaTestHelper.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaTestHelper.cs
deleted file mode 100644
index 33d2c24c87e3..000000000000
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaTestHelper.cs
+++ /dev/null
@@ -1,50 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.IO;
-using System.Net.Http;
-using System.Threading;
-using System.Threading.Tasks;
-using Moq;
-using Moq.Protected;
-
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
-
-///
-/// Helper for HuggingFace test purposes.
-///
-internal static class OllamaTestHelper
-{
- ///
- /// Reads test response from file for mocking purposes.
- ///
- /// Name of the file with test response.
- internal static string GetTestResponse(string fileName)
- {
- return File.ReadAllText($"./TestData/{fileName}");
- }
-
- internal static ReadOnlyMemory GetTestResponseBytes(string fileName)
- {
- return File.ReadAllBytes($"./TestData/{fileName}");
- }
-
- ///
- /// Returns mocked instance of .
- ///
- /// Message to return for mocked .
- internal static HttpClientHandler GetHttpClientHandlerMock(HttpResponseMessage httpResponseMessage)
- {
- var httpClientHandler = new Mock();
-
- httpClientHandler
- .Protected()
- .Setup>(
- "SendAsync",
- ItExpr.IsAny(),
- ItExpr.IsAny())
- .ReturnsAsync(httpResponseMessage);
-
- return httpClientHandler.Object;
- }
-}
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
index 53462080eb06..c1348b1009d6 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
@@ -2,6 +2,7 @@
using System;
using System.Collections.Generic;
+using System.IO;
using System.Linq;
using System.Net.Http;
using System.Text.Json;
@@ -20,7 +21,7 @@ public sealed class OllamaTextEmbeddingGenerationTests : IDisposable
public OllamaTextEmbeddingGenerationTests()
{
this._messageHandlerStub = new HttpMessageHandlerStub();
- this._messageHandlerStub.ResponseToReturn.Content = new StringContent(OllamaTestHelper.GetTestResponse("embeddings_test_response.json"));
+ this._messageHandlerStub.ResponseToReturn.Content = new StringContent(File.ReadAllText("TestData/embeddings_test_response.json"));
this._httpClient = new HttpClient(this._messageHandlerStub, false);
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
index e5d9bd6d1884..5d926b00d931 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
@@ -22,7 +22,7 @@ public sealed class OllamaTextGenerationTests : IDisposable
public OllamaTextGenerationTests()
{
this._messageHandlerStub = new HttpMessageHandlerStub();
- this._messageHandlerStub.ResponseToReturn.Content = new StringContent(OllamaTestHelper.GetTestResponse("text_generation_test_response.txt"));
+ this._messageHandlerStub.ResponseToReturn.Content = new StringContent(File.ReadAllText("TestData/text_generation_test_response.txt"));
this._httpClient = new HttpClient(this._messageHandlerStub, false);
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
index fd54d4a535df..96f8340e68ff 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
@@ -17,6 +17,8 @@ namespace Microsoft.SemanticKernel;
///
public static class OllamaKernelBuilderExtensions
{
+ #region Text Generation
+
///
/// Add Ollama Text Generation service to the kernel builder.
///
@@ -68,6 +70,10 @@ public static IKernelBuilder AddOllamaTextGeneration(
return builder;
}
+ #endregion
+
+ #region Chat Completion
+
///
/// Add Ollama Chat Completion service to the kernel builder.
///
@@ -122,6 +128,10 @@ public static IKernelBuilder AddOllamaChatCompletion(
return builder;
}
+ #endregion
+
+ #region Text Embeddings
+
///
/// Add Ollama Text Embeddings Generation service to the kernel builder.
///
@@ -165,4 +175,6 @@ public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
return builder;
}
+
+ #endregion
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
index 6b43227c2a0c..e67328bbd2cb 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
@@ -18,6 +18,8 @@ namespace Microsoft.SemanticKernel;
///
public static class OllamaServiceCollectionExtensions
{
+ #region Text Generation
+
///
/// Add Ollama Text Generation service to the specified service collection.
///
@@ -65,6 +67,10 @@ public static IServiceCollection AddOllamaTextGeneration(
loggerFactory: serviceProvider.GetService()));
}
+ #endregion
+
+ #region Chat Completion
+
///
/// Add Ollama Chat Completion and Text Generation services to the specified service collection.
///
@@ -114,6 +120,10 @@ public static IServiceCollection AddOllamaChatCompletion(
loggerFactory: serviceProvider.GetService()));
}
+ #endregion
+
+ #region Text Embeddings
+
///
/// Add Ollama Text Embedding Generation services to the kernel builder.
///
@@ -162,4 +172,6 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
ollamaClient: ollamaClient,
loggerFactory: serviceProvider.GetService()));
}
+
+ #endregion
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
index 53ba15639008..237894d87696 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
@@ -111,7 +111,7 @@ public float? Temperature
}
}
- #region private ================================================================================
+ #region private
private List? _stop;
private float? _temperature;
From fbfa09f43970312cf9921509fdf0644885143401 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 5 Sep 2024 13:52:46 +0100
Subject: [PATCH 02/11] Current changes
---
dotnet/Directory.Packages.props | 2 +-
.../Core/OllamaChatResponseStreamer.cs | 26 ----
.../Connectors.Ollama/OllamaMetadata.cs | 145 ------------------
.../OllamaPromptExecutionSettings.cs | 43 ++++--
.../Services/OllamaChatCompletionService.cs | 69 ++++++---
.../OllamaTextEmbeddingGenerationService.cs | 6 +-
.../Services/OllamaTextGenerationService.cs | 52 ++++++-
7 files changed, 123 insertions(+), 220 deletions(-)
delete mode 100644 dotnet/src/Connectors/Connectors.Ollama/Core/OllamaChatResponseStreamer.cs
delete mode 100644 dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs
diff --git a/dotnet/Directory.Packages.props b/dotnet/Directory.Packages.props
index daf99c0a118b..3b7444d13794 100644
--- a/dotnet/Directory.Packages.props
+++ b/dotnet/Directory.Packages.props
@@ -39,7 +39,7 @@
-
+
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Core/OllamaChatResponseStreamer.cs b/dotnet/src/Connectors/Connectors.Ollama/Core/OllamaChatResponseStreamer.cs
deleted file mode 100644
index 6a7818e100f5..000000000000
--- a/dotnet/src/Connectors/Connectors.Ollama/Core/OllamaChatResponseStreamer.cs
+++ /dev/null
@@ -1,26 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Collections.Concurrent;
-using OllamaSharp.Models.Chat;
-using OllamaSharp.Streamer;
-
-namespace Microsoft.SemanticKernel.Connectors.Ollama.Core;
-
-internal class OllamaChatResponseStreamer : IResponseStreamer
-{
- private readonly ConcurrentQueue _messages = new();
- public void Stream(ChatResponseStream stream)
- {
- if (stream.Message?.Content is null)
- {
- return;
- }
-
- this._messages.Enqueue(stream.Message.Content);
- }
-
- public bool TryGetMessage(out string result)
- {
- return this._messages.TryDequeue(out result);
- }
-}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs b/dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs
deleted file mode 100644
index fd7aba01819b..000000000000
--- a/dotnet/src/Connectors/Connectors.Ollama/OllamaMetadata.cs
+++ /dev/null
@@ -1,145 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System.Collections.Generic;
-using System.Collections.ObjectModel;
-using System.Runtime.CompilerServices;
-using OllamaSharp.Models;
-using OllamaSharp.Models.Chat;
-
-namespace Microsoft.SemanticKernel.Connectors.Ollama;
-
-///
-/// Represents the metadata of the Ollama response.
-///
-public sealed class OllamaMetadata : ReadOnlyDictionary
-{
- internal OllamaMetadata(GenerateCompletionResponseStream? ollamaResponse) : base(new Dictionary())
- {
- if (ollamaResponse is null)
- {
- return;
- }
-
- this.CreatedAt = ollamaResponse.CreatedAt;
- this.Done = ollamaResponse.Done;
-
- if (ollamaResponse is GenerateCompletionDoneResponseStream doneResponse)
- {
- this.TotalDuration = doneResponse.TotalDuration;
- this.EvalCount = doneResponse.EvalCount;
- this.EvalDuration = doneResponse.EvalDuration;
- this.LoadDuration = doneResponse.LoadDuration;
- this.PromptEvalCount = doneResponse.PromptEvalCount;
- this.PromptEvalDuration = doneResponse.PromptEvalDuration;
- }
- }
-
- internal OllamaMetadata(ChatResponseStream? message) : base(new Dictionary())
- {
- if (message is null)
- {
- return;
- }
- this.CreatedAt = message?.CreatedAt;
- this.Done = message?.Done;
-
- if (message is ChatDoneResponseStream doneMessage)
- {
- this.TotalDuration = doneMessage.TotalDuration;
- this.EvalCount = doneMessage.EvalCount;
- this.EvalDuration = doneMessage.EvalDuration;
- this.LoadDuration = doneMessage.LoadDuration;
- this.PromptEvalCount = doneMessage.PromptEvalCount;
- this.PromptEvalDuration = doneMessage.PromptEvalDuration;
- }
- }
-
- internal OllamaMetadata(ChatResponse response) : base(new Dictionary())
- {
- this.TotalDuration = response.TotalDuration;
- this.EvalCount = response.EvalCount;
- this.EvalDuration = response.EvalDuration;
- this.CreatedAt = response.CreatedAt;
- this.LoadDuration = response.LoadDuration;
- this.PromptEvalDuration = response.PromptEvalDuration;
- this.CreatedAt = response.CreatedAt;
- }
-
- ///
- /// Time spent in nanoseconds evaluating the prompt
- ///
- public long PromptEvalDuration
- {
- get => this.GetValueFromDictionary() as long? ?? 0;
- internal init => this.SetValueInDictionary(value);
- }
-
- ///
- /// Number of tokens in the prompt
- ///
- public int PromptEvalCount
- {
- get => this.GetValueFromDictionary() as int? ?? 0;
- internal init => this.SetValueInDictionary(value);
- }
-
- ///
- /// Time spent in nanoseconds loading the model
- ///
- public long LoadDuration
- {
- get => this.GetValueFromDictionary() as long? ?? 0;
- internal init => this.SetValueInDictionary(value);
- }
-
- ///
- /// Returns the prompt's feedback related to the content filters.
- ///
- public string? CreatedAt
- {
- get => this.GetValueFromDictionary() as string;
- internal init => this.SetValueInDictionary(value);
- }
-
- ///
- /// The response is done
- ///
- public bool? Done
- {
- get => this.GetValueFromDictionary() as bool?;
- internal init => this.SetValueInDictionary(value);
- }
-
- ///
- /// Time in nano seconds spent generating the response
- ///
- public long EvalDuration
- {
- get => this.GetValueFromDictionary() as long? ?? 0;
- internal init => this.SetValueInDictionary(value);
- }
-
- ///
- /// Number of tokens the response
- ///
- public int EvalCount
- {
- get => this.GetValueFromDictionary() as int? ?? 0;
- internal init => this.SetValueInDictionary(value);
- }
-
- ///
- /// Time spent in nanoseconds generating the response
- ///
- public long TotalDuration
- {
- get => this.GetValueFromDictionary() as long? ?? 0;
- internal init => this.SetValueInDictionary(value);
- }
-
- private void SetValueInDictionary(object? value, [CallerMemberName] string propertyName = "")
- => this.Dictionary[propertyName] = value;
-
- private object? GetValueFromDictionary([CallerMemberName] string propertyName = "")
- => this.Dictionary.TryGetValue(propertyName, out var value) ? value : null;
-}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
index 237894d87696..134ed5b8b45b 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
@@ -2,17 +2,28 @@
using System;
using System.Collections.Generic;
+using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.Text;
+using OllamaSharp.Models;
namespace Microsoft.SemanticKernel.Connectors.Ollama;
///
-/// Ollama Prompt Execution Settings.
+/// Ollama Chat Completion and Text Generation Execution Settings.
///
public sealed class OllamaPromptExecutionSettings : PromptExecutionSettings
{
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// Optional breaking glass request options
+ public OllamaPromptExecutionSettings(RequestOptions? requestOptions = null)
+ {
+ this._requestOptions = requestOptions ?? new();
+ }
+
///
/// Gets the specialization for the Ollama execution settings.
///
@@ -43,18 +54,18 @@ public static OllamaPromptExecutionSettings FromExecutionSettings(PromptExecutio
///
/// Sets the stop sequences to use. When this pattern is encountered the
/// LLM will stop generating text and return. Multiple stop patterns may
- /// be set by specifying multiple separate stop parameters in a modelfile.
+ /// be set by specifying multiple separate stop parameters in a model file.
///
[JsonPropertyName("stop")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public List? Stop
+ public IList? Stop
{
- get => this._stop;
+ get => this._requestOptions.Stop;
set
{
this.ThrowIfFrozen();
- this._stop = value;
+ this._requestOptions.Stop = value?.ToArray();
}
}
@@ -67,12 +78,12 @@ public List? Stop
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? TopK
{
- get => this._topK;
+ get => this._requestOptions.TopK;
set
{
this.ThrowIfFrozen();
- this._topK = value;
+ this._requestOptions.TopK = value;
}
}
@@ -85,12 +96,12 @@ public int? TopK
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public float? TopP
{
- get => this._topP;
+ get => this._requestOptions.TopP;
set
{
this.ThrowIfFrozen();
- this._topP = value;
+ this._requestOptions.TopP = value;
}
}
@@ -102,21 +113,23 @@ public float? TopP
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public float? Temperature
{
- get => this._temperature;
+ get => this._requestOptions.Temperature;
set
{
this.ThrowIfFrozen();
- this._temperature = value;
+ this._requestOptions.Temperature = value;
}
}
+ ///
+ /// The breaking glass request options.
+ ///
+ internal RequestOptions RequestOptions => this._requestOptions;
+
#region private
- private List? _stop;
- private float? _temperature;
- private float? _topP;
- private int? _topK;
+ private readonly RequestOptions _requestOptions;
#endregion
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
index 3d3969bee7d8..0caa420688f9 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
@@ -2,9 +2,9 @@
using System;
using System.Collections.Generic;
-using System.Linq;
using System.Net.Http;
using System.Runtime.CompilerServices;
+using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
@@ -61,16 +61,40 @@ public async Task> GetChatMessageContentsAsync
CancellationToken cancellationToken = default)
{
var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
- var request = CreateChatRequest(chatHistory, settings, this._client.SelectedModel);
+ var request = CreateRequest(chatHistory, settings, this._client.SelectedModel);
+ var chatMessageContent = new ChatMessageContent();
+ var fullContent = new StringBuilder();
+ string? modelId = null;
+ AuthorRole? authorRole = null;
+ List innerContent = [];
- var response = await this._client.Chat(request, cancellationToken).ConfigureAwait(false);
+ await foreach (var responseStreamChunk in this._client.Chat(request, cancellationToken).ConfigureAwait(false))
+ {
+ if (responseStreamChunk is null)
+ {
+ continue;
+ }
+
+ innerContent.Add(responseStreamChunk);
+
+ if (responseStreamChunk.Message.Content is not null)
+ {
+ fullContent.Append(responseStreamChunk.Message.Content);
+ }
+
+ if (responseStreamChunk.Message.Role is not null)
+ {
+ authorRole = GetAuthorRole(responseStreamChunk.Message.Role)!.Value;
+ }
+
+ modelId ??= responseStreamChunk.Model;
+ }
return [new ChatMessageContent(
- role: GetAuthorRole(response.Message.Role) ?? AuthorRole.Assistant,
- content: response.Message.Content,
- modelId: response.Model,
- innerContent: response,
- metadata: new OllamaMetadata(response))];
+ role: authorRole ?? new(),
+ content: fullContent.ToString(),
+ modelId: modelId,
+ innerContent: innerContent)];
}
///
@@ -81,28 +105,30 @@ public async IAsyncEnumerable GetStreamingChatMessa
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
- var request = CreateChatRequest(chatHistory, settings, this._client.SelectedModel);
+ var request = CreateRequest(chatHistory, settings, this._client.SelectedModel);
- await foreach (var message in this._client.StreamChat(request, cancellationToken).ConfigureAwait(false))
+ await foreach (var message in this._client.Chat(request, cancellationToken).ConfigureAwait(false))
{
yield return new StreamingChatMessageContent(
role: GetAuthorRole(message!.Message.Role),
content: message.Message.Content,
modelId: message.Model,
- innerContent: message,
- metadata: new OllamaMetadata(message));
+ innerContent: message);
}
}
- private static AuthorRole? GetAuthorRole(ChatRole? role) => role.ToString().ToUpperInvariant() switch
+ #region Private
+
+ private static AuthorRole? GetAuthorRole(ChatRole? role) => role?.ToString().ToUpperInvariant() switch
{
"USER" => AuthorRole.User,
"ASSISTANT" => AuthorRole.Assistant,
"SYSTEM" => AuthorRole.System,
- _ => null
+ null => null,
+ _ => new AuthorRole(role.ToString())
};
- private static ChatRequest CreateChatRequest(ChatHistory chatHistory, OllamaPromptExecutionSettings settings, string selectedModel)
+ private static ChatRequest CreateRequest(ChatHistory chatHistory, OllamaPromptExecutionSettings settings, string selectedModel)
{
var messages = new List();
foreach (var chatHistoryMessage in chatHistory)
@@ -122,17 +148,14 @@ private static ChatRequest CreateChatRequest(ChatHistory chatHistory, OllamaProm
var request = new ChatRequest
{
- Options = new()
- {
- Temperature = settings.Temperature,
- TopP = settings.TopP,
- TopK = settings.TopK,
- Stop = settings.Stop?.ToArray()
- },
- Messages = messages.ToList(),
+ Options = settings.RequestOptions,
+ Messages = messages,
Model = selectedModel,
Stream = true
};
+
return request;
}
+
+ #endregion
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
index 9e152f917f88..4dae642b6800 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
@@ -59,13 +59,13 @@ public async Task>> GenerateEmbeddingsAsync(
Kernel? kernel = null,
CancellationToken cancellationToken = default)
{
- var request = new GenerateEmbeddingRequest
+ var request = new EmbedRequest
{
Model = this.GetModelId()!,
- Input = data.ToList()
+ Input = data.ToList(),
};
- var response = await this._client.GenerateEmbeddings(request, cancellationToken: cancellationToken).ConfigureAwait(false);
+ var response = await this._client.Embed(request, cancellationToken: cancellationToken).ConfigureAwait(false);
List> embeddings = [];
foreach (var embedding in response.Embeddings)
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
index 29acd5f342c5..4ce901345e37 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
@@ -4,12 +4,15 @@
using System.Collections.Generic;
using System.Net.Http;
using System.Runtime.CompilerServices;
+using System.Text;
using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel.Connectors.Ollama.Core;
using Microsoft.SemanticKernel.TextGeneration;
using OllamaSharp;
+using OllamaSharp.Models;
+using OllamaSharp.Models.Chat;
namespace Microsoft.SemanticKernel.Connectors.Ollama;
@@ -58,13 +61,30 @@ public async Task> GetTextContentsAsync(
Kernel? kernel = null,
CancellationToken cancellationToken = default)
{
- var content = await this._client.GetCompletion(prompt, null, cancellationToken).ConfigureAwait(false);
+ var fullContent = new StringBuilder();
+ List innerContent = [];
+ string? modelId = null;
- return [new(content.Response, modelId: this._client.SelectedModel, innerContent: content, metadata:
- new Dictionary()
+ var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
+ var request = CreateRequest(settings, this._client.SelectedModel);
+
+ await foreach (var responseStreamChunk in this._client.Generate(request, cancellationToken).ConfigureAwait(false))
+ {
+ if (responseStreamChunk is null)
{
- ["Context"] = content.Context
- })];
+ continue;
+ }
+
+ innerContent.Add(responseStreamChunk);
+ fullContent.Append(responseStreamChunk.Response);
+
+ modelId ??= responseStreamChunk.Model;
+ }
+
+ return [new TextContent(
+ text: fullContent.ToString(),
+ modelId: modelId,
+ innerContent: innerContent)];
}
///
@@ -74,9 +94,27 @@ public async IAsyncEnumerable GetStreamingTextContentsAsyn
Kernel? kernel = null,
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
- await foreach (var content in this._client.StreamCompletion(prompt, null, cancellationToken).ConfigureAwait(false))
+ var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
+ var request = CreateRequest(settings, this._client.SelectedModel);
+
+ await foreach (var content in this._client.Generate(request, cancellationToken).ConfigureAwait(false))
{
- yield return new StreamingTextContent(content?.Response, modelId: content?.Model, innerContent: content, metadata: new OllamaMetadata(content));
+ yield return new StreamingTextContent(
+ text: content?.Response,
+ modelId: content?.Model,
+ innerContent: content);
}
}
+
+ private static GenerateRequest CreateRequest(OllamaPromptExecutionSettings settings, string selectedModel)
+ {
+ var request = new GenerateRequest
+ {
+ Options = settings.RequestOptions,
+ Model = selectedModel,
+ Stream = true
+ };
+
+ return request;
+ }
}
From f34a14d4fddc0acce6f0a8b5abf2b9b1fa16ca44 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Thu, 5 Sep 2024 14:41:37 +0100
Subject: [PATCH 03/11] Breaking Glass Settings reverse
---
.../Connectors.Ollama/Core/ServiceBase.cs | 18 +-------
.../Services/OllamaChatCompletionService.cs | 14 +++++--
.../Services/OllamaTextGenerationService.cs | 9 +++-
.../OllamaPromptExecutionSettings.cs | 41 +++++++------------
4 files changed, 33 insertions(+), 49 deletions(-)
rename dotnet/src/Connectors/Connectors.Ollama/{ => Settings}/OllamaPromptExecutionSettings.cs (74%)
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs b/dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs
index 57b19adb0442..70d74a68b4b4 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Core/ServiceBase.cs
@@ -18,11 +18,11 @@ public abstract class ServiceBase
///
/// Attributes of the service.
///
- internal Dictionary AttributesInternal { get; } = new();
+ internal Dictionary AttributesInternal { get; } = [];
internal readonly OllamaApiClient _client;
internal ServiceBase(string model,
- Uri endpoint,
+ Uri? endpoint,
HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
{
@@ -31,20 +31,6 @@ internal ServiceBase(string model,
if (httpClient is not null)
{
- httpClient.BaseAddress ??= endpoint;
-
- // Try to add User-Agent header.
- if (!httpClient.DefaultRequestHeaders.TryGetValues("User-Agent", out _))
- {
- httpClient.DefaultRequestHeaders.Add("User-Agent", HttpHeaderConstant.Values.UserAgent);
- }
-
- // Try to add Semantic Kernel Version header
- if (!httpClient.DefaultRequestHeaders.TryGetValues(HttpHeaderConstant.Names.SemanticKernelVersion, out _))
- {
- httpClient.DefaultRequestHeaders.Add(HttpHeaderConstant.Names.SemanticKernelVersion, HttpHeaderConstant.Values.GetAssemblyVersion(typeof(Kernel)));
- }
-
this._client = new(httpClient, model);
}
else
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
index 0caa420688f9..acaad26753fa 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
@@ -61,7 +61,7 @@ public async Task> GetChatMessageContentsAsync
CancellationToken cancellationToken = default)
{
var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
- var request = CreateRequest(chatHistory, settings, this._client.SelectedModel);
+ var request = CreateChatRequest(chatHistory, settings, this._client.SelectedModel);
var chatMessageContent = new ChatMessageContent();
var fullContent = new StringBuilder();
string? modelId = null;
@@ -105,7 +105,7 @@ public async IAsyncEnumerable GetStreamingChatMessa
[EnumeratorCancellation] CancellationToken cancellationToken = default)
{
var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
- var request = CreateRequest(chatHistory, settings, this._client.SelectedModel);
+ var request = CreateChatRequest(chatHistory, settings, this._client.SelectedModel);
await foreach (var message in this._client.Chat(request, cancellationToken).ConfigureAwait(false))
{
@@ -128,7 +128,7 @@ public async IAsyncEnumerable GetStreamingChatMessa
_ => new AuthorRole(role.ToString())
};
- private static ChatRequest CreateRequest(ChatHistory chatHistory, OllamaPromptExecutionSettings settings, string selectedModel)
+ private static ChatRequest CreateChatRequest(ChatHistory chatHistory, OllamaPromptExecutionSettings settings, string selectedModel)
{
var messages = new List();
foreach (var chatHistoryMessage in chatHistory)
@@ -148,7 +148,13 @@ private static ChatRequest CreateRequest(ChatHistory chatHistory, OllamaPromptEx
var request = new ChatRequest
{
- Options = settings.RequestOptions,
+ Options = new()
+ {
+ Temperature = settings.Temperature,
+ TopP = settings.TopP,
+ TopK = settings.TopK,
+ Stop = settings.Stop?.ToArray()
+ },
Messages = messages,
Model = selectedModel,
Stream = true
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
index 4ce901345e37..dc475a9fcc65 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
@@ -12,7 +12,6 @@
using Microsoft.SemanticKernel.TextGeneration;
using OllamaSharp;
using OllamaSharp.Models;
-using OllamaSharp.Models.Chat;
namespace Microsoft.SemanticKernel.Connectors.Ollama;
@@ -110,7 +109,13 @@ private static GenerateRequest CreateRequest(OllamaPromptExecutionSettings setti
{
var request = new GenerateRequest
{
- Options = settings.RequestOptions,
+ Options = new()
+ {
+ Temperature = settings.Temperature,
+ TopP = settings.TopP,
+ TopK = settings.TopK,
+ Stop = settings.Stop?.ToArray()
+ },
Model = selectedModel,
Stream = true
};
diff --git a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs b/dotnet/src/Connectors/Connectors.Ollama/Settings/OllamaPromptExecutionSettings.cs
similarity index 74%
rename from dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
rename to dotnet/src/Connectors/Connectors.Ollama/Settings/OllamaPromptExecutionSettings.cs
index 134ed5b8b45b..30032bb981d4 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/OllamaPromptExecutionSettings.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Settings/OllamaPromptExecutionSettings.cs
@@ -2,28 +2,17 @@
using System;
using System.Collections.Generic;
-using System.Linq;
using System.Text.Json;
using System.Text.Json.Serialization;
using Microsoft.SemanticKernel.Text;
-using OllamaSharp.Models;
namespace Microsoft.SemanticKernel.Connectors.Ollama;
///
-/// Ollama Chat Completion and Text Generation Execution Settings.
+/// Ollama Prompt Execution Settings.
///
public sealed class OllamaPromptExecutionSettings : PromptExecutionSettings
{
- ///
- /// Initializes a new instance of the class.
- ///
- /// Optional breaking glass request options
- public OllamaPromptExecutionSettings(RequestOptions? requestOptions = null)
- {
- this._requestOptions = requestOptions ?? new();
- }
-
///
/// Gets the specialization for the Ollama execution settings.
///
@@ -58,14 +47,14 @@ public static OllamaPromptExecutionSettings FromExecutionSettings(PromptExecutio
///
[JsonPropertyName("stop")]
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
- public IList? Stop
+ public List? Stop
{
- get => this._requestOptions.Stop;
+ get => this._stop;
set
{
this.ThrowIfFrozen();
- this._requestOptions.Stop = value?.ToArray();
+ this._stop = value;
}
}
@@ -78,12 +67,12 @@ public IList? Stop
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public int? TopK
{
- get => this._requestOptions.TopK;
+ get => this._topK;
set
{
this.ThrowIfFrozen();
- this._requestOptions.TopK = value;
+ this._topK = value;
}
}
@@ -96,12 +85,12 @@ public int? TopK
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public float? TopP
{
- get => this._requestOptions.TopP;
+ get => this._topP;
set
{
this.ThrowIfFrozen();
- this._requestOptions.TopP = value;
+ this._topP = value;
}
}
@@ -113,23 +102,21 @@ public float? TopP
[JsonIgnore(Condition = JsonIgnoreCondition.WhenWritingNull)]
public float? Temperature
{
- get => this._requestOptions.Temperature;
+ get => this._temperature;
set
{
this.ThrowIfFrozen();
- this._requestOptions.Temperature = value;
+ this._temperature = value;
}
}
- ///
- /// The breaking glass request options.
- ///
- internal RequestOptions RequestOptions => this._requestOptions;
-
#region private
- private readonly RequestOptions _requestOptions;
+ private List? _stop;
+ private float? _temperature;
+ private float? _topP;
+ private int? _topK;
#endregion
}
From de899537af35cc659292def9fc7b4179a8202d76 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 08:59:16 +0100
Subject: [PATCH 04/11] Address PR Comments
---
.../OpenAI_ChatCompletionStreaming.cs | 2 +-
.../Connectors.Ollama.UnitTests.csproj | 1 +
.../HttpMessageHandlerStub.cs | 48 ----------
.../Services/OllamaChatCompletionTests.cs | 56 ++----------
.../OllamaTextEmbeddingGenerationTests.cs | 44 +---------
.../Services/OllamaTextGenerationTests.cs | 60 +++----------
.../text_generation_test_response_stream.txt | 9 +-
.../OllamaKernelBuilderExtensions.cs | 87 +++++++++++++++----
.../OllamaServiceCollectionExtensions.cs | 74 +++++++++++++++-
.../Services/OllamaChatCompletionService.cs | 27 ++++--
.../OllamaTextEmbeddingGenerationService.cs | 23 ++++-
.../Services/OllamaTextGenerationService.cs | 23 ++++-
12 files changed, 226 insertions(+), 228 deletions(-)
delete mode 100644 dotnet/src/Connectors/Connectors.Ollama.UnitTests/HttpMessageHandlerStub.cs
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
index bd1285e29af3..c63f6ce37a8f 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
@@ -99,7 +99,7 @@ public async Task StreamFunctionCallContentAsync()
OpenAIPromptExecutionSettings settings = new() { ToolCallBehavior = ToolCallBehavior.EnableKernelFunctions };
// Create chat history with initial user question
- ChatHistory chatHistory = new();
+ ChatHistory chatHistory = [];
chatHistory.AddUserMessage("Hi, what is the current time?");
// Start streaming chat based on the chat history
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Connectors.Ollama.UnitTests.csproj b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Connectors.Ollama.UnitTests.csproj
index 489e1b416d89..78afaac82621 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Connectors.Ollama.UnitTests.csproj
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Connectors.Ollama.UnitTests.csproj
@@ -33,6 +33,7 @@
+
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/HttpMessageHandlerStub.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/HttpMessageHandlerStub.cs
deleted file mode 100644
index 0da4dfa3d098..000000000000
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/HttpMessageHandlerStub.cs
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright (c) Microsoft. All rights reserved.
-
-using System;
-using System.Net.Http;
-using System.Net.Http.Headers;
-using System.Text;
-using System.Threading;
-using System.Threading.Tasks;
-
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
-
-internal sealed class HttpMessageHandlerStub : DelegatingHandler
-{
- public HttpRequestHeaders? RequestHeaders { get; private set; }
-
- public HttpContentHeaders? ContentHeaders { get; private set; }
-
- public byte[]? RequestContent { get; private set; }
-
- public Uri? RequestUri { get; private set; }
-
- public HttpMethod? Method { get; private set; }
-
- public HttpResponseMessage ResponseToReturn { get; set; }
-
- public HttpMessageHandlerStub()
- {
- this.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK);
- this.ResponseToReturn.Content = new StringContent("{}", Encoding.UTF8, "application/json");
- }
-
- protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
- {
- this.Method = request.Method;
- this.RequestUri = request.RequestUri;
- this.RequestHeaders = request.Headers;
- if (request.Content is not null)
- {
-#pragma warning disable CA2016 // Forward the 'CancellationToken' parameter to methods; overload doesn't exist on .NET Framework
- this.RequestContent = await request.Content.ReadAsByteArrayAsync();
-#pragma warning restore CA2016
- }
-
- this.ContentHeaders = request.Content?.Headers;
-
- return await Task.FromResult(this.ResponseToReturn);
- }
-}
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
index a3cf41d62706..862131e21196 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
@@ -26,52 +26,12 @@ public OllamaChatCompletionTests()
this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") };
}
- [Fact]
- public async Task UserAgentHeaderShouldBeUsedAsync()
- {
- //Arrange
- var sut = new OllamaChatCompletionService(
- "fake-model",
- new Uri("http://localhost:11434"),
- httpClient: this._httpClient);
-
- var chat = new ChatHistory();
- chat.AddMessage(AuthorRole.User, "fake-text");
-
- //Act
- await sut.GetChatMessageContentsAsync(chat);
-
- //Assert
- Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("User-Agent"));
-
- var values = this._messageHandlerStub.RequestHeaders!.GetValues("User-Agent");
- var value = values.SingleOrDefault();
- Assert.Equal("Semantic-Kernel", value);
- }
-
- [Fact]
- public async Task WhenHttpClientDoesNotHaveBaseAddressProvidedEndpointShouldBeUsedAsync()
- {
- //Arrange
- this._httpClient.BaseAddress = null;
- var sut = new OllamaChatCompletionService("fake-model", new Uri("https://fake-random-test-host/fake-path/"), httpClient: this._httpClient);
- var chat = new ChatHistory();
- chat.AddMessage(AuthorRole.User, "fake-text");
-
- //Act
- await sut.GetChatMessageContentsAsync(chat);
-
- //Assert
- Assert.StartsWith("https://fake-random-test-host/fake-path", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase);
- }
-
[Fact]
public async Task ShouldSendPromptToServiceAsync()
{
//Arrange
var sut = new OllamaChatCompletionService(
"fake-model",
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
var chat = new ChatHistory();
chat.AddMessage(AuthorRole.User, "fake-text");
@@ -91,7 +51,6 @@ public async Task ShouldHandleServiceResponseAsync()
//Arrange
var sut = new OllamaChatCompletionService(
"fake-model",
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
var chat = new ChatHistory();
@@ -114,7 +73,6 @@ public async Task GetChatMessageContentsShouldHaveModelAndMetadataAsync()
//Arrange
var sut = new OllamaChatCompletionService(
"phi3",
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
@@ -139,13 +97,12 @@ public async Task GetChatMessageContentsShouldHaveModelAndMetadataAsync()
}
[Fact]
- public async Task GetStreamingChatMessageContentsShouldHaveModelAndMetadataAsync()
+ public async Task GetStreamingChatMessageContentsShouldHaveModelAndInnerContentAsync()
{
//Arrange
var expectedModel = "phi3";
var sut = new OllamaChatCompletionService(
expectedModel,
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
@@ -161,18 +118,17 @@ public async Task GetStreamingChatMessageContentsShouldHaveModelAndMetadataAsync
await foreach (var message in sut.GetStreamingChatMessageContentsAsync(chat))
{
lastMessage = message;
- Assert.NotNull(message.Metadata);
+ Assert.NotNull(message.InnerContent);
}
// Assert
Assert.NotNull(lastMessage!.ModelId);
Assert.Equal(expectedModel, lastMessage.ModelId);
- Assert.IsType(lastMessage.Metadata);
- var metadata = lastMessage.Metadata as OllamaMetadata;
- Assert.NotNull(metadata);
- Assert.NotEmpty(metadata);
- Assert.True(metadata.Done);
+ Assert.IsType(lastMessage.InnerContent);
+ var innerContent = lastMessage.InnerContent as ChatDoneResponseStream;
+ Assert.NotNull(innerContent);
+ Assert.True(innerContent.Done);
}
public void Dispose()
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
index c1348b1009d6..2c1b76ad4410 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
@@ -1,9 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
-using System.Collections.Generic;
using System.IO;
-using System.Linq;
using System.Net.Http;
using System.Text.Json;
using System.Threading.Tasks;
@@ -22,41 +20,7 @@ public OllamaTextEmbeddingGenerationTests()
{
this._messageHandlerStub = new HttpMessageHandlerStub();
this._messageHandlerStub.ResponseToReturn.Content = new StringContent(File.ReadAllText("TestData/embeddings_test_response.json"));
- this._httpClient = new HttpClient(this._messageHandlerStub, false);
- }
-
- [Fact]
- public async Task UserAgentHeaderShouldBeUsedAsync()
- {
- //Arrange
- var sut = new OllamaTextEmbeddingGenerationService(
- "fake-model",
- new Uri("http://localhost:11434"),
- httpClient: this._httpClient);
-
- //Act
- await sut.GenerateEmbeddingsAsync(new List { "fake-text" });
-
- //Assert
- Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("User-Agent"));
-
- var values = this._messageHandlerStub.RequestHeaders!.GetValues("User-Agent");
- var value = values.SingleOrDefault();
- Assert.Equal("Semantic-Kernel", value);
- }
-
- [Fact]
- public async Task WhenHttpClientDoesNotHaveBaseAddressProvidedEndpointShouldBeUsedAsync()
- {
- //Arrange
- this._httpClient.BaseAddress = null;
- var sut = new OllamaTextEmbeddingGenerationService("fake-model", new Uri("https://fake-random-test-host/fake-path/"), httpClient: this._httpClient);
-
- //Act
- await sut.GenerateEmbeddingsAsync(new List { "fake-text" });
-
- //Assert
- Assert.StartsWith("https://fake-random-test-host/fake-path", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase);
+ this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") };
}
[Fact]
@@ -65,14 +29,13 @@ public async Task ShouldSendPromptToServiceAsync()
//Arrange
var sut = new OllamaTextEmbeddingGenerationService(
"fake-model",
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
//Act
await sut.GenerateEmbeddingsAsync(["fake-text"]);
//Assert
- var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
Assert.NotNull(requestPayload);
Assert.Equal("fake-text", requestPayload.Input[0]);
}
@@ -83,11 +46,10 @@ public async Task ShouldHandleServiceResponseAsync()
//Arrange
var sut = new OllamaTextEmbeddingGenerationService(
"fake-model",
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
//Act
- var contents = await sut.GenerateEmbeddingsAsync(new List { "fake-text" });
+ var contents = await sut.GenerateEmbeddingsAsync(["fake-text"]);
//Assert
Assert.NotNull(contents);
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
index 5d926b00d931..c87833fc1c7b 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
@@ -22,58 +22,27 @@ public sealed class OllamaTextGenerationTests : IDisposable
public OllamaTextGenerationTests()
{
this._messageHandlerStub = new HttpMessageHandlerStub();
- this._messageHandlerStub.ResponseToReturn.Content = new StringContent(File.ReadAllText("TestData/text_generation_test_response.txt"));
- this._httpClient = new HttpClient(this._messageHandlerStub, false);
- }
-
- [Fact]
- public async Task UserAgentHeaderShouldBeUsedAsync()
- {
- //Arrange
- var sut = new OllamaTextGenerationService(
- "fake-model",
- new Uri("http://localhost:11434"),
- httpClient: this._httpClient);
-
- //Act
- await sut.GetTextContentsAsync("fake-text");
-
- //Assert
- Assert.True(this._messageHandlerStub.RequestHeaders?.Contains("User-Agent"));
-
- var values = this._messageHandlerStub.RequestHeaders!.GetValues("User-Agent");
- var value = values.SingleOrDefault();
- Assert.Equal("Semantic-Kernel", value);
- }
-
- [Fact]
- public async Task WhenHttpClientDoesNotHaveBaseAddressProvidedEndpointShouldBeUsedAsync()
- {
- //Arrange
- this._httpClient.BaseAddress = null;
- var sut = new OllamaTextGenerationService("fake-model", new Uri("https://fake-random-test-host/fake-path/"), httpClient: this._httpClient);
-
- //Act
- await sut.GetTextContentsAsync("fake-text");
-
- //Assert
- Assert.StartsWith("https://fake-random-test-host/fake-path", this._messageHandlerStub.RequestUri?.AbsoluteUri, StringComparison.OrdinalIgnoreCase);
+ this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StreamContent(File.OpenRead("TestData/text_generation_test_response_stream.txt"))
+ };
+ this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") };
}
[Fact]
public async Task ShouldSendPromptToServiceAsync()
{
//Arrange
+ var expectedModel = "phi3";
var sut = new OllamaTextGenerationService(
- "fake-model",
- new Uri("http://localhost:11434"),
+ expectedModel,
httpClient: this._httpClient);
//Act
await sut.GetTextContentsAsync("fake-text");
//Assert
- var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
Assert.NotNull(requestPayload);
Assert.Equal("fake-text", requestPayload.Prompt);
}
@@ -84,7 +53,6 @@ public async Task ShouldHandleServiceResponseAsync()
//Arrange
var sut = new OllamaTextGenerationService(
"fake-model",
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
//Act
@@ -102,9 +70,9 @@ public async Task ShouldHandleServiceResponseAsync()
public async Task GetTextContentsShouldHaveModelIdDefinedAsync()
{
//Arrange
+ var expectedModel = "phi3";
var sut = new OllamaTextGenerationService(
- "fake-model",
- new Uri("http://localhost:11434"),
+ expectedModel,
httpClient: this._httpClient);
// Act
@@ -112,7 +80,7 @@ public async Task GetTextContentsShouldHaveModelIdDefinedAsync()
// Assert
Assert.NotNull(textContent.ModelId);
- Assert.Equal("fake-model", textContent.ModelId);
+ Assert.Equal(expectedModel, textContent.ModelId);
}
[Fact]
@@ -122,14 +90,8 @@ public async Task GetStreamingTextContentsShouldHaveModelIdDefinedAsync()
var expectedModel = "phi3";
var sut = new OllamaTextGenerationService(
expectedModel,
- new Uri("http://localhost:11434"),
httpClient: this._httpClient);
- this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
- {
- Content = new StreamContent(File.OpenRead("TestData/text_generation_test_response_stream.txt"))
- };
-
// Act
StreamingTextContent? lastTextContent = null;
await foreach (var textContent in sut.GetStreamingTextContentsAsync("Any prompt"))
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response_stream.txt b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response_stream.txt
index f662ae912202..d2fe45f536c9 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response_stream.txt
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response_stream.txt
@@ -1,5 +1,6 @@
-{"model":"phi3","created_at":"2024-07-02T12:22:37.03627019Z","response":" day","done":false}
-{"model":"phi3","created_at":"2024-07-02T12:22:37.048915655Z","response":"light","done":false}
-{"model":"phi3","created_at":"2024-07-02T12:22:37.060968719Z","response":" hours","done":false}
-{"model":"phi3","created_at":"2024-07-02T12:22:37.072390403Z","response":".","done":false}
+{"model":"phi3","created_at":"2024-07-02T12:22:37.03627019Z","response":"This ","done":false}
+{"model":"phi3","created_at":"2024-07-02T12:22:37.048915655Z","response":"is ","done":false}
+{"model":"phi3","created_at":"2024-07-02T12:22:37.060968719Z","response":"test ","done":false}
+{"model":"phi3","created_at":"2024-07-02T12:22:37.072390403Z","response":"completion ","done":false}
+{"model":"phi3","created_at":"2024-07-02T12:22:37.072390403Z","response":"response","done":false}
{"model":"phi3","created_at":"2024-07-02T12:22:37.091017292Z","response":"","done":true,"done_reason":"stop","context":[32010,3750,338,278,14744,7254,29973,32007,32001,450,2769,278,14744,5692,7254,304,502,373,11563,756,304,437,411,278,14801,292,310,6575,4366,491,278,25005,29889,8991,4366,29892,470,4796,3578,29892,338,1754,701,310,263,18272,310,11955,393,508,367,3595,297,263,17251,17729,313,1127,29892,24841,29892,13328,29892,7933,29892,7254,29892,1399,5973,29892,322,28008,1026,467,910,18272,310,11955,338,2998,408,4796,3578,1363,372,3743,599,278,1422,281,6447,1477,29879,12420,4208,29889,13,13,10401,6575,4366,24395,11563,29915,29879,25005,29892,21577,13206,21337,763,21767,307,1885,322,288,28596,14801,20511,29899,29893,6447,1477,3578,313,9539,322,28008,1026,29897,901,1135,5520,29899,29893,6447,1477,3578,313,1127,322,13328,467,4001,1749,5076,526,901,20502,304,7254,3578,322,278,8991,5692,901,4796,515,1749,18520,373,11563,2861,304,445,14801,292,2779,29892,591,17189,573,278,14744,408,7254,29889,13,13,2528,17658,29892,5998,1716,7254,322,28008,1026,281,6447,1477,29879,310,3578,526,29574,22829,491,4799,13206,21337,29892,1749,639,1441,338,451,28482,491,278,28008,1026,2927,1951,5199,5076,526,3109,20502,304,372,29889,12808,29892,6575,4366,20888,11563,29915,29879,7101,756,263,6133,26171,297,278,13328,29899,12692,760,310,278,18272,9401,304,2654,470,28008,1026,11955,2861,304,9596,280,1141,14801,292,29892,607,4340,26371,2925,1749,639,1441,310,278,7254,14744,29889,13,13,797,15837,29892,278,14801,292,310,20511,281,6447,1477,3578,313,9539,322,28008,1026,29897,491,11563,29915,29879,25005,9946,502,304,1074,263,758,24130,10835,7254,14744,2645,2462,4366,6199,29889,32007],"total_duration":64697743903,"load_duration":61368714283,"prompt_eval_count":10,"prompt_eval_duration":40919000,"eval_count":304,"eval_duration":3237325000}
\ No newline at end of file
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
index 96f8340e68ff..0ad8d895bdd7 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaKernelBuilderExtensions.cs
@@ -4,7 +4,6 @@
using System.Net.Http;
using Microsoft.Extensions.DependencyInjection;
using Microsoft.Extensions.Logging;
-using Microsoft.SemanticKernel.ChatCompletion;
using Microsoft.SemanticKernel.Connectors.Ollama;
using Microsoft.SemanticKernel.Http;
using Microsoft.SemanticKernel.TextGeneration;
@@ -41,6 +40,29 @@ public static IKernelBuilder AddOllamaTextGeneration(
new OllamaTextGenerationService(
modelId: modelId,
endpoint: endpoint,
+ loggerFactory: serviceProvider.GetService()));
+ return builder;
+ }
+
+ ///
+ /// Add Ollama Text Generation service to the kernel builder.
+ ///
+ /// The kernel builder.
+ /// The model for text generation.
+ /// The optional service ID.
+ /// The optional custom HttpClient.
+ /// The updated kernel builder.
+ public static IKernelBuilder AddOllamaTextGeneration(
+ this IKernelBuilder builder,
+ string modelId,
+ string? serviceId = null,
+ HttpClient? httpClient = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
+ new OllamaTextGenerationService(
+ modelId: modelId,
httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
loggerFactory: serviceProvider.GetService()));
return builder;
@@ -81,24 +103,38 @@ public static IKernelBuilder AddOllamaTextGeneration(
/// The model for text generation.
/// The endpoint to Ollama hosted service.
/// The optional service ID.
- /// The optional custom HttpClient.
/// The updated kernel builder.
public static IKernelBuilder AddOllamaChatCompletion(
this IKernelBuilder builder,
string modelId,
Uri endpoint,
- string? serviceId = null,
- HttpClient? httpClient = null)
+ string? serviceId = null)
{
Verify.NotNull(builder);
- Verify.NotNull(modelId);
- builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
- new OllamaChatCompletionService(
- modelId: modelId,
- endpoint: endpoint,
- httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
- loggerFactory: serviceProvider.GetService()));
+ builder.Services.AddOllamaChatCompletion(modelId, endpoint, serviceId);
+
+ return builder;
+ }
+
+ ///
+ /// Add Ollama Chat Completion service to the kernel builder.
+ ///
+ /// The kernel builder.
+ /// The model for chat completion.
+ /// The optional custom HttpClient.
+ /// The optional service ID.
+ /// The updated kernel builder.
+ public static IKernelBuilder AddOllamaChatCompletion(
+ this IKernelBuilder builder,
+ string modelId,
+ HttpClient? httpClient = null,
+ string? serviceId = null
+ )
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddOllamaChatCompletion(modelId, httpClient, serviceId);
return builder;
}
@@ -119,11 +155,7 @@ public static IKernelBuilder AddOllamaChatCompletion(
{
Verify.NotNull(builder);
- builder.Services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
- new OllamaChatCompletionService(
- modelId: modelId,
- client: ollamaClient,
- loggerFactory: serviceProvider.GetService()));
+ builder.Services.AddOllamaChatCompletion(modelId, ollamaClient, serviceId);
return builder;
}
@@ -138,19 +170,38 @@ public static IKernelBuilder AddOllamaChatCompletion(
/// The kernel builder.
/// The model for text generation.
/// The endpoint to Ollama hosted service.
- /// The optional custom HttpClient.
/// The optional service ID.
/// The updated kernel builder.
public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
this IKernelBuilder builder,
string modelId,
Uri endpoint,
+ string? serviceId = null)
+ {
+ Verify.NotNull(builder);
+
+ builder.Services.AddOllamaTextEmbeddingGeneration(modelId, endpoint, serviceId);
+
+ return builder;
+ }
+
+ ///
+ /// Add Ollama Text Embedding Generation service to the kernel builder.
+ ///
+ /// The kernel builder.
+ /// The model for text embedding generation.
+ /// The optional custom HttpClient.
+ /// The optional service ID.
+ /// The updated kernel builder.
+ public static IKernelBuilder AddOllamaTextEmbeddingGeneration(
+ this IKernelBuilder builder,
+ string modelId,
HttpClient? httpClient = null,
string? serviceId = null)
{
Verify.NotNull(builder);
- builder.Services.AddOllamaTextEmbeddingGeneration(modelId, endpoint, httpClient, serviceId);
+ builder.Services.AddOllamaTextEmbeddingGeneration(modelId, httpClient, serviceId);
return builder;
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
index e67328bbd2cb..9ef438515e35 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Extensions/OllamaServiceCollectionExtensions.cs
@@ -40,6 +40,28 @@ public static IServiceCollection AddOllamaTextGeneration(
new OllamaTextGenerationService(
modelId: modelId,
endpoint: endpoint,
+ loggerFactory: serviceProvider.GetService()));
+ }
+
+ ///
+ /// Add Ollama Text Generation service to the specified service collection.
+ ///
+ /// The target service collection.
+ /// The model for text generation.
+ /// Optional custom HttpClient, picked from ServiceCollection if not provided.
+ /// The optional service ID.
+ /// The updated service collection.
+ public static IServiceCollection AddOllamaTextGeneration(
+ this IServiceCollection services,
+ string modelId,
+ HttpClient? httpClient = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(services);
+
+ return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
+ new OllamaTextGenerationService(
+ modelId: modelId,
httpClient: HttpClientProvider.GetHttpClient(serviceProvider),
loggerFactory: serviceProvider.GetService()));
}
@@ -91,7 +113,31 @@ public static IServiceCollection AddOllamaChatCompletion(
new OllamaChatCompletionService(
modelId: modelId,
endpoint: endpoint,
- httpClient: HttpClientProvider.GetHttpClient(serviceProvider),
+ loggerFactory: serviceProvider.GetService()));
+
+ return services;
+ }
+
+ ///
+ /// Add Ollama Chat Completion service to the specified service collection.
+ ///
+ /// The target service collection.
+ /// The model for chat completion.
+ /// Optional custom HttpClient, picked from ServiceCollection if not provided.
+ /// Optional service ID.
+ /// The updated service collection.
+ public static IServiceCollection AddOllamaChatCompletion(
+ this IServiceCollection services,
+ string modelId,
+ HttpClient? httpClient = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(services);
+
+ services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
+ new OllamaChatCompletionService(
+ modelId: modelId,
+ httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
loggerFactory: serviceProvider.GetService()));
return services;
@@ -116,7 +162,7 @@ public static IServiceCollection AddOllamaChatCompletion(
return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
new OllamaChatCompletionService(
modelId: modelId,
- client: ollamaClient,
+ ollamaClient: ollamaClient,
loggerFactory: serviceProvider.GetService()));
}
@@ -130,14 +176,12 @@ public static IServiceCollection AddOllamaChatCompletion(
/// The target service collection.
/// The model for text generation.
/// The endpoint to Ollama hosted service.
- /// The optional custom HttpClient.
/// Optional service ID.
/// The updated kernel builder.
public static IServiceCollection AddOllamaTextEmbeddingGeneration(
this IServiceCollection services,
string modelId,
Uri endpoint,
- HttpClient? httpClient = null,
string? serviceId = null)
{
Verify.NotNull(services);
@@ -146,6 +190,28 @@ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
new OllamaTextEmbeddingGenerationService(
modelId: modelId,
endpoint: endpoint,
+ loggerFactory: serviceProvider.GetService()));
+ }
+
+ ///
+ /// Add Ollama Text Embedding Generation services to the specified service collection.
+ ///
+ /// The target service collection.
+ /// The model for text embedding generation.
+ /// Optional custom HttpClient, picked from ServiceCollection if not provided.
+ /// Optional service ID.
+ /// The updated service collection.
+ public static IServiceCollection AddOllamaTextEmbeddingGeneration(
+ this IServiceCollection services,
+ string modelId,
+ HttpClient? httpClient = null,
+ string? serviceId = null)
+ {
+ Verify.NotNull(services);
+
+ return services.AddKeyedSingleton(serviceId, (serviceProvider, _) =>
+ new OllamaTextEmbeddingGenerationService(
+ modelId: modelId,
httpClient: HttpClientProvider.GetHttpClient(httpClient, serviceProvider),
loggerFactory: serviceProvider.GetService()));
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
index acaad26753fa..1d9bc29f7e78 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
@@ -25,28 +25,43 @@ public sealed class OllamaChatCompletionService : ServiceBase, IChatCompletionSe
///
/// The hosted model.
/// The endpoint including the port where Ollama server is hosted
- /// Optional HTTP client to be used for communication with the Ollama API.
/// Optional logger factory to be used for logging.
public OllamaChatCompletionService(
string modelId,
Uri endpoint,
- HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
- : base(modelId, endpoint, httpClient, loggerFactory)
+ : base(modelId, endpoint, null, loggerFactory)
{
+ Verify.NotNull(endpoint);
}
///
/// Initializes a new instance of the class.
///
/// The hosted model.
- /// The Ollama API client.
+ /// HTTP client to be used for communication with the Ollama API.
/// Optional logger factory to be used for logging.
public OllamaChatCompletionService(
string modelId,
- OllamaApiClient client,
+ HttpClient httpClient,
ILoggerFactory? loggerFactory = null)
- : base(modelId, client, loggerFactory)
+ : base(modelId, null, httpClient, loggerFactory)
+ {
+ Verify.NotNull(httpClient);
+ Verify.NotNull(httpClient.BaseAddress);
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The hosted model.
+ /// The Ollama API client.
+ /// Optional logger factory to be used for logging.
+ public OllamaChatCompletionService(
+ string modelId,
+ OllamaApiClient ollamaClient,
+ ILoggerFactory? loggerFactory = null)
+ : base(modelId, ollamaClient, loggerFactory)
{
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
index 4dae642b6800..4b152f243fff 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
@@ -25,15 +25,30 @@ public sealed class OllamaTextEmbeddingGenerationService : ServiceBase, ITextEmb
///
/// The hosted model.
/// The endpoint including the port where Ollama server is hosted
- /// Optional HTTP client to be used for communication with the Ollama API.
/// Optional logger factory to be used for logging.
public OllamaTextEmbeddingGenerationService(
string modelId,
Uri endpoint,
- HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
- : base(modelId, endpoint, httpClient, loggerFactory)
+ : base(modelId, endpoint, null, loggerFactory)
{
+ Verify.NotNull(endpoint);
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The hosted model.
+ /// HTTP client to be used for communication with the Ollama API.
+ /// Optional logger factory to be used for logging.
+ public OllamaTextEmbeddingGenerationService(
+ string modelId,
+ HttpClient httpClient,
+ ILoggerFactory? loggerFactory = null)
+ : base(modelId, null, httpClient, loggerFactory)
+ {
+ Verify.NotNull(httpClient);
+ Verify.NotNull(httpClient.BaseAddress);
}
///
@@ -62,7 +77,7 @@ public async Task>> GenerateEmbeddingsAsync(
var request = new EmbedRequest
{
Model = this.GetModelId()!,
- Input = data.ToList(),
+ Input = (List)data,
};
var response = await this._client.Embed(request, cancellationToken: cancellationToken).ConfigureAwait(false);
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
index dc475a9fcc65..a9432c15d839 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextGenerationService.cs
@@ -25,15 +25,30 @@ public sealed class OllamaTextGenerationService : ServiceBase, ITextGenerationSe
///
/// The Ollama model for the text generation service.
/// The endpoint including the port where Ollama server is hosted
- /// Optional HTTP client to be used for communication with the Ollama API.
/// Optional logger factory to be used for logging.
public OllamaTextGenerationService(
string modelId,
Uri endpoint,
- HttpClient? httpClient = null,
ILoggerFactory? loggerFactory = null)
- : base(modelId, endpoint, httpClient, loggerFactory)
+ : base(modelId, endpoint, null, loggerFactory)
{
+ Verify.NotNull(endpoint);
+ }
+
+ ///
+ /// Initializes a new instance of the class.
+ ///
+ /// The Ollama model for the text generation service.
+ /// HTTP client to be used for communication with the Ollama API.
+ /// Optional logger factory to be used for logging.
+ public OllamaTextGenerationService(
+ string modelId,
+ HttpClient httpClient,
+ ILoggerFactory? loggerFactory = null)
+ : base(modelId, null, httpClient, loggerFactory)
+ {
+ Verify.NotNull(httpClient);
+ Verify.NotNull(httpClient.BaseAddress);
}
///
@@ -66,6 +81,7 @@ public async Task> GetTextContentsAsync(
var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
var request = CreateRequest(settings, this._client.SelectedModel);
+ request.Prompt = prompt;
await foreach (var responseStreamChunk in this._client.Generate(request, cancellationToken).ConfigureAwait(false))
{
@@ -95,6 +111,7 @@ public async IAsyncEnumerable GetStreamingTextContentsAsyn
{
var settings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
var request = CreateRequest(settings, this._client.SelectedModel);
+ request.Prompt = prompt;
await foreach (var content in this._client.Generate(request, cancellationToken).ConfigureAwait(false))
{
From 4f8879fb0eac0bf50c237a166e27c98e75dd2808 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 09:41:23 +0100
Subject: [PATCH 05/11] Address PR Feedback
---
.../ChatCompletion/Ollama_ChatCompletion.cs | 49 +--------
.../Ollama_ChatCompletionStreaming.cs | 17 +--
.../OllamaKernelBuilderExtensionsTests.cs | 2 +-
.../OllamaServiceCollectionExtensionsTests.cs | 2 +-
.../Services/OllamaChatCompletionTests.cs | 101 +++++++++++++++---
.../OllamaTextEmbeddingGenerationTests.cs | 2 +-
.../Services/OllamaTextGenerationTests.cs | 84 ++++++++++++++-
.../OllamaPromptExecutionSettingsTests.cs | 6 +-
.../chat_completion_test_response.txt | 1 -
.../chat_completion_test_response_stream.txt | 10 +-
.../text_generation_test_response.txt | 1 -
.../Connectors.Ollama.csproj | 4 +-
.../samples/InternalUtilities/BaseTest.cs | 12 +++
13 files changed, 204 insertions(+), 87 deletions(-)
rename dotnet/src/Connectors/Connectors.Ollama.UnitTests/{ => Extensions}/OllamaKernelBuilderExtensionsTests.cs (96%)
rename dotnet/src/Connectors/Connectors.Ollama.UnitTests/{ => Extensions}/OllamaServiceCollectionExtensionsTests.cs (96%)
rename dotnet/src/Connectors/Connectors.Ollama.UnitTests/{ => Settings}/OllamaPromptExecutionSettingsTests.cs (96%)
delete mode 100644 dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response.txt
delete mode 100644 dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response.txt
diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs
index fbde45f78593..b76b4fff88a1 100644
--- a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletion.cs
@@ -28,49 +28,21 @@ public async Task ServicePromptAsync()
// First user message
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
- await MessageOutputAsync(chatHistory);
+ this.OutputLastMessage(chatHistory);
// First assistant message
var reply = await chatService.GetChatMessageContentAsync(chatHistory);
chatHistory.Add(reply);
- await MessageOutputAsync(chatHistory);
+ this.OutputLastMessage(chatHistory);
// Second user message
chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
- await MessageOutputAsync(chatHistory);
+ this.OutputLastMessage(chatHistory);
// Second assistant message
reply = await chatService.GetChatMessageContentAsync(chatHistory);
chatHistory.Add(reply);
- await MessageOutputAsync(chatHistory);
-
- /* Output:
-
- Chat content:
- ------------------------
- System: You are a librarian, expert about books
- ------------------------
- User: Hi, I'm looking for book suggestions
- ------------------------
- Assistant: Sure, I'd be happy to help! What kind of books are you interested in? Fiction or non-fiction? Any particular genre?
- ------------------------
- User: I love history and philosophy, I'd like to learn something new about Greece, any suggestion?
- ------------------------
- Assistant: Great! For history and philosophy books about Greece, here are a few suggestions:
-
- 1. "The Greeks" by H.D.F. Kitto - This is a classic book that provides an overview of ancient Greek history and culture, including their philosophy, literature, and art.
-
- 2. "The Republic" by Plato - This is one of the most famous works of philosophy in the Western world, and it explores the nature of justice and the ideal society.
-
- 3. "The Peloponnesian War" by Thucydides - This is a detailed account of the war between Athens and Sparta in the 5th century BCE, and it provides insight into the political and military strategies of the time.
-
- 4. "The Iliad" by Homer - This epic poem tells the story of the Trojan War and is considered one of the greatest works of literature in the Western canon.
-
- 5. "The Histories" by Herodotus - This is a comprehensive account of the Persian Wars and provides a wealth of information about ancient Greek culture and society.
-
- I hope these suggestions are helpful!
- ------------------------
- */
+ this.OutputLastMessage(chatHistory);
}
[Fact]
@@ -98,17 +70,4 @@ public async Task ChatPromptAsync()
Console.WriteLine(reply);
}
-
- ///
- /// Outputs the last message of the chat history
- ///
- private Task MessageOutputAsync(ChatHistory chatHistory)
- {
- var message = chatHistory.Last();
-
- Console.WriteLine($"{message.Role}: {message.Content}");
- Console.WriteLine("------------------------");
-
- return Task.CompletedTask;
- }
}
diff --git a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs
index 98da41fec2a5..d83aac04e9bf 100644
--- a/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/Ollama_ChatCompletionStreaming.cs
@@ -94,18 +94,18 @@ private async Task StartStreamingChatAsync(IChatCompletionService chatCompletion
Console.WriteLine("------------------------");
var chatHistory = new ChatHistory("You are a librarian, expert about books");
- OutputLastMessage(chatHistory);
+ this.OutputLastMessage(chatHistory);
// First user message
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
- OutputLastMessage(chatHistory);
+ this.OutputLastMessage(chatHistory);
// First assistant message
await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
// Second user message
chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion?");
- OutputLastMessage(chatHistory);
+ this.OutputLastMessage(chatHistory);
// Second assistant message
await StreamMessageOutputAsync(chatCompletionService, chatHistory, AuthorRole.Assistant);
@@ -158,15 +158,4 @@ private async Task StreamMessageOutputFromKernelAsync(Kernel kernel, str
Console.WriteLine("\n------------------------");
return fullMessage;
}
-
- ///
- /// Outputs the last message of the chat history
- ///
- private void OutputLastMessage(ChatHistory chatHistory)
- {
- var message = chatHistory.Last();
-
- Console.WriteLine($"{message.Role}: {message.Content}");
- Console.WriteLine("------------------------");
- }
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaKernelBuilderExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
similarity index 96%
rename from dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaKernelBuilderExtensionsTests.cs
rename to dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
index 571f99983bbd..668044164ded 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaKernelBuilderExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaKernelBuilderExtensionsTests.cs
@@ -8,7 +8,7 @@
using Microsoft.SemanticKernel.TextGeneration;
using Xunit;
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
+namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions;
///
/// Unit tests of .
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaServiceCollectionExtensionsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
similarity index 96%
rename from dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaServiceCollectionExtensionsTests.cs
rename to dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
index 4762acadc65e..2c3a4e79df04 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaServiceCollectionExtensionsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Extensions/OllamaServiceCollectionExtensionsTests.cs
@@ -9,7 +9,7 @@
using Microsoft.SemanticKernel.TextGeneration;
using Xunit;
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
+namespace SemanticKernel.Connectors.Ollama.UnitTests.Extensions;
///
/// Unit tests of .
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
index 862131e21196..28fa16ef7884 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
@@ -12,7 +12,7 @@
using OllamaSharp.Models.Chat;
using Xunit;
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
+namespace SemanticKernel.Connectors.Ollama.UnitTests.Services;
public sealed class OllamaChatCompletionTests : IDisposable
{
@@ -22,7 +22,10 @@ public sealed class OllamaChatCompletionTests : IDisposable
public OllamaChatCompletionTests()
{
this._messageHandlerStub = new HttpMessageHandlerStub();
- this._messageHandlerStub.ResponseToReturn.Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.txt"));
+ this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response_stream.txt"))
+ };
this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") };
}
@@ -75,11 +78,6 @@ public async Task GetChatMessageContentsShouldHaveModelAndMetadataAsync()
"phi3",
httpClient: this._httpClient);
- this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
- {
- Content = new StringContent(File.ReadAllText("TestData/chat_completion_test_response.txt"))
- };
-
var chat = new ChatHistory();
chat.AddMessage(AuthorRole.User, "fake-text");
@@ -92,6 +90,14 @@ public async Task GetChatMessageContentsShouldHaveModelAndMetadataAsync()
Assert.NotNull(message);
// Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Null(requestPayload.Options.Stop);
+ Assert.Null(requestPayload.Options.Temperature);
+ Assert.Null(requestPayload.Options.TopK);
+ Assert.Null(requestPayload.Options.TopP);
+
Assert.NotNull(message.ModelId);
Assert.Equal("phi3", message.ModelId);
}
@@ -105,11 +111,6 @@ public async Task GetStreamingChatMessageContentsShouldHaveModelAndInnerContentA
expectedModel,
httpClient: this._httpClient);
- this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
- {
- Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response_stream.txt"))
- };
-
var chat = new ChatHistory();
chat.AddMessage(AuthorRole.User, "fake-text");
@@ -122,6 +123,14 @@ public async Task GetStreamingChatMessageContentsShouldHaveModelAndInnerContentA
}
// Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Null(requestPayload.Options.Stop);
+ Assert.Null(requestPayload.Options.Temperature);
+ Assert.Null(requestPayload.Options.TopK);
+ Assert.Null(requestPayload.Options.TopP);
+
Assert.NotNull(lastMessage!.ModelId);
Assert.Equal(expectedModel, lastMessage.ModelId);
@@ -131,6 +140,74 @@ public async Task GetStreamingChatMessageContentsShouldHaveModelAndInnerContentA
Assert.True(innerContent.Done);
}
+ [Fact]
+ public async Task GetStreamingChatMessageContentsExecutionSettingsMustBeSentAsync()
+ {
+ //Arrange
+ var sut = new OllamaChatCompletionService(
+ "fake-model",
+ httpClient: this._httpClient);
+ var chat = new ChatHistory();
+ chat.AddMessage(AuthorRole.User, "fake-text");
+ string jsonSettings = """
+ {
+ "stop": ["stop me"],
+ "temperature": 0.5,
+ "top_p": 0.9,
+ "top_k": 100
+ }
+ """;
+
+ var executionSettings = JsonSerializer.Deserialize(jsonSettings);
+ var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
+
+ // Act
+ await sut.GetStreamingChatMessageContentsAsync(chat, ollamaExecutionSettings).GetAsyncEnumerator().MoveNextAsync();
+
+ // Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop);
+ Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature);
+ Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP);
+ Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK);
+ }
+
+ [Fact]
+ public async Task GetChatMessageContentsExecutionSettingsMustBeSentAsync()
+ {
+ //Arrange
+ var sut = new OllamaChatCompletionService(
+ "fake-model",
+ httpClient: this._httpClient);
+ var chat = new ChatHistory();
+ chat.AddMessage(AuthorRole.User, "fake-text");
+ string jsonSettings = """
+ {
+ "stop": ["stop me"],
+ "temperature": 0.5,
+ "top_p": 0.9,
+ "top_k": 100
+ }
+ """;
+
+ var executionSettings = JsonSerializer.Deserialize(jsonSettings);
+ var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
+
+ // Act
+ await sut.GetChatMessageContentsAsync(chat, ollamaExecutionSettings);
+
+ // Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop);
+ Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature);
+ Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP);
+ Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK);
+ }
+
public void Dispose()
{
this._httpClient.Dispose();
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
index 2c1b76ad4410..cf71a0327613 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
@@ -9,7 +9,7 @@
using OllamaSharp.Models;
using Xunit;
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
+namespace SemanticKernel.Connectors.Ollama.UnitTests.Services;
public sealed class OllamaTextEmbeddingGenerationTests : IDisposable
{
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
index c87833fc1c7b..f490b55f24ba 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
@@ -10,9 +10,10 @@
using Microsoft.SemanticKernel.Connectors.Ollama;
using Microsoft.SemanticKernel.TextGeneration;
using OllamaSharp.Models;
+using OllamaSharp.Models.Chat;
using Xunit;
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
+namespace SemanticKernel.Connectors.Ollama.UnitTests.Services;
public sealed class OllamaTextGenerationTests : IDisposable
{
@@ -79,6 +80,14 @@ public async Task GetTextContentsShouldHaveModelIdDefinedAsync()
var textContent = await sut.GetTextContentAsync("Any prompt");
// Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Null(requestPayload.Options.Stop);
+ Assert.Null(requestPayload.Options.Temperature);
+ Assert.Null(requestPayload.Options.TopK);
+ Assert.Null(requestPayload.Options.TopP);
+
Assert.NotNull(textContent.ModelId);
Assert.Equal(expectedModel, textContent.ModelId);
}
@@ -100,10 +109,83 @@ public async Task GetStreamingTextContentsShouldHaveModelIdDefinedAsync()
}
// Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Null(requestPayload.Options.Stop);
+ Assert.Null(requestPayload.Options.Temperature);
+ Assert.Null(requestPayload.Options.TopK);
+ Assert.Null(requestPayload.Options.TopP);
+
Assert.NotNull(lastTextContent!.ModelId);
Assert.Equal(expectedModel, lastTextContent.ModelId);
}
+ [Fact]
+ public async Task GetStreamingTextContentsExecutionSettingsMustBeSentAsync()
+ {
+ //Arrange
+ var sut = new OllamaTextGenerationService(
+ "fake-model",
+ httpClient: this._httpClient);
+
+ string jsonSettings = """
+ {
+ "stop": ["stop me"],
+ "temperature": 0.5,
+ "top_p": 0.9,
+ "top_k": 100
+ }
+ """;
+
+ var executionSettings = JsonSerializer.Deserialize(jsonSettings);
+ var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
+
+ // Act
+ await sut.GetStreamingTextContentsAsync("Any prompt", ollamaExecutionSettings).GetAsyncEnumerator().MoveNextAsync();
+
+ // Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop);
+ Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature);
+ Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP);
+ Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK);
+ }
+
+ [Fact]
+ public async Task GetTextContentsExecutionSettingsMustBeSentAsync()
+ {
+ //Arrange
+ var sut = new OllamaTextGenerationService(
+ "fake-model",
+ httpClient: this._httpClient);
+ string jsonSettings = """
+ {
+ "stop": ["stop me"],
+ "temperature": 0.5,
+ "top_p": 0.9,
+ "top_k": 100
+ }
+ """;
+
+ var executionSettings = JsonSerializer.Deserialize(jsonSettings);
+ var ollamaExecutionSettings = OllamaPromptExecutionSettings.FromExecutionSettings(executionSettings);
+
+ // Act
+ await sut.GetTextContentsAsync("Any prompt", ollamaExecutionSettings);
+
+ // Assert
+ var requestPayload = JsonSerializer.Deserialize(this._messageHandlerStub.RequestContent);
+ Assert.NotNull(requestPayload);
+ Assert.NotNull(requestPayload.Options);
+ Assert.Equal(ollamaExecutionSettings.Stop, requestPayload.Options.Stop);
+ Assert.Equal(ollamaExecutionSettings.Temperature, requestPayload.Options.Temperature);
+ Assert.Equal(ollamaExecutionSettings.TopP, requestPayload.Options.TopP);
+ Assert.Equal(ollamaExecutionSettings.TopK, requestPayload.Options.TopK);
+ }
+
///
/// Disposes resources used by this class.
///
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaPromptExecutionSettingsTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Settings/OllamaPromptExecutionSettingsTests.cs
similarity index 96%
rename from dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaPromptExecutionSettingsTests.cs
rename to dotnet/src/Connectors/Connectors.Ollama.UnitTests/Settings/OllamaPromptExecutionSettingsTests.cs
index 314d05876e6f..b7ff3d1c57c5 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/OllamaPromptExecutionSettingsTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Settings/OllamaPromptExecutionSettingsTests.cs
@@ -6,7 +6,7 @@
using Microsoft.SemanticKernel.Connectors.Ollama;
using Xunit;
-namespace SemanticKernel.Connectors.Ollama.UnitTests;
+namespace SemanticKernel.Connectors.Ollama.UnitTests.Settings;
///
/// Unit tests of .
@@ -14,7 +14,7 @@ namespace SemanticKernel.Connectors.Ollama.UnitTests;
public class OllamaPromptExecutionSettingsTests
{
[Fact]
- public void FromExecutionSettingsWhenAlreadyOllamaShouldReturnSameAsync()
+ public void FromExecutionSettingsWhenAlreadyOllamaShouldReturnSame()
{
// Arrange
var executionSettings = new OllamaPromptExecutionSettings();
@@ -27,7 +27,7 @@ public void FromExecutionSettingsWhenAlreadyOllamaShouldReturnSameAsync()
}
[Fact]
- public void FromExecutionSettingsWhenNullShouldReturnDefaultAsync()
+ public void FromExecutionSettingsWhenNullShouldReturnDefault()
{
// Arrange
OllamaPromptExecutionSettings? executionSettings = null;
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response.txt b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response.txt
deleted file mode 100644
index b27faf2a1fb5..000000000000
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response.txt
+++ /dev/null
@@ -1 +0,0 @@
-{"model":"phi3","created_at":"2024-07-02T12:30:00.295693434Z","message":{"role":"assistant","content":"This is test completion response"},"done_reason":"stop","done":true,"total_duration":69492224084,"load_duration":66637210792,"prompt_eval_count":10,"prompt_eval_duration":41164000,"eval_count":236,"eval_duration":2712234000}
\ No newline at end of file
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response_stream.txt b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response_stream.txt
index a0678c024d27..55b26d234500 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response_stream.txt
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/chat_completion_test_response_stream.txt
@@ -1,6 +1,6 @@
-{"model":"phi3","created_at":"2024-07-02T11:45:16.216898458Z","message":{"role":"assistant","content":" these"},"done":false}
-{"model":"phi3","created_at":"2024-07-02T11:45:16.22693076Z","message":{"role":"assistant","content":" times"},"done":false}
-{"model":"phi3","created_at":"2024-07-02T11:45:16.236570847Z","message":{"role":"assistant","content":" of"},"done":false}
-{"model":"phi3","created_at":"2024-07-02T11:45:16.246538945Z","message":{"role":"assistant","content":" day"},"done":false}
-{"model":"phi3","created_at":"2024-07-02T11:45:16.25611096Z","message":{"role":"assistant","content":"."},"done":false}
+{"model":"phi3","created_at":"2024-07-02T11:45:16.216898458Z","message":{"role":"assistant","content":"This "},"done":false}
+{"model":"phi3","created_at":"2024-07-02T11:45:16.22693076Z","message":{"role":"assistant","content":"is "},"done":false}
+{"model":"phi3","created_at":"2024-07-02T11:45:16.236570847Z","message":{"role":"assistant","content":"test "},"done":false}
+{"model":"phi3","created_at":"2024-07-02T11:45:16.246538945Z","message":{"role":"assistant","content":"completion "},"done":false}
+{"model":"phi3","created_at":"2024-07-02T11:45:16.25611096Z","message":{"role":"assistant","content":"response"},"done":false}
{"model":"phi3","created_at":"2024-07-02T11:45:16.265598822Z","message":{"role":"assistant","content":""},"done_reason":"stop","done":true,"total_duration":58123571935,"load_duration":55561676662,"prompt_eval_count":10,"prompt_eval_duration":34847000,"eval_count":239,"eval_duration":2381751000}
\ No newline at end of file
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response.txt b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response.txt
deleted file mode 100644
index b8d071565e02..000000000000
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/TestData/text_generation_test_response.txt
+++ /dev/null
@@ -1 +0,0 @@
-{"model":"llama2","created_at":"2024-07-02T14:32:06.227383499Z","response":"This is test completion response","done":true,"done_reason":"stop","context":[518,25580,29962,3532,298,434,29889],"total_duration":94608355007,"load_duration":91044187969,"prompt_eval_count":26,"prompt_eval_duration":22543000,"eval_count":313,"eval_duration":3490651000}
\ No newline at end of file
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Connectors.Ollama.csproj b/dotnet/src/Connectors/Connectors.Ollama/Connectors.Ollama.csproj
index e75d956fd50e..1ce5397d2e07 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Connectors.Ollama.csproj
+++ b/dotnet/src/Connectors/Connectors.Ollama/Connectors.Ollama.csproj
@@ -4,7 +4,7 @@
Microsoft.SemanticKernel.Connectors.Ollama
$(AssemblyName)
- netstandard2.0
+ net8.0;netstandard2.0
alpha
@@ -15,7 +15,7 @@
Semantic Kernel - Ollama AI connectors
- Semantic Kernel connector for Ollama. Contains clients for text generation.
+ Semantic Kernel connector for Ollama. Contains services for text generation, chat completion and text embeddings.
diff --git a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
index d71d3c1f0032..5b1916984d30 100644
--- a/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
+++ b/dotnet/src/InternalUtilities/samples/InternalUtilities/BaseTest.cs
@@ -4,6 +4,7 @@
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
+using Microsoft.SemanticKernel.ChatCompletion;
public abstract class BaseTest
{
@@ -101,6 +102,17 @@ public void WriteLine(string? message)
public void Write(object? target = null)
=> this.Output.WriteLine(target ?? string.Empty);
+ ///
+ /// Outputs the last message in the chat history.
+ ///
+ /// Chat history
+ protected void OutputLastMessage(ChatHistory chatHistory)
+ {
+ var message = chatHistory.Last();
+
+ Console.WriteLine($"{message.Role}: {message.Content}");
+ Console.WriteLine("------------------------");
+ }
protected sealed class LoggingHandler(HttpMessageHandler innerHandler, ITestOutputHelper output) : DelegatingHandler(innerHandler)
{
private static readonly JsonSerializerOptions s_jsonSerializerOptions = new() { WriteIndented = true };
From f13bb24c754a771fe8da2a605485df97af901108 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 09:55:19 +0100
Subject: [PATCH 06/11] Fix warnings
---
.../ChatCompletion/OpenAI_ChatCompletion.cs | 21 ++++---------------
.../OpenAI_ChatCompletionStreaming.cs | 11 ----------
.../Services/OllamaChatCompletionTests.cs | 4 ++--
.../OllamaTextEmbeddingGenerationTests.cs | 2 +-
.../Services/OllamaTextGenerationTests.cs | 2 +-
.../Services/OllamaChatCompletionService.cs | 2 +-
6 files changed, 9 insertions(+), 33 deletions(-)
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
index 42164d3fe8dc..a92c86dd977d 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletion.cs
@@ -89,33 +89,20 @@ private async Task StartChatAsync(IChatCompletionService chatGPT)
// First user message
chatHistory.AddUserMessage("Hi, I'm looking for book suggestions");
- await MessageOutputAsync(chatHistory);
+ OutputLastMessage(chatHistory);
// First bot assistant message
var reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
chatHistory.Add(reply);
- await MessageOutputAsync(chatHistory);
+ OutputLastMessage(chatHistory);
// Second user message
chatHistory.AddUserMessage("I love history and philosophy, I'd like to learn something new about Greece, any suggestion");
- await MessageOutputAsync(chatHistory);
+ OutputLastMessage(chatHistory);
// Second bot assistant message
reply = await chatGPT.GetChatMessageContentAsync(chatHistory);
chatHistory.Add(reply);
- await MessageOutputAsync(chatHistory);
- }
-
- ///
- /// Outputs the last message of the chat history
- ///
- private Task MessageOutputAsync(ChatHistory chatHistory)
- {
- var message = chatHistory.Last();
-
- Console.WriteLine($"{message.Role}: {message.Content}");
- Console.WriteLine("------------------------");
-
- return Task.CompletedTask;
+ OutputLastMessage(chatHistory);
}
}
diff --git a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
index c63f6ce37a8f..fe0052a52db2 100644
--- a/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
+++ b/dotnet/samples/Concepts/ChatCompletion/OpenAI_ChatCompletionStreaming.cs
@@ -162,15 +162,4 @@ private async Task StreamMessageOutputAsync(IChatCompletionService chatCompletio
Console.WriteLine("\n------------------------");
chatHistory.AddMessage(authorRole, fullMessage);
}
-
- ///
- /// Outputs the last message of the chat history
- ///
- private void OutputLastMessage(ChatHistory chatHistory)
- {
- var message = chatHistory.Last();
-
- Console.WriteLine($"{message.Role}: {message.Content}");
- Console.WriteLine("------------------------");
- }
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
index 28fa16ef7884..acfab94a0e48 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
@@ -21,7 +21,7 @@ public sealed class OllamaChatCompletionTests : IDisposable
public OllamaChatCompletionTests()
{
- this._messageHandlerStub = new HttpMessageHandlerStub();
+ this._messageHandlerStub = new();
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
{
Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response_stream.txt"))
@@ -71,7 +71,7 @@ public async Task ShouldHandleServiceResponseAsync()
}
[Fact]
- public async Task GetChatMessageContentsShouldHaveModelAndMetadataAsync()
+ public async Task GetChatMessageContentsShouldHaveModelAndInnerContentAsync()
{
//Arrange
var sut = new OllamaChatCompletionService(
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
index cf71a0327613..ec1e63c1cd56 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextEmbeddingGenerationTests.cs
@@ -18,7 +18,7 @@ public sealed class OllamaTextEmbeddingGenerationTests : IDisposable
public OllamaTextEmbeddingGenerationTests()
{
- this._messageHandlerStub = new HttpMessageHandlerStub();
+ this._messageHandlerStub = new();
this._messageHandlerStub.ResponseToReturn.Content = new StringContent(File.ReadAllText("TestData/embeddings_test_response.json"));
this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") };
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
index f490b55f24ba..5c03098909f9 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
@@ -22,7 +22,7 @@ public sealed class OllamaTextGenerationTests : IDisposable
public OllamaTextGenerationTests()
{
- this._messageHandlerStub = new HttpMessageHandlerStub();
+ this._messageHandlerStub = new();
this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
{
Content = new StreamContent(File.OpenRead("TestData/text_generation_test_response_stream.txt"))
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
index 1d9bc29f7e78..e8e0c2e965e9 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaChatCompletionService.cs
@@ -140,7 +140,7 @@ public async IAsyncEnumerable GetStreamingChatMessa
"ASSISTANT" => AuthorRole.Assistant,
"SYSTEM" => AuthorRole.System,
null => null,
- _ => new AuthorRole(role.ToString())
+ _ => new AuthorRole(role.ToString()!)
};
private static ChatRequest CreateChatRequest(ChatHistory chatHistory, OllamaPromptExecutionSettings settings, string selectedModel)
From 1ca0966dee1e54dfbd776ffc96e305e1a6991c24 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 10:20:37 +0100
Subject: [PATCH 07/11] Fix IT step 1
---
.../Ollama/OllamaCompletionTests.cs | 16 ++++++++-----
.../Ollama/OllamaTextGenerationTests.cs | 23 ++++++++++---------
2 files changed, 22 insertions(+), 17 deletions(-)
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
index 4fabf80936ff..7ac58f730e8e 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
@@ -10,6 +10,8 @@
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
+using OllamaSharp.Models;
+using OllamaSharp.Models.Chat;
using SemanticKernel.IntegrationTests.TestSettings;
using Xunit;
using Xunit.Abstractions;
@@ -47,6 +49,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
// Act
await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }))
{
+ Assert.NotNull(content.InnerContent);
if (content is StreamingChatMessageContent messageContent)
{
Assert.NotNull(messageContent.Role);
@@ -60,7 +63,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
}
[Fact(Skip = "For manual verification only")]
- public async Task ItShouldReturnMetadataAsync()
+ public async Task ItShouldReturnInnerContentAsync()
{
// Arrange
this._kernelBuilder.Services.AddSingleton(this._logger);
@@ -80,10 +83,12 @@ public async Task ItShouldReturnMetadataAsync()
// Assert
Assert.NotNull(lastUpdate);
- Assert.NotNull(lastUpdate.Metadata);
-
- // CreatedAt
- Assert.True(lastUpdate.Metadata.TryGetValue("CreatedAt", out object? createdAt));
+ Assert.NotNull(lastUpdate.InnerContent);
+ Assert.IsType(lastUpdate.InnerContent);
+ var innerContent = lastUpdate.InnerContent as ChatDoneResponseStream;
+ Assert.NotNull(innerContent);
+ Assert.NotNull(innerContent.CreatedAt);
+ Assert.True(innerContent.Done);
}
[Theory(Skip = "For manual verification only")]
@@ -167,7 +172,6 @@ public async Task ItShouldHaveSemanticKernelVersionHeaderAsync()
this._kernelBuilder.Services.AddSingleton(this._logger);
var builder = this._kernelBuilder;
builder.AddOllamaChatCompletion(
- endpoint: config.Endpoint,
modelId: config.ModelId,
httpClient: httpClient);
Kernel target = builder.Build();
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
index 597fdf331db2..5dcb6f5d7822 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
@@ -10,9 +10,12 @@
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
+using OllamaSharp.Models;
+using OllamaSharp.Models.Chat;
using SemanticKernel.IntegrationTests.TestSettings;
using Xunit;
using Xunit.Abstractions;
+using Xunit.Sdk;
namespace SemanticKernel.IntegrationTests.Connectors.Ollama;
@@ -48,7 +51,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
await foreach (var content in target.InvokeStreamingAsync(plugins["ChatPlugin"]["Chat"], new() { [InputParameterName] = prompt }))
{
fullResult.Append(content);
- Assert.NotNull(content.Metadata);
+ Assert.NotNull(content.InnerContent);
}
// Assert
@@ -56,7 +59,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
}
[Fact(Skip = "For manual verification only")]
- public async Task ItShouldReturnMetadataAsync()
+ public async Task ItShouldReturnInnerContentAsync()
{
// Arrange
this._kernelBuilder.Services.AddSingleton(this._logger);
@@ -76,15 +79,13 @@ public async Task ItShouldReturnMetadataAsync()
// Assert
Assert.NotNull(lastUpdate);
- Assert.NotNull(lastUpdate.Metadata);
-
- // CreatedAt
- Assert.True(lastUpdate.Metadata.TryGetValue("CreatedAt", out object? createdAt));
- Assert.IsType(lastUpdate.Metadata);
- OllamaMetadata ollamaMetadata = (OllamaMetadata)lastUpdate.Metadata;
- Assert.NotNull(ollamaMetadata.CreatedAt);
- Assert.NotEqual(0, ollamaMetadata.TotalDuration);
- Assert.NotEqual(0, ollamaMetadata.EvalDuration);
+ Assert.NotNull(lastUpdate.InnerContent);
+
+ Assert.IsType(lastUpdate.InnerContent);
+ var innerContent = lastUpdate.InnerContent as GenerateDoneResponseStream;
+ Assert.NotNull(innerContent);
+ Assert.NotNull(innerContent.CreatedAt);
+ Assert.True(innerContent.Done);
}
[Theory(Skip = "For manual verification only")]
From 0daeca8b1e561d84ad43ca54db40fa0f1ff8d1ec Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 10:47:03 +0100
Subject: [PATCH 08/11] Integration Tests Passing
---
.../Services/OllamaChatCompletionTests.cs | 8 ++--
.../Services/OllamaTextGenerationTests.cs | 8 ++--
.../OllamaTextEmbeddingGenerationService.cs | 2 +-
.../Ollama/OllamaCompletionTests.cs | 41 +++--------------
.../Ollama/OllamaTextEmbeddingTests.cs | 9 ++--
.../Ollama/OllamaTextGenerationTests.cs | 46 ++++---------------
.../TestSettings/OllamaConfiguration.cs | 2 +-
7 files changed, 34 insertions(+), 82 deletions(-)
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
index acfab94a0e48..40e1b840beaf 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaChatCompletionTests.cs
@@ -21,10 +21,12 @@ public sealed class OllamaChatCompletionTests : IDisposable
public OllamaChatCompletionTests()
{
- this._messageHandlerStub = new();
- this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ this._messageHandlerStub = new()
{
- Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response_stream.txt"))
+ ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StreamContent(File.OpenRead("TestData/chat_completion_test_response_stream.txt"))
+ }
};
this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") };
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
index 5c03098909f9..c765bf1d678d 100644
--- a/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama.UnitTests/Services/OllamaTextGenerationTests.cs
@@ -22,10 +22,12 @@ public sealed class OllamaTextGenerationTests : IDisposable
public OllamaTextGenerationTests()
{
- this._messageHandlerStub = new();
- this._messageHandlerStub.ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ this._messageHandlerStub = new()
{
- Content = new StreamContent(File.OpenRead("TestData/text_generation_test_response_stream.txt"))
+ ResponseToReturn = new HttpResponseMessage(System.Net.HttpStatusCode.OK)
+ {
+ Content = new StreamContent(File.OpenRead("TestData/text_generation_test_response_stream.txt"))
+ }
};
this._httpClient = new HttpClient(this._messageHandlerStub, false) { BaseAddress = new Uri("http://localhost:11434") };
}
diff --git a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
index 4b152f243fff..f5bee67d4ec5 100644
--- a/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
+++ b/dotnet/src/Connectors/Connectors.Ollama/Services/OllamaTextEmbeddingGenerationService.cs
@@ -77,7 +77,7 @@ public async Task>> GenerateEmbeddingsAsync(
var request = new EmbedRequest
{
Model = this.GetModelId()!,
- Input = (List)data,
+ Input = data.ToList(),
};
var response = await this._client.Embed(request, cancellationToken: cancellationToken).ConfigureAwait(false);
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
index 7ac58f730e8e..406ec1f0ee57 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
@@ -31,7 +31,7 @@ public sealed class OllamaCompletionTests(ITestOutputHelper output) : IDisposabl
.AddUserSecrets()
.Build();
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnswerContains)
{
@@ -62,7 +62,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase);
}
- [Fact(Skip = "For manual verification only")]
+ [Fact]//(Skip = "For manual verification only")]
public async Task ItShouldReturnInnerContentAsync()
{
// Arrange
@@ -84,14 +84,14 @@ public async Task ItShouldReturnInnerContentAsync()
// Assert
Assert.NotNull(lastUpdate);
Assert.NotNull(lastUpdate.InnerContent);
- Assert.IsType(lastUpdate.InnerContent);
+ Assert.IsType(lastUpdate.InnerContent);
var innerContent = lastUpdate.InnerContent as ChatDoneResponseStream;
Assert.NotNull(innerContent);
Assert.NotNull(innerContent.CreatedAt);
Assert.True(innerContent.Done);
}
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("\n")]
[InlineData("\r\n")]
public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
@@ -118,7 +118,7 @@ public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Fact(Skip = "For manual verification only")]
+ [Fact]//(Skip = "For manual verification only")]
public async Task ItInvokePromptTestAsync()
{
// Arrange
@@ -136,7 +136,7 @@ public async Task ItInvokePromptTestAsync()
Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains)
{
@@ -157,33 +157,6 @@ public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains
Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Fact(Skip = "For manual verification only")]
- public async Task ItShouldHaveSemanticKernelVersionHeaderAsync()
- {
- // Arrange
- var config = this._configuration.GetSection("Ollama").Get();
- Assert.NotNull(config);
- Assert.NotNull(config.ModelId);
- Assert.NotNull(config.Endpoint);
-
- using var defaultHandler = new HttpClientHandler();
- using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler);
- using var httpClient = new HttpClient(httpHeaderHandler);
- this._kernelBuilder.Services.AddSingleton(this._logger);
- var builder = this._kernelBuilder;
- builder.AddOllamaChatCompletion(
- modelId: config.ModelId,
- httpClient: httpClient);
- Kernel target = builder.Build();
-
- // Act
- var result = await target.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?");
-
- // Assert
- Assert.NotNull(httpHeaderHandler.RequestHeaders);
- Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values));
- }
-
#region internals
private readonly XunitLogger _logger = new(output);
@@ -205,7 +178,7 @@ private void ConfigureChatOllama(IKernelBuilder kernelBuilder)
kernelBuilder.AddOllamaChatCompletion(
modelId: config.ModelId,
- endpoint: config.Endpoint);
+ endpoint: new Uri(config.Endpoint));
}
private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler)
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs
index f530098b473b..36015ef9d5bf 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs
@@ -1,5 +1,6 @@
// Copyright (c) Microsoft. All rights reserved.
+using System;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.SemanticKernel.Connectors.Ollama;
@@ -18,7 +19,7 @@ public sealed class OllamaTextEmbeddingTests
.AddUserSecrets()
.Build();
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("mxbai-embed-large", 1024)]
[InlineData("nomic-embed-text", 768)]
[InlineData("all-minilm", 384)]
@@ -33,7 +34,7 @@ public async Task GenerateEmbeddingHasExpectedLengthForModelAsync(string modelId
var embeddingGenerator = new OllamaTextEmbeddingGenerationService(
modelId,
- config.Endpoint);
+ new Uri(config.Endpoint));
// Act
var result = await embeddingGenerator.GenerateEmbeddingAsync(TestInputString);
@@ -42,7 +43,7 @@ public async Task GenerateEmbeddingHasExpectedLengthForModelAsync(string modelId
Assert.Equal(expectedVectorLength, result.Length);
}
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("mxbai-embed-large", 1024)]
[InlineData("nomic-embed-text", 768)]
[InlineData("all-minilm", 384)]
@@ -57,7 +58,7 @@ public async Task GenerateEmbeddingsHasExpectedResultsLengthForModelAsync(string
var embeddingGenerator = new OllamaTextEmbeddingGenerationService(
modelId,
- config.Endpoint);
+ new Uri(config.Endpoint));
// Act
var result = await embeddingGenerator.GenerateEmbeddingsAsync(testInputStrings);
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
index 5dcb6f5d7822..24d58ad5f679 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
@@ -32,7 +32,7 @@ public sealed class OllamaTextGenerationTests(ITestOutputHelper output) : IDispo
.AddUserSecrets()
.Build();
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnswerContains)
{
@@ -58,7 +58,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase);
}
- [Fact(Skip = "For manual verification only")]
+ [Fact]//(Skip = "For manual verification only")]
public async Task ItShouldReturnInnerContentAsync()
{
// Arrange
@@ -81,14 +81,14 @@ public async Task ItShouldReturnInnerContentAsync()
Assert.NotNull(lastUpdate);
Assert.NotNull(lastUpdate.InnerContent);
- Assert.IsType(lastUpdate.InnerContent);
+ Assert.IsType(lastUpdate.InnerContent);
var innerContent = lastUpdate.InnerContent as GenerateDoneResponseStream;
Assert.NotNull(innerContent);
Assert.NotNull(innerContent.CreatedAt);
Assert.True(innerContent.Done);
}
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("\n")]
[InlineData("\r\n")]
public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
@@ -115,7 +115,7 @@ public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Fact(Skip = "For manual verification only")]
+ [Fact]//(Skip = "For manual verification only")]
public async Task ItInvokePromptTestAsync()
{
// Arrange
@@ -133,7 +133,7 @@ public async Task ItInvokePromptTestAsync()
Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Theory(Skip = "For manual verification only")]
+ [Theory]//(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains)
{
@@ -152,35 +152,9 @@ public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains
// Assert
Assert.Contains(expectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase);
- Assert.NotNull(actual.Metadata);
- }
-
- [Fact(Skip = "For manual verification only")]
- public async Task ItShouldHaveSemanticKernelVersionHeaderAsync()
- {
- // Arrange
- var config = this._configuration.GetSection("Ollama").Get();
- Assert.NotNull(config);
- Assert.NotNull(config.ModelId);
- Assert.NotNull(config.Endpoint);
-
- using var defaultHandler = new HttpClientHandler();
- using var httpHeaderHandler = new HttpHeaderHandler(defaultHandler);
- using var httpClient = new HttpClient(httpHeaderHandler);
- this._kernelBuilder.Services.AddSingleton(this._logger);
- var builder = this._kernelBuilder;
- builder.AddOllamaTextGeneration(
- endpoint: config.Endpoint,
- modelId: config.ModelId,
- httpClient: httpClient);
- Kernel target = builder.Build();
-
- // Act
- var result = await target.InvokePromptAsync("Where is the most famous fish market in Seattle, Washington, USA?");
-
- // Assert
- Assert.NotNull(httpHeaderHandler.RequestHeaders);
- Assert.True(httpHeaderHandler.RequestHeaders.TryGetValues("Semantic-Kernel-Version", out var values));
+ var content = actual.GetValue();
+ Assert.NotNull(content);
+ Assert.NotNull(content.InnerContent);
}
#region internals
@@ -204,7 +178,7 @@ private void ConfigureTextOllama(IKernelBuilder kernelBuilder)
kernelBuilder.AddOllamaTextGeneration(
modelId: config.ModelId,
- endpoint: config.Endpoint);
+ endpoint: new Uri(config.Endpoint));
}
private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler)
diff --git a/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs
index cbf6e52351c4..a24220d6ce2e 100644
--- a/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs
+++ b/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs
@@ -10,5 +10,5 @@ namespace SemanticKernel.IntegrationTests.TestSettings;
internal sealed class OllamaConfiguration
{
public string? ModelId { get; set; }
- public Uri? Endpoint { get; set; }
+ public string? Endpoint { get; set; }
}
From fc8de0cbc3c75704b4928ef0e4a318872c61c1c8 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 10:54:23 +0100
Subject: [PATCH 09/11] Skipping Tests
---
.../Connectors/Ollama/OllamaCompletionTests.cs | 11 +++++------
.../Connectors/Ollama/OllamaTextEmbeddingTests.cs | 4 ++--
.../Connectors/Ollama/OllamaTextGenerationTests.cs | 12 +++++-------
3 files changed, 12 insertions(+), 15 deletions(-)
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
index 406ec1f0ee57..184f4aa5ec37 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
@@ -10,7 +10,6 @@
using Microsoft.Extensions.Logging;
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
-using OllamaSharp.Models;
using OllamaSharp.Models.Chat;
using SemanticKernel.IntegrationTests.TestSettings;
using Xunit;
@@ -31,7 +30,7 @@ public sealed class OllamaCompletionTests(ITestOutputHelper output) : IDisposabl
.AddUserSecrets()
.Build();
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnswerContains)
{
@@ -62,7 +61,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase);
}
- [Fact]//(Skip = "For manual verification only")]
+ [Fact(Skip = "For manual verification only")]
public async Task ItShouldReturnInnerContentAsync()
{
// Arrange
@@ -91,7 +90,7 @@ public async Task ItShouldReturnInnerContentAsync()
Assert.True(innerContent.Done);
}
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("\n")]
[InlineData("\r\n")]
public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
@@ -118,7 +117,7 @@ public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Fact]//(Skip = "For manual verification only")]
+ [Fact(Skip = "For manual verification only")]
public async Task ItInvokePromptTestAsync()
{
// Arrange
@@ -136,7 +135,7 @@ public async Task ItInvokePromptTestAsync()
Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains)
{
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs
index 36015ef9d5bf..222873eccfb6 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextEmbeddingTests.cs
@@ -19,7 +19,7 @@ public sealed class OllamaTextEmbeddingTests
.AddUserSecrets()
.Build();
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("mxbai-embed-large", 1024)]
[InlineData("nomic-embed-text", 768)]
[InlineData("all-minilm", 384)]
@@ -43,7 +43,7 @@ public async Task GenerateEmbeddingHasExpectedLengthForModelAsync(string modelId
Assert.Equal(expectedVectorLength, result.Length);
}
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("mxbai-embed-large", 1024)]
[InlineData("nomic-embed-text", 768)]
[InlineData("all-minilm", 384)]
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
index 24d58ad5f679..f90e248949d7 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
@@ -11,11 +11,9 @@
using Microsoft.SemanticKernel;
using Microsoft.SemanticKernel.Connectors.Ollama;
using OllamaSharp.Models;
-using OllamaSharp.Models.Chat;
using SemanticKernel.IntegrationTests.TestSettings;
using Xunit;
using Xunit.Abstractions;
-using Xunit.Sdk;
namespace SemanticKernel.IntegrationTests.Connectors.Ollama;
@@ -32,7 +30,7 @@ public sealed class OllamaTextGenerationTests(ITestOutputHelper output) : IDispo
.AddUserSecrets()
.Build();
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnswerContains)
{
@@ -58,7 +56,7 @@ public async Task ItInvokeStreamingWorksAsync(string prompt, string expectedAnsw
Assert.Contains(expectedAnswerContains, fullResult.ToString(), StringComparison.OrdinalIgnoreCase);
}
- [Fact]//(Skip = "For manual verification only")]
+ [Fact(Skip = "For manual verification only")]
public async Task ItShouldReturnInnerContentAsync()
{
// Arrange
@@ -88,7 +86,7 @@ public async Task ItShouldReturnInnerContentAsync()
Assert.True(innerContent.Done);
}
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("\n")]
[InlineData("\r\n")]
public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
@@ -115,7 +113,7 @@ public async Task ItCompletesWithDifferentLineEndingsAsync(string lineEnding)
Assert.Contains(ExpectedAnswerContains, actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Fact]//(Skip = "For manual verification only")]
+ [Fact(Skip = "For manual verification only")]
public async Task ItInvokePromptTestAsync()
{
// Arrange
@@ -133,7 +131,7 @@ public async Task ItInvokePromptTestAsync()
Assert.Contains("Pike Place", actual.GetValue(), StringComparison.OrdinalIgnoreCase);
}
- [Theory]//(Skip = "For manual verification only")]
+ [Theory(Skip = "For manual verification only")]
[InlineData("Where is the most famous fish market in Seattle, Washington, USA?", "Pike Place")]
public async Task ItInvokeTestAsync(string prompt, string expectedAnswerContains)
{
From a94730628a835b5629681b315bb95e4d91e1f73b Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 10:58:16 +0100
Subject: [PATCH 10/11] Fix warnings
---
.../Connectors/Ollama/OllamaCompletionTests.cs | 11 -----------
.../Connectors/Ollama/OllamaTextGenerationTests.cs | 11 -----------
.../TestSettings/OllamaConfiguration.cs | 1 -
3 files changed, 23 deletions(-)
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
index 184f4aa5ec37..2c2475964a09 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
@@ -180,16 +180,5 @@ private void ConfigureChatOllama(IKernelBuilder kernelBuilder)
endpoint: new Uri(config.Endpoint));
}
- private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler)
- {
- public System.Net.Http.Headers.HttpRequestHeaders? RequestHeaders { get; private set; }
-
- protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
- {
- this.RequestHeaders = request.Headers;
- return await base.SendAsync(request, cancellationToken);
- }
- }
-
#endregion
}
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
index f90e248949d7..524927ca3a63 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
@@ -179,16 +179,5 @@ private void ConfigureTextOllama(IKernelBuilder kernelBuilder)
endpoint: new Uri(config.Endpoint));
}
- private sealed class HttpHeaderHandler(HttpMessageHandler innerHandler) : DelegatingHandler(innerHandler)
- {
- public System.Net.Http.Headers.HttpRequestHeaders? RequestHeaders { get; private set; }
-
- protected override async Task SendAsync(HttpRequestMessage request, CancellationToken cancellationToken)
- {
- this.RequestHeaders = request.Headers;
- return await base.SendAsync(request, cancellationToken);
- }
- }
-
#endregion
}
diff --git a/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs b/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs
index a24220d6ce2e..51e8d77eee0a 100644
--- a/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs
+++ b/dotnet/src/IntegrationTests/TestSettings/OllamaConfiguration.cs
@@ -1,6 +1,5 @@
// Copyright (c) Microsoft. All rights reserved.
-using System;
using System.Diagnostics.CodeAnalysis;
namespace SemanticKernel.IntegrationTests.TestSettings;
From 71166bb8ea90c4bac7e9453f1e37c78fc68fca06 Mon Sep 17 00:00:00 2001
From: Roger Barreto <19890735+RogerBarreto@users.noreply.github.com>
Date: Fri, 6 Sep 2024 11:12:47 +0100
Subject: [PATCH 11/11] Fix warnings
---
.../IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs | 2 --
.../Connectors/Ollama/OllamaTextGenerationTests.cs | 2 --
2 files changed, 4 deletions(-)
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
index 2c2475964a09..5dced3f7b4b4 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaCompletionTests.cs
@@ -1,9 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
-using System.Net.Http;
using System.Text;
-using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
diff --git a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
index 524927ca3a63..126980f57ede 100644
--- a/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
+++ b/dotnet/src/IntegrationTests/Connectors/Ollama/OllamaTextGenerationTests.cs
@@ -1,9 +1,7 @@
// Copyright (c) Microsoft. All rights reserved.
using System;
-using System.Net.Http;
using System.Text;
-using System.Threading;
using System.Threading.Tasks;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;