Update Microsoft.Extensions.AI to 9.3.0-preview.1.25114.11 #19

Merged · 2 commits · Feb 21, 2025
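For context before the diff: this preview renames the IChatClient surface in Microsoft.Extensions.AI. CompleteAsync becomes GetResponseAsync and returns a ChatResponse, CompleteStreamingAsync becomes GetStreamingResponseAsync and yields ChatResponseUpdate values, and CompletionId becomes ResponseId. A minimal caller-side sketch of the new shape (prompts are illustrative; the client setup and member names are taken from the tests and implementation below):

using System;
using System.Collections.Generic;
using Microsoft.Extensions.AI;
using Mistral.SDK;

IChatClient client = new MistralClient().Completions;

// Non-streaming: GetResponseAsync replaces CompleteAsync.
ChatResponse response = await client.GetResponseAsync(new List<ChatMessage>
{
    new(ChatRole.System, "You are an expert at writing sonnets."),
    new(ChatRole.User, "Write me a sonnet about the Statue of Liberty.")
});
Console.WriteLine(response.Message.Text);

// Streaming: GetStreamingResponseAsync replaces CompleteStreamingAsync.
await foreach (ChatResponseUpdate update in client.GetStreamingResponseAsync("Write me a sonnet about the Statue of Liberty."))
{
    Console.Write(update);
}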
12 changes: 6 additions & 6 deletions Mistral.SDK.Tests/ChatClient.cs
@@ -13,7 +13,7 @@ public async Task TestMistralCompletionModel()
{
IChatClient client = new MistralClient().Completions;

- var response = await client.CompleteAsync(new List<ChatMessage>()
+ var response = await client.GetResponseAsync(new List<ChatMessage>()
{
new(ChatRole.System, "You are an expert at writing sonnets."),
new(ChatRole.User, "Write me a sonnet about the Statue of Liberty.")
@@ -32,7 +32,7 @@ public async Task TestMistralCompletionJsonMode()
{
IChatClient client = new MistralClient().Completions;

- var response = await client.CompleteAsync(new List<ChatMessage>()
+ var response = await client.GetResponseAsync(new List<ChatMessage>()
{
new(ChatRole.System, "You are an expert at writing Json."),
new(ChatRole.User, "Write me a simple 'hello world' statement in a json object with a single 'result' key.")
@@ -50,7 +50,7 @@ public async Task TestMistralCompletionJsonModeStreaming()
IChatClient client = new MistralClient().Completions;

var sb = new StringBuilder();
- await foreach (var update in client.CompleteStreamingAsync(new List<ChatMessage>()
+ await foreach (var update in client.GetStreamingResponseAsync(new List<ChatMessage>()
{
new(ChatRole.System, "You are an expert at writing Json."),
new(ChatRole.User, "Write me a simple 'hello world' statement in a json object with a single 'result' key.")
@@ -68,7 +68,7 @@ public async Task TestMistralCompletionSafeWithOptions()
{
IChatClient client = new MistralClient().Completions;

- var response = await client.CompleteAsync(new List<ChatMessage>()
+ var response = await client.GetResponseAsync(new List<ChatMessage>()
{
new(ChatRole.System, "You are an expert at writing sonnets."),
new(ChatRole.User, "Write me a sonnet about the Statue of Liberty.")
@@ -106,7 +106,7 @@ public async Task TestNonStreamingFunctionCalls()
}, "GetPersonAge", "Gets the age of the person whose name is specified.")]
};

- var res = await client.CompleteAsync("How old is Alice?", options);
+ var res = await client.GetResponseAsync("How old is Alice?", options);

Assert.IsTrue(
res.Message.Text?.Contains("25") is true,
@@ -133,7 +133,7 @@ public async Task TestStreamingFunctionCalls()
};

StringBuilder sb = new();
- await foreach (var update in client.CompleteStreamingAsync("How old is Alice?", options))
+ await foreach (var update in client.GetStreamingResponseAsync("How old is Alice?", options))
{
sb.Append(update);
}
4 changes: 2 additions & 2 deletions Mistral.SDK.Tests/Mistral.SDK.Tests.csproj
@@ -13,9 +13,9 @@
<PackageReference Include="MSTest.TestAdapter" Version="3.7.1" />
<PackageReference Include="MSTest.TestFramework" Version="3.7.1" />
<PackageReference Include="coverlet.collector" Version="3.1.2" />
<PackageReference Include="Microsoft.Extensions.AI" Version="9.1.0-preview.1.25064.3" />
<PackageReference Include="Microsoft.Extensions.AI" Version="9.3.0-preview.1.25114.11" />
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.33.0" />
<PackageReference Include="Microsoft.SemanticKernel" Version="1.38.0" />
</ItemGroup>

<ItemGroup>
2 changes: 1 addition & 1 deletion Mistral.SDK.Tests/SemanticKernelInitializationTests.cs
@@ -37,7 +37,7 @@ public async Task TestSKInit()
OpenAIPromptExecutionSettings promptExecutionSettings = new()
{
FunctionChoiceBehavior = FunctionChoiceBehavior.Auto(),
- ModelId = ModelDefinitions.MistralSmall,
+ ModelId = ModelDefinitions.MistralLarge,
MaxTokens = 1024,
Temperature = 0.0,
};
64 changes: 24 additions & 40 deletions Mistral.SDK/Completions/CompletionsEndpoint.ChatClient.cs
@@ -15,18 +15,17 @@ namespace Mistral.SDK.Completions
{
public partial class CompletionsEndpoint : IChatClient
{

- async Task<ChatCompletion> IChatClient.CompleteAsync(
+ async Task<ChatResponse> IChatClient.GetResponseAsync(
IList<Microsoft.Extensions.AI.ChatMessage> chatMessages, ChatOptions options, CancellationToken cancellationToken)
{
var response = await GetCompletionAsync(CreateRequest(chatMessages, options), cancellationToken).ConfigureAwait(false);

Microsoft.Extensions.AI.ChatMessage message = new(ChatRole.Assistant, ProcessResponseContent(response));

- var completion = new ChatCompletion(message)
+ var completion = new ChatResponse(message)
{
- CompletionId = response.Id,
- ModelId = response.Model
+ ModelId = response.Model,
+ ResponseId = response.Id,
};

if (response.Usage is { } usage)
@@ -42,18 +41,18 @@ async Task<ChatCompletion> IChatClient.CompleteAsync(
return completion;
}

- async IAsyncEnumerable<StreamingChatCompletionUpdate> IChatClient.CompleteStreamingAsync(
+ async IAsyncEnumerable<ChatResponseUpdate> IChatClient.GetStreamingResponseAsync(
IList<Microsoft.Extensions.AI.ChatMessage> chatMessages, ChatOptions options, [EnumeratorCancellation] CancellationToken cancellationToken)
{
await foreach (var response in StreamCompletionAsync(CreateRequest(chatMessages, options), cancellationToken).WithCancellation(cancellationToken).ConfigureAwait(false))
{
foreach (var choice in response.Choices)
{
- var update = new StreamingChatCompletionUpdate {
+ var update = new ChatResponseUpdate {
ChoiceIndex = choice.Index,
- CompletionId = response.Id,
ModelId = response.Model,
RawRepresentation = response,
+ ResponseId = response.Id,
Role = choice.Delta?.Role switch
{
DTOs.ChatMessage.RoleEnum.System => ChatRole.System,
@@ -97,9 +96,8 @@ async IAsyncEnumerable<StreamingChatCompletionUpdate> IChatClient.CompleteStream

if (response.Usage is { } usage)
{
- yield return new StreamingChatCompletionUpdate()
+ yield return new ChatResponseUpdate()
{
- CompletionId = response.Id,
ModelId = response.Model,
Contents = new List<AIContent>()
{
Expand All @@ -110,6 +108,7 @@ async IAsyncEnumerable<StreamingChatCompletionUpdate> IChatClient.CompleteStream
TotalTokenCount = usage.TotalTokens
})
},
+ ResponseId = response.Id,
};
}
}
@@ -130,7 +129,7 @@ private static ChatCompletionRequest CreateRequest(IList<Microsoft.Extensions.AI
switch (content)
{
case Microsoft.Extensions.AI.FunctionResultContent frc:
- return new DTOs.ChatMessage(frc.CallId, frc.Name, frc.Result?.ToString());
+ return new DTOs.ChatMessage(frc.CallId, frc.CallId, frc.Result?.ToString());
case Microsoft.Extensions.AI.FunctionCallContent fcc:
return new DTOs.ChatMessage()
{
@@ -170,20 +169,26 @@ private static ChatCompletionRequest CreateRequest(IList<Microsoft.Extensions.AI

if (options.Tools is { Count: > 0 })
{

if (options.ToolMode is RequiredChatToolMode r)
{
request.ToolChoice = ToolChoiceType.Any;
}
- else if (options.ToolMode is AutoChatToolMode a)
+ else if (options.ToolMode is AutoChatToolMode or null)
{
request.ToolChoice = ToolChoiceType.Auto;
}
+ else if (options.ToolMode is NoneChatToolMode)
+ {
+ request.ToolChoice = ToolChoiceType.none;
+ }

request.Tools = options
.Tools
.OfType<AIFunction>()
- .Select(f => new Common.Tool(new Common.Function(f.Metadata.Name, f.Metadata.Description, FunctionParameters.CreateSchema(f))))
+ .Select(f => new Common.Tool(new Common.Function(
+ f.Name,
+ f.Description,
+ JsonSerializer.SerializeToNode(JsonSerializer.Deserialize<FunctionParameters>(f.JsonSchema)))))
.ToList();
}
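The tool mapping above also reflects a change to AIFunction itself in this preview: Name, Description, and a JsonSchema element are exposed directly on the function rather than through f.Metadata, which is why the FunctionParameters.CreateSchema helper is deleted further down. A minimal sketch of what the new mapping relies on, assuming AIFunctionFactory.Create as used in the tests above (the lambda body is illustrative):

using System;
using Microsoft.Extensions.AI;

// Mirrors the GetPersonAge function created in TestNonStreamingFunctionCalls.
AIFunction getPersonAge = AIFunctionFactory.Create(
    (string personName) => 25,
    "GetPersonAge",
    "Gets the age of the person whose name is specified.");

Console.WriteLine(getPersonAge.Name);         // "GetPersonAge"
Console.WriteLine(getPersonAge.Description);  // "Gets the age of the person whose name is specified."
Console.WriteLine(getPersonAge.JsonSchema);   // JSON schema the SDK now deserializes into FunctionParameters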

@@ -225,18 +230,16 @@ private static List<AIContent> ProcessResponseContent(ChatCompletionResponse res

void IDisposable.Dispose() { }

- object IChatClient.GetService(Type serviceType, object key) =>
- key is null && serviceType?.IsInstanceOfType(this) is true ? this : null;

- ChatClientMetadata IChatClient.Metadata => _metadata ??= new ChatClientMetadata(nameof(MistralClient), new Uri(Url));
+ object IChatClient.GetService(Type serviceType, object serviceKey) =>
+ serviceKey is not null ? null :
+ serviceType == typeof(ChatClientMetadata) ? (_metadata ??= new ChatClientMetadata(nameof(MistralClient), new Uri(Url))) :
+ serviceType?.IsInstanceOfType(this) is true ? this :
+ null;

private ChatClientMetadata _metadata;


private sealed class FunctionParameters
{
- private static readonly JsonElement s_defaultParameterSchema = JsonDocument.Parse("{}").RootElement;

[JsonPropertyName("type")]
public string Type { get; set; } = "object";

Expand All @@ -245,25 +248,6 @@ private sealed class FunctionParameters

[JsonPropertyName("properties")]
public Dictionary<string, JsonElement> Properties { get; set; } = [];

- public static JsonNode CreateSchema(AIFunction f)
- {
- var parameters = f.Metadata.Parameters;
-
- FunctionParameters schema = new();
-
- foreach (AIFunctionParameterMetadata parameter in parameters)
- {
- schema.Properties.Add(parameter.Name, parameter.Schema is JsonElement e ? e : s_defaultParameterSchema);
-
- if (parameter.IsRequired)
- {
- schema.Required.Add(parameter.Name);
- }
- }
-
- return JsonSerializer.SerializeToNode(schema);
- }
}
}
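With the Metadata property removed from IChatClient, callers that previously read client.Metadata now go through GetService. A minimal consumer-side sketch against this endpoint (GetService and ChatClientMetadata come from the change above; ProviderName and ProviderUri are assumed property names from the abstractions package):

using System;
using Microsoft.Extensions.AI;
using Mistral.SDK;

IChatClient client = new MistralClient().Completions;

// ChatClientMetadata is now resolved through GetService instead of a Metadata property.
var metadata = client.GetService(typeof(ChatClientMetadata), null) as ChatClientMetadata;
Console.WriteLine($"{metadata?.ProviderName} -> {metadata?.ProviderUri}");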

10 changes: 5 additions & 5 deletions Mistral.SDK/Embeddings/EmbeddingsEndpoint.cs
@@ -77,14 +77,14 @@ async Task<GeneratedEmbeddings<Embedding<float>>> IEmbeddingGenerator<string, Em
return embeddings;
}

- object IEmbeddingGenerator<string, Embedding<float>>.GetService(Type serviceType, object key) =>
- key is null && serviceType?.IsInstanceOfType(this) is true ? this : null;
+ object IEmbeddingGenerator<string, Embedding<float>>.GetService(Type serviceType, object serviceKey) =>
+ serviceKey is not null ? null :
+ serviceType == typeof(EmbeddingGeneratorMetadata) ? (_metadata ??= new EmbeddingGeneratorMetadata(nameof(MistralClient), new Uri(Url))) :
+ serviceType?.IsInstanceOfType(this) is true ? this :
+ null;

void IDisposable.Dispose() { }

- EmbeddingGeneratorMetadata IEmbeddingGenerator<string, Embedding<float>>.Metadata =>
- _metadata ??= new EmbeddingGeneratorMetadata(nameof(MistralClient), new Uri(Url));

private EmbeddingGeneratorMetadata _metadata;
}
}
2 changes: 1 addition & 1 deletion Mistral.SDK/Mistral.SDK.csproj
@@ -32,7 +32,7 @@
<SymbolPackageFormat>snupkg</SymbolPackageFormat>
</PropertyGroup>
<ItemGroup>
<PackageReference Include="Microsoft.Extensions.AI.Abstractions" Version="9.1.0-preview.1.25064.3" />
<PackageReference Include="Microsoft.Extensions.AI.Abstractions" Version="9.3.0-preview.1.25114.11" />
<PackageReference Include="Microsoft.Bcl.AsyncInterfaces" Version="8.0.0" />
</ItemGroup>
<ItemGroup Condition="'$(TargetFramework)' == 'netstandard2.0'">