addresses #1, updates readme, version bump
tghamm committed Jan 16, 2024
1 parent b0ed448 commit da9234e
Showing 5 changed files with 100 additions and 22 deletions.
53 changes: 52 additions & 1 deletion Mistral.SDK.Tests/Completions.cs
@@ -21,15 +21,66 @@ public async Task TestMistralCompletion()

}

[TestMethod]
public async Task TestMistralCompletionSafeWithOptions()
{
    var client = new MistralClient();
    var request = new ChatCompletionRequest(
        //define model - required
        ModelDefinitions.MistralMedium,
        //define messages - required
        new List<ChatMessage>()
        {
            new ChatMessage(ChatMessage.RoleEnum.System,
                "You are an expert at writing sonnets."),
            new ChatMessage(ChatMessage.RoleEnum.User,
                "Write me a sonnet about the Statue of Liberty.")
        },
        //optional - defaults to false
        safePrompt: true,
        //optional - defaults to 0.7
        temperature: 0,
        //optional - defaults to null
        maxTokens: 500,
        //optional - defaults to 1
        topP: 1,
        //optional - defaults to null
        randomSeed: 32);
    var response = await client.Completions.GetCompletionAsync(request);
}

[TestMethod]
public async Task TestMistralCompletionStreaming()
{
    var client = new MistralClient();
    var request = new ChatCompletionRequest(
        ModelDefinitions.MistralMedium,
        new List<ChatMessage>()
        {
            new ChatMessage(ChatMessage.RoleEnum.System,
                "You are an expert at writing sonnets."),
            new ChatMessage(ChatMessage.RoleEnum.User,
                "Write me a sonnet about the Statue of Liberty.")
        });
    var results = new List<ChatCompletionResponse>();
    await foreach (var res in client.Completions.StreamCompletionAsync(request))
    {
        results.Add(res);
        Debug.Write(res.Choices.First().Delta.Content);
    }
    Assert.IsTrue(results.Any());
}

[TestMethod]
public async Task TestMistralCompletionStreamingSafePrompt()
{
    var client = new MistralClient();
    var request = new ChatCompletionRequest(ModelDefinitions.MistralMedium, new List<ChatMessage>()
    {
        new ChatMessage(ChatMessage.RoleEnum.System, "You are an expert at writing sonnets."),
        new ChatMessage(ChatMessage.RoleEnum.User, "Write me a sonnet about the Statue of Liberty.")
-    });
+    }, safePrompt: true);
    var results = new List<ChatCompletionResponse>();
    await foreach (var res in client.Completions.StreamCompletionAsync(request))
    {
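A side note on the streaming tests above: each streamed chunk carries only a delta, so a consumer typically concatenates the `Delta.Content` values to rebuild the full message. A minimal sketch, not part of this commit, using only the SDK calls that appear in these tests (the `Mistral.SDK` namespace layout is an assumption, not confirmed by this diff):

```csharp
using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Mistral.SDK;      // assumed namespace layout; not confirmed by this diff
using Mistral.SDK.DTOs; // assumed namespace layout; not confirmed by this diff

// Minimal sketch: rebuild the full completion from streamed deltas.
var client = new MistralClient();
var request = new ChatCompletionRequest(ModelDefinitions.MistralMedium, new List<ChatMessage>()
{
    new ChatMessage(ChatMessage.RoleEnum.User, "Write me a sonnet about the Statue of Liberty.")
});
var sb = new StringBuilder();
await foreach (var res in client.Completions.StreamCompletionAsync(request))
{
    // Each chunk carries the next fragment of the assistant's message.
    sb.Append(res.Choices.First().Delta.Content);
}
Console.WriteLine(sb.ToString()); // the assembled completion
```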
5 changes: 4 additions & 1 deletion Mistral.SDK.Tests/Embeddings.cs
@@ -14,7 +14,10 @@ public class Embeddings
public async Task TestMistralEmbeddings()
{
var client = new MistralClient();
-var request = new EmbeddingRequest(ModelDefinitions.MistralEmbed, new List<string>() { "Hello world" }, EmbeddingRequest.EncodingFormatEnum.Float);
+var request = new EmbeddingRequest(
+    ModelDefinitions.MistralEmbed,
+    new List<string>() { "Hello world" },
+    EmbeddingRequest.EncodingFormatEnum.Float);
var response = await client.Embeddings.GetEmbeddingsAsync(request);
Assert.IsNotNull(response);
}
12 changes: 6 additions & 6 deletions Mistral.SDK/DTOs/ChatCompletionRequest.cs
@@ -16,9 +16,9 @@ public class ChatCompletionRequest
/// <param name="topP">Nucleus sampling, where the model considers the results of the tokens with `top_p` probability mass. So 0.1 means only the tokens comprising the top 10% probability mass are considered. We generally recommend altering this or `temperature` but not both. (default to 1).</param>
/// <param name="maxTokens">The maximum number of tokens to generate in the completion. The token count of your prompt plus `max_tokens` cannot exceed the model's context length.</param>
/// <param name="stream">Whether to stream back partial progress. If set, tokens will be sent as data-only server-sent events as they become available, with the stream terminated by a data: [DONE] message. Otherwise, the server will hold the request open until the timeout or until completion, with the response containing the full result as JSON. (default to false).</param>
-/// <param name="safeMode">Whether to inject a safety prompt before all conversations. (default to false).</param>
+/// <param name="safePrompt">Whether to inject a safety prompt before all conversations. (default to false).</param>
/// <param name="randomSeed">The seed to use for random sampling. If set, different calls will generate deterministic results. .</param>
-public ChatCompletionRequest(string model = default(string), List<ChatMessage> messages = default(List<ChatMessage>), decimal? temperature = 0.7M, decimal? topP = 1M, int? maxTokens = default(int?), bool? stream = false, bool safeMode = false, int randomSeed = default(int))
+public ChatCompletionRequest(string model = default(string), List<ChatMessage> messages = default(List<ChatMessage>), decimal? temperature = 0.7M, decimal? topP = 1M, int? maxTokens = default(int?), bool? stream = false, bool safePrompt = false, int? randomSeed = default(int?))
{
// to ensure "model" is required (not null)
if (model == null)
@@ -39,7 +39,7 @@ public class ChatCompletionRequest
this.MaxTokens = maxTokens;
// use default value if no "stream" provided
this.Stream = stream ?? false;
-this.SafeMode = safeMode;
+this.SafePrompt = safePrompt;
this.RandomSeed = randomSeed;
}
/// <summary>
@@ -93,15 +93,15 @@ public class ChatCompletionRequest
/// Whether to inject a safety prompt before all conversations.
/// </summary>
/// <value>Whether to inject a safety prompt before all conversations. </value>
-[JsonPropertyName("safe_mode")]
-public bool SafeMode { get; set; }
+[JsonPropertyName("safe_prompt")]
+public bool SafePrompt { get; set; }

/// <summary>
/// The seed to use for random sampling. If set, different calls will generate deterministic results.
/// </summary>
/// <value>The seed to use for random sampling. If set, different calls will generate deterministic results. </value>
[JsonPropertyName("random_seed")]
-public int RandomSeed { get; set; }
+public int? RandomSeed { get; set; }

IEnumerable<ValidationResult> Validate()
{
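One practical consequence of the rename above: because the property is mapped with `JsonPropertyName` (a System.Text.Json attribute), the outgoing request body now carries a `safe_prompt` key instead of `safe_mode`. A minimal sketch, not part of this commit, that makes the change visible (the `Mistral.SDK` namespaces are assumptions):

```csharp
using System;
using System.Collections.Generic;
using System.Text.Json;
using Mistral.SDK;      // assumed namespace layout; not confirmed by this diff
using Mistral.SDK.DTOs;

// Minimal sketch: the property renamed in this commit serializes under the
// "safe_prompt" wire name declared by [JsonPropertyName] above.
var request = new ChatCompletionRequest(
    ModelDefinitions.MistralMedium,
    new List<ChatMessage>() { new ChatMessage(ChatMessage.RoleEnum.User, "Hello") },
    safePrompt: true);
var json = JsonSerializer.Serialize(request);
Console.WriteLine(json.Contains("\"safe_prompt\":true")); // expected: True
```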
10 changes: 5 additions & 5 deletions Mistral.SDK/Mistral.SDK.csproj
@@ -11,15 +11,15 @@
<PackageLicenseExpression>MIT</PackageLicenseExpression>
<PackageProjectUrl>https://github.com/tghamm/Mistral.SDK</PackageProjectUrl>
<RepositoryUrl>https://github.com/tghamm/Mistral.SDK</RepositoryUrl>
-<PackageTags>Mistral, AI, ML, API, C#, .NET</PackageTags>
+<PackageTags>Mistral, AI, ML, API, C#, .NET, Mixtral</PackageTags>
<Title>Mistral API</Title>
<PackageReleaseNotes>
-    Bug fix in streaming output.
+    Bug fix in safe_prompt parameter.
</PackageReleaseNotes>
<PackageId>Mistral.SDK</PackageId>
-<Version>1.0.1</Version>
-<AssemblyVersion>1.0.1.0</AssemblyVersion>
-<FileVersion>1.0.1.0</FileVersion>
+<Version>1.1.0</Version>
+<AssemblyVersion>1.1.0.0</AssemblyVersion>
+<FileVersion>1.1.0.0</FileVersion>
<GenerateDocumentationFile>True</GenerateDocumentationFile>
<PackageReadmeFile>README.md</PackageReadmeFile>
<ProduceReferenceAssembly>True</ProduceReferenceAssembly>
42 changes: 33 additions & 9 deletions README.md
@@ -1,6 +1,6 @@
# Mistral.SDK

-[![.NET](https://github.com/tghamm/Mistral.SDK/actions/workflows/dotnet.yml/badge.svg)](https://github.com/tghamm/Mistral.SDK/actions/workflows/dotnet.yml)
+[![.NET](https://github.com/tghamm/Mistral.SDK/actions/workflows/dotnet.yml/badge.svg)](https://github.com/tghamm/Mistral.SDK/actions/workflows/dotnet.yml) [![Nuget](https://img.shields.io/nuget/v/Mistral.SDK)](https://www.nuget.org/packages/Mistral.SDK/)

Mistral.SDK is an unofficial C# client designed for interacting with the Mistral API. This powerful interface simplifies the integration of Mistral AI into your C# applications. It targets netstandard2.0 and net6.0.

@@ -46,11 +46,28 @@ Here's an example of a non-streaming call to the mistral-medium completions endpoint

```csharp
var client = new MistralClient();
-var request = new ChatCompletionRequest(ModelDefinitions.MistralMedium, new List<ChatMessage>()
+var request = new ChatCompletionRequest(
+    //define model - required
+    ModelDefinitions.MistralMedium,
+    //define messages - required
+    new List<ChatMessage>()
{
-    new ChatMessage(ChatMessage.RoleEnum.System, "You are an expert at writing sonnets."),
-    new ChatMessage(ChatMessage.RoleEnum.User, "Write me a sonnet about the Statue of Liberty.")
-});
+    new ChatMessage(ChatMessage.RoleEnum.System,
+        "You are an expert at writing sonnets."),
+    new ChatMessage(ChatMessage.RoleEnum.User,
+        "Write me a sonnet about the Statue of Liberty.")
+},
+    //optional - defaults to false
+    safePrompt: true,
+    //optional - defaults to 0.7
+    temperature: 0,
+    //optional - defaults to null
+    maxTokens: 500,
+    //optional - defaults to 1
+    topP: 1,
+    //optional - defaults to null
+    randomSeed: 32);
var response = await client.Completions.GetCompletionAsync(request);
Console.WriteLine(response.Choices.First().Message.Content);
```
@@ -61,10 +78,14 @@ The following is an example of a streaming call to the mistral-medium completions endpoint

```csharp
var client = new MistralClient();
-var request = new ChatCompletionRequest(ModelDefinitions.MistralMedium, new List<ChatMessage>()
+var request = new ChatCompletionRequest(
+    ModelDefinitions.MistralMedium,
+    new List<ChatMessage>()
{
-    new ChatMessage(ChatMessage.RoleEnum.System, "You are an expert at writing sonnets."),
-    new ChatMessage(ChatMessage.RoleEnum.User, "Write me a sonnet about the Statue of Liberty.")
+    new ChatMessage(ChatMessage.RoleEnum.System,
+        "You are an expert at writing sonnets."),
+    new ChatMessage(ChatMessage.RoleEnum.User,
+        "Write me a sonnet about the Statue of Liberty.")
});
var results = new List<ChatCompletionResponse>();
await foreach (var res in client.Completions.StreamCompletionAsync(request))
```
@@ -90,7 +111,10 @@ The following is an example of a call to the mistral-embed embeddings model/endpoint

```csharp
var client = new MistralClient();
-var request = new EmbeddingRequest(ModelDefinitions.MistralEmbed, new List<string>() { "Hello world" }, EmbeddingRequest.EncodingFormatEnum.Float);
+var request = new EmbeddingRequest(
+    ModelDefinitions.MistralEmbed,
+    new List<string>() { "Hello world" },
+    EmbeddingRequest.EncodingFormatEnum.Float);
var response = await client.Embeddings.GetEmbeddingsAsync(request);
```
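To do something with the embeddings returned above, the vector values have to be read off the response. A minimal sketch continuing from the snippet above, not part of this commit; the `Data` and `Embedding` property names are assumptions mirroring the API's `{"data":[{"embedding":[...]}]}` response shape and may differ in the SDK (.NET 6 implicit usings assumed):

```csharp
// Minimal sketch; Data/Embedding property names are assumptions mirroring the
// API's {"data":[{"embedding":[...]}]} response shape, not confirmed by this diff.
var vector = response.Data.First().Embedding;
Console.WriteLine($"Dimensions: {vector.Count}");

// Cosine similarity between two embedding vectors, e.g. for semantic search.
static double CosineSimilarity(IList<double> a, IList<double> b)
{
    double dot = 0, normA = 0, normB = 0;
    for (var i = 0; i < a.Count; i++)
    {
        dot += a[i] * b[i];
        normA += a[i] * a[i];
        normB += b[i] * b[i];
    }
    return dot / (Math.Sqrt(normA) * Math.Sqrt(normB));
}
```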

