Model selection by API (#10)
* Added model selection dropdown
* Added interface for LLM clients
* Extended markdown for follow-up
EmilianoMusso authored Jan 19, 2025
1 parent 18d6ceb commit 56f6a83
Showing 14 changed files with 232 additions and 56 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,9 @@
### v1.11.0 Model selection by API
---
- LLM models are now selectable per command by querying the Ollama API
- Markdown views can now use follow-up functions
- Minor refactoring

### v1.10.0 Added answer language
---
- It is now possible to set a language in which the LLM must answer
4 changes: 3 additions & 1 deletion README.md
@@ -14,12 +14,14 @@ To use the EntwineLLM extension, you need to have a local or Docker-hosted open
* [Ollama models](https://ollama.com/search)

#### Setup
After installing the EntwineLLM extension, its configuration options will be available in the Visual Studio Options menu. These options allow users to specify the base URL of the locally installed LLM, select the LLM model to use for each specific extension command (install them first), and configure the HTTP request timeout settings for communication with the LLM, along with the preferred language the LLM must use when answering.
After installing the EntwineLLM extension, its configuration options will be available in the Visual Studio Options menu. These options allow users to specify the base URL of the locally installed LLM, select the LLM model to use for each extension command, configure the HTTP request timeout for communication with the LLM, and set the preferred language the LLM must use when answering.

![image](./src/EntwineLLM/Resources/vs-entwine-configuration.png)

![image](./src/EntwineLLM/Resources/vs-entwine-options.png)

> Note: Available LLMs for each command are listed by querying the Ollama API. Please install the needed models first.
These settings provide flexibility in customizing the behavior of the extension to match the user's environment and preferences.

#### Using the extension
17 changes: 17 additions & 0 deletions src/EntwineLLM/Clients/Interfacs/ILlmClient.cs
@@ -0,0 +1,17 @@
using EntwineLlm.Enums;
using EntwineLlm.Models;
using System;
using System.Threading.Tasks;

namespace EntwineLlm.Clients.Interfacs
{
internal interface ILlmClient
{
void SetBaseUrl(string baseUrl);
string GetBaseUrl();
void SetTimeOut(TimeSpan timeOut);
TimeSpan GetTimeOut();
Task<string[]> GetModelListAsync();
Task<CodeSuggestionResponse> GetCodeSuggestionsAsync(CodeType codeType, string prompt);
}
}
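
The interface above is what decouples the extension's commands from a concrete backend. As a minimal sketch (hypothetical, not part of this commit), a stub implementation could back unit tests or an alternative LLM host:

```csharp
using System;
using System.Threading.Tasks;
using EntwineLlm.Clients.Interfacs;
using EntwineLlm.Enums;
using EntwineLlm.Models;

// Hypothetical stub client: returns canned data so commands can be
// exercised without a running LLM server.
internal sealed class FakeLlmClient : ILlmClient
{
    private string _baseUrl = "http://localhost:11434";
    private TimeSpan _timeOut = TimeSpan.FromMinutes(10);

    public void SetBaseUrl(string baseUrl) => _baseUrl = baseUrl;
    public string GetBaseUrl() => _baseUrl;
    public void SetTimeOut(TimeSpan timeOut) => _timeOut = timeOut;
    public TimeSpan GetTimeOut() => _timeOut;

    public Task<string[]> GetModelListAsync()
        => Task.FromResult(new[] { "llama3.2" });

    // The construction of CodeSuggestionResponse is assumed here;
    // adjust to the real model type's shape.
    public Task<CodeSuggestionResponse> GetCodeSuggestionsAsync(CodeType codeType, string prompt)
        => Task.FromResult(new CodeSuggestionResponse());
}
```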
src/EntwineLLM/Clients/OllamaClient.cs (renamed from LlmClient.cs)
@@ -1,26 +1,81 @@
using EntwineLlm.Enums;
using EntwineLlm.Clients.Interfacs;
using EntwineLlm.Enums;
using EntwineLlm.Models;
using Newtonsoft.Json.Linq;
using System;
using System.Collections.Generic;
using System.Linq;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;

namespace EntwineLlm.Clients
{
internal class LlmClient : IDisposable
internal class OllamaClient : ILlmClient, IDisposable
{
private readonly string _baseUrl;
private readonly TimeSpan _timeOut;
private string _baseUrl;
private TimeSpan _timeOut;
private bool disposedValue;

public LlmClient(string baseUrl, TimeSpan timeOut)
public OllamaClient(GeneralOptions options)
{
_baseUrl = options.LlmUrl;
_timeOut = options.LlmRequestTimeOut;
}

public OllamaClient(string baseUrl, TimeSpan timeOut)
{
_baseUrl = baseUrl;
_timeOut = timeOut;
}

public void SetBaseUrl(string baseUrl)
{
_baseUrl = baseUrl;
}

public string GetBaseUrl()
{
return _baseUrl;
}

public void SetTimeOut(TimeSpan timeOut)
{
_timeOut = timeOut;
}

public TimeSpan GetTimeOut()
{
return _timeOut;
}

public async Task<string[]> GetModelListAsync()
{
using var client = new HttpClient();
client.Timeout = _timeOut;

try
{
List<string> modelList = [];
var response = await client.GetAsync($"{_baseUrl}/api/tags");
response.EnsureSuccessStatusCode();

var responseContent = await response.Content.ReadAsStringAsync();
var models = JObject.Parse(responseContent)["models"];
foreach (var model in models)
{
modelList.Add(model["name"].ToString());
}

return [.. modelList];
}
catch
{
return Enumerable.Empty<string>().ToArray();
}
}

public async Task<CodeSuggestionResponse> GetCodeSuggestionsAsync(CodeType codeType, string prompt)
{
using var client = new HttpClient();
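For context, a hedged usage sketch of the client above, assuming the default base URL. Ollama's `GET /api/tags` endpoint returns JSON shaped roughly like `{ "models": [ { "name": "llama3.2:latest", ... } ] }`, which is what `GetModelListAsync` walks via `JObject.Parse(...)["models"]`:

```csharp
using System;
using System.Threading.Tasks;

// Hedged usage sketch (not part of the commit): enumerate locally installed models.
internal static class ModelListDemo
{
    public static async Task RunAsync()
    {
        var client = new OllamaClient("http://localhost:11434", TimeSpan.FromMinutes(10));

        // On any HTTP or parsing failure the client catches and returns an
        // empty array, so callers (e.g. the options dropdowns) never see an exception.
        string[] models = await client.GetModelListAsync(); // e.g. ["llama3.2:latest"]

        foreach (var name in models)
        {
            Console.WriteLine(name);
        }
    }
}
```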
15 changes: 8 additions & 7 deletions src/EntwineLLM/Commands/BaseCommand.cs
@@ -1,6 +1,5 @@
using EntwineLlm.Enums;
using EntwineLlm.Helpers;
using ICSharpCode.AvalonEdit;
using Microsoft.VisualStudio.Shell;
using System;
using System.Threading.Tasks;
@@ -14,7 +13,6 @@ internal class BaseCommand

public string ActiveDocumentPath;
public TextBox ManualPromptTextBox;
public TextEditor SuggestedCodeEditor;

public BaseCommand(AsyncPackage package)
{
@@ -52,21 +50,24 @@ public async Task PerformRefactoringSuggestionAsync(CodeType codeType, string ma
var progressBarHelper = new ProgressBarHelper(ServiceProvider.GlobalProvider);
progressBarHelper.StartIndeterminateDialog(message);

var methodCode = SuggestedCodeEditor != null ?
SuggestedCodeEditor.Text
: GetCurrentMethodCode();
var methodCode = GetCurrentMethodCode();

if (string.IsNullOrEmpty(methodCode))
if (NoProcessableCodeDetected(manualPrompt, methodCode))
{
progressBarHelper.StopDialog();
WindowHelper.MsgBox("It is necessary to select the source code to be processed from the editor");
return;
}

var refactoringHelper = new RefactoringHelper(package);
await refactoringHelper.RequestCodeSuggestionsAsync(methodCode, ActiveDocumentPath, codeType, manualPrompt);
await refactoringHelper.RequestSuggestionsAsync(methodCode, ActiveDocumentPath, codeType, manualPrompt);

progressBarHelper.StopDialog();
}

private static bool NoProcessableCodeDetected(string manualPrompt, string methodCode)
{
return string.IsNullOrEmpty(methodCode) && string.IsNullOrEmpty(manualPrompt);
}
}
}
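
The new guard lets a request proceed when either an editor selection or a manual prompt is present; the warning fires only when both are empty. A behavior sketch of the predicate above (hypothetical standalone form):

```csharp
// true => abort with the message box; false => proceed with the request.
static bool NoProcessableCodeDetected(string manualPrompt, string methodCode)
    => string.IsNullOrEmpty(methodCode) && string.IsNullOrEmpty(manualPrompt);

// NoProcessableCodeDetected("", "")        -> true  (nothing to work on)
// NoProcessableCodeDetected("explain", "") -> false (a manual prompt alone suffices)
// NoProcessableCodeDetected("", "int x;")  -> false (selected code alone suffices)
```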
15 changes: 15 additions & 0 deletions src/EntwineLLM/Converters/LlmModelConverter.cs
@@ -0,0 +1,15 @@
using Microsoft.VisualStudio.Shell;
using System.ComponentModel;

namespace EntwineLlm.Converters
{
internal class LlmModelConverter : StringConverter
{
public override bool GetStandardValuesSupported(ITypeDescriptorContext context) => true;

public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
=> new(ThreadHelper.JoinableTaskFactory.Run(async () => await EntwineLlmPackage.LlmClient.GetModelListAsync()));

public override bool GetStandardValuesExclusive(ITypeDescriptorContext context) => true;
}
}
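
This converter is what turns the plain string properties in ModelsOptions into dropdowns in the Visual Studio options grid: `GetStandardValues` supplies the entries (here the model names fetched from Ollama, bridged synchronously with `JoinableTaskFactory.Run`), and returning `true` from `GetStandardValuesExclusive` restricts input to that list. A minimal standalone sketch of the same `TypeConverter` pattern, with hypothetical names:

```csharp
using System.ComponentModel;

// Hypothetical example of the same dropdown pattern, independent of the extension.
internal class ColorNameConverter : StringConverter
{
    public override bool GetStandardValuesSupported(ITypeDescriptorContext context) => true;

    // The values shown in the PropertyGrid dropdown.
    public override StandardValuesCollection GetStandardValues(ITypeDescriptorContext context)
        => new StandardValuesCollection(new[] { "red", "green", "blue" });

    // true => free-typed values are rejected; only listed entries are accepted.
    public override bool GetStandardValuesExclusive(ITypeDescriptorContext context) => true;
}

internal class Theme
{
    [TypeConverter(typeof(ColorNameConverter))]
    public string Accent { get; set; } = "red";
}
```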
4 changes: 3 additions & 1 deletion src/EntwineLLM/EntwineLLM.csproj
@@ -52,7 +52,8 @@
<WarningLevel>4</WarningLevel>
</PropertyGroup>
<ItemGroup>
<Compile Include="Clients\LlmClient.cs" />
<Compile Include="Clients\Interfacs\ILlmClient.cs" />
<Compile Include="Clients\OllamaClient.cs" />
<Compile Include="CommandsMenu.cs" />
<Compile Include="Commands\BaseCommand.cs" />
<Compile Include="Commands\CodeReviewCommand.cs" />
@@ -62,6 +63,7 @@
<Compile Include="Commands\Interfaces\IBaseCommand.cs" />
<Compile Include="Commands\RequestRefactorCommand.cs" />
<Compile Include="Converters\LanguageConverter.cs" />
<Compile Include="Converters\LlmModelConverter.cs" />
<Compile Include="EntwineLLMOptions.cs">
<SubType>Component</SubType>
</Compile>
29 changes: 27 additions & 2 deletions src/EntwineLLM/EntwineLLMOptions.cs
@@ -10,13 +10,33 @@ public class GeneralOptions : DialogPage
[Category("Configuration")]
[DisplayName("Large Language Model Base Url")]
[Description("Sets the base URL for local LLM")]
public string LlmUrl { get; set; } = "http://localhost:11434";
public string LlmUrl
{
get
{
return EntwineLlmPackage.LlmClient?.GetBaseUrl() ?? "http://localhost:11434";
}
set
{
EntwineLlmPackage.LlmClient?.SetBaseUrl(value);
}
}

[Category("Configuration")]
[DisplayName("Requests timeout")]
[Description("Sets timeout for HTTP requests")]

public TimeSpan LlmRequestTimeOut { get; set; } = new TimeSpan(0, 10, 0);
public TimeSpan LlmRequestTimeOut
{
get
{
return EntwineLlmPackage.LlmClient?.GetTimeOut() ?? new TimeSpan(0, 10, 0);
}
set
{
EntwineLlmPackage.LlmClient?.SetTimeOut(value);
}
}

[Category("Configuration")]
[DisplayName("LLM response language")]
@@ -30,26 +50,31 @@ public class ModelsOptions : DialogPage
[Category("Models")]
[DisplayName("Refactor queries")]
[Description("Sets the model to be used when querying LLM for refactor")]
[TypeConverter(typeof(LlmModelConverter))]
public string LlmRefactor { get; set; } = "llama3.2";

[Category("Models")]
[DisplayName("Unit tests generation")]
[Description("Sets the model to be used when querying LLM for unit tests generation")]
[TypeConverter(typeof(LlmModelConverter))]
public string LlmUnitTests { get; set; } = "llama3.2";

[Category("Models")]
[DisplayName("Documentation generation")]
[Description("Sets the model to be used when querying LLM for code documentation generation")]
[TypeConverter(typeof(LlmModelConverter))]
public string LlmDocumentation { get; set; } = "llama3.2";

[Category("Models")]
[DisplayName("Code review query")]
[Description("Sets the model to be used when querying LLM for code review")]
[TypeConverter(typeof(LlmModelConverter))]
public string LlmReview { get; set; } = "llama3.2";

[Category("Models")]
[DisplayName("Follow-up query")]
[Description("Sets the model to be used when querying LLM for follow-up prompts")]
[TypeConverter(typeof(LlmModelConverter))]
public string LlmFollowUp { get; set; } = "llama3.2";
}
}
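
Note the pattern change in the Configuration page above: `LlmUrl` and `LlmRequestTimeOut` no longer store values locally but write through to the shared client created by the package at startup. A hedged sketch of what the null-conditional calls imply (the host name below is hypothetical):

```csharp
// If the package has not created the client yet, the getter falls back to the default.
var url = EntwineLlmPackage.LlmClient?.GetBaseUrl() ?? "http://localhost:11434";

// If LlmClient is still null, the setter silently drops the assigned value,
// since SetBaseUrl is never invoked; values persist only once the client exists.
EntwineLlmPackage.LlmClient?.SetBaseUrl("http://my-ollama-host:11434");
```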
10 changes: 8 additions & 2 deletions src/EntwineLLM/EntwineLLMPackage.cs
@@ -1,4 +1,6 @@
using EntwineLlm.Commands.Interfaces;
using EntwineLlm.Clients;
using EntwineLlm.Clients.Interfacs;
using EntwineLlm.Commands.Interfaces;
using EntwineLlm.Models;
using Microsoft.VisualStudio.Shell;
using System;
@@ -16,17 +18,21 @@ namespace EntwineLlm
[ProvideToolWindow(typeof(MarkdownViewerWindow))]
[ProvideOptionPage(typeof(GeneralOptions), "EntwineLlm", "Configuration", 0, 0, true)]
[ProvideOptionPage(typeof(ModelsOptions), "EntwineLlm", "Models", 0, 0, true)]
public sealed class EntwineLlmPackage : AsyncPackage
internal sealed class EntwineLlmPackage : AsyncPackage
{
public const string PackageGuidString = "3c995b0e-1f37-4cef-9ac7-9771b3fb6162";

public static AsyncPackage Instance { get; set; }
public static ILlmClient LlmClient { get; set; }

protected override async Task InitializeAsync(CancellationToken cancellationToken, IProgress<ServiceProgressData> progress)
{
await JoinableTaskFactory.SwitchToMainThreadAsync(cancellationToken);
Instance = this;

var generalOptions = this.GetDialogPage(typeof(GeneralOptions)) as GeneralOptions;
LlmClient = new OllamaClient(generalOptions);

var commandsMenu = new CommandsMenu();
await commandsMenu.InitializeAsync(this);

19 changes: 6 additions & 13 deletions src/EntwineLLM/Helpers/RefactoringHelper.cs
@@ -1,15 +1,9 @@
using EntwineLlm.Clients;
using EntwineLlm.Enums;
using EntwineLlm.Enums;
using EntwineLlm.Helpers;
using EntwineLlm.Models;
using Microsoft.VisualStudio.Shell;
using Newtonsoft.Json.Linq;
using System;
using System.Net.Http;
using System.Text;
using System.Text.RegularExpressions;
using System.Threading.Tasks;
using Task = System.Threading.Tasks.Task;

namespace EntwineLlm
{
@@ -26,15 +20,15 @@ public RefactoringHelper(AsyncPackage package)
_modelsOptions = package.GetDialogPage(typeof(ModelsOptions)) as ModelsOptions;
}

public async Task RequestCodeSuggestionsAsync(
public async Task RequestSuggestionsAsync(
string methodCode,
string activeDocumentPath,
CodeType codeType,
string manualPrompt = "")
{
var suggestion = await GetCodeSuggestionsAsync(methodCode, codeType, manualPrompt);
var suggestion = await GetSuggestionsAsync(methodCode, codeType, manualPrompt);

switch (suggestion.Type)
switch (codeType)
{
case CodeType.Documentation:
case CodeType.Review:
@@ -47,7 +41,7 @@ public async Task RequestCodeSuggestionsAsync(
}
}

private async Task<CodeSuggestionResponse> GetCodeSuggestionsAsync(string methodCode, CodeType codeType, string manualPrompt)
private async Task<CodeSuggestionResponse> GetSuggestionsAsync(string methodCode, CodeType codeType, string manualPrompt)
{
var promptHelper = new PromptHelper(_generalOptions.Language);

@@ -61,8 +55,7 @@ private async Task<CodeSuggestionResponse> GetCodeSuggestionsAsync(string method
_ => throw new ArgumentException("Invalid requested code type"),
};

using var llmClient = new LlmClient(_generalOptions.LlmUrl, _generalOptions.LlmRequestTimeOut);
return await llmClient.GetCodeSuggestionsAsync(codeType, prompt);
return await EntwineLlmPackage.LlmClient.GetCodeSuggestionsAsync(codeType, prompt);
}

private async Task ShowSuggestionWindowAsync(string suggestion, string activeDocumentPath)
Binary file modified src/EntwineLLM/Resources/vs-entwine-options.png
