Skip to content

Commit

Permalink
fix: update notification for response time issues not to show some suggestions when using local port. (
Browse files Browse the repository at this point in the history
#588)

* fix: update notification for response time issues not to show some suggestions when using local port.

* fix: update notification messages for completion response time issues.

* fix: lint.
  • Loading branch information
icycodes authored Oct 19, 2023
1 parent aacfd35 commit 0dc7e98
Show file tree
Hide file tree
Showing 4 changed files with 44 additions and 30 deletions.
Original file line number Diff line number Diff line change
Expand Up @@ -8,6 +8,7 @@ import com.intellij.openapi.application.invokeLater
import com.intellij.openapi.components.service
import com.intellij.openapi.diagnostic.Logger
import com.intellij.openapi.ui.Messages
import com.tabbyml.intellijtabby.agent.Agent
import com.tabbyml.intellijtabby.agent.AgentService
import com.tabbyml.intellijtabby.settings.ApplicationSettingsState
import kotlinx.coroutines.launch
Expand All @@ -23,28 +24,24 @@ class CheckIssueDetail : AnAction() {
agentService.scope.launch {
val detail = agentService.getCurrentIssueDetail() ?: return@launch
val serverHealthState = agentService.getServerHealthState()
val settingsState = service<ApplicationSettingsState>().state.value
logger.info("Show issue detail: $detail, $serverHealthState, $settingsState")
val agentConfig = agentService.getConfig()
logger.info("Show issue detail: $detail, $serverHealthState, $agentConfig")
val title = when (detail["name"]) {
"slowCompletionResponseTime" -> "Completion Requests Appear to Take Too Much Time"
"highCompletionTimeoutRate" -> "Most Completion Requests Timed Out"
else -> return@launch
}
val message = buildDetailMessage(detail, serverHealthState, settingsState)
val message = buildDetailMessage(detail, serverHealthState, agentConfig)
invokeLater {
val result =
Messages.showOkCancelDialog(message, title, "Supported Models", "Dismiss", Messages.getInformationIcon())
if (result == Messages.OK) {
BrowserUtil.browse("https://tabby.tabbyml.com/docs/models/")
}
Messages.showMessageDialog(message, title, Messages.getInformationIcon())
}
}
}

private fun buildDetailMessage(
detail: Map<String, Any>,
serverHealthState: Map<String, Any>?,
settingsState: ApplicationSettingsState.State
agentConfig: Agent.Config
): String {
val stats = detail["completionResponseStats"] as Map<*, *>?
val statsMessages = when (detail["name"]) {
Expand Down Expand Up @@ -72,19 +69,19 @@ class CheckIssueDetail : AnAction() {
val helpMessageForRunningLargeModelOnCPU = if (device == "cpu" && model.endsWith("B")) {
"""
Your Tabby server is running model <i>$model</i> on CPU.
This model is too large to run on CPU, please try a smaller model or switch to GPU.
You can find supported model list in online documents.
This model may be performing poorly due to its large parameter size, please consider trying smaller models or switch to GPU.
You can find a list of supported models in the <a href='https://tabby.tabbyml.com/docs/models/'>model directory</a>.
""".trimIndent()
} else {
""
}
var commonHelpMessage = ""
val host = URL(settingsState.serverEndpoint).host
val host = URL(agentConfig.server?.endpoint).host
if (helpMessageForRunningLargeModelOnCPU.isEmpty()) {
commonHelpMessage += "<li>The running model <i>$model</i> is too large to run on your Tabby server.<br/>"
commonHelpMessage += "Please try a smaller model. You can find supported model list in online documents.</li>"
commonHelpMessage += "<li>The running model <i>$model</i> may be performing poorly due to its large parameter size.<br/>"
commonHelpMessage += "Please consider trying smaller models. You can find a list of supported models in the <a href='https://tabby.tabbyml.com/docs/models/'>model directory</a>.</li>"
}
if (!(host == "localhost" || host == "127.0.0.1")) {
if (!(host.startsWith("localhost") || host.startsWith("127.0.0.1"))) {
commonHelpMessage += "<li>A poor network connection. Please check your network and proxy settings.</li>"
commonHelpMessage += "<li>Server overload. Please contact your Tabby server administrator for assistance.</li>"
}
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -202,6 +202,11 @@ class AgentService : Disposable {
agent.clearConfig(key)
}

/**
 * Returns the Tabby agent's current configuration.
 *
 * Suspends until the agent has finished initializing (via [waitForInitialized])
 * before delegating to the underlying agent, so callers always observe a
 * fully-loaded config rather than a partially-initialized one.
 *
 * NOTE(review): assumes `agent` is non-null once initialization completes —
 * confirm against the rest of AgentService, which is outside this view.
 */
suspend fun getConfig(): Agent.Config {
waitForInitialized()
return agent.getConfig()
}

suspend fun provideCompletion(editor: Editor, offset: Int, manually: Boolean = false): Agent.CompletionResponse? {
waitForInitialized()
return ReadAction.compute<PsiFile, Throwable> {
Expand Down
2 changes: 1 addition & 1 deletion clients/vscode/package.json
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@
"repository": "https://github.com/TabbyML/tabby",
"bugs": "https://github.com/TabbyML/tabby/issues",
"license": "Apache-2.0",
"version": "0.6.1",
"version": "1.0.0-dev",
"keywords": [
"ai",
"autocomplete",
Expand Down
40 changes: 26 additions & 14 deletions clients/vscode/src/notifications.ts
Original file line number Diff line number Diff line change
Expand Up @@ -138,21 +138,33 @@ function getHelpMessageForCompletionResponseTimeIssue() {
if (serverHealthState?.device === "cpu" && serverHealthState?.model?.match(/[0-9\.]+B$/)) {
helpMessageForRunningLargeModelOnCPU +=
`Your Tabby server is running model ${serverHealthState?.model} on CPU. ` +
"This model is too large to run on CPU, please try a smaller model or switch to GPU. " +
"You can find supported model list in online documents. \n";
"This model may be performing poorly due to its large parameter size, please consider trying smaller models or switch to GPU. " +
"You can find a list of supported models in the model directory.\n";
}
let commonHelpMessage = "";
const host = new URL(agent().getConfig().server.endpoint).host;
if (helpMessageForRunningLargeModelOnCPU.length == 0) {
commonHelpMessage += ` - The running model ${
serverHealthState?.model ?? ""
} may be performing poorly due to its large parameter size. `;
commonHelpMessage +=
"Please consider trying smaller models. You can find a list of supported models in the model directory.\n";
}
if (!(host.startsWith("localhost") || host.startsWith("127.0.0.1"))) {
commonHelpMessage += " - A poor network connection. Please check your network and proxy settings.\n";
commonHelpMessage += " - Server overload. Please contact your Tabby server administrator for assistance.\n";
}
let message = "";
if (helpMessageForRunningLargeModelOnCPU.length > 0) {
message += helpMessageForRunningLargeModelOnCPU + "\n";
message += "Other possible causes of this issue are: \n";
if (commonHelpMessage.length > 0) {
message += "Other possible causes of this issue: \n";
message += commonHelpMessage;
}
} else {
message += "Possible causes of this issue are: \n";
}
message += " - A poor network connection. Please check your network and proxy settings.\n";
message += " - Server overload. Please contact your Tabby server administrator for assistance.\n";
if (helpMessageForRunningLargeModelOnCPU.length == 0) {
message += ` - The running model ${serverHealthState?.model ?? ""} is too large to run on your Tabby server. `;
message += "Please try a smaller model. You can find supported model list in online documents.\n";
// commonHelpMessage should not be empty here
message += "Possible causes of this issue: \n";
message += commonHelpMessage;
}
return message;
}
Expand All @@ -173,11 +185,11 @@ function showInformationWhenSlowCompletionResponseTime(modal: boolean = false) {
modal: true,
detail: statsMessage + getHelpMessageForCompletionResponseTimeIssue(),
},
"Supported Models",
"Model Directory",
)
.then((selection) => {
switch (selection) {
case "Supported Models":
case "Model Directory":
env.openExternal(Uri.parse("https://tabby.tabbyml.com/docs/models/"));
break;
}
Expand Down Expand Up @@ -212,11 +224,11 @@ function showInformationWhenHighCompletionTimeoutRate(modal: boolean = false) {
modal: true,
detail: statsMessage + getHelpMessageForCompletionResponseTimeIssue(),
},
"Supported Models",
"Model Directory",
)
.then((selection) => {
switch (selection) {
case "Supported Models":
case "Model Directory":
env.openExternal(Uri.parse("https://tabby.tabbyml.com/docs/models/"));
break;
}
Expand Down

0 comments on commit 0dc7e98

Please sign in to comment.