
Commit bd1190a

new: xAI integration
1 parent: 21e166b

3 files changed (+49 −0)


README.md (+1)

@@ -37,6 +37,7 @@ Nerve features integrations for any model accessible via the following providers
 | **Anthropic** | `ANTHROPIC_API_KEY` | `anthropic://claude` |
 | **Nvidia NIM** | `NIM_API_KEY` | `nim://nvidia/nemotron-4-340b-instruct` |
 | **DeepSeek** | `DEEPSEEK_API_KEY` | `deepseek://deepseek-chat` |
+| **xAI** | `XAI_API_KEY` | `xai://grok-beta` |
 | **Novita** | `NOVITA_API_KEY` | `novita://meta-llama/llama-3.1-70b-instruct` |

 ¹ Refer to [this document](https://huggingface.co/blog/tgi-messages-api#using-inference-endpoints-with-openai-client-libraries) for how to configure a custom Huggingface endpoint.
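The new table row maps an environment variable to a `xai://` generator string. As a quick illustration (a standalone sketch, not part of this commit; it only checks the variable the table names), `XAI_API_KEY` must be exported before `xai://grok-beta` is selected, since the client introduced later in this commit is pointed at that variable name:

use std::env;

fn main() {
    // The README row added above pairs XAI_API_KEY with the `xai://` scheme;
    // the xAI client added in this commit reuses the OpenAI-compatible client
    // configured with this variable name, so it must be set in the environment.
    match env::var("XAI_API_KEY") {
        Ok(_) => println!("XAI_API_KEY is set; `xai://grok-beta` can be selected."),
        Err(_) => eprintln!("XAI_API_KEY is missing; export it before using the xai:// generator."),
    }
}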

src/agent/generator/mod.rs (+7)

@@ -19,6 +19,7 @@ mod novita;
 mod ollama;
 mod openai;
 mod openai_compatible;
+mod xai;

 mod options;

@@ -201,6 +202,12 @@ macro_rules! factory_body {
             $model_name,
             $context_window,
         )?)),
+        "xai" => Ok(Box::new(xai::XAIClient::new(
+            $url,
+            $port,
+            $model_name,
+            $context_window,
+        )?)),
         "http" => Ok(Box::new(openai_compatible::OpenAiCompatibleClient::new(
             $url,
             $port,
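For context on the new "xai" arm: the factory keys off the scheme portion of the generator string, so `xai://grok-beta` selects this branch and passes `grok-beta` through as the model name. A standalone sketch of that decomposition (the helper `split_generator` is hypothetical, not the crate's actual parsing code):

// Illustrative only: how a generator string such as "xai://grok-beta" splits
// into the provider scheme matched by the factory arm above and the model
// name it receives. `split_generator` is a hypothetical helper.
fn split_generator(spec: &str) -> Option<(&str, &str)> {
    // "xai://grok-beta" -> Some(("xai", "grok-beta"))
    spec.split_once("://")
}

fn main() {
    let (scheme, model) = split_generator("xai://grok-beta").unwrap();
    assert_eq!(scheme, "xai");
    assert_eq!(model, "grok-beta");
    // The new "xai" match arm then builds an XAIClient for `model`.
    println!("provider = {scheme}, model = {model}");
}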

src/agent/generator/xai.rs (+41, new file)

@@ -0,0 +1,41 @@
+use anyhow::Result;
+use async_trait::async_trait;
+
+use crate::agent::state::SharedState;
+
+use super::{openai::OpenAIClient, ChatOptions, ChatResponse, Client};
+
+pub struct XAIClient {
+    client: OpenAIClient,
+}
+
+#[async_trait]
+impl Client for XAIClient {
+    fn new(_: &str, _: u16, model_name: &str, _: u32) -> anyhow::Result<Self>
+    where
+        Self: Sized,
+    {
+        let client = OpenAIClient::custom(model_name, "XAI_API_KEY", "https://api.x.ai/v1/")?;
+
+        Ok(Self { client })
+    }
+
+    async fn check_native_tools_support(&self) -> Result<bool> {
+        self.client.check_native_tools_support().await
+    }
+
+    async fn chat(
+        &self,
+        state: SharedState,
+        options: &ChatOptions,
+    ) -> anyhow::Result<ChatResponse> {
+        self.client.chat(state, options).await
+    }
+}
+
+#[async_trait]
+impl mini_rag::Embedder for XAIClient {
+    async fn embed(&self, text: &str) -> Result<mini_rag::Embeddings> {
+        self.client.embed(text).await
+    }
+}
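The wrapper above works because xAI exposes an OpenAI-compatible endpoint: `XAIClient` only selects the base URL (`https://api.x.ai/v1/`) and the key variable (`XAI_API_KEY`), then delegates chat, native-tool-support detection, and embeddings to the shared `OpenAIClient`. A minimal, self-contained sketch of that delegation shape follows; all names here (`Provider`, `OpenAiLike`, `XaiLike`) are illustrative stand-ins, not the crate's real types.

// Sketch of the delegation pattern used by XAIClient: a provider-specific
// type holds a generic OpenAI-compatible client and forwards every call.
trait Provider {
    fn chat(&self, prompt: &str) -> String;
}

struct OpenAiLike {
    base_url: String,
    api_key_env: String,
    model: String,
}

impl OpenAiLike {
    fn custom(model: &str, api_key_env: &str, base_url: &str) -> Self {
        Self {
            base_url: base_url.to_string(),
            api_key_env: api_key_env.to_string(),
            model: model.to_string(),
        }
    }
}

impl Provider for OpenAiLike {
    fn chat(&self, prompt: &str) -> String {
        // A real client would read the key named by `api_key_env` and POST to
        // `{base_url}chat/completions`; the sketch just echoes its inputs.
        format!("[{} via {} ({})] {}", self.model, self.base_url, self.api_key_env, prompt)
    }
}

// The provider wrapper only chooses the endpoint and key variable, then
// forwards, the same shape the new XAIClient uses.
struct XaiLike {
    inner: OpenAiLike,
}

impl Provider for XaiLike {
    fn chat(&self, prompt: &str) -> String {
        self.inner.chat(prompt)
    }
}

fn main() {
    let client = XaiLike {
        inner: OpenAiLike::custom("grok-beta", "XAI_API_KEY", "https://api.x.ai/v1/"),
    };
    println!("{}", client.chat("hello"));
}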
