Skip to content

Commit cc78cf9

Browse files
authored
Merge pull request #22 from jasonhp/main
feat: add new llm provider: Novita AI
2 parents bf48414 + ad52234 commit cc78cf9

File tree

4 files changed

+63
-2
lines changed

4 files changed

+63
-2
lines changed

README.md

+10-1
Original file line numberDiff line numberDiff line change
@@ -34,7 +34,7 @@ While Nerve was inspired by other projects such as Autogen and Rigging, its main
3434

3535
## LLM Support
3636

37-
Nerve features integrations for any model accessible via the [ollama](https://github.com/ollama/ollama), [groq](https://groq.com), [OpenAI](https://openai.com/index/openai-api/), [Fireworks](https://fireworks.ai/) and [Huggingface](https://huggingface.co/blog/tgi-messages-api#using-inference-endpoints-with-openai-client-libraries) APIs.
37+
Nerve features integrations for any model accessible via the [ollama](https://github.com/ollama/ollama), [groq](https://groq.com), [OpenAI](https://openai.com/index/openai-api/), [Fireworks](https://fireworks.ai/), [Huggingface](https://huggingface.co/blog/tgi-messages-api#using-inference-endpoints-with-openai-client-libraries) and [Novita AI](https://novita.ai/model-api/product/llm-api) APIs.
3838

3939
**The tool will automatically detect if the selected model natively supports function calling. If not, it will provide a compatibility layer that empowers older models to perform function calling anyway.**
4040

@@ -72,6 +72,15 @@ Refer to [this document](https://huggingface.co/blog/tgi-messages-api#using-infe
7272
HF_API_TOKEN=your-api-key nerve -G "hf://[email protected]" ...
7373
```
7474

75+
For **Novita**:
76+
77+
```sh
78+
NOVITA_API_KEY=your-api-key nerve -G "novita://meta-llama/llama-3.1-70b-instruct" ...
79+
```
80+
81+
You can manage your API keys [here](https://novita.ai/settings#key-management), and browse all available models [here](https://novita.ai/model-api/product/llm-api).
82+
83+
7584
## Example
7685

7786
Let's take a look at the `examples/ssh_agent` example tasklet (a "tasklet" is a YAML file describing a task and the instructions):

nerve-core/Cargo.toml

+2-1
Original file line numberDiff line numberDiff line change
@@ -47,10 +47,11 @@ serde_json = "1.0.120"
4747
clap = { version = "4.5.6", features = ["derive"] }
4848

4949
[features]
50-
default = ["ollama", "groq", "openai", "fireworks", "hf"]
50+
default = ["ollama", "groq", "openai", "fireworks", "hf", "novita"]
5151

5252
ollama = ["dep:ollama-rs"]
5353
groq = ["dep:groq-api-rs", "dep:duration-string"]
5454
openai = ["dep:openai_api_rust"]
5555
fireworks = ["dep:openai_api_rust"]
5656
hf = ["dep:openai_api_rust"]
57+
novita = ["dep:openai_api_rust"]

nerve-core/src/agent/generator/mod.rs

+10
Original file line numberDiff line numberDiff line change
@@ -15,6 +15,8 @@ mod fireworks;
1515
mod groq;
1616
#[cfg(feature = "hf")]
1717
mod huggingface;
18+
#[cfg(feature = "novita")]
19+
mod novita;
1820
#[cfg(feature = "ollama")]
1921
mod ollama;
2022
#[cfg(feature = "openai")]
@@ -159,6 +161,7 @@ macro_rules! factory_body {
159161
$model_name,
160162
$context_window,
161163
)?)),
164+
#[cfg(feature = "hf")]
162165
"hf" => Ok(Box::new(huggingface::HuggingfaceMessageClient::new(
163166
$url,
164167
$port,
@@ -172,6 +175,13 @@ macro_rules! factory_body {
172175
$model_name,
173176
$context_window,
174177
)?)),
178+
#[cfg(feature = "novita")]
179+
"novita" => Ok(Box::new(novita::NovitaClient::new(
180+
$url,
181+
$port,
182+
$model_name,
183+
$context_window,
184+
)?)),
175185
_ => Err(anyhow!("generator '{}' not supported yet", $name)),
176186
}
177187
};
+41
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
use anyhow::Result;
2+
use async_trait::async_trait;
3+
4+
use crate::agent::{state::SharedState, Invocation};
5+
6+
use super::{openai::OpenAIClient, Client, ChatOptions};
7+
8+
/// Generator backed by Novita AI's OpenAI-compatible LLM API.
///
/// Thin wrapper that delegates all requests to the generic [`OpenAIClient`],
/// configured against the Novita endpoint.
pub struct NovitaClient {
    // OpenAI-compatible client pointed at https://api.novita.ai/v3/openai/.
    client: OpenAIClient,
}
#[async_trait]
13+
impl Client for NovitaClient {
14+
fn new(_: &str, _: u16, model_name: &str, _: u32) -> anyhow::Result<Self>
15+
where
16+
Self: Sized,
17+
{
18+
let client = OpenAIClient::custom(
19+
model_name,
20+
"NOVITA_API_KEY",
21+
"https://api.novita.ai/v3/openai/",
22+
)?;
23+
24+
Ok(Self { client })
25+
}
26+
27+
async fn chat(
28+
&self,
29+
state: SharedState,
30+
options: &ChatOptions,
31+
) -> anyhow::Result<(String, Vec<Invocation>)> {
32+
self.client.chat(state, options).await
33+
}
34+
}
35+
36+
#[async_trait]
impl mini_rag::Embedder for NovitaClient {
    /// Compute embeddings for `text` by delegating to the wrapped
    /// OpenAI-compatible client.
    async fn embed(&self, text: &str) -> Result<mini_rag::Embeddings> {
        self.client.embed(text).await
    }
}

0 commit comments

Comments
 (0)