chore(ai-help): remove token_model
caugner committed May 17, 2024
1 parent d24a195 commit 5b2fc74
Showing 2 changed files with 3 additions and 6 deletions.
src/ai/constants.rs (3 changes: 0 additions & 3 deletions)

@@ -7,7 +7,6 @@ use crate::ai::embeddings::RelatedDoc;
 pub struct AIHelpConfig {
     pub name: &'static str,
     pub model: &'static str,
-    pub token_model: &'static str,
     pub full_doc: bool,
     pub system_prompt: &'static str,
     pub user_prompt: Option<&'static str>,
@@ -28,7 +27,6 @@ fn join_with_tags(related_docs: Vec<RelatedDoc>) -> String {
 pub const AI_HELP_GPT3_5_FULL_DOC_NEW_PROMPT: AIHelpConfig = AIHelpConfig {
     name: "20230901-full_doc-new_prompt",
     model: "gpt-3.5-turbo-0125",
-    token_model: "gpt-3.5-turbo-0125",
     full_doc: true,
     system_prompt: include_str!("prompts/new_prompt/system.md"),
     user_prompt: None,
@@ -42,7 +40,6 @@ pub const AI_HELP_GPT3_5_FULL_DOC_NEW_PROMPT: AIHelpConfig = AIHelpConfig {
 pub const AI_HELP_GPT4_FULL_DOC_NEW_PROMPT: AIHelpConfig = AIHelpConfig {
     name: "20240125-gpt4-full_doc-new_prompt",
     model: "gpt-4o-2024-05-13",
-    token_model: "gpt-4-0125-preview",
     full_doc: true,
     system_prompt: include_str!("prompts/new_prompt/system.md"),
     user_prompt: None,
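
With token_model gone, each config carries a single model name that serves both the completion request and token counting. A minimal sketch of why one name can do both jobs, assuming the tiktoken-rs crate in a version that recognizes the gpt-4o family; count_tokens is a hypothetical helper, not part of this repository:

use tiktoken_rs::get_bpe_from_model;

// Hypothetical helper (not in this repo): resolve the tokenizer from
// the model name and count the tokens of a plain string.
fn count_tokens(model: &str, text: &str) -> anyhow::Result<usize> {
    // get_bpe_from_model maps a name like "gpt-4o-2024-05-13" to its
    // encoding, so no separate token_model needs to be stored.
    let bpe = get_bpe_from_model(model)?;
    Ok(bpe.encode_with_special_tokens(text).len())
}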
src/ai/helpers.rs (6 changes: 3 additions & 3 deletions)

@@ -26,19 +26,19 @@ pub fn cap_messages(
     mut init_messages: Vec<ChatCompletionRequestMessage>,
     context_messages: Vec<ChatCompletionRequestMessage>,
 ) -> Result<Vec<ChatCompletionRequestMessage>, AIError> {
-    let init_tokens = num_tokens_from_messages(config.token_model, &init_messages)?;
+    let init_tokens = num_tokens_from_messages(config.model, &init_messages)?;
     if init_tokens + config.max_completion_tokens > config.token_limit {
         return Err(AIError::TokenLimit);
     }
-    let mut context_tokens = num_tokens_from_messages(config.token_model, &context_messages)?;
+    let mut context_tokens = num_tokens_from_messages(config.model, &context_messages)?;
 
     let mut skip = 0;
     while context_tokens + init_tokens + config.max_completion_tokens > config.token_limit {
         skip += 1;
         if skip >= context_messages.len() {
             return Err(AIError::TokenLimit);
         }
-        context_tokens = num_tokens_from_messages(config.token_model, &context_messages[skip..])?;
+        context_tokens = num_tokens_from_messages(config.model, &context_messages[skip..])?;
     }
     init_messages.extend(context_messages.into_iter().skip(skip));
     Ok(init_messages)
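
For context, cap_messages enforces a token budget: the fixed init messages plus the reserved completion tokens must fit outright, and leading context messages are then skipped until the rest fits too. A self-contained sketch of that loop, with plain strings and an injected counter standing in for num_tokens_from_messages; all names here are illustrative, not this repository's API:

// Illustrative stand-in for cap_messages: `count` plays the role of
// num_tokens_from_messages, and messages are plain strings.
fn cap(
    mut init: Vec<String>,
    context: Vec<String>,
    max_completion_tokens: usize,
    token_limit: usize,
    count: impl Fn(&[String]) -> usize,
) -> Option<Vec<String>> {
    // The fixed messages plus the reserved completion budget must fit
    // on their own; no amount of trimming can fix this case.
    let init_tokens = count(&init);
    if init_tokens + max_completion_tokens > token_limit {
        return None;
    }
    // Skip leading context messages until the total fits.
    let mut skip = 0;
    while count(&context[skip..]) + init_tokens + max_completion_tokens > token_limit {
        skip += 1;
        if skip >= context.len() {
            return None;
        }
    }
    init.extend(context.into_iter().skip(skip));
    Some(init)
}

fn main() {
    // Toy counter: one token per whitespace-separated word.
    let count = |msgs: &[String]| msgs.iter().map(|m| m.split_whitespace().count()).sum::<usize>();
    let init = vec!["you are a helpful assistant".to_string()];
    let context = vec![
        "old turn one two three".to_string(),
        "newest turn".to_string(),
    ];
    // With a tight limit, the oldest context message is dropped first:
    // prints Some(["you are a helpful assistant", "newest turn"]).
    println!("{:?}", cap(init, context, 2, 10, count));
}

Skipping from the front drops the earliest context first, so the fixed init messages and the most recent turns survive the cap.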
