feat(ai-help): upgrade from GPT-3.5 Turbo to GPT-4o mini (#546)
caugner authored Aug 2, 2024
1 parent dab5a4c commit 0e57060
Showing 3 changed files with 16 additions and 16 deletions.
18 changes: 8 additions & 10 deletions src/ai/constants.rs
@@ -5,7 +5,6 @@ use crate::ai::embeddings::RelatedDoc;
 // Whenever changing the model: bump the AI_EXPLAIN_VERSION!
 #[derive(Debug, Copy, Clone)]
 pub struct AIHelpConfig {
-    pub name: &'static str,
     pub model: &'static str,
     pub full_doc: bool,
     pub system_prompt: &'static str,
@@ -24,9 +23,12 @@ fn join_with_tags(related_docs: Vec<RelatedDoc>) -> String {
         .join("\n")
 }
 
-pub const AI_HELP_GPT3_5_FULL_DOC_NEW_PROMPT: AIHelpConfig = AIHelpConfig {
-    name: "20230901-full_doc-new_prompt",
-    model: "gpt-3.5-turbo-0125",
+pub const BASIC_MODEL: &str = "gpt-4o-mini-2024-07-18";
+pub const ADVANCED_MODEL: &str = "gpt-4o-2024-05-13";
+pub const EMBEDDING_MODEL: &str = "text-embedding-3-small";
+
+pub const AI_HELP_BASIC: AIHelpConfig = AIHelpConfig {
+    model: BASIC_MODEL,
     full_doc: true,
     system_prompt: include_str!("prompts/new_prompt/system.md"),
     user_prompt: None,
@@ -37,9 +39,8 @@ pub const AI_HELP_GPT3_5_FULL_DOC_NEW_PROMPT: AIHelpConfig = AIHelpConfig {
     make_context: join_with_tags,
 };
 
-pub const AI_HELP_GPT4_FULL_DOC_NEW_PROMPT: AIHelpConfig = AIHelpConfig {
-    name: "20240125-gpt4-full_doc-new_prompt",
-    model: "gpt-4o-2024-05-13",
+pub const AI_HELP_ADVANCED: AIHelpConfig = AIHelpConfig {
+    model: ADVANCED_MODEL,
     full_doc: true,
     system_prompt: include_str!("prompts/new_prompt/system.md"),
     user_prompt: None,
@@ -50,9 +51,6 @@ pub const AI_HELP_GPT4_FULL_DOC_NEW_PROMPT: AIHelpConfig = AIHelpConfig {
     make_context: join_with_tags,
 };
 
-pub const MODEL: &str = "gpt-3.5-turbo";
-pub const EMBEDDING_MODEL: &str = "text-embedding-3-small";
-
 pub const AI_HELP_SYSTEM_MESSAGE: &str = "You are a very enthusiastic MDN AI who loves \
     to help people! Given the following information from MDN, answer the user's question \
     using only that information, outputted in markdown format.\
4 changes: 2 additions & 2 deletions src/ai/explain.rs
@@ -13,7 +13,7 @@ use sha2::{Digest, Sha256};
 
 use crate::{
     ai::{
-        constants::{EXPLAIN_SYSTEM_MESSAGE, MODEL},
+        constants::{BASIC_MODEL, EXPLAIN_SYSTEM_MESSAGE},
         error::AIError,
     },
     api::error::ApiError,
@@ -103,7 +103,7 @@ pub async fn prepare_explain_req(
         .build()
         .unwrap();
     let req = CreateChatCompletionRequestArgs::default()
-        .model(MODEL)
+        .model(BASIC_MODEL)
        .messages(vec![system_message, context_message, user_message])
        .temperature(0.0)
        .build()?;
10 changes: 6 additions & 4 deletions src/ai/help.rs
@@ -14,7 +14,7 @@ use serde::{Deserialize, Serialize};
 
 use crate::{
     ai::{
-        constants::{AI_HELP_GPT3_5_FULL_DOC_NEW_PROMPT, AI_HELP_GPT4_FULL_DOC_NEW_PROMPT},
+        constants::{AI_HELP_ADVANCED, AI_HELP_BASIC},
         embeddings::{get_related_docs, get_related_macro_docs},
         error::AIError,
         helpers::{cap_messages, into_user_messages, sanitize_messages},
@@ -23,6 +23,8 @@ use crate::{
     settings::SETTINGS,
 };
 
+use super::constants::BASIC_MODEL;
+
 #[derive(Eq, Hash, PartialEq, Serialize, Deserialize, Debug, Clone)]
 pub struct RefDoc {
     pub url: String,
@@ -54,9 +56,9 @@ pub async fn prepare_ai_help_req(
     request_meta: &mut AIHelpRequestMeta,
 ) -> Result<AIHelpRequest, AIError> {
     let config = if is_subscriber {
-        AI_HELP_GPT4_FULL_DOC_NEW_PROMPT
+        AI_HELP_ADVANCED
     } else {
-        AI_HELP_GPT3_5_FULL_DOC_NEW_PROMPT
+        AI_HELP_BASIC
     };
 
     // // check for secret error trigger in the last message
@@ -197,7 +199,7 @@ pub fn prepare_ai_help_summary_req(
     let messages = [&[system_message], &messages[..], &[user_message]].concat();
 
     let req = CreateChatCompletionRequestArgs::default()
-        .model("gpt-3.5-turbo")
+        .model(BASIC_MODEL)
        .messages(messages)
        .temperature(0.0)
        .build()?;
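For context, here is a minimal standalone sketch (not part of this commit) of how the renamed configs and model constants fit together: prepare_ai_help_req picks AI_HELP_ADVANCED for subscribers and AI_HELP_BASIC otherwise, and the selected config's model string is what ends up on the chat completion request. The AIHelpConfig struct is reduced here to the fields relevant to this change; see src/ai/constants.rs in the diff above for the full definition.

    // Simplified sketch of the model selection introduced by this commit.
    // AIHelpConfig is cut down to two fields; the real struct also carries
    // prompt templates and other configuration.

    pub const BASIC_MODEL: &str = "gpt-4o-mini-2024-07-18";
    pub const ADVANCED_MODEL: &str = "gpt-4o-2024-05-13";

    #[derive(Debug, Copy, Clone)]
    pub struct AIHelpConfig {
        pub model: &'static str,
        pub full_doc: bool,
    }

    pub const AI_HELP_BASIC: AIHelpConfig = AIHelpConfig {
        model: BASIC_MODEL,
        full_doc: true,
    };

    pub const AI_HELP_ADVANCED: AIHelpConfig = AIHelpConfig {
        model: ADVANCED_MODEL,
        full_doc: true,
    };

    // Mirrors the branch in prepare_ai_help_req: subscribers get the GPT-4o
    // config, free users get the GPT-4o mini config (previously GPT-3.5 Turbo).
    fn config_for(is_subscriber: bool) -> AIHelpConfig {
        if is_subscriber {
            AI_HELP_ADVANCED
        } else {
            AI_HELP_BASIC
        }
    }

    fn main() {
        assert_eq!(config_for(false).model, "gpt-4o-mini-2024-07-18");
        assert_eq!(config_for(true).model, "gpt-4o-2024-05-13");
        println!("free tier  -> {}", config_for(false).model);
        println!("subscriber -> {}", config_for(true).model);
    }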
