From d01870952ab2f484ebd8a46fe9745ccbeed3a4b2 Mon Sep 17 00:00:00 2001
From: Craig Mayhew
Date: Mon, 5 Feb 2024 10:56:32 +0000
Subject: [PATCH] Added first role flag, --code-review

---
 .github/workflows/rust.yml |  2 +-
 src/main.rs                | 34 +++++++++++++++++++++++++++++-----
 2 files changed, 30 insertions(+), 6 deletions(-)

diff --git a/.github/workflows/rust.yml b/.github/workflows/rust.yml
index 70e7f06..36e5af7 100644
--- a/.github/workflows/rust.yml
+++ b/.github/workflows/rust.yml
@@ -23,4 +23,4 @@ jobs:
     - name: GPT Code Review
       env:
         OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
-      run: pwd && find . -path './target' -prune -o -name '*.rs' -exec echo {} \; -exec cat {} \; | ./target/debug/pipe-gpt -p "how would you improve this code? include line numbers in your comments so I can tell where you mean" --markdown
+      run: pwd && find . -path './target' -prune -o -name '*.rs' -exec echo {} \; -exec cat {} \; | ./target/debug/pipe-gpt --code-review --markdown
diff --git a/src/main.rs b/src/main.rs
index f24f4f3..ebf7b80 100644
--- a/src/main.rs
+++ b/src/main.rs
@@ -57,13 +57,23 @@ const MAX_TOKENS: &i32 = &4096;
 /// Defines default temperature of response
 const TEMPERATURE: &f32 = &0.6;
 
+enum AssistantPurpose {
+    CodeReviewer,
+    Default,
+}
+
 /// # Create Conversation Vector
 ///
 /// Add the prepend string if present. Add piped stream if present.
-fn create_conversation(prepend: String, input: &str) -> Vec<Message> {
-    let mut conversation_messages = vec![
-        Message { role: Role::System, content: "You are a helpful assistant.".to_string() },
-    ];
+fn create_conversation(prepend: String, input: &str, purpose: AssistantPurpose) -> Vec<Message> {
+    let mut conversation_messages = match purpose {
+        AssistantPurpose::Default => {
+            vec![Message { role: Role::System, content: "You are a helpful assistant.".to_string() }]
+        },
+        AssistantPurpose::CodeReviewer => {
+            vec![Message { role: Role::System, content: "You are a helpful assistant. How would you improve this code? Include line numbers in your comments so I can tell where you mean. ".to_string() }]
+        }
+    };
     if !&prepend.is_empty() {
         conversation_messages.push(Message { role: Role::User, content: prepend });
     }
@@ -113,6 +123,13 @@ async fn send_to_gpt4(body: ChatBody) -> Result {
 /// - `-s [top_p]`: Advanced: Adjust top_p of response between 0.0 and 1.0. It's the nucleus
 /// sampling parameter.
 fn setup_arguments() -> Command {
+    let code_review_flag = Arg::new("code-review")
+        .long("code-review")
+        .value_name("code-review")
+        .help("Use a default prompt that will review your piped code")
+        .required(false)
+        .action(ArgAction::SetTrue);
+
     let markdown_flag = Arg::new("markdown")
         .long("markdown")
         .value_name("markdown")
@@ -153,6 +170,7 @@ fn setup_arguments() -> Command {
     command!() // requires `cargo` feature
         .about("Sends piped content to GPT-4. 
 Author: Craig Mayhew")
+        .arg(code_review_flag)
         .arg(markdown_flag)
         .arg(max_tokens_arg)
         .arg(prepend_arg)
@@ -174,6 +192,12 @@ fn parse_arguments(input: &str, args_setup: Command) -> (ChatBody, bool) {
     let top_p = *matches.get_one::<f32>("top_p").unwrap_or(&0.95);
     let render_markdown = *matches.get_one::<bool>("markdown").unwrap_or(&false);
 
+    let assistant_purpose = if *matches.get_one::<bool>("code-review").unwrap_or(&false) {
+        AssistantPurpose::CodeReviewer
+    } else {
+        AssistantPurpose::Default
+    };
+
     let chatbody = ChatBody {
         model: MODEL.to_owned(),
         max_tokens: Some(max_tokens),
@@ -186,7 +210,7 @@ fn parse_arguments(input: &str, args_setup: Command) -> (ChatBody, bool) {
         frequency_penalty: None,
         logit_bias: None,
         user: None,
-        messages: create_conversation(prepend, input),
+        messages: create_conversation(prepend, input, assistant_purpose),
     };
     info!("ChatBody struct generated");