From 340dec010010a07aba235f5a1b8b2ca9a28e918d Mon Sep 17 00:00:00 2001
From: Claas Augner <caugner@mozilla.com>
Date: Wed, 24 Apr 2024 15:34:12 +0200
Subject: [PATCH] enhance(ai-test): skip prompts for which result exists

---
 ai-test/src/ai_help.rs | 79 ++++++++++++++++++++++++------------------
 1 file changed, 46 insertions(+), 33 deletions(-)

diff --git a/ai-test/src/ai_help.rs b/ai-test/src/ai_help.rs
index 833c130a..395fefa8 100644
--- a/ai-test/src/ai_help.rs
+++ b/ai-test/src/ai_help.rs
@@ -81,40 +81,45 @@ pub async fn ai_help_all(
     let prompts = prompts::read(path)?;
     let total_samples = prompts.len();
     let before = Instant::now();
-    stream::iter(prompts.into_iter().enumerate())
-        .map(Ok::<(usize, Vec<String>), Error>)
-        .try_for_each_concurrent(10, |(i, prompts)| async move {
-            println!("processing: {:0>2}", i);
-            let json_out = PathBuf::from(out.as_ref()).join(format!("{:0>2}.json", i));
-            let md_out = PathBuf::from(out.as_ref()).join(format!("{:0>2}.md", i));
-            let messages = prompts
-                .into_iter()
-                .map(|prompt| ChatCompletionRequestMessage {
-                    role: User,
-                    content: Some(prompt),
-                    name: None,
-                    function_call: None,
-                })
-                .collect();
-            let mut meta = Default::default();
-            let req = prepare_ai_help_req(
-                openai_client,
-                supabase_pool,
-                !no_subscription,
-                messages,
-                &mut meta,
-            )
-            .await?;
-            let mut res = openai_client.chat().create(req.req.clone()).await?;
-            let res = res.choices.pop().map(|res| res.message);
-            let storage = Storage { req, res };
-            println!("writing: {}", json_out.display());
-            fs::write(json_out, serde_json::to_vec_pretty(&storage)?).await?;
-            println!("writing: {}", md_out.display());
-            fs::write(md_out, storage.to_md().as_bytes()).await?;
-            Ok(())
-        })
+    stream::iter(
+        prompts
+            .into_iter()
+            .enumerate()
+            .filter(|(i, _val)| std::fs::metadata(json_path(out, *i)).is_err()),
+    )
+    .map(Ok::<(usize, Vec<String>), Error>)
+    .try_for_each_concurrent(10, |(i, prompts)| async move {
+        println!("processing: {:0>2}", i);
+        let json_out = json_path(out, i);
+        let md_out = md_path(out, i);
+        let messages = prompts
+            .into_iter()
+            .map(|prompt| ChatCompletionRequestMessage {
+                role: User,
+                content: Some(prompt),
+                name: None,
+                function_call: None,
+            })
+            .collect();
+        let mut meta = Default::default();
+        let req = prepare_ai_help_req(
+            openai_client,
+            supabase_pool,
+            !no_subscription,
+            messages,
+            &mut meta,
+        )
+        .await?;
+        let mut res = openai_client.chat().create(req.req.clone()).await?;
+        let res = res.choices.pop().map(|res| res.message);
+        let storage = Storage { req, res };
+        println!("writing: {}", json_out.display());
+        fs::write(json_out, serde_json::to_vec_pretty(&storage)?).await?;
+        println!("writing: {}", md_out.display());
+        fs::write(md_out, storage.to_md().as_bytes()).await?;
+        Ok(())
+    })
     .await?;
     let after = Instant::now();
     println!(
         "Tested {} prompts in {} seconds",
@@ -123,3 +128,11 @@ pub async fn ai_help_all(
     );
     Ok(())
 }
+
+fn json_path(out: &impl AsRef<Path>, i: usize) -> PathBuf {
+    PathBuf::from(out.as_ref()).join(format!("{:0>2}.json", i))
+}
+
+fn md_path(out: &impl AsRef<Path>, i: usize) -> PathBuf {
+    PathBuf::from(out.as_ref()).join(format!("{:0>2}.md", i))
+}