Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Run code blocks #22

Merged
merged 37 commits into from
Sep 20, 2023
Merged
Show file tree
Hide file tree
Changes from 27 commits
Commits
Show all changes
37 commits
Select commit Hold shift + click to select a range
ba60c88
Add `executor`
glcraft Sep 16, 2023
4b20c6d
add Executor new
glcraft Sep 16, 2023
80e7355
add debugger to read file
glcraft Sep 16, 2023
0bdc838
fix executor newline
glcraft Sep 16, 2023
4c4bb48
implement executor
glcraft Sep 16, 2023
b8f0750
rename execution => runner
glcraft Sep 17, 2023
c5f09b6
working execution of code
glcraft Sep 17, 2023
6a63f66
better error display
glcraft Sep 17, 2023
57e7b93
execute every code block
glcraft Sep 17, 2023
1e7204c
Restructure runner
glcraft Sep 17, 2023
a3c2162
Remove impl from for SearchStatus
glcraft Sep 17, 2023
088f26e
add nushell runner
glcraft Sep 17, 2023
29560f4
powershell runner
glcraft Sep 17, 2023
2eff8e5
add tempfile dependency
glcraft Sep 17, 2023
f8cdf02
Add rust runner
glcraft Sep 17, 2023
3bf8cf4
Remove unused SearchError variant
glcraft Sep 18, 2023
19c6e98
search_program windows compatible
glcraft Sep 18, 2023
d46f774
remove debug output
glcraft Sep 18, 2023
8516ff0
Add interactive run
glcraft Sep 18, 2023
82c9e3e
Add argument to run code
glcraft Sep 18, 2023
88bc037
Implement run choice
glcraft Sep 18, 2023
652d8c8
smol improv
glcraft Sep 18, 2023
9bd550c
Add python runner
glcraft Sep 18, 2023
0e502b7
add home_dir fn
glcraft Sep 18, 2023
4798d63
Add cache for search programs
glcraft Sep 18, 2023
cbf1a15
check for program cache existence
glcraft Sep 18, 2023
faca981
better interactive display
glcraft Sep 18, 2023
37f4a2e
code opti based on review
glcraft Sep 18, 2023
08d2a4c
add shell variant based on code lang
glcraft Sep 18, 2023
e3cbda9
RunChoice derive Default
glcraft Sep 18, 2023
5239d73
check python for version 3 or more
glcraft Sep 19, 2023
04767f6
fix warnings
glcraft Sep 19, 2023
766ab6a
better clap arguments code
glcraft Sep 19, 2023
c158390
replace lazy_static with once_cell
glcraft Sep 19, 2023
71dade6
Fix home_dir
glcraft Sep 19, 2023
2d2b0d6
rewrite cache
glcraft Sep 19, 2023
927daf6
remove unused file
glcraft Sep 20, 2023
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
49 changes: 29 additions & 20 deletions Cargo.lock

Some generated files are not rendered by default. Learn more about how customized files appear on GitHub.

2 changes: 2 additions & 0 deletions Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -32,9 +32,11 @@ serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0.95"
serde_yaml = "0.9"
smartstring = { version = "1.0", features = ["serde"] }
tempfile = "3.8"
thiserror = "1.0"
tokio = { version = "1", features = ["full"] }
tokio-stream = "0.1.12"
tokio-util = {version = "0.7", features = ["io"]}

aio-cargo-info = { path = "./crates/aio-cargo-info", version = "0.1" }

Expand Down
77 changes: 54 additions & 23 deletions src/arguments.rs
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,35 @@ use std::fmt::Display;

use clap::{Parser, ValueEnum};

/// Program to communicate with large language models and AI API
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
    /// Configuration file
    #[arg(long, default_value_t = String::from("~/.config/aio/config.yaml"))]
    pub config_path: String,
    /// Credentials file
    #[arg(long, default_value_t = String::from("~/.config/aio/creds.yaml"))]
    pub creds_path: String,
    /// Engine name
    ///
    /// The name can be followed by custom prompt name from the configuration file
    /// (ex: openai:command)
    #[arg(long, short)]
    pub engine: String,
    /// Formatter
    ///
    /// Possible values: markdown, raw
    #[arg(long, short, default_value_t = Default::default())]
    pub formatter: FormatterChoice,
    /// Run code block if the language is supported
    #[arg(long, short, default_value_t = Default::default())]
    pub run: RunChoice,
    // Removed a stray `/// Force to run code` doc line here: clap concatenates
    // consecutive doc comments, so it was being prepended to the help text of
    // `input` below, producing misleading --help output.
    /// User text prompt
    pub input: Option<String>,
}

#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum FormatterChoice {
Markdown,
Expand All @@ -28,36 +57,37 @@ impl Display for FormatterChoice {
}
}

/// Program to communicate with large language models and AI API
#[derive(Parser, Debug)]
#[command(author, version, about, long_about = None)]
pub struct Args {
/// Configuration file
#[arg(long, default_value_t = String::from("~/.config/aio/config.yaml"))]
pub config_path: String,
/// Credentials file
#[arg(long, default_value_t = String::from("~/.config/aio/creds.yaml"))]
pub creds_path: String,
/// Engine name
///
/// The name can be followed by custom prompt name from the configuration file
/// (ex: openai:command)
#[arg(long, short)]
pub engine: String,
/// Formatter
///
/// Possible values: markdown, raw
#[arg(long, short, default_value_t = Default::default())]
pub formatter: FormatterChoice,
/// User text prompt
pub input: Option<String>,
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum RunChoice {
/// Doesn't run anything
No,
/// Ask to run code
Ask,
/// Run code without asking
Force
}

impl Default for RunChoice {
glcraft marked this conversation as resolved.
Show resolved Hide resolved
fn default() -> Self {
RunChoice::No
}
}

impl Display for RunChoice {
glcraft marked this conversation as resolved.
Show resolved Hide resolved
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
RunChoice::No => write!(f, "no"),
RunChoice::Ask => write!(f, "ask"),
RunChoice::Force => write!(f, "force"),
}
}
}
/// `Args` after post-processing (see `From<Args>` below): same fields, but
/// `input` is a plain `String` (empty when the user gave none).
pub struct ProcessedArgs {
    // Path to the configuration file.
    pub config_path: String,
    // Path to the credentials file.
    pub creds_path: String,
    // Engine specifier as given on the command line (may still contain a
    // `:prompt` suffix — the caller splits it; see main).
    pub engine: String,
    // Chosen output formatter.
    pub formatter: FormatterChoice,
    // Whether/how to run code blocks from the answer.
    pub run: RunChoice,
    // User text prompt; defaults to "" when absent.
    pub input: String,
}

Expand All @@ -68,6 +98,7 @@ impl From<Args> for ProcessedArgs {
creds_path: args.creds_path,
engine: args.engine,
formatter: args.formatter,
run: args.run,
input: args.input.unwrap_or_default(),
}
}
Expand Down
14 changes: 14 additions & 0 deletions src/generators/debug.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,14 @@
use crate::args;
use super::{ResultRun, ResultStream, Error};

/// Debug generator: streams the contents of the file named by `args.input`
/// back as if it were a model answer, chunk by chunk.
///
/// Returns `Error::Custom` if the file cannot be opened, read, or decoded.
pub async fn run(_config: crate::config::Config, args: args::ProcessedArgs) -> ResultRun {
    use tokio_stream::StreamExt;
    // Single place to wrap any I/O / decoding error into the generator error type.
    fn custom_err(e: impl ToString) -> Error {
        Error::Custom(std::borrow::Cow::Owned(e.to_string()))
    }
    let file = tokio::fs::File::open(&args.input).await.map_err(custom_err)?;

    let stream = tokio_util::io::ReaderStream::new(file).map(|chunk| -> ResultStream {
        let bytes = chunk.map_err(custom_err)?;
        // NOTE(review): ReaderStream chunks at arbitrary byte boundaries, so a
        // multi-byte UTF-8 sequence split across two chunks makes this decode
        // fail spuriously — confirm inputs are ASCII or use an incremental decoder.
        String::from_utf8(bytes.as_ref().to_vec()).map_err(custom_err)
    });
    Ok(Box::pin(stream))
}
1 change: 1 addition & 0 deletions src/generators/mod.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
pub mod openai;
pub mod debug;

use tokio_stream::Stream;
use thiserror::Error;
Expand Down
34 changes: 25 additions & 9 deletions src/main.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
pub mod arguments;
mod runner;
mod generators;
mod formatters;
mod config;
Expand All @@ -21,13 +22,21 @@ macro_rules! raise_str {
};
}

/// Returns the current user's home directory, read from the environment once
/// and cached for the lifetime of the process.
///
/// Uses `std::sync::OnceLock` (stdlib) instead of the `lazy_static!` macro —
/// same lazy-init semantics, no external dependency.
///
/// # Panics
/// Panics if `HOME` (unix) / `USERPROFILE` (windows) is not set.
fn home_dir() -> &'static str {
    use std::sync::OnceLock;
    static HOME: OnceLock<String> = OnceLock::new();
    HOME.get_or_init(|| {
        #[cfg(unix)]
        { std::env::var("HOME").expect("Failed to resolve home path") }
        #[cfg(windows)]
        { std::env::var("USERPROFILE").expect("Failed to resolve user profile path") }
    })
}

fn resolve_path(path: &str) -> Cow<str> {
if path.starts_with("~/") {
#[cfg(unix)]
let home = std::env::var("HOME").expect("Failed to resolve home path");
#[cfg(windows)]
let home = std::env::var("USERPROFILE").expect("Failed to resolve user profile path");
Cow::Owned(format!("{}{}{}", home, std::path::MAIN_SEPARATOR, &path[2..]))
Cow::Owned(format!("{}{}{}", home_dir(), std::path::MAIN_SEPARATOR, &path[2..]))
} else {
Cow::Borrowed(path)
}
Expand Down Expand Up @@ -67,23 +76,30 @@ async fn main() -> Result<(), String> {
args::FormatterChoice::Markdown => Box::new(formatters::new_markdown_formatter()),
args::FormatterChoice::Raw => Box::new(formatters::new_raw_formatter()),
};
let mut runner = runner::Runner::new(args.run);

let engine = args.engine
let (engine, _prompt) = args.engine
.find(':')
.map(|i| &args.engine[..i])
.unwrap_or(args.engine.as_str());
.map(|i| (&args.engine[..i], Some(&args.engine[i+1..])))
.unwrap_or((args.engine.as_str(), None));

let mut stream = match engine {
"openai" => generators::openai::run(creds.openai, config, args).await,
"from-file" => generators::debug::run(config, args).await,
_ => panic!("Unknown engine: {}", engine),
}.map_err(|e| format!("Failed to request OpenAI API: {}", e))?;

loop {
match stream.next().await {
Some(Ok(token)) => raise_str!(formatter.push(&token), "Failed to parse markdown: {}"),
Some(Ok(token)) => {
raise_str!(formatter.push(&token), "Failed to parse markdown: {}");
raise_str!(runner.push(&token), "Failed push text in the runner system: {}");
},
Some(Err(e)) => Err(e.to_string())?,
None => break,
}
}
raise_str!(formatter.end_of_document(), "Failed to end markdown: {}");
raise_str!(runner.end_of_document(), "Failed to run code: {}");
Ok(())
}
Loading