conversational_chain.rs
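// NOTE: This example calls the OpenAI API, so it assumes a valid API key is
// available to the client (typically via the OPENAI_API_KEY environment variable).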
use std::io::{stdout, Write};
use futures_util::StreamExt;
use langchain_rust::{
    chain::{builder::ConversationalChainBuilder, Chain},
    // fmt_message, fmt_template,
    llm::openai::{OpenAI, OpenAIModel},
    memory::SimpleMemory,
    // message_formatter,
    // prompt::HumanMessagePromptTemplate,
    prompt_args,
    // schemas::Message,
    // template_fstring,
};
#[tokio::main]
async fn main() {
    let llm = OpenAI::default().with_model(OpenAIModel::Gpt35);
    // We initialize a simple memory. Conversational chains have this memory by
    // default; we initialize it here only as an example. If you don't want any
    // memory, use DummyMemory instead.
    let memory = SimpleMemory::new();
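    // A minimal sketch of the no-memory variant mentioned above, assuming
    // DummyMemory is exported from the same `memory` module:
    // let memory = DummyMemory::new();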
    let chain = ConversationalChainBuilder::new()
        .llm(llm)
        // IF YOU WANT TO ADD A CUSTOM PROMPT, YOU CAN UNCOMMENT THIS:
        // .prompt(message_formatter![
        //     fmt_message!(Message::new_system_message("You are a helpful assistant")),
        //     fmt_template!(HumanMessagePromptTemplate::new(
        //         template_fstring!("
        // The following is a friendly conversation between a human and an AI. The AI is talkative and provides lots of specific details from its context. If the AI does not know the answer to a question, it truthfully says it does not know.
        //
        // Current conversation:
        // {history}
        // Human: {input}
        // AI:
        // ",
        //         "input", "history")))
        // ])
        .memory(memory.into())
        .build()
        .expect("Error building ConversationalChain");
    let input_variables = prompt_args! {
        "input" => "I'm from Peru",
    };
    let mut stream = chain.stream(input_variables).await.unwrap();
    while let Some(result) = stream.next().await {
        match result {
            Ok(data) => {
                // If you just want to print to stdout, you can use
                // data.to_stdout().unwrap();
                print!("{}", data.content);
                stdout().flush().unwrap();
            }
            Err(e) => {
                println!("Error: {:?}", e);
            }
        }
    }
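    // Because SimpleMemory stored the first exchange, this follow-up question
    // is interpreted in context: "typical dishes" refers to Peru.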
    let input_variables = prompt_args! {
        "input" => "What are the typical dishes?",
    };
    match chain.invoke(input_variables).await {
        Ok(result) => {
            println!("\n");
            println!("Result: {:?}", result);
        }
        Err(e) => panic!("Error invoking ConversationalChain: {:?}", e),
    }
}