-
Notifications
You must be signed in to change notification settings - Fork 0
/
Copy pathopeninterpreter_chainlit.py
89 lines (71 loc) · 2.92 KB
/
openinterpreter_chainlit.py
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
60
61
62
63
64
65
66
67
68
69
70
71
72
73
74
75
76
77
78
79
80
81
82
83
84
85
86
87
88
89
# Imports grouped per PEP 8: stdlib first, then third-party.
import os
import sys

import chainlit as cl
from chainlit.input_widget import Select
from interpreter import interpreter

# Default model served by a local Ollama instance; changeable at runtime
# via the chat-settings widget in start().
interpreter.llm.model = "ollama/deepseek-coder-v2"
# Execute model-generated code without asking for confirmation.
# WARNING: auto-running generated code is inherently unsafe.
interpreter.auto_run = True

# interpreter.api_key = os.getenv("OPENAI_API_KEY")
# interpreter.debug_mode=True

# 1. Custom StdOut class to output prints to Chainlit UI
# 2. Custom StdIn class to receive input from Chainlit UI
# WARNING: Do not write prints in there, otherwise infinite loop
class CustomStdout:
    """File-like stdout proxy that mirrors writes to the Chainlit UI.

    Every non-empty, non-bare-newline chunk written is also sent as a
    Chainlit message, then forwarded to the wrapped stream so terminal
    output is preserved.

    WARNING: never print() inside these methods — printing writes to this
    object again and causes infinite recursion.
    """

    def __init__(self, original_stdout):
        # The real underlying stream (typically sys.__stdout__).
        self.original_stdout = original_stdout

    def write(self, data):
        """Mirror *data* to the Chainlit UI and the original stream.

        Returns the number of characters written, as the
        io.TextIOBase.write contract requires (the original code
        implicitly returned None, which breaks callers that check the
        write count).
        """
        # Skip bare newlines / empty chunks so the UI isn't flooded
        # with blank messages.
        if data != "\n" and data != "":
            cl.run_sync(cl.Message(content=data).send())
        # Forward to the real stdout and propagate its character count.
        return self.original_stdout.write(data)

    def flush(self):
        """Flush the wrapped stream."""
        self.original_stdout.flush()
class CustomStdin:
    """File-like stdin proxy that sources input lines from the Chainlit UI.

    readline() blocks on a Chainlit AskUserMessage prompt, so code that
    calls input() while the interpreter runs is answered from the chat UI.
    """

    def __init__(self, original_stdin):
        # The real underlying stream (typically sys.__stdin__).
        self.original_stdin = original_stdin

    def readline(self):
        """Prompt the UI user and return their reply as one line.

        The reply is newline-terminated per the file readline contract
        (input() strips it again). BUG FIX: AskUserMessage.send() returns
        None when the prompt times out; the original code then raised
        TypeError on the None subscript. We return an empty line instead.
        """
        response_from_ui = cl.run_sync(cl.AskUserMessage(content="").send())
        if response_from_ui is None:
            # Prompt timed out / was dismissed: yield an empty line
            # rather than crashing (returning "" would signal EOF).
            return "\n"
        return str(response_from_ui["content"]) + "\n"

    def flush(self):
        """Flush the wrapped stream."""
        self.original_stdin.flush()
@cl.on_chat_start
async def start():
    """Chainlit session-start hook.

    Redirects stdout/stdin through the custom proxies so the
    interpreter's prints and input() calls flow through the chat UI,
    then presents the model-selection widget and applies its choice.
    """
    sys.stdout = CustomStdout(sys.__stdout__)
    sys.stdin = CustomStdin(sys.__stdin__)
    settings = await cl.ChatSettings(
        [
            Select(
                id="model",
                label="Ollama - Model",
                values=["ollama/deepseek-coder-v2", "ollama/llama3"],
                initial_index=0,
            ),
        ]
    ).send()
    # BUG FIX: the module top configures interpreter.llm.model; assigning
    # the unrelated interpreter.model attribute here had no effect on the
    # model actually used. Keep the two consistent.
    interpreter.llm.model = settings["model"]
@cl.on_settings_update
async def setup_agent(settings):
    """Chainlit settings-update hook: apply the newly selected model.

    BUG FIX: consistent with the module top, the active model lives at
    interpreter.llm.model — the original assignment to interpreter.model
    had no effect on the LLM used.
    """
    interpreter.llm.model = settings["model"]
    await cl.Message(content=f"Chose Ollama model {settings['model']}").send()
@cl.on_message
async def main(message: cl.Message):
    """Chainlit message hook: persist uploaded files, then run the interpreter.

    Uploaded file contents are written to the current working directory so
    the interpreter's generated code can access them; the user's text is
    then passed to interpreter.chat(), whose output streams through the
    redirected stdout.
    """
    if message.elements:
        for element in message.elements:
            # SECURITY: element.name is user-controlled; strip any
            # directory components so an upload named e.g. "../../x"
            # cannot write outside the working directory.
            file_name = os.path.basename(element.name)
            content = element.content
            # NOTE(review): element.content may be None for some uploads
            # (Chainlit can supply element.path instead) — confirm before
            # relying on this in production.
            # If want to show content Content: {content.decode('utf-8')}\n\n
            await cl.Message(content=f"Uploaded file: {file_name}\n").send()
            # Save the file locally so generated code can read it.
            with open(file_name, "wb") as file:
                file.write(content)
            # Record the upload in the conversation history so the model
            # knows the file exists.
            interpreter.load(
                [{"role": "assistant", "content": f"User uploaded file: {file_name}"}]
            )
    interpreter.chat(message.content)