internlm2_agent_cli_demo.py (forked from InternLM/lagent)
from argparse import ArgumentParser

from lagent.actions import ActionExecutor, ArxivSearch, IPythonInterpreter
from lagent.agents.internlm2_agent import (INTERPRETER_CN, META_CN, PLUGIN_CN,
                                           Internlm2Agent, Internlm2Protocol)
from lagent.llms import HFTransformer
from lagent.llms.meta_template import INTERNLM2_META as META
from lagent.schema import AgentStatusCode
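
# CLI demo: chat with an InternLM2 agent that can execute Python through an
# IPython interpreter action; the agent's replies are streamed to the terminal.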


def parse_args():
    parser = ArgumentParser(description='chatbot')
    parser.add_argument(
        '--path',
        type=str,
        default='internlm/internlm2-chat-20b',
        help='The path to the model')
    args = parser.parse_args()
    return args


def main():
    args = parse_args()
    # Initialize the HFTransformer-based Language Model (llm)
    model = HFTransformer(
        path=args.path,
        meta_template=META,
        max_new_tokens=1024,
        top_p=0.8,
        top_k=None,
        temperature=0.1,
        repetition_penalty=1.0,
        stop_words=['<|im_end|>'])
    plugin_executor = ActionExecutor(actions=[ArxivSearch()])  # noqa: F841
    interpreter_executor = ActionExecutor(actions=[IPythonInterpreter()])
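    # Note: only the interpreter executor is wired into the agent below
    # (plugin_executor=None), so the ArxivSearch plugin executor above is
    # unused in this demo (hence the noqa: F841).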
    chatbot = Internlm2Agent(
        llm=model,
        plugin_executor=None,
        interpreter_executor=interpreter_executor,
        protocol=Internlm2Protocol(
            meta_prompt=META_CN,
            interpreter_prompt=INTERPRETER_CN,
            plugin_prompt=PLUGIN_CN,
            tool=dict(
                begin='{start_token}{name}\n',
                start_token='<|action_start|>',
                name_map=dict(
                    plugin='<|plugin|>', interpreter='<|interpreter|>'),
                belong='assistant',
                end='<|action_end|>\n',
            ),
        ),
    )
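
    # Read multi-line user input; an empty line (i.e. pressing Enter twice)
    # ends the prompt.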
    def input_prompt():
        print('\ndouble enter to end input >>> ', end='', flush=True)
        sentinel = ''  # ends when this string is seen
        return '\n'.join(iter(input, sentinel))
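
    # Simple chat loop: type 'exit' to quit. The running history carries both
    # user turns and the agent's inner steps across iterations.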
    history = []
    while True:
        try:
            prompt = input_prompt()
        except UnicodeDecodeError:
            print('UnicodeDecodeError')
            continue
        if prompt == 'exit':
            exit(0)

        history.append(dict(role='user', content=prompt))
        print('\nInternLm2:', end='')
        current_length = 0
        last_status = None
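        # stream_chat yields incremental AgentReturn objects; print only the
        # newly generated suffix, resetting whenever the status changes
        # (e.g. from plain text streaming to code generation).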
        for agent_return in chatbot.stream_chat(history):
            status = agent_return.state
            if status not in [
                    AgentStatusCode.STREAM_ING, AgentStatusCode.CODING,
                    AgentStatusCode.PLUGIN_START
            ]:
                continue
            if status != last_status:
                current_length = 0
                print('')
            if isinstance(agent_return.response, dict):
                action = f"\n\n {agent_return.response['name']}: \n\n"
                action_input = agent_return.response['parameters']
                if agent_return.response['name'] == 'IPythonInterpreter':
                    action_input = action_input['command']
                response = action + action_input
            else:
                response = agent_return.response
            print(response[current_length:], end='', flush=True)
            current_length = len(response)
            last_status = status
        print('')
        history.extend(agent_return.inner_steps)


if __name__ == '__main__':
    main()
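
# To try the demo (assuming lagent and its dependencies are installed and a
# suitable GPU is available for the chosen checkpoint):
#   python internlm2_agent_cli_demo.py --path internlm/internlm2-chat-20b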