Skip to content

Commit

Permalink
Fix temperature handling for o1 models: drop the temperature parameter from the request payload (o1 only accepts the default temperature of 1)
Browse files Browse the repository at this point in the history
  • Loading branch information
binary-husky committed Dec 24, 2024
1 parent 1dd1d0e commit 97a81e9
Show file tree
Hide file tree
Showing 2 changed files with 8 additions and 2 deletions.
4 changes: 4 additions & 0 deletions request_llms/bridge_all.py
Original file line number Diff line number Diff line change
Expand Up @@ -273,6 +273,7 @@ def decode(self, *args, **kwargs):
"token_cnt": get_token_num_gpt4,
"openai_disable_system_prompt": True,
"openai_disable_stream": True,
"openai_force_temperature_one": True,
},

"o1-mini": {
Expand All @@ -284,6 +285,7 @@ def decode(self, *args, **kwargs):
"token_cnt": get_token_num_gpt4,
"openai_disable_system_prompt": True,
"openai_disable_stream": True,
"openai_force_temperature_one": True,
},

"o1-2024-12-17": {
Expand All @@ -295,6 +297,7 @@ def decode(self, *args, **kwargs):
"token_cnt": get_token_num_gpt4,
"openai_disable_system_prompt": True,
"openai_disable_stream": True,
"openai_force_temperature_one": True,
},

"o1": {
Expand All @@ -306,6 +309,7 @@ def decode(self, *args, **kwargs):
"token_cnt": get_token_num_gpt4,
"openai_disable_system_prompt": True,
"openai_disable_stream": True,
"openai_force_temperature_one": True,
},

"gpt-4-turbo": {
Expand Down
6 changes: 4 additions & 2 deletions request_llms/bridge_chatgpt.py
Original file line number Diff line number Diff line change
Expand Up @@ -351,7 +351,7 @@ def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot:ChatBotWith
raise ValueError(f'无法读取以下数据,请检查配置。\n\n{chunk_decoded}')
# 前者是API2D & One-API的结束条件,后者是OPENAI的结束条件
one_api_terminate = ('data: [DONE]' in chunk_decoded)
openai_terminate = (len(chunkjson['choices'][0]["delta"]) == 0)
openai_terminate = (has_choices) and (len(chunkjson['choices'][0]["delta"]) == 0)
if one_api_terminate or openai_terminate:
is_termination_certain = False
if one_api_terminate: is_termination_certain = True # 抓取符合规范的结束条件
Expand Down Expand Up @@ -563,6 +563,8 @@ def generate_payload(inputs:str, llm_kwargs:dict, history:list, system_prompt:st
"n": 1,
"stream": stream,
}

openai_force_temperature_one = model_info[llm_kwargs['llm_model']].get('openai_force_temperature_one', False)
if openai_force_temperature_one:
payload.pop('temperature')
return headers,payload

0 comments on commit 97a81e9

Please sign in to comment.