From 97a81e9388a275a622df296898e159bee93b1fcb Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Wed, 25 Dec 2024 00:54:03 +0800
Subject: [PATCH] fix temperature issue of o1 models

---
 request_llms/bridge_all.py     | 4 ++++
 request_llms/bridge_chatgpt.py | 6 ++++--
 2 files changed, 8 insertions(+), 2 deletions(-)

diff --git a/request_llms/bridge_all.py b/request_llms/bridge_all.py
index 20d7798be..2d6d3f50f 100644
--- a/request_llms/bridge_all.py
+++ b/request_llms/bridge_all.py
@@ -273,6 +273,7 @@ def decode(self, *args, **kwargs):
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
 
     "o1-mini": {
@@ -284,6 +285,7 @@ def decode(self, *args, **kwargs):
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
 
     "o1-2024-12-17": {
@@ -295,6 +297,7 @@ def decode(self, *args, **kwargs):
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
 
     "o1": {
@@ -306,6 +309,7 @@ def decode(self, *args, **kwargs):
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
 
     "gpt-4-turbo": {
diff --git a/request_llms/bridge_chatgpt.py b/request_llms/bridge_chatgpt.py
index 1563ae3d0..04b846da8 100644
--- a/request_llms/bridge_chatgpt.py
+++ b/request_llms/bridge_chatgpt.py
@@ -351,7 +351,7 @@ def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot:ChatBotWith
                     raise ValueError(f'Unable to parse the following data, please check the configuration.\n\n{chunk_decoded}')
                 # The former is the termination condition of API2D & One-API; the latter is OpenAI's termination condition
                 one_api_terminate = ('data: [DONE]' in chunk_decoded)
-                openai_terminate = (len(chunkjson['choices'][0]["delta"]) == 0)
+                openai_terminate = (has_choices) and (len(chunkjson['choices'][0]["delta"]) == 0)
                 if one_api_terminate or openai_terminate:
                     is_termination_certain = False
                     if one_api_terminate: is_termination_certain = True # caught a spec-compliant termination condition
@@ -563,6 +563,8 @@ def generate_payload(inputs:str, llm_kwargs:dict, history:list, system_prompt:st
         "n": 1,
         "stream": stream,
     }
-
+    openai_force_temperature_one = model_info[llm_kwargs['llm_model']].get('openai_force_temperature_one', False)
+    if openai_force_temperature_one:
+        payload.pop('temperature')
     return headers,payload
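
What the new openai_force_temperature_one flag does, in isolation: the o1 family of models only accepts the default temperature of 1, so generate_payload() now drops the "temperature" key from the request payload whenever the selected model's entry in model_info sets the flag, letting the API fall back to its default. Below is a minimal, self-contained sketch of that behavior; build_payload and the abbreviated model_info dict are illustrative stand-ins, not the real code in request_llms/.

    # Abbreviated stand-in for the model_info dict edited in bridge_all.py.
    model_info = {
        "o1":          {"openai_force_temperature_one": True},
        "gpt-4-turbo": {},  # ordinary model: flag absent
    }

    # Simplified stand-in for generate_payload() in bridge_chatgpt.py.
    def build_payload(model: str, temperature: float) -> dict:
        payload = {"model": model, "temperature": temperature, "n": 1}
        # Mirrors the three lines added by this patch: when the flag is set,
        # remove "temperature" so the API applies its server-side default (1).
        if model_info[model].get("openai_force_temperature_one", False):
            payload.pop("temperature")
        return payload

    assert "temperature" not in build_payload("o1", 0.7)
    assert build_payload("gpt-4-turbo", 0.7)["temperature"] == 0.7

Dropping the key, rather than overwriting it with 1, avoids sending a field the endpoint might reject and has the same effect, since the server-side default is 1.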