From 5d33030ff285aa6eb7bca1980ffd5b51c16d8bd4 Mon Sep 17 00:00:00 2001
From: YuchenJin
Date: Wed, 8 Nov 2023 10:12:51 -0800
Subject: [PATCH] small fixes

---
 python/mlc_chat/chat_module.py | 8 +++-----
 1 file changed, 3 insertions(+), 5 deletions(-)

diff --git a/python/mlc_chat/chat_module.py b/python/mlc_chat/chat_module.py
index 1e47729ac9..d5fcd5793f 100644
--- a/python/mlc_chat/chat_module.py
+++ b/python/mlc_chat/chat_module.py
@@ -41,10 +41,10 @@ class ConvConfig:  # pylint: disable=too-many-instance-attributes
     roles : Optional[List[str]]
         An array that describes the role names of the user and the model. These names are
         specific to the model being used.
-    messages : Optional[List[str]]
+    messages : Optional[List[List[str]]]
         The chat history represented as an array of string pairs in the following format:
         ``[[role_0, msg_0], [role_1, msg_1], ...]``.
-    offset : Optional[str]
+    offset : Optional[int]
         The offset used to begin the chat from the chat history. When offset is not ``0``,
         ``messages[0:offset-1]`` will be encoded.
     separator_style : Optional[int]
@@ -69,7 +69,7 @@ class ConvConfig:  # pylint: disable=too-many-instance-attributes
     system: Optional[str] = None
     roles: Optional[List[str]] = None
     messages: Optional[List[List[str]]] = None
-    offset: Optional[str] = None
+    offset: Optional[int] = None
     separator_style: Optional[int] = None
     seps: Optional[List[str]] = None
     role_msg_sep: Optional[str] = None
@@ -842,8 +842,6 @@ def generate(
         if (generation_config is not None) and (generation_config.n is not None):
             num_return_sequences = generation_config.n
             return_str = False
-        else:
-            num_return_sequences = 1
 
         for _ in range(num_return_sequences):
            self.reset_chat()
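
For reference, a minimal usage sketch of the corrected field types, assuming ConvConfig is the
dataclass defined in python/mlc_chat/chat_module.py as shown in the diff; the role strings and
message contents below are illustrative placeholders, not values prescribed by the patch.

    from mlc_chat.chat_module import ConvConfig

    # messages is a list of [role, message] string pairs (List[List[str]]),
    # and offset is an int index into that history, matching the fixed docstring.
    conv = ConvConfig(
        messages=[
            ["USER", "What is the capital of Canada?"],        # hypothetical role names
            ["ASSISTANT", "The capital of Canada is Ottawa."],
        ],
        offset=2,  # per the docstring, messages[0:offset-1] will be encoded
    )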