From ef651247d0daaafcab0d8f1c1ce7e3838bcd95a2 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?DAN=E2=84=A2?=
Date: Sat, 2 Mar 2024 21:56:45 -0500
Subject: [PATCH 1/3] Support special tokens as reverse/anti prompt.

---
 examples/main/main.cpp | 3 ++-
 1 file changed, 2 insertions(+), 1 deletion(-)

diff --git a/examples/main/main.cpp b/examples/main/main.cpp
index 34e84d0d42f87..861a88d58da42 100644
--- a/examples/main/main.cpp
+++ b/examples/main/main.cpp
@@ -760,7 +760,8 @@ int main(int argc, char ** argv) {
                         ? last_output.length() - static_cast<size_t>(antiprompt.length() + extra_padding)
                         : 0;
 
-                    if (last_output.find(antiprompt, search_start_pos) != std::string::npos) {
+                    auto tmp = ::llama_tokenize(ctx, antiprompt, false, true);
+                    if (last_output.find(antiprompt, search_start_pos) != std::string::npos || (tmp.size() == 1 && llama_sampling_last(ctx_sampling) == tmp[0])) {
                         if (params.interactive) {
                             is_interacting = true;
                         }

From 60325ec78e381790ba31ec5289855d6a20b5a694 Mon Sep 17 00:00:00 2001
From: =?UTF-8?q?DAN=E2=84=A2?=
Date: Sun, 3 Mar 2024 08:46:43 -0500
Subject: [PATCH 2/3] Tokenize antiprompts only once.

---
 examples/main/main.cpp | 23 +++++++++++++++++++++--
 1 file changed, 21 insertions(+), 2 deletions(-)

diff --git a/examples/main/main.cpp b/examples/main/main.cpp
index 861a88d58da42..0a5bbbbd1fe9e 100644
--- a/examples/main/main.cpp
+++ b/examples/main/main.cpp
@@ -760,8 +760,27 @@ int main(int argc, char ** argv) {
                         ? last_output.length() - static_cast<size_t>(antiprompt.length() + extra_padding)
                         : 0;
 
-                    auto tmp = ::llama_tokenize(ctx, antiprompt, false, true);
-                    if (last_output.find(antiprompt, search_start_pos) != std::string::npos || (tmp.size() == 1 && llama_sampling_last(ctx_sampling) == tmp[0])) {
+                    if (last_output.find(antiprompt, search_start_pos) != std::string::npos) {
+                        if (params.interactive) {
+                            is_interacting = true;
+                        }
+                        is_antiprompt = true;
+                        break;
+                    }
+                }
+
+                // tokenize reverse/antiprompt special tokens only once using static
+                static std::vector<std::vector<llama_token>> antiprompt_ids;
+                if (antiprompt_ids.empty()) {
+                    for (std::string& antiprompt : params.antiprompt) {
+                        antiprompt_ids.push_back(::llama_tokenize(ctx, antiprompt, false, true));
+                    }
+                }
+
+                // check for reverse prompt using special tokens
+                llama_token last_token = llama_sampling_last(ctx_sampling);
+                for (std::vector<llama_token> ids : antiprompt_ids) {
+                    if (ids.size() == 1 && last_token == ids[0]) {
                         if (params.interactive) {
                             is_interacting = true;
                         }

From 98dae326b1826493c4abc7200ea74a04fee679d9 Mon Sep 17 00:00:00 2001
From: Georgi Gerganov
Date: Mon, 4 Mar 2024 09:55:39 +0200
Subject: [PATCH 3/3] main : minor

---
 examples/main/main.cpp | 16 ++++++++--------
 1 file changed, 8 insertions(+), 8 deletions(-)

diff --git a/examples/main/main.cpp b/examples/main/main.cpp
index 0a5bbbbd1fe9e..47059e582a0d4 100644
--- a/examples/main/main.cpp
+++ b/examples/main/main.cpp
@@ -511,6 +511,14 @@ int main(int argc, char ** argv) {
     std::vector<llama_token> embd;
     std::vector<llama_token> embd_guidance;
 
+    // tokenized antiprompts
+    std::vector<std::vector<llama_token>> antiprompt_ids;
+
+    antiprompt_ids.reserve(params.antiprompt.size());
+    for (const std::string & antiprompt : params.antiprompt) {
+        antiprompt_ids.emplace_back(::llama_tokenize(ctx, antiprompt, false, true));
+    }
+
     struct llama_sampling_context * ctx_sampling = llama_sampling_init(sparams);
 
     while ((n_remain != 0 && !is_antiprompt) || params.interactive) {
@@ -769,14 +777,6 @@ int main(int argc, char ** argv) {
                     }
                 }
 
-                // tokenize reverse/antiprompt special tokens only once using static
-                static std::vector<std::vector<llama_token>> antiprompt_ids;
-                if (antiprompt_ids.empty()) {
-                    for (std::string& antiprompt : params.antiprompt) {
-                        antiprompt_ids.push_back(::llama_tokenize(ctx, antiprompt, false, true));
-                    }
-                }
-
                 // check for reverse prompt using special tokens
                 llama_token last_token = llama_sampling_last(ctx_sampling);
                 for (std::vector<llama_token> ids : antiprompt_ids) {
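
Note on the final approach: the sketch below restates the check that PATCH 3/3 leaves in examples/main/main.cpp, but as a self-contained program so the single-token comparison can be tried in isolation. fake_tokenize() and the hard-coded token ids are stand-ins for ::llama_tokenize(ctx, text, false, true) and real vocabulary ids; they are assumptions made for illustration only and are not part of the patches.

// Standalone sketch of the antiprompt check introduced by this series.
// fake_tokenize() stands in for ::llama_tokenize(ctx, text, false, true);
// the token values are invented for illustration.
#include <cstdio>
#include <string>
#include <vector>

using token_id = int;

// Hypothetical tokenizer: maps a couple of special strings to single tokens.
static std::vector<token_id> fake_tokenize(const std::string & text) {
    if (text == "<|im_end|>") return { 32000 };  // single special token
    if (text == "###")        return { 835 };    // also a single token here
    return { 1, 2, 3 };                          // multi-token fallback
}

int main() {
    const std::vector<std::string> antiprompts = { "<|im_end|>", "###" };

    // Tokenize each antiprompt once, up front (antiprompt_ids in PATCH 3/3).
    std::vector<std::vector<token_id>> antiprompt_ids;
    antiprompt_ids.reserve(antiprompts.size());
    for (const std::string & antiprompt : antiprompts) {
        antiprompt_ids.push_back(fake_tokenize(antiprompt));
    }

    // Pretend this is the token the sampler just produced
    // (llama_sampling_last(ctx_sampling) in the patch).
    const token_id last_token = 32000;

    // An antiprompt that maps to exactly one token can be matched directly
    // against the last sampled token, without any string comparison.
    bool is_antiprompt = false;
    for (const std::vector<token_id> & ids : antiprompt_ids) {
        if (ids.size() == 1 && last_token == ids[0]) {
            is_antiprompt = true;
            break;
        }
    }

    std::printf("antiprompt hit: %s\n", is_antiprompt ? "yes" : "no");
    return 0;
}

Matching on the token id rather than on the detokenized string is what lets a special token such as an end-of-turn marker act as a reverse/anti prompt even when it never appears in the printed output, which is the behavior PATCH 1/3 set out to support.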