From 4be7ecf25e5a13c4a4f614f4b7d3bf095805c9ba Mon Sep 17 00:00:00 2001
From: Xuan Son Nguyen
Date: Sat, 12 Oct 2024 23:19:52 +0200
Subject: [PATCH] fix perplexity

---
 examples/perplexity/perplexity.cpp | 13 ++++++-------
 1 file changed, 6 insertions(+), 7 deletions(-)

diff --git a/examples/perplexity/perplexity.cpp b/examples/perplexity/perplexity.cpp
index 252ef56ba6246..e803ff143f7d1 100644
--- a/examples/perplexity/perplexity.cpp
+++ b/examples/perplexity/perplexity.cpp
@@ -408,16 +408,15 @@ static results_perplexity perplexity_v2(llama_context * ctx, const common_params
         // clear the KV cache
         llama_kv_cache_clear(ctx);
 
+        llama_batch batch = llama_batch_init(n_batch, 0, 1);
+
         for (int j = 0; j < num_batches; ++j) {
             const int batch_start = start + j * n_batch;
             const int batch_size  = std::min(end - batch_start, n_batch);
 
-            llama_batch batch = llama_batch_init(batch_size, 0, 1);
+            common_batch_clear(batch);
             for (int i = 0; i < batch_size; i++) {
-                batch. token[i]    = tokens[batch_start + i];
-                batch.   pos[i]    = j*n_batch + i;
-                batch.logits[i]    = true;
-                batch.seq_id[i][0] = 0;
+                common_batch_add(batch, tokens[batch_start + i], j*n_batch + i, {0}, true);
             }
 
             //LOG_DBG("    Batch %d: starts at %d, size is %d, n_past is %d\n",j,batch_start,batch_size,j * n_batch);
@@ -427,8 +426,6 @@ static results_perplexity perplexity_v2(llama_context * ctx, const common_params
                 return {tokens, -1, logit_history, prob_history};
             }
 
-            llama_batch_free(batch);
-
             // save original token and restore it after eval
             const auto token_org = tokens[batch_start];
 
@@ -445,6 +442,8 @@ static results_perplexity perplexity_v2(llama_context * ctx, const common_params
             }
         }
 
+        llama_batch_free(batch);
+
         const auto t_end = std::chrono::high_resolution_clock::now();
 
         if (i == 0) {
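
Note (not part of the patch): the sketch below illustrates the batch-reuse pattern the diff switches to — one llama_batch allocated with capacity n_batch before the loop, refilled with common_batch_clear/common_batch_add on each iteration, and freed once after the loop. The llama.cpp calls are the ones used in the diff; the helper name eval_tokens and its surrounding logic are illustrative only.

    #include <algorithm>
    #include <vector>

    #include "common.h"
    #include "llama.h"

    // Decode `tokens` in slices of at most n_batch tokens, reusing a single llama_batch.
    // Illustrative helper; assumes `ctx` already holds a loaded model.
    static bool eval_tokens(llama_context * ctx, const std::vector<llama_token> & tokens, int n_batch) {
        const int n_tokens    = (int) tokens.size();
        const int num_batches = (n_tokens + n_batch - 1) / n_batch;

        // allocate once with the maximum capacity (n_batch), not once per iteration
        llama_batch batch = llama_batch_init(n_batch, 0, 1);

        bool ok = true;
        for (int j = 0; j < num_batches && ok; ++j) {
            const int batch_start = j * n_batch;
            const int batch_size  = std::min(n_tokens - batch_start, n_batch);

            // reset batch.n_tokens to 0, then refill the reused batch
            common_batch_clear(batch);
            for (int i = 0; i < batch_size; i++) {
                // sequence 0, request logits for every position
                common_batch_add(batch, tokens[batch_start + i], batch_start + i, {0}, true);
            }

            ok = llama_decode(ctx, batch) == 0;
        }

        // free exactly once, after the last slice has been decoded
        llama_batch_free(batch);

        return ok;
    }

Besides avoiding the per-iteration alloc/free, common_batch_add also maintains batch.n_tokens and the per-token n_seq_id, which the removed manual assignments to batch.token/pos/logits/seq_id did not set.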