
Commit

fix perplexity
ngxson committed Oct 12, 2024
1 parent 6395174 commit 4be7ecf
Showing 1 changed file with 6 additions and 7 deletions.
examples/perplexity/perplexity.cpp: 13 changes (6 additions & 7 deletions)
@@ -408,16 +408,15 @@ static results_perplexity perplexity_v2(llama_context * ctx, const common_params
         // clear the KV cache
         llama_kv_cache_clear(ctx);
 
+        llama_batch batch = llama_batch_init(n_batch, 0, 1);
+
         for (int j = 0; j < num_batches; ++j) {
             const int batch_start = start + j * n_batch;
             const int batch_size  = std::min(end - batch_start, n_batch);
 
-            llama_batch batch = llama_batch_init(batch_size, 0, 1);
+            common_batch_clear(batch);
             for (int i = 0; i < batch_size; i++) {
-                batch.token [i]    = tokens[batch_start + i];
-                batch.pos   [i]    = j*n_batch + i;
-                batch.logits[i]    = true;
-                batch.seq_id[i][0] = 0;
+                common_batch_add(batch, tokens[batch_start + i], j*n_batch + i, {0}, true);
             }
 
             //LOG_DBG("    Batch %d: starts at %d, size is %d, n_past is %d\n",j,batch_start,batch_size,j * n_batch);
@@ -427,8 +426,6 @@ static results_perplexity perplexity_v2(llama_context * ctx, const common_params
                 return {tokens, -1, logit_history, prob_history};
             }
 
-            llama_batch_free(batch);
-
             // save original token and restore it after eval
             const auto token_org = tokens[batch_start];
 
@@ -445,6 +442,8 @@ static results_perplexity perplexity_v2(llama_context * ctx, const common_params
             }
         }
 
+        llama_batch_free(batch);
+
         const auto t_end = std::chrono::high_resolution_clock::now();
 
         if (i == 0) {
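
In short, the change hoists the llama_batch allocation out of the per-chunk loop: a single batch sized for n_batch is created once, reused across iterations via common_batch_clear / common_batch_add, and freed once after the loop, instead of being allocated, filled field by field, and freed inside every iteration. Below is a minimal sketch of that pattern, assuming an existing llama_context * ctx and a tokenized input; the wrapper function eval_in_batches and its error handling are illustrative and not part of the commit.

#include "common.h"
#include "llama.h"

#include <algorithm>
#include <vector>

// Sketch of the batch-reuse pattern shown in the diff above (assumed helper,
// not from the repository). Uses llama_batch_init / common_batch_clear /
// common_batch_add / llama_decode / llama_batch_free as in the changed code.
static bool eval_in_batches(llama_context * ctx, const std::vector<llama_token> & tokens, int n_batch) {
    const int n_tokens    = (int) tokens.size();
    const int num_batches = (n_tokens + n_batch - 1) / n_batch;

    // allocate once, sized for the largest chunk that will be submitted
    llama_batch batch = llama_batch_init(n_batch, 0, 1);

    for (int j = 0; j < num_batches; ++j) {
        const int batch_start = j * n_batch;
        const int batch_size  = std::min(n_tokens - batch_start, n_batch);

        // reuse the same allocation: reset the token count, then refill
        common_batch_clear(batch);
        for (int i = 0; i < batch_size; i++) {
            common_batch_add(batch, tokens[batch_start + i], batch_start + i, {0}, true);
        }

        if (llama_decode(ctx, batch) != 0) {
            llama_batch_free(batch);   // free on the error path too (sketch-only choice)
            return false;
        }
    }

    // free exactly once, after all chunks have been decoded
    llama_batch_free(batch);
    return true;
}

Reusing one allocation sized for n_batch (always at least batch_size) avoids a per-iteration init/free pair and reduces the fill loop to a single common_batch_add call per token.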
