simple : fix batch handling (#3803)
tterrasson authored Oct 27, 2023
1 parent 2f9ec7e commit c8d6a1f
Showing 1 changed file with 4 additions and 14 deletions.
18 changes: 4 additions & 14 deletions examples/simple/simple.cpp
@@ -95,13 +95,8 @@ int main(int argc, char ** argv) {
     llama_batch batch = llama_batch_init(512, 0, 1);
 
     // evaluate the initial prompt
-    batch.n_tokens = tokens_list.size();
-
-    for (int32_t i = 0; i < batch.n_tokens; i++) {
-        batch.token[i]  = tokens_list[i];
-        batch.pos[i]    = i;
-        batch.seq_id[i] = 0;
-        batch.logits[i] = false;
+    for (size_t i = 0; i < tokens_list.size(); i++) {
+        llama_batch_add(batch, tokens_list[i], i, { 0 }, false);
     }
 
     // llama_decode will output logits only for the last token of the prompt
@@ -148,15 +143,10 @@ int main(int argc, char ** argv) {
         fflush(stdout);
 
         // prepare the next batch
-        batch.n_tokens = 0;
+        llama_batch_clear(batch);
 
         // push this new token for next evaluation
-        batch.token [batch.n_tokens] = new_token_id;
-        batch.pos   [batch.n_tokens] = n_cur;
-        batch.seq_id[batch.n_tokens] = 0;
-        batch.logits[batch.n_tokens] = true;
-
-        batch.n_tokens += 1;
+        llama_batch_add(batch, new_token_id, n_cur, { 0 }, true);
 
         n_decode += 1;
     }
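
The change replaces manual manipulation of the llama_batch fields with the llama_batch_add and llama_batch_clear helpers. The sketch below approximates what those two helpers do, reconstructed from the removed field assignments and the call sites in this diff; the actual implementations live in llama.cpp's common utilities, and details such as the multi-sequence handling are assumptions.

#include <vector>
#include "llama.h"

// Approximate sketch: append one token to the batch and advance n_tokens,
// mirroring the per-field writes that this diff removes.
void llama_batch_add(struct llama_batch & batch, llama_token id, llama_pos pos,
                     const std::vector<llama_seq_id> & seq_ids, bool logits) {
    batch.token   [batch.n_tokens] = id;
    batch.pos     [batch.n_tokens] = pos;
    batch.n_seq_id[batch.n_tokens] = seq_ids.size(); // assumes the per-token sequence-id array layout
    for (size_t i = 0; i < seq_ids.size(); ++i) {
        batch.seq_id[batch.n_tokens][i] = seq_ids[i];
    }
    batch.logits  [batch.n_tokens] = logits;

    batch.n_tokens++;
}

// Approximate sketch: reset the batch so it can be reused for the next decode;
// only the token count is cleared, the buffers from llama_batch_init are kept.
void llama_batch_clear(struct llama_batch & batch) {
    batch.n_tokens = 0;
}

With these helpers the example clears the batch once per iteration and then appends the newly sampled token with llama_batch_add(batch, new_token_id, n_cur, { 0 }, true), requesting logits only for that token.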
