
Commit

clean
masahi committed Jan 11, 2024
1 parent 4ccbb27 commit f1314a5
Showing 1 changed file with 3 additions and 8 deletions.
11 changes: 3 additions & 8 deletions examples/python/run_llama_batched_vllm.py
@@ -276,14 +276,9 @@ def _prepare_eval_queries(
 
         if sliding_window:
             seq_lens.append(min(num_past_tokens + num_queries, sliding_window))
-            # TODO: verify this
-            past_slot_mapping += all_slot_mappings[request_id][
-                : min(num_past_tokens, sliding_window)
-            ]
-            slot_mapping += all_slot_mappings[request_id][
-                min(num_past_tokens, sliding_window) : min(num_past_tokens, sliding_window)
-                + num_queries
-            ]
+            num_past = min(num_past_tokens, sliding_window)
+            past_slot_mapping += all_slot_mappings[request_id][:num_past]
+            slot_mapping += all_slot_mappings[request_id][num_past : num_past + num_queries]
         else:
             seq_lens.append(num_past_tokens + num_queries)
             past_slot_mapping += all_slot_mappings[request_id][:num_past_tokens]
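
For readers skimming the change: the sketch below mirrors the slicing the cleaned-up branch performs. It assumes all_slot_mappings maps each request id to one cache-slot index per token, in token order, with the query tokens' slots appended after the past tokens' slots; split_slot_mappings and the sample data are hypothetical names used here for illustration, not part of the repository.

    def split_slot_mappings(all_slot_mappings, request_id, num_past_tokens, num_queries, sliding_window=None):
        slots = all_slot_mappings[request_id]
        # With a sliding window, at most `sliding_window` past tokens are resident in the KV cache.
        num_past = min(num_past_tokens, sliding_window) if sliding_window else num_past_tokens
        past_slot_mapping = slots[:num_past]
        slot_mapping = slots[num_past : num_past + num_queries]
        return past_slot_mapping, slot_mapping

    # Example: 3 past tokens, 2 query tokens, no sliding window.
    mappings = {"r0": [16, 17, 18, 19, 20]}
    past, new = split_slot_mappings(mappings, "r0", num_past_tokens=3, num_queries=2)
    assert past == [16, 17, 18]
    assert new == [19, 20]

Factoring the repeated min(num_past_tokens, sliding_window) into num_past is what trims the eight deleted lines to three and keeps the two slices visibly symmetric with the non-sliding-window branch.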
