[Prompt lookup] (openvinotoolkit#1245)
*Description:*
* Implementation of prompt lookup decoding on top of the continuous batching
pipeline (`cb_promp_lookup_impl` + `prompt_lookup_impl`)
* Update `prompt_lookup_sample` to use the new API (see the sketch after this list)
* Update statistics printing to make the output more usable
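
As a rough illustration, here is a minimal sketch of how the updated sample might drive the new API. The `ov::genai::prompt_lookup(true)` property and the `num_assistant_tokens` / `max_ngram_size` generation-config fields follow this PR's sample code, but treat the exact names and the streamer signature as assumptions rather than a definitive reference:

```cpp
#include <iostream>
#include <string>

#include "openvino/genai/llm_pipeline.hpp"

int main(int argc, char* argv[]) {
    // Usage: prompt_lookup_decoding_lm <MODEL_DIR> '<PROMPT>'
    if (argc != 3)
        return 1;

    ov::genai::GenerationConfig config;
    config.max_new_tokens = 100;
    config.num_assistant_tokens = 5;  // draft tokens proposed per step
    config.max_ngram_size = 3;        // longest prompt n-gram searched for a match

    // Enabling prompt lookup routes generation through the new
    // continuous-batching-based implementation.
    ov::genai::LLMPipeline pipe(argv[1], "CPU", ov::genai::prompt_lookup(true));

    auto streamer = [](std::string subword) {
        std::cout << subword << std::flush;
        return false;  // false keeps generation going
    };
    pipe.generate(argv[2], config, streamer);
    return 0;
}
```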

*Ticket:*
* https://jira.devtools.intel.com/browse/CVS-137987

*Example of usage:*
* **Input:** `return 0;`
* **Result Prompt lookup:** 
```
=============================== 
Total duration, ms: 3.02267
Draft model duration, ms: 0.000724718
Main model duration, ms: 3.02195
Draft model duration, %: 0.0239761
Main model duration, %: 99.976
AVG acceptance rate, %: 10.8333
=============================== 
Request_id: 0 ||| 0 0 0 0 0 0 0 0 20 20 0 0 0 0 20 100 80 0 0 0 0 0 0 60 0 0 20 0 0 0 0 0 20 0 0 50
```
* **Result Greedy:** 
```
=============================== 
Total duration, ms: 3.18111
Draft model duration, ms: 1.538e-06
Main model duration, ms: 3.18111
Draft model duration, %: 4.83479e-05
Main model duration, %: 100
AVG acceptance rate, %: -nan
===============================
```
* **Speedup**: 5.24% with 100 generated tokens and 81% with 300 generated
tokens (9.42 vs 5.19, i.e. 9.42 / 5.19 ≈ 1.81×)
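
(For reference, the `Request_id` line in the prompt-lookup output appears to list the per-step acceptance rate: its 36 values sum to 390, and 390 / 36 ≈ 10.83 matches the reported `AVG acceptance rate` of 10.8333%.)

For background, prompt lookup decoding proposes draft tokens by matching the tail of the already-processed token sequence against n-grams that occur earlier in it (typically in the prompt); the tokens that followed the matched n-gram become the draft window that the main model verifies in a single pass. Below is a self-contained sketch of that candidate search; the function name `find_candidates` and its parameters are hypothetical illustrations, not this PR's actual implementation:

```cpp
#include <algorithm>
#include <cstddef>
#include <cstdint>
#include <vector>

// Find the longest suffix n-gram of `tokens` (up to max_ngram_size) that
// re-occurs earlier in the sequence, and return up to num_pred_tokens of
// the tokens that followed that earlier occurrence as draft candidates.
std::vector<int64_t> find_candidates(const std::vector<int64_t>& tokens,
                                     size_t max_ngram_size,
                                     size_t num_pred_tokens) {
    for (size_t n = std::min(max_ngram_size, tokens.size()); n > 0; --n) {
        const size_t suffix_begin = tokens.size() - n;  // start of the suffix n-gram
        for (size_t pos = 0; pos + n <= suffix_begin; ++pos) {
            if (std::equal(tokens.begin() + pos, tokens.begin() + pos + n,
                           tokens.begin() + suffix_begin)) {
                // Match found: the tokens right after it become the draft.
                const size_t cand_begin = pos + n;
                const size_t cand_len =
                    std::min(num_pred_tokens, tokens.size() - cand_begin);
                return {tokens.begin() + cand_begin,
                        tokens.begin() + cand_begin + cand_len};
            }
        }
    }
    return {};  // no match: this step degenerates to plain greedy decoding
}
```

The zero entries in the acceptance-rate trace above correspond to steps where no useful match was found or no candidate was accepted, which is why prompt lookup helps most on inputs whose continuations repeat prompt content, such as code.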

---------

Co-authored-by: Ilya Lavrenov <[email protected]>
iefode and ilya-lavrenov authored Dec 18, 2024
1 parent 7d2a303 commit 9bcadf7
Showing 27 changed files with 606 additions and 406 deletions.
31 changes: 8 additions & 23 deletions .github/workflows/causal_lm_cpp.yml
@@ -491,7 +491,6 @@ jobs:
python -m pip install ./thirdparty/openvino_tokenizers/[transformers] --extra-index-url https://storage.openvinotoolkit.org/simple/wheels/nightly
python -m pip install -r ./samples/requirements.txt
optimum-cli export openvino --trust-remote-code --weight-format fp16 --model TinyLlama/TinyLlama-1.1B-Chat-v1.0 TinyLlama-1.1B-Chat-v1.0
optimum-cli export openvino --trust-remote-code --weight-format fp16 --model Qwen/Qwen-7B-Chat Qwen-7B-Chat --task text-generation-with-past
- name: run and compare
run: |
source ./ov/setupvars.sh
@@ -505,36 +504,22 @@ jobs:
./build/samples/cpp/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_prompt_lookup.txt
./build/samples/cpp/text_generation/greedy_causal_lm ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_greedy.txt
python ./samples/python/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm.py ./TinyLlama-1.1B-Chat-v1.0/ "$(<prompt.txt)" > predictions_py.txt
python -c "
with open('predictions_greedy.txt', 'r') as f:
predicted_greedy = f.readline()
with open('predictions_prompt_lookup.txt', 'r') as f:
predicted_prompt_lookup = f.readline()
with open('predictions_py.txt', 'r') as f:
predicted_prompt_lookup_py = f.readline()
assert predicted_greedy == predicted_prompt_lookup
assert predicted_greedy == predicted_prompt_lookup_py
assert predicted_prompt_lookup == predicted_prompt_lookup_py
"
echo "Prompt lookup" passed
- name: run and compare (model with seq_length_axis = 1)
run: |
source ./ov/setupvars.sh
echo 'Code:```python
def add(a, b):
return a + b
```
Question: Can you please add 2 and 3
A:' > ./prompt.txt
./build/samples/cpp/prompt_lookup_decoding_lm/prompt_lookup_decoding_lm ./Qwen-7B-Chat/ "$(<prompt.txt)" > predictions_prompt_lookup.txt
./build/samples/cpp/text_generation/greedy_causal_lm ./Qwen-7B-Chat/ "$(<prompt.txt)" > predictions_greedy.txt
python -c "
with open('predictions_greedy.txt', 'r') as f:
predicted_greedy = f.readline()
with open('predictions_prompt_lookup.txt', 'r') as f:
predicted_prompt_lookup = f.readline()
assert predicted_greedy == predicted_prompt_lookup
"
echo "Prompt lookup" passed
env:
PYTHONPATH: "./build/:$PYTHONPATH"
LD_LIBRARY_PATH: "./build/openvino_genai/:$LD_LIBRARY_PATH"
cpp-Phi-1_5:
runs-on: ubuntu-20.04-16-cores
defaults:
21 changes: 7 additions & 14 deletions samples/cpp/prompt_lookup_decoding_lm/CMakeLists.txt
@@ -1,30 +1,23 @@
# Copyright (C) 2023-2024 Intel Corporation
# SPDX-License-Identifier: Apache-2.0

find_package(OpenVINO REQUIRED COMPONENTS Runtime Threading)

find_package(OpenVINOGenAI REQUIRED
PATHS
"${CMAKE_BINARY_DIR}" # Reuse the package from the build.
${OpenVINO_DIR} # GenAI may be installed alongside OpenVINO.
NO_CMAKE_FIND_ROOT_PATH
)

add_executable(prompt_lookup_decoding_lm prompt_lookup_decoding_lm.cpp)
target_link_libraries(prompt_lookup_decoding_lm PRIVATE openvino::runtime openvino::threading)
set_target_properties(prompt_lookup_decoding_lm PROPERTIES
COMPILE_PDB_NAME prompt_lookup_decoding_lm
set(TARGET_NAME prompt_lookup_decoding_lm)
add_executable(${TARGET_NAME} ${TARGET_NAME}.cpp)
target_link_libraries(${TARGET_NAME} PRIVATE openvino::genai)

set_target_properties(${TARGET_NAME} PROPERTIES
COMPILE_PDB_NAME ${TARGET_NAME}
# Ensure out of box LC_RPATH on macOS with SIP
INSTALL_RPATH_USE_LINK_PATH ON)
target_compile_features(prompt_lookup_decoding_lm PRIVATE cxx_std_17)

get_target_property(genai_imported openvino::genai IMPORTED_LOCATION)
set(OPENVINO_TOKENIZERS_PATH $<IF:$<BOOL:${genai_imported}>,${genai_imported},$<TARGET_FILE_DIR:openvino::genai>>)
set(OPENVINO_TOKENIZERS_FILENAME "${CMAKE_SHARED_LIBRARY_PREFIX}openvino_tokenizers${CMAKE_SHARED_LIBRARY_SUFFIX}")
target_compile_definitions(prompt_lookup_decoding_lm PRIVATE
OPENVINO_TOKENIZERS_PATH="${OPENVINO_TOKENIZERS_PATH}/${OPENVINO_TOKENIZERS_FILENAME}")

install(TARGETS prompt_lookup_decoding_lm
install(TARGETS ${TARGET_NAME}
RUNTIME DESTINATION samples_bin/
COMPONENT samples_bin
EXCLUDE_FROM_ALL)