Skip to content

Commit

Permalink
Fix to throw exception in case of empty chat template in chat scenario
Browse files Browse the repository at this point in the history
  • Loading branch information
olpipi committed Jul 29, 2024
1 parent 9d35767 commit ce57885
Show file tree
Hide file tree
Showing 3 changed files with 19 additions and 1 deletion.
6 changes: 6 additions & 0 deletions src/cpp/include/openvino/genai/tokenizer.hpp
Original file line number Diff line number Diff line change
Expand Up @@ -83,6 +83,12 @@ class OPENVINO_GENAI_EXPORTS Tokenizer {
bool add_generation_prompt,
const std::string& chat_template="") const;

/**
 * @brief Returns true if a chat template exists and is ready to be applied; false otherwise.
 * @return whether the chat template is ready to be applied
 */
bool is_chat_template_ready() const;

// information about <bos>, <eos> tokens should be public,
// they are used at least in StreamerBase descendants
int64_t get_bos_token_id() const;
Expand Down
5 changes: 5 additions & 0 deletions src/cpp/src/llm_pipeline.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -266,11 +266,16 @@ class StatefulLLMPipeline final : public LLMPipelineImplBase {
m_history = {};
m_templated_chat_history = "";
}
OPENVINO_ASSERT(m_tokenizer.is_chat_template_ready(),
"There is no existing chat template for actual model. LLMPipeline cannot work in chat mode."
" Please add chat template to tokenizer_config.json or use another model.");

if (system_message.empty())
return;

m_history.push_back({{"role", "system"}, {"content", system_message}});
constexpr bool add_generation_prompt = false;

m_templated_chat_history = m_tokenizer.apply_chat_template(m_history, add_generation_prompt);
}

Expand Down
9 changes: 8 additions & 1 deletion src/cpp/src/tokenizer.cpp
Original file line number Diff line number Diff line change
Expand Up @@ -434,7 +434,9 @@ class Tokenizer::TokenizerImpl {
}
}


bool is_chat_template_ready() {
return !m_chat_template.empty();
}
};

Tokenizer::Tokenizer(const std::string& tokenizer_path, const ov::AnyMap& plugin_config) {
Expand Down Expand Up @@ -500,6 +502,11 @@ std::string Tokenizer::apply_chat_template(ChatHistory history,
return m_pimpl->apply_chat_template(history, add_generation_prompt, chat_template);
}

// Public API: forwards the chat-template readiness check to the pimpl.
// Note: the original had a stray ';' after the closing brace (an empty
// declaration at namespace scope) — removed.
bool Tokenizer::is_chat_template_ready() const {
    return m_pimpl->is_chat_template_ready();
}


Tokenizer::~Tokenizer() = default;
} // namespace genai
} // namespace ov

0 comments on commit ce57885

Please sign in to comment.