diff --git a/src/cpp/include/openvino/genai/tokenizer.hpp b/src/cpp/include/openvino/genai/tokenizer.hpp
index 425c30128b..0b71ccfb9e 100644
--- a/src/cpp/include/openvino/genai/tokenizer.hpp
+++ b/src/cpp/include/openvino/genai/tokenizer.hpp
@@ -83,6 +83,12 @@ class OPENVINO_GENAI_EXPORTS Tokenizer {
                                     bool add_generation_prompt,
                                     const std::string& chat_template="") const;
 
+    /**
+     * @brief Returns true if a chat template exists and is ready to be applied; false otherwise.
+     * @return true if the chat template is ready to be applied
+     */
+    bool is_chat_template_ready() const;
+
     // information about <bos>, <eos> tokens should be public,
     // they are used at least in StreamerBase descendants
     int64_t get_bos_token_id() const;
diff --git a/src/cpp/src/llm_pipeline.cpp b/src/cpp/src/llm_pipeline.cpp
index 507d988a6a..e86ada33c5 100644
--- a/src/cpp/src/llm_pipeline.cpp
+++ b/src/cpp/src/llm_pipeline.cpp
@@ -266,11 +266,16 @@ class StatefulLLMPipeline final : public LLMPipelineImplBase {
             m_history = {};
             m_templated_chat_history = "";
         }
+        // Fail fast: chat mode is impossible without a chat template in tokenizer_config.json.
+        OPENVINO_ASSERT(m_tokenizer.is_chat_template_ready(),
+                        "There is no chat template for the current model, so LLMPipeline cannot work in chat mode."
+                        " Please add a chat template to tokenizer_config.json or use another model.");
+
         if (system_message.empty())
             return;
 
         m_history.push_back({{"role", "system"}, {"content", system_message}});
         constexpr bool add_generation_prompt = false;
+
         m_templated_chat_history = m_tokenizer.apply_chat_template(m_history, add_generation_prompt);
     }
 
diff --git a/src/cpp/src/tokenizer.cpp b/src/cpp/src/tokenizer.cpp
index b1e36033ee..98a9d1a027 100644
--- a/src/cpp/src/tokenizer.cpp
+++ b/src/cpp/src/tokenizer.cpp
@@ -434,7 +434,9 @@ class Tokenizer::TokenizerImpl {
         }
     }
 
-
+    // A template read from tokenizer_config.json is "ready" iff it is non-empty.
+    bool is_chat_template_ready() const {
+        return !m_chat_template.empty();
+    }
 };
 
 Tokenizer::Tokenizer(const std::string& tokenizer_path, const ov::AnyMap& plugin_config) {
@@ -500,6 +502,10 @@ std::string Tokenizer::apply_chat_template(ChatHistory history,
     return m_pimpl->apply_chat_template(history, add_generation_prompt, chat_template);
 }
 
+bool Tokenizer::is_chat_template_ready() const {
+    return m_pimpl->is_chat_template_ready();
+}
+
 Tokenizer::~Tokenizer() = default;
 
 } // namespace genai