Skip to content

Commit

Permalink
Browse files Browse the repository at this point in the history
…xamples into enhancement/news-summarizer
  • Loading branch information
MarcSkovMadsen committed Feb 3, 2024
2 parents 8f23251 + d0d719f commit 75b049c
Show file tree
Hide file tree
Showing 12 changed files with 120 additions and 42 deletions.
Binary file added docs/assets/images/panel-chat-examples-card.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/assets/thumbnails/langchain_lcel.png
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Loading
Sorry, something went wrong. Reload?
Sorry, we cannot display this file.
Sorry, this file is invalid so it cannot be displayed.
Binary file added docs/assets/videos/langchain_lcel.mp4
Binary file not shown.
Binary file not shown.
34 changes: 34 additions & 0 deletions docs/examples/langchain/langchain_lcel.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,34 @@
"""
Demonstrates how to use the `ChatInterface` to create a chatbot using
[LangChain Expression Language](https://python.langchain.com/docs/expression_language/) (LCEL).
"""

import panel as pn
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_openai import ChatOpenAI

pn.extension()


async def callback(contents, user, instance):
    """Stream the LCEL chain's reply, yielding the progressively longer text.

    Panel calls this with the user's message (``contents``); each ``yield``
    replaces the chat message in place, producing a typing effect.
    """
    parts = []
    async for chunk in chain.astream(contents):
        parts.append(chunk)
        yield "".join(parts)


# LCEL pipeline: the leading dict routes the raw user message into the
# prompt's {topic} placeholder; the parser turns chat messages into plain str.
prompt = ChatPromptTemplate.from_template("Tell me a top-notch joke about {topic}")
model = ChatOpenAI(model="gpt-3.5-turbo")
output_parser = StrOutputParser()
chain = {"topic": RunnablePassthrough()} | prompt | model | output_parser

# Wire the chat UI: the seed ChatMessage shows usage instructions, and every
# user submission is handled by `callback`, attributed to "ChatGPT".
chat_interface = pn.chat.ChatInterface(
    pn.chat.ChatMessage(
        "Offer a topic and ChatGPT will respond with a joke!", user="System"
    ),
    callback=callback,
    callback_user="ChatGPT",
)
chat_interface.servable()
56 changes: 56 additions & 0 deletions docs/examples/langchain/langchain_streaming_lcel_with_memory.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,56 @@
"""
Demonstrates how to use the `ChatInterface` to create a chatbot using
[LangChain Expression Language](https://python.langchain.com/docs/expression_language/) (LCEL)
with streaming and memory.
"""

from operator import itemgetter

import panel as pn
from langchain.memory import ConversationSummaryBufferMemory
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableLambda, RunnablePassthrough
from langchain_openai import ChatOpenAI

pn.extension()

SYSTEM_PROMPT = "Try to be a silly comedian."


async def callback(contents, user, instance):
    """Stream the chain's reply, then persist the full exchange to memory."""
    payload = {"input": contents}  # key must match the "{input}" prompt slot
    response = ""
    async for chunk in chain.astream(payload):
        response += chunk
        yield response
    # Runs after the stream is exhausted: record the completed turn so the
    # summary-buffer memory can supply "history" on the next request.
    memory.save_context(payload, {"output": response})


model = ChatOpenAI(model="gpt-3.5-turbo")
# The same model is reused to summarize older turns once the buffer overflows.
memory = ConversationSummaryBufferMemory(return_messages=True, llm=model)
prompt = ChatPromptTemplate.from_messages(
    [
        ("system", SYSTEM_PROMPT),
        MessagesPlaceholder(variable_name="history"),
        ("human", "{input}"),
    ]
)
output_parser = StrOutputParser()
# LCEL pipeline: inject the stored conversation as "history" before the
# prompt runs; itemgetter unwraps the dict returned by load_memory_variables.
chain = (
    RunnablePassthrough.assign(
        history=RunnableLambda(memory.load_memory_variables) | itemgetter("history")
    )
    | prompt
    | model
    | output_parser
)

# Wire the chat UI: seed it with usage instructions and route every user
# submission through `callback`, attributing responses to "ChatGPT".
chat_interface = pn.chat.ChatInterface(
    pn.chat.ChatMessage(
        "Offer a topic and ChatGPT will try to be funny!", user="System"
    ),
    callback=callback,
    callback_user="ChatGPT",
)
chat_interface.servable()
12 changes: 11 additions & 1 deletion docs/external_resources.md
Original file line number Diff line number Diff line change
Expand Up @@ -92,12 +92,22 @@ Links: [Medium](https://sophiamyang.medium.com/building-ai-chatbots-with-mistral

The news summarizer app by [Eduardo Blancas](https://github.com/edublancas) is a RAG built *from scratch* without any LLM framework. It's deployed to [Ploomber Cloud](https://ploomber.io/cloud/). The methodology is nicely described in [rag-from-scratch.ipynb](https://github.com/ploomber/doc/blob/main/notebooks/rag-from-scratch.ipynb).

Try asking `What is the latest sports news?`.

<iframe
src="https://ancient-wind-3019.ploomberapp.io/app"
frameborder="0"
style="width:100%;height:800px"
style="width:100%;height:600px"
></iframe>
### Talk with Plot

Authors: [Andrew Huang](https://twitter.com/IAteAnDrew1) | [Sophia Yang](https://twitter.com/sophiamyang)

Links: [Blog Post](https://blog.holoviz.org/posts/tweak-mpl-chat/) | [Hugging Face Spaces](https://huggingface.co/spaces/ahuang11/tweak-mpl-chat)

![Talk to Plot](https://blog.holoviz.org/posts/tweak-mpl-chat/images/app1.gif)

## Ragna

[Ragna](https://ragna.chat/) is an open source RAG orchestration framework. The UI is built on top of Panel.
Expand Down
8 changes: 4 additions & 4 deletions overrides/main.html
Original file line number Diff line number Diff line change
Expand Up @@ -12,13 +12,13 @@
<meta name="twitter:creator" content="@Panel_org" />
<meta name="twitter:title" content="{{ title }}" />
<meta name="twitter:description" content="{{ config.site_description }}" />
<meta name="twitter:image" content="https://holoviz-topics.github.io/panel-chat-examples/assets/images/panel-chat-examples-card-800x418.png?version=2" />
<meta name="twitter:image" content="https://holoviz-topics.github.io/panel-chat-examples/assets/images/panel-chat-examples-card.png?version=1" />
<meta property="og:type" content="website" />
<meta property="og:title" content="{{ title }}" />
<meta property="og:description" content="{{ config.site_description }}" />
<meta property="og:url" content="https://holoviz-topics.github.io/panel-chat-examples" />
<meta property="og:image" content="https://holoviz-topics.github.io/panel-chat-examples/assets/images/panel-chat-examples-card-1600x837.png?version=2" />
<meta property="og:image" content="https://holoviz-topics.github.io/panel-chat-examples/assets/images/panel-chat-examples-card.png?version=1" />
<meta property="og:image:type" content="image/png" />
<meta property="og:image:width" content="1600" />
<meta property="og:image:height" content="837" />
<meta property="og:image:width" content="1024" />
<meta property="og:image:height" content="1024" />
{% endblock %}
1 change: 1 addition & 0 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -99,6 +99,7 @@ dependencies = [
"tabulate",
"tiktoken",
"mistralai",
"langchain-openai",
]

[project.urls]
Expand Down
37 changes: 0 additions & 37 deletions scripts/social_card_app.py

This file was deleted.

14 changes: 14 additions & 0 deletions tests/ui/user.py
Original file line number Diff line number Diff line change
Expand Up @@ -169,6 +169,20 @@ def langchain_pdf_assistant(page: Page):
page.wait_for_timeout(10000)


def langchain_lcel(page: Page):
    """Drive the langchain_lcel example: submit a joke topic and wait for a reply."""
    ChatInterface(page).send("Python")
    page.wait_for_timeout(5000)


def langchain_streaming_lcel_with_memory(page: Page):
    """Exercise the memory example: state a fact, then ask for it back."""
    chat = ChatInterface(page)
    for message in (
        "Remember this number: 8. Be concise.",
        "What number did I just ask you to remember?",
    ):
        chat.send(message)
        page.wait_for_timeout(10000)


def mistral_and_llama(page: Page):
chat = ChatInterface(page)
chat.send("What do you think about HoloViz in a single sentence?")
Expand Down

0 comments on commit 75b049c

Please sign in to comment.