diff --git a/demo/chatbot_multimodal/run.ipynb b/demo/chatbot_multimodal/run.ipynb index 450ac4c23a9f1..8130dab1c330a 100644 --- a/demo/chatbot_multimodal/run.ipynb +++ b/demo/chatbot_multimodal/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: chatbot_multimodal"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "def add_text(history, text):\n", " history = history + [(text, None)]\n", " return history, \"\"\n", "\n", "def add_file(history, file):\n", " history = history + [((file.name,), None)]\n", " return history\n", "\n", "def bot(history):\n", " response = \"**That's cool!**\"\n", " history[-1][1] = response\n", " return history\n", "\n", "with gr.Blocks() as demo:\n", " chatbot = gr.Chatbot([], elem_id=\"chatbot\").style(height=750)\n", " \n", " with gr.Row():\n", " with gr.Column(scale=0.85):\n", " txt = gr.Textbox(\n", " show_label=False,\n", " placeholder=\"Enter text and press enter, or upload an image\",\n", " ).style(container=False)\n", " with gr.Column(scale=0.15, min_width=0):\n", " btn = gr.UploadButton(\"\ud83d\udcc1\", file_types=[\"image\", \"video\", \"audio\"])\n", " \n", " txt.submit(add_text, [chatbot, txt], [chatbot, txt]).then(\n", " bot, chatbot, chatbot\n", " )\n", " btn.upload(add_file, [chatbot, btn], [chatbot]).then(\n", " bot, chatbot, chatbot\n", " )\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: chatbot_multimodal"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "\n", "def add_text(history, text):\n", " history = history + [(text, None)]\n", " return history, gr.update(value=\"\", interactive=False)\n", "\n", "\n", "def add_file(history, file):\n", " history = history + [((file.name,), None)]\n", " return history\n", "\n", "\n", "def bot(history):\n", " response = \"**That's cool!**\"\n", " history[-1][1] = response\n", " return history\n", "\n", "\n", "with gr.Blocks() as demo:\n", " chatbot = gr.Chatbot([], elem_id=\"chatbot\").style(height=750)\n", "\n", " with gr.Row():\n", " with gr.Column(scale=0.85):\n", " txt = gr.Textbox(\n", " show_label=False,\n", " placeholder=\"Enter text and press enter, or upload an image\",\n", " ).style(container=False)\n", " with gr.Column(scale=0.15, min_width=0):\n", " btn = gr.UploadButton(\"\ud83d\udcc1\", file_types=[\"image\", \"video\", \"audio\"])\n", "\n", " txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then(\n", " bot, chatbot, chatbot\n", " )\n", " txt_msg.then(lambda: gr.update(interactive=True), None, [txt], queue=False)\n", " file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False).then(\n", " bot, chatbot, chatbot\n", " )\n", "\n", "if __name__ == \"__main__\":\n", " 
demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/chatbot_multimodal/run.py b/demo/chatbot_multimodal/run.py index 1bc15a67fbc6c..824c7c0fc761f 100644 --- a/demo/chatbot_multimodal/run.py +++ b/demo/chatbot_multimodal/run.py @@ -1,21 +1,25 @@ import gradio as gr + def add_text(history, text): history = history + [(text, None)] - return history, "" + return history, gr.update(value="", interactive=False) + def add_file(history, file): history = history + [((file.name,), None)] return history + def bot(history): response = "**That's cool!**" history[-1][1] = response return history + with gr.Blocks() as demo: chatbot = gr.Chatbot([], elem_id="chatbot").style(height=750) - + with gr.Row(): with gr.Column(scale=0.85): txt = gr.Textbox( @@ -24,11 +28,12 @@ def bot(history): ).style(container=False) with gr.Column(scale=0.15, min_width=0): btn = gr.UploadButton("📁", file_types=["image", "video", "audio"]) - - txt.submit(add_text, [chatbot, txt], [chatbot, txt]).then( + + txt_msg = txt.submit(add_text, [chatbot, txt], [chatbot, txt], queue=False).then( bot, chatbot, chatbot ) - btn.upload(add_file, [chatbot, btn], [chatbot]).then( + txt_msg.then(lambda: gr.update(interactive=True), None, [txt], queue=False) + file_msg = btn.upload(add_file, [chatbot, btn], [chatbot], queue=False).then( bot, chatbot, chatbot ) diff --git a/demo/chatbot_streaming/run.ipynb b/demo/chatbot_streaming/run.ipynb index 72a22daacc75d..7497baa3c3a7e 100644 --- a/demo/chatbot_streaming/run.ipynb +++ b/demo/chatbot_streaming/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: chatbot_streaming"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import random\n", "import time\n", "\n", "with gr.Blocks() as demo:\n", " chatbot = gr.Chatbot()\n", " msg = gr.Textbox()\n", " clear = gr.Button(\"Clear\")\n", "\n", " def user(user_message, history):\n", " return \"\", history + [[user_message, None]]\n", "\n", " def bot(history):\n", " bot_message = random.choice([\"How are you?\", \"I love you\", \"I'm very hungry\"])\n", " history[-1][1] = \"\"\n", " for character in bot_message:\n", " history[-1][1] += character\n", " time.sleep(0.05)\n", " yield history\n", "\n", " msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(\n", " bot, chatbot, chatbot\n", " )\n", " clear.click(lambda: None, None, chatbot, queue=False)\n", " \n", "demo.queue()\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: chatbot_streaming"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import random\n", "import time\n", "\n", "with gr.Blocks() as demo:\n", " chatbot = gr.Chatbot()\n", " msg = gr.Textbox()\n", " clear = 
gr.Button(\"Clear\")\n", "\n", " def user(user_message, history):\n", " return gr.update(value=\"\", interactive=False), history + [[user_message, None]]\n", "\n", " def bot(history):\n", " bot_message = random.choice([\"How are you?\", \"I love you\", \"I'm very hungry\"])\n", " history[-1][1] = \"\"\n", " for character in bot_message:\n", " history[-1][1] += character\n", " time.sleep(0.05)\n", " yield history\n", "\n", " response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(\n", " bot, chatbot, chatbot\n", " )\n", " response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)\n", " clear.click(lambda: None, None, chatbot, queue=False)\n", "\n", "demo.queue()\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/chatbot_streaming/run.py b/demo/chatbot_streaming/run.py index 3c559715121ae..c236c91d0a40d 100644 --- a/demo/chatbot_streaming/run.py +++ b/demo/chatbot_streaming/run.py @@ -8,7 +8,7 @@ clear = gr.Button("Clear") def user(user_message, history): - return "", history + [[user_message, None]] + return gr.update(value="", interactive=False), history + [[user_message, None]] def bot(history): bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"]) @@ -18,11 +18,12 @@ def bot(history): time.sleep(0.05) yield history - msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then( + response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then( bot, chatbot, chatbot ) + response.then(lambda: gr.update(interactive=True), None, [msg], queue=False) clear.click(lambda: None, None, chatbot, queue=False) - + demo.queue() if __name__ == "__main__": demo.launch() diff --git a/guides/07_other-tutorials/creating-a-chatbot.md b/guides/07_other-tutorials/creating-a-chatbot.md index c15cb7202cca9..0d6b2de364bfe 100644 --- a/guides/07_other-tutorials/creating-a-chatbot.md +++ b/guides/07_other-tutorials/creating-a-chatbot.md @@ -44,12 +44,14 @@ There are several ways we can improve the user experience of the chatbot above. $code_chatbot_streaming -You'll notice that when a user submits their message, we now *chain* two event events with `.then()`: +You'll notice that when a user submits their message, we now *chain* three event events with `.then()`: -1. The first method `user()` updates the chatbot with the user message and clears the input field. Because we want this to happen instantly, we set `queue=False`, which would skip any queue if it had been enabled. The chatbot's history is appended with `(user_message, None)`, the `None` signifying that the bot has not responded. +1. The first method `user()` updates the chatbot with the user message and clears the input field. This method also makes the input field non interactive so that the user can't send another message while the chatbot is responding. Because we want this to happen instantly, we set `queue=False`, which would skip any queue had it been enabled. The chatbot's history is appended with `(user_message, None)`, the `None` signifying that the bot has not responded. 2. The second method, `bot()` updates the chatbot history with the bot's response. Instead of creating a new message, we just replace the previously-created `None` message with the bot's response. Finally, we construct the message character by character and `yield` the intermediate outputs as they are being constructed. 
Gradio automatically turns any function with the `yield` keyword [into a streaming output interface](/key-features/#iterative-outputs). +3. The third method makes the input field interactive again so that users can send another message to the bot. + Of course, in practice, you would replace `bot()` with your own more complex function, which might call a pretrained model or an API, to generate a response. Finally, we enable queuing by running `demo.queue()`, which is required for streaming intermediate outputs. You can try the improved chatbot by scrolling to the demo at the top of this page.
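Putting the three chained events together, the pattern the updated guide describes reduces to roughly the following sketch, condensed from the `chatbot_streaming` demo in this diff (it assumes the same `gr.update` / `.then` / `queue=False` API used there, and the canned `bot()` reply is only a stand-in for a real model or API call):

```python
import random
import time

import gradio as gr

with gr.Blocks() as demo:
    chatbot = gr.Chatbot()
    msg = gr.Textbox()

    def user(user_message, history):
        # Step 1: clear the textbox and lock it while the bot is responding,
        # and append (user_message, None) to the history.
        return gr.update(value="", interactive=False), history + [[user_message, None]]

    def bot(history):
        # Step 2: stream the reply one character at a time into the last history entry.
        bot_message = random.choice(["How are you?", "I love you", "I'm very hungry"])
        history[-1][1] = ""
        for character in bot_message:
            history[-1][1] += character
            time.sleep(0.05)
            yield history

    # Chain the three events: lock the input, stream the bot reply, then unlock the input.
    response = msg.submit(user, [msg, chatbot], [msg, chatbot], queue=False).then(
        bot, chatbot, chatbot
    )
    response.then(lambda: gr.update(interactive=True), None, [msg], queue=False)

demo.queue()  # required so the intermediate `yield`s are streamed to the UI

if __name__ == "__main__":
    demo.launch()
```

Note that `queue=False` keeps the instant UI updates (steps 1 and 3) off the queue, while the streaming `bot()` step stays on it, which is why `demo.queue()` is still required.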