From 1e2712c43b76eaef2c920131d7c45fb672bce771 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 23 Feb 2023 15:59:39 -0800 Subject: [PATCH 01/93] placeholder --- client/python/__init__.py | 1 + client/python/client.py | 2 ++ 2 files changed, 3 insertions(+) create mode 100644 client/python/__init__.py create mode 100644 client/python/client.py diff --git a/client/python/__init__.py b/client/python/__init__.py new file mode 100644 index 0000000000000..136d06384a4c9 --- /dev/null +++ b/client/python/__init__.py @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/client/python/client.py b/client/python/client.py new file mode 100644 index 0000000000000..5892ab1cec2f7 --- /dev/null +++ b/client/python/client.py @@ -0,0 +1,2 @@ +class Client: + pass \ No newline at end of file From 1e34aa7c94a3672fd0417574b32bd7846338dbde Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 24 Feb 2023 01:35:12 -0800 Subject: [PATCH 02/93] changelog --- CHANGELOG.md | 1 + client/python/README.md | 0 client/python/client.py | 3 +++ 3 files changed, 4 insertions(+) create mode 100644 client/python/README.md diff --git a/CHANGELOG.md b/CHANGELOG.md index a2d6f884b899e..247e33dc307df 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -24,6 +24,7 @@ By [@freddyaboulton](https://github.com/freddyaboulton) in [PR 3297](https://git - Adds a disabled mode to the `gr.Button` component by setting `interactive=False` by [@abidlabs](https://github.com/abidlabs) in [PR 3266](https://github.com/gradio-app/gradio/pull/3266) and [PR 3288](https://github.com/gradio-app/gradio/pull/3288) - Allow the setting of `brush_radius` for the `Image` component both as a default and via `Image.update()` by [@pngwn](https://github.com/pngwn) in [PR 3277](https://github.com/gradio-app/gradio/pull/3277) - Added `info=` argument to form components to enable extra context provided to users, by [@aliabid94](https://github.com/aliabid94) in [PR 3291](https://github.com/gradio-app/gradio/pull/3291) +- Adds a 
Python client class `Client()` that makes it easier to connect to hosted Gradio apps and make predictions from them, by [@abidlabs](https://github.com/abidlabs) in [PR 3300](https://github.com/gradio-app/gradio/pull/3300) ## Bug Fixes: - Ensure `mirror_webcam` is always respected by [@pngwn](https://github.com/pngwn) in [PR 3245](https://github.com/gradio-app/gradio/pull/3245) diff --git a/client/python/README.md b/client/python/README.md new file mode 100644 index 0000000000000..e69de29bb2d1d diff --git a/client/python/client.py b/client/python/client.py index 5892ab1cec2f7..801d5818ba7ad 100644 --- a/client/python/client.py +++ b/client/python/client.py @@ -1,2 +1,5 @@ +""" +""" + class Client: pass \ No newline at end of file From 5f0daa44abe1232a1bfbd52c8a69ab63b15a9a05 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 24 Feb 2023 01:50:03 -0800 Subject: [PATCH 03/93] added to readme --- client/python/README.md | 48 +++++++++++++++++++++++++++++++++++++++++ 1 file changed, 48 insertions(+) diff --git a/client/python/README.md b/client/python/README.md index e69de29bb2d1d..feded429c8f1b 100644 --- a/client/python/README.md +++ b/client/python/README.md @@ -0,0 +1,48 @@ +# `gradio_client`: Use any Gradio app as an API -- in 2 lines of Python + +This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API. This library is + +As an example, consider the Stable Diffusion Gradio app, which is hosted on Hugging Face Spaces, and which generates images given a text prompt. Using the `gradio_client` library, we can easily use the Gradio as an API to generates images programmatically. 
+ +Here's the entire code to do it: + +```python +import gradio_client as grc + +client = grc.Client(space="stability-ai/stable-diffusion") +client.run("a hyperrealistic portrait of a cat wearing cyberpunk armor") + +>> URL +``` + +## Installation + +The lightweight `gradio_client` package can be installed from pip: + +```bash +$ pip install gradio_client +``` + +## Usage + +### Connecting to a Space or a Gradio app + +1. Connecting to a Space +2. Connecting to a general Gradio app + +### Inspecting the API + +1. Listing all of the available APIs +2. Getting more info about the parameters for a specific API + +### Making a prediction + +1. client.run + +### Submitting a job (for asynchronous workflows) + +1. job = client.submit +2. job.status +3. callbacks + + From 3081ab2dd0ef38584b6474a1f1d50862a3635b93 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 24 Feb 2023 02:01:35 -0800 Subject: [PATCH 04/93] client --- client/python/client.py | 13 ++++++++++--- 1 file changed, 10 insertions(+), 3 deletions(-) diff --git a/client/python/client.py b/client/python/client.py index 801d5818ba7ad..8a3a11db6aad2 100644 --- a/client/python/client.py +++ b/client/python/client.py @@ -1,5 +1,12 @@ -""" -""" +"""The main Client class for the Python client.""" class Client: - pass \ No newline at end of file + def __init__(self, space, src): + # Create persistent websocket connection + pass + + def run(self, args, api_name): + pass + + def submit(self, args, api_name): + pass \ No newline at end of file From 4123328f2eede83dd808c9d2a465045b6534da2b Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 1 Mar 2023 01:07:08 -0800 Subject: [PATCH 05/93] implement futures --- client/python/README.md | 6 ++++-- client/python/client.py | 20 +++++++++++++++----- 2 files changed, 19 insertions(+), 7 deletions(-) diff --git a/client/python/README.md b/client/python/README.md index feded429c8f1b..720ce48616eb0 100644 --- a/client/python/README.md +++ b/client/python/README.md @@ -1,4 +1,4 @@ 
-# `gradio_client`: Use any Gradio app as an API -- in 2 lines of Python +# `gradio_client`: Use any Gradio app as an API -- in 3 lines of Python This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API. This library is @@ -10,9 +10,11 @@ Here's the entire code to do it: import gradio_client as grc client = grc.Client(space="stability-ai/stable-diffusion") -client.run("a hyperrealistic portrait of a cat wearing cyberpunk armor") +job = client.predict("a hyperrealistic portrait of a cat wearing cyberpunk armor") +job.result() >> URL + ``` ## Installation diff --git a/client/python/client.py b/client/python/client.py index 8a3a11db6aad2..fb2fdd3d1e93d 100644 --- a/client/python/client.py +++ b/client/python/client.py @@ -1,12 +1,22 @@ """The main Client class for the Python client.""" +import concurrent.futures +from concurrent.futures import Future + + class Client: - def __init__(self, space, src): + def __init__(self, space, src, max_workers=5): # Create persistent websocket connection - pass + self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - def run(self, args, api_name): + def predict_api(self, *args): + # Should make the connection with the remote API and return the result pass - def submit(self, args, api_name): - pass \ No newline at end of file + def __del__(self): + self.executor.shutdown(wait=True) + + def predict(self, args, api_name=None) -> Future: + future = self.executor.submit(self.predict_api, *args) + return future + From 0e0b459d0ff64dd6d7a6e575b638f921acb25fb9 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 1 Mar 2023 17:10:57 -0800 Subject: [PATCH 06/93] utils --- client/python/client.py | 14 ++++++++++++-- client/python/utils.py | 10 ++++++++++ 2 files changed, 22 insertions(+), 2 deletions(-) create mode 100644 client/python/utils.py diff --git a/client/python/client.py b/client/python/client.py index 
fb2fdd3d1e93d..9fde58fc36d67 100644 --- a/client/python/client.py +++ b/client/python/client.py @@ -3,9 +3,18 @@ import concurrent.futures from concurrent.futures import Future +import utils class Client: - def __init__(self, space, src, max_workers=5): + def __init__(self, space=None, src=None, max_workers=5): + if space is None and src is None: + raise ValueError('Either `space` or `src` must be provided') + elif space and src: + raise ValueError('Only one of `space` or `src` must be provided') + else: + self.space = space + self.src = src or utils.space_name_to_src(space) + # Create persistent websocket connection self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) @@ -14,7 +23,8 @@ def predict_api(self, *args): pass def __del__(self): - self.executor.shutdown(wait=True) + if hasattr(self, 'executor'): + self.executor.shutdown(wait=True) def predict(self, args, api_name=None) -> Future: future = self.executor.submit(self.predict_api, *args) diff --git a/client/python/utils.py b/client/python/utils.py new file mode 100644 index 0000000000000..cd7c1038db600 --- /dev/null +++ b/client/python/utils.py @@ -0,0 +1,10 @@ +import requests + +def space_name_to_src(space_name, access_token=None): + headers = {} if access_token is None else {"Authorization": "Bearer {api_key}"} + + return requests.get( + f"https://huggingface.co/api/spaces/{space_name}/host", headers=headers + ).json().get("host") + + \ No newline at end of file From da96e53d9da166550ee001865ed0277c4862f0d3 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 1 Mar 2023 17:14:58 -0800 Subject: [PATCH 07/93] scripts --- client/python/client.py | 18 +++++++++--------- client/python/utils.py | 11 +++++++---- gradio/utils.py | 4 ++-- scripts/format_backend.sh | 6 +++--- scripts/lint_backend.sh | 6 +++--- 5 files changed, 24 insertions(+), 21 deletions(-) diff --git a/client/python/client.py b/client/python/client.py index 9fde58fc36d67..531eeacb76920 100644 --- 
a/client/python/client.py +++ b/client/python/client.py @@ -5,28 +5,28 @@ import utils + class Client: def __init__(self, space=None, src=None, max_workers=5): if space is None and src is None: - raise ValueError('Either `space` or `src` must be provided') + raise ValueError("Either `space` or `src` must be provided") elif space and src: - raise ValueError('Only one of `space` or `src` must be provided') + raise ValueError("Only one of `space` or `src` must be provided") else: self.space = space self.src = src or utils.space_name_to_src(space) - + # Create persistent websocket connection self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - + def predict_api(self, *args): # Should make the connection with the remote API and return the result pass - + def __del__(self): - if hasattr(self, 'executor'): + if hasattr(self, "executor"): self.executor.shutdown(wait=True) - + def predict(self, args, api_name=None) -> Future: future = self.executor.submit(self.predict_api, *args) - return future - + return future diff --git a/client/python/utils.py b/client/python/utils.py index cd7c1038db600..ff56dd7500545 100644 --- a/client/python/utils.py +++ b/client/python/utils.py @@ -1,10 +1,13 @@ import requests + def space_name_to_src(space_name, access_token=None): headers = {} if access_token is None else {"Authorization": "Bearer {api_key}"} - return requests.get( + return ( + requests.get( f"https://huggingface.co/api/spaces/{space_name}/host", headers=headers - ).json().get("host") - - \ No newline at end of file + ) + .json() + .get("host") + ) diff --git a/gradio/utils.py b/gradio/utils.py index 0e6b626e0db3a..705b6d12e34a1 100644 --- a/gradio/utils.py +++ b/gradio/utils.py @@ -886,11 +886,11 @@ def tex2svg(formula, *args): svg_start = xml_code.index(".*<\/metadata>", "", svg_code, flags=re.DOTALL) - svg_code = re.sub(r' width="[^"]+"', '', svg_code) + svg_code = re.sub(r' width="[^"]+"', "", svg_code) height_match = 
re.search(r'height="([\d.]+)pt"', svg_code) if height_match: height = float(height_match.group(1)) - new_height = height / FONTSIZE # conversion from pt to em + new_height = height / FONTSIZE # conversion from pt to em svg_code = re.sub(r'height="[\d.]+pt"', f'height="{new_height}em"', svg_code) copy_code = f"{formula}" return f"{copy_code}{svg_code}" diff --git a/scripts/format_backend.sh b/scripts/format_backend.sh index dbab16eca22a6..d87257062ea42 100755 --- a/scripts/format_backend.sh +++ b/scripts/format_backend.sh @@ -3,6 +3,6 @@ cd "$(dirname ${0})/.." echo "Formatting the backend... Our style follows the Black code style." -python -m black gradio test -python -m isort --profile=black gradio test -python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 gradio test --exclude gradio/__init__.py +python -m black gradio test client/python +python -m isort --profile=black gradio test client/python +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 gradio test client/python --exclude gradio/__init__.py diff --git a/scripts/lint_backend.sh b/scripts/lint_backend.sh index a3c39b215f984..515ccd1751c27 100644 --- a/scripts/lint_backend.sh +++ b/scripts/lint_backend.sh @@ -2,6 +2,6 @@ cd "$(dirname ${0})/.." 
-python -m black --check gradio test -python -m isort --profile=black --check-only gradio test -python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 gradio test --exclude gradio/__init__.py \ No newline at end of file +python -m black --check gradio test client/python +python -m isort --profile=black --check-only gradio test client/python +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 gradio test client/python --exclude gradio/__init__.py \ No newline at end of file From a7da8e0f992a61bdd5c4086ec1bc66953bc3c21a Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 1 Mar 2023 17:16:22 -0800 Subject: [PATCH 08/93] lint --- client/python/utils.py | 1 - 1 file changed, 1 deletion(-) diff --git a/client/python/utils.py b/client/python/utils.py index ff56dd7500545..7e33b79d2da95 100644 --- a/client/python/utils.py +++ b/client/python/utils.py @@ -3,7 +3,6 @@ def space_name_to_src(space_name, access_token=None): headers = {} if access_token is None else {"Authorization": "Bearer {api_key}"} - return ( requests.get( f"https://huggingface.co/api/spaces/{space_name}/host", headers=headers From 72295d731cbea6a10f81f5a9c24a6604beece6f0 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 1 Mar 2023 18:26:49 -0800 Subject: [PATCH 09/93] reorg --- client/python/client.py | 32 -------------- client/python/{ => gradio_client}/README.md | 0 client/python/{ => gradio_client}/__init__.py | 0 client/python/gradio_client/client.py | 42 +++++++++++++++++++ client/python/{ => gradio_client}/utils.py | 3 ++ 5 files changed, 45 insertions(+), 32 deletions(-) delete mode 100644 client/python/client.py rename client/python/{ => gradio_client}/README.md (100%) rename client/python/{ => gradio_client}/__init__.py (100%) create mode 100644 client/python/gradio_client/client.py rename client/python/{ => gradio_client}/utils.py (85%) diff --git a/client/python/client.py b/client/python/client.py deleted file mode 100644 index 531eeacb76920..0000000000000 --- 
a/client/python/client.py +++ /dev/null @@ -1,32 +0,0 @@ -"""The main Client class for the Python client.""" - -import concurrent.futures -from concurrent.futures import Future - -import utils - - -class Client: - def __init__(self, space=None, src=None, max_workers=5): - if space is None and src is None: - raise ValueError("Either `space` or `src` must be provided") - elif space and src: - raise ValueError("Only one of `space` or `src` must be provided") - else: - self.space = space - self.src = src or utils.space_name_to_src(space) - - # Create persistent websocket connection - self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - - def predict_api(self, *args): - # Should make the connection with the remote API and return the result - pass - - def __del__(self): - if hasattr(self, "executor"): - self.executor.shutdown(wait=True) - - def predict(self, args, api_name=None) -> Future: - future = self.executor.submit(self.predict_api, *args) - return future diff --git a/client/python/README.md b/client/python/gradio_client/README.md similarity index 100% rename from client/python/README.md rename to client/python/gradio_client/README.md diff --git a/client/python/__init__.py b/client/python/gradio_client/__init__.py similarity index 100% rename from client/python/__init__.py rename to client/python/gradio_client/__init__.py diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py new file mode 100644 index 0000000000000..303c1ae5181fe --- /dev/null +++ b/client/python/gradio_client/client.py @@ -0,0 +1,42 @@ +"""The main Client class for the Python client.""" + +import concurrent.futures +from concurrent.futures import Future +from typing import Callable + +from gradio_client import utils + + +class Client: + def __init__( + self, + space: str | None = None, + src: str | None = None, + access_token: str | None = None, + max_workers: int = 5 + ): + if space is None and src is None: + raise 
ValueError("Either `space` or `src` must be provided") + elif space and src: + raise ValueError("Only one of `space` or `src` must be provided") + + self.space = space + self.src = src or utils.space_name_to_src(space) + self.api_url = utils.API_URL.format(self.src) + self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") + + # Create a pool of threads to handle the requests + self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) + + def get_predict_fn(self, api_name: str | None) -> Callable: + # Should make the connection with the remote API and return the result + return lambda *args: None + + def __del__(self): + if hasattr(self, "executor"): + self.executor.shutdown(wait=True) + + def predict(self, args, api_name: str | None = None) -> Future: + predict_fn = self.get_predict_fn(api_name) + future = self.executor.submit(predict_fn, *args) + return future diff --git a/client/python/utils.py b/client/python/gradio_client/utils.py similarity index 85% rename from client/python/utils.py rename to client/python/gradio_client/utils.py index 7e33b79d2da95..fa1ed49ee4b5c 100644 --- a/client/python/utils.py +++ b/client/python/gradio_client/utils.py @@ -1,6 +1,9 @@ import requests +API_URL = "{}/api/predict/" +WS_URL = "{}/queue/join" + def space_name_to_src(space_name, access_token=None): headers = {} if access_token is None else {"Authorization": "Bearer {api_key}"} return ( From 27ecb668dd78cad963cb0d91176f2249c1a39628 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 1 Mar 2023 18:30:59 -0800 Subject: [PATCH 10/93] scripts --- client/python/gradio_client/__init__.py | 2 +- client/python/gradio_client/client.py | 1 + scripts/format_backend.sh | 6 +++--- scripts/lint_backend.sh | 6 +++--- 4 files changed, 8 insertions(+), 7 deletions(-) diff --git a/client/python/gradio_client/__init__.py b/client/python/gradio_client/__init__.py index 136d06384a4c9..889eae463158f 100644 --- a/client/python/gradio_client/__init__.py +++ 
b/client/python/gradio_client/__init__.py @@ -1 +1 @@ - \ No newline at end of file +from gradio_client.client import Client \ No newline at end of file diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 303c1ae5181fe..c1fd72f866439 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -1,4 +1,5 @@ """The main Client class for the Python client.""" +from __future__ import annotations import concurrent.futures from concurrent.futures import Future diff --git a/scripts/format_backend.sh b/scripts/format_backend.sh index d87257062ea42..aec3b882e003d 100755 --- a/scripts/format_backend.sh +++ b/scripts/format_backend.sh @@ -3,6 +3,6 @@ cd "$(dirname ${0})/.." echo "Formatting the backend... Our style follows the Black code style." -python -m black gradio test client/python -python -m isort --profile=black gradio test client/python -python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 gradio test client/python --exclude gradio/__init__.py +python -m black gradio test client/python/gradio_client +python -m isort --profile=black gradio test client/python/gradio_client +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 gradio test client/python/gradio_client --exclude gradio/__init__.py client/python/gradio_client/__init__.py diff --git a/scripts/lint_backend.sh b/scripts/lint_backend.sh index 515ccd1751c27..525436b272562 100644 --- a/scripts/lint_backend.sh +++ b/scripts/lint_backend.sh @@ -2,6 +2,6 @@ cd "$(dirname ${0})/.." 
-python -m black --check gradio test client/python -python -m isort --profile=black --check-only gradio test client/python -python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 gradio test client/python --exclude gradio/__init__.py \ No newline at end of file +python -m black --check gradio test client/python/gradio_client +python -m isort --profile=black --check-only gradio test client/python/gradio_client +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 gradio test client/python/gradio_client --exclude gradio/__init__.py client/python/gradio_client/__init__.py \ No newline at end of file From 125b4c2a5d8524b48876e10bcd99580d636fa351 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 2 Mar 2023 02:31:40 -0800 Subject: [PATCH 11/93] serialization --- client/python/gradio_client/client.py | 67 +++++- client/python/gradio_client/serializers.py | 224 +++++++++++++++++++++ client/python/gradio_client/utils.py | 10 +- 3 files changed, 283 insertions(+), 18 deletions(-) create mode 100644 client/python/gradio_client/serializers.py diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index c1fd72f866439..0c59f757dce26 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -3,7 +3,11 @@ import concurrent.futures from concurrent.futures import Future -from typing import Callable +import json +import re +from typing import Callable, Dict + +import requests from gradio_client import utils @@ -14,7 +18,7 @@ def __init__( space: str | None = None, src: str | None = None, access_token: str | None = None, - max_workers: int = 5 + max_workers: int = 40 ): if space is None and src is None: raise ValueError("Either `space` or `src` must be provided") @@ -22,22 +26,67 @@ def __init__( raise ValueError("Only one of `space` or `src` must be provided") self.space = space - self.src = src or utils.space_name_to_src(space) + self.src = src or self._space_name_to_src() + if 
self.src is None: + raise ValueError(f"Could not find Space: {space}. If it is a private Space, please provide an access_token.") + self.access_token = access_token + self.headers = {"Authorization": "Bearer {access_token}"} if access_token else {} self.api_url = utils.API_URL.format(self.src) self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") + self.config = self._get_config() # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - def get_predict_fn(self, api_name: str | None) -> Callable: - # Should make the connection with the remote API and return the result + def predict(self, args, api_name: str | None = None, fn_index: int = 0) -> Future: + predict_fn = self._get_predict_fn(api_name, fn_index) + future = self.executor.submit(predict_fn, *args) + return future + + ################# + # Helper methods + ################# + + def _get_predict_fn(self, api_name: str | None, fn_index: int) -> Callable: + if api_name is not None: + inferred_fn_index = next( + ( + i + for i, d in enumerate(self.config["dependencies"]) + if d.get("api_name") == api_name + ), + None, + ) + if inferred_fn_index is None: + raise ValueError(f"Cannot find a function with api_name: {api_name}") + fn_index = inferred_fn_index + return lambda *args: None def __del__(self): if hasattr(self, "executor"): self.executor.shutdown(wait=True) + + def _space_name_to_src(self) -> str | None: + return ( + requests.get( + f"https://huggingface.co/api/spaces/{self.space}/host", headers=self.headers + ) + .json() + .get("host") + ) + + def _get_config(self) -> Dict: + assert self.src is not None + r = requests.get(self.src, headers=self.headers) + # some basic regex to extract the config + result = re.search(r"window.gradio_config = (.*?);[\s]*", r.text) + try: + config = json.loads(result.group(1)) # type: ignore + except AttributeError: + raise ValueError(f"Could not get Gradio config from: {self.src}") + if 
"allow_flagging" in config: + raise ValueError(f"Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x.") + return config + - def predict(self, args, api_name: str | None = None) -> Future: - predict_fn = self.get_predict_fn(api_name) - future = self.executor.submit(predict_fn, *args) - return future diff --git a/client/python/gradio_client/serializers.py b/client/python/gradio_client/serializers.py new file mode 100644 index 0000000000000..8c7e9d66b76a6 --- /dev/null +++ b/client/python/gradio_client/serializers.py @@ -0,0 +1,224 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from pathlib import Path +from typing import Any, Dict + +from gradio import processing_utils, utils +from gradio.context import Context + + +class Serializable(ABC): + @abstractmethod + def serialize( + self, x: Any, load_dir: str | Path = "", encryption_key: bytes | None = None + ): + """ + Convert data from human-readable format to serialized format for a browser. + """ + pass + + @abstractmethod + def deserialize( + self, + x: Any, + save_dir: str | Path | None = None, + encryption_key: bytes | None = None, + root_url: str | None = None, + ): + """ + Convert data from serialized format for a browser to human-readable format. + """ + pass + + +class SimpleSerializable(Serializable): + def serialize( + self, x: Any, load_dir: str | Path = "", encryption_key: bytes | None = None + ) -> Any: + """ + Convert data from human-readable format to serialized format. For SimpleSerializable components, this is a no-op. + Parameters: + x: Input data to serialize + load_dir: Ignored + encryption_key: Ignored + """ + return x + + def deserialize( + self, + x: Any, + save_dir: str | Path | None = None, + encryption_key: bytes | None = None, + root_url: str | None = None, + ): + """ + Convert data from serialized format to human-readable format. For SimpleSerializable components, this is a no-op. 
+ Parameters: + x: Input data to deserialize + save_dir: Ignored + encryption_key: Ignored + root_url: Ignored + """ + return x + + +class ImgSerializable(Serializable): + def serialize( + self, + x: str | None, + load_dir: str | Path = "", + encryption_key: bytes | None = None, + ) -> str | None: + """ + Convert from human-friendly version of a file (string filepath) to a seralized + representation (base64). + Parameters: + x: String path to file to serialize + load_dir: Path to directory containing x + encryption_key: Used to encrypt the file + """ + if x is None or x == "": + return None + is_url = utils.validate_url(x) + path = x if is_url else Path(load_dir) / x + return processing_utils.encode_url_or_file_to_base64( + path, encryption_key=encryption_key + ) + + def deserialize( + self, + x: str | None, + save_dir: str | Path | None = None, + encryption_key: bytes | None = None, + root_url: str | None = None, + ) -> str | None: + """ + Convert from serialized representation of a file (base64) to a human-friendly + version (string filepath). 
Optionally, save the file to the directory specified by save_dir + Parameters: + x: Base64 representation of image to deserialize into a string filepath + save_dir: Path to directory to save the deserialized image to + encryption_key: Used to decrypt the file + root_url: Ignored + """ + if x is None or x == "": + return None + file = processing_utils.decode_base64_to_file( + x, dir=save_dir, encryption_key=encryption_key + ) + return file.name + + +class FileSerializable(Serializable): + def serialize( + self, + x: str | None, + load_dir: str | Path = "", + encryption_key: bytes | None = None, + ) -> Dict | None: + """ + Convert from human-friendly version of a file (string filepath) to a + seralized representation (base64) + Parameters: + x: String path to file to serialize + load_dir: Path to directory containing x + encryption_key: Used to encrypt the file + """ + if x is None or x == "": + return None + filename = str(Path(load_dir) / x) + return { + "name": filename, + "data": processing_utils.encode_url_or_file_to_base64( + filename, encryption_key=encryption_key + ), + "orig_name": Path(filename).name, + "is_file": False, + } + + def deserialize( + self, + x: str | Dict | None, + save_dir: Path | str | None = None, + encryption_key: bytes | None = None, + root_url: str | None = None, + ) -> str | None: + """ + Convert from serialized representation of a file (base64) to a human-friendly + version (string filepath). 
Optionally, save the file to the directory specified by `save_dir` + Parameters: + x: Base64 representation of file to deserialize into a string filepath + save_dir: Path to directory to save the deserialized file to + encryption_key: Used to decrypt the file + root_url: If this component is loaded from an external Space, this is the URL of the Space + """ + if x is None: + return None + if isinstance(save_dir, Path): + save_dir = str(save_dir) + if isinstance(x, str): + file_name = processing_utils.decode_base64_to_file( + x, dir=save_dir, encryption_key=encryption_key + ).name + elif isinstance(x, dict): + if x.get("is_file", False): + if root_url is not None: + file_name = processing_utils.download_tmp_copy_of_file( + root_url + "file=" + x["name"], + access_token=Context.access_token, + dir=save_dir, + ).name + else: + file_name = processing_utils.create_tmp_copy_of_file( + x["name"], dir=save_dir + ).name + else: + file_name = processing_utils.decode_base64_to_file( + x["data"], dir=save_dir, encryption_key=encryption_key + ).name + else: + raise ValueError( + f"A FileSerializable component cannot only deserialize a string or a dict, not a: {type(x)}" + ) + return file_name + + +class JSONSerializable(Serializable): + def serialize( + self, + x: str | None, + load_dir: str | Path = "", + encryption_key: bytes | None = None, + ) -> Dict | None: + """ + Convert from a a human-friendly version (string path to json file) to a + serialized representation (json string) + Parameters: + x: String path to json file to read to get json string + load_dir: Path to directory containing x + encryption_key: Ignored + """ + if x is None or x == "": + return None + return processing_utils.file_to_json(Path(load_dir) / x) + + def deserialize( + self, + x: str | Dict, + save_dir: str | Path | None = None, + encryption_key: bytes | None = None, + root_url: str | None = None, + ) -> str | None: + """ + Convert from serialized representation (json string) to a human-friendly + 
version (string path to json file). Optionally, save the file to the directory specified by `save_dir` + Parameters: + x: Json string + save_dir: Path to save the deserialized json file to + encryption_key: Ignored + root_url: Ignored + """ + if x is None: + return None + return processing_utils.dict_or_str_to_json_file(x, dir=save_dir).name diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index fa1ed49ee4b5c..8309ef77c9234 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -4,12 +4,4 @@ API_URL = "{}/api/predict/" WS_URL = "{}/queue/join" -def space_name_to_src(space_name, access_token=None): - headers = {} if access_token is None else {"Authorization": "Bearer {api_key}"} - return ( - requests.get( - f"https://huggingface.co/api/spaces/{space_name}/host", headers=headers - ) - .json() - .get("host") - ) + From ac1ee6424e6d291f6092353b3a12b283d5b992bc Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 2 Mar 2023 02:58:34 -0800 Subject: [PATCH 12/93] cleanup --- client/python/gradio_client/__init__.py | 2 +- client/python/gradio_client/client.py | 46 ++++++++++++---------- client/python/gradio_client/serializers.py | 30 +++++--------- client/python/gradio_client/utils.py | 2 - 4 files changed, 37 insertions(+), 43 deletions(-) diff --git a/client/python/gradio_client/__init__.py b/client/python/gradio_client/__init__.py index 889eae463158f..0bae3214abdce 100644 --- a/client/python/gradio_client/__init__.py +++ b/client/python/gradio_client/__init__.py @@ -1 +1 @@ -from gradio_client.client import Client \ No newline at end of file +from gradio_client.client import Client diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 0c59f757dce26..7162e95645340 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -14,27 +14,32 @@ class Client: def __init__( - self, - space: str | None = None, - src: 
str | None = None, - access_token: str | None = None, - max_workers: int = 40 + self, + space: str | None = None, + src: str | None = None, + access_token: str | None = None, + max_workers: int = 40, ): + self.access_token = access_token + self.headers = ( + {"Authorization": "Bearer {access_token}"} if access_token else {} + ) + if space is None and src is None: raise ValueError("Either `space` or `src` must be provided") elif space and src: raise ValueError("Only one of `space` or `src` must be provided") - self.space = space self.src = src or self._space_name_to_src() if self.src is None: - raise ValueError(f"Could not find Space: {space}. If it is a private Space, please provide an access_token.") - self.access_token = access_token - self.headers = {"Authorization": "Bearer {access_token}"} if access_token else {} - self.api_url = utils.API_URL.format(self.src) + raise ValueError( + f"Could not find Space: {space}. If it is a private Space, please provide an access_token." + ) + + self.api_url = utils.API_URL.format(self.src) self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") self.config = self._get_config() - + # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) @@ -46,7 +51,7 @@ def predict(self, args, api_name: str | None = None, fn_index: int = 0) -> Futur ################# # Helper methods ################# - + def _get_predict_fn(self, api_name: str | None, fn_index: int) -> Callable: if api_name is not None: inferred_fn_index = next( @@ -59,18 +64,19 @@ def _get_predict_fn(self, api_name: str | None, fn_index: int) -> Callable: ) if inferred_fn_index is None: raise ValueError(f"Cannot find a function with api_name: {api_name}") - fn_index = inferred_fn_index - + fn_index = inferred_fn_index + return lambda *args: None def __del__(self): if hasattr(self, "executor"): self.executor.shutdown(wait=True) - + def _space_name_to_src(self) -> str | None: return ( 
requests.get( - f"https://huggingface.co/api/spaces/{self.space}/host", headers=self.headers + f"https://huggingface.co/api/spaces/{self.space}/host", + headers=self.headers, ) .json() .get("host") @@ -78,7 +84,7 @@ def _space_name_to_src(self) -> str | None: def _get_config(self) -> Dict: assert self.src is not None - r = requests.get(self.src, headers=self.headers) + r = requests.get(self.src, headers=self.headers) # some basic regex to extract the config result = re.search(r"window.gradio_config = (.*?);[\s]*", r.text) try: @@ -86,7 +92,7 @@ def _get_config(self) -> Dict: except AttributeError: raise ValueError(f"Could not get Gradio config from: {self.src}") if "allow_flagging" in config: - raise ValueError(f"Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x.") + raise ValueError( + f"Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x." + ) return config - - diff --git a/client/python/gradio_client/serializers.py b/client/python/gradio_client/serializers.py index 8c7e9d66b76a6..6da0a8ba27fd6 100644 --- a/client/python/gradio_client/serializers.py +++ b/client/python/gradio_client/serializers.py @@ -5,14 +5,11 @@ from typing import Any, Dict from gradio import processing_utils, utils -from gradio.context import Context class Serializable(ABC): @abstractmethod - def serialize( - self, x: Any, load_dir: str | Path = "", encryption_key: bytes | None = None - ): + def serialize(self, x: Any, load_dir: str | Path = ""): """ Convert data from human-readable format to serialized format for a browser. 
""" @@ -23,7 +20,6 @@ def deserialize( self, x: Any, save_dir: str | Path | None = None, - encryption_key: bytes | None = None, root_url: str | None = None, ): """ @@ -33,9 +29,7 @@ def deserialize( class SimpleSerializable(Serializable): - def serialize( - self, x: Any, load_dir: str | Path = "", encryption_key: bytes | None = None - ) -> Any: + def serialize(self, x: Any, load_dir: str | Path = "") -> Any: """ Convert data from human-readable format to serialized format. For SimpleSerializable components, this is a no-op. Parameters: @@ -49,7 +43,6 @@ def deserialize( self, x: Any, save_dir: str | Path | None = None, - encryption_key: bytes | None = None, root_url: str | None = None, ): """ @@ -68,7 +61,6 @@ def serialize( self, x: str | None, load_dir: str | Path = "", - encryption_key: bytes | None = None, ) -> str | None: """ Convert from human-friendly version of a file (string filepath) to a seralized @@ -82,16 +74,14 @@ def serialize( return None is_url = utils.validate_url(x) path = x if is_url else Path(load_dir) / x - return processing_utils.encode_url_or_file_to_base64( - path, encryption_key=encryption_key - ) + return processing_utils.encode_url_or_file_to_base64(path) def deserialize( self, x: str | None, save_dir: str | Path | None = None, - encryption_key: bytes | None = None, root_url: str | None = None, + access_token: str | None = None, ) -> str | None: """ Convert from serialized representation of a file (base64) to a human-friendly @@ -104,9 +94,7 @@ def deserialize( """ if x is None or x == "": return None - file = processing_utils.decode_base64_to_file( - x, dir=save_dir, encryption_key=encryption_key - ) + file = processing_utils.decode_base64_to_file(x, dir=save_dir) return file.name @@ -141,8 +129,8 @@ def deserialize( self, x: str | Dict | None, save_dir: Path | str | None = None, - encryption_key: bytes | None = None, root_url: str | None = None, + access_token: str | None = None, ) -> str | None: """ Convert from serialized 
representation of a file (base64) to a human-friendly @@ -166,7 +154,7 @@ def deserialize( if root_url is not None: file_name = processing_utils.download_tmp_copy_of_file( root_url + "file=" + x["name"], - access_token=Context.access_token, + access_token=access_token, dir=save_dir, ).name else: @@ -207,7 +195,6 @@ def deserialize( self, x: str | Dict, save_dir: str | Path | None = None, - encryption_key: bytes | None = None, root_url: str | None = None, ) -> str | None: """ @@ -222,3 +209,6 @@ def deserialize( if x is None: return None return processing_utils.dict_or_str_to_json_file(x, dir=save_dir).name + + +serializer_mapping = {cls.__name__: cls for cls in Serializable.__subclasses__()} diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index 8309ef77c9234..7571cbd36e481 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -3,5 +3,3 @@ API_URL = "{}/api/predict/" WS_URL = "{}/queue/join" - - From 2b1c4d6b3840548e4cdf88138921b55ce78c0b4f Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 2 Mar 2023 03:55:36 -0800 Subject: [PATCH 13/93] fns --- client/python/gradio_client/client.py | 78 ++++++++++++++++++++++++--- client/python/gradio_client/utils.py | 49 ++++++++++++++++- 2 files changed, 118 insertions(+), 9 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 7162e95645340..21773f5a7891e 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -2,13 +2,15 @@ from __future__ import annotations import concurrent.futures -from concurrent.futures import Future import json import re +from concurrent.futures import Future +from packaging import version from typing import Callable, Dict +import uuid +import websockets import requests - from gradio_client import utils @@ -44,15 +46,15 @@ def __init__( self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) def 
predict(self, args, api_name: str | None = None, fn_index: int = 0) -> Future: - predict_fn = self._get_predict_fn(api_name, fn_index) - future = self.executor.submit(predict_fn, *args) + complete_fn = self._get_complete_fn(api_name, fn_index) + future = self.executor.submit(complete_fn, *args) return future ################# # Helper methods ################# - - def _get_predict_fn(self, api_name: str | None, fn_index: int) -> Callable: + + def _get_complete_fn(self, api_name: str | None, fn_index: int) -> Callable: if api_name is not None: inferred_fn_index = next( ( @@ -64,9 +66,69 @@ def _get_predict_fn(self, api_name: str | None, fn_index: int) -> Callable: ) if inferred_fn_index is None: raise ValueError(f"Cannot find a function with api_name: {api_name}") - fn_index = inferred_fn_index + fn_index = inferred_fn_index + + dependency = self.config["dependencies"][fn_index] + + predict_fn = self._get_predict_fn(fn_index, dependency) + serialize_fn = self._get_serialize_fn(dependency) + deserialize_fn = self._get_deserialize_fn(dependency) + + return lambda *args: deserialize_fn(predict_fn(*serialize_fn(*args))) + + def _use_websocket(self, dependency: Dict) -> bool: + queue_enabled = self.config.get("enable_queue", False) + queue_uses_websocket = version.parse( + self.config.get("version", "2.0") + ) >= version.Version("3.2") + dependency_uses_queue = dependency.get("queue", False) is not False + return queue_enabled and queue_uses_websocket and dependency_uses_queue + + async def _ws_fn(self, data, hash_data): + async with websockets.connect( # type: ignore + self.ws_url, open_timeout=10, extra_headers=self.headers + ) as websocket: + return await utils.get_pred_from_ws(websocket, data, hash_data) - return lambda *args: None + def _get_predict_fn(self, fn_index: int, dependency: Dict) -> Callable: + use_ws = self._use_websocket(dependency) + def predict_fn(*data): + if not dependency["backend_fn"]: + return None + data = json.dumps({"data": data, "fn_index": 
fn_index}) + hash_data = json.dumps( + {"fn_index": fn_index, "session_hash": str(uuid.uuid4())} + ) + if use_ws: + result = utils.synchronize_async(self._ws_fn, data, hash_data) + output = result["data"] + else: + response = requests.post(self.api_url, headers=self.headers, data=data) + result = json.loads(response.content.decode("utf-8")) + try: + output = result["data"] + except KeyError: + if "error" in result and "429" in result["error"]: + raise utils.TooManyRequestsError( + "Too many requests to the Hugging Face API" + ) + raise KeyError( + f"Could not find 'data' key in response. Response received: {result}" + ) + return output + return predict_fn + + def _get_serialize_fn(self, dependency: Dict) -> Callable: + def serialize_fn(*data): + return data + return serialize_fn + + def _get_deserialize_fn(self, dependency: Dict) -> Callable: + def deserialize_fn(*data): + if len(dependency["outputs"]) == 1: + data = data[0] + return data + return deserialize_fn def __del__(self): if hasattr(self, "executor"): diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index 7571cbd36e481..46952315181c9 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -1,5 +1,52 @@ -import requests +import json +from typing import Callable, Any, Dict +from websockets.legacy.protocol import WebSocketCommonProtocol +import fsspec.asyn API_URL = "{}/api/predict/" WS_URL = "{}/queue/join" + +class TooManyRequestsError(Exception): + """Raised when the API returns a 429 status code.""" + pass + + +class QueueError(Exception): + """Raised when the queue is full or there is an issue adding a job to the queue.""" + pass + +async def get_pred_from_ws( + websocket: WebSocketCommonProtocol, data: str, hash_data: str +) -> Dict[str, Any]: + completed = False + resp = {} + while not completed: + msg = await websocket.recv() + resp = json.loads(msg) + if resp["msg"] == "queue_full": + raise QueueError("Queue is full! 
Please try again.") + if resp["msg"] == "send_hash": + await websocket.send(hash_data) + elif resp["msg"] == "send_data": + await websocket.send(data) + completed = resp["msg"] == "process_completed" + return resp["output"] + + +def synchronize_async(func: Callable, *args, **kwargs) -> Any: + """ + Runs async functions in sync scopes. + + Can be used in any scope. See run_coro_in_background for more details. + + Example: + if inspect.iscoroutinefunction(block_fn.fn): + predictions = utils.synchronize_async(block_fn.fn, *processed_input) + + Args: + func: + *args: + **kwargs: + """ + return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) From 9e02818792318274a1bd81a523f4e7abfc89c995 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 2 Mar 2023 05:03:52 -0800 Subject: [PATCH 14/93] serialize --- client/python/gradio_client/client.py | 54 ++++++++++++++++--- .../{serializers.py => serializing.py} | 34 +++++++++++- 2 files changed, 79 insertions(+), 9 deletions(-) rename client/python/gradio_client/{serializers.py => serializing.py} (87%) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 21773f5a7891e..0a760157558ee 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,7 +11,7 @@ import websockets import requests -from gradio_client import utils +from gradio_client import utils, serializing class Client: @@ -45,7 +45,7 @@ def __init__( # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - def predict(self, args, api_name: str | None = None, fn_index: int = 0) -> Future: + def predict(self, *args, api_name: str | None = None, fn_index: int = 0) -> Future: complete_fn = self._get_complete_fn(api_name, fn_index) future = self.executor.submit(complete_fn, *args) return future @@ -74,7 +74,7 @@ def _get_complete_fn(self, api_name: str | None, fn_index: int) -> Callable: serialize_fn = 
self._get_serialize_fn(dependency) deserialize_fn = self._get_deserialize_fn(dependency) - return lambda *args: deserialize_fn(predict_fn(*serialize_fn(*args))) + return lambda *args: deserialize_fn(*predict_fn(*serialize_fn(*args))) def _use_websocket(self, dependency: Dict) -> bool: queue_enabled = self.config.get("enable_queue", False) @@ -93,6 +93,7 @@ async def _ws_fn(self, data, hash_data): def _get_predict_fn(self, fn_index: int, dependency: Dict) -> Callable: use_ws = self._use_websocket(dependency) def predict_fn(*data): + print("data", data) if not dependency["backend_fn"]: return None data = json.dumps({"data": data, "fn_index": fn_index}) @@ -115,19 +116,56 @@ def predict_fn(*data): raise KeyError( f"Could not find 'data' key in response. Response received: {result}" ) - return output + print("output", output) + print("output", tuple(output)) + return tuple(output) return predict_fn def _get_serialize_fn(self, dependency: Dict) -> Callable: + inputs = dependency["inputs"] + serializers = [] + + for i in inputs: + for component in self.config["components"]: + if component["id"] == i: + if component.get("serializer", None): + serializer_name = component["serializer"] + assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + serializer = serializing.SERIALIZER_MAPPING[serializer_name] + else: + component_name = component["type"] + assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." 
+ serializer = serializing.COMPONENT_MAPPING[component_name] + serializers.append(serializer()) # type: ignore + def serialize_fn(*data): - return data + assert len(data) == len(serializers), f"Expected {len(serializers)} arguments, got {len(data)}" + return [s.serialize(d) for s, d in zip(serializers, data)] + return serialize_fn def _get_deserialize_fn(self, dependency: Dict) -> Callable: + outputs = dependency["outputs"] + deserializers = [] + + for i in outputs: + for component in self.config["components"]: + if component["id"] == i: + if component.get("serializer", None): + serializer_name = component["serializer"] + assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + deserializer = serializing.SERIALIZER_MAPPING[serializer_name] + else: + component_name = component["type"] + assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." 
+ deserializer = serializing.COMPONENT_MAPPING[component_name] + deserializers.append(deserializer()) # type: ignore + def deserialize_fn(*data): - if len(dependency["outputs"]) == 1: - data = data[0] - return data + result = [s.deserialize(d) for s, d in zip(deserializers, data)] + if len(outputs) == 1: + result = result[0] + return result return deserialize_fn def __del__(self): diff --git a/client/python/gradio_client/serializers.py b/client/python/gradio_client/serializing.py similarity index 87% rename from client/python/gradio_client/serializers.py rename to client/python/gradio_client/serializing.py index 6da0a8ba27fd6..d8f01ac83a6c5 100644 --- a/client/python/gradio_client/serializers.py +++ b/client/python/gradio_client/serializing.py @@ -211,4 +211,36 @@ def deserialize( return processing_utils.dict_or_str_to_json_file(x, dir=save_dir).name -serializer_mapping = {cls.__name__: cls for cls in Serializable.__subclasses__()} +SERIALIZER_MAPPING = {cls.__name__: cls for cls in Serializable.__subclasses__()} + +COMPONENT_MAPPING = { + 'textbox': SimpleSerializable, + 'number': SimpleSerializable, + 'slider': SimpleSerializable, + 'checkbox': SimpleSerializable, + 'checkboxgroup': SimpleSerializable, + 'radio': SimpleSerializable, + 'dropdown': SimpleSerializable, + 'image': ImgSerializable, + 'video': FileSerializable, + 'audio': FileSerializable, + 'file': FileSerializable, + 'dataframe': JSONSerializable, + 'timeseries': JSONSerializable, + 'state': SimpleSerializable, + 'button': SimpleSerializable, + 'uploadbutton': FileSerializable, + 'colorpicker': SimpleSerializable, + 'label': JSONSerializable, + 'highlightedtext': JSONSerializable, + 'json': JSONSerializable, + 'html': SimpleSerializable, + 'gallery': SimpleSerializable, # TODO: Make this a proper Serializable class + 'chatbot': JSONSerializable, + 'model3d': FileSerializable, + 'plot': JSONSerializable, + 'markdown': SimpleSerializable, + 'dataset': SimpleSerializable + } + + \ No newline at end of 
file From de6eef181e37a21021d5090c8786e9c552bbbf2d Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 2 Mar 2023 06:53:33 -0800 Subject: [PATCH 15/93] cache --- client/python/gradio_client/client.py | 184 +++++++++++++++----------- 1 file changed, 108 insertions(+), 76 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 0a760157558ee..0ef4a0618a315 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -6,7 +6,7 @@ import re from concurrent.futures import Future from packaging import version -from typing import Callable, Dict +from typing import Callable, Dict, List import uuid import websockets @@ -42,6 +42,10 @@ def __init__( self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") self.config = self._get_config() + self.predict_fns = self._setup_predict_fns() + self.serialize_fns = self._setup_serialize_fn() + self.deserialize_fns = self._setup_deserialize_fn() + # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) @@ -68,11 +72,9 @@ def _get_complete_fn(self, api_name: str | None, fn_index: int) -> Callable: raise ValueError(f"Cannot find a function with api_name: {api_name}") fn_index = inferred_fn_index - dependency = self.config["dependencies"][fn_index] - - predict_fn = self._get_predict_fn(fn_index, dependency) - serialize_fn = self._get_serialize_fn(dependency) - deserialize_fn = self._get_deserialize_fn(dependency) + predict_fn = self._get_predict_fn(fn_index) + serialize_fn = self._get_serialize_fn(fn_index) + deserialize_fn = self._get_deserialize_fn(fn_index) return lambda *args: deserialize_fn(*predict_fn(*serialize_fn(*args))) @@ -90,83 +92,113 @@ async def _ws_fn(self, data, hash_data): ) as websocket: return await utils.get_pred_from_ws(websocket, data, hash_data) - def _get_predict_fn(self, fn_index: int, dependency: Dict) -> Callable: - use_ws = 
self._use_websocket(dependency) - def predict_fn(*data): - print("data", data) + def _get_predict_fn(self, fn_index: int) -> Callable: + return self.predict_fns[fn_index] + + def _setup_predict_fns(self) -> List[Callable]: + def create_fn(fn_index, dependency: Dict) -> Callable: if not dependency["backend_fn"]: - return None - data = json.dumps({"data": data, "fn_index": fn_index}) - hash_data = json.dumps( - {"fn_index": fn_index, "session_hash": str(uuid.uuid4())} - ) - if use_ws: - result = utils.synchronize_async(self._ws_fn, data, hash_data) - output = result["data"] - else: - response = requests.post(self.api_url, headers=self.headers, data=data) - result = json.loads(response.content.decode("utf-8")) - try: + return lambda *args: args + use_ws = self._use_websocket(dependency) + def predict_fn(*data): + if not dependency["backend_fn"]: + return None + data = json.dumps({"data": data, "fn_index": fn_index}) + hash_data = json.dumps( + {"fn_index": fn_index, "session_hash": str(uuid.uuid4())} + ) + if use_ws: + result = utils.synchronize_async(self._ws_fn, data, hash_data) output = result["data"] - except KeyError: - if "error" in result and "429" in result["error"]: - raise utils.TooManyRequestsError( - "Too many requests to the Hugging Face API" + else: + response = requests.post(self.api_url, headers=self.headers, data=data) + result = json.loads(response.content.decode("utf-8")) + try: + output = result["data"] + except KeyError: + if "error" in result and "429" in result["error"]: + raise utils.TooManyRequestsError( + "Too many requests to the Hugging Face API" + ) + raise KeyError( + f"Could not find 'data' key in response. Response received: {result}" ) - raise KeyError( - f"Could not find 'data' key in response. 
Response received: {result}" - ) - print("output", output) - print("output", tuple(output)) - return tuple(output) - return predict_fn - - def _get_serialize_fn(self, dependency: Dict) -> Callable: - inputs = dependency["inputs"] - serializers = [] - - for i in inputs: - for component in self.config["components"]: - if component["id"] == i: - if component.get("serializer", None): - serializer_name = component["serializer"] - assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." - serializer = serializing.SERIALIZER_MAPPING[serializer_name] - else: - component_name = component["type"] - assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." - serializer = serializing.COMPONENT_MAPPING[component_name] - serializers.append(serializer()) # type: ignore + return tuple(output) + return predict_fn - def serialize_fn(*data): - assert len(data) == len(serializers), f"Expected {len(serializers)} arguments, got {len(data)}" - return [s.serialize(d) for s, d in zip(serializers, data)] - - return serialize_fn + fns = [] + for fn_index, dependency in enumerate(self.config["dependencies"]): + fns.append(create_fn(fn_index, dependency)) + return fns + + def _get_serialize_fn(self, fn_index: int) -> Callable: + return self.serialize_fns[fn_index] - def _get_deserialize_fn(self, dependency: Dict) -> Callable: - outputs = dependency["outputs"] - deserializers = [] + def _setup_serialize_fn(self) -> List[Callable]: + def create_fn(dependency: Dict) -> Callable: + if not dependency["backend_fn"]: + return lambda *args: args + inputs = dependency["inputs"] + serializers = [] + + for i in inputs: + for component in self.config["components"]: + if component["id"] == i: + if component.get("serializer", None): + serializer_name = component["serializer"] + assert serializer_name in 
serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + serializer = serializing.SERIALIZER_MAPPING[serializer_name] + else: + component_name = component["type"] + assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." + serializer = serializing.COMPONENT_MAPPING[component_name] + serializers.append(serializer()) # type: ignore + + def serialize_fn(*data): + assert len(data) == len(serializers), f"Expected {len(serializers)} arguments, got {len(data)}" + return [s.serialize(d) for s, d in zip(serializers, data)] + return serialize_fn - for i in outputs: - for component in self.config["components"]: - if component["id"] == i: - if component.get("serializer", None): - serializer_name = component["serializer"] - assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." - deserializer = serializing.SERIALIZER_MAPPING[serializer_name] - else: - component_name = component["type"] - assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." 
- deserializer = serializing.COMPONENT_MAPPING[component_name] - deserializers.append(deserializer()) # type: ignore + fns = [] + for dependency in self.config["dependencies"]: + fns.append(create_fn(dependency)) + return fns + + def _get_deserialize_fn(self, fn_index: int) -> Callable: + return self.deserialize_fns[fn_index] + + def _setup_deserialize_fn(self) -> List[Callable]: + def create_fn(dependency: Dict) -> Callable: + if not dependency["backend_fn"]: + return lambda *args: args + + outputs = dependency["outputs"] + deserializers = [] + + for i in outputs: + for component in self.config["components"]: + if component["id"] == i: + if component.get("serializer", None): + serializer_name = component["serializer"] + assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + deserializer = serializing.SERIALIZER_MAPPING[serializer_name] + else: + component_name = component["type"] + assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." 
+ deserializer = serializing.COMPONENT_MAPPING[component_name] + deserializers.append(deserializer()) # type: ignore + + def deserialize_fn(*data): + result = [s.deserialize(d) for s, d in zip(deserializers, data)] + if len(outputs) == 1: + result = result[0] + return result + return deserialize_fn - def deserialize_fn(*data): - result = [s.deserialize(d) for s, d in zip(deserializers, data)] - if len(outputs) == 1: - result = result[0] - return result - return deserialize_fn + fns = [] + for dependency in self.config["dependencies"]: + fns.append(create_fn(dependency)) + return fns def __del__(self): if hasattr(self, "executor"): From 6fad4e889d01dcf570aeac728eab8048a32fac2c Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 2 Mar 2023 07:55:23 -0800 Subject: [PATCH 16/93] callbacks --- client/python/gradio_client/client.py | 66 ++++++++++++++++++++------- gradio/components.py | 21 ++++++++- 2 files changed, 68 insertions(+), 19 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 0ef4a0618a315..a51ab26e6bb1d 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -49,28 +49,60 @@ def __init__( # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - def predict(self, *args, api_name: str | None = None, fn_index: int = 0) -> Future: + def predict(self, *args, api_name: str | None = None, fn_index: int = 0, callbacks: List[Callable] | None = None) -> Future: complete_fn = self._get_complete_fn(api_name, fn_index) future = self.executor.submit(complete_fn, *args) + if callbacks: + def create_fn(callback) -> Callable: + def fn(future): + if isinstance(future.result(), tuple): + callback(*future.result()) + else: + callback(future.result()) + return fn + + for callback in callbacks: + future.add_done_callback(create_fn(callback)) return future + + def info(self, api_name: str | None = None) -> 
Dict: + if api_name: + fn_index = self._infer_fn_index(api_name) + dependency = self.config["dependencies"][fn_index] + return {api_name: {"input_parameters": ["(str) value"], "output_values": ["(str) value"]}} + else: + api_info = {"named_endpoints": {}} + for dependency in self.config["dependencies"]: + if dependency.get("api_name") and dependency["backend_fn"]: + api_name = dependency["api_name"] + api_info["named_endpoints"] = self.info(api_name) + api_info["num_named_endpoints"] = len(api_info) # type: ignore + return api_info + + def pprint(self, api_name: str | None = None) -> None: + print(json.dumps(self.info(api_name), indent=2)) - ################# - # Helper methods - ################# + ################################## + # Private helper methods + ################################## + def _infer_fn_index(self, api_name: str) -> int: + inferred_fn_index = next( + ( + i + for i, d in enumerate(self.config["dependencies"]) + if d.get("api_name") == api_name + ), + None, + ) + if inferred_fn_index is None: + raise ValueError(f"Cannot find a function with api_name: {api_name}") + return inferred_fn_index + + def _get_complete_fn(self, api_name: str | None, fn_index: int) -> Callable: if api_name is not None: - inferred_fn_index = next( - ( - i - for i, d in enumerate(self.config["dependencies"]) - if d.get("api_name") == api_name - ), - None, - ) - if inferred_fn_index is None: - raise ValueError(f"Cannot find a function with api_name: {api_name}") - fn_index = inferred_fn_index + fn_index = self._infer_fn_index(api_name) predict_fn = self._get_predict_fn(fn_index) serialize_fn = self._get_serialize_fn(fn_index) @@ -144,7 +176,7 @@ def create_fn(dependency: Dict) -> Callable: for i in inputs: for component in self.config["components"]: if component["id"] == i: - if component.get("serializer", None): + if component.get("serializer"): serializer_name = component["serializer"] assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: 
{serializer_name}, you may need to update your gradio_client version." serializer = serializing.SERIALIZER_MAPPING[serializer_name] @@ -178,7 +210,7 @@ def create_fn(dependency: Dict) -> Callable: for i in outputs: for component in self.config["components"]: if component["id"] == i: - if component.get("serializer", None): + if component.get("serializer"): serializer_name = component["serializer"] assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." deserializer = serializing.SERIALIZER_MAPPING[serializer_name] diff --git a/gradio/components.py b/gradio/components.py index 376c9aca5041b..ae832b1ec7996 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -72,7 +72,7 @@ class _Keywords(Enum): FINISHED_ITERATING = "FINISHED_ITERATING" # Used to skip processing of a component's value (needed for generators + state) -class Component(Block): +class Component(Block, Serializable): """ A base class for defining the methods that all gradio components should have. """ @@ -149,8 +149,13 @@ def style( self.parent.variant = "compact" return self + def serialize_info(self): + return { + "input": "value", + "output": "value", + } -class IOComponent(Component, Serializable): +class IOComponent(Component): """ A base class for defining methods that all input/output components should have. 
""" @@ -318,6 +323,12 @@ def __init__( self.cleared_value = "" self.test_input = value self.type = type + + def serialize_info(self): + return { + "input": "(str) value", + "output": "(str) value", + } def get_config(self): return { @@ -1462,6 +1473,12 @@ def get_config(self): **IOComponent.get_config(self), } + def serialize_info(self): + return { + "input": "(str) filepath or URL to image", + "output": "(str) filepath or URL to image", + } + @staticmethod def update( value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, From b7e12cc6b1804e728ee9b1d7e4681ab681567f4f Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 2 Mar 2023 15:04:10 -0800 Subject: [PATCH 17/93] updates --- client/python/gradio_client/client.py | 32 +++++++++++++++++++++++++-- 1 file changed, 30 insertions(+), 2 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index a51ab26e6bb1d..e86d5c98e1535 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -37,6 +37,8 @@ def __init__( raise ValueError( f"Could not find Space: {space}. If it is a private Space, please provide an access_token." 
) + else: + print(f"Loaded as API: {self.src} ✔") self.api_url = utils.API_URL.format(self.src) self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") @@ -52,6 +54,7 @@ def __init__( def predict(self, *args, api_name: str | None = None, fn_index: int = 0, callbacks: List[Callable] | None = None) -> Future: complete_fn = self._get_complete_fn(api_name, fn_index) future = self.executor.submit(complete_fn, *args) + job = Job(future) if callbacks: def create_fn(callback) -> Callable: def fn(future): @@ -62,8 +65,8 @@ def fn(future): return fn for callback in callbacks: - future.add_done_callback(create_fn(callback)) - return future + job.add_done_callback(create_fn(callback)) + return job def info(self, api_name: str | None = None) -> Dict: if api_name: @@ -81,6 +84,7 @@ def info(self, api_name: str | None = None) -> Dict: def pprint(self, api_name: str | None = None) -> None: print(json.dumps(self.info(api_name), indent=2)) + ################################## # Private helper methods @@ -260,3 +264,27 @@ def _get_config(self) -> Dict: f"Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x." 
) return config + + +class Job(Future): + """A Job is a thin wrapper over the Future class that can be cancelled.""" + def __init__(self, future: Future): + self.future = future + + def __getattr__(self, name): + """Forwards any properties to the Future class.""" + return getattr(self.future, name) + + def cancel(self) -> bool: + """Cancels the job.""" + if self.future.cancelled() or self.future.done(): + pass + return False + elif self.future.running(): + pass # TODO: Handle this case + return True + else: + return self.future.cancel() + + + \ No newline at end of file From ea90f25beb357f2745348d3fa0f17a394cfa2391 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 12:51:44 -0800 Subject: [PATCH 18/93] formatting --- client/python/gradio_client/client.py | 121 +++++++++++++-------- client/python/gradio_client/serializing.py | 58 +++++----- client/python/gradio_client/utils.py | 8 +- gradio/components.py | 3 +- 4 files changed, 110 insertions(+), 80 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index e86d5c98e1535..4fff2b0a14229 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -4,14 +4,14 @@ import concurrent.futures import json import re +import uuid from concurrent.futures import Future -from packaging import version from typing import Callable, Dict, List -import uuid -import websockets import requests -from gradio_client import utils, serializing +import websockets +from gradio_client import serializing, utils +from packaging import version class Client: @@ -47,32 +47,45 @@ def __init__( self.predict_fns = self._setup_predict_fns() self.serialize_fns = self._setup_serialize_fn() self.deserialize_fns = self._setup_deserialize_fn() - + # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - def predict(self, *args, api_name: str | None = None, fn_index: int = 0, 
callbacks: List[Callable] | None = None) -> Future: + def predict( + self, + *args, + api_name: str | None = None, + fn_index: int = 0, + callbacks: List[Callable] | None = None, + ) -> Future: complete_fn = self._get_complete_fn(api_name, fn_index) future = self.executor.submit(complete_fn, *args) job = Job(future) - if callbacks: + if callbacks: + def create_fn(callback) -> Callable: def fn(future): if isinstance(future.result(), tuple): callback(*future.result()) else: callback(future.result()) + return fn - + for callback in callbacks: job.add_done_callback(create_fn(callback)) return job - + def info(self, api_name: str | None = None) -> Dict: if api_name: fn_index = self._infer_fn_index(api_name) dependency = self.config["dependencies"][fn_index] - return {api_name: {"input_parameters": ["(str) value"], "output_values": ["(str) value"]}} + return { + api_name: { + "input_parameters": ["(str) value"], + "output_values": ["(str) value"], + } + } else: api_info = {"named_endpoints": {}} for dependency in self.config["dependencies"]: @@ -81,15 +94,14 @@ def info(self, api_name: str | None = None) -> Dict: api_info["named_endpoints"] = self.info(api_name) api_info["num_named_endpoints"] = len(api_info) # type: ignore return api_info - + def pprint(self, api_name: str | None = None) -> None: print(json.dumps(self.info(api_name), indent=2)) - ################################## # Private helper methods ################################## - + def _infer_fn_index(self, api_name: str) -> int: inferred_fn_index = next( ( @@ -102,17 +114,16 @@ def _infer_fn_index(self, api_name: str) -> int: if inferred_fn_index is None: raise ValueError(f"Cannot find a function with api_name: {api_name}") return inferred_fn_index - - + def _get_complete_fn(self, api_name: str | None, fn_index: int) -> Callable: if api_name is not None: - fn_index = self._infer_fn_index(api_name) - + fn_index = self._infer_fn_index(api_name) + predict_fn = self._get_predict_fn(fn_index) serialize_fn = 
self._get_serialize_fn(fn_index) deserialize_fn = self._get_deserialize_fn(fn_index) - - return lambda *args: deserialize_fn(*predict_fn(*serialize_fn(*args))) + + return lambda *args: deserialize_fn(*predict_fn(*serialize_fn(*args))) def _use_websocket(self, dependency: Dict) -> bool: queue_enabled = self.config.get("enable_queue", False) @@ -121,7 +132,7 @@ def _use_websocket(self, dependency: Dict) -> bool: ) >= version.Version("3.2") dependency_uses_queue = dependency.get("queue", False) is not False return queue_enabled and queue_uses_websocket and dependency_uses_queue - + async def _ws_fn(self, data, hash_data): async with websockets.connect( # type: ignore self.ws_url, open_timeout=10, extra_headers=self.headers @@ -130,12 +141,13 @@ async def _ws_fn(self, data, hash_data): def _get_predict_fn(self, fn_index: int) -> Callable: return self.predict_fns[fn_index] - - def _setup_predict_fns(self) -> List[Callable]: + + def _setup_predict_fns(self) -> List[Callable]: def create_fn(fn_index, dependency: Dict) -> Callable: if not dependency["backend_fn"]: return lambda *args: args use_ws = self._use_websocket(dependency) + def predict_fn(*data): if not dependency["backend_fn"]: return None @@ -147,7 +159,9 @@ def predict_fn(*data): result = utils.synchronize_async(self._ws_fn, data, hash_data) output = result["data"] else: - response = requests.post(self.api_url, headers=self.headers, data=data) + response = requests.post( + self.api_url, headers=self.headers, data=data + ) result = json.loads(response.content.decode("utf-8")) try: output = result["data"] @@ -160,8 +174,9 @@ def predict_fn(*data): f"Could not find 'data' key in response. 
Response received: {result}" ) return tuple(output) - return predict_fn - + + return predict_fn + fns = [] for fn_index, dependency in enumerate(self.config["dependencies"]): fns.append(create_fn(fn_index, dependency)) @@ -169,68 +184,82 @@ def predict_fn(*data): def _get_serialize_fn(self, fn_index: int) -> Callable: return self.serialize_fns[fn_index] - + def _setup_serialize_fn(self) -> List[Callable]: def create_fn(dependency: Dict) -> Callable: if not dependency["backend_fn"]: return lambda *args: args inputs = dependency["inputs"] serializers = [] - + for i in inputs: for component in self.config["components"]: if component["id"] == i: if component.get("serializer"): serializer_name = component["serializer"] - assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + assert ( + serializer_name in serializing.SERIALIZER_MAPPING + ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." serializer = serializing.SERIALIZER_MAPPING[serializer_name] else: component_name = component["type"] - assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." + assert ( + component_name in serializing.COMPONENT_MAPPING + ), f"Unknown component: {component_name}, you may need to update your gradio_client version." 
serializer = serializing.COMPONENT_MAPPING[component_name] serializers.append(serializer()) # type: ignore - + def serialize_fn(*data): - assert len(data) == len(serializers), f"Expected {len(serializers)} arguments, got {len(data)}" + assert len(data) == len( + serializers + ), f"Expected {len(serializers)} arguments, got {len(data)}" return [s.serialize(d) for s, d in zip(serializers, data)] + return serialize_fn - + fns = [] for dependency in self.config["dependencies"]: fns.append(create_fn(dependency)) return fns - + def _get_deserialize_fn(self, fn_index: int) -> Callable: return self.deserialize_fns[fn_index] - + def _setup_deserialize_fn(self) -> List[Callable]: def create_fn(dependency: Dict) -> Callable: if not dependency["backend_fn"]: return lambda *args: args - + outputs = dependency["outputs"] deserializers = [] - + for i in outputs: for component in self.config["components"]: if component["id"] == i: if component.get("serializer"): serializer_name = component["serializer"] - assert serializer_name in serializing.SERIALIZER_MAPPING, f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." - deserializer = serializing.SERIALIZER_MAPPING[serializer_name] + assert ( + serializer_name in serializing.SERIALIZER_MAPPING + ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + deserializer = serializing.SERIALIZER_MAPPING[ + serializer_name + ] else: component_name = component["type"] - assert component_name in serializing.COMPONENT_MAPPING, f"Unknown component: {component_name}, you may need to update your gradio_client version." + assert ( + component_name in serializing.COMPONENT_MAPPING + ), f"Unknown component: {component_name}, you may need to update your gradio_client version." 
deserializer = serializing.COMPONENT_MAPPING[component_name] deserializers.append(deserializer()) # type: ignore - + def deserialize_fn(*data): result = [s.deserialize(d) for s, d in zip(deserializers, data)] if len(outputs) == 1: result = result[0] return result + return deserialize_fn - + fns = [] for dependency in self.config["dependencies"]: fns.append(create_fn(dependency)) @@ -268,23 +297,21 @@ def _get_config(self) -> Dict: class Job(Future): """A Job is a thin wrapper over the Future class that can be cancelled.""" + def __init__(self, future: Future): self.future = future - + def __getattr__(self, name): """Forwards any properties to the Future class.""" return getattr(self.future, name) - + def cancel(self) -> bool: """Cancels the job.""" if self.future.cancelled() or self.future.done(): pass return False elif self.future.running(): - pass # TODO: Handle this case + pass # TODO: Handle this case return True else: return self.future.cancel() - - - \ No newline at end of file diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index d8f01ac83a6c5..7e2844fea7a36 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -214,33 +214,31 @@ def deserialize( SERIALIZER_MAPPING = {cls.__name__: cls for cls in Serializable.__subclasses__()} COMPONENT_MAPPING = { - 'textbox': SimpleSerializable, - 'number': SimpleSerializable, - 'slider': SimpleSerializable, - 'checkbox': SimpleSerializable, - 'checkboxgroup': SimpleSerializable, - 'radio': SimpleSerializable, - 'dropdown': SimpleSerializable, - 'image': ImgSerializable, - 'video': FileSerializable, - 'audio': FileSerializable, - 'file': FileSerializable, - 'dataframe': JSONSerializable, - 'timeseries': JSONSerializable, - 'state': SimpleSerializable, - 'button': SimpleSerializable, - 'uploadbutton': FileSerializable, - 'colorpicker': SimpleSerializable, - 'label': JSONSerializable, - 'highlightedtext': 
JSONSerializable, - 'json': JSONSerializable, - 'html': SimpleSerializable, - 'gallery': SimpleSerializable, # TODO: Make this a proper Serializable class - 'chatbot': JSONSerializable, - 'model3d': FileSerializable, - 'plot': JSONSerializable, - 'markdown': SimpleSerializable, - 'dataset': SimpleSerializable - } - - \ No newline at end of file + "textbox": SimpleSerializable, + "number": SimpleSerializable, + "slider": SimpleSerializable, + "checkbox": SimpleSerializable, + "checkboxgroup": SimpleSerializable, + "radio": SimpleSerializable, + "dropdown": SimpleSerializable, + "image": ImgSerializable, + "video": FileSerializable, + "audio": FileSerializable, + "file": FileSerializable, + "dataframe": JSONSerializable, + "timeseries": JSONSerializable, + "state": SimpleSerializable, + "button": SimpleSerializable, + "uploadbutton": FileSerializable, + "colorpicker": SimpleSerializable, + "label": JSONSerializable, + "highlightedtext": JSONSerializable, + "json": JSONSerializable, + "html": SimpleSerializable, + "gallery": SimpleSerializable, # TODO: Make this a proper Serializable class + "chatbot": JSONSerializable, + "model3d": FileSerializable, + "plot": JSONSerializable, + "markdown": SimpleSerializable, + "dataset": SimpleSerializable, +} diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index 46952315181c9..dee6161631881 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -1,21 +1,25 @@ import json -from typing import Callable, Any, Dict -from websockets.legacy.protocol import WebSocketCommonProtocol +from typing import Any, Callable, Dict import fsspec.asyn +from websockets.legacy.protocol import WebSocketCommonProtocol API_URL = "{}/api/predict/" WS_URL = "{}/queue/join" + class TooManyRequestsError(Exception): """Raised when the API returns a 429 status code.""" + pass class QueueError(Exception): """Raised when the queue is full or there is an issue adding a job to the 
queue.""" + pass + async def get_pred_from_ws( websocket: WebSocketCommonProtocol, data: str, hash_data: str ) -> Dict[str, Any]: diff --git a/gradio/components.py b/gradio/components.py index 7fd30eef4c0a1..8ad0927cf122c 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -155,6 +155,7 @@ def serialize_info(self): "output": "value", } + class IOComponent(Component): """ A base class for defining methods that all input/output components should have. @@ -315,7 +316,7 @@ def __init__( self.cleared_value = "" self.test_input = value self.type = type - + def serialize_info(self): return { "input": "(str) value", From 978c088df9d1f95a5b5ea1dbc80b5ffe5c7324c5 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 15:29:20 -0800 Subject: [PATCH 19/93] packaging --- client/python/{gradio_client => }/README.md | 0 client/python/gradio_client/version.txt | 1 + client/python/pyproject.toml | 43 +++++++++++++++++++++ client/python/requirements.txt | 4 ++ 4 files changed, 48 insertions(+) rename client/python/{gradio_client => }/README.md (100%) create mode 100644 client/python/gradio_client/version.txt create mode 100644 client/python/pyproject.toml create mode 100644 client/python/requirements.txt diff --git a/client/python/gradio_client/README.md b/client/python/README.md similarity index 100% rename from client/python/gradio_client/README.md rename to client/python/README.md diff --git a/client/python/gradio_client/version.txt b/client/python/gradio_client/version.txt new file mode 100644 index 0000000000000..8a9ecc2ea99d6 --- /dev/null +++ b/client/python/gradio_client/version.txt @@ -0,0 +1 @@ +0.0.1 \ No newline at end of file diff --git a/client/python/pyproject.toml b/client/python/pyproject.toml new file mode 100644 index 0000000000000..9c64c12f987a6 --- /dev/null +++ b/client/python/pyproject.toml @@ -0,0 +1,43 @@ +[build-system] +requires = ["hatchling", "hatch-requirements-txt", "hatch-fancy-pypi-readme>=22.5.0"] +build-backend = 
"hatchling.build" + +[project] +name = "gradio_client" +dynamic = ["version", "dependencies", "readme"] +description = "Python library for easily interacting with trained machine learning models" +license = "Apache-2.0" +requires-python = ">=3.7" +authors = [ + { name = "Abubakar Abid", email = "team@gradio.app" }, + { name = "Ali Abid", email = "team@gradio.app" }, + { name = "Ali Abdalla", email = "team@gradio.app" }, + { name = "Dawood Khan", email = "team@gradio.app" }, + { name = "Ahsen Khaliq", email = "team@gradio.app" }, + { name = "Pete Allen", email = "team@gradio.app" }, + { name = "Freddy Boulton", email = "team@gradio.app" }, +] +keywords = ["machine learning", "client", "API"] + +[project.urls] +Homepage = "https://github.com/gradio-app/gradio" + +[tool.hatch.version] +path = "gradio_client/version.txt" +pattern = "(?P<version>.+)" + +[tool.hatch.metadata.hooks.requirements_txt] +filename = "requirements.txt" + +[tool.hatch.metadata.hooks.fancy-pypi-readme] +content-type = "text/markdown" +fragments = [ + { path = "README.md" }, +] + +[tool.hatch.build.targets.sdist] +include = [ + "/gradio_client", + "/README.md", + "/requirements.txt", +] diff --git a/client/python/requirements.txt b/client/python/requirements.txt new file mode 100644 index 0000000000000..50d8962759dc7 --- /dev/null +++ b/client/python/requirements.txt @@ -0,0 +1,4 @@ +requests +websockets +packaging +fsspec \ No newline at end of file From 37593ade6574d1ec8a0321d7cdb356ae7a9e52a0 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 15:32:06 -0800 Subject: [PATCH 20/93] requirements --- client/python/gradio_client/__init__.py | 4 ++++ requirements.txt | 3 ++- 2 files changed, 6 insertions(+), 1 deletion(-) diff --git a/client/python/gradio_client/__init__.py b/client/python/gradio_client/__init__.py index 0bae3214abdce..3f51421edef3e 100644 --- a/client/python/gradio_client/__init__.py +++ b/client/python/gradio_client/__init__.py @@ -1 +1,5 @@ +import pkgutil + from
gradio_client.client import Client + +__version__ = (pkgutil.get_data(__name__, "version.txt") or b"").decode("ascii").strip() diff --git a/requirements.txt b/requirements.txt index 331e8713444b3..6754f915462a2 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,4 +22,5 @@ httpx pydantic websockets>=10.0 typing_extensions -aiofiles \ No newline at end of file +aiofiles +gradio_client \ No newline at end of file From 007ec5495f19c8734d8b32b37c0f3e9f28c9d020 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 15:34:07 -0800 Subject: [PATCH 21/93] remove changelog --- CHANGELOG.md | 1 - 1 file changed, 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index cea40c6b6581d..ab0e310420b90 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -30,7 +30,6 @@ No changes to highlight. - Persist file names of files uploaded through any Gradio component by [@abidlabs](https://github.com/abidlabs) in [PR 3412](https://github.com/gradio-app/gradio/pull/3412) - Fix markdown embedded component in docs by [@aliabd](https://github.com/aliabd) in [PR 3410](https://github.com/gradio-app/gradio/pull/3410) - Fix css issue with spaces logo by [@aliabd](https://github.com/aliabd) in [PR 3422](https://github.com/gradio-app/gradio/pull/3422) -- Adds a Python client class `Client()` that makes makes it easier to connect to hosted Gradio apps and make predictions from them, by [@abidlabs](https://github.com/abidlabs) in [PR 3300](https://github.com/gradio-app/gradio/pull/3300) ## Contributors Shoutout: From f3e0737f07b04948ff76310023046e5b8cc07caa Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 15:52:00 -0800 Subject: [PATCH 22/93] client --- client/python/gradio_client/serializing.py | 42 ++--- client/python/gradio_client/utils.py | 193 +++++++++++++++++++- gradio/components.py | 2 +- gradio/processing_utils.py | 54 ------ gradio/serializing.py | 196 --------------------- 5 files changed, 201 insertions(+), 286 deletions(-) delete mode 100644 
gradio/serializing.py diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 7e2844fea7a36..9445f1ae63443 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -4,7 +4,7 @@ from pathlib import Path from typing import Any, Dict -from gradio import processing_utils, utils +from gradio_client import utils class Serializable(ABC): @@ -35,7 +35,6 @@ def serialize(self, x: Any, load_dir: str | Path = "") -> Any: Parameters: x: Input data to serialize load_dir: Ignored - encryption_key: Ignored """ return x @@ -50,7 +49,6 @@ def deserialize( Parameters: x: Input data to deserialize save_dir: Ignored - encryption_key: Ignored root_url: Ignored """ return x @@ -68,20 +66,18 @@ def serialize( Parameters: x: String path to file to serialize load_dir: Path to directory containing x - encryption_key: Used to encrypt the file """ if x is None or x == "": return None - is_url = utils.validate_url(x) + is_url = utils.is_valid_url(x) path = x if is_url else Path(load_dir) / x - return processing_utils.encode_url_or_file_to_base64(path) + return utils.encode_url_or_file_to_base64(path) def deserialize( self, x: str | None, save_dir: str | Path | None = None, root_url: str | None = None, - access_token: str | None = None, ) -> str | None: """ Convert from serialized representation of a file (base64) to a human-friendly @@ -89,12 +85,11 @@ def deserialize( Parameters: x: Base64 representation of image to deserialize into a string filepath save_dir: Path to directory to save the deserialized image to - encryption_key: Used to decrypt the file root_url: Ignored """ if x is None or x == "": return None - file = processing_utils.decode_base64_to_file(x, dir=save_dir) + file = utils.decode_base64_to_file(x, dir=save_dir) return file.name @@ -103,7 +98,6 @@ def serialize( self, x: str | None, load_dir: str | Path = "", - encryption_key: bytes | None = None, ) -> Dict | None: """ Convert 
from human-friendly version of a file (string filepath) to a @@ -111,16 +105,13 @@ def serialize( Parameters: x: String path to file to serialize load_dir: Path to directory containing x - encryption_key: Used to encrypt the file """ if x is None or x == "": return None filename = str(Path(load_dir) / x) return { "name": filename, - "data": processing_utils.encode_url_or_file_to_base64( - filename, encryption_key=encryption_key - ), + "data": utils.encode_url_or_file_to_base64(filename), "orig_name": Path(filename).name, "is_file": False, } @@ -130,7 +121,6 @@ def deserialize( x: str | Dict | None, save_dir: Path | str | None = None, root_url: str | None = None, - access_token: str | None = None, ) -> str | None: """ Convert from serialized representation of a file (base64) to a human-friendly @@ -138,7 +128,6 @@ def deserialize( Parameters: x: Base64 representation of file to deserialize into a string filepath save_dir: Path to directory to save the deserialized file to - encryption_key: Used to decrypt the file root_url: If this component is loaded from an external Space, this is the URL of the Space """ if x is None: @@ -146,24 +135,22 @@ def deserialize( if isinstance(save_dir, Path): save_dir = str(save_dir) if isinstance(x, str): - file_name = processing_utils.decode_base64_to_file( - x, dir=save_dir, encryption_key=encryption_key - ).name + file_name = utils.decode_base64_to_file(x, dir=save_dir).name elif isinstance(x, dict): if x.get("is_file", False): if root_url is not None: - file_name = processing_utils.download_tmp_copy_of_file( + file_name = utils.download_tmp_copy_of_file( root_url + "file=" + x["name"], - access_token=access_token, + # access_token=Context.access_token, # TODO: will fix dir=save_dir, ).name else: - file_name = processing_utils.create_tmp_copy_of_file( + file_name = utils.create_tmp_copy_of_file( x["name"], dir=save_dir ).name else: - file_name = processing_utils.decode_base64_to_file( - x["data"], dir=save_dir, 
encryption_key=encryption_key + file_name = utils.decode_base64_to_file( + x["data"], dir=save_dir ).name else: raise ValueError( @@ -177,7 +164,6 @@ def serialize( self, x: str | None, load_dir: str | Path = "", - encryption_key: bytes | None = None, ) -> Dict | None: """ Convert from a a human-friendly version (string path to json file) to a @@ -185,11 +171,10 @@ def serialize( Parameters: x: String path to json file to read to get json string load_dir: Path to directory containing x - encryption_key: Ignored """ if x is None or x == "": return None - return processing_utils.file_to_json(Path(load_dir) / x) + return utils.file_to_json(Path(load_dir) / x) def deserialize( self, @@ -203,12 +188,11 @@ def deserialize( Parameters: x: Json string save_dir: Path to save the deserialized json file to - encryption_key: Ignored root_url: Ignored """ if x is None: return None - return processing_utils.dict_or_str_to_json_file(x, dir=save_dir).name + return utils.dict_or_str_to_json_file(x, dir=save_dir).name SERIALIZER_MAPPING = {cls.__name__: cls for cls in Serializable.__subclasses__()} diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index dee6161631881..df156b68f291c 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -1,6 +1,14 @@ -import json -from typing import Any, Callable, Dict +from __future__ import annotations +import json +from typing import Any, Callable, Dict, Tuple +import tempfile +from pathlib import Path +import os +import base64 +import shutil +import mimetypes +import requests import fsspec.asyn from websockets.legacy.protocol import WebSocketCommonProtocol @@ -19,6 +27,19 @@ class QueueError(Exception): pass +######################## +# Network utils +######################## + +def is_valid_url(possible_url: str) -> bool: + headers = {"User-Agent": "gradio (https://gradio.app/; team@gradio.app)"} + try: + head_request = requests.head(possible_url, headers=headers) + 
if head_request.status_code == 405: + return requests.get(possible_url, headers=headers).ok + return head_request.ok + except Exception: + return False async def get_pred_from_ws( websocket: WebSocketCommonProtocol, data: str, hash_data: str @@ -37,13 +58,173 @@ async def get_pred_from_ws( completed = resp["msg"] == "process_completed" return resp["output"] +######################## +# Data processing utils +######################## -def synchronize_async(func: Callable, *args, **kwargs) -> Any: - """ - Runs async functions in sync scopes. +def download_tmp_copy_of_file( + url_path: str, access_token: str | None = None, dir: str | None = None +) -> tempfile._TemporaryFileWrapper: + if dir is not None: + os.makedirs(dir, exist_ok=True) + headers = {"Authorization": "Bearer " + access_token} if access_token else {} + prefix = Path(url_path).stem + suffix = Path(url_path).suffix + file_obj = tempfile.NamedTemporaryFile( + delete=False, + prefix=prefix, + suffix=suffix, + dir=dir, + ) + with requests.get(url_path, headers=headers, stream=True) as r: + with open(file_obj.name, "wb") as f: + shutil.copyfileobj(r.raw, f) + return file_obj + + +def create_tmp_copy_of_file( + file_path: str, dir: str | None = None +) -> tempfile._TemporaryFileWrapper: + if dir is not None: + os.makedirs(dir, exist_ok=True) + prefix = Path(file_path).stem + suffix = Path(file_path).suffix + file_obj = tempfile.NamedTemporaryFile( + delete=False, + prefix=prefix, + suffix=suffix, + dir=dir, + ) + shutil.copy2(file_path, file_obj.name) + return file_obj + + +def get_mimetype(filename: str) -> str | None: + mimetype = mimetypes.guess_type(filename)[0] + if mimetype is not None: + mimetype = mimetype.replace("x-wav", "wav").replace("x-flac", "flac") + return mimetype + + +def get_extension(encoding: str) -> str | None: + encoding = encoding.replace("audio/wav", "audio/x-wav") + type = mimetypes.guess_type(encoding)[0] + if type == "audio/flac": # flac is not supported by mimetypes + return 
"flac" + elif type is None: + return None + extension = mimetypes.guess_extension(type) + if extension is not None and extension.startswith("."): + extension = extension[1:] + return extension + +def encode_file_to_base64(f): + with open(f, "rb") as file: + encoded_string = base64.b64encode(file.read()) + base64_str = str(encoded_string, "utf-8") + mimetype = get_mimetype(f) + return ( + "data:" + + (mimetype if mimetype is not None else "") + + ";base64," + + base64_str + ) + + +def encode_url_to_base64(url): + encoded_string = base64.b64encode(requests.get(url).content) + base64_str = str(encoded_string, "utf-8") + mimetype = get_mimetype(url) + return ( + "data:" + (mimetype if mimetype is not None else "") + ";base64," + base64_str + ) + + +def encode_url_or_file_to_base64(path: str | Path): + path = str(path) + if is_valid_url(path): + return encode_url_to_base64(path) + else: + return encode_file_to_base64(path) - Can be used in any scope. See run_coro_in_background for more details. 
+def decode_base64_to_binary(encoding) -> Tuple[bytes, str | None]: + extension = get_extension(encoding) + try: + data = encoding.split(",")[1] + except IndexError: + data = "" + return base64.b64decode(data), extension + + +def strip_invalid_filename_characters(filename: str, max_bytes: int = 200) -> str: + """Strips invalid characters from a filename and ensures that the file_length is less than `max_bytes` bytes.""" + filename = "".join([char for char in filename if char.isalnum() or char in "._- "]) + filename_len = len(filename.encode()) + if filename_len > max_bytes: + while filename_len > max_bytes: + if len(filename) == 0: + break + filename = filename[:-1] + filename_len = len(filename.encode()) + return filename + + +def decode_base64_to_file(encoding, file_path=None, dir=None, prefix=None): + if dir is not None: + os.makedirs(dir, exist_ok=True) + data, extension = decode_base64_to_binary(encoding) + if file_path is not None and prefix is None: + filename = Path(file_path).name + prefix = filename + if "." in filename: + prefix = filename[0 : filename.index(".")] + extension = filename[filename.index(".") + 1 :] + + if prefix is not None: + prefix = strip_invalid_filename_characters(prefix) + + if extension is None: + file_obj = tempfile.NamedTemporaryFile(delete=False, prefix=prefix, dir=dir) + else: + file_obj = tempfile.NamedTemporaryFile( + delete=False, + prefix=prefix, + suffix="." 
+ extension, + dir=dir, + ) + file_obj.write(data) + file_obj.flush() + return file_obj + + +def dict_or_str_to_json_file(jsn, dir=None): + if dir is not None: + os.makedirs(dir, exist_ok=True) + + file_obj = tempfile.NamedTemporaryFile( + delete=False, suffix=".json", dir=dir, mode="w+" + ) + if isinstance(jsn, str): + jsn = json.loads(jsn) + json.dump(jsn, file_obj) + file_obj.flush() + return file_obj + + +def file_to_json(file_path: str | Path) -> Dict: + with open(file_path) as f: + return json.load(f) + + +######################## +# Misc utils +######################## + +def synchronize_async(func: Callable, *args, **kwargs) -> Any: + """ + Runs async functions in sync scopes. Can be used in any scope. + Example: if inspect.iscoroutinefunction(block_fn.fn): predictions = utils.synchronize_async(block_fn.fn, *processed_input) diff --git a/gradio/components.py b/gradio/components.py index 74f190d0dcee0..5bdf7ab5353d8 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -48,7 +48,7 @@ from gradio.interpretation import NeighborInterpretable, TokenInterpretable from gradio.layouts import Column, Form, Row from gradio.processing_utils import TempFileManager -from gradio.serializing import ( +from gradio_client.serializing import ( FileSerializable, ImgSerializable, JSONSerializable, diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index a7e27a2028810..ca64b691f4941 100644 --- a/gradio/processing_utils.py +++ b/gradio/processing_utils.py @@ -299,25 +299,6 @@ def decode_base64_to_file(encoding, file_path=None, dir=None, prefix=None): return file_obj -def dict_or_str_to_json_file(jsn, dir=None): - if dir is not None: - os.makedirs(dir, exist_ok=True) - - file_obj = tempfile.NamedTemporaryFile( - delete=False, suffix=".json", dir=dir, mode="w+" - ) - if isinstance(jsn, str): - jsn = json.loads(jsn) - json.dump(jsn, file_obj) - file_obj.flush() - return file_obj - - -def file_to_json(file_path: str | Path) -> Dict: - with open(file_path) 
as f: - return json.load(f) - - class TempFileManager: """ A class that should be inherited by any Component that needs to manage temporary files. @@ -444,41 +425,6 @@ def base64_to_temp_file_if_needed( return full_temp_file_path -def download_tmp_copy_of_file( - url_path: str, access_token: str | None = None, dir: str | None = None -) -> tempfile._TemporaryFileWrapper: - if dir is not None: - os.makedirs(dir, exist_ok=True) - headers = {"Authorization": "Bearer " + access_token} if access_token else {} - prefix = Path(url_path).stem - suffix = Path(url_path).suffix - file_obj = tempfile.NamedTemporaryFile( - delete=False, - prefix=prefix, - suffix=suffix, - dir=dir, - ) - with requests.get(url_path, headers=headers, stream=True) as r: - with open(file_obj.name, "wb") as f: - shutil.copyfileobj(r.raw, f) - return file_obj - - -def create_tmp_copy_of_file( - file_path: str, dir: str | None = None -) -> tempfile._TemporaryFileWrapper: - if dir is not None: - os.makedirs(dir, exist_ok=True) - prefix = Path(file_path).stem - suffix = Path(file_path).suffix - file_obj = tempfile.NamedTemporaryFile( - delete=False, - prefix=prefix, - suffix=suffix, - dir=dir, - ) - shutil.copy2(file_path, file_obj.name) - return file_obj def _convert(image, dtype, force_copy=False, uniform=False): diff --git a/gradio/serializing.py b/gradio/serializing.py deleted file mode 100644 index 6780ef7d289a0..0000000000000 --- a/gradio/serializing.py +++ /dev/null @@ -1,196 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -from pathlib import Path -from typing import Any, Dict - -from gradio import processing_utils, utils -from gradio.context import Context - - -class Serializable(ABC): - @abstractmethod - def serialize(self, x: Any, load_dir: str | Path = ""): - """ - Convert data from human-readable format to serialized format for a browser. 
- """ - pass - - @abstractmethod - def deserialize( - self, - x: Any, - save_dir: str | Path | None = None, - root_url: str | None = None, - ): - """ - Convert data from serialized format for a browser to human-readable format. - """ - pass - - -class SimpleSerializable(Serializable): - def serialize(self, x: Any, load_dir: str | Path = "") -> Any: - """ - Convert data from human-readable format to serialized format. For SimpleSerializable components, this is a no-op. - Parameters: - x: Input data to serialize - load_dir: Ignored - """ - return x - - def deserialize( - self, - x: Any, - save_dir: str | Path | None = None, - root_url: str | None = None, - ): - """ - Convert data from serialized format to human-readable format. For SimpleSerializable components, this is a no-op. - Parameters: - x: Input data to deserialize - save_dir: Ignored - root_url: Ignored - """ - return x - - -class ImgSerializable(Serializable): - def serialize( - self, - x: str | None, - load_dir: str | Path = "", - ) -> str | None: - """ - Convert from human-friendly version of a file (string filepath) to a seralized - representation (base64). - Parameters: - x: String path to file to serialize - load_dir: Path to directory containing x - """ - if x is None or x == "": - return None - is_url = utils.validate_url(x) - path = x if is_url else Path(load_dir) / x - return processing_utils.encode_url_or_file_to_base64(path) - - def deserialize( - self, - x: str | None, - save_dir: str | Path | None = None, - root_url: str | None = None, - ) -> str | None: - """ - Convert from serialized representation of a file (base64) to a human-friendly - version (string filepath). 
Optionally, save the file to the directory specified by save_dir - Parameters: - x: Base64 representation of image to deserialize into a string filepath - save_dir: Path to directory to save the deserialized image to - root_url: Ignored - """ - if x is None or x == "": - return None - file = processing_utils.decode_base64_to_file(x, dir=save_dir) - return file.name - - -class FileSerializable(Serializable): - def serialize( - self, - x: str | None, - load_dir: str | Path = "", - ) -> Dict | None: - """ - Convert from human-friendly version of a file (string filepath) to a - seralized representation (base64) - Parameters: - x: String path to file to serialize - load_dir: Path to directory containing x - """ - if x is None or x == "": - return None - filename = str(Path(load_dir) / x) - return { - "name": filename, - "data": processing_utils.encode_url_or_file_to_base64(filename), - "orig_name": Path(filename).name, - "is_file": False, - } - - def deserialize( - self, - x: str | Dict | None, - save_dir: Path | str | None = None, - root_url: str | None = None, - ) -> str | None: - """ - Convert from serialized representation of a file (base64) to a human-friendly - version (string filepath). 
Optionally, save the file to the directory specified by `save_dir` - Parameters: - x: Base64 representation of file to deserialize into a string filepath - save_dir: Path to directory to save the deserialized file to - root_url: If this component is loaded from an external Space, this is the URL of the Space - """ - if x is None: - return None - if isinstance(save_dir, Path): - save_dir = str(save_dir) - if isinstance(x, str): - file_name = processing_utils.decode_base64_to_file(x, dir=save_dir).name - elif isinstance(x, dict): - if x.get("is_file", False): - if root_url is not None: - file_name = processing_utils.download_tmp_copy_of_file( - root_url + "file=" + x["name"], - access_token=Context.access_token, - dir=save_dir, - ).name - else: - file_name = processing_utils.create_tmp_copy_of_file( - x["name"], dir=save_dir - ).name - else: - file_name = processing_utils.decode_base64_to_file( - x["data"], dir=save_dir - ).name - else: - raise ValueError( - f"A FileSerializable component cannot only deserialize a string or a dict, not a: {type(x)}" - ) - return file_name - - -class JSONSerializable(Serializable): - def serialize( - self, - x: str | None, - load_dir: str | Path = "", - ) -> Dict | None: - """ - Convert from a a human-friendly version (string path to json file) to a - serialized representation (json string) - Parameters: - x: String path to json file to read to get json string - load_dir: Path to directory containing x - """ - if x is None or x == "": - return None - return processing_utils.file_to_json(Path(load_dir) / x) - - def deserialize( - self, - x: str | Dict, - save_dir: str | Path | None = None, - root_url: str | None = None, - ) -> str | None: - """ - Convert from serialized representation (json string) to a human-friendly - version (string path to json file). 
Optionally, save the file to the directory specified by `save_dir` - Parameters: - x: Json string - save_dir: Path to save the deserialized json file to - root_url: Ignored - """ - if x is None: - return None - return processing_utils.dict_or_str_to_json_file(x, dir=save_dir).name From d7d9499502ce519dd74b1c82cf0c816a626b6ac8 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 17:30:23 -0800 Subject: [PATCH 23/93] access token --- client/python/gradio_client/client.py | 2 +- client/python/gradio_client/serializing.py | 11 ++++++++++- gradio/blocks.py | 2 +- 3 files changed, 12 insertions(+), 3 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 4fff2b0a14229..ba549e3055109 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -253,7 +253,7 @@ def create_fn(dependency: Dict) -> Callable: deserializers.append(deserializer()) # type: ignore def deserialize_fn(*data): - result = [s.deserialize(d) for s, d in zip(deserializers, data)] + result = [s.deserialize(d, access_token=self.access_token) for s, d in zip(deserializers, data)] if len(outputs) == 1: result = result[0] return result diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 9445f1ae63443..598d452dc0a22 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -21,6 +21,7 @@ def deserialize( x: Any, save_dir: str | Path | None = None, root_url: str | None = None, + access_token: str | None = None, ): """ Convert data from serialized format for a browser to human-readable format. @@ -43,6 +44,7 @@ def deserialize( x: Any, save_dir: str | Path | None = None, root_url: str | None = None, + access_token: str | None = None, ): """ Convert data from serialized format to human-readable format. For SimpleSerializable components, this is a no-op. 
@@ -50,6 +52,7 @@ def deserialize( x: Input data to deserialize save_dir: Ignored root_url: Ignored + access_token: Ignored """ return x @@ -78,6 +81,7 @@ def deserialize( x: str | None, save_dir: str | Path | None = None, root_url: str | None = None, + access_token: str | None = None, ) -> str | None: """ Convert from serialized representation of a file (base64) to a human-friendly @@ -86,6 +90,7 @@ def deserialize( x: Base64 representation of image to deserialize into a string filepath save_dir: Path to directory to save the deserialized image to root_url: Ignored + access_token: Ignored """ if x is None or x == "": return None @@ -121,6 +126,7 @@ def deserialize( x: str | Dict | None, save_dir: Path | str | None = None, root_url: str | None = None, + access_token: str | None = None, ) -> str | None: """ Convert from serialized representation of a file (base64) to a human-friendly @@ -129,6 +135,7 @@ def deserialize( x: Base64 representation of file to deserialize into a string filepath save_dir: Path to directory to save the deserialized file to root_url: If this component is loaded from an external Space, this is the URL of the Space + access_token: If this component is loaded from an external private Space, this is the access token for the Space """ if x is None: return None @@ -141,7 +148,7 @@ def deserialize( if root_url is not None: file_name = utils.download_tmp_copy_of_file( root_url + "file=" + x["name"], - # access_token=Context.access_token, # TODO: will fix + access_token=access_token, dir=save_dir, ).name else: @@ -181,6 +188,7 @@ def deserialize( x: str | Dict, save_dir: str | Path | None = None, root_url: str | None = None, + access_token: str | None = None, ) -> str | None: """ Convert from serialized representation (json string) to a human-friendly @@ -189,6 +197,7 @@ def deserialize( x: Json string save_dir: Path to save the deserialized json file to root_url: Ignored + access_token: Ignored """ if x is None: return None diff --git 
a/gradio/blocks.py b/gradio/blocks.py index 50a595ef91fa2..58905d4a1b646 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -896,7 +896,7 @@ def deserialize_data(self, fn_index: int, outputs: List[Any]) -> List[Any]: assert isinstance( block, components.IOComponent ), f"{block.__class__} Component with id {output_id} not a valid output component." - deserialized = block.deserialize(outputs[o], root_url=block.root_url) + deserialized = block.deserialize(outputs[o], root_url=block.root_url, access_token=Context.access_token) predictions.append(deserialized) return predictions From d9166156a2cb271e38a986d62e4f738c3d5081c9 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 17:30:56 -0800 Subject: [PATCH 24/93] formatting --- client/python/gradio_client/client.py | 5 ++++- client/python/gradio_client/serializing.py | 4 +--- client/python/gradio_client/utils.py | 22 +++++++++++++++------- gradio/blocks.py | 4 +++- gradio/components.py | 14 +++++++------- gradio/processing_utils.py | 2 -- 6 files changed, 30 insertions(+), 21 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index ba549e3055109..ae9c557fabc44 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -253,7 +253,10 @@ def create_fn(dependency: Dict) -> Callable: deserializers.append(deserializer()) # type: ignore def deserialize_fn(*data): - result = [s.deserialize(d, access_token=self.access_token) for s, d in zip(deserializers, data)] + result = [ + s.deserialize(d, access_token=self.access_token) + for s, d in zip(deserializers, data) + ] if len(outputs) == 1: result = result[0] return result diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 598d452dc0a22..1a0e815d5990f 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -156,9 +156,7 @@ def deserialize( x["name"], dir=save_dir 
).name else: - file_name = utils.decode_base64_to_file( - x["data"], dir=save_dir - ).name + file_name = utils.decode_base64_to_file(x["data"], dir=save_dir).name else: raise ValueError( f"A FileSerializable component cannot only deserialize a string or a dict, not a: {type(x)}" diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index df156b68f291c..544ec33b404f7 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -1,15 +1,16 @@ from __future__ import annotations +import base64 import json -from typing import Any, Callable, Dict, Tuple -import tempfile -from pathlib import Path +import mimetypes import os -import base64 import shutil -import mimetypes -import requests +import tempfile +from pathlib import Path +from typing import Any, Callable, Dict, Tuple + import fsspec.asyn +import requests from websockets.legacy.protocol import WebSocketCommonProtocol API_URL = "{}/api/predict/" @@ -27,10 +28,12 @@ class QueueError(Exception): pass + ######################## # Network utils ######################## + def is_valid_url(possible_url: str) -> bool: headers = {"User-Agent": "gradio (https://gradio.app/; team@gradio.app)"} try: @@ -41,6 +44,7 @@ def is_valid_url(possible_url: str) -> bool: except Exception: return False + async def get_pred_from_ws( websocket: WebSocketCommonProtocol, data: str, hash_data: str ) -> Dict[str, Any]: @@ -58,10 +62,12 @@ async def get_pred_from_ws( completed = resp["msg"] == "process_completed" return resp["output"] + ######################## # Data processing utils ######################## + def download_tmp_copy_of_file( url_path: str, access_token: str | None = None, dir: str | None = None ) -> tempfile._TemporaryFileWrapper: @@ -118,6 +124,7 @@ def get_extension(encoding: str) -> str | None: extension = extension[1:] return extension + def encode_file_to_base64(f): with open(f, "rb") as file: encoded_string = base64.b64encode(file.read()) @@ -221,10 
+228,11 @@ def file_to_json(file_path: str | Path) -> Dict: # Misc utils ######################## + def synchronize_async(func: Callable, *args, **kwargs) -> Any: """ Runs async functions in sync scopes. Can be used in any scope. - + Example: if inspect.iscoroutinefunction(block_fn.fn): predictions = utils.synchronize_async(block_fn.fn, *processed_input) diff --git a/gradio/blocks.py b/gradio/blocks.py index 58905d4a1b646..da8a49d5ffa94 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -896,7 +896,9 @@ def deserialize_data(self, fn_index: int, outputs: List[Any]) -> List[Any]: assert isinstance( block, components.IOComponent ), f"{block.__class__} Component with id {output_id} not a valid output component." - deserialized = block.deserialize(outputs[o], root_url=block.root_url, access_token=Context.access_token) + deserialized = block.deserialize( + outputs[o], root_url=block.root_url, access_token=Context.access_token + ) predictions.append(deserialized) return predictions diff --git a/gradio/components.py b/gradio/components.py index 5bdf7ab5353d8..44863b9a20f90 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -25,6 +25,13 @@ import PIL import PIL.ImageOps from ffmpy import FFmpeg +from gradio_client.serializing import ( + FileSerializable, + ImgSerializable, + JSONSerializable, + Serializable, + SimpleSerializable, +) from pandas.api.types import is_numeric_dtype from PIL import Image as _Image # using _ to minimize namespace pollution from typing_extensions import Literal @@ -48,13 +55,6 @@ from gradio.interpretation import NeighborInterpretable, TokenInterpretable from gradio.layouts import Column, Form, Row from gradio.processing_utils import TempFileManager -from gradio_client.serializing import ( - FileSerializable, - ImgSerializable, - JSONSerializable, - Serializable, - SimpleSerializable, -) if TYPE_CHECKING: from typing import TypedDict diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index 
ca64b691f4941..c153384794f27 100644 --- a/gradio/processing_utils.py +++ b/gradio/processing_utils.py @@ -425,8 +425,6 @@ def base64_to_temp_file_if_needed( return full_temp_file_path - - def _convert(image, dtype, force_copy=False, uniform=False): """ Adapted from: https://github.com/scikit-image/scikit-image/blob/main/skimage/util/dtype.py#L510-L531 From 7c5cd1d417d318abdb41c2a628c8088eb028d6f9 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 17:42:24 -0800 Subject: [PATCH 25/93] deprecate --- gradio/__init__.py | 1 + gradio/blocks.py | 1 + gradio/external.py | 28 ++++++++++++++++++++++++++++ gradio/interface.py | 1 + 4 files changed, 31 insertions(+) diff --git a/gradio/__init__.py b/gradio/__init__.py index b0633eb028456..96059ef79e2e1 100644 --- a/gradio/__init__.py +++ b/gradio/__init__.py @@ -52,6 +52,7 @@ component, ) from gradio.exceptions import Error +from gradio.external import load from gradio.flagging import ( CSVLogger, FlaggingCallback, diff --git a/gradio/blocks.py b/gradio/blocks.py index da8a49d5ffa94..9934736a9c37e 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -1177,6 +1177,7 @@ def get_time(): """ # _js: Optional frontend js method to run before running 'fn'. Input arguments for js method are values of 'inputs' and 'outputs', return should be a list of values for output components. if isinstance(self_or_cls, type): + warnings.warn("gr.Blocks.load() will be deprecated. Use gr.load() instead.") if name is None: raise ValueError( "Blocks.load() requires passing parameters as keyword arguments" diff --git a/gradio/external.py b/gradio/external.py index 8e6686470910b..ce22507b4274e 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -33,6 +33,34 @@ from gradio.interface import Interface +def load( + name: str, + src: str | None = None, + api_key: str | None = None, + alias: str | None = None, + **kwargs, +) -> Blocks: + """ + Class method that constructs an Interface from a Hugging Face repo. 
Can accept + model repos (if src is "models") or Space repos (if src is "spaces"). The input + and output components are automatically loaded from the repo. + Parameters: + name: the name of the model (e.g. "gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. "models/facebook/bart-base") + src: the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`) + api_key: optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens + alias: optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x) + Returns: + a Gradio Interface object for the given model + Example: + import gradio as gr + description = "Story generation with GPT" + examples = [["An adventurer is approached by a mysterious stranger in the tavern for a new quest."]] + demo = gr.Interface.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) + demo.launch() + """ + return load_blocks_from_repo(name=name, src=src, api_key=api_key, alias=alias, **kwargs) + + def load_blocks_from_repo( name: str, src: str | None = None, diff --git a/gradio/interface.py b/gradio/interface.py index 28321641ba005..2afa7238d6c28 100644 --- a/gradio/interface.py +++ b/gradio/interface.py @@ -96,6 +96,7 @@ def load( demo = gr.Interface.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) demo.launch() """ + warnings.warn("gr.Intrerface.load() will be deprecated. 
Use gr.load() instead.") return super().load(name=name, src=src, api_key=api_key, alias=alias, **kwargs) @classmethod From 135a3fe37e661ad8f51f733ae40d6aca425a8442 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 17:42:53 -0800 Subject: [PATCH 26/93] format backend --- gradio/external.py | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/gradio/external.py b/gradio/external.py index ce22507b4274e..57a91e7a58acf 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -37,7 +37,7 @@ def load( name: str, src: str | None = None, api_key: str | None = None, - alias: str | None = None, + alias: str | None = None, **kwargs, ) -> Blocks: """ @@ -58,7 +58,9 @@ def load( demo = gr.Interface.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) demo.launch() """ - return load_blocks_from_repo(name=name, src=src, api_key=api_key, alias=alias, **kwargs) + return load_blocks_from_repo( + name=name, src=src, api_key=api_key, alias=alias, **kwargs + ) def load_blocks_from_repo( From 7dc1d32023bdd0c025f7641431a8be677c1ab970 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 17:51:30 -0800 Subject: [PATCH 27/93] client replace --- gradio/external.py | 58 +++++----------------------------------------- 1 file changed, 6 insertions(+), 52 deletions(-) diff --git a/gradio/external.py b/gradio/external.py index 57a91e7a58acf..d94b54ea3fe64 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -5,12 +5,12 @@ import json import re -import uuid import warnings from copy import deepcopy from typing import TYPE_CHECKING, Callable, Dict import requests +from gradio_client import Client import gradio from gradio import components, utils @@ -55,7 +55,7 @@ def load( import gradio as gr description = "Story generation with GPT" examples = [["An adventurer is approached by a mysterious stranger in the tavern for a new quest."]] - demo = gr.Interface.load("models/EleutherAI/gpt-neo-1.3B", 
description=description, examples=examples) + demo = gr.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) demo.launch() """ return load_blocks_from_repo( @@ -441,58 +441,12 @@ def from_spaces( "Blocks or Interface locally. You may find this Guide helpful: " "https://gradio.app/using_blocks_like_functions/" ) - return from_spaces_blocks(config, api_key, iframe_url) + return from_spaces_blocks(space=space_name, api_key=api_key) -def from_spaces_blocks(config: Dict, api_key: str | None, iframe_url: str) -> Blocks: - api_url = "{}/api/predict/".format(iframe_url) - - headers = {"Content-Type": "application/json"} - if api_key is not None: - headers["Authorization"] = f"Bearer {api_key}" - ws_url = "{}/queue/join".format(iframe_url).replace("https", "wss") - - ws_fn = get_ws_fn(ws_url, headers) - - fns = [] - for d, dependency in enumerate(config["dependencies"]): - if dependency["backend_fn"]: - - def get_fn(outputs, fn_index, use_ws): - def fn(*data): - data = json.dumps({"data": data, "fn_index": fn_index}) - hash_data = json.dumps( - {"fn_index": fn_index, "session_hash": str(uuid.uuid4())} - ) - if use_ws: - result = utils.synchronize_async(ws_fn, data, hash_data) - output = result["data"] - else: - response = requests.post(api_url, headers=headers, data=data) - result = json.loads(response.content.decode("utf-8")) - try: - output = result["data"] - except KeyError: - if "error" in result and "429" in result["error"]: - raise TooManyRequestsError( - "Too many requests to the Hugging Face API" - ) - raise KeyError( - f"Could not find 'data' key in response from external Space. 
Response received: {result}" - ) - if len(outputs) == 1: - output = output[0] - return output - - return fn - - fn = get_fn( - deepcopy(dependency["outputs"]), d, use_websocket(config, dependency) - ) - fns.append(fn) - else: - fns.append(None) - return gradio.Blocks.from_config(config, fns, iframe_url) +def from_spaces_blocks(space: str, api_key: str | None) -> Blocks: + client = Client(space=space, access_token=api_key) + return gradio.Blocks.from_config(client.config, client.predict_fns, client.src) def from_spaces_interface( From 79760eaf6657db46ca30526e491854b1c2802825 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 18:42:47 -0800 Subject: [PATCH 28/93] updates --- gradio/components.py | 17 +++++++++-------- gradio/external.py | 1 - 2 files changed, 9 insertions(+), 9 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index 44863b9a20f90..16edf3244da1c 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -25,19 +25,13 @@ import PIL import PIL.ImageOps from ffmpy import FFmpeg -from gradio_client.serializing import ( - FileSerializable, - ImgSerializable, - JSONSerializable, - Serializable, - SimpleSerializable, -) from pandas.api.types import is_numeric_dtype from PIL import Image as _Image # using _ to minimize namespace pollution from typing_extensions import Literal from gradio import media_data, processing_utils, utils from gradio.blocks import Block, BlockContext +from gradio.context import Context from gradio.documentation import document, set_documentation_group from gradio.events import ( Blurrable, @@ -55,6 +49,13 @@ from gradio.interpretation import NeighborInterpretable, TokenInterpretable from gradio.layouts import Column, Form, Row from gradio.processing_utils import TempFileManager +from gradio_client.serializing import ( + FileSerializable, + ImgSerializable, + JSONSerializable, + Serializable, + SimpleSerializable, +) if TYPE_CHECKING: from typing import TypedDict @@ -3905,7 +3906,7 @@ def 
deserialize( else: caption = None name = FileSerializable.deserialize( - self, img_data, gallery_path, root_url=root_url + self, img_data, gallery_path, root_url=root_url, access_token=Context.access_token ) captions[name] = caption captions_file = gallery_path / "captions.json" diff --git a/gradio/external.py b/gradio/external.py index d94b54ea3fe64..24d910a50f693 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -6,7 +6,6 @@ import json import re import warnings -from copy import deepcopy from typing import TYPE_CHECKING, Callable, Dict import requests From 49011994aab6ed8f7b00c6047e018c5617d19434 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 9 Mar 2023 18:53:31 -0800 Subject: [PATCH 29/93] moving from utils --- client/python/gradio_client/client.py | 4 +-- client/python/test/test_utils.py | 2 ++ gradio/external.py | 2 -- gradio/processing_utils.py | 42 --------------------------- test/test_external.py | 20 ------------- 5 files changed, 4 insertions(+), 66 deletions(-) create mode 100644 client/python/test/test_utils.py diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index ae9c557fabc44..affb7466747ba 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -296,8 +296,8 @@ def _get_config(self) -> Dict: f"Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x." 
) return config - - + + class Job(Future): """A Job is a thin wrapper over the Future class that can be cancelled.""" diff --git a/client/python/test/test_utils.py b/client/python/test/test_utils.py new file mode 100644 index 0000000000000..a1ace313b166f --- /dev/null +++ b/client/python/test/test_utils.py @@ -0,0 +1,2 @@ +import pytest + diff --git a/gradio/external.py b/gradio/external.py index 24d910a50f693..bc1763b0278a7 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -19,11 +19,9 @@ cols_to_rows, encode_to_base64, get_tabular_examples, - get_ws_fn, postprocess_label, rows_to_cols, streamline_spaces_interface, - use_websocket, ) from gradio.processing_utils import to_binary diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index c153384794f27..dba81d29f3847 100644 --- a/gradio/processing_utils.py +++ b/gradio/processing_utils.py @@ -70,48 +70,6 @@ def encode_url_or_file_to_base64(path: str | Path): return encode_file_to_base64(path) -def get_mimetype(filename: str) -> str | None: - mimetype = mimetypes.guess_type(filename)[0] - if mimetype is not None: - mimetype = mimetype.replace("x-wav", "wav").replace("x-flac", "flac") - return mimetype - - -def get_extension(encoding: str) -> str | None: - encoding = encoding.replace("audio/wav", "audio/x-wav") - type = mimetypes.guess_type(encoding)[0] - if type == "audio/flac": # flac is not supported by mimetypes - return "flac" - elif type is None: - return None - extension = mimetypes.guess_extension(type) - if extension is not None and extension.startswith("."): - extension = extension[1:] - return extension - - -def encode_file_to_base64(f): - with open(f, "rb") as file: - encoded_string = base64.b64encode(file.read()) - base64_str = str(encoded_string, "utf-8") - mimetype = get_mimetype(f) - return ( - "data:" - + (mimetype if mimetype is not None else "") - + ";base64," - + base64_str - ) - - -def encode_url_to_base64(url): - encoded_string = 
base64.b64encode(requests.get(url).content) - base64_str = str(encoded_string, "utf-8") - mimetype = get_mimetype(url) - return ( - "data:" + (mimetype if mimetype is not None else "") + ";base64," + base64_str - ) - - def encode_plot_to_base64(plt): with BytesIO() as output_bytes: plt.savefig(output_bytes, format="png") diff --git a/test/test_external.py b/test/test_external.py index a3d80813d2888..8af334f85e4c3 100644 --- a/test/test_external.py +++ b/test/test_external.py @@ -18,7 +18,6 @@ TooManyRequestsError, cols_to_rows, get_tabular_examples, - use_websocket, ) from gradio.external_utils import get_pred_from_ws @@ -477,25 +476,6 @@ def test_can_load_tabular_model_with_different_widget_data(hypothetical_readme): check_dataset(io.config, hypothetical_readme) -@pytest.mark.parametrize( - "config, dependency, answer", - [ - ({"version": "3.3", "enable_queue": True}, {"queue": True}, True), - ({"version": "3.3", "enable_queue": False}, {"queue": None}, False), - ({"version": "3.3", "enable_queue": True}, {"queue": None}, True), - ({"version": "3.3", "enable_queue": True}, {"queue": False}, False), - ({"enable_queue": True}, {"queue": False}, False), - ({"version": "3.2", "enable_queue": False}, {"queue": None}, False), - ({"version": "3.2", "enable_queue": True}, {"queue": None}, True), - ({"version": "3.2", "enable_queue": True}, {"queue": False}, False), - ({"version": "3.1.3", "enable_queue": True}, {"queue": None}, False), - ({"version": "3.1.3", "enable_queue": False}, {"queue": True}, False), - ], -) -def test_use_websocket_after_315(config, dependency, answer): - assert use_websocket(config, dependency) == answer - - class AsyncMock(MagicMock): async def __call__(self, *args, **kwargs): return super(AsyncMock, self).__call__(*args, **kwargs) From cf672b82b2c677a13a14bc04388558bad9ed5dbc Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 11:22:00 -0800 Subject: [PATCH 30/93] remove code duplication --- gradio/processing_utils.py | 36 
------------------------------------ 1 file changed, 36 deletions(-) diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index dba81d29f3847..1ef2b777877f7 100644 --- a/gradio/processing_utils.py +++ b/gradio/processing_utils.py @@ -220,42 +220,6 @@ def convert_to_16_bit_wav(data): ################## -def decode_base64_to_binary(encoding) -> Tuple[bytes, str | None]: - extension = get_extension(encoding) - try: - data = encoding.split(",")[1] - except IndexError: - data = "" - return base64.b64decode(data), extension - - -def decode_base64_to_file(encoding, file_path=None, dir=None, prefix=None): - if dir is not None: - os.makedirs(dir, exist_ok=True) - data, extension = decode_base64_to_binary(encoding) - if file_path is not None and prefix is None: - filename = Path(file_path).name - prefix = filename - if "." in filename: - prefix = filename[0 : filename.index(".")] - extension = filename[filename.index(".") + 1 :] - - if prefix is not None: - prefix = utils.strip_invalid_filename_characters(prefix) - - if extension is None: - file_obj = tempfile.NamedTemporaryFile(delete=False, prefix=prefix, dir=dir) - else: - file_obj = tempfile.NamedTemporaryFile( - delete=False, - prefix=prefix, - suffix="." 
+ extension, - dir=dir, - ) - file_obj.write(data) - file_obj.flush() - return file_obj - class TempFileManager: """ From 862df4bac6b917eaba17689a085e9857f6031f49 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 11:38:29 -0800 Subject: [PATCH 31/93] rm duplicates --- gradio/components.py | 17 +++++++++-------- gradio/processing_utils.py | 27 +++++++++------------------ gradio/utils.py | 13 ------------- 3 files changed, 18 insertions(+), 39 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index 16edf3244da1c..ae6795e51d457 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -28,6 +28,7 @@ from pandas.api.types import is_numeric_dtype from PIL import Image as _Image # using _ to minimize namespace pollution from typing_extensions import Literal +from gradio_client import utils as client_utils from gradio import media_data, processing_utils, utils from gradio.blocks import Block, BlockContext @@ -1589,7 +1590,7 @@ def postprocess( elif isinstance(y, _Image.Image): return processing_utils.encode_pil_to_base64(y) elif isinstance(y, (str, Path)): - return processing_utils.encode_url_or_file_to_base64(y) + return client_utils.encode_url_or_file_to_base64(y) else: raise ValueError("Cannot process this value as an Image") @@ -2153,7 +2154,7 @@ def tokenize(self, x): leave_one_out_data[start:stop] = 0 file = tempfile.NamedTemporaryFile(delete=False, suffix=".wav") processing_utils.audio_to_file(sample_rate, leave_one_out_data, file.name) - out_data = processing_utils.encode_file_to_base64(file.name) + out_data = client_utils.encode_file_to_base64(file.name) leave_one_out_sets.append(out_data) file.close() Path(file.name).unlink() @@ -2164,7 +2165,7 @@ def tokenize(self, x): token[stop:] = 0 file = tempfile.NamedTemporaryFile(delete=False, suffix=".wav") processing_utils.audio_to_file(sample_rate, token, file.name) - token_data = processing_utils.encode_file_to_base64(file.name) + token_data = 
client_utils.encode_file_to_base64(file.name) file.close() Path(file.name).unlink() @@ -2195,7 +2196,7 @@ def get_masked_inputs(self, tokens, binary_mask_matrix): masked_input = masked_input + t * int(b) file = tempfile.NamedTemporaryFile(delete=False) processing_utils.audio_to_file(sample_rate, masked_input, file.name) - masked_data = processing_utils.encode_file_to_base64(file.name) + masked_data = client_utils.encode_file_to_base64(file.name) file.close() Path(file.name).unlink() masked_inputs.append(masked_data) @@ -2405,7 +2406,7 @@ def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper: file.name = temp_file_path file.orig_name = file_name # type: ignore else: - file = processing_utils.decode_base64_to_file( + file = client_utils.decode_base64_to_file( data, file_path=file_name ) file.orig_name = file_name # type: ignore @@ -2417,7 +2418,7 @@ def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper: if is_file: with open(file_name, "rb") as file_data: return file_data.read() - return processing_utils.decode_base64_to_binary(data)[0] + return client_utils.decode_base64_to_binary(data)[0] else: raise ValueError( "Unknown type: " @@ -3136,7 +3137,7 @@ def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper: file.name = temp_file_path file.orig_name = file_name # type: ignore else: - file = processing_utils.decode_base64_to_file( + file = client_utils.decode_base64_to_file( data, file_path=file_name ) file.orig_name = file_name # type: ignore @@ -3146,7 +3147,7 @@ def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper: if is_file: with open(file_name, "rb") as file_data: return file_data.read() - return processing_utils.decode_base64_to_binary(data)[0] + return client_utils.decode_base64_to_binary(data)[0] else: raise ValueError( "Unknown type: " diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index 1ef2b777877f7..b938f2ca73c65 100644 --- a/gradio/processing_utils.py +++ 
b/gradio/processing_utils.py @@ -3,8 +3,6 @@ import base64 import hashlib import json -import mimetypes -import os import secrets import shutil import subprocess @@ -13,7 +11,7 @@ import warnings from io import BytesIO from pathlib import Path -from typing import Dict, Set, Tuple +from typing import Dict, Set import aiofiles import numpy as np @@ -21,6 +19,7 @@ from fastapi import UploadFile from ffmpy import FFmpeg, FFprobe, FFRuntimeError from PIL import Image, ImageOps, PngImagePlugin +from gradio_client import utils as client_utils from gradio import utils @@ -40,7 +39,7 @@ def to_binary(x: str | Dict) -> bytes: if x.get("data"): base64str = x["data"] else: - base64str = encode_url_or_file_to_base64(x["name"]) + base64str = client_utils.encode_url_or_file_to_base64(x["name"]) else: base64str = x return base64.b64decode(base64str.split(",")[1]) @@ -62,14 +61,6 @@ def decode_base64_to_image(encoding: str) -> Image.Image: return img -def encode_url_or_file_to_base64(path: str | Path): - path = str(path) - if utils.validate_url(path): - return encode_url_to_base64(path) - else: - return encode_file_to_base64(path) - - def encode_plot_to_base64(plt): with BytesIO() as output_bytes: plt.savefig(output_bytes, format="png") @@ -267,7 +258,7 @@ def make_temp_copy_if_needed(self, file_path: str) -> str: temp_dir.mkdir(exist_ok=True, parents=True) f = tempfile.NamedTemporaryFile(delete=False, dir=temp_dir) - f.name = utils.strip_invalid_filename_characters(Path(file_path).name) + f.name = client_utils.strip_invalid_filename_characters(Path(file_path).name) full_temp_file_path = str(utils.abspath(temp_dir / f.name)) if not Path(full_temp_file_path).exists(): @@ -286,7 +277,7 @@ async def save_uploaded_file(self, file: UploadFile, upload_dir: str) -> str: if file.filename: file_name = Path(file.filename).name - output_file_obj.name = utils.strip_invalid_filename_characters(file_name) + output_file_obj.name = client_utils.strip_invalid_filename_characters(file_name) 
full_temp_file_path = str(utils.abspath(temp_dir / output_file_obj.name)) @@ -307,7 +298,7 @@ def download_temp_copy_if_needed(self, url: str) -> str: temp_dir.mkdir(exist_ok=True, parents=True) f = tempfile.NamedTemporaryFile(delete=False, dir=temp_dir) - f.name = utils.strip_invalid_filename_characters(Path(url).name) + f.name = client_utils.strip_invalid_filename_characters(Path(url).name) full_temp_file_path = str(utils.abspath(temp_dir / f.name)) if not Path(full_temp_file_path).exists(): @@ -327,9 +318,9 @@ def base64_to_temp_file_if_needed( temp_dir = Path(self.DEFAULT_TEMP_DIR) / temp_dir temp_dir.mkdir(exist_ok=True, parents=True) - guess_extension = get_extension(base64_encoding) + guess_extension = client_utils.get_extension(base64_encoding) if file_name: - file_name = utils.strip_invalid_filename_characters(file_name) + file_name = client_utils.strip_invalid_filename_characters(file_name) elif guess_extension: file_name = "file." + guess_extension else: @@ -339,7 +330,7 @@ def base64_to_temp_file_if_needed( full_temp_file_path = str(utils.abspath(temp_dir / f.name)) if not Path(full_temp_file_path).exists(): - data, _ = decode_base64_to_binary(base64_encoding) + data, _ = client_utils.decode_base64_to_binary(base64_encoding) with open(full_temp_file_path, "wb") as fb: fb.write(data) diff --git a/gradio/utils.py b/gradio/utils.py index d9b9dde1eef51..e30b210c8c013 100644 --- a/gradio/utils.py +++ b/gradio/utils.py @@ -787,19 +787,6 @@ def set_directory(path: Path | str): os.chdir(origin) -def strip_invalid_filename_characters(filename: str, max_bytes: int = 200) -> str: - """Strips invalid characters from a filename and ensures that the file_length is less than `max_bytes` bytes.""" - filename = "".join([char for char in filename if char.isalnum() or char in "._- "]) - filename_len = len(filename.encode()) - if filename_len > max_bytes: - while filename_len > max_bytes: - if len(filename) == 0: - break - filename = filename[:-1] - filename_len = 
len(filename.encode()) - return filename - - def sanitize_value_for_csv(value: str | Number) -> str | Number: """ Sanitizes a value that is being written to a CSV file to prevent CSV injection attacks. From 22cf73302a5337f85e8c8dd8630702fdec362510 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 11:48:41 -0800 Subject: [PATCH 32/93] simplify --- client/python/gradio_client/serializing.py | 10 ++-------- 1 file changed, 2 insertions(+), 8 deletions(-) diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 1a0e815d5990f..8872aa25d506b 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -9,20 +9,14 @@ class Serializable(ABC): @abstractmethod - def serialize(self, x: Any, load_dir: str | Path = ""): + def serialize(self): """ Convert data from human-readable format to serialized format for a browser. """ pass @abstractmethod - def deserialize( - self, - x: Any, - save_dir: str | Path | None = None, - root_url: str | None = None, - access_token: str | None = None, - ): + def deserialize(self): """ Convert data from serialized format for a browser to human-readable format. 
""" From f3fee63621b86bd2ab3a2a0cd188feeb1c166d9f Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 11:58:48 -0800 Subject: [PATCH 33/93] galleryserializer --- client/python/gradio_client/serializing.py | 52 +++++++++++++++++++++- gradio/components.py | 37 +-------------- 2 files changed, 51 insertions(+), 38 deletions(-) diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 8872aa25d506b..3880c576d8ae2 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -1,8 +1,11 @@ from __future__ import annotations from abc import ABC, abstractmethod +import os from pathlib import Path -from typing import Any, Dict +import json +from typing import Any, Dict, List +import uuid from gradio_client import utils @@ -196,6 +199,50 @@ def deserialize( return utils.dict_or_str_to_json_file(x, dir=save_dir).name +class GallerySerializable(Serializable): + def serialize( + self, + x: str | None, + load_dir: str | Path = "" + ) -> List[List[str]] | None: + if x is None or x == "": + return None + files = [] + captions_file = Path(x) / "captions.json" + with captions_file.open("r") as captions_json: + captions = json.load(captions_json) + for file_name, caption in captions.items(): + img = FileSerializable().serialize(file_name) + files.append([img, caption]) + return files + + def deserialize( + self, + x: Any, + save_dir: str = "", + root_url: str | None = None, + access_token: str | None = None, + ) -> None | str: + if x is None: + return None + gallery_path = Path(save_dir) / str(uuid.uuid4()) + gallery_path.mkdir(exist_ok=True, parents=True) + captions = {} + for img_data in x: + if isinstance(img_data, list) or isinstance(img_data, tuple): + img_data, caption = img_data + else: + caption = None + name = FileSerializable().deserialize( + img_data, gallery_path, root_url=root_url, access_token=access_token + ) + captions[name] = caption + captions_file = 
gallery_path / "captions.json" + with captions_file.open("w") as captions_json: + json.dump(captions, captions_json) + return os.path.abspath(gallery_path) + + SERIALIZER_MAPPING = {cls.__name__: cls for cls in Serializable.__subclasses__()} COMPONENT_MAPPING = { @@ -220,10 +267,11 @@ def deserialize( "highlightedtext": JSONSerializable, "json": JSONSerializable, "html": SimpleSerializable, - "gallery": SimpleSerializable, # TODO: Make this a proper Serializable class + "gallery": GallerySerializable, # TODO: Make this a proper Serializable class "chatbot": JSONSerializable, "model3d": FileSerializable, "plot": JSONSerializable, "markdown": SimpleSerializable, "dataset": SimpleSerializable, + "code": SimpleSerializable, } diff --git a/gradio/components.py b/gradio/components.py index ae6795e51d457..241db36a64f87 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -3759,7 +3759,7 @@ def style(self): @document("style") -class Gallery(IOComponent, TempFileManager, FileSerializable): +class Gallery(IOComponent, TempFileManager, GallerySerializable): """ Used to display a list of images as a gallery that can be scrolled through. Preprocessing: this component does *not* accept input. 
@@ -3890,41 +3890,6 @@ def style( return Component.style(self, container=container, **kwargs) - def deserialize( - self, - x: Any, - save_dir: str = "", - root_url: str | None = None, - ) -> None | str: - if x is None: - return None - gallery_path = Path(save_dir) / str(uuid.uuid4()) - gallery_path.mkdir(exist_ok=True, parents=True) - captions = {} - for img_data in x: - if isinstance(img_data, list) or isinstance(img_data, tuple): - img_data, caption = img_data - else: - caption = None - name = FileSerializable.deserialize( - self, img_data, gallery_path, root_url=root_url, access_token=Context.access_token - ) - captions[name] = caption - captions_file = gallery_path / "captions.json" - with captions_file.open("w") as captions_json: - json.dump(captions, captions_json) - return str(utils.abspath(gallery_path)) - - def serialize(self, x: Any, load_dir: str = "", called_directly: bool = False): - files = [] - captions_file = Path(x) / "captions.json" - with captions_file.open("r") as captions_json: - captions = json.load(captions_json) - for file_name, caption in captions.items(): - img = FileSerializable.serialize(self, file_name) - files.append([img, caption]) - return files - class Carousel(IOComponent, Changeable, SimpleSerializable): """ From effdc2e36bc72f0b9c0a2ce55cffbeff49924570 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 12:01:12 -0800 Subject: [PATCH 34/93] serializable --- gradio/components.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradio/components.py b/gradio/components.py index 241db36a64f87..f1936e153ef28 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -10,7 +10,6 @@ import operator import random import tempfile -import uuid import warnings from copy import deepcopy from enum import Enum @@ -56,6 +55,7 @@ JSONSerializable, Serializable, SimpleSerializable, + GallerySerializable, ) if TYPE_CHECKING: From 5b7873b854138a08e5731e48b03e230e262dc731 Mon Sep 17 00:00:00 2001 From: Abubakar 
Abid Date: Fri, 10 Mar 2023 12:18:50 -0800 Subject: [PATCH 35/93] load serializers --- gradio/blocks.py | 19 ++++++++++--------- gradio/utils.py | 29 ++++++++++++++++++++++++++++- 2 files changed, 38 insertions(+), 10 deletions(-) diff --git a/gradio/blocks.py b/gradio/blocks.py index 9934736a9c37e..a697cbab31507 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -1081,15 +1081,16 @@ def getLayout(block): config["layout"] = getLayout(self) for _id, block in self.blocks.items(): - config["components"].append( - { - "id": _id, - "type": (block.get_block_name()), - "props": utils.delete_none(block.get_config()) - if hasattr(block, "get_config") - else {}, - } - ) + props = block.get_config() if hasattr(block, "get_config") else {} + block_config = { + "id": _id, + "type": block.get_block_name(), + "props": utils.delete_none(props) + } + serializer = utils.get_serializer_name(block) + if serializer: + block_config["serializer"] = serializer + config["components"].append(block_config) config["dependencies"] = self.dependencies return config diff --git a/gradio/utils.py b/gradio/utils.py index e30b210c8c013..c8efa74d3ce69 100644 --- a/gradio/utils.py +++ b/gradio/utils.py @@ -4,6 +4,7 @@ import asyncio import copy +import functools import inspect import json import json.decoder @@ -52,7 +53,7 @@ from gradio.strings import en if TYPE_CHECKING: # Only import for type checking (is False at runtime). 
- from gradio.blocks import BlockContext + from gradio.blocks import Block, BlockContext from gradio.components import Component analytics_url = "https://api.gradio.app/" @@ -992,6 +993,32 @@ def abspath(path: str | Path) -> Path: return Path(path).resolve() +def get_serializer_name(block: Block) -> str | None: + if not hasattr(block, "serialize"): + return None + + def get_class_that_defined_method(meth: Callable): + # Adapted from: https://stackoverflow.com/a/25959545/5209347 + if isinstance(meth, functools.partial): + return get_class_that_defined_method(meth.func) + if inspect.ismethod(meth) or (inspect.isbuiltin(meth) and getattr(meth, '__self__', None) is not None and getattr(meth.__self__, '__class__', None)): + for cls in inspect.getmro(meth.__self__.__class__): + if meth.__name__ in cls.__dict__: + return cls + meth = getattr(meth, '__func__', meth) # fallback to __qualname__ parsing + if inspect.isfunction(meth): + cls = getattr(inspect.getmodule(meth), + meth.__qualname__.split('.', 1)[0].rsplit('.', 1)[0], + None) + if isinstance(cls, type): + return cls + return getattr(meth, '__objclass__', None) + + cls = get_class_that_defined_method(block.serialize) # type: ignore + if cls: + return cls.__name__ + + def get_markdown_parser() -> MarkdownIt: md = ( MarkdownIt( From 171c3b8c08b81853e0240d70266b22e6b83421e1 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 12:27:05 -0800 Subject: [PATCH 36/93] fixing errors --- client/python/gradio_client/serializing.py | 14 ++++++++++++-- 1 file changed, 12 insertions(+), 2 deletions(-) diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 3880c576d8ae2..bb17f677e75c7 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -12,14 +12,24 @@ class Serializable(ABC): @abstractmethod - def serialize(self): + def serialize( + self, + x: Any, + load_dir: str | Path = "" + ): """ Convert data from 
human-readable format to serialized format for a browser. """ pass @abstractmethod - def deserialize(self): + def deserialize( + self, + x: Any, + save_dir: str | Path | None = None, + root_url: str | None = None, + access_token: str | None = None, + ): """ Convert data from serialized format for a browser to human-readable format. """ From a718db2d57652cd905e4c9184f8ab6c115428a7e Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 12:30:05 -0800 Subject: [PATCH 37/93] errors --- gradio/components.py | 4 ++-- gradio/flagging.py | 9 +++++---- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index f1936e153ef28..47f0b4b42c0be 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -5324,7 +5324,7 @@ def as_example(self, input_data: str | None) -> str: @document("style") -class Dataset(Clickable, Component): +class Dataset(Clickable, Component, SimpleSerializable): """ Used to create an output widget for showing datasets. Used to render the examples box. @@ -5427,7 +5427,7 @@ def style(self, **kwargs): @document() -class Interpretation(Component): +class Interpretation(Component, SimpleSerializable): """ Used to create an interpretation widget for a component. Preprocessing: this component does *not* accept input. 
diff --git a/gradio/flagging.py b/gradio/flagging.py index cc19927d9812e..81f0e53ecd7ea 100644 --- a/gradio/flagging.py +++ b/gradio/flagging.py @@ -12,6 +12,7 @@ from typing import TYPE_CHECKING, Any, List import pkg_resources +from gradio_client import utils as client_utils import gradio as gr from gradio import utils @@ -139,7 +140,7 @@ def flag( csv_data = [] for component, sample in zip(self.components, flag_data): - save_dir = Path(flagging_dir) / utils.strip_invalid_filename_characters( + save_dir = Path(flagging_dir) / client_utils.strip_invalid_filename_characters( component.label or "" ) csv_data.append( @@ -205,7 +206,7 @@ def flag( csv_data = [] for idx, (component, sample) in enumerate(zip(self.components, flag_data)): - save_dir = Path(flagging_dir) / utils.strip_invalid_filename_characters( + save_dir = Path(flagging_dir) / client_utils.strip_invalid_filename_characters( getattr(component, "label", None) or f"component {idx}" ) if utils.is_update(sample): @@ -339,7 +340,7 @@ def flag( for component, sample in zip(self.components, flag_data): save_dir = Path( self.dataset_dir - ) / utils.strip_invalid_filename_characters(component.label or "") + ) / client_utils.strip_invalid_filename_characters(component.label or "") filepath = component.deserialize(sample, save_dir, None) csv_data.append(filepath) if isinstance(component, tuple(file_preview_types)): @@ -474,7 +475,7 @@ def flag( headers.append(component.label) try: - save_dir = Path(folder_name) / utils.strip_invalid_filename_characters( + save_dir = Path(folder_name) / client_utils.strip_invalid_filename_characters( component.label or "" ) filepath = component.deserialize(sample, save_dir, None) From 7c116c3acd76f9d4ffd5540b593ada36fbd50ae5 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 12:32:53 -0800 Subject: [PATCH 38/93] typing --- gradio/utils.py | 11 +++-------- 1 file changed, 3 insertions(+), 8 deletions(-) diff --git a/gradio/utils.py b/gradio/utils.py index 
c8efa74d3ce69..248414bee26ce 100644 --- a/gradio/utils.py +++ b/gradio/utils.py @@ -572,7 +572,6 @@ class AsyncRequest: You can see example usages in test_utils.py. """ - ResponseJson = NewType("ResponseJson", Json) client = httpx.AsyncClient() class Method(str, Enum): @@ -675,9 +674,7 @@ def _create_request(method: Method, url: str, **kwargs) -> httpx.Request: request = httpx.Request(method, url, **kwargs) return request - def _validate_response_data( - self, response: ResponseJson - ) -> Union[BaseModel, ResponseJson | None]: + def _validate_response_data(self, response): """ Validate response using given validation methods. If there is a validation method and response is not valid, validation functions will raise an exception for them. @@ -706,7 +703,7 @@ def _validate_response_data( return validated_response - def _validate_response_by_model(self, response: ResponseJson) -> BaseModel: + def _validate_response_by_model(self, response) -> BaseModel: """ Validate response json using the validation model. Args: @@ -719,9 +716,7 @@ def _validate_response_by_model(self, response: ResponseJson) -> BaseModel: validated_data = parse_obj_as(self._validation_model, response) return validated_data - def _validate_response_by_validation_function( - self, response: ResponseJson - ) -> ResponseJson | None: + def _validate_response_by_validation_function(self, response): """ Validate response json using the validation function. 
Args: From 250351dad10f6b963f5409f6a4e75e5e0ea3ebd6 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 12:40:40 -0800 Subject: [PATCH 39/93] tests --- client/python/test/test_utils.py | 69 ++++++++++++++++++++++++++++++++ test/test_processing_utils.py | 39 ------------------ test/test_utils.py | 20 --------- 3 files changed, 69 insertions(+), 59 deletions(-) diff --git a/client/python/test/test_utils.py b/client/python/test/test_utils.py index a1ace313b166f..9e6735ad3f05a 100644 --- a/client/python/test/test_utils.py +++ b/client/python/test/test_utils.py @@ -1,2 +1,71 @@ +from copy import deepcopy +import tempfile + import pytest +from gradio import media_data + +from gradio_client import utils + +data = { + +} + + +def test_encode_url_or_file_to_base64(self): + output_base64 = utils.encode_url_or_file_to_base64( + "gradio/test_data/test_image.png" + ) + assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + +def test_encode_file_to_base64(self): + output_base64 = utils.encode_file_to_base64( + "gradio/test_data/test_image.png" + ) + assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + +@pytest.mark.flaky +def test_encode_url_to_base64(self): + output_base64 = utils.encode_url_to_base64( + "https://raw.githubusercontent.com/gradio-app/gradio/main/gradio/test_data/test_image.png" + ) + assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + +def test_decode_base64_to_binary(self): + binary = utils.decode_base64_to_binary( + deepcopy(media_data.BASE64_IMAGE) + ) + assert deepcopy(media_data.BINARY_IMAGE) == binary + +def test_decode_base64_to_file(self): + temp_file = utils.decode_base64_to_file( + deepcopy(media_data.BASE64_IMAGE) + ) + assert isinstance(temp_file, tempfile._TemporaryFileWrapper) + + +def test_download_private_file(): + url_path = "https://gradio-tests-not-actually-private-space.hf.space/file=lion.jpg" + access_token = "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" # Intentionally revealing this key for testing 
purposes + file = utils.download_tmp_copy_of_file( + url_path=url_path, access_token=access_token + ) + assert file.name.endswith(".jpg") + + +@pytest.mark.parametrize( + "orig_filename, new_filename", + [ + ("abc", "abc"), + ("$$AAabc&3", "AAabc3"), + ("$$AAabc&3", "AAabc3"), + ("$$AAa..b-c&3_", "AAa..b-c3_"), + ("$$AAa..b-c&3_", "AAa..b-c3_"), + ( + "ゆかりです。私、こんなかわいい服は初めて着ました…。なんだかうれしくって、楽しいです。歌いたくなる気分って、初めてです。これがアイドルってことなのかもしれませんね", + "ゆかりです私こんなかわいい服は初めて着ましたなんだかうれしくって楽しいです歌いたくなる気分って初めてですこれがアイドルってことなの", + ), + ], +) +def test_strip_invalid_filename_characters(orig_filename, new_filename): + assert utils.strip_invalid_filename_characters(orig_filename) == new_filename diff --git a/test/test_processing_utils.py b/test/test_processing_utils.py index 190b6b2286c18..bc3cbf6ba8872 100644 --- a/test/test_processing_utils.py +++ b/test/test_processing_utils.py @@ -23,25 +23,6 @@ def test_decode_base64_to_image(self): ) assert isinstance(output_image, Image.Image) - def test_encode_url_or_file_to_base64(self): - output_base64 = processing_utils.encode_url_or_file_to_base64( - "gradio/test_data/test_image.png" - ) - assert output_base64 == deepcopy(media_data.BASE64_IMAGE) - - def test_encode_file_to_base64(self): - output_base64 = processing_utils.encode_file_to_base64( - "gradio/test_data/test_image.png" - ) - assert output_base64 == deepcopy(media_data.BASE64_IMAGE) - - @pytest.mark.flaky - def test_encode_url_to_base64(self): - output_base64 = processing_utils.encode_url_to_base64( - "https://raw.githubusercontent.com/gradio-app/gradio/main/gradio/test_data/test_image.png" - ) - assert output_base64 == deepcopy(media_data.BASE64_IMAGE) - def test_encode_plot_to_base64(self): plt.plot([1, 2, 3, 4]) output_base64 = processing_utils.encode_plot_to_base64(plt) @@ -216,18 +197,6 @@ def test_download_temp_copy_if_needed(self, mock_copy): class TestOutputPreprocessing: - def test_decode_base64_to_binary(self): - binary = processing_utils.decode_base64_to_binary( - 
deepcopy(media_data.BASE64_IMAGE) - ) - assert deepcopy(media_data.BINARY_IMAGE) == binary - - def test_decode_base64_to_file(self): - temp_file = processing_utils.decode_base64_to_file( - deepcopy(media_data.BASE64_IMAGE) - ) - assert isinstance(temp_file, tempfile._TemporaryFileWrapper) - float_dtype_list = [ float, float, @@ -327,11 +296,3 @@ def test_video_conversion_returns_original_video_if_fails( # If the conversion succeeded it'd be .mp4 assert Path(playable_vid).suffix == ".avi" - -def test_download_private_file(): - url_path = "https://gradio-tests-not-actually-private-space.hf.space/file=lion.jpg" - access_token = "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" # Intentionally revealing this key for testing purposes - file = processing_utils.download_tmp_copy_of_file( - url_path=url_path, access_token=access_token - ) - assert file.name.endswith(".jpg") diff --git a/test/test_utils.py b/test/test_utils.py index da67d26fadaf3..3c7b526d615b3 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -37,7 +37,6 @@ sagemaker_check, sanitize_list_for_csv, sanitize_value_for_csv, - strip_invalid_filename_characters, validate_url, version_check, ) @@ -576,25 +575,6 @@ def test_later_suffix(self): list_of_names = ["test", "test_1", "test_2", "test_3"] assert append_unique_suffix(name, list_of_names) == "test_4" - -@pytest.mark.parametrize( - "orig_filename, new_filename", - [ - ("abc", "abc"), - ("$$AAabc&3", "AAabc3"), - ("$$AAabc&3", "AAabc3"), - ("$$AAa..b-c&3_", "AAa..b-c3_"), - ("$$AAa..b-c&3_", "AAa..b-c3_"), - ( - "ゆかりです。私、こんなかわいい服は初めて着ました…。なんだかうれしくって、楽しいです。歌いたくなる気分って、初めてです。これがアイドルってことなのかもしれませんね", - "ゆかりです私こんなかわいい服は初めて着ましたなんだかうれしくって楽しいです歌いたくなる気分って初めてですこれがアイドルってことなの", - ), - ], -) -def test_strip_invalid_filename_characters(orig_filename, new_filename): - assert strip_invalid_filename_characters(orig_filename) == new_filename - - class TestAbspath: def test_abspath_no_symlink(self): resolved_path = str(abspath("../gradio/gradio/test_data/lion.jpg")) From 
8c00ed81ad6a520f6e3dc050fcb9f62b4063ca49 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Fri, 10 Mar 2023 12:46:33 -0800 Subject: [PATCH 40/93] changelog --- CHANGELOG.md | 2 ++ 1 file changed, 2 insertions(+) diff --git a/CHANGELOG.md b/CHANGELOG.md index eed1182f08d3b..275d54cafc00f 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -6,6 +6,8 @@ ![](https://user-images.githubusercontent.com/12937446/224116643-5cfb94b3-93ce-43ee-bb7b-c25c3b66e0a1.png) +- Adds a new lightweight library `gradio_client` which can be used to make predictions via API to +hosted Gradio apps. See `client/python/README.md` for more info. By [@abidlabs](https://github.com/abidlabs) in [PR 3300](https://github.com/gradio-app/gradio/pull/3300) ## Bug Fixes: - Use `huggingface_hub` to send telemetry on `interface` and `blocks`; eventually to replace segment by [@dawoodkhan82](https://github.com/dawoodkhan82) in [PR 3342](https://github.com/gradio-app/gradio/pull/3342) From 70041ff466c68b2a4307f83ca8c64c91f7ba273c Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 14 Mar 2023 13:59:55 -0700 Subject: [PATCH 41/93] lint --- client/python/README.md | 2 +- client/python/gradio_client/client.py | 4 ++-- client/python/gradio_client/serializing.py | 26 +++++++++------------- gradio/blocks.py | 2 +- gradio/components.py | 26 +++++++++------------- gradio/documentation.py | 2 +- gradio/flagging.py | 18 ++++++++++----- gradio/processing_utils.py | 7 +++--- gradio/utils.py | 22 +++++++++++------- test/test_external.py | 6 +---- test/test_processing_utils.py | 1 - test/test_utils.py | 1 + 12 files changed, 58 insertions(+), 59 deletions(-) diff --git a/client/python/README.md b/client/python/README.md index 720ce48616eb0..ec4048d5526d6 100644 --- a/client/python/README.md +++ b/client/python/README.md @@ -19,7 +19,7 @@ job.result() ## Installation -The lightweit `gradio_client` package can be installed from pip: +If you already have a recent version of `gradio`, then the `gradio_client` is included 
as a dependency. Otherwise, the lightweight `gradio_client` package can be installed from pip (or pip3) and works with Python versions 3.7 or higher: ```bash $ pip install gradio_client diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index affb7466747ba..ae9c557fabc44 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -296,8 +296,8 @@ def _get_config(self) -> Dict: f"Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x." ) return config - - + + class Job(Future): """A Job is a thin wrapper over the Future class that can be cancelled.""" diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index bb17f677e75c7..1741f1bee2140 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -1,22 +1,18 @@ from __future__ import annotations -from abc import ABC, abstractmethod +import json import os +import uuid +from abc import ABC, abstractmethod from pathlib import Path -import json from typing import Any, Dict, List -import uuid from gradio_client import utils class Serializable(ABC): @abstractmethod - def serialize( - self, - x: Any, - load_dir: str | Path = "" - ): + def serialize(self, x: Any, load_dir: str | Path = ""): """ Convert data from human-readable format to serialized format for a browser. """ @@ -24,11 +20,11 @@ def serialize( @abstractmethod def deserialize( - self, - x: Any, + self, + x: Any, save_dir: str | Path | None = None, root_url: str | None = None, - access_token: str | None = None, + access_token: str | None = None, ): """ Convert data from serialized format for a browser to human-readable format. 
@@ -211,12 +207,10 @@ def deserialize( class GallerySerializable(Serializable): def serialize( - self, - x: str | None, - load_dir: str | Path = "" + self, x: str | None, load_dir: str | Path = "" ) -> List[List[str]] | None: if x is None or x == "": - return None + return None files = [] captions_file = Path(x) / "captions.json" with captions_file.open("r") as captions_json: @@ -224,7 +218,7 @@ def serialize( for file_name, caption in captions.items(): img = FileSerializable().serialize(file_name) files.append([img, caption]) - return files + return files def deserialize( self, diff --git a/gradio/blocks.py b/gradio/blocks.py index a697cbab31507..fa8b4fafa9873 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -1085,7 +1085,7 @@ def getLayout(block): block_config = { "id": _id, "type": block.get_block_name(), - "props": utils.delete_none(props) + "props": utils.delete_none(props), } serializer = utils.get_serializer_name(block) if serializer: diff --git a/gradio/components.py b/gradio/components.py index d43bf94514a31..ec408901e7d50 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -25,10 +25,18 @@ import PIL import PIL.ImageOps from ffmpy import FFmpeg +from gradio_client import utils as client_utils +from gradio_client.serializing import ( + FileSerializable, + GallerySerializable, + ImgSerializable, + JSONSerializable, + Serializable, + SimpleSerializable, +) from pandas.api.types import is_numeric_dtype from PIL import Image as _Image # using _ to minimize namespace pollution from typing_extensions import Literal -from gradio_client import utils as client_utils from gradio import media_data, processing_utils, utils from gradio.blocks import Block, BlockContext @@ -50,14 +58,6 @@ from gradio.interpretation import NeighborInterpretable, TokenInterpretable from gradio.layouts import Column, Form, Row from gradio.processing_utils import TempFileManager -from gradio_client.serializing import ( - FileSerializable, - ImgSerializable, - 
JSONSerializable, - Serializable, - SimpleSerializable, - GallerySerializable, -) if TYPE_CHECKING: from typing import TypedDict @@ -2407,9 +2407,7 @@ def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper: file.name = temp_file_path file.orig_name = file_name # type: ignore else: - file = client_utils.decode_base64_to_file( - data, file_path=file_name - ) + file = client_utils.decode_base64_to_file(data, file_path=file_name) file.orig_name = file_name # type: ignore self.temp_files.add(str(utils.abspath(file.name))) return file @@ -3138,9 +3136,7 @@ def process_single_file(f) -> bytes | tempfile._TemporaryFileWrapper: file.name = temp_file_path file.orig_name = file_name # type: ignore else: - file = client_utils.decode_base64_to_file( - data, file_path=file_name - ) + file = client_utils.decode_base64_to_file(data, file_path=file_name) file.orig_name = file_name # type: ignore self.temp_files.add(str(utils.abspath(file.name))) return file diff --git a/gradio/documentation.py b/gradio/documentation.py index 079b7f4f7ce1b..6b2c6159a12ab 100644 --- a/gradio/documentation.py +++ b/gradio/documentation.py @@ -228,7 +228,7 @@ def generate_documentation(): "parameters": parameter_docs, "returns": return_docs, "example": examples_doc, - "override_signature": override_signature + "override_signature": override_signature, } ) documentation[mode].append(cls_documentation) diff --git a/gradio/flagging.py b/gradio/flagging.py index 81f0e53ecd7ea..b22eb1f21663c 100644 --- a/gradio/flagging.py +++ b/gradio/flagging.py @@ -140,9 +140,9 @@ def flag( csv_data = [] for component, sample in zip(self.components, flag_data): - save_dir = Path(flagging_dir) / client_utils.strip_invalid_filename_characters( - component.label or "" - ) + save_dir = Path( + flagging_dir + ) / client_utils.strip_invalid_filename_characters(component.label or "") csv_data.append( component.deserialize( sample, @@ -206,7 +206,9 @@ def flag( csv_data = [] for idx, (component, sample) in 
enumerate(zip(self.components, flag_data)): - save_dir = Path(flagging_dir) / client_utils.strip_invalid_filename_characters( + save_dir = Path( + flagging_dir + ) / client_utils.strip_invalid_filename_characters( getattr(component, "label", None) or f"component {idx}" ) if utils.is_update(sample): @@ -340,7 +342,9 @@ def flag( for component, sample in zip(self.components, flag_data): save_dir = Path( self.dataset_dir - ) / client_utils.strip_invalid_filename_characters(component.label or "") + ) / client_utils.strip_invalid_filename_characters( + component.label or "" + ) filepath = component.deserialize(sample, save_dir, None) csv_data.append(filepath) if isinstance(component, tuple(file_preview_types)): @@ -475,7 +479,9 @@ def flag( headers.append(component.label) try: - save_dir = Path(folder_name) / client_utils.strip_invalid_filename_characters( + save_dir = Path( + folder_name + ) / client_utils.strip_invalid_filename_characters( component.label or "" ) filepath = component.deserialize(sample, save_dir, None) diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index b938f2ca73c65..0f8b9bcfac433 100644 --- a/gradio/processing_utils.py +++ b/gradio/processing_utils.py @@ -18,8 +18,8 @@ import requests from fastapi import UploadFile from ffmpy import FFmpeg, FFprobe, FFRuntimeError -from PIL import Image, ImageOps, PngImagePlugin from gradio_client import utils as client_utils +from PIL import Image, ImageOps, PngImagePlugin from gradio import utils @@ -211,7 +211,6 @@ def convert_to_16_bit_wav(data): ################## - class TempFileManager: """ A class that should be inherited by any Component that needs to manage temporary files. 
@@ -277,7 +276,9 @@ async def save_uploaded_file(self, file: UploadFile, upload_dir: str) -> str: if file.filename: file_name = Path(file.filename).name - output_file_obj.name = client_utils.strip_invalid_filename_characters(file_name) + output_file_obj.name = client_utils.strip_invalid_filename_characters( + file_name + ) full_temp_file_path = str(utils.abspath(temp_dir / output_file_obj.name)) diff --git a/gradio/utils.py b/gradio/utils.py index 248414bee26ce..326271cd0f970 100644 --- a/gradio/utils.py +++ b/gradio/utils.py @@ -996,22 +996,28 @@ def get_class_that_defined_method(meth: Callable): # Adapted from: https://stackoverflow.com/a/25959545/5209347 if isinstance(meth, functools.partial): return get_class_that_defined_method(meth.func) - if inspect.ismethod(meth) or (inspect.isbuiltin(meth) and getattr(meth, '__self__', None) is not None and getattr(meth.__self__, '__class__', None)): + if inspect.ismethod(meth) or ( + inspect.isbuiltin(meth) + and getattr(meth, "__self__", None) is not None + and getattr(meth.__self__, "__class__", None) + ): for cls in inspect.getmro(meth.__self__.__class__): if meth.__name__ in cls.__dict__: return cls - meth = getattr(meth, '__func__', meth) # fallback to __qualname__ parsing + meth = getattr(meth, "__func__", meth) # fallback to __qualname__ parsing if inspect.isfunction(meth): - cls = getattr(inspect.getmodule(meth), - meth.__qualname__.split('.', 1)[0].rsplit('.', 1)[0], - None) + cls = getattr( + inspect.getmodule(meth), + meth.__qualname__.split(".", 1)[0].rsplit(".", 1)[0], + None, + ) if isinstance(cls, type): return cls - return getattr(meth, '__objclass__', None) - + return getattr(meth, "__objclass__", None) + cls = get_class_that_defined_method(block.serialize) # type: ignore if cls: - return cls.__name__ + return cls.__name__ def get_markdown_parser() -> MarkdownIt: diff --git a/test/test_external.py b/test/test_external.py index 8af334f85e4c3..496c543e2f2e4 100644 --- a/test/test_external.py +++ 
b/test/test_external.py @@ -14,11 +14,7 @@ from gradio import media_data from gradio.context import Context from gradio.exceptions import InvalidApiName -from gradio.external import ( - TooManyRequestsError, - cols_to_rows, - get_tabular_examples, -) +from gradio.external import TooManyRequestsError, cols_to_rows, get_tabular_examples from gradio.external_utils import get_pred_from_ws """ diff --git a/test/test_processing_utils.py b/test/test_processing_utils.py index bc3cbf6ba8872..94ddcc27df6d6 100644 --- a/test/test_processing_utils.py +++ b/test/test_processing_utils.py @@ -295,4 +295,3 @@ def test_video_conversion_returns_original_video_if_fails( ) # If the conversion succeeded it'd be .mp4 assert Path(playable_vid).suffix == ".avi" - diff --git a/test/test_utils.py b/test/test_utils.py index 3c7b526d615b3..7aef377cd5682 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -575,6 +575,7 @@ def test_later_suffix(self): list_of_names = ["test", "test_1", "test_2", "test_3"] assert append_unique_suffix(name, list_of_names) == "test_4" + class TestAbspath: def test_abspath_no_symlink(self): resolved_path = str(abspath("../gradio/gradio/test_data/lion.jpg")) From c8bba5085050bf92f37ea0a209f276609315ab8b Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 08:32:14 -0700 Subject: [PATCH 42/93] fix lint --- gradio/components.py | 1 - gradio/utils.py | 3 +-- scripts/lint_backend.sh | 2 +- 3 files changed, 2 insertions(+), 4 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index 88739d831ebc4..b43ccdc33a6ba 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -39,7 +39,6 @@ from gradio import media_data, processing_utils, utils from gradio.blocks import Block, BlockContext -from gradio.context import Context from gradio.documentation import document, set_documentation_group from gradio.events import ( Blurrable, diff --git a/gradio/utils.py b/gradio/utils.py index 35d0ad091f8ac..6c8ca84736d10 100644 --- 
a/gradio/utils.py +++ b/gradio/utils.py @@ -30,7 +30,6 @@ Dict, Generator, List, - NewType, Tuple, Type, TypeVar, @@ -46,7 +45,7 @@ from markdown_it import MarkdownIt from mdit_py_plugins.dollarmath.index import dollarmath_plugin from mdit_py_plugins.footnote.index import footnote_plugin -from pydantic import BaseModel, Json, parse_obj_as +from pydantic import BaseModel, parse_obj_as import gradio from gradio.context import Context diff --git a/scripts/lint_backend.sh b/scripts/lint_backend.sh index 525436b272562..f4d92f939cf5e 100644 --- a/scripts/lint_backend.sh +++ b/scripts/lint_backend.sh @@ -4,4 +4,4 @@ cd "$(dirname ${0})/.." python -m black --check gradio test client/python/gradio_client python -m isort --profile=black --check-only gradio test client/python/gradio_client -python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 gradio test client/python/gradio_client --exclude gradio/__init__.py client/python/gradio_client/__init__.py \ No newline at end of file +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 gradio test client/python/gradio_client --exclude gradio/__init__.py,client/python/gradio_client/__init__.py \ No newline at end of file From 8f4ff853b72bab2c6cc63e014138361d0f6174af Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 08:58:21 -0700 Subject: [PATCH 43/93] fixing files --- client/python/gradio_client/utils.py | 2 +- gradio/blocks.py | 3 ++- gradio/components.py | 2 +- gradio/helpers.py | 3 ++- gradio/interface.py | 4 ++-- gradio/interpretation.py | 3 ++- gradio/processing_utils.py | 8 ++++---- gradio/utils.py | 18 ------------------ scripts/type_check_backend.sh | 2 +- 9 files changed, 15 insertions(+), 30 deletions(-) diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index 544ec33b404f7..eeccf154e11f4 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -242,4 +242,4 @@ def synchronize_async(func: Callable, 
*args, **kwargs) -> Any: *args: **kwargs: """ - return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) + return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) # type: ignore diff --git a/gradio/blocks.py b/gradio/blocks.py index 2279732fa4fb1..07071a5142155 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -18,6 +18,7 @@ import requests from anyio import CapacityLimiter from typing_extensions import Literal +from gradio_client import utils as client_utils from gradio import components, external, networking, queueing, routes, strings, utils from gradio.context import Context @@ -792,7 +793,7 @@ def __call__(self, *inputs, fn_index: int = 0, api_name: str | None = None): if batch: processed_inputs = [[inp] for inp in processed_inputs] - outputs = utils.synchronize_async( + outputs = client_utils.synchronize_async( self.process_api, fn_index=fn_index, inputs=processed_inputs, diff --git a/gradio/components.py b/gradio/components.py index b43ccdc33a6ba..fc10cfd91c404 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -4089,7 +4089,7 @@ def _process_chat_messages( if chat_message is None: return None elif isinstance(chat_message, (tuple, list)): - mime_type = processing_utils.get_mimetype(chat_message[0]) + mime_type = client_utils.get_mimetype(chat_message[0]) return { "name": chat_message[0], "mime_type": mime_type, diff --git a/gradio/helpers.py b/gradio/helpers.py index 3619c40c69288..493a7fe8db910 100644 --- a/gradio/helpers.py +++ b/gradio/helpers.py @@ -19,6 +19,7 @@ import numpy as np import PIL import PIL.Image +from gradio_client import utils as client_utils from gradio import processing_utils, routes, utils from gradio.context import Context @@ -67,7 +68,7 @@ def create_examples( batch=batch, _initiated_directly=False, ) - utils.synchronize_async(examples_obj.create) + client_utils.synchronize_async(examples_obj.create) return examples_obj diff --git a/gradio/interface.py b/gradio/interface.py index 
2afa7238d6c28..7e0a21809a5ec 100644 --- a/gradio/interface.py +++ b/gradio/interface.py @@ -243,10 +243,10 @@ def __init__( self.cache_examples = False self.input_components = [ - get_component_instance(i, render=False) for i in inputs + get_component_instance(i, render=False) for i in inputs # type: ignore ] self.output_components = [ - get_component_instance(o, render=False) for o in outputs + get_component_instance(o, render=False) for o in outputs # type: ignore ] for component in self.input_components + self.output_components: diff --git a/gradio/interpretation.py b/gradio/interpretation.py index f48feb379e71b..fb4f3abfe5ca1 100644 --- a/gradio/interpretation.py +++ b/gradio/interpretation.py @@ -8,6 +8,7 @@ from typing import TYPE_CHECKING, Any, Dict, List, Tuple import numpy as np +from gradio_client import utils as client_utils from gradio import components, utils @@ -208,7 +209,7 @@ def get_masked_prediction(binary_mask): for masked_x in masked_xs: processed_masked_input = copy.deepcopy(processed_input) processed_masked_input[i] = input_component.preprocess(masked_x) - new_output = utils.synchronize_async( + new_output = client_utils.synchronize_async( interface.call_function, 0, processed_masked_input ) new_output = new_output["prediction"] diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index 0f8b9bcfac433..668ff1a2abc6b 100644 --- a/gradio/processing_utils.py +++ b/gradio/processing_utils.py @@ -321,13 +321,13 @@ def base64_to_temp_file_if_needed( guess_extension = client_utils.get_extension(base64_encoding) if file_name: - file_name = client_utils.strip_invalid_filename_characters(file_name) + file_name_ = client_utils.strip_invalid_filename_characters(file_name) elif guess_extension: - file_name = "file." + guess_extension + file_name_ = "file." 
+ guess_extension else: - file_name = "file" + file_name_ = "file" f = tempfile.NamedTemporaryFile(delete=False, dir=temp_dir) - f.name = file_name + f.name = file_name_ full_temp_file_path = str(utils.abspath(temp_dir / f.name)) if not Path(full_temp_file_path).exists(): diff --git a/gradio/utils.py b/gradio/utils.py index 6c8ca84736d10..80f3389b7dde3 100644 --- a/gradio/utils.py +++ b/gradio/utils.py @@ -498,24 +498,6 @@ def component_or_layout_class(cls_name: str) -> Type[Component] | Type[BlockCont raise ValueError(f"No such component or layout: {cls_name}") -def synchronize_async(func: Callable, *args, **kwargs) -> Any: - """ - Runs async functions in sync scopes. - - Can be used in any scope. See run_coro_in_background for more details. - - Example: - if inspect.iscoroutinefunction(block_fn.fn): - predictions = utils.synchronize_async(block_fn.fn, *processed_input) - - Args: - func: - *args: - **kwargs: - """ - return fsspec.asyn.sync(fsspec.asyn.get_loop(), func, *args, **kwargs) - - def run_coro_in_background(func: Callable, *args, **kwargs): """ Runs coroutines in background. 
diff --git a/scripts/type_check_backend.sh b/scripts/type_check_backend.sh index 0b7b4c360fb77..154032d004f3e 100644 --- a/scripts/type_check_backend.sh +++ b/scripts/type_check_backend.sh @@ -5,4 +5,4 @@ pip_required pip install --upgrade pip pip install pyright -pyright gradio/*.py +pyright gradio/*.py client/python/gradio_client/*.py From 3ab7ac862941c17b91bfef6925c99a08a7f79428 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 09:00:19 -0700 Subject: [PATCH 44/93] formatting --- gradio/blocks.py | 2 +- gradio/utils.py | 1 - requirements.txt | 15 +++++++-------- 3 files changed, 8 insertions(+), 10 deletions(-) diff --git a/gradio/blocks.py b/gradio/blocks.py index 07071a5142155..a3d20297a59e6 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -17,8 +17,8 @@ import anyio import requests from anyio import CapacityLimiter -from typing_extensions import Literal from gradio_client import utils as client_utils +from typing_extensions import Literal from gradio import components, external, networking, queueing, routes, strings, utils from gradio.context import Context diff --git a/gradio/utils.py b/gradio/utils.py index 80f3389b7dde3..5147b019e4489 100644 --- a/gradio/utils.py +++ b/gradio/utils.py @@ -37,7 +37,6 @@ ) import aiohttp -import fsspec.asyn import httpx import matplotlib.pyplot as plt import requests diff --git a/requirements.txt b/requirements.txt index 6754f915462a2..93bfd705bb5ac 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,8 +1,12 @@ +aiofiles aiohttp altair>=4.2.0 fastapi ffmpy +gradio_client +httpx huggingface_hub>=0.13.0 +Jinja2 markdown-it-py[linkify]>=2.0.0 mdit-py-plugins<=0.3.3 markupsafe @@ -11,16 +15,11 @@ numpy orjson pandas pillow +pydantic python-multipart pydub pyyaml requests -uvicorn -Jinja2 -fsspec -httpx -pydantic -websockets>=10.0 typing_extensions -aiofiles -gradio_client \ No newline at end of file +uvicorn +websockets>=10.0 \ No newline at end of file From 
1b2a1595c7c791eb9219c87e8f5ee33cb6791b72 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 09:08:06 -0700 Subject: [PATCH 45/93] type --- gradio/components.py | 2 +- gradio/interpretation.py | 4 ++-- 2 files changed, 3 insertions(+), 3 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index fc10cfd91c404..9be3e984d6f83 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -4357,7 +4357,7 @@ def postprocess(self, y) -> Dict[str, str] | None: """ if y is None: return None - if isinstance(y, (ModuleType, matplotlib.figure.Figure)): + if isinstance(y, (ModuleType, matplotlib.figure.Figure)): # type: ignore dtype = "matplotlib" out_y = processing_utils.encode_plot_to_base64(y) elif "bokeh" in y.__module__: diff --git a/gradio/interpretation.py b/gradio/interpretation.py index fb4f3abfe5ca1..5713d8fe3e895 100644 --- a/gradio/interpretation.py +++ b/gradio/interpretation.py @@ -10,7 +10,7 @@ import numpy as np from gradio_client import utils as client_utils -from gradio import components, utils +from gradio import components if TYPE_CHECKING: # Only import for type checking (is False at runtime). 
from gradio import Interface @@ -140,7 +140,7 @@ async def run_interpret(interface: Interface, raw_input: List): ( neighbor_values, interpret_kwargs, - ) = input_component.get_interpretation_neighbors(x) + ) = input_component.get_interpretation_neighbors(x) # type: ignore interface_scores = [] alternative_output = [] for neighbor_input in neighbor_values: From ad7a0482aee2a8ab38f31ae62973d28554340992 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 09:08:54 -0700 Subject: [PATCH 46/93] fix type checking --- scripts/type_check_backend.sh | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/scripts/type_check_backend.sh b/scripts/type_check_backend.sh index 154032d004f3e..9fb2826644649 100644 --- a/scripts/type_check_backend.sh +++ b/scripts/type_check_backend.sh @@ -4,5 +4,5 @@ source scripts/helpers.sh pip_required pip install --upgrade pip -pip install pyright +pip install pyright==1.1.299 pyright gradio/*.py client/python/gradio_client/*.py From e438d71aaafbf97de1f52e3cfb6e6f3b5f1f03a6 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 09:30:01 -0700 Subject: [PATCH 47/93] changelog --- CHANGELOG.md | 2 +- scripts/type_check_backend.sh | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 4621042195521..19ae2ef66fe2e 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,7 @@ No changes to highlight. ## Testing and Infrastructure Changes: -No changes to highlight. 
+- Pinned `pyright==1.1.298` for stability by [@abidlabs](https://github.com/abidlabs) in [PR 3462](https://github.com/gradio-app/gradio/pull/3462) ## Breaking Changes: diff --git a/scripts/type_check_backend.sh b/scripts/type_check_backend.sh index 0b7b4c360fb77..d0e40eeeea391 100644 --- a/scripts/type_check_backend.sh +++ b/scripts/type_check_backend.sh @@ -4,5 +4,5 @@ source scripts/helpers.sh pip_required pip install --upgrade pip -pip install pyright +pip install pyright==1.1.298 pyright gradio/*.py From 6e009eeb0a376a7fb4f6bdf40bb00c794eca11f2 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 09:32:19 -0700 Subject: [PATCH 48/93] changelog --- CHANGELOG.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index 19ae2ef66fe2e..a401abc3698e0 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -16,7 +16,7 @@ No changes to highlight. ## Testing and Infrastructure Changes: -- Pinned `pyright==1.1.298` for stability by [@abidlabs](https://github.com/abidlabs) in [PR 3462](https://github.com/gradio-app/gradio/pull/3462) +- Pinned `pyright==1.1.298` for stability by [@abidlabs](https://github.com/abidlabs) in [PR 3474](https://github.com/gradio-app/gradio/pull/3474) ## Breaking Changes: From e11132d4b8243e4493cfb71892a1e9b829d732ef Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 09:50:35 -0700 Subject: [PATCH 49/93] Update client/python/gradio_client/client.py Co-authored-by: Lucain --- client/python/gradio_client/client.py | 15 ++++----------- 1 file changed, 4 insertions(+), 11 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index ae9c557fabc44..8f1795fccb4f8 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -103,17 +103,10 @@ def pprint(self, api_name: str | None = None) -> None: ################################## def _infer_fn_index(self, api_name: str) -> int: - inferred_fn_index = 
next( - ( - i - for i, d in enumerate(self.config["dependencies"]) - if d.get("api_name") == api_name - ), - None, - ) - if inferred_fn_index is None: - raise ValueError(f"Cannot find a function with api_name: {api_name}") - return inferred_fn_index + for i, d in enumerate(self.config["dependencies"]): + if d.get("api_name") == api_name: + return i + raise ValueError(f"Cannot find a function with api_name: {api_name}") def _get_complete_fn(self, api_name: str | None, fn_index: int) -> Callable: if api_name is not None: From e612e4a24686abf30b5c447a624517eb6e3daf95 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 11:13:42 -0700 Subject: [PATCH 50/93] formatting, tests --- client/python/gradio_client/client.py | 9 +- client/python/test/test_utils.py | 10 +- gradio/components.py | 2 +- gradio/interpretation.py | 4 +- gradio/test_data/blocks_configs.py | 192 ++++++++++++++------------ test/test_blocks.py | 1 + test/test_components.py | 13 +- 7 files changed, 126 insertions(+), 105 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index ae9c557fabc44..ce06bf525fe10 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -30,9 +30,8 @@ def __init__( if space is None and src is None: raise ValueError("Either `space` or `src` must be provided") elif space and src: - raise ValueError("Only one of `space` or `src` must be provided") - self.space = space - self.src = src or self._space_name_to_src() + raise ValueError("Only one of `space` or `src` should be provided") + self.src = src or self._space_name_to_src(space) if self.src is None: raise ValueError( f"Could not find Space: {space}. If it is a private Space, please provide an access_token." 
@@ -272,10 +271,10 @@ def __del__(self): if hasattr(self, "executor"): self.executor.shutdown(wait=True) - def _space_name_to_src(self) -> str | None: + def _space_name_to_src(self, space) -> str | None: return ( requests.get( - f"https://huggingface.co/api/spaces/{self.space}/host", + f"https://huggingface.co/api/spaces/{space}/host", headers=self.headers, ) .json() diff --git a/client/python/test/test_utils.py b/client/python/test/test_utils.py index 9e6735ad3f05a..d3ad111225be3 100644 --- a/client/python/test/test_utils.py +++ b/client/python/test/test_utils.py @@ -11,32 +11,32 @@ } -def test_encode_url_or_file_to_base64(self): +def test_encode_url_or_file_to_base64(): output_base64 = utils.encode_url_or_file_to_base64( "gradio/test_data/test_image.png" ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) -def test_encode_file_to_base64(self): +def test_encode_file_to_base64(): output_base64 = utils.encode_file_to_base64( "gradio/test_data/test_image.png" ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) @pytest.mark.flaky -def test_encode_url_to_base64(self): +def test_encode_url_to_base64(): output_base64 = utils.encode_url_to_base64( "https://raw.githubusercontent.com/gradio-app/gradio/main/gradio/test_data/test_image.png" ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) -def test_decode_base64_to_binary(self): +def test_decode_base64_to_binary(): binary = utils.decode_base64_to_binary( deepcopy(media_data.BASE64_IMAGE) ) assert deepcopy(media_data.BINARY_IMAGE) == binary -def test_decode_base64_to_file(self): +def test_decode_base64_to_file(): temp_file = utils.decode_base64_to_file( deepcopy(media_data.BASE64_IMAGE) ) diff --git a/gradio/components.py b/gradio/components.py index 9be3e984d6f83..d164e2077a90b 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -5712,7 +5712,7 @@ def style(self): return self -class StatusTracker(Component): +class StatusTracker(Component, SimpleSerializable): def __init__( self, 
**kwargs, diff --git a/gradio/interpretation.py b/gradio/interpretation.py index 5713d8fe3e895..a4e64c5d2433e 100644 --- a/gradio/interpretation.py +++ b/gradio/interpretation.py @@ -140,7 +140,9 @@ async def run_interpret(interface: Interface, raw_input: List): ( neighbor_values, interpret_kwargs, - ) = input_component.get_interpretation_neighbors(x) # type: ignore + ) = input_component.get_interpretation_neighbors( + x + ) # type: ignore interface_scores = [] alternative_output = [] for neighbor_input in neighbor_values: diff --git a/gradio/test_data/blocks_configs.py b/gradio/test_data/blocks_configs.py index b1701588bd6bf..e273cd20e5571 100644 --- a/gradio/test_data/blocks_configs.py +++ b/gradio/test_data/blocks_configs.py @@ -1,11 +1,11 @@ XRAY_CONFIG = { - "version": "3.4b3\n", + "version": "3.21.0\n", "mode": "blocks", "dev_mode": True, "analytics_enabled": False, "components": [ { - "id": 27, + "id": 1, "type": "markdown", "props": { "value": "

Detect Disease From Scan

\n

With this model you can lorem ipsum

\n
    \n
  • ipsum 1
  • \n
  • ipsum 2
  • \n
\n", @@ -13,9 +13,10 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, { - "id": 28, + "id": 2, "type": "checkboxgroup", "props": { "choices": ["Covid", "Malaria", "Lung Cancer"], @@ -26,15 +27,16 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, - {"id": 29, "type": "tabs", "props": {"visible": True, "style": {}}}, + {"id": 3, "type": "tabs", "props": {"visible": True, "style": {}}}, { - "id": 30, + "id": 4, "type": "tabitem", "props": {"label": "X-ray", "visible": True, "style": {}}, }, { - "id": 31, + "id": 5, "type": "row", "props": { "type": "row", @@ -44,7 +46,7 @@ }, }, { - "id": 32, + "id": 6, "type": "image", "props": { "image_mode": "RGB", @@ -57,14 +59,16 @@ "visible": True, "style": {}, }, + "serializer": "ImgSerializable", }, { - "id": 33, + "id": 7, "type": "json", "props": {"show_label": True, "name": "json", "visible": True, "style": {}}, + "serializer": "JSONSerializable", }, { - "id": 34, + "id": 8, "type": "button", "props": { "value": "Run", @@ -74,14 +78,15 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, { - "id": 35, + "id": 9, "type": "tabitem", "props": {"label": "CT Scan", "visible": True, "style": {}}, }, { - "id": 36, + "id": 10, "type": "row", "props": { "type": "row", @@ -91,7 +96,7 @@ }, }, { - "id": 37, + "id": 11, "type": "image", "props": { "image_mode": "RGB", @@ -104,26 +109,29 @@ "visible": True, "style": {}, }, + "serializer": "ImgSerializable", }, { - "id": 38, + "id": 1212, "type": "json", "props": {"show_label": True, "name": "json", "visible": True, "style": {}}, + "serializer": "JSONSerializable", }, { - "id": 39, + "id": 13, "type": "button", "props": { "value": "Run", "variant": "secondary", - "name": "button", "interactive": True, + "name": "button", "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, { - "id": 40, + "id": 14, "type": "textbox", "props": { "lines": 1, @@ -135,14 +143,15 @@ "visible": True, "style": 
{}, }, + "serializer": "SimpleSerializable", }, { - "id": 41, + "id": 15, "type": "form", "props": {"type": "form", "visible": True, "style": {}}, }, { - "id": 42, + "id": 16, "type": "form", "props": {"type": "form", "visible": True, "style": {}}, }, @@ -151,73 +160,74 @@ "title": "Gradio", "is_space": False, "enable_queue": None, - "show_error": False, + "show_error": True, "show_api": True, + "is_colab": False, "layout": { - "id": 26, + "id": 0, "children": [ - {"id": 27}, - {"id": 41, "children": [{"id": 28}]}, + {"id": 1}, + {"id": 15, "children": [{"id": 2}]}, { - "id": 29, + "id": 3, "children": [ { - "id": 30, + "id": 4, "children": [ - {"id": 31, "children": [{"id": 32}, {"id": 33}]}, - {"id": 34}, + {"id": 5, "children": [{"id": 6}, {"id": 7}]}, + {"id": 8}, ], }, { - "id": 35, + "id": 9, "children": [ - {"id": 36, "children": [{"id": 37}, {"id": 38}]}, - {"id": 39}, + {"id": 10, "children": [{"id": 11}, {"id": 1212}]}, + {"id": 13}, ], }, ], }, - {"id": 42, "children": [{"id": 40}]}, + {"id": 16, "children": [{"id": 14}]}, ], }, "dependencies": [ { - "targets": [34], + "targets": [8], "trigger": "click", - "inputs": [28, 32], - "outputs": [33], + "inputs": [2, 6], + "outputs": [7], "backend_fn": True, "js": None, "queue": None, "api_name": None, "scroll_to_output": False, "show_progress": True, + "every": None, "batch": False, "max_batch_size": 4, "cancels": [], - "every": None, - "collects_event_data": False, "types": {"continuous": False, "generator": False}, + "collects_event_data": False, "trigger_after": None, "trigger_only_on_success": False, }, { - "targets": [39], + "targets": [13], "trigger": "click", - "inputs": [28, 37], - "outputs": [38], + "inputs": [2, 11], + "outputs": [1212], "backend_fn": True, "js": None, "queue": None, "api_name": None, "scroll_to_output": False, "show_progress": True, + "every": None, "batch": False, "max_batch_size": 4, "cancels": [], - "every": None, - "collects_event_data": False, "types": {"continuous": False, 
"generator": False}, + "collects_event_data": False, "trigger_after": None, "trigger_only_on_success": False, }, @@ -225,19 +235,19 @@ "targets": [], "trigger": "load", "inputs": [], - "outputs": [40], + "outputs": [14], "backend_fn": True, "js": None, "queue": None, "api_name": None, "scroll_to_output": False, "show_progress": True, + "every": None, "batch": False, "max_batch_size": 4, "cancels": [], - "every": None, - "collects_event_data": False, "types": {"continuous": False, "generator": False}, + "collects_event_data": False, "trigger_after": None, "trigger_only_on_success": False, }, @@ -246,13 +256,13 @@ XRAY_CONFIG_DIFF_IDS = { - "version": "3.4b3\n", + "version": "3.21.0\n", "mode": "blocks", - "analytics_enabled": False, "dev_mode": True, + "analytics_enabled": False, "components": [ { - "id": 27, + "id": 1, "type": "markdown", "props": { "value": "

Detect Disease From Scan

\n

With this model you can lorem ipsum

\n
    \n
  • ipsum 1
  • \n
  • ipsum 2
  • \n
\n", @@ -260,9 +270,10 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, { - "id": 28, + "id": 2, "type": "checkboxgroup", "props": { "choices": ["Covid", "Malaria", "Lung Cancer"], @@ -273,15 +284,16 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, - {"id": 29, "type": "tabs", "props": {"visible": True, "style": {}}}, + {"id": 3, "type": "tabs", "props": {"visible": True, "style": {}}}, { - "id": 30, + "id": 4, "type": "tabitem", "props": {"label": "X-ray", "visible": True, "style": {}}, }, { - "id": 31, + "id": 5, "type": "row", "props": { "type": "row", @@ -291,7 +303,7 @@ }, }, { - "id": 32, + "id": 6, "type": "image", "props": { "image_mode": "RGB", @@ -304,14 +316,16 @@ "visible": True, "style": {}, }, + "serializer": "ImgSerializable", }, { - "id": 33, + "id": 7, "type": "json", "props": {"show_label": True, "name": "json", "visible": True, "style": {}}, + "serializer": "JSONSerializable", }, { - "id": 34, + "id": 8, "type": "button", "props": { "value": "Run", @@ -321,14 +335,15 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, { - "id": 35, + "id": 9, "type": "tabitem", "props": {"label": "CT Scan", "visible": True, "style": {}}, }, { - "id": 36, + "id": 10, "type": "row", "props": { "type": "row", @@ -338,7 +353,7 @@ }, }, { - "id": 37, + "id": 11, "type": "image", "props": { "image_mode": "RGB", @@ -351,14 +366,16 @@ "visible": True, "style": {}, }, + "serializer": "ImgSerializable", }, { - "id": 38, + "id": 1212, "type": "json", "props": {"show_label": True, "name": "json", "visible": True, "style": {}}, + "serializer": "JSONSerializable", }, { - "id": 933, + "id": 13, "type": "button", "props": { "value": "Run", @@ -368,9 +385,10 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, { - "id": 40, + "id": 14, "type": "textbox", "props": { "lines": 1, @@ -382,14 +400,15 @@ "visible": True, "style": {}, }, + "serializer": "SimpleSerializable", }, { - 
"id": 41, + "id": 15, "type": "form", "props": {"type": "form", "visible": True, "style": {}}, }, { - "id": 42, + "id": 16, "type": "form", "props": {"type": "form", "visible": True, "style": {}}, }, @@ -398,73 +417,74 @@ "title": "Gradio", "is_space": False, "enable_queue": None, - "show_error": False, + "show_error": True, "show_api": True, + "is_colab": False, "layout": { - "id": 26, + "id": 0, "children": [ - {"id": 27}, - {"id": 41, "children": [{"id": 28}]}, + {"id": 1}, + {"id": 15, "children": [{"id": 2}]}, { - "id": 29, + "id": 3, "children": [ { - "id": 30, + "id": 4, "children": [ - {"id": 31, "children": [{"id": 32}, {"id": 33}]}, - {"id": 34}, + {"id": 5, "children": [{"id": 6}, {"id": 7}]}, + {"id": 8}, ], }, { - "id": 35, + "id": 9, "children": [ - {"id": 36, "children": [{"id": 37}, {"id": 38}]}, - {"id": 933}, + {"id": 10, "children": [{"id": 11}, {"id": 1212}]}, + {"id": 13}, ], }, ], }, - {"id": 42, "children": [{"id": 40}]}, + {"id": 16, "children": [{"id": 14}]}, ], }, "dependencies": [ { - "targets": [34], + "targets": [8], "trigger": "click", - "inputs": [28, 32], - "outputs": [33], + "inputs": [2, 6], + "outputs": [7], "backend_fn": True, "js": None, "queue": None, "api_name": None, "scroll_to_output": False, "show_progress": True, + "every": None, "batch": False, "max_batch_size": 4, "cancels": [], - "every": None, - "collects_event_data": False, "types": {"continuous": False, "generator": False}, + "collects_event_data": False, "trigger_after": None, "trigger_only_on_success": False, }, { - "targets": [933], + "targets": [13], "trigger": "click", - "inputs": [28, 37], - "outputs": [38], + "inputs": [2, 11], + "outputs": [1212], "backend_fn": True, "js": None, "queue": None, "api_name": None, "scroll_to_output": False, "show_progress": True, + "every": None, "batch": False, "max_batch_size": 4, "cancels": [], - "every": None, - "collects_event_data": False, "types": {"continuous": False, "generator": False}, + "collects_event_data": False, 
"trigger_after": None, "trigger_only_on_success": False, }, @@ -472,19 +492,19 @@ "targets": [], "trigger": "load", "inputs": [], - "outputs": [40], + "outputs": [14], "backend_fn": True, "js": None, "queue": None, "api_name": None, "scroll_to_output": False, "show_progress": True, + "every": None, "batch": False, "max_batch_size": 4, "cancels": [], - "every": None, - "collects_event_data": False, "types": {"continuous": False, "generator": False}, + "collects_event_data": False, "trigger_after": None, "trigger_only_on_success": False, }, diff --git a/test/test_blocks.py b/test/test_blocks.py index d279a6cbc595c..37584ccbec31f 100644 --- a/test/test_blocks.py +++ b/test/test_blocks.py @@ -133,6 +133,7 @@ def fake_func(): demo.load(fake_func, [], [textbox]) config = demo.get_config_file() + print("config\n", config) assert assert_configs_are_equivalent_besides_ids(XRAY_CONFIG, config) assert config["show_api"] is True _ = demo.launch(prevent_thread_lock=True, show_api=False) diff --git a/test/test_components.py b/test/test_components.py index 4f153881d0fec..bc29407e09a5f 100644 --- a/test/test_components.py +++ b/test/test_components.py @@ -22,6 +22,7 @@ import PIL import pytest import vega_datasets +from gradio_client import utils as client_utils from scipy.io import wavfile import gradio as gr @@ -676,7 +677,7 @@ def test_component_functions(self): @pytest.mark.flaky def test_serialize_url(self): img = "https://gradio.app/assets/img/header-image.jpg" - expected = processing_utils.encode_url_or_file_to_base64(img) + expected = client_utils.encode_url_or_file_to_base64(img) assert gr.Image().serialize(img) == expected def test_in_interface_as_input(self): @@ -817,7 +818,7 @@ def test_component_functions(self): gr.Audio(type="unknown") # Output functionalities - y_audio = gr.processing_utils.decode_base64_to_file( + y_audio = client_utils.decode_base64_to_file( deepcopy(media_data.BASE64_AUDIO)["data"] ) audio_output = gr.Audio(type="filepath") @@ -875,7 +876,7 @@ 
def reverse_audio(audio): iface = gr.Interface(reverse_audio, "audio", "audio") reversed_file = iface("test/test_files/audio_sample.wav") reversed_reversed_file = iface(reversed_file) - reversed_reversed_data = gr.processing_utils.encode_url_or_file_to_base64( + reversed_reversed_data = client_utils.encode_url_or_file_to_base64( reversed_reversed_file ) similarity = SequenceMatcher( @@ -1956,10 +1957,8 @@ def test_gallery(self, mock_uuid): gallery = gr.Gallery() test_file_dir = Path(Path(__file__).parent, "test_files") data = [ - gr.processing_utils.encode_file_to_base64(Path(test_file_dir, "bus.png")), - gr.processing_utils.encode_file_to_base64( - Path(test_file_dir, "cheetah1.jpg") - ), + client_utils.encode_file_to_base64(Path(test_file_dir, "bus.png")), + client_utils.encode_file_to_base64(Path(test_file_dir, "cheetah1.jpg")), ] with tempfile.TemporaryDirectory() as tmpdir: From 4fdb95691806f0dc55fb3e795012db47e671e7c4 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Wed, 15 Mar 2023 11:14:29 -0700 Subject: [PATCH 51/93] formatting, tests --- gradio/test_data/blocks_configs.py | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/gradio/test_data/blocks_configs.py b/gradio/test_data/blocks_configs.py index e273cd20e5571..d9d2df2e83079 100644 --- a/gradio/test_data/blocks_configs.py +++ b/gradio/test_data/blocks_configs.py @@ -112,7 +112,7 @@ "serializer": "ImgSerializable", }, { - "id": 1212, + "id": 12, "type": "json", "props": {"show_label": True, "name": "json", "visible": True, "style": {}}, "serializer": "JSONSerializable", @@ -181,7 +181,7 @@ { "id": 9, "children": [ - {"id": 10, "children": [{"id": 11}, {"id": 1212}]}, + {"id": 10, "children": [{"id": 11}, {"id": 12}]}, {"id": 13}, ], }, @@ -215,7 +215,7 @@ "targets": [13], "trigger": "click", "inputs": [2, 11], - "outputs": [1212], + "outputs": [12], "backend_fn": True, "js": None, "queue": None, From 131a6487c321f03b2b6f316b93150928130e28c5 Mon Sep 17 00:00:00 2001 From: Abubakar 
Abid Date: Wed, 15 Mar 2023 16:48:29 -0700 Subject: [PATCH 52/93] gr.load --- client/python/gradio_client/client.py | 1 - demo/autocomplete/run.ipynb | 2 +- demo/autocomplete/run.py | 2 +- demo/automatic-speech-recognition/run.ipynb | 2 +- demo/automatic-speech-recognition/run.py | 2 +- demo/blocks_gpt/run.ipynb | 2 +- demo/blocks_gpt/run.py | 2 +- demo/gpt_j/run.ipynb | 2 +- demo/gpt_j/run.py | 2 +- demo/gpt_j_unified/run.ipynb | 2 +- demo/gpt_j_unified/run.py | 2 +- .../image_classifier_interface_load/run.ipynb | 2 +- demo/image_classifier_interface_load/run.py | 2 +- demo/interface_parallel_load/run.ipynb | 2 +- demo/interface_parallel_load/run.py | 6 +- demo/interface_series_load/run.ipynb | 2 +- demo/interface_series_load/run.py | 4 +- demo/question-answering/run.ipynb | 2 +- demo/question-answering/run.py | 2 +- demo/stt_or_tts/run.ipynb | 2 +- demo/stt_or_tts/run.py | 4 +- gradio/blocks.py | 4 +- gradio/external.py | 2 +- gradio/interface.py | 2 +- test/test_external.py | 76 +++++++++---------- test/test_mix.py | 8 +- 26 files changed, 70 insertions(+), 71 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index ab14fec18ce5e..74389d42cc08c 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -166,7 +166,6 @@ def predict_fn(*data): f"Could not find 'data' key in response. Response received: {result}" ) return tuple(output) - return predict_fn fns = [] diff --git a/demo/autocomplete/run.ipynb b/demo/autocomplete/run.ipynb index d07b7fce83c96..51e547bf6d2a1 100644 --- a/demo/autocomplete/run.ipynb +++ b/demo/autocomplete/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: autocomplete\n", "### This text generation demo works like autocomplete. There's only one textbox and it's used for both the input and the output. 
The demo loads the model as an interface, and uses that interface as an API. It then uses blocks to create the UI. All of this is done in less than 10 lines of code.\n", " "]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import os\n", "\n", "# save your HF API token from https:/hf.co/settings/tokens as an env variable to avoid rate limiting\n", "auth_token = os.getenv(\"auth_token\")\n", "\n", "# load a model from https://hf.co/models as an interface, then use it as an api \n", "# you can remove the api_key parameter if you don't care about rate limiting. \n", "api = gr.Interface.load(\"huggingface/EleutherAI/gpt-j-6B\", api_key=auth_token)\n", "\n", "def complete_with_gpt(text):\n", " return text[:-50] + api(text[-50:])\n", "\n", "with gr.Blocks() as demo:\n", " textbox = gr.Textbox(placeholder=\"Type here...\", lines=4)\n", " btn = gr.Button(\"Autocomplete\")\n", " \n", " # define what will run when the button is clicked, here the textbox is used as both an input and an output\n", " btn.click(fn=complete_with_gpt, inputs=textbox, outputs=textbox, queue=False)\n", "\n", "demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: autocomplete\n", "### This text generation demo works like autocomplete. There's only one textbox and it's used for both the input and the output. The demo loads the model as an interface, and uses that interface as an API. It then uses blocks to create the UI. 
All of this is done in less than 10 lines of code.\n", " "]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import os\n", "\n", "# save your HF API token from https:/hf.co/settings/tokens as an env variable to avoid rate limiting\n", "auth_token = os.getenv(\"auth_token\")\n", "\n", "# load a model from https://hf.co/models as an interface, then use it as an api \n", "# you can remove the api_key parameter if you don't care about rate limiting. \n", "api = gr.load(\"huggingface/EleutherAI/gpt-j-6B\", api_key=auth_token)\n", "\n", "def complete_with_gpt(text):\n", " return text[:-50] + api(text[-50:])\n", "\n", "with gr.Blocks() as demo:\n", " textbox = gr.Textbox(placeholder=\"Type here...\", lines=4)\n", " btn = gr.Button(\"Autocomplete\")\n", " \n", " # define what will run when the button is clicked, here the textbox is used as both an input and an output\n", " btn.click(fn=complete_with_gpt, inputs=textbox, outputs=textbox, queue=False)\n", "\n", "demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/autocomplete/run.py b/demo/autocomplete/run.py index fab6b8cf46f04..172ac9cb99b34 100644 --- a/demo/autocomplete/run.py +++ b/demo/autocomplete/run.py @@ -6,7 +6,7 @@ # load a model from https://hf.co/models as an interface, then use it as an api # you can remove the api_key parameter if you don't care about rate limiting. 
-api = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B", api_key=auth_token) +api = gr.load("huggingface/EleutherAI/gpt-j-6B", api_key=auth_token) def complete_with_gpt(text): return text[:-50] + api(text[-50:]) diff --git a/demo/automatic-speech-recognition/run.ipynb b/demo/automatic-speech-recognition/run.ipynb index 2758720871cef..b41b1f186c104 100644 --- a/demo/automatic-speech-recognition/run.ipynb +++ b/demo/automatic-speech-recognition/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: automatic-speech-recognition\n", "### Automatic speech recognition English. Record from your microphone and the app will transcribe the audio.\n", " "]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import os\n", "\n", "# save your HF API token from https:/hf.co/settings/tokens as an env variable to avoid rate limiting\n", "auth_token = os.getenv(\"auth_token\")\n", "\n", "# automatically load the interface from a HF model \n", "# you can remove the api_key parameter if you don't care about rate limiting. \n", "demo = gr.Interface.load(\n", " \"huggingface/facebook/wav2vec2-base-960h\",\n", " title=\"Speech-to-text\",\n", " inputs=\"mic\",\n", " description=\"Let me try to guess what you're saying!\",\n", " api_key=auth_token\n", ")\n", "\n", "demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: automatic-speech-recognition\n", "### Automatic speech recognition English. 
Record from your microphone and the app will transcribe the audio.\n", " "]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import os\n", "\n", "# save your HF API token from https:/hf.co/settings/tokens as an env variable to avoid rate limiting\n", "auth_token = os.getenv(\"auth_token\")\n", "\n", "# automatically load the interface from a HF model \n", "# you can remove the api_key parameter if you don't care about rate limiting. \n", "demo = gr.load(\n", " \"huggingface/facebook/wav2vec2-base-960h\",\n", " title=\"Speech-to-text\",\n", " inputs=\"mic\",\n", " description=\"Let me try to guess what you're saying!\",\n", " api_key=auth_token\n", ")\n", "\n", "demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/automatic-speech-recognition/run.py b/demo/automatic-speech-recognition/run.py index b18231f4c6fca..299e458781a5c 100644 --- a/demo/automatic-speech-recognition/run.py +++ b/demo/automatic-speech-recognition/run.py @@ -6,7 +6,7 @@ # automatically load the interface from a HF model # you can remove the api_key parameter if you don't care about rate limiting. 
-demo = gr.Interface.load( +demo = gr.load( "huggingface/facebook/wav2vec2-base-960h", title="Speech-to-text", inputs="mic", diff --git a/demo/blocks_gpt/run.ipynb b/demo/blocks_gpt/run.ipynb index 032ab40b437e4..c5e7694496e1b 100644 --- a/demo/blocks_gpt/run.ipynb +++ b/demo/blocks_gpt/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: blocks_gpt"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "api = gr.Interface.load(\"huggingface/EleutherAI/gpt-j-6B\")\n", "\n", "def complete_with_gpt(text):\n", " # Use the last 50 characters of the text as context\n", " return text[:-50] + api(text[-50:])\n", "\n", "with gr.Blocks() as demo:\n", " textbox = gr.Textbox(placeholder=\"Type here and press enter...\", lines=4)\n", " btn = gr.Button(\"Generate\")\n", " \n", " btn.click(complete_with_gpt, textbox, textbox)\n", " \n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: blocks_gpt"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "api = gr.load(\"huggingface/EleutherAI/gpt-j-6B\")\n", "\n", "def complete_with_gpt(text):\n", " # Use the last 50 characters of the text as context\n", " return text[:-50] + 
api(text[-50:])\n", "\n", "with gr.Blocks() as demo:\n", " textbox = gr.Textbox(placeholder=\"Type here and press enter...\", lines=4)\n", " btn = gr.Button(\"Generate\")\n", " \n", " btn.click(complete_with_gpt, textbox, textbox)\n", " \n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/blocks_gpt/run.py b/demo/blocks_gpt/run.py index 8799cde02d2e4..3f360ce3abe8b 100644 --- a/demo/blocks_gpt/run.py +++ b/demo/blocks_gpt/run.py @@ -1,6 +1,6 @@ import gradio as gr -api = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B") +api = gr.load("huggingface/EleutherAI/gpt-j-6B") def complete_with_gpt(text): # Use the last 50 characters of the text as context diff --git a/demo/gpt_j/run.ipynb b/demo/gpt_j/run.ipynb index 06d255f9f390c..f96d85442027f 100644 --- a/demo/gpt_j/run.ipynb +++ b/demo/gpt_j/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: gpt_j"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "title = \"GPT-J-6B\"\n", "\n", "examples = [\n", " [\"The tower is 324 metres (1,063 ft) tall,\"],\n", " [\"The Moon's orbit around Earth has\"],\n", " [\"The smooth Borealis basin in the Northern Hemisphere covers 40%\"],\n", "]\n", "\n", "demo = gr.Interface.load(\n", " \"huggingface/EleutherAI/gpt-j-6B\",\n", " inputs=gr.Textbox(lines=5, max_lines=6, label=\"Input Text\"),\n", " title=title,\n", " examples=examples,\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": 
[{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: gpt_j"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "title = \"GPT-J-6B\"\n", "\n", "examples = [\n", " [\"The tower is 324 metres (1,063 ft) tall,\"],\n", " [\"The Moon's orbit around Earth has\"],\n", " [\"The smooth Borealis basin in the Northern Hemisphere covers 40%\"],\n", "]\n", "\n", "demo = gr.load(\n", " \"huggingface/EleutherAI/gpt-j-6B\",\n", " inputs=gr.Textbox(lines=5, max_lines=6, label=\"Input Text\"),\n", " title=title,\n", " examples=examples,\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/gpt_j/run.py b/demo/gpt_j/run.py index 18dbd4b107f74..29c6fa5f4206a 100644 --- a/demo/gpt_j/run.py +++ b/demo/gpt_j/run.py @@ -8,7 +8,7 @@ ["The smooth Borealis basin in the Northern Hemisphere covers 40%"], ] -demo = gr.Interface.load( +demo = gr.load( "huggingface/EleutherAI/gpt-j-6B", inputs=gr.Textbox(lines=5, max_lines=6, label="Input Text"), title=title, diff --git a/demo/gpt_j_unified/run.ipynb b/demo/gpt_j_unified/run.ipynb index d840f1d52ec7a..8732b5c942906 100644 --- a/demo/gpt_j_unified/run.ipynb +++ b/demo/gpt_j_unified/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: gpt_j_unified"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 
288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "component = gr.Textbox(lines=5, label=\"Text\")\n", "api = gr.Interface.load(\"huggingface/EleutherAI/gpt-j-6B\")\n", "\n", "demo = gr.Interface(\n", " fn=lambda x: x[:-50] + api(x[-50:]),\n", " inputs=component,\n", " outputs=component,\n", " title=\"GPT-J-6B\",\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: gpt_j_unified"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "component = gr.Textbox(lines=5, label=\"Text\")\n", "api = gr.load(\"huggingface/EleutherAI/gpt-j-6B\")\n", "\n", "demo = gr.Interface(\n", " fn=lambda x: x[:-50] + api(x[-50:]),\n", " inputs=component,\n", " outputs=component,\n", " title=\"GPT-J-6B\",\n", ")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/gpt_j_unified/run.py b/demo/gpt_j_unified/run.py index b561f89509172..30c13507c6cee 100644 --- a/demo/gpt_j_unified/run.py +++ b/demo/gpt_j_unified/run.py @@ -1,7 +1,7 @@ import gradio as gr component = gr.Textbox(lines=5, label="Text") -api = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B") +api = gr.load("huggingface/EleutherAI/gpt-j-6B") demo = gr.Interface( fn=lambda x: x[:-50] + api(x[-50:]), diff --git a/demo/image_classifier_interface_load/run.ipynb b/demo/image_classifier_interface_load/run.ipynb index ffdb026b9d844..122dfa407fde1 100644 --- 
a/demo/image_classifier_interface_load/run.ipynb +++ b/demo/image_classifier_interface_load/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: image_classifier_interface_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interface_load/cheetah1.jpeg\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interface_load/cheetah1.jpg\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interface_load/lion.jpg"]}, {"cell_type": "code", "execution_count": null, "id": 44380577570523278879349135829904343037, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import pathlib\n", "\n", "current_dir = pathlib.Path(__file__).parent\n", "\n", "images = [str(current_dir / \"cheetah1.jpeg\"), str(current_dir / \"cheetah1.jpg\"), str(current_dir / \"lion.jpg\")]\n", "\n", "\n", "img_classifier = gr.Interface.load(\n", " \"models/google/vit-base-patch16-224\", examples=images, cache_examples=False\n", ")\n", "\n", "\n", "def func(img, text):\n", " return img_classifier(img), text\n", "\n", "\n", "using_img_classifier_as_function = gr.Interface(\n", " func,\n", " [gr.Image(type=\"filepath\"), \"text\"],\n", " [\"label\", \"text\"],\n", " examples=[\n", " [str(current_dir / \"cheetah1.jpeg\"), None],\n", " [str(current_dir / \"cheetah1.jpg\"), \"cheetah\"],\n", " [str(current_dir / \"lion.jpg\"), \"lion\"],\n", " ],\n", " cache_examples=False,\n", ")\n", "demo = gr.TabbedInterface([using_img_classifier_as_function, 
img_classifier])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: image_classifier_interface_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["# Downloading files from the demo repo\n", "import os\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interface_load/cheetah1.jpeg\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interface_load/cheetah1.jpg\n", "!wget -q https://github.com/gradio-app/gradio/raw/main/demo/image_classifier_interface_load/lion.jpg"]}, {"cell_type": "code", "execution_count": null, "id": 44380577570523278879349135829904343037, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "import pathlib\n", "\n", "current_dir = pathlib.Path(__file__).parent\n", "\n", "images = [str(current_dir / \"cheetah1.jpeg\"), str(current_dir / \"cheetah1.jpg\"), str(current_dir / \"lion.jpg\")]\n", "\n", "\n", "img_classifier = gr.load(\n", " \"models/google/vit-base-patch16-224\", examples=images, cache_examples=False\n", ")\n", "\n", "\n", "def func(img, text):\n", " return img_classifier(img), text\n", "\n", "\n", "using_img_classifier_as_function = gr.Interface(\n", " func,\n", " [gr.Image(type=\"filepath\"), \"text\"],\n", " [\"label\", \"text\"],\n", " examples=[\n", " [str(current_dir / \"cheetah1.jpeg\"), None],\n", " [str(current_dir / \"cheetah1.jpg\"), \"cheetah\"],\n", " [str(current_dir / \"lion.jpg\"), \"lion\"],\n", " ],\n", " cache_examples=False,\n", ")\n", "demo = 
gr.TabbedInterface([using_img_classifier_as_function, img_classifier])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/image_classifier_interface_load/run.py b/demo/image_classifier_interface_load/run.py index b2e6b515be0da..86d8dc8bc8def 100644 --- a/demo/image_classifier_interface_load/run.py +++ b/demo/image_classifier_interface_load/run.py @@ -6,7 +6,7 @@ images = [str(current_dir / "cheetah1.jpeg"), str(current_dir / "cheetah1.jpg"), str(current_dir / "lion.jpg")] -img_classifier = gr.Interface.load( +img_classifier = gr.load( "models/google/vit-base-patch16-224", examples=images, cache_examples=False ) diff --git a/demo/interface_parallel_load/run.ipynb b/demo/interface_parallel_load/run.ipynb index e29e99ca2d23e..59f81075bbfde 100644 --- a/demo/interface_parallel_load/run.ipynb +++ b/demo/interface_parallel_load/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: interface_parallel_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "generator1 = gr.Interface.load(\"huggingface/gpt2\")\n", "generator2 = gr.Interface.load(\"huggingface/EleutherAI/gpt-neo-2.7B\")\n", "generator3 = gr.Interface.load(\"huggingface/EleutherAI/gpt-j-6B\")\n", "\n", "demo = gr.Parallel(generator1, generator2, generator3)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: 
interface_parallel_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "generator1 = gr.load(\"huggingface/gpt2\")\n", "generator2 = gr.load(\"huggingface/EleutherAI/gpt-neo-2.7B\")\n", "generator3 = gr.load(\"huggingface/EleutherAI/gpt-j-6B\")\n", "\n", "demo = gr.Parallel(generator1, generator2, generator3)\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/interface_parallel_load/run.py b/demo/interface_parallel_load/run.py index cae6397329484..2267f4c514ef1 100644 --- a/demo/interface_parallel_load/run.py +++ b/demo/interface_parallel_load/run.py @@ -1,8 +1,8 @@ import gradio as gr -generator1 = gr.Interface.load("huggingface/gpt2") -generator2 = gr.Interface.load("huggingface/EleutherAI/gpt-neo-2.7B") -generator3 = gr.Interface.load("huggingface/EleutherAI/gpt-j-6B") +generator1 = gr.load("huggingface/gpt2") +generator2 = gr.load("huggingface/EleutherAI/gpt-neo-2.7B") +generator3 = gr.load("huggingface/EleutherAI/gpt-j-6B") demo = gr.Parallel(generator1, generator2, generator3) diff --git a/demo/interface_series_load/run.ipynb b/demo/interface_series_load/run.ipynb index a2fe9eb44f713..601225736450d 100644 --- a/demo/interface_series_load/run.ipynb +++ b/demo/interface_series_load/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: interface_series_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 
288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "generator = gr.Interface.load(\"huggingface/gpt2\")\n", "translator = gr.Interface.load(\"huggingface/t5-small\")\n", "\n", "demo = gr.Series(generator, translator, description=\"This demo combines two Spaces: a text generator (`huggingface/gpt2`) and a text translator (`huggingface/t5-small`). The first Space takes a prompt as input and generates a text. The second Space takes the generated text as input and translates it into another language.\")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: interface_series_load"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "generator = gr.load(\"huggingface/gpt2\")\n", "translator = gr.load(\"huggingface/t5-small\")\n", "\n", "demo = gr.Series(generator, translator, description=\"This demo combines two Spaces: a text generator (`huggingface/gpt2`) and a text translator (`huggingface/t5-small`). The first Space takes a prompt as input and generates a text. 
The second Space takes the generated text as input and translates it into another language.\")\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/interface_series_load/run.py b/demo/interface_series_load/run.py index a3a0981d55f05..13703ccadbf81 100644 --- a/demo/interface_series_load/run.py +++ b/demo/interface_series_load/run.py @@ -1,7 +1,7 @@ import gradio as gr -generator = gr.Interface.load("huggingface/gpt2") -translator = gr.Interface.load("huggingface/t5-small") +generator = gr.load("huggingface/gpt2") +translator = gr.load("huggingface/t5-small") demo = gr.Series(generator, translator, description="This demo combines two Spaces: a text generator (`huggingface/gpt2`) and a text translator (`huggingface/t5-small`). The first Space takes a prompt as input and generates a text. The second Space takes the generated text as input and translates it into another language.") diff --git a/demo/question-answering/run.ipynb b/demo/question-answering/run.ipynb index 61c3732b70261..d634d3da8ae53 100644 --- a/demo/question-answering/run.ipynb +++ b/demo/question-answering/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: question-answering"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "context = \"The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America. 
This basin encompasses 7,000,000 square kilometres (2,700,000 sq mi), of which 5,500,000 square kilometres (2,100,000 sq mi) are covered by the rainforest. This region includes territory belonging to nine nations. The majority of the forest is contained within Brazil, with 60% of the rainforest, followed by Peru with 13%, Colombia with 10%, and with minor amounts in Venezuela, Ecuador, Bolivia, Guyana, Suriname and French Guiana. The Amazon represents over half of the planet's remaining rainforests, and comprises the largest and most biodiverse tract of tropical rainforest in the world, with an estimated 390 billion individual trees divided into 16,000 species.\"\n", "question = \"Which continent is the Amazon rainforest in?\"\n", "gr.Interface.load(\n", " \"huggingface/deepset/roberta-base-squad2\",\n", " inputs=[gr.inputs.Textbox(lines=7, default=context, label=\"Context Paragraph\"), gr.inputs.Textbox(lines=2, default=question, label=\"Question\")],\n", " outputs=[gr.outputs.Textbox(label=\"Answer\"), gr.outputs.Textbox(label=\"Score\")],\n", " title=None).launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: question-answering"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "context = \"The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America. This basin encompasses 7,000,000 square kilometres (2,700,000 sq mi), of which 5,500,000 square kilometres (2,100,000 sq mi) are covered by the rainforest. 
This region includes territory belonging to nine nations. The majority of the forest is contained within Brazil, with 60% of the rainforest, followed by Peru with 13%, Colombia with 10%, and with minor amounts in Venezuela, Ecuador, Bolivia, Guyana, Suriname and French Guiana. The Amazon represents over half of the planet's remaining rainforests, and comprises the largest and most biodiverse tract of tropical rainforest in the world, with an estimated 390 billion individual trees divided into 16,000 species.\"\n", "question = \"Which continent is the Amazon rainforest in?\"\n", "gr.load(\n", " \"huggingface/deepset/roberta-base-squad2\",\n", " inputs=[gr.inputs.Textbox(lines=7, default=context, label=\"Context Paragraph\"), gr.inputs.Textbox(lines=2, default=question, label=\"Question\")],\n", " outputs=[gr.outputs.Textbox(label=\"Answer\"), gr.outputs.Textbox(label=\"Score\")],\n", " title=None).launch()"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/question-answering/run.py b/demo/question-answering/run.py index 1369aa37e79e2..5674c329c60d2 100644 --- a/demo/question-answering/run.py +++ b/demo/question-answering/run.py @@ -1,7 +1,7 @@ import gradio as gr context = "The Amazon rainforest, also known in English as Amazonia or the Amazon Jungle, is a moist broadleaf forest that covers most of the Amazon basin of South America. This basin encompasses 7,000,000 square kilometres (2,700,000 sq mi), of which 5,500,000 square kilometres (2,100,000 sq mi) are covered by the rainforest. This region includes territory belonging to nine nations. The majority of the forest is contained within Brazil, with 60% of the rainforest, followed by Peru with 13%, Colombia with 10%, and with minor amounts in Venezuela, Ecuador, Bolivia, Guyana, Suriname and French Guiana. 
The Amazon represents over half of the planet's remaining rainforests, and comprises the largest and most biodiverse tract of tropical rainforest in the world, with an estimated 390 billion individual trees divided into 16,000 species." question = "Which continent is the Amazon rainforest in?" -gr.Interface.load( +gr.load( "huggingface/deepset/roberta-base-squad2", inputs=[gr.inputs.Textbox(lines=7, default=context, label="Context Paragraph"), gr.inputs.Textbox(lines=2, default=question, label="Question")], outputs=[gr.outputs.Textbox(label="Answer"), gr.outputs.Textbox(label="Score")], diff --git a/demo/stt_or_tts/run.ipynb b/demo/stt_or_tts/run.ipynb index 028c02cbfabe6..0e7da7abeb49b 100644 --- a/demo/stt_or_tts/run.ipynb +++ b/demo/stt_or_tts/run.ipynb @@ -1 +1 @@ -{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: stt_or_tts"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "title = \"GPT-J-6B\"\n", "\n", "tts_examples = [\n", " \"I love learning machine learning\",\n", " \"How do you do?\",\n", "]\n", "\n", "tts_demo = gr.Interface.load(\n", " \"huggingface/facebook/fastspeech2-en-ljspeech\",\n", " title=None,\n", " examples=tts_examples,\n", " description=\"Give me something to say!\",\n", ")\n", "\n", "stt_demo = gr.Interface.load(\n", " \"huggingface/facebook/wav2vec2-base-960h\",\n", " title=None,\n", " inputs=\"mic\",\n", " description=\"Let me try to guess what you're saying!\",\n", ")\n", "\n", "demo = gr.TabbedInterface([tts_demo, stt_demo], [\"Text-to-speech\", \"Speech-to-text\"])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 
5} \ No newline at end of file +{"cells": [{"cell_type": "markdown", "id": 302934307671667531413257853548643485645, "metadata": {}, "source": ["# Gradio Demo: stt_or_tts"]}, {"cell_type": "code", "execution_count": null, "id": 272996653310673477252411125948039410165, "metadata": {}, "outputs": [], "source": ["!pip install -q gradio "]}, {"cell_type": "code", "execution_count": null, "id": 288918539441861185822528903084949547379, "metadata": {}, "outputs": [], "source": ["import gradio as gr\n", "\n", "title = \"GPT-J-6B\"\n", "\n", "tts_examples = [\n", " \"I love learning machine learning\",\n", " \"How do you do?\",\n", "]\n", "\n", "tts_demo = gr.load(\n", " \"huggingface/facebook/fastspeech2-en-ljspeech\",\n", " title=None,\n", " examples=tts_examples,\n", " description=\"Give me something to say!\",\n", ")\n", "\n", "stt_demo = gr.load(\n", " \"huggingface/facebook/wav2vec2-base-960h\",\n", " title=None,\n", " inputs=\"mic\",\n", " description=\"Let me try to guess what you're saying!\",\n", ")\n", "\n", "demo = gr.TabbedInterface([tts_demo, stt_demo], [\"Text-to-speech\", \"Speech-to-text\"])\n", "\n", "if __name__ == \"__main__\":\n", " demo.launch()\n"]}], "metadata": {}, "nbformat": 4, "nbformat_minor": 5} \ No newline at end of file diff --git a/demo/stt_or_tts/run.py b/demo/stt_or_tts/run.py index 55f86ce01829c..8235d24ed33e7 100644 --- a/demo/stt_or_tts/run.py +++ b/demo/stt_or_tts/run.py @@ -7,14 +7,14 @@ "How do you do?", ] -tts_demo = gr.Interface.load( +tts_demo = gr.load( "huggingface/facebook/fastspeech2-en-ljspeech", title=None, examples=tts_examples, description="Give me something to say!", ) -stt_demo = gr.Interface.load( +stt_demo = gr.load( "huggingface/facebook/wav2vec2-base-960h", title=None, inputs="mic", diff --git a/gradio/blocks.py b/gradio/blocks.py index a3d20297a59e6..4c61a9afb40f9 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -619,7 +619,7 @@ def iterate_over_children(children_list): # add the event triggers for 
dependency, fn in zip(config["dependencies"], fns): # We used to add a "fake_event" to the config to cache examples - # without removing it. This was causing bugs in calling gr.Interface.load + # without removing it. This was causing bugs in calling gr.load # We fixed the issue by removing "fake_event" from the config in examples.py # but we still need to skip these events when loading the config to support # older demos @@ -1174,7 +1174,7 @@ def load( method, the two of which, confusingly, do two completely different things. - Class method: loads a demo from a Hugging Face Spaces repo and creates it locally and returns a block instance. Equivalent to gradio.Interface.load() + Class method: loads a demo from a Hugging Face Spaces repo and creates it locally and returns a block instance. Equivalent to gradio.load() Instance method: adds event that runs as soon as the demo loads in the browser. Example usage below. diff --git a/gradio/external.py b/gradio/external.py index bc1763b0278a7..cd16d276296be 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -1,5 +1,5 @@ """This module should not be used directly as its API is subject to change. Instead, -use the `gr.Blocks.load()` or `gr.Interface.load()` functions.""" +use the `gr.Blocks.load()` or `gr.load()` functions.""" from __future__ import annotations diff --git a/gradio/interface.py b/gradio/interface.py index 7e0a21809a5ec..d997c0fc2aa44 100644 --- a/gradio/interface.py +++ b/gradio/interface.py @@ -93,7 +93,7 @@ def load( import gradio as gr description = "Story generation with GPT" examples = [["An adventurer is approached by a mysterious stranger in the tavern for a new quest."]] - demo = gr.Interface.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) + demo = gr.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) demo.launch() """ warnings.warn("gr.Intrerface.load() will be deprecated. 
Use gr.load() instead.") diff --git a/test/test_external.py b/test/test_external.py index 496c543e2f2e4..6a2def4282d36 100644 --- a/test/test_external.py +++ b/test/test_external.py @@ -22,7 +22,7 @@ Hub and Space APIs do not change, and they keep their most famous models up. So if, e.g. Spaces is down, then these test will not pass. -These tests actually test gr.Interface.load() and gr.Blocks.load() but are +These tests actually test gr.load() and gr.Blocks.load() but are included in a separate file because of the above-mentioned dependency. """ @@ -35,7 +35,7 @@ class TestLoadInterface: def test_audio_to_audio(self): model_type = "audio-to-audio" - interface = gr.Interface.load( + interface = gr.load( name="speechbrain/mtl-mimic-voicebank", src="models", alias=model_type, @@ -57,7 +57,7 @@ def test_question_answering(self): def test_text_generation(self): model_type = "text_generation" - interface = gr.Interface.load( + interface = gr.load( "models/gpt2", alias=model_type, description="This is a test description" ) assert interface.__name__ == model_type @@ -70,7 +70,7 @@ def test_text_generation(self): def test_summarization(self): model_type = "summarization" - interface = gr.Interface.load( + interface = gr.load( "models/facebook/bart-large-cnn", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -79,7 +79,7 @@ def test_summarization(self): def test_translation(self): model_type = "translation" - interface = gr.Interface.load( + interface = gr.load( "models/facebook/bart-large-cnn", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -88,7 +88,7 @@ def test_translation(self): def test_text2text_generation(self): model_type = "text2text-generation" - interface = gr.Interface.load( + interface = gr.load( "models/sshleifer/tiny-mbart", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -97,7 +97,7 @@ def test_text2text_generation(self): def test_text_classification(self): model_type = 
"text-classification" - interface = gr.Interface.load( + interface = gr.load( "models/distilbert-base-uncased-finetuned-sst-2-english", api_key=None, alias=model_type, @@ -108,7 +108,7 @@ def test_text_classification(self): def test_fill_mask(self): model_type = "fill-mask" - interface = gr.Interface.load( + interface = gr.load( "models/bert-base-uncased", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -117,7 +117,7 @@ def test_fill_mask(self): def test_zero_shot_classification(self): model_type = "zero-shot-classification" - interface = gr.Interface.load( + interface = gr.load( "models/facebook/bart-large-mnli", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -128,7 +128,7 @@ def test_zero_shot_classification(self): def test_automatic_speech_recognition(self): model_type = "automatic-speech-recognition" - interface = gr.Interface.load( + interface = gr.load( "models/facebook/wav2vec2-base-960h", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -137,7 +137,7 @@ def test_automatic_speech_recognition(self): def test_image_classification(self): model_type = "image-classification" - interface = gr.Interface.load( + interface = gr.load( "models/google/vit-base-patch16-224", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -146,7 +146,7 @@ def test_image_classification(self): def test_feature_extraction(self): model_type = "feature-extraction" - interface = gr.Interface.load( + interface = gr.load( "models/sentence-transformers/distilbert-base-nli-mean-tokens", api_key=None, alias=model_type, @@ -157,7 +157,7 @@ def test_feature_extraction(self): def test_sentence_similarity(self): model_type = "text-to-speech" - interface = gr.Interface.load( + interface = gr.load( "models/julien-c/ljspeech_tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train", api_key=None, alias=model_type, @@ -168,7 +168,7 @@ def test_sentence_similarity(self): def 
test_text_to_speech(self): model_type = "text-to-speech" - interface = gr.Interface.load( + interface = gr.load( "models/julien-c/ljspeech_tts_train_tacotron2_raw_phn_tacotron_g2p_en_no_space_train", api_key=None, alias=model_type, @@ -179,7 +179,7 @@ def test_text_to_speech(self): def test_text_to_image(self): model_type = "text-to-image" - interface = gr.Interface.load( + interface = gr.load( "models/osanseviero/BigGAN-deep-128", api_key=None, alias=model_type ) assert interface.__name__ == model_type @@ -188,12 +188,12 @@ def test_text_to_image(self): def test_english_to_spanish(self): with pytest.warns(UserWarning): - io = gr.Interface.load("spaces/abidlabs/english_to_spanish", title="hi") + io = gr.load("spaces/abidlabs/english_to_spanish", title="hi") assert isinstance(io.input_components[0], gr.Textbox) assert isinstance(io.output_components[0], gr.Textbox) def test_sentiment_model(self): - io = gr.Interface.load("models/distilbert-base-uncased-finetuned-sst-2-english") + io = gr.load("models/distilbert-base-uncased-finetuned-sst-2-english") try: output = io("I am happy, I love you") assert json.load(open(output))["label"] == "POSITIVE" @@ -217,7 +217,7 @@ def test_translation_model(self): pass def test_numerical_to_label_space(self): - io = gr.Interface.load("spaces/abidlabs/titanic-survival") + io = gr.load("spaces/abidlabs/titanic-survival") try: output = io("male", 77, 10) assert json.load(open(output))["label"] == "Perishes" @@ -225,7 +225,7 @@ def test_numerical_to_label_space(self): pass def test_image_to_text(self): - io = gr.Interface.load("models/nlpconnect/vit-gpt2-image-captioning") + io = gr.load("models/nlpconnect/vit-gpt2-image-captioning") try: output = io("gradio/test_data/lion.jpg") assert isinstance(output, str) @@ -233,7 +233,7 @@ def test_image_to_text(self): pass def test_conversational(self): - io = gr.Interface.load("models/microsoft/DialoGPT-medium") + io = gr.load("models/microsoft/DialoGPT-medium") app, _, _ = 
io.launch(prevent_thread_lock=True) client = TestClient(app) assert app.state_holder == {} @@ -247,7 +247,7 @@ def test_conversational(self): assert isinstance(app.state_holder["foo"], dict) def test_speech_recognition_model(self): - io = gr.Interface.load("models/facebook/wav2vec2-base-960h") + io = gr.load("models/facebook/wav2vec2-base-960h") try: output = io("gradio/test_data/test_audio.wav") assert output is not None @@ -276,7 +276,7 @@ def test_speech_recognition_model(self): io.close() def test_text_to_image_model(self): - io = gr.Interface.load("models/osanseviero/BigGAN-deep-128") + io = gr.load("models/osanseviero/BigGAN-deep-128") try: filename = io("chest") assert filename.endswith(".jpg") or filename.endswith(".jpeg") @@ -285,7 +285,7 @@ def test_text_to_image_model(self): def test_private_space(self): api_key = "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" # Intentionally revealing this key for testing purposes - io = gr.Interface.load( + io = gr.load( "spaces/gradio-tests/not-actually-private-space", api_key=api_key ) try: @@ -296,7 +296,7 @@ def test_private_space(self): def test_private_space_audio(self): api_key = "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" # Intentionally revealing this key for testing purposes - io = gr.Interface.load( + io = gr.load( "spaces/gradio-tests/not-actually-private-space-audio", api_key=api_key ) try: @@ -308,17 +308,17 @@ def test_private_space_audio(self): def test_multiple_spaces_one_private(self): api_key = "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" # Intentionally revealing this key for testing purposes with gr.Blocks(): - gr.Interface.load( + gr.load( "spaces/gradio-tests/not-actually-private-space", api_key=api_key ) - gr.Interface.load( + gr.load( "spaces/gradio/test-loading-examples", ) assert Context.access_token == api_key def test_loading_files_via_proxy_works(self): api_key = "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" # Intentionally revealing this key for testing purposes - io = gr.Interface.load( + 
io = gr.load( "spaces/gradio-tests/test-loading-examples-private", api_key=api_key ) app, _, _ = io.launch(prevent_thread_lock=True) @@ -333,7 +333,7 @@ class TestLoadInterfaceWithExamples: def test_interface_load_examples(self, tmp_path): test_file_dir = Path(Path(__file__).parent, "test_files") with patch("gradio.helpers.CACHED_FOLDER", tmp_path): - gr.Interface.load( + gr.load( name="models/google/vit-base-patch16-224", examples=[Path(test_file_dir, "cheetah1.jpg")], cache_examples=False, @@ -342,14 +342,14 @@ def test_interface_load_examples(self, tmp_path): def test_interface_load_cache_examples(self, tmp_path): test_file_dir = Path(Path(__file__).parent, "test_files") with patch("gradio.helpers.CACHED_FOLDER", tmp_path): - gr.Interface.load( + gr.load( name="models/google/vit-base-patch16-224", examples=[Path(test_file_dir, "cheetah1.jpg")], cache_examples=True, ) def test_root_url(self): - demo = gr.Interface.load("spaces/gradio/test-loading-examples") + demo = gr.load("spaces/gradio/test-loading-examples") assert all( [ c["props"]["root_url"] @@ -359,17 +359,17 @@ def test_root_url(self): ) def test_root_url_deserialization(self): - demo = gr.Interface.load("spaces/gradio/simple_gallery") + demo = gr.load("spaces/gradio/simple_gallery") path_to_files = demo("test") assert (Path(path_to_files) / "captions.json").exists() def test_interface_with_examples(self): # This demo has the "fake_event" correctly removed - demo = gr.Interface.load("spaces/freddyaboulton/calculator") + demo = gr.load("spaces/freddyaboulton/calculator") assert demo(2, "add", 3) == 5 # This demo still has the "fake_event". 
both should work - demo = gr.Interface.load("spaces/abidlabs/test-calculator-2") + demo = gr.load("spaces/abidlabs/test-calculator-2") assert demo(2, "add", 4) == 6 @@ -440,13 +440,13 @@ def check_dataset(config, readme_examples): def test_load_blocks_with_default_values(): - io = gr.Interface.load("spaces/abidlabs/min-dalle") + io = gr.load("spaces/abidlabs/min-dalle") assert isinstance(io.get_config_file()["components"][0]["props"]["value"], list) - io = gr.Interface.load("spaces/abidlabs/min-dalle-later") + io = gr.load("spaces/abidlabs/min-dalle-later") assert isinstance(io.get_config_file()["components"][0]["props"]["value"], list) - io = gr.Interface.load("spaces/freddyaboulton/dataframe_load") + io = gr.load("spaces/freddyaboulton/dataframe_load") assert io.get_config_file()["components"][0]["props"]["value"] == { "headers": ["a", "b"], "data": [[1, 4], [2, 5], [3, 6]], @@ -467,7 +467,7 @@ def test_can_load_tabular_model_with_different_widget_data(hypothetical_readme): with patch( "gradio.external.get_tabular_examples", return_value=hypothetical_readme ): - io = gr.Interface.load("models/scikit-learn/tabular-playground") + io = gr.load("models/scikit-learn/tabular-playground") check_dataframe(io.config) check_dataset(io.config, hypothetical_readme) @@ -514,7 +514,7 @@ def test_respect_queue_when_load_from_config(): with patch( "gradio.external_utils.get_pred_from_ws", return_value={"data": ["foo"]} ): - interface = gr.Interface.load("spaces/freddyaboulton/saymyname") + interface = gr.load("spaces/freddyaboulton/saymyname") assert interface("bob") == "foo" diff --git a/test/test_mix.py b/test/test_mix.py index d58de1ece96c3..5729f46a4d8bb 100644 --- a/test/test_mix.py +++ b/test/test_mix.py @@ -24,8 +24,8 @@ def test_in_interface(self): @pytest.mark.flaky def test_with_external(self): - io1 = gr.Interface.load("spaces/abidlabs/image-identity") - io2 = gr.Interface.load("spaces/abidlabs/image-classifier") + io1 = gr.load("spaces/abidlabs/image-identity") + io2 
= gr.load("spaces/abidlabs/image-classifier") series = mix.Series(io1, io2) try: output = series("gradio/test_data/lion.jpg") @@ -55,8 +55,8 @@ def test_multiple_return_in_interface(self): @pytest.mark.flaky def test_with_external(self): - io1 = gr.Interface.load("spaces/abidlabs/english_to_spanish") - io2 = gr.Interface.load("spaces/abidlabs/english2german") + io1 = gr.load("spaces/abidlabs/english_to_spanish") + io2 = gr.load("spaces/abidlabs/english2german") parallel = mix.Parallel(io1, io2) try: hello_es, hello_de = parallel("Hello") From 2deb1c7b17b756a2167f85c6b2625a04cedae251 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 03:51:36 -0700 Subject: [PATCH 53/93] refactoring --- client/python/gradio_client/client.py | 302 ++++++++++++-------------- 1 file changed, 136 insertions(+), 166 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 74389d42cc08c..e254419dfa025 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -6,11 +6,12 @@ import re import uuid from concurrent.futures import Future -from typing import Callable, Dict, List +from typing import Callable, Dict, List, Tuple import requests import websockets from gradio_client import serializing, utils +from gradio_client.serializing import Serializable from packaging import version @@ -42,11 +43,9 @@ def __init__( self.api_url = utils.API_URL.format(self.src) self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") self.config = self._get_config() - - self.predict_fns = self._setup_predict_fns() - self.serialize_fns = self._setup_serialize_fn() - self.deserialize_fns = self._setup_deserialize_fn() - + + self.endpoints = [Endpoint(self, fn_index, dependency) for fn_index, dependency in enumerate(self.config["dependencies"])] + # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) @@ -55,24 +54,25 @@ 
def predict( *args, api_name: str | None = None, fn_index: int = 0, - callbacks: List[Callable] | None = None, + result_callbacks: List[Callable] | None = None, ) -> Future: - complete_fn = self._get_complete_fn(api_name, fn_index) - future = self.executor.submit(complete_fn, *args) + if api_name: + fn_index = self._infer_fn_index(api_name) + end_to_end_fn = self.endpoints[fn_index].end_to_end_fn + future = self.executor.submit(end_to_end_fn, *args) job = Job(future) - if callbacks: - + + if result_callbacks: def create_fn(callback) -> Callable: def fn(future): if isinstance(future.result(), tuple): callback(*future.result()) else: callback(future.result()) - return fn - - for callback in callbacks: + for callback in result_callbacks: job.add_done_callback(create_fn(callback)) + return job def info(self, api_name: str | None = None) -> Dict: @@ -107,158 +107,6 @@ def _infer_fn_index(self, api_name: str) -> int: return i raise ValueError(f"Cannot find a function with api_name: {api_name}") - def _get_complete_fn(self, api_name: str | None, fn_index: int) -> Callable: - if api_name is not None: - fn_index = self._infer_fn_index(api_name) - - predict_fn = self._get_predict_fn(fn_index) - serialize_fn = self._get_serialize_fn(fn_index) - deserialize_fn = self._get_deserialize_fn(fn_index) - - return lambda *args: deserialize_fn(*predict_fn(*serialize_fn(*args))) - - def _use_websocket(self, dependency: Dict) -> bool: - queue_enabled = self.config.get("enable_queue", False) - queue_uses_websocket = version.parse( - self.config.get("version", "2.0") - ) >= version.Version("3.2") - dependency_uses_queue = dependency.get("queue", False) is not False - return queue_enabled and queue_uses_websocket and dependency_uses_queue - - async def _ws_fn(self, data, hash_data): - async with websockets.connect( # type: ignore - self.ws_url, open_timeout=10, extra_headers=self.headers - ) as websocket: - return await utils.get_pred_from_ws(websocket, data, hash_data) - - def 
_get_predict_fn(self, fn_index: int) -> Callable: - return self.predict_fns[fn_index] - - def _setup_predict_fns(self) -> List[Callable]: - def create_fn(fn_index, dependency: Dict) -> Callable: - if not dependency["backend_fn"]: - return lambda *args: args - use_ws = self._use_websocket(dependency) - - def predict_fn(*data): - if not dependency["backend_fn"]: - return None - data = json.dumps({"data": data, "fn_index": fn_index}) - hash_data = json.dumps( - {"fn_index": fn_index, "session_hash": str(uuid.uuid4())} - ) - if use_ws: - result = utils.synchronize_async(self._ws_fn, data, hash_data) - output = result["data"] - else: - response = requests.post( - self.api_url, headers=self.headers, data=data - ) - result = json.loads(response.content.decode("utf-8")) - try: - output = result["data"] - except KeyError: - if "error" in result and "429" in result["error"]: - raise utils.TooManyRequestsError( - "Too many requests to the Hugging Face API" - ) - raise KeyError( - f"Could not find 'data' key in response. Response received: {result}" - ) - return tuple(output) - return predict_fn - - fns = [] - for fn_index, dependency in enumerate(self.config["dependencies"]): - fns.append(create_fn(fn_index, dependency)) - return fns - - def _get_serialize_fn(self, fn_index: int) -> Callable: - return self.serialize_fns[fn_index] - - def _setup_serialize_fn(self) -> List[Callable]: - def create_fn(dependency: Dict) -> Callable: - if not dependency["backend_fn"]: - return lambda *args: args - inputs = dependency["inputs"] - serializers = [] - - for i in inputs: - for component in self.config["components"]: - if component["id"] == i: - if component.get("serializer"): - serializer_name = component["serializer"] - assert ( - serializer_name in serializing.SERIALIZER_MAPPING - ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." 
- serializer = serializing.SERIALIZER_MAPPING[serializer_name] - else: - component_name = component["type"] - assert ( - component_name in serializing.COMPONENT_MAPPING - ), f"Unknown component: {component_name}, you may need to update your gradio_client version." - serializer = serializing.COMPONENT_MAPPING[component_name] - serializers.append(serializer()) # type: ignore - - def serialize_fn(*data): - assert len(data) == len( - serializers - ), f"Expected {len(serializers)} arguments, got {len(data)}" - return [s.serialize(d) for s, d in zip(serializers, data)] - - return serialize_fn - - fns = [] - for dependency in self.config["dependencies"]: - fns.append(create_fn(dependency)) - return fns - - def _get_deserialize_fn(self, fn_index: int) -> Callable: - return self.deserialize_fns[fn_index] - - def _setup_deserialize_fn(self) -> List[Callable]: - def create_fn(dependency: Dict) -> Callable: - if not dependency["backend_fn"]: - return lambda *args: args - - outputs = dependency["outputs"] - deserializers = [] - - for i in outputs: - for component in self.config["components"]: - if component["id"] == i: - if component.get("serializer"): - serializer_name = component["serializer"] - assert ( - serializer_name in serializing.SERIALIZER_MAPPING - ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." - deserializer = serializing.SERIALIZER_MAPPING[ - serializer_name - ] - else: - component_name = component["type"] - assert ( - component_name in serializing.COMPONENT_MAPPING - ), f"Unknown component: {component_name}, you may need to update your gradio_client version." 
- deserializer = serializing.COMPONENT_MAPPING[component_name] - deserializers.append(deserializer()) # type: ignore - - def deserialize_fn(*data): - result = [ - s.deserialize(d, access_token=self.access_token) - for s, d in zip(deserializers, data) - ] - if len(outputs) == 1: - result = result[0] - return result - - return deserialize_fn - - fns = [] - for dependency in self.config["dependencies"]: - fns.append(create_fn(dependency)) - return fns - def __del__(self): if hasattr(self, "executor"): self.executor.shutdown(wait=True) @@ -289,6 +137,128 @@ def _get_config(self) -> Dict: return config +class Endpoint: + """Helper class for storing all the information about a single API endpoint.""" + def __init__(self, client: Client, fn_index: int, dependency: Dict): + self.api_url = client.api_url + self.ws_url = client.ws_url + self.fn_index = fn_index + self.dependency = dependency + self.headers = client.headers + self.config = client.config + self.use_ws = self._use_websocket(self.dependency) + self.access_token = client.access_token + self.serializers, self.deserializers = self._setup_serializers() + + def end_to_end_fn(self, *data): + outputs = self.deserialize(self.predict(self.serialize(*data))) + if len(self.dependency["outputs"]) == 1: + return outputs[0] + return outputs + + def predict(self, *data) -> Tuple: + if not self.dependency["backend_fn"]: + return data + data = json.dumps({"data": data, "fn_index": self.fn_index}) + hash_data = json.dumps( + {"fn_index": self.fn_index, "session_hash": str(uuid.uuid4())} + ) + if self.use_ws: + result = utils.synchronize_async(self._ws_fn, data, hash_data) + output = result["data"] + else: + response = requests.post( + self.api_url, headers=self.headers, data=data + ) + result = json.loads(response.content.decode("utf-8")) + try: + output = result["data"] + except KeyError: + if "error" in result and "429" in result["error"]: + raise utils.TooManyRequestsError( + "Too many requests to the Hugging Face API" + ) + 
raise KeyError( + f"Could not find 'data' key in response. Response received: {result}" + ) + return tuple(output) + + def serialize(self, *data) -> Tuple: + assert len(data) == len( + self.serializers + ), f"Expected {len(self.serializers)} arguments, got {len(data)}" + return tuple([s.serialize(d) for s, d in zip(self.serializers, data)]) + + def deserialize(self, *data) -> Tuple: + assert len(data) == len( + self.deserializers + ), f"Expected {len(self.deserializers)} outputs, got {len(data)}" + return tuple([ + s.deserialize(d, access_token=self.access_token) + for s, d in zip(self.deserializers, data) + ]) + + def _setup_serializers(self) -> Tuple[List[Serializable], List[Serializable]]: + inputs = self.dependency["inputs"] + serializers = [] + + for i in inputs: + for component in self.config["components"]: + if component["id"] == i: + if component.get("serializer"): + serializer_name = component["serializer"] + assert ( + serializer_name in serializing.SERIALIZER_MAPPING + ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." + serializer = serializing.SERIALIZER_MAPPING[serializer_name] + else: + component_name = component["type"] + assert ( + component_name in serializing.COMPONENT_MAPPING + ), f"Unknown component: {component_name}, you may need to update your gradio_client version." + serializer = serializing.COMPONENT_MAPPING[component_name] + serializers.append(serializer()) # type: ignore + + outputs = self.dependency["outputs"] + deserializers = [] + + for i in outputs: + for component in self.config["components"]: + if component["id"] == i: + if component.get("serializer"): + serializer_name = component["serializer"] + assert ( + serializer_name in serializing.SERIALIZER_MAPPING + ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." 
+ deserializer = serializing.SERIALIZER_MAPPING[ + serializer_name + ] + else: + component_name = component["type"] + assert ( + component_name in serializing.COMPONENT_MAPPING + ), f"Unknown component: {component_name}, you may need to update your gradio_client version." + deserializer = serializing.COMPONENT_MAPPING[component_name] + deserializers.append(deserializer()) # type: ignore + + return serializers, deserializers + + def _use_websocket(self, dependency: Dict) -> bool: + queue_enabled = self.config.get("enable_queue", False) + queue_uses_websocket = version.parse( + self.config.get("version", "2.0") + ) >= version.Version("3.2") + dependency_uses_queue = dependency.get("queue", False) is not False + return queue_enabled and queue_uses_websocket and dependency_uses_queue + + async def _ws_fn(self, data, hash_data): + async with websockets.connect( # type: ignore + self.ws_url, open_timeout=10, extra_headers=self.headers + ) as websocket: + return await utils.get_pred_from_ws(websocket, data, hash_data) + + + class Job(Future): """A Job is a thin wrapper over the Future class that can be cancelled.""" From 96153a2d190ee7429067a4f98b721e8d73b0f588 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 04:16:52 -0700 Subject: [PATCH 54/93] refactoring' --- client/python/gradio_client/client.py | 19 ++++++++++++++----- client/python/gradio_client/utils.py | 5 +++++ 2 files changed, 19 insertions(+), 5 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index e254419dfa025..a3689c9e4e447 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -148,17 +148,27 @@ def __init__(self, client: Client, fn_index: int, dependency: Dict): self.config = client.config self.use_ws = self._use_websocket(self.dependency) self.access_token = client.access_token - self.serializers, self.deserializers = self._setup_serializers() + try: + self.serializers, self.deserializers 
= self._setup_serializers() + self.is_valid = self.dependency["backend_fn"] # Only a real API endpoint if backend_fn is True + except AssertionError: + self.is_valid = False def end_to_end_fn(self, *data): - outputs = self.deserialize(self.predict(self.serialize(*data))) + if not self.is_valid: + raise utils.InvalidAPIEndpointError() + print("data", data) + inputs = self.serialize(*data) + print("inputs", inputs) + predictions = self.predict(*inputs) + print("predictions", predictions) + outputs = self.deserialize(*predictions) + print("outputs", outputs) if len(self.dependency["outputs"]) == 1: return outputs[0] return outputs def predict(self, *data) -> Tuple: - if not self.dependency["backend_fn"]: - return data data = json.dumps({"data": data, "fn_index": self.fn_index}) hash_data = json.dumps( {"fn_index": self.fn_index, "session_hash": str(uuid.uuid4())} @@ -221,7 +231,6 @@ def _setup_serializers(self) -> Tuple[List[Serializable], List[Serializable]]: outputs = self.dependency["outputs"] deserializers = [] - for i in outputs: for component in self.config["components"]: if component["id"] == i: diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index eeccf154e11f4..6c8727c0d5758 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -29,6 +29,11 @@ class QueueError(Exception): pass +class InvalidAPIEndpointError(Exception): + """Raised when the API endpoint is invalid.""" + + pass + ######################## # Network utils ######################## From 2a4100c46a405424da4cde18b2e745cc6e168e41 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 04:17:12 -0700 Subject: [PATCH 55/93] formatting --- client/python/gradio_client/client.py | 56 +++++++++++++++------------ client/python/gradio_client/utils.py | 1 + test/test_external.py | 12 ++---- 3 files changed, 35 insertions(+), 34 deletions(-) diff --git a/client/python/gradio_client/client.py 
b/client/python/gradio_client/client.py index a3689c9e4e447..6128911242199 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -43,9 +43,12 @@ def __init__( self.api_url = utils.API_URL.format(self.src) self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") self.config = self._get_config() - - self.endpoints = [Endpoint(self, fn_index, dependency) for fn_index, dependency in enumerate(self.config["dependencies"])] - + + self.endpoints = [ + Endpoint(self, fn_index, dependency) + for fn_index, dependency in enumerate(self.config["dependencies"]) + ] + # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) @@ -61,18 +64,21 @@ def predict( end_to_end_fn = self.endpoints[fn_index].end_to_end_fn future = self.executor.submit(end_to_end_fn, *args) job = Job(future) - + if result_callbacks: + def create_fn(callback) -> Callable: def fn(future): if isinstance(future.result(), tuple): callback(*future.result()) else: callback(future.result()) + return fn + for callback in result_callbacks: job.add_done_callback(create_fn(callback)) - + return job def info(self, api_name: str | None = None) -> Dict: @@ -139,6 +145,7 @@ def _get_config(self) -> Dict: class Endpoint: """Helper class for storing all the information about a single API endpoint.""" + def __init__(self, client: Client, fn_index: int, dependency: Dict): self.api_url = client.api_url self.ws_url = client.ws_url @@ -150,10 +157,12 @@ def __init__(self, client: Client, fn_index: int, dependency: Dict): self.access_token = client.access_token try: self.serializers, self.deserializers = self._setup_serializers() - self.is_valid = self.dependency["backend_fn"] # Only a real API endpoint if backend_fn is True + self.is_valid = self.dependency[ + "backend_fn" + ] # Only a real API endpoint if backend_fn is True except AssertionError: self.is_valid = False - + def end_to_end_fn(self, *data): 
if not self.is_valid: raise utils.InvalidAPIEndpointError() @@ -167,7 +176,7 @@ def end_to_end_fn(self, *data): if len(self.dependency["outputs"]) == 1: return outputs[0] return outputs - + def predict(self, *data) -> Tuple: data = json.dumps({"data": data, "fn_index": self.fn_index}) hash_data = json.dumps( @@ -177,9 +186,7 @@ def predict(self, *data) -> Tuple: result = utils.synchronize_async(self._ws_fn, data, hash_data) output = result["data"] else: - response = requests.post( - self.api_url, headers=self.headers, data=data - ) + response = requests.post(self.api_url, headers=self.headers, data=data) result = json.loads(response.content.decode("utf-8")) try: output = result["data"] @@ -192,22 +199,24 @@ def predict(self, *data) -> Tuple: f"Could not find 'data' key in response. Response received: {result}" ) return tuple(output) - + def serialize(self, *data) -> Tuple: assert len(data) == len( self.serializers ), f"Expected {len(self.serializers)} arguments, got {len(data)}" return tuple([s.serialize(d) for s, d in zip(self.serializers, data)]) - + def deserialize(self, *data) -> Tuple: assert len(data) == len( self.deserializers ), f"Expected {len(self.deserializers)} outputs, got {len(data)}" - return tuple([ - s.deserialize(d, access_token=self.access_token) - for s, d in zip(self.deserializers, data) - ]) - + return tuple( + [ + s.deserialize(d, access_token=self.access_token) + for s, d in zip(self.deserializers, data) + ] + ) + def _setup_serializers(self) -> Tuple[List[Serializable], List[Serializable]]: inputs = self.dependency["inputs"] serializers = [] @@ -228,7 +237,7 @@ def _setup_serializers(self) -> Tuple[List[Serializable], List[Serializable]]: ), f"Unknown component: {component_name}, you may need to update your gradio_client version." 
serializer = serializing.COMPONENT_MAPPING[component_name] serializers.append(serializer()) # type: ignore - + outputs = self.dependency["outputs"] deserializers = [] for i in outputs: @@ -239,9 +248,7 @@ def _setup_serializers(self) -> Tuple[List[Serializable], List[Serializable]]: assert ( serializer_name in serializing.SERIALIZER_MAPPING ), f"Unknown serializer: {serializer_name}, you may need to update your gradio_client version." - deserializer = serializing.SERIALIZER_MAPPING[ - serializer_name - ] + deserializer = serializing.SERIALIZER_MAPPING[serializer_name] else: component_name = component["type"] assert ( @@ -249,9 +256,9 @@ def _setup_serializers(self) -> Tuple[List[Serializable], List[Serializable]]: ), f"Unknown component: {component_name}, you may need to update your gradio_client version." deserializer = serializing.COMPONENT_MAPPING[component_name] deserializers.append(deserializer()) # type: ignore - + return serializers, deserializers - + def _use_websocket(self, dependency: Dict) -> bool: queue_enabled = self.config.get("enable_queue", False) queue_uses_websocket = version.parse( @@ -266,7 +273,6 @@ async def _ws_fn(self, data, hash_data): ) as websocket: return await utils.get_pred_from_ws(websocket, data, hash_data) - class Job(Future): """A Job is a thin wrapper over the Future class that can be cancelled.""" diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index 6c8727c0d5758..6bb83bf9c7c26 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -34,6 +34,7 @@ class InvalidAPIEndpointError(Exception): pass + ######################## # Network utils ######################## diff --git a/test/test_external.py b/test/test_external.py index 6a2def4282d36..f82810df1e1cd 100644 --- a/test/test_external.py +++ b/test/test_external.py @@ -108,9 +108,7 @@ def test_text_classification(self): def test_fill_mask(self): model_type = "fill-mask" - interface = gr.load( - 
"models/bert-base-uncased", api_key=None, alias=model_type - ) + interface = gr.load("models/bert-base-uncased", api_key=None, alias=model_type) assert interface.__name__ == model_type assert isinstance(interface.input_components[0], gr.Textbox) assert isinstance(interface.output_components[0], gr.Label) @@ -285,9 +283,7 @@ def test_text_to_image_model(self): def test_private_space(self): api_key = "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" # Intentionally revealing this key for testing purposes - io = gr.load( - "spaces/gradio-tests/not-actually-private-space", api_key=api_key - ) + io = gr.load("spaces/gradio-tests/not-actually-private-space", api_key=api_key) try: output = io("abc") assert output == "abc" @@ -308,9 +304,7 @@ def test_private_space_audio(self): def test_multiple_spaces_one_private(self): api_key = "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" # Intentionally revealing this key for testing purposes with gr.Blocks(): - gr.load( - "spaces/gradio-tests/not-actually-private-space", api_key=api_key - ) + gr.load("spaces/gradio-tests/not-actually-private-space", api_key=api_key) gr.load( "spaces/gradio/test-loading-examples", ) From 0135d0101c203d75ce8d650b39a0f3f7f23bbd60 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 04:50:08 -0700 Subject: [PATCH 56/93] formatting --- client/python/gradio_client/client.py | 17 +++++---- client/python/test/test_utils.py | 41 +++++++++++++++++++--- gradio/external.py | 3 +- gradio/external_utils.py | 50 ++------------------------- test/test_external.py | 49 -------------------------- 5 files changed, 50 insertions(+), 110 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 6128911242199..9c7c8265b54e9 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -6,7 +6,7 @@ import re import uuid from concurrent.futures import Future -from typing import Callable, Dict, List, Tuple +from typing import Any, 
Callable, Dict, List, Tuple import requests import websockets @@ -25,7 +25,7 @@ def __init__( ): self.access_token = access_token self.headers = ( - {"Authorization": "Bearer {access_token}"} if access_token else {} + {"Authorization": f"Bearer {access_token}"} if access_token else {} ) if space is None and src is None: @@ -61,12 +61,12 @@ def predict( ) -> Future: if api_name: fn_index = self._infer_fn_index(api_name) + end_to_end_fn = self.endpoints[fn_index].end_to_end_fn future = self.executor.submit(end_to_end_fn, *args) job = Job(future) if result_callbacks: - def create_fn(callback) -> Callable: def fn(future): if isinstance(future.result(), tuple): @@ -166,13 +166,9 @@ def __init__(self, client: Client, fn_index: int, dependency: Dict): def end_to_end_fn(self, *data): if not self.is_valid: raise utils.InvalidAPIEndpointError() - print("data", data) inputs = self.serialize(*data) - print("inputs", inputs) predictions = self.predict(*inputs) - print("predictions", predictions) outputs = self.deserialize(*predictions) - print("outputs", outputs) if len(self.dependency["outputs"]) == 1: return outputs[0] return outputs @@ -199,6 +195,13 @@ def predict(self, *data) -> Tuple: f"Could not find 'data' key in response. 
Response received: {result}" ) return tuple(output) + + def _predict_resolve(self, *data) -> Any: + """Needed for gradio.load(), which has a slightly different signature for serializing/deserializing""" + outputs = self.predict(*data) + if len(self.dependency["outputs"]) == 1: + return outputs[0] + return outputs def serialize(self, *data) -> Tuple: assert len(data) == len( diff --git a/client/python/test/test_utils.py b/client/python/test/test_utils.py index d3ad111225be3..f5e0b3c4b1702 100644 --- a/client/python/test/test_utils.py +++ b/client/python/test/test_utils.py @@ -1,15 +1,13 @@ from copy import deepcopy import tempfile +from unittest.mock import MagicMock, patch +import json import pytest -from gradio import media_data +from gradio import media_data, exceptions from gradio_client import utils -data = { - -} - def test_encode_url_or_file_to_base64(): output_base64 = utils.encode_url_or_file_to_base64( @@ -69,3 +67,36 @@ def test_download_private_file(): def test_strip_invalid_filename_characters(orig_filename, new_filename): assert utils.strip_invalid_filename_characters(orig_filename) == new_filename + +class AsyncMock(MagicMock): + async def __call__(self, *args, **kwargs): + return super(AsyncMock, self).__call__(*args, **kwargs) + + +@pytest.mark.asyncio +async def test_get_pred_from_ws(): + mock_ws = AsyncMock(name="ws") + messages = [ + json.dumps({"msg": "estimation"}), + json.dumps({"msg": "send_data"}), + json.dumps({"msg": "process_generating"}), + json.dumps({"msg": "process_completed", "output": {"data": ["result!"]}}), + ] + mock_ws.recv.side_effect = messages + data = json.dumps({"data": ["foo"], "fn_index": "foo"}) + hash_data = json.dumps({"session_hash": "daslskdf", "fn_index": "foo"}) + output = await utils.get_pred_from_ws(mock_ws, data, hash_data) + assert output == {"data": ["result!"]} + mock_ws.send.assert_called_once_with(data) + + +@pytest.mark.asyncio +async def test_get_pred_from_ws_raises_if_queue_full(): + mock_ws = 
AsyncMock(name="ws") + messages = [json.dumps({"msg": "queue_full"})] + mock_ws.recv.side_effect = messages + data = json.dumps({"data": ["foo"], "fn_index": "foo"}) + hash_data = json.dumps({"session_hash": "daslskdf", "fn_index": "foo"}) + with pytest.raises(exceptions.Error, match="Queue is full!"): + await utils.get_pred_from_ws(mock_ws, data, hash_data) + diff --git a/gradio/external.py b/gradio/external.py index cd16d276296be..64c23ce725daf 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -443,7 +443,8 @@ def from_spaces( def from_spaces_blocks(space: str, api_key: str | None) -> Blocks: client = Client(space=space, access_token=api_key) - return gradio.Blocks.from_config(client.config, client.predict_fns, client.src) + predict_fns = [endpoint._predict_resolve for endpoint in client.endpoints] + return gradio.Blocks.from_config(client.config, predict_fns, client.src) def from_spaces_interface( diff --git a/gradio/external_utils.py b/gradio/external_utils.py index 82294add0fe12..cb402e98d99c0 100644 --- a/gradio/external_utils.py +++ b/gradio/external_utils.py @@ -1,20 +1,16 @@ """Utility function for gradio/external.py""" import base64 -import json import math import operator import re import warnings -from typing import Any, Dict, List, Tuple +from typing import Dict, List, Tuple import requests -import websockets import yaml -from packaging import version -from websockets.legacy.protocol import WebSocketCommonProtocol -from gradio import components, exceptions +from gradio import components ################## # Helper functions for processing tabular data @@ -116,48 +112,6 @@ def encode_to_base64(r: requests.Response) -> str: return new_base64 -################## -# Helper functions for connecting to websockets -################## - - -async def get_pred_from_ws( - websocket: WebSocketCommonProtocol, data: str, hash_data: str -) -> Dict[str, Any]: - completed = False - resp = {} - while not completed: - msg = await websocket.recv() - resp = 
json.loads(msg) - if resp["msg"] == "queue_full": - raise exceptions.Error("Queue is full! Please try again.") - if resp["msg"] == "send_hash": - await websocket.send(hash_data) - elif resp["msg"] == "send_data": - await websocket.send(data) - completed = resp["msg"] == "process_completed" - return resp["output"] - - -def get_ws_fn(ws_url, headers): - async def ws_fn(data, hash_data): - async with websockets.connect( # type: ignore - ws_url, open_timeout=10, extra_headers=headers - ) as websocket: - return await get_pred_from_ws(websocket, data, hash_data) - - return ws_fn - - -def use_websocket(config, dependency): - queue_enabled = config.get("enable_queue", False) - queue_uses_websocket = version.parse( - config.get("version", "2.0") - ) >= version.Version("3.2") - dependency_uses_queue = dependency.get("queue", False) is not False - return queue_enabled and queue_uses_websocket and dependency_uses_queue - - ################## # Helper function for cleaning up an Interface loaded from HF Spaces ################## diff --git a/test/test_external.py b/test/test_external.py index f82810df1e1cd..d195b1ce11697 100644 --- a/test/test_external.py +++ b/test/test_external.py @@ -1,6 +1,5 @@ import json import os -import sys import textwrap import warnings from pathlib import Path @@ -9,13 +8,11 @@ import pytest from fastapi.testclient import TestClient -import gradio import gradio as gr from gradio import media_data from gradio.context import Context from gradio.exceptions import InvalidApiName from gradio.external import TooManyRequestsError, cols_to_rows, get_tabular_examples -from gradio.external_utils import get_pred_from_ws """ WARNING: These tests have an external dependency: namely that Hugging Face's @@ -466,52 +463,6 @@ def test_can_load_tabular_model_with_different_widget_data(hypothetical_readme): check_dataset(io.config, hypothetical_readme) -class AsyncMock(MagicMock): - async def __call__(self, *args, **kwargs): - return super(AsyncMock, 
self).__call__(*args, **kwargs) - - -@pytest.mark.asyncio -async def test_get_pred_from_ws(): - mock_ws = AsyncMock(name="ws") - messages = [ - json.dumps({"msg": "estimation"}), - json.dumps({"msg": "send_data"}), - json.dumps({"msg": "process_generating"}), - json.dumps({"msg": "process_completed", "output": {"data": ["result!"]}}), - ] - mock_ws.recv.side_effect = messages - data = json.dumps({"data": ["foo"], "fn_index": "foo"}) - hash_data = json.dumps({"session_hash": "daslskdf", "fn_index": "foo"}) - output = await get_pred_from_ws(mock_ws, data, hash_data) - assert output == {"data": ["result!"]} - mock_ws.send.assert_called_once_with(data) - - -@pytest.mark.asyncio -async def test_get_pred_from_ws_raises_if_queue_full(): - mock_ws = AsyncMock(name="ws") - messages = [json.dumps({"msg": "queue_full"})] - mock_ws.recv.side_effect = messages - data = json.dumps({"data": ["foo"], "fn_index": "foo"}) - hash_data = json.dumps({"session_hash": "daslskdf", "fn_index": "foo"}) - with pytest.raises(gradio.Error, match="Queue is full!"): - await get_pred_from_ws(mock_ws, data, hash_data) - - -@pytest.mark.skipif( - sys.version_info < (3, 8), - reason="Mocks of async context manager don't work for 3.7", -) -def test_respect_queue_when_load_from_config(): - with patch("websockets.connect"): - with patch( - "gradio.external_utils.get_pred_from_ws", return_value={"data": ["foo"]} - ): - interface = gr.load("spaces/freddyaboulton/saymyname") - assert interface("bob") == "foo" - - def test_raise_value_error_when_api_name_invalid(): with pytest.raises(InvalidApiName): demo = gr.Blocks.load(name="spaces/gradio/hello_world") From 19c213a944bc1dbf5ebcafe9bed4098da8539712 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 04:50:36 -0700 Subject: [PATCH 57/93] tests --- .../captions.json | 1 + ...92e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg | Bin 0 -> 20552 bytes client/python/gradio_client/client.py | 7 ++++--- client/python/test/test_client.py | 2 ++ 4 files 
changed, 7 insertions(+), 3 deletions(-) create mode 100644 2be57641-ee31-4a94-a674-a7c565e474ea/captions.json create mode 100644 2be57641-ee31-4a94-a674-a7c565e474ea/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg create mode 100644 client/python/test/test_client.py diff --git a/2be57641-ee31-4a94-a674-a7c565e474ea/captions.json b/2be57641-ee31-4a94-a674-a7c565e474ea/captions.json new file mode 100644 index 0000000000000..5471cc474c03b --- /dev/null +++ b/2be57641-ee31-4a94-a674-a7c565e474ea/captions.json @@ -0,0 +1 @@ +{"C:\\Users\\islam\\dev\\gradio-repos\\gradio\\2be57641-ee31-4a94-a674-a7c565e474ea\\cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg": null} \ No newline at end of file diff --git a/2be57641-ee31-4a94-a674-a7c565e474ea/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg b/2be57641-ee31-4a94-a674-a7c565e474ea/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c510ff30e09c1ce410afa499f0bfc3a63c751134 GIT binary patch literal 20552 zcma%ib8ux(^yiChy%_Jsw(VqM+qUfqC(b09cw%#6O>Eot#P;lbe_Ok?f9~nJRk!Ng zKIe0*x~o42{deW>4gg(FN>&O00RaF&{2PG3>i}^8EHpGU3^XhZ3@ifdzXu5(78V`} z84(c)5fK?3`F{i*85Ip34HX#+3mY2?i-?erkcjmE%6~>6;Naj;kWql>=s*G-3>NJi{vUybgo1{Fg8;z)(+Z&jAR(Zj zAphb2?+>89 z&By=G+y86)f6s^cr(*gC;-5f3|K|Yaf1E==VnC5&vWh}uQK+z)xQK zcMX6H1@R9x6b3*TfM!=Lu)q^%W_oP}y;P_(>Zpu*uZm=ja16)koli*CHjZgKl7i`S zstpHUWzxV~WwxP=urY&(u3&}$q6k*3xRdA)@vau(&s;Ez?6PC+D_n|_S`)S%BBYfV z4qXB?0kOqs2zvM>fwC9c)R5+TVSvjGRn|ylW!5F}7}{W2V4)?tVSRjM-h?LW+)EW~ zG@5shv#o%_i1Z}x6Ko5VE1%&0(dSUx<`R5N0em>~jR8W00*EX6-?H;)C_ZZ06h3STfrFg4gWuKO6XSxs|DlNHy078>(#3?4!3%P;L#_+-@cO(@f)WtCYuBNSOeB$3B~w6^m%n0 z*ohlDxO)6$YZzdLT$XLr-TpFrN44h&QS2pK3O1k{DIA8`m*1}B(pCli*y`c8sEfUk#0 z<0n>;CY76maW<03zz4rvYj-ZcH+JV-oy@+zBk#G(g*>f_%+O?|;xi6xSRiMdn8cDh zO(7cx%@G&ncPQi-_C)PtXU=JXxWWQ-+0Wqw10x&!urD+2hjo!r4n<-U0bvRc|B(5H zW(Q71;*PC5MFntKX+H$U%zq!5uHU1=-lSeY;)W9THD6rAi!V>7-W~4GZ8)7KEGbI) 
z(wZpHJW6k&`UP;dIs7B!&l8T!kzGA?wD4z882z7w2x67lTgi7Za>nX@hn9}WsaVg; zUNUFtt#%MUZ!zM@w;~(xmQA`8NU-`mVr!4F=a#E{<@n<2kFL`fYvyF;;uE{&WSAKA z8@5eRRwyBfM8H*S;za8P+Oh1o_|1ywY#th(a-J&p&yj`!b%{btbMrNIhz~N_(T5Kk zy;s6gdzQ9mdCMmf%FaNBx~PMgeF&MgEpwC0@r{|ApRS$;r+)#c?jJ(x=tVXtN$Cfa zWb}hh?ycUi!o~0X91SHAu^C2g*T=G%2&!um5lb=|{^*c>DWqHv_pi}_-2NCte@||t z8G^q6c+91j+=D{hHJNYDYz8@Qt$kG_sw?#A_YTs4JyWLpsO!XcCqzb{ER=ls*6l4i}Xtb8G7i6d# zi@Cb1oH1iae*qDEb;*;uETg8=45Eam=F%Rqs8YjpVNl6-3^#wyt->Dxc3qTSPN0z) zU{PKl#Wn!PM~i1a_Hr!gBIYz+V#+Hnb>7;|5m6+;+*F5BfEG+^VMzp$i+8111P2Lj z))Pl=MM|fSEt^Kfquwx{t>Z~tW5ed+vA+;L<>44OQHtyc*{YMS;>eNuG{Fh4YEi|k zYydO44nwuH6}!sUm9@;s+FbWnGodun7Hu*vE>zXiONnua1o$NYVBvT#yK+OA zYJz`&H-@k@$kZINxqEezG{B>fIHlds1g8=kgtAE@HKQ|;rdD?$%1-(rH3272B8;`x zUk97StuAxTcsWMO@>KB%88gl&Xu?TwU9RBO`GVv>=3~=%h~1_@Pp8xMt@JR`Ot=Rm zKbs{QuISDGl!qggcciqeIA+(s6Q3$HMn$dE}v;YVYy&R z`)Hmq8H289R%YKtd-TphC>v8M@S$TiZrW4bCSVO(-8R4=lEbMO@aM=jPW&yY6TLyY!<4P~*U z6tHV{eq{GEhmG{>4r<+Pci_PVy_8y1Gv1>}SdC<=wS!$l?Qr||x>f~&V zlcUJ|n!<7hHOr-yk}I4cbsRFHgLpKKsck)D6{+eX4=w5Yf(c(W#Y_WPyhBp0b|;XN z@x&18G@*Y%=zT*_B8hcze)Dtj5+3_R!x=1xCBw}S01%ec@+$qC<1gUUv;)WLglbL* z%v90*il^L5{83X>f}L`+^NPtIroUQuqiKYE@c|)>6^L)sZd?5rfA*+4-ptrpC2nIG zm$jTLD(tK8qdh;6`O2oOsCXXAzpOGu9|OxHa-lsUdm&;<9UZnE<7y=l#sMkQ;Oe-y z9(xHj!Pc4WDkupe5X>@dN7C*jL!w}?E)_6_Xhahcq@T1s@YRwDV@Lp3h4jOK1x8rp zpUB-#U<|~2g`@@G{mtXKR_SrIf24#Xj5iA#-V-T|BzZ74QcxKixYZ@|5_ZO{JRW<1 zqDEJhs`T^*NbpSVB&wti;uZ0NSvU6A(xaB(3`TBtemYE`MeYSq{u%|`HhI%iQ~8cz zXPW!mzzf8)=>*~8wGHZ}$VD|~G1tVM(_`bVLsr7-{TNwsWkVyC75u@1!}XpZwlreY z=P=`~T#<*36|V3XpeeZQLFW*j&uvz8Zy-cdQ!_Dl(I*o*;h9~r<8oL*<2Z1*lQ-vE z5_7Hp4gTPTMtPS+M8l{9S2xBClr2c ziMC2oKOQo`q5*RbI^nF5ql9(Mut4ydN{mU)xK0G>`O~+Z4uIk zF<^E4=-i+>hv*N|+z->g8#+7bBsgWsMY1l5Mya03+!9T5^qjH znQ4r;MEZRgj@<7IbpqnSC;b{ur2gDmw`Pt0oHu{7y-xX11h(axz+c2;1Z%IU=k;%& zU1fHYDyOU@4_Z%^)XgtiApvD@WO70%zfqyWJs2b7K5$<8`X|}w9KA-#8ep%Lq?<3< zI9NEBf!7U+Kl zd<`@kf0}gljT<=YJIyS+z{I{fc=`smT+|a_I6@0S1{ZQJm?J z$ZC9X$^Hrl%DqZ(`4gif3q#=szJovdr5SZ$r3$eTTZ#{TJ+ctvKC~y_9`PE$xf`km 
z-N2&4t6x!16b-;%KYe^#BxgLArKprW=@N9x4^Wiu&aqHS$|mMX*v=Q7=N&VR6jO}{EGodE)c z46gg;RO|~aq>VkTwIxwgSr@PlFRMpF z_ej*s?Z10a*9pg@_040H4=+|ZfPW2chA(_otm^qe=&0n>_dLRZy$BaE#EHaD_`@>Z zTSCcMWmf`fY@g03J2p1yW+CyH$gJ(b3XyI17$GGpD)Ah3IKO5{e7!CYqJ$1Y zVpP4=sdhBYT|-A|JZ3`)eo?bZe|!)3_5QG^P(@72qsJ35xbBer1*pATyZkXgA1g`; zl$}D+mCC;^%ulANp6aUv%ULE#L&tj_r2Nr0tXzDSRwPxb|A>lV*ogNjK{q7Qz#a`FCcwiqj`rASqsCe0 z{aQqRkC1G`AmjKA&p+q^v9IVCIWmq-X-oA1L6toy!K79&_SL(Hy(~^WCrf z%R!3c+$eT^1QcTSKHLqxaoB}?rnp)Bq{0>rBR7o?_0S!V*Ks}c=1MKLkt0dm%yOyk zkCvZD{E^69HZp79ZO{gy_0(#%om7Bbkb90(p^?m&AZ`cRBv+-R>|}V-`C*E3=E0N% z%kc6OU55kwPUS7I{zz$J)Ij0m9Xjwc6^E?`I?KRJNd=}8rrQvln2!|NtA_E*Y#oln zhs@c6TEoHOrEEn)m!sWISuU~Iv*NC2{eiVqZ6zKWErQ@{d)LR}M6aHGF-4SlVfS3j zz5tBGg*4S93Spl7I5Jxs33mgT(O$2V<=#K`;F?!ru2$j#=2v*8k@JoP(7L+}Y ziQf|bg>yDWq5kvk6Iq6*OjVuTK0-dLAbt~9F`}@%0cU<^{$+JRbyb|5bel9m*6T{c zrjD0yInhvcX_TI98TeBc4Db)ryuinz(X;h9Nm!t&=Vq3cg0-(kc*UR-{&61n?`kxK zIKi$4om=@~k>5k>Z|bKJbvB+|y?+7gk=NrDPoqE{;z?h0`Q#6xT>RUkQsTW0nzoD* zuQ+YY!MY-2nuABkj`?57av4BM2+6NcAAg`Vr)Yv>q^tp#+fBxW+X{dZf_s#E#J>P) zUX6H9LIl!SPtw|A*Vi9D`~e(IqJfpLy-fu-AX1r8t?38jY$A1R4vlf++(Pe+AnbZc zLarTJOHiy|W|f&3mx`Y50Cyu+lvPT5Dm7t3De>onU4e%^%`68}S#?6xIAbq=jG0|P zhTg?lNgH)4mwP{il<^MMVCoDtIz#Ebdcwh1T(#~?l&0U$?-DO$c2(mg zBH3`$OZ1MRbs@9eD~yNx zu&Yqon=-GE3NP#j^Xa>sq3x$K{Gn_43y6NN;Cb37dtD|4)kaO`vrG{iD;MkR0g|4d z?41LqLF+i*gZHEsnNiN#EK#rv?Rb>uQlUu4w?h(i1I1iu`KYE0*i}2lNa$_Q-aBO2 zCMI}x{c?rr8gdE*XfrLp1??#6`E`EXT}dbe36Q`EHSzW~3RqUnbF`UTpfS^lsFPhn zdu_;)q)zvi)^DWf66XB0iFs+^Chub58A3Dpd>Blk=QDAai!e7RlcheTDFlJKg|>aY z$I8AJY>lmd;Lg-_hHg@(d-EHm!7V$w!V1&PgD)DKap}rfx;p4gE_7|XH zpiPl}t3)5P0%f6aQ7>0pKO}u&0LN0SXbtq19E1YJ{wOe|2+@W^W!3KY;eAse|a)8#o+OJy}cG6jOJ*sB}T?h*|k1e7Fq_NP%0Pv6)Y&sH*V=mx)2Ce1OU{+HeYucNj0N@DJQ&wm{0`q$1G5|c3bQpl(YCKcJV`oPU@p((p78zP72!bo zV$0#b09Vk(^l9dAQGbVdDkS7(L!!%GT-rns`1fb&JIKqzari=Y9F3-k_z`b(2^SBq zQZQ#Xu4FnIE0)H7C!`}O=Ekj@R_AnAu8e7(3Dg$p>JzY0-%r|>AFug!oP?nv)SCqY zX2YdP%30{SCff(}{wvej3VTD;84(1(V#ORR6#&35)xy_>DrZdCw$*g8nU{Bd14)=( 
zgbtXs^+gB-R5r6Uy78z9eR)nmlwaUh69-R4IUnhe828T%f_$`tN@LD@iyL@7d!hBw zx(o_HH~Z8a_|fp=a%K|{Rgh(zECn4gy@V!Vlg9CqUKBh67CvwQqE9nyT}1auQgIf2 zZ4vX7BT@BDer8um=fin^lSa>DZyT^kE3XknG8B~$zk!+Z*EL1he z1iZ79AU5*oVwwdsrK#j*Qy;jM&Xzn2-ZW#SIchX!Xg1(HkVSX3fVEOAvweqMS&`y~ z?%(-fxqV(|1(A^k8R>Qm()0;*6wp(AFCMN$gC?&bzGlowXu_|N z#UFeOyeB;zrig5*>-=hO3GtyAS zo0Y9Tcfnr1@55S*;NsvJCY0S`z%JU{+|?YjuL2KB9B8w2efL>;sp~eBbbhT>Axic) z0SR0#+qxl>|;qNB%6#(8I2r4HpiYU?t@UTU05P{=6lDm+DvTx6)NGRd5}8kQ98 z<|f*oKX-Yog~QgjFfv4w=U4F9PK=%AGl{hg6Y~GZ#(z}YrY{n`tKAABDY$TrVq+W2 zWA-R6E;XNK8$UwWW%pdLL*?PxN24DuG4LV-Bfr*WRV*s$GceB$%e(`-8Is%79-f zhB^9X=U97wnzX=ih=CkfzX{^0x76i;QwE&To_z>iR7VLVU8a60SycW7pr?r<;DbQ) z;mW-KHtBROR|kK7l2Ei4w{^k#OUS-|Sa7ij@e~~I`>DFL8*xWB%c&HhhxxZXG-$I-0ufD@ip zX~y*Zx>2o-(qAm*mIv6&14zAu@#V3w!qxtaJ;PcgretkO8clp*m742KjDZ~HFYDG5J zL@cHy-P)01x>h<)Fw{x&Kn7d<;O155>s>MXb1eh(HAC(a{POD_`K2MCGLu@~LgV8A zjs9mkT4OuzPVDQBOP+2i22t!I;X`0?nzTL58iHYe_iT&@fmpM(uVHaBd$KdE*A6m~ zbI;@On_52bVxn#u-7WbUqTRE0fls|av&qLS>e*68BSMbI@!Ydsuy<$nfk$xp?ma#4mQ*Qq--vXeHNC<& zOgqHk-hk~4jX$Exd1_2A4IT~AHLy@+@WeRx(TKV#MR!JNJ_jE*vb5hVa0~kus&Rzv zQv0^YTsZy4Z&~w83|+jn=r5Vm%cL;nU$g>MkG*n3B9v_Vxv<=3Yn=%ySysg{@J9U- z0dPiLGfWmb6jOh=kb~iShrocG-?n#xVhZJ;7D9i0g$N$gOt$OP+egrp6n`ihX|*QL z_te^3H#rABD*>4~5qy7X5{Jm~gz>)tm>Zr7iChQq&6n)%h5|&Qmx-thE+QKJhK`g|*|)X>3t(#a5mIO#b7CYRkfC)T4vFrijqb=%!4q4oEIL zPKCK{fmH#z<9VUh8vB#ep7B55fQPLu$ui0G>ai(Alwnwx$HZM7TGVvXT7)zQ%ef@g z5o_>k)~2z^N#*1Tf%w1iN^H71!b1+qRSMU)9$=0mvGkB+vS~Ffjj)-xF^0>^OOnqm ztHKuehzP9AOT#6k~w_ilE+Jr26JlV#P(@BG@**FJN-W;DE* zY~|@iicKXcgles&UgS?YF+Q0&`X0UtZ#iwu;xY|J9VQtniqU3X$Si0)1Hk%3fF*c( zNmQoo_j2KNSQP!FvG7$ym{!cf+NJ_}_NtO_`*P@Y8k09fsR78iPJ=@%g1jYrNbnb% z1h^=N-zyTO1O&>?(TE43@P7~bT#?OQWUkh2vqigW0sDPYvxK^{HC$w)!9R@V0{BZe zPwJvmrY`pVhoS6}Lfei(7lxUmepY!jO+|XsIHgK($wNaRhM#cROsuh=2bqVHDJLiW zNayAA?$y%D3;VxQ!x|k@z*Qt_d3{}>5NoZJRF6|9l8hj$Sq{6l#ysTk(8aq>mQf3k zkg7}L@lxkeG50IqMj`3J)P*k>Q_cc&Q&yH5<-nx`R?lQJr{%aai@;VB@~>l+E@EIo 
z;cu$++2v_xugaX9Og5pEbHb&_lGHCo8aM+j%Y_|!1pBxUx~+UhV>=jyL^0JD;Sn@4;ZCSEcUhe0hOo=nm7+&}=Q&fty2 zKILepz86<|y*TB>6PFSMIg)fco6D3pdC1cwV+EnLn<^KhqhpH0LX6bOtT#hfmrK%C zQ=wA<%;ZY~tK5LR>rVy%pu;aE&)>6he?*!}_@`g>P)5GHP;Hgodi!=G)fPURIeCph zN5(dI*ZW^F<}AWcnxlRmrW?I!vqi4o#o2oUa|!1&d52mkE2A=n({*Sx3m{^qO?66U_S4JpG6iqPWHPJi-&b#6_uU%yK!-boXizHZ znL@R;{F!c|qFItpmn2Gw?;!sR=zsiGV@tsBt@0{fX4@Wf?1z`q9Wtuu6OL@zFsI}k zrlJx~qdj+$B->DOnR6A8$QAzFu98jxJjLKpvc@9(9G5J-zeGbW&Y(;@4@-7#mdzK> zA5XWEwo;?Zx*naWgM^RYdw|>1D|ihDm1lb{_NK{tfLNN8@}`WEN!tJ8Qgvxhk{+@i znUuudlh9{UiEY+iFkH)1OHEfM>G`@|47%poKF{Re0=diCmiDcn@5a6NJxSccSC-{G zm>6=_u7A38Y2mb)PN66q8rLD4y>jd}V5(uHYDuwN$+P{HP^_aUiXE*PX&-ntHAhQ0 z%r#3AU#)$MzujZZ>6H-zD;P4(t8|cR5YqLGE)_lX+CMQ?E zY5lmDWr++pn9DpiKn_C4>}5F*ckgL-;k4S*Tp6QG%H9bZcpO_MD|M*4Z4&F#I$F~* zi!Jf2SD>$_RCQ8ZY{RIoU)?HA9-7lY{Ck1tK;@~t_z^E=KS(rxK9^V3&KrB=B28AT zqMmf&r(;P9ECM%Sv$|+0l$@!Mq^}^ zB159DuduE%$}}9Gy*AGX)u7h2QQUI|w=vX1{g=SiDFs~&M^2{IX^`FW2MVi|V*=cv!%IU@#) z_@&U%XPR1Qk>TGV;fLCU+Y+gCGfs+hqG?2m4`adQsl{U+7rV&>La z5UElu8pw{pqS3>qECOzs)S3*8RU*6A3}Mk>ZqZrL9l%lvbE zcox#i8;ZuVr7D;cN}-3&=x9uyPlk(MSSeG(YQ0>^En+md&7cQc>DjW8t>!uB2f}kh zZ+)7plE$$^ZohnM3o(hUQwuJfOZ~aZ_DLx7Kqz7fd1g?FZIO|r%z``+KsWGslw)83 ze|GXpx_da$>}yVw_qZ_iOKZ?+aGq@RDw!MBQ+fE6>H1@a>P1EDo z?qd~?l;xV9`&5F(R6%vTe*sq<2Re(;ZX$3$_UgY){W$ZhrZ`lW`n4|8qd<6*7?4mx zYxOU{Q~cp}L#mv{ln|^Z*Lzu{f%vUiRz85n?Y_3l*(`G6-hw%rT_!NUeu=}5ts#r2 zs(G8=xN^*|pl!$Dz3ia)H&v*;V6w+221_GZy?A{B8MnpGt7Q_WsEen>+$AT@94_II zQMSqZu`-&l$rRjEN$euTu(8rW$MgLMt}(tUU?;KlbK#=h@@V5nzy3B(9Jh%Ew(BtXn)Tz%`CG~d zc_}5fd4R@(>zQx%w2O1%FusL4FNBmIce$|deLjtudOU5pyoGJoVmr95jb?=`-sc`h z?~S07nkXtoHJX-FuNPh0nVLA{4#0KMclj4kO?rQWx9Na0WMiCklxu|6f5+YdYpm2A zwDz^(265PO{91mWBp6xqjaHMnrt$d-;U3RjkW!HE-5rg*yT{;o zC8u!Mhfcua8qQFD`OjpTqGX=DSPs8TMJK(FetqlAXDgr68u}1tmVh8V8yitbZ25p z8<&4VK=;0Ct_@f=Q*+BQ+7sk>Jl0AW#4LBFBvG+)6XJwOW3qo=kWp6Kr3z6Y znVekEl$7A>0-J+s2W+(2h=1Tjzr}2QD#<6r7J0b3&<*E7<3OgijW#nAR?pu)rr_vFxgMfk08WK`bRuZ~cVFN8&`y%{tCm&aZ?r#dQ;MZUZt{xplznUA(v$ 
z=j->NyEN_!8IqZ{^=PC{A$~qZNShipZDW&}j~asrmVG!*%LvJx`R6Q5byTH1D4 zm2B2PK;1#G1o#YpRJg5}6$lSbVk)?m3?vqGO}67$4$r~&3&{F1Vs!8C`I@W9=c_dayLVc&0Et|* zqlSXRK~2mUFQg{!`pWKF{ky{dxVe5XFPL?h%q&>JWojeqb}D?%Nb*=wnyDD)JXy}X zxm^sbx`D=oui+BR`X_rKZ&tB9loXcFBK`S(he5N_eO3~fRE$cI6k<<=()wXLFDDxW zeKGhK;HId3sp*vJ&59fr6dTO8EA5{vTGn`3SzoQOEw0?jw^+1I%c}i@!*tiR2H#uo z+D$ZQK{50bMSCm8!J+ZsmtTnRXg*fAt1Mi~bW2%jzZ~a1lb++}=qpLSjT=Kr$372cN-~H{AxRtxhM_LJU|8OT(FLmsNxJ* zn=dN*5ou600iBuPILn5SAjD!hjHsHRT`9PcvypG5i)|6PYeF0K3=?GwG;V6pJLrTS z_+7an1wSey=`Q2XUEAw(pvqx350|psK*jfFwi4gI)3~iWrGu8*vIWu)8&YJK`SQfh z%Bvq}qGlb7=pEO^EILvWW%NJ)0#ZRSl?Dkx0dRNi`i*$b17Mk3Idlo*eSD~!ME*>? zc}{Qk_@?Ih+CVaE%`R~g`%doUO{`y|d33+|yFVXRV>EDQCzBjtnQc%SED62+X@(XI zf_ao(QF*6m1j1-^gqG~=I&477>Siy|)ZmDn#M1e@q9-mZ0KL((M$uU}n>glrp(CL= zVPZ0@dG9XT6X!gd%5G9F!5XV#(TVgaikMn}6+_y@5x>s-L?i%x!xgQpaku_+1!y!lsg-S4o#Nu@ zNK81}oXH>$2<7(+hoWFZJyZ!_dwzJpK{&Y95Fb%1KTF!u6)yoUf~6`~;M)xO7H$44XBsj8b`q znZBeAMc3O$CYOZA%6@`xEvpCzuFGNnPVgvV!K0aK=ATVIo8VP?sg55~8#}q4<$@!n zN=Lt~k#4M}ub^jI?()5nJW=7NYw;`V(f6hRy+sAlNta-1u!;;yEG*co0Ai3te=e3n z{Wh^7J9b;2gV1^}>Rht^>7RESF;+>~G-Z7;y?z_DQFBuFn z_Wp#l@BWBK|9Obp+y$w&Xh7+hdK=Xpzbrhz1GKSQ-{-?LUOON7!34I&&Ww+FYN!GE zE74)}=l4g_g+=84Jq8Nz-DZWZ){%Q45wahm#>}*(#!Z;gqPCuGvkdOKQpyokrU*{o zG58>1#foz=q`H7tGllvjz_jI2CWpDJ3yml72f=b=v1n z+S-I7L<%(&5lUfLUR#-7x+E=y;)fjPbx6_&luS&wp{vhy?Qc%&{d9@qWHK$CKfd3~ z-RzT)B58JD;>^Y1SF@phlfCFA#rzq@H?rc`_|h(jiLl8)xrbCA;l`YG8T9fH(>ILZl&@n>75la_E||EwD>AD-ToQZ$clFGdd3K zhXhsGQhnHNS&}JxwCn^-GNJIB(~r-E7)7RcVM2iV(=<_=VvIFD2V$NafB1&JYu|XsVnhmoQuZXJ$uKBPx zl7|Hui89Yvk^pO9BtwT3!X$Q_xqCbGPNo%x4_5V|rasAYk##|0IZzD@JJQR-uQ8>z zI!uxp%5^b_j*KaMrwy`4_;-yZ9-5>RO-+QV_qtMF=GF0RNypjPbTknHDPC&!i1DK` zox{_8&lizTS}WAfZ+)*6*#+|e7c|pSq996%ZKy?M^?Gb*mzkh0{x8#EAH!Z}HrWk+z;(?>aP=CN(Z>wdlu@@UiTsZ?$Yl!HjGp2SiL#72ThW)YB zAAX#!bmwXn8aBjD02;oNp_El_uehiU;&RC%W{zx#GvIGK*W;V>-ogga)dVKGvger? z*c7>T1smv;odgIocB%>AAPpVK1M5>TUpvEM@!=hE?nEtGezvmHhN&!=EE>K7)id&0 zHq6FBaHexWmz8?{)(7m#aFEi;q_omPw=eLJ%j?R0ZlndDZZ}vvCW2>eT`rL|Or}9? 
zHQT{jcz)MrMGEYJPI|3%;vZXaY0o-LQB{;djr)x-fI=4UC@EHQg$Cg{!jHsNnD1aJ zFRT#jdh|o164fauGKXb2Yn`%!s1+SThoTh`*_D!f>5C$2T#3e!&`^8-C>A-K9KW~nH z&$aaXozGH6K;IWDeG|x2@IHCVLAv?t;1cbZbcw^EXIUQ0GC+@udpY3F(C+^&73WZjjF*N ze2K;Q0$Fj!fGHS2n^=ubz=iTuBGxD$%u;1)TbMaSFx;h{xLyYt5_NeLCDnaYpMZyr z{1rdkINgB0deW*)=y4Niz%&sfS)+A$;}v<~mR{0DF!NZFIzFYibpVsh1=fpYv!9zd z`boIh=LL18VQ;#$agxn-R;rvM0|QcOM;Oce^(}M9#p_qOyyU&s%q6ZfOR{J`{Iwvz z)vVT5S3W75@*3+F-wWWsew8AV@OfE=ds(dERlip7#2|QzwLW{5jN!Q^VAmynd2cm#iOo~| z_)~%!Eps?6a&qBCWMd!gn6+))0~!DVNOq`8i}~|Hypxu92h;?rZb{J(#X_2#J4@5+ zj;SfqL?ye!XSz)6%yv1(1I!2`S+a|I=wV@n&koFE!!9P}1X^TlcwRUR)8?-R3%N?o zn_FTpy11Xq)Lhoo-?#(12Hgj_*LV-+PTJZ?(AmmKD3Hm71K)i=)z5J1eRO2qxZ|(B zPkUrSh4Ma#1(o>{V6*Do&aBD-RO@-hjFS0ZdqbShZgwe6AB`eMb%;3jquH;UR99Q2(FzNW>BtswaYkeK@F>XQ5Xwulaof~&2` zZNBcOmT4%prz2^N`1tdH8#Dh0#H4%nSJ`Yg5iqS)@1Su$5|_8YOnsRS@@CuHOx?^v zJ>RV#0c}WrYP)qFSyCf7C95uzBSjkB;k{@$m|2k)XhW$lYBi2LF ztWK3G-Q{*#tE(ck3gn^pJ1^0{&X#<~_$YY^)R4u06b9CNXyr%ln(i%KsQ5DI&I`CnrcD>y;k!P>mi>?v z>x<`yje3<8c2qh$i^Y$-!*%91`rgf}6VXDl0gcAc6IuIVcCu}Ri!`EatJiRf)Dtg) zRiUvp4Sk$7;D>lDrtlxNL^Vc+b7G5|>lIqbDvM|y?;#w&b ziTsPruSKs`VWl_j+SuTI;8obug+A|mVJ(!VSli5BK&*8TW%*E?OCubKCSZSNpQ1E= z={+?|>y;(9@l-xHuX7cIZ8@&XogkAzJk$#Y(r$&R*O6tvY2|CbIa#|)(CJggMetm=HIzwM*gOaWc)O#P(Mdq z1dW^|Z#Fwh#>OBn@Uo(};_`QpGPOII%LEbA|Dk+yo+T=sHAmK{+ zV5<|yotWN4qyE_5+vYN;Y*B^ci;)UcgNb~==^j1yWpVw9pJuwHjC}<7?EnxY)K@f* zk8nqn>c~6%rh3)0#^63aQowNeV=XwrsJ_xPi71KKu>gAX7mVl{9`VcWZm2Q5deCeGhK{h%=8JnH5sCqY8)BlJTUn0-!&RAgXR^cUGg&2qH z8QV-c@entfv)_D6KITdss|z2=1wIVvxgd(9V48`k-l3urT(fYK&d`)!{wkGrUxbh) z8N`*YYLNvYwlo~^Tjmh@2s})E$d%mwBX-YC8T@O5jAaK2LwQtn7{zthh-N>@N4U@2 z`}tMOvkKfE-WP=m8#Z|55%z8wS^bn)TGe-CFHV1_uhGAg3y(LwWLtmbYFnsaCnW>f zrCm!Ll>|3jAa^}f(J9s5*+}`GtF85 zkwtN>ow2Vig(@47yQEA)(p((i1me{vmAkN8XG>MinU$ z?mGZ(t3ozb?$V1QSC;6rG2z7g>)Kxcm(DzpMoO51U5wuWR#)b(l4|+Um~^H5=Zso| z8nfwd*?pl6m$yZloLE>|a->{m5#jr4yf2K_b>R}%z~`;)-FhOcZn|aLXbpZUn^%(- zNXy;NBI;B0{-D=7V>(G7JNx)RA6={JC}WCx9#Uwl=>ELa^=W`ck_22M zmOfU%9J?Mf#|pHWqrn>+=$aqG&m$WfqGYJA`0B?tpIm&tY9SvL-_$ 
z7^!xuGY_i-=AKlLsDWcUttv98s&uFBGZnGTZ0p1cHA@if8x62(Fmq8vlUic zbAg5a-TweOnw1RVAub{e{y7nNL=x@B0rfuEJ?S1)yC2(r0tnvD*$R`Hj@_!f=XcV} z_NzIi{E*_=DK#5JM!^i|8I~m=vnq0Sk6dP&IZ(2LGR+g?wT8R%T7p_UHg+>CQbmVp z(OOrJ#F-Rj1{54){i<1ICAUSf<4;DhYkLNvrmf5jOj9OQ_CKXw85H4YgrMOnWJw`u zUfmZ5V~HPn^T7Nk(S=H`kxT8CZ6E-)cpdAZ$bv*jbd5GCq`8I!NvGyn0P{KbX6{WV zwI0X!CD9XBeA~PLuxC=EZV!Gt8s^Q_9T=AE$8{UcLfc%Mr}cX&Ak(!J@I~0$Dzkz7 zw~T@U{z9A5G^5_f^$V8F>r~RD)34^TmnGq1ry~c7?6ZrB>sS|ZpHb^ml!uBgVe*U` zgD((%=F(6?O=P4A7QZ5$$*3wp0o+(*;@*IGxk(``%;-~E~`J&#-SGK&FVz-bN;2;EX z?rB~(V=1EY-J@OBmf#m(Gl9<1+~ofNnA0fI;)HJY$&jdx3~F+z{v&(0u20&j2-2nC zoSnNRJA()<g@gqEyb?QAgGZ^BSbOCWz2#Dv}o} zLb8^Pm~K%bA3SaCnw(@&z^UpEppFgG&YQd$qe-G+k>JV*JCA&AZKpk}Nn5`rjT)=?TI5qZcwoPjtD2xkz#0fB*-?iNqN*W>1nEx(oH1TjkXk>o3suH z{6A9mVNE5)T>>>N8ZSlsGKWZ&@9tzom*?G-d(!H(FiAde7=0m8OA^(jN-XtlI63a zSn4`E*ZoZ&Pw1b-d}yK@)nsW78)-N@N8(}Wil$cUU5SZreHWS*scot0cK1+Bv!tjP zOoxW!zuKD&`A2<=mK<9Ge1Q?J(g>w& zD6m!qN4-e08*5b{am{-UN1C8}-g!2L7T~2vvC2menpcNF7DJ_>t60BeWki-qgx%DQfps1Uc#xKZgZQp?8 zf$lp}B@#tIWKbC5`Xq`03mn6c{ zI;awmNr?7hzxS--8!9@TZ0~f|cz>5Xv4)1}PvQfD+0XiU5zlUFbxgC`8~B-J)IC{e9-E~| ztin}!VRc7~YMdWA&cXSQ)KPM+ix?Uo!|eiX3#M&!`B*%WFPVX3`gy&+)Nn=B{ewJ{ zWuH*!HahN?Err`U#UT2lJ6I9TQgBjRCkeQ{6dEkoHriv`&doTte+eZBz%WyR!O1_J zdHCI0M|9QGD84D#E}_%=fLQ0#rCBzd=fjXSo)9Y&!udS zLqKb)<2lbL0s%{8`i>4kclF8Tz0GOmbZFyNYPwCGwTuzO2rnlt z!?!*E0CQRqOTQqLo9MG^z-AWceC@f3K7a~-YUv1}s}Sz8*~X)Q8*k_iD>rdLN~F$6 z0g12<2vR-rzQ5YFS_SO$7-qF%uZY__TpVpDpX_L=XxmI3*L4fC0E@tB9CMUyg&)@k z)P1Tm$jd_-?4{FgHQNIuliQ1pH^eMehD>POGPwNv=ZaM^oGdi4T^3h~cf;XIRCquJ z%Mq0xY@hVgIY;{vsW0R@u#yOqz>63d8<#L{SY%Q zu+${CY2aqfqBkHe?0|lfGoC)ip%_y-d0H~wq4 z8mTO8flVV#=&fGX_TD>>7A8qEw`1jx*dN7GZo3_?qe-M&B zeV0u(k}7luZlmdY<(@6K4;#m|yyq+Q`G@IUn4P1WGNxh|D(LSVfcS_?xIT=h29t@b zgz}U$$|CYC8dlwpYO4x5_d{kIy&^-|Up>%cfmKAL$iawg`Wky6(IHq6ImIhsqDVPX z0l@d~`_frNf_|U*NOZ=O!mTi3OZm3{0P-H6rDVz^6504Ji6S=B_>oRf9{3=4tmG7o z!{a8MdvkFF>e5dV;4Djoj1Y5@N0{T-aY-559#PSvbZ^PY({?3X6XB=RTT$p1s&@=agL0qDQr-*jgw&nxg7rh 
z;+uk@d9sFiE*9=ah~fy210Kb_$JuL(m&wuCqNtaqTm)5SVqb31Tk72GPCrHOPmR{Z zn($BF2Aw0PISk4NY%e^1wQKk?Rq{wyrFgc{!*HZ{Qh+jj8Mz<%s!CL>NJiZiYjV*% zhCem2VC4tYtbIQ!=5nX5i>Tjyli?7_m6R^m7>_(;`+bcxUG^lEbWgXl5u>ETd`t-5 z!<~)Cn;pegD!U~qUqlT<;n}}dwt`l+oJJN4nA;5)`hqq)rhV#f1v*_6_tje~biT0F zx>u-U(E6s`-(+A7QUxxK2h!4DamVdT7&yw!DJjQ6=Bd>-7Mg6Dht0;8%BtCB45eF; z!H2jt(e7;L_WsNlt2(yQey@T%Rq?H6;mbC{Bqt(8@c~&BjxY#5^;+@(Q{o3sHuSl$ zhH0Xe%Us%AC|!VZd?p<22a$~PPwrW`PnDH&7OKGAlD=dnm1miuv{r>=J zMMJjHMQzqPJ(a}GEZ-?-Xu>5>xp^78?H$MMMvNTWYBA4jE@R@|z$omcM?JVcW12?T zQbKLDOL?wLR{4@h_;wf|A8p@n#qCg1rd6v%)q+_8X;`l8fTOTlDl(GT9FpjZKAaeX z-8W%Z9)hOv6j=z@@tb&=lx4P%iaT!q0O_Tg;)jRHj>I^E4CQ<>j5~K+pYKecNLS^I z-@84s#NK4yRC{}h6K{=w^iheoizo;0+18VH_G<)R#U2+Kvr3Gl`+NhEVriWRVUTwLmXUY5T(w&pk5WWi|P zwhjlAc0Z7Z{-IqhS~y>36)+jBjc5Pa Future: if api_name: fn_index = self._infer_fn_index(api_name) - + end_to_end_fn = self.endpoints[fn_index].end_to_end_fn future = self.executor.submit(end_to_end_fn, *args) job = Job(future) if result_callbacks: + def create_fn(callback) -> Callable: def fn(future): if isinstance(future.result(), tuple): @@ -195,13 +196,13 @@ def predict(self, *data) -> Tuple: f"Could not find 'data' key in response. 
Response received: {result}" ) return tuple(output) - + def _predict_resolve(self, *data) -> Any: """Needed for gradio.load(), which has a slightly different signature for serializing/deserializing""" outputs = self.predict(*data) if len(self.dependency["outputs"]) == 1: return outputs[0] - return outputs + return outputs def serialize(self, *data) -> Tuple: assert len(data) == len( diff --git a/client/python/test/test_client.py b/client/python/test/test_client.py new file mode 100644 index 0000000000000..a1ace313b166f --- /dev/null +++ b/client/python/test/test_client.py @@ -0,0 +1,2 @@ +import pytest + From a6a601f72d4d78636b5110be8976a286fd16fa1c Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 04:50:52 -0700 Subject: [PATCH 58/93] tests --- .../captions.json | 1 - ...92e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg | Bin 20552 -> 0 bytes 2 files changed, 1 deletion(-) delete mode 100644 2be57641-ee31-4a94-a674-a7c565e474ea/captions.json delete mode 100644 2be57641-ee31-4a94-a674-a7c565e474ea/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg diff --git a/2be57641-ee31-4a94-a674-a7c565e474ea/captions.json b/2be57641-ee31-4a94-a674-a7c565e474ea/captions.json deleted file mode 100644 index 5471cc474c03b..0000000000000 --- a/2be57641-ee31-4a94-a674-a7c565e474ea/captions.json +++ /dev/null @@ -1 +0,0 @@ -{"C:\\Users\\islam\\dev\\gradio-repos\\gradio\\2be57641-ee31-4a94-a674-a7c565e474ea\\cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg": null} \ No newline at end of file diff --git a/2be57641-ee31-4a94-a674-a7c565e474ea/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg b/2be57641-ee31-4a94-a674-a7c565e474ea/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89ak8rl6lqe.jpg deleted file mode 100644 index c510ff30e09c1ce410afa499f0bfc3a63c751134..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 20552 zcma%ib8ux(^yiChy%_Jsw(VqM+qUfqC(b09cw%#6O>Eot#P;lbe_Ok?f9~nJRk!Ng 
zKIe0*x~o42{deW>4gg(FN>&O00RaF&{2PG3>i}^8EHpGU3^XhZ3@ifdzXu5(78V`} z84(c)5fK?3`F{i*85Ip34HX#+3mY2?i-?erkcjmE%6~>6;Naj;kWql>=s*G-3>NJi{vUybgo1{Fg8;z)(+Z&jAR(Zj zAphb2?+>89 z&By=G+y86)f6s^cr(*gC;-5f3|K|Yaf1E==VnC5&vWh}uQK+z)xQK zcMX6H1@R9x6b3*TfM!=Lu)q^%W_oP}y;P_(>Zpu*uZm=ja16)koli*CHjZgKl7i`S zstpHUWzxV~WwxP=urY&(u3&}$q6k*3xRdA)@vau(&s;Ez?6PC+D_n|_S`)S%BBYfV z4qXB?0kOqs2zvM>fwC9c)R5+TVSvjGRn|ylW!5F}7}{W2V4)?tVSRjM-h?LW+)EW~ zG@5shv#o%_i1Z}x6Ko5VE1%&0(dSUx<`R5N0em>~jR8W00*EX6-?H;)C_ZZ06h3STfrFg4gWuKO6XSxs|DlNHy078>(#3?4!3%P;L#_+-@cO(@f)WtCYuBNSOeB$3B~w6^m%n0 z*ohlDxO)6$YZzdLT$XLr-TpFrN44h&QS2pK3O1k{DIA8`m*1}B(pCli*y`c8sEfUk#0 z<0n>;CY76maW<03zz4rvYj-ZcH+JV-oy@+zBk#G(g*>f_%+O?|;xi6xSRiMdn8cDh zO(7cx%@G&ncPQi-_C)PtXU=JXxWWQ-+0Wqw10x&!urD+2hjo!r4n<-U0bvRc|B(5H zW(Q71;*PC5MFntKX+H$U%zq!5uHU1=-lSeY;)W9THD6rAi!V>7-W~4GZ8)7KEGbI) z(wZpHJW6k&`UP;dIs7B!&l8T!kzGA?wD4z882z7w2x67lTgi7Za>nX@hn9}WsaVg; zUNUFtt#%MUZ!zM@w;~(xmQA`8NU-`mVr!4F=a#E{<@n<2kFL`fYvyF;;uE{&WSAKA z8@5eRRwyBfM8H*S;za8P+Oh1o_|1ywY#th(a-J&p&yj`!b%{btbMrNIhz~N_(T5Kk zy;s6gdzQ9mdCMmf%FaNBx~PMgeF&MgEpwC0@r{|ApRS$;r+)#c?jJ(x=tVXtN$Cfa zWb}hh?ycUi!o~0X91SHAu^C2g*T=G%2&!um5lb=|{^*c>DWqHv_pi}_-2NCte@||t z8G^q6c+91j+=D{hHJNYDYz8@Qt$kG_sw?#A_YTs4JyWLpsO!XcCqzb{ER=ls*6l4i}Xtb8G7i6d# zi@Cb1oH1iae*qDEb;*;uETg8=45Eam=F%Rqs8YjpVNl6-3^#wyt->Dxc3qTSPN0z) zU{PKl#Wn!PM~i1a_Hr!gBIYz+V#+Hnb>7;|5m6+;+*F5BfEG+^VMzp$i+8111P2Lj z))Pl=MM|fSEt^Kfquwx{t>Z~tW5ed+vA+;L<>44OQHtyc*{YMS;>eNuG{Fh4YEi|k zYydO44nwuH6}!sUm9@;s+FbWnGodun7Hu*vE>zXiONnua1o$NYVBvT#yK+OA zYJz`&H-@k@$kZINxqEezG{B>fIHlds1g8=kgtAE@HKQ|;rdD?$%1-(rH3272B8;`x zUk97StuAxTcsWMO@>KB%88gl&Xu?TwU9RBO`GVv>=3~=%h~1_@Pp8xMt@JR`Ot=Rm zKbs{QuISDGl!qggcciqeIA+(s6Q3$HMn$dE}v;YVYy&R z`)Hmq8H289R%YKtd-TphC>v8M@S$TiZrW4bCSVO(-8R4=lEbMO@aM=jPW&yY6TLyY!<4P~*U z6tHV{eq{GEhmG{>4r<+Pci_PVy_8y1Gv1>}SdC<=wS!$l?Qr||x>f~&V zlcUJ|n!<7hHOr-yk}I4cbsRFHgLpKKsck)D6{+eX4=w5Yf(c(W#Y_WPyhBp0b|;XN z@x&18G@*Y%=zT*_B8hcze)Dtj5+3_R!x=1xCBw}S01%ec@+$qC<1gUUv;)WLglbL* 
z%v90*il^L5{83X>f}L`+^NPtIroUQuqiKYE@c|)>6^L)sZd?5rfA*+4-ptrpC2nIG zm$jTLD(tK8qdh;6`O2oOsCXXAzpOGu9|OxHa-lsUdm&;<9UZnE<7y=l#sMkQ;Oe-y z9(xHj!Pc4WDkupe5X>@dN7C*jL!w}?E)_6_Xhahcq@T1s@YRwDV@Lp3h4jOK1x8rp zpUB-#U<|~2g`@@G{mtXKR_SrIf24#Xj5iA#-V-T|BzZ74QcxKixYZ@|5_ZO{JRW<1 zqDEJhs`T^*NbpSVB&wti;uZ0NSvU6A(xaB(3`TBtemYE`MeYSq{u%|`HhI%iQ~8cz zXPW!mzzf8)=>*~8wGHZ}$VD|~G1tVM(_`bVLsr7-{TNwsWkVyC75u@1!}XpZwlreY z=P=`~T#<*36|V3XpeeZQLFW*j&uvz8Zy-cdQ!_Dl(I*o*;h9~r<8oL*<2Z1*lQ-vE z5_7Hp4gTPTMtPS+M8l{9S2xBClr2c ziMC2oKOQo`q5*RbI^nF5ql9(Mut4ydN{mU)xK0G>`O~+Z4uIk zF<^E4=-i+>hv*N|+z->g8#+7bBsgWsMY1l5Mya03+!9T5^qjH znQ4r;MEZRgj@<7IbpqnSC;b{ur2gDmw`Pt0oHu{7y-xX11h(axz+c2;1Z%IU=k;%& zU1fHYDyOU@4_Z%^)XgtiApvD@WO70%zfqyWJs2b7K5$<8`X|}w9KA-#8ep%Lq?<3< zI9NEBf!7U+Kl zd<`@kf0}gljT<=YJIyS+z{I{fc=`smT+|a_I6@0S1{ZQJm?J z$ZC9X$^Hrl%DqZ(`4gif3q#=szJovdr5SZ$r3$eTTZ#{TJ+ctvKC~y_9`PE$xf`km z-N2&4t6x!16b-;%KYe^#BxgLArKprW=@N9x4^Wiu&aqHS$|mMX*v=Q7=N&VR6jO}{EGodE)c z46gg;RO|~aq>VkTwIxwgSr@PlFRMpF z_ej*s?Z10a*9pg@_040H4=+|ZfPW2chA(_otm^qe=&0n>_dLRZy$BaE#EHaD_`@>Z zTSCcMWmf`fY@g03J2p1yW+CyH$gJ(b3XyI17$GGpD)Ah3IKO5{e7!CYqJ$1Y zVpP4=sdhBYT|-A|JZ3`)eo?bZe|!)3_5QG^P(@72qsJ35xbBer1*pATyZkXgA1g`; zl$}D+mCC;^%ulANp6aUv%ULE#L&tj_r2Nr0tXzDSRwPxb|A>lV*ogNjK{q7Qz#a`FCcwiqj`rASqsCe0 z{aQqRkC1G`AmjKA&p+q^v9IVCIWmq-X-oA1L6toy!K79&_SL(Hy(~^WCrf z%R!3c+$eT^1QcTSKHLqxaoB}?rnp)Bq{0>rBR7o?_0S!V*Ks}c=1MKLkt0dm%yOyk zkCvZD{E^69HZp79ZO{gy_0(#%om7Bbkb90(p^?m&AZ`cRBv+-R>|}V-`C*E3=E0N% z%kc6OU55kwPUS7I{zz$J)Ij0m9Xjwc6^E?`I?KRJNd=}8rrQvln2!|NtA_E*Y#oln zhs@c6TEoHOrEEn)m!sWISuU~Iv*NC2{eiVqZ6zKWErQ@{d)LR}M6aHGF-4SlVfS3j zz5tBGg*4S93Spl7I5Jxs33mgT(O$2V<=#K`;F?!ru2$j#=2v*8k@JoP(7L+}Y ziQf|bg>yDWq5kvk6Iq6*OjVuTK0-dLAbt~9F`}@%0cU<^{$+JRbyb|5bel9m*6T{c zrjD0yInhvcX_TI98TeBc4Db)ryuinz(X;h9Nm!t&=Vq3cg0-(kc*UR-{&61n?`kxK zIKi$4om=@~k>5k>Z|bKJbvB+|y?+7gk=NrDPoqE{;z?h0`Q#6xT>RUkQsTW0nzoD* zuQ+YY!MY-2nuABkj`?57av4BM2+6NcAAg`Vr)Yv>q^tp#+fBxW+X{dZf_s#E#J>P) zUX6H9LIl!SPtw|A*Vi9D`~e(IqJfpLy-fu-AX1r8t?38jY$A1R4vlf++(Pe+AnbZc 
zLarTJOHiy|W|f&3mx`Y50Cyu+lvPT5Dm7t3De>onU4e%^%`68}S#?6xIAbq=jG0|P zhTg?lNgH)4mwP{il<^MMVCoDtIz#Ebdcwh1T(#~?l&0U$?-DO$c2(mg zBH3`$OZ1MRbs@9eD~yNx zu&Yqon=-GE3NP#j^Xa>sq3x$K{Gn_43y6NN;Cb37dtD|4)kaO`vrG{iD;MkR0g|4d z?41LqLF+i*gZHEsnNiN#EK#rv?Rb>uQlUu4w?h(i1I1iu`KYE0*i}2lNa$_Q-aBO2 zCMI}x{c?rr8gdE*XfrLp1??#6`E`EXT}dbe36Q`EHSzW~3RqUnbF`UTpfS^lsFPhn zdu_;)q)zvi)^DWf66XB0iFs+^Chub58A3Dpd>Blk=QDAai!e7RlcheTDFlJKg|>aY z$I8AJY>lmd;Lg-_hHg@(d-EHm!7V$w!V1&PgD)DKap}rfx;p4gE_7|XH zpiPl}t3)5P0%f6aQ7>0pKO}u&0LN0SXbtq19E1YJ{wOe|2+@W^W!3KY;eAse|a)8#o+OJy}cG6jOJ*sB}T?h*|k1e7Fq_NP%0Pv6)Y&sH*V=mx)2Ce1OU{+HeYucNj0N@DJQ&wm{0`q$1G5|c3bQpl(YCKcJV`oPU@p((p78zP72!bo zV$0#b09Vk(^l9dAQGbVdDkS7(L!!%GT-rns`1fb&JIKqzari=Y9F3-k_z`b(2^SBq zQZQ#Xu4FnIE0)H7C!`}O=Ekj@R_AnAu8e7(3Dg$p>JzY0-%r|>AFug!oP?nv)SCqY zX2YdP%30{SCff(}{wvej3VTD;84(1(V#ORR6#&35)xy_>DrZdCw$*g8nU{Bd14)=( zgbtXs^+gB-R5r6Uy78z9eR)nmlwaUh69-R4IUnhe828T%f_$`tN@LD@iyL@7d!hBw zx(o_HH~Z8a_|fp=a%K|{Rgh(zECn4gy@V!Vlg9CqUKBh67CvwQqE9nyT}1auQgIf2 zZ4vX7BT@BDer8um=fin^lSa>DZyT^kE3XknG8B~$zk!+Z*EL1he z1iZ79AU5*oVwwdsrK#j*Qy;jM&Xzn2-ZW#SIchX!Xg1(HkVSX3fVEOAvweqMS&`y~ z?%(-fxqV(|1(A^k8R>Qm()0;*6wp(AFCMN$gC?&bzGlowXu_|N z#UFeOyeB;zrig5*>-=hO3GtyAS zo0Y9Tcfnr1@55S*;NsvJCY0S`z%JU{+|?YjuL2KB9B8w2efL>;sp~eBbbhT>Axic) z0SR0#+qxl>|;qNB%6#(8I2r4HpiYU?t@UTU05P{=6lDm+DvTx6)NGRd5}8kQ98 z<|f*oKX-Yog~QgjFfv4w=U4F9PK=%AGl{hg6Y~GZ#(z}YrY{n`tKAABDY$TrVq+W2 zWA-R6E;XNK8$UwWW%pdLL*?PxN24DuG4LV-Bfr*WRV*s$GceB$%e(`-8Is%79-f zhB^9X=U97wnzX=ih=CkfzX{^0x76i;QwE&To_z>iR7VLVU8a60SycW7pr?r<;DbQ) z;mW-KHtBROR|kK7l2Ei4w{^k#OUS-|Sa7ij@e~~I`>DFL8*xWB%c&HhhxxZXG-$I-0ufD@ip zX~y*Zx>2o-(qAm*mIv6&14zAu@#V3w!qxtaJ;PcgretkO8clp*m742KjDZ~HFYDG5J zL@cHy-P)01x>h<)Fw{x&Kn7d<;O155>s>MXb1eh(HAC(a{POD_`K2MCGLu@~LgV8A zjs9mkT4OuzPVDQBOP+2i22t!I;X`0?nzTL58iHYe_iT&@fmpM(uVHaBd$KdE*A6m~ zbI;@On_52bVxn#u-7WbUqTRE0fls|av&qLS>e*68BSMbI@!Ydsuy<$nfk$xp?ma#4mQ*Qq--vXeHNC<& zOgqHk-hk~4jX$Exd1_2A4IT~AHLy@+@WeRx(TKV#MR!JNJ_jE*vb5hVa0~kus&Rzv 
zQv0^YTsZy4Z&~w83|+jn=r5Vm%cL;nU$g>MkG*n3B9v_Vxv<=3Yn=%ySysg{@J9U- z0dPiLGfWmb6jOh=kb~iShrocG-?n#xVhZJ;7D9i0g$N$gOt$OP+egrp6n`ihX|*QL z_te^3H#rABD*>4~5qy7X5{Jm~gz>)tm>Zr7iChQq&6n)%h5|&Qmx-thE+QKJhK`g|*|)X>3t(#a5mIO#b7CYRkfC)T4vFrijqb=%!4q4oEIL zPKCK{fmH#z<9VUh8vB#ep7B55fQPLu$ui0G>ai(Alwnwx$HZM7TGVvXT7)zQ%ef@g z5o_>k)~2z^N#*1Tf%w1iN^H71!b1+qRSMU)9$=0mvGkB+vS~Ffjj)-xF^0>^OOnqm ztHKuehzP9AOT#6k~w_ilE+Jr26JlV#P(@BG@**FJN-W;DE* zY~|@iicKXcgles&UgS?YF+Q0&`X0UtZ#iwu;xY|J9VQtniqU3X$Si0)1Hk%3fF*c( zNmQoo_j2KNSQP!FvG7$ym{!cf+NJ_}_NtO_`*P@Y8k09fsR78iPJ=@%g1jYrNbnb% z1h^=N-zyTO1O&>?(TE43@P7~bT#?OQWUkh2vqigW0sDPYvxK^{HC$w)!9R@V0{BZe zPwJvmrY`pVhoS6}Lfei(7lxUmepY!jO+|XsIHgK($wNaRhM#cROsuh=2bqVHDJLiW zNayAA?$y%D3;VxQ!x|k@z*Qt_d3{}>5NoZJRF6|9l8hj$Sq{6l#ysTk(8aq>mQf3k zkg7}L@lxkeG50IqMj`3J)P*k>Q_cc&Q&yH5<-nx`R?lQJr{%aai@;VB@~>l+E@EIo z;cu$++2v_xugaX9Og5pEbHb&_lGHCo8aM+j%Y_|!1pBxUx~+UhV>=jyL^0JD;Sn@4;ZCSEcUhe0hOo=nm7+&}=Q&fty2 zKILepz86<|y*TB>6PFSMIg)fco6D3pdC1cwV+EnLn<^KhqhpH0LX6bOtT#hfmrK%C zQ=wA<%;ZY~tK5LR>rVy%pu;aE&)>6he?*!}_@`g>P)5GHP;Hgodi!=G)fPURIeCph zN5(dI*ZW^F<}AWcnxlRmrW?I!vqi4o#o2oUa|!1&d52mkE2A=n({*Sx3m{^qO?66U_S4JpG6iqPWHPJi-&b#6_uU%yK!-boXizHZ znL@R;{F!c|qFItpmn2Gw?;!sR=zsiGV@tsBt@0{fX4@Wf?1z`q9Wtuu6OL@zFsI}k zrlJx~qdj+$B->DOnR6A8$QAzFu98jxJjLKpvc@9(9G5J-zeGbW&Y(;@4@-7#mdzK> zA5XWEwo;?Zx*naWgM^RYdw|>1D|ihDm1lb{_NK{tfLNN8@}`WEN!tJ8Qgvxhk{+@i znUuudlh9{UiEY+iFkH)1OHEfM>G`@|47%poKF{Re0=diCmiDcn@5a6NJxSccSC-{G zm>6=_u7A38Y2mb)PN66q8rLD4y>jd}V5(uHYDuwN$+P{HP^_aUiXE*PX&-ntHAhQ0 z%r#3AU#)$MzujZZ>6H-zD;P4(t8|cR5YqLGE)_lX+CMQ?E zY5lmDWr++pn9DpiKn_C4>}5F*ckgL-;k4S*Tp6QG%H9bZcpO_MD|M*4Z4&F#I$F~* zi!Jf2SD>$_RCQ8ZY{RIoU)?HA9-7lY{Ck1tK;@~t_z^E=KS(rxK9^V3&KrB=B28AT zqMmf&r(;P9ECM%Sv$|+0l$@!Mq^}^ zB159DuduE%$}}9Gy*AGX)u7h2QQUI|w=vX1{g=SiDFs~&M^2{IX^`FW2MVi|V*=cv!%IU@#) z_@&U%XPR1Qk>TGV;fLCU+Y+gCGfs+hqG?2m4`adQsl{U+7rV&>La z5UElu8pw{pqS3>qECOzs)S3*8RU*6A3}Mk>ZqZrL9l%lvbE zcox#i8;ZuVr7D;cN}-3&=x9uyPlk(MSSeG(YQ0>^En+md&7cQc>DjW8t>!uB2f}kh 
zZ+)7plE$$^ZohnM3o(hUQwuJfOZ~aZ_DLx7Kqz7fd1g?FZIO|r%z``+KsWGslw)83 ze|GXpx_da$>}yVw_qZ_iOKZ?+aGq@RDw!MBQ+fE6>H1@a>P1EDo z?qd~?l;xV9`&5F(R6%vTe*sq<2Re(;ZX$3$_UgY){W$ZhrZ`lW`n4|8qd<6*7?4mx zYxOU{Q~cp}L#mv{ln|^Z*Lzu{f%vUiRz85n?Y_3l*(`G6-hw%rT_!NUeu=}5ts#r2 zs(G8=xN^*|pl!$Dz3ia)H&v*;V6w+221_GZy?A{B8MnpGt7Q_WsEen>+$AT@94_II zQMSqZu`-&l$rRjEN$euTu(8rW$MgLMt}(tUU?;KlbK#=h@@V5nzy3B(9Jh%Ew(BtXn)Tz%`CG~d zc_}5fd4R@(>zQx%w2O1%FusL4FNBmIce$|deLjtudOU5pyoGJoVmr95jb?=`-sc`h z?~S07nkXtoHJX-FuNPh0nVLA{4#0KMclj4kO?rQWx9Na0WMiCklxu|6f5+YdYpm2A zwDz^(265PO{91mWBp6xqjaHMnrt$d-;U3RjkW!HE-5rg*yT{;o zC8u!Mhfcua8qQFD`OjpTqGX=DSPs8TMJK(FetqlAXDgr68u}1tmVh8V8yitbZ25p z8<&4VK=;0Ct_@f=Q*+BQ+7sk>Jl0AW#4LBFBvG+)6XJwOW3qo=kWp6Kr3z6Y znVekEl$7A>0-J+s2W+(2h=1Tjzr}2QD#<6r7J0b3&<*E7<3OgijW#nAR?pu)rr_vFxgMfk08WK`bRuZ~cVFN8&`y%{tCm&aZ?r#dQ;MZUZt{xplznUA(v$ z=j->NyEN_!8IqZ{^=PC{A$~qZNShipZDW&}j~asrmVG!*%LvJx`R6Q5byTH1D4 zm2B2PK;1#G1o#YpRJg5}6$lSbVk)?m3?vqGO}67$4$r~&3&{F1Vs!8C`I@W9=c_dayLVc&0Et|* zqlSXRK~2mUFQg{!`pWKF{ky{dxVe5XFPL?h%q&>JWojeqb}D?%Nb*=wnyDD)JXy}X zxm^sbx`D=oui+BR`X_rKZ&tB9loXcFBK`S(he5N_eO3~fRE$cI6k<<=()wXLFDDxW zeKGhK;HId3sp*vJ&59fr6dTO8EA5{vTGn`3SzoQOEw0?jw^+1I%c}i@!*tiR2H#uo z+D$ZQK{50bMSCm8!J+ZsmtTnRXg*fAt1Mi~bW2%jzZ~a1lb++}=qpLSjT=Kr$372cN-~H{AxRtxhM_LJU|8OT(FLmsNxJ* zn=dN*5ou600iBuPILn5SAjD!hjHsHRT`9PcvypG5i)|6PYeF0K3=?GwG;V6pJLrTS z_+7an1wSey=`Q2XUEAw(pvqx350|psK*jfFwi4gI)3~iWrGu8*vIWu)8&YJK`SQfh z%Bvq}qGlb7=pEO^EILvWW%NJ)0#ZRSl?Dkx0dRNi`i*$b17Mk3Idlo*eSD~!ME*>? 
zc}{Qk_@?Ih+CVaE%`R~g`%doUO{`y|d33+|yFVXRV>EDQCzBjtnQc%SED62+X@(XI zf_ao(QF*6m1j1-^gqG~=I&477>Siy|)ZmDn#M1e@q9-mZ0KL((M$uU}n>glrp(CL= zVPZ0@dG9XT6X!gd%5G9F!5XV#(TVgaikMn}6+_y@5x>s-L?i%x!xgQpaku_+1!y!lsg-S4o#Nu@ zNK81}oXH>$2<7(+hoWFZJyZ!_dwzJpK{&Y95Fb%1KTF!u6)yoUf~6`~;M)xO7H$44XBsj8b`q znZBeAMc3O$CYOZA%6@`xEvpCzuFGNnPVgvV!K0aK=ATVIo8VP?sg55~8#}q4<$@!n zN=Lt~k#4M}ub^jI?()5nJW=7NYw;`V(f6hRy+sAlNta-1u!;;yEG*co0Ai3te=e3n z{Wh^7J9b;2gV1^}>Rht^>7RESF;+>~G-Z7;y?z_DQFBuFn z_Wp#l@BWBK|9Obp+y$w&Xh7+hdK=Xpzbrhz1GKSQ-{-?LUOON7!34I&&Ww+FYN!GE zE74)}=l4g_g+=84Jq8Nz-DZWZ){%Q45wahm#>}*(#!Z;gqPCuGvkdOKQpyokrU*{o zG58>1#foz=q`H7tGllvjz_jI2CWpDJ3yml72f=b=v1n z+S-I7L<%(&5lUfLUR#-7x+E=y;)fjPbx6_&luS&wp{vhy?Qc%&{d9@qWHK$CKfd3~ z-RzT)B58JD;>^Y1SF@phlfCFA#rzq@H?rc`_|h(jiLl8)xrbCA;l`YG8T9fH(>ILZl&@n>75la_E||EwD>AD-ToQZ$clFGdd3K zhXhsGQhnHNS&}JxwCn^-GNJIB(~r-E7)7RcVM2iV(=<_=VvIFD2V$NafB1&JYu|XsVnhmoQuZXJ$uKBPx zl7|Hui89Yvk^pO9BtwT3!X$Q_xqCbGPNo%x4_5V|rasAYk##|0IZzD@JJQR-uQ8>z zI!uxp%5^b_j*KaMrwy`4_;-yZ9-5>RO-+QV_qtMF=GF0RNypjPbTknHDPC&!i1DK` zox{_8&lizTS}WAfZ+)*6*#+|e7c|pSq996%ZKy?M^?Gb*mzkh0{x8#EAH!Z}HrWk+z;(?>aP=CN(Z>wdlu@@UiTsZ?$Yl!HjGp2SiL#72ThW)YB zAAX#!bmwXn8aBjD02;oNp_El_uehiU;&RC%W{zx#GvIGK*W;V>-ogga)dVKGvger? z*c7>T1smv;odgIocB%>AAPpVK1M5>TUpvEM@!=hE?nEtGezvmHhN&!=EE>K7)id&0 zHq6FBaHexWmz8?{)(7m#aFEi;q_omPw=eLJ%j?R0ZlndDZZ}vvCW2>eT`rL|Or}9? 
zHQT{jcz)MrMGEYJPI|3%;vZXaY0o-LQB{;djr)x-fI=4UC@EHQg$Cg{!jHsNnD1aJ zFRT#jdh|o164fauGKXb2Yn`%!s1+SThoTh`*_D!f>5C$2T#3e!&`^8-C>A-K9KW~nH z&$aaXozGH6K;IWDeG|x2@IHCVLAv?t;1cbZbcw^EXIUQ0GC+@udpY3F(C+^&73WZjjF*N ze2K;Q0$Fj!fGHS2n^=ubz=iTuBGxD$%u;1)TbMaSFx;h{xLyYt5_NeLCDnaYpMZyr z{1rdkINgB0deW*)=y4Niz%&sfS)+A$;}v<~mR{0DF!NZFIzFYibpVsh1=fpYv!9zd z`boIh=LL18VQ;#$agxn-R;rvM0|QcOM;Oce^(}M9#p_qOyyU&s%q6ZfOR{J`{Iwvz z)vVT5S3W75@*3+F-wWWsew8AV@OfE=ds(dERlip7#2|QzwLW{5jN!Q^VAmynd2cm#iOo~| z_)~%!Eps?6a&qBCWMd!gn6+))0~!DVNOq`8i}~|Hypxu92h;?rZb{J(#X_2#J4@5+ zj;SfqL?ye!XSz)6%yv1(1I!2`S+a|I=wV@n&koFE!!9P}1X^TlcwRUR)8?-R3%N?o zn_FTpy11Xq)Lhoo-?#(12Hgj_*LV-+PTJZ?(AmmKD3Hm71K)i=)z5J1eRO2qxZ|(B zPkUrSh4Ma#1(o>{V6*Do&aBD-RO@-hjFS0ZdqbShZgwe6AB`eMb%;3jquH;UR99Q2(FzNW>BtswaYkeK@F>XQ5Xwulaof~&2` zZNBcOmT4%prz2^N`1tdH8#Dh0#H4%nSJ`Yg5iqS)@1Su$5|_8YOnsRS@@CuHOx?^v zJ>RV#0c}WrYP)qFSyCf7C95uzBSjkB;k{@$m|2k)XhW$lYBi2LF ztWK3G-Q{*#tE(ck3gn^pJ1^0{&X#<~_$YY^)R4u06b9CNXyr%ln(i%KsQ5DI&I`CnrcD>y;k!P>mi>?v z>x<`yje3<8c2qh$i^Y$-!*%91`rgf}6VXDl0gcAc6IuIVcCu}Ri!`EatJiRf)Dtg) zRiUvp4Sk$7;D>lDrtlxNL^Vc+b7G5|>lIqbDvM|y?;#w&b ziTsPruSKs`VWl_j+SuTI;8obug+A|mVJ(!VSli5BK&*8TW%*E?OCubKCSZSNpQ1E= z={+?|>y;(9@l-xHuX7cIZ8@&XogkAzJk$#Y(r$&R*O6tvY2|CbIa#|)(CJggMetm=HIzwM*gOaWc)O#P(Mdq z1dW^|Z#Fwh#>OBn@Uo(};_`QpGPOII%LEbA|Dk+yo+T=sHAmK{+ zV5<|yotWN4qyE_5+vYN;Y*B^ci;)UcgNb~==^j1yWpVw9pJuwHjC}<7?EnxY)K@f* zk8nqn>c~6%rh3)0#^63aQowNeV=XwrsJ_xPi71KKu>gAX7mVl{9`VcWZm2Q5deCeGhK{h%=8JnH5sCqY8)BlJTUn0-!&RAgXR^cUGg&2qH z8QV-c@entfv)_D6KITdss|z2=1wIVvxgd(9V48`k-l3urT(fYK&d`)!{wkGrUxbh) z8N`*YYLNvYwlo~^Tjmh@2s})E$d%mwBX-YC8T@O5jAaK2LwQtn7{zthh-N>@N4U@2 z`}tMOvkKfE-WP=m8#Z|55%z8wS^bn)TGe-CFHV1_uhGAg3y(LwWLtmbYFnsaCnW>f zrCm!Ll>|3jAa^}f(J9s5*+}`GtF85 zkwtN>ow2Vig(@47yQEA)(p((i1me{vmAkN8XG>MinU$ z?mGZ(t3ozb?$V1QSC;6rG2z7g>)Kxcm(DzpMoO51U5wuWR#)b(l4|+Um~^H5=Zso| z8nfwd*?pl6m$yZloLE>|a->{m5#jr4yf2K_b>R}%z~`;)-FhOcZn|aLXbpZUn^%(- zNXy;NBI;B0{-D=7V>(G7JNx)RA6={JC}WCx9#Uwl=>ELa^=W`ck_22M zmOfU%9J?Mf#|pHWqrn>+=$aqG&m$WfqGYJA`0B?tpIm&tY9SvL-_$ 
z7^!xuGY_i-=AKlLsDWcUttv98s&uFBGZnGTZ0p1cHA@if8x62(Fmq8vlUic zbAg5a-TweOnw1RVAub{e{y7nNL=x@B0rfuEJ?S1)yC2(r0tnvD*$R`Hj@_!f=XcV} z_NzIi{E*_=DK#5JM!^i|8I~m=vnq0Sk6dP&IZ(2LGR+g?wT8R%T7p_UHg+>CQbmVp z(OOrJ#F-Rj1{54){i<1ICAUSf<4;DhYkLNvrmf5jOj9OQ_CKXw85H4YgrMOnWJw`u zUfmZ5V~HPn^T7Nk(S=H`kxT8CZ6E-)cpdAZ$bv*jbd5GCq`8I!NvGyn0P{KbX6{WV zwI0X!CD9XBeA~PLuxC=EZV!Gt8s^Q_9T=AE$8{UcLfc%Mr}cX&Ak(!J@I~0$Dzkz7 zw~T@U{z9A5G^5_f^$V8F>r~RD)34^TmnGq1ry~c7?6ZrB>sS|ZpHb^ml!uBgVe*U` zgD((%=F(6?O=P4A7QZ5$$*3wp0o+(*;@*IGxk(``%;-~E~`J&#-SGK&FVz-bN;2;EX z?rB~(V=1EY-J@OBmf#m(Gl9<1+~ofNnA0fI;)HJY$&jdx3~F+z{v&(0u20&j2-2nC zoSnNRJA()<g@gqEyb?QAgGZ^BSbOCWz2#Dv}o} zLb8^Pm~K%bA3SaCnw(@&z^UpEppFgG&YQd$qe-G+k>JV*JCA&AZKpk}Nn5`rjT)=?TI5qZcwoPjtD2xkz#0fB*-?iNqN*W>1nEx(oH1TjkXk>o3suH z{6A9mVNE5)T>>>N8ZSlsGKWZ&@9tzom*?G-d(!H(FiAde7=0m8OA^(jN-XtlI63a zSn4`E*ZoZ&Pw1b-d}yK@)nsW78)-N@N8(}Wil$cUU5SZreHWS*scot0cK1+Bv!tjP zOoxW!zuKD&`A2<=mK<9Ge1Q?J(g>w& zD6m!qN4-e08*5b{am{-UN1C8}-g!2L7T~2vvC2menpcNF7DJ_>t60BeWki-qgx%DQfps1Uc#xKZgZQp?8 zf$lp}B@#tIWKbC5`Xq`03mn6c{ zI;awmNr?7hzxS--8!9@TZ0~f|cz>5Xv4)1}PvQfD+0XiU5zlUFbxgC`8~B-J)IC{e9-E~| ztin}!VRc7~YMdWA&cXSQ)KPM+ix?Uo!|eiX3#M&!`B*%WFPVX3`gy&+)Nn=B{ewJ{ zWuH*!HahN?Err`U#UT2lJ6I9TQgBjRCkeQ{6dEkoHriv`&doTte+eZBz%WyR!O1_J zdHCI0M|9QGD84D#E}_%=fLQ0#rCBzd=fjXSo)9Y&!udS zLqKb)<2lbL0s%{8`i>4kclF8Tz0GOmbZFyNYPwCGwTuzO2rnlt z!?!*E0CQRqOTQqLo9MG^z-AWceC@f3K7a~-YUv1}s}Sz8*~X)Q8*k_iD>rdLN~F$6 z0g12<2vR-rzQ5YFS_SO$7-qF%uZY__TpVpDpX_L=XxmI3*L4fC0E@tB9CMUyg&)@k z)P1Tm$jd_-?4{FgHQNIuliQ1pH^eMehD>POGPwNv=ZaM^oGdi4T^3h~cf;XIRCquJ z%Mq0xY@hVgIY;{vsW0R@u#yOqz>63d8<#L{SY%Q zu+${CY2aqfqBkHe?0|lfGoC)ip%_y-d0H~wq4 z8mTO8flVV#=&fGX_TD>>7A8qEw`1jx*dN7GZo3_?qe-M&B zeV0u(k}7luZlmdY<(@6K4;#m|yyq+Q`G@IUn4P1WGNxh|D(LSVfcS_?xIT=h29t@b zgz}U$$|CYC8dlwpYO4x5_d{kIy&^-|Up>%cfmKAL$iawg`Wky6(IHq6ImIhsqDVPX z0l@d~`_frNf_|U*NOZ=O!mTi3OZm3{0P-H6rDVz^6504Ji6S=B_>oRf9{3=4tmG7o z!{a8MdvkFF>e5dV;4Djoj1Y5@N0{T-aY-559#PSvbZ^PY({?3X6XB=RTT$p1s&@=agL0qDQr-*jgw&nxg7rh 
z;+uk@d9sFiE*9=ah~fy210Kb_$JuL(m&wuCqNtaqTm)5SVqb31Tk72GPCrHOPmR{Z zn($BF2Aw0PISk4NY%e^1wQKk?Rq{wyrFgc{!*HZ{Qh+jj8Mz<%s!CL>NJiZiYjV*% zhCem2VC4tYtbIQ!=5nX5i>Tjyli?7_m6R^m7>_(;`+bcxUG^lEbWgXl5u>ETd`t-5 z!<~)Cn;pegD!U~qUqlT<;n}}dwt`l+oJJN4nA;5)`hqq)rhV#f1v*_6_tje~biT0F zx>u-U(E6s`-(+A7QUxxK2h!4DamVdT7&yw!DJjQ6=Bd>-7Mg6Dht0;8%BtCB45eF; z!H2jt(e7;L_WsNlt2(yQey@T%Rq?H6;mbC{Bqt(8@c~&BjxY#5^;+@(Q{o3sHuSl$ zhH0Xe%Us%AC|!VZd?p<22a$~PPwrW`PnDH&7OKGAlD=dnm1miuv{r>=J zMMJjHMQzqPJ(a}GEZ-?-Xu>5>xp^78?H$MMMvNTWYBA4jE@R@|z$omcM?JVcW12?T zQbKLDOL?wLR{4@h_;wf|A8p@n#qCg1rd6v%)q+_8X;`l8fTOTlDl(GT9FpjZKAaeX z-8W%Z9)hOv6j=z@@tb&=lx4P%iaT!q0O_Tg;)jRHj>I^E4CQ<>j5~K+pYKecNLS^I z-@84s#NK4yRC{}h6K{=w^iheoizo;0+18VH_G<)R#U2+Kvr3Gl`+NhEVriWRVUTwLmXUY5T(w&pk5WWi|P zwhjlAc0Z7Z{-IqhS~y>36)+jBjc5Pa Date: Thu, 16 Mar 2023 05:01:09 -0700 Subject: [PATCH 59/93] fix tests --- .../captions.json | 1 + ...92e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg | Bin 0 -> 20552 bytes client/python/test/test_utils.py | 6 +++--- 3 files changed, 4 insertions(+), 3 deletions(-) create mode 100644 a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json create mode 100644 a554f15b-8611-4651-a2f6-2c5445d2b1b8/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg diff --git a/a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json b/a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json new file mode 100644 index 0000000000000..38097716e1d5a --- /dev/null +++ b/a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json @@ -0,0 +1 @@ +{"C:\\Users\\islam\\dev\\gradio-repos\\gradio\\a554f15b-8611-4651-a2f6-2c5445d2b1b8\\cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg": null} \ No newline at end of file diff --git a/a554f15b-8611-4651-a2f6-2c5445d2b1b8/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg b/a554f15b-8611-4651-a2f6-2c5445d2b1b8/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg new file mode 100644 index 0000000000000000000000000000000000000000..c510ff30e09c1ce410afa499f0bfc3a63c751134 GIT binary 
patch literal 20552 zcma%ib8ux(^yiChy%_Jsw(VqM+qUfqC(b09cw%#6O>Eot#P;lbe_Ok?f9~nJRk!Ng zKIe0*x~o42{deW>4gg(FN>&O00RaF&{2PG3>i}^8EHpGU3^XhZ3@ifdzXu5(78V`} z84(c)5fK?3`F{i*85Ip34HX#+3mY2?i-?erkcjmE%6~>6;Naj;kWql>=s*G-3>NJi{vUybgo1{Fg8;z)(+Z&jAR(Zj zAphb2?+>89 z&By=G+y86)f6s^cr(*gC;-5f3|K|Yaf1E==VnC5&vWh}uQK+z)xQK zcMX6H1@R9x6b3*TfM!=Lu)q^%W_oP}y;P_(>Zpu*uZm=ja16)koli*CHjZgKl7i`S zstpHUWzxV~WwxP=urY&(u3&}$q6k*3xRdA)@vau(&s;Ez?6PC+D_n|_S`)S%BBYfV z4qXB?0kOqs2zvM>fwC9c)R5+TVSvjGRn|ylW!5F}7}{W2V4)?tVSRjM-h?LW+)EW~ zG@5shv#o%_i1Z}x6Ko5VE1%&0(dSUx<`R5N0em>~jR8W00*EX6-?H;)C_ZZ06h3STfrFg4gWuKO6XSxs|DlNHy078>(#3?4!3%P;L#_+-@cO(@f)WtCYuBNSOeB$3B~w6^m%n0 z*ohlDxO)6$YZzdLT$XLr-TpFrN44h&QS2pK3O1k{DIA8`m*1}B(pCli*y`c8sEfUk#0 z<0n>;CY76maW<03zz4rvYj-ZcH+JV-oy@+zBk#G(g*>f_%+O?|;xi6xSRiMdn8cDh zO(7cx%@G&ncPQi-_C)PtXU=JXxWWQ-+0Wqw10x&!urD+2hjo!r4n<-U0bvRc|B(5H zW(Q71;*PC5MFntKX+H$U%zq!5uHU1=-lSeY;)W9THD6rAi!V>7-W~4GZ8)7KEGbI) z(wZpHJW6k&`UP;dIs7B!&l8T!kzGA?wD4z882z7w2x67lTgi7Za>nX@hn9}WsaVg; zUNUFtt#%MUZ!zM@w;~(xmQA`8NU-`mVr!4F=a#E{<@n<2kFL`fYvyF;;uE{&WSAKA z8@5eRRwyBfM8H*S;za8P+Oh1o_|1ywY#th(a-J&p&yj`!b%{btbMrNIhz~N_(T5Kk zy;s6gdzQ9mdCMmf%FaNBx~PMgeF&MgEpwC0@r{|ApRS$;r+)#c?jJ(x=tVXtN$Cfa zWb}hh?ycUi!o~0X91SHAu^C2g*T=G%2&!um5lb=|{^*c>DWqHv_pi}_-2NCte@||t z8G^q6c+91j+=D{hHJNYDYz8@Qt$kG_sw?#A_YTs4JyWLpsO!XcCqzb{ER=ls*6l4i}Xtb8G7i6d# zi@Cb1oH1iae*qDEb;*;uETg8=45Eam=F%Rqs8YjpVNl6-3^#wyt->Dxc3qTSPN0z) zU{PKl#Wn!PM~i1a_Hr!gBIYz+V#+Hnb>7;|5m6+;+*F5BfEG+^VMzp$i+8111P2Lj z))Pl=MM|fSEt^Kfquwx{t>Z~tW5ed+vA+;L<>44OQHtyc*{YMS;>eNuG{Fh4YEi|k zYydO44nwuH6}!sUm9@;s+FbWnGodun7Hu*vE>zXiONnua1o$NYVBvT#yK+OA zYJz`&H-@k@$kZINxqEezG{B>fIHlds1g8=kgtAE@HKQ|;rdD?$%1-(rH3272B8;`x zUk97StuAxTcsWMO@>KB%88gl&Xu?TwU9RBO`GVv>=3~=%h~1_@Pp8xMt@JR`Ot=Rm zKbs{QuISDGl!qggcciqeIA+(s6Q3$HMn$dE}v;YVYy&R z`)Hmq8H289R%YKtd-TphC>v8M@S$TiZrW4bCSVO(-8R4=lEbMO@aM=jPW&yY6TLyY!<4P~*U z6tHV{eq{GEhmG{>4r<+Pci_PVy_8y1Gv1>}SdC<=wS!$l?Qr||x>f~&V zlcUJ|n!<7hHOr-yk}I4cbsRFHgLpKKsck)D6{+eX4=w5Yf(c(W#Y_WPyhBp0b|;XN 
z@x&18G@*Y%=zT*_B8hcze)Dtj5+3_R!x=1xCBw}S01%ec@+$qC<1gUUv;)WLglbL* z%v90*il^L5{83X>f}L`+^NPtIroUQuqiKYE@c|)>6^L)sZd?5rfA*+4-ptrpC2nIG zm$jTLD(tK8qdh;6`O2oOsCXXAzpOGu9|OxHa-lsUdm&;<9UZnE<7y=l#sMkQ;Oe-y z9(xHj!Pc4WDkupe5X>@dN7C*jL!w}?E)_6_Xhahcq@T1s@YRwDV@Lp3h4jOK1x8rp zpUB-#U<|~2g`@@G{mtXKR_SrIf24#Xj5iA#-V-T|BzZ74QcxKixYZ@|5_ZO{JRW<1 zqDEJhs`T^*NbpSVB&wti;uZ0NSvU6A(xaB(3`TBtemYE`MeYSq{u%|`HhI%iQ~8cz zXPW!mzzf8)=>*~8wGHZ}$VD|~G1tVM(_`bVLsr7-{TNwsWkVyC75u@1!}XpZwlreY z=P=`~T#<*36|V3XpeeZQLFW*j&uvz8Zy-cdQ!_Dl(I*o*;h9~r<8oL*<2Z1*lQ-vE z5_7Hp4gTPTMtPS+M8l{9S2xBClr2c ziMC2oKOQo`q5*RbI^nF5ql9(Mut4ydN{mU)xK0G>`O~+Z4uIk zF<^E4=-i+>hv*N|+z->g8#+7bBsgWsMY1l5Mya03+!9T5^qjH znQ4r;MEZRgj@<7IbpqnSC;b{ur2gDmw`Pt0oHu{7y-xX11h(axz+c2;1Z%IU=k;%& zU1fHYDyOU@4_Z%^)XgtiApvD@WO70%zfqyWJs2b7K5$<8`X|}w9KA-#8ep%Lq?<3< zI9NEBf!7U+Kl zd<`@kf0}gljT<=YJIyS+z{I{fc=`smT+|a_I6@0S1{ZQJm?J z$ZC9X$^Hrl%DqZ(`4gif3q#=szJovdr5SZ$r3$eTTZ#{TJ+ctvKC~y_9`PE$xf`km z-N2&4t6x!16b-;%KYe^#BxgLArKprW=@N9x4^Wiu&aqHS$|mMX*v=Q7=N&VR6jO}{EGodE)c z46gg;RO|~aq>VkTwIxwgSr@PlFRMpF z_ej*s?Z10a*9pg@_040H4=+|ZfPW2chA(_otm^qe=&0n>_dLRZy$BaE#EHaD_`@>Z zTSCcMWmf`fY@g03J2p1yW+CyH$gJ(b3XyI17$GGpD)Ah3IKO5{e7!CYqJ$1Y zVpP4=sdhBYT|-A|JZ3`)eo?bZe|!)3_5QG^P(@72qsJ35xbBer1*pATyZkXgA1g`; zl$}D+mCC;^%ulANp6aUv%ULE#L&tj_r2Nr0tXzDSRwPxb|A>lV*ogNjK{q7Qz#a`FCcwiqj`rASqsCe0 z{aQqRkC1G`AmjKA&p+q^v9IVCIWmq-X-oA1L6toy!K79&_SL(Hy(~^WCrf z%R!3c+$eT^1QcTSKHLqxaoB}?rnp)Bq{0>rBR7o?_0S!V*Ks}c=1MKLkt0dm%yOyk zkCvZD{E^69HZp79ZO{gy_0(#%om7Bbkb90(p^?m&AZ`cRBv+-R>|}V-`C*E3=E0N% z%kc6OU55kwPUS7I{zz$J)Ij0m9Xjwc6^E?`I?KRJNd=}8rrQvln2!|NtA_E*Y#oln zhs@c6TEoHOrEEn)m!sWISuU~Iv*NC2{eiVqZ6zKWErQ@{d)LR}M6aHGF-4SlVfS3j zz5tBGg*4S93Spl7I5Jxs33mgT(O$2V<=#K`;F?!ru2$j#=2v*8k@JoP(7L+}Y ziQf|bg>yDWq5kvk6Iq6*OjVuTK0-dLAbt~9F`}@%0cU<^{$+JRbyb|5bel9m*6T{c zrjD0yInhvcX_TI98TeBc4Db)ryuinz(X;h9Nm!t&=Vq3cg0-(kc*UR-{&61n?`kxK zIKi$4om=@~k>5k>Z|bKJbvB+|y?+7gk=NrDPoqE{;z?h0`Q#6xT>RUkQsTW0nzoD* zuQ+YY!MY-2nuABkj`?57av4BM2+6NcAAg`Vr)Yv>q^tp#+fBxW+X{dZf_s#E#J>P) 
zUX6H9LIl!SPtw|A*Vi9D`~e(IqJfpLy-fu-AX1r8t?38jY$A1R4vlf++(Pe+AnbZc zLarTJOHiy|W|f&3mx`Y50Cyu+lvPT5Dm7t3De>onU4e%^%`68}S#?6xIAbq=jG0|P zhTg?lNgH)4mwP{il<^MMVCoDtIz#Ebdcwh1T(#~?l&0U$?-DO$c2(mg zBH3`$OZ1MRbs@9eD~yNx zu&Yqon=-GE3NP#j^Xa>sq3x$K{Gn_43y6NN;Cb37dtD|4)kaO`vrG{iD;MkR0g|4d z?41LqLF+i*gZHEsnNiN#EK#rv?Rb>uQlUu4w?h(i1I1iu`KYE0*i}2lNa$_Q-aBO2 zCMI}x{c?rr8gdE*XfrLp1??#6`E`EXT}dbe36Q`EHSzW~3RqUnbF`UTpfS^lsFPhn zdu_;)q)zvi)^DWf66XB0iFs+^Chub58A3Dpd>Blk=QDAai!e7RlcheTDFlJKg|>aY z$I8AJY>lmd;Lg-_hHg@(d-EHm!7V$w!V1&PgD)DKap}rfx;p4gE_7|XH zpiPl}t3)5P0%f6aQ7>0pKO}u&0LN0SXbtq19E1YJ{wOe|2+@W^W!3KY;eAse|a)8#o+OJy}cG6jOJ*sB}T?h*|k1e7Fq_NP%0Pv6)Y&sH*V=mx)2Ce1OU{+HeYucNj0N@DJQ&wm{0`q$1G5|c3bQpl(YCKcJV`oPU@p((p78zP72!bo zV$0#b09Vk(^l9dAQGbVdDkS7(L!!%GT-rns`1fb&JIKqzari=Y9F3-k_z`b(2^SBq zQZQ#Xu4FnIE0)H7C!`}O=Ekj@R_AnAu8e7(3Dg$p>JzY0-%r|>AFug!oP?nv)SCqY zX2YdP%30{SCff(}{wvej3VTD;84(1(V#ORR6#&35)xy_>DrZdCw$*g8nU{Bd14)=( zgbtXs^+gB-R5r6Uy78z9eR)nmlwaUh69-R4IUnhe828T%f_$`tN@LD@iyL@7d!hBw zx(o_HH~Z8a_|fp=a%K|{Rgh(zECn4gy@V!Vlg9CqUKBh67CvwQqE9nyT}1auQgIf2 zZ4vX7BT@BDer8um=fin^lSa>DZyT^kE3XknG8B~$zk!+Z*EL1he z1iZ79AU5*oVwwdsrK#j*Qy;jM&Xzn2-ZW#SIchX!Xg1(HkVSX3fVEOAvweqMS&`y~ z?%(-fxqV(|1(A^k8R>Qm()0;*6wp(AFCMN$gC?&bzGlowXu_|N z#UFeOyeB;zrig5*>-=hO3GtyAS zo0Y9Tcfnr1@55S*;NsvJCY0S`z%JU{+|?YjuL2KB9B8w2efL>;sp~eBbbhT>Axic) z0SR0#+qxl>|;qNB%6#(8I2r4HpiYU?t@UTU05P{=6lDm+DvTx6)NGRd5}8kQ98 z<|f*oKX-Yog~QgjFfv4w=U4F9PK=%AGl{hg6Y~GZ#(z}YrY{n`tKAABDY$TrVq+W2 zWA-R6E;XNK8$UwWW%pdLL*?PxN24DuG4LV-Bfr*WRV*s$GceB$%e(`-8Is%79-f zhB^9X=U97wnzX=ih=CkfzX{^0x76i;QwE&To_z>iR7VLVU8a60SycW7pr?r<;DbQ) z;mW-KHtBROR|kK7l2Ei4w{^k#OUS-|Sa7ij@e~~I`>DFL8*xWB%c&HhhxxZXG-$I-0ufD@ip zX~y*Zx>2o-(qAm*mIv6&14zAu@#V3w!qxtaJ;PcgretkO8clp*m742KjDZ~HFYDG5J zL@cHy-P)01x>h<)Fw{x&Kn7d<;O155>s>MXb1eh(HAC(a{POD_`K2MCGLu@~LgV8A zjs9mkT4OuzPVDQBOP+2i22t!I;X`0?nzTL58iHYe_iT&@fmpM(uVHaBd$KdE*A6m~ zbI;@On_52bVxn#u-7WbUqTRE0fls|av&qLS>e*68BSMbI@!Ydsuy<$nfk$xp?ma#4mQ*Qq--vXeHNC<& 
zOgqHk-hk~4jX$Exd1_2A4IT~AHLy@+@WeRx(TKV#MR!JNJ_jE*vb5hVa0~kus&Rzv zQv0^YTsZy4Z&~w83|+jn=r5Vm%cL;nU$g>MkG*n3B9v_Vxv<=3Yn=%ySysg{@J9U- z0dPiLGfWmb6jOh=kb~iShrocG-?n#xVhZJ;7D9i0g$N$gOt$OP+egrp6n`ihX|*QL z_te^3H#rABD*>4~5qy7X5{Jm~gz>)tm>Zr7iChQq&6n)%h5|&Qmx-thE+QKJhK`g|*|)X>3t(#a5mIO#b7CYRkfC)T4vFrijqb=%!4q4oEIL zPKCK{fmH#z<9VUh8vB#ep7B55fQPLu$ui0G>ai(Alwnwx$HZM7TGVvXT7)zQ%ef@g z5o_>k)~2z^N#*1Tf%w1iN^H71!b1+qRSMU)9$=0mvGkB+vS~Ffjj)-xF^0>^OOnqm ztHKuehzP9AOT#6k~w_ilE+Jr26JlV#P(@BG@**FJN-W;DE* zY~|@iicKXcgles&UgS?YF+Q0&`X0UtZ#iwu;xY|J9VQtniqU3X$Si0)1Hk%3fF*c( zNmQoo_j2KNSQP!FvG7$ym{!cf+NJ_}_NtO_`*P@Y8k09fsR78iPJ=@%g1jYrNbnb% z1h^=N-zyTO1O&>?(TE43@P7~bT#?OQWUkh2vqigW0sDPYvxK^{HC$w)!9R@V0{BZe zPwJvmrY`pVhoS6}Lfei(7lxUmepY!jO+|XsIHgK($wNaRhM#cROsuh=2bqVHDJLiW zNayAA?$y%D3;VxQ!x|k@z*Qt_d3{}>5NoZJRF6|9l8hj$Sq{6l#ysTk(8aq>mQf3k zkg7}L@lxkeG50IqMj`3J)P*k>Q_cc&Q&yH5<-nx`R?lQJr{%aai@;VB@~>l+E@EIo z;cu$++2v_xugaX9Og5pEbHb&_lGHCo8aM+j%Y_|!1pBxUx~+UhV>=jyL^0JD;Sn@4;ZCSEcUhe0hOo=nm7+&}=Q&fty2 zKILepz86<|y*TB>6PFSMIg)fco6D3pdC1cwV+EnLn<^KhqhpH0LX6bOtT#hfmrK%C zQ=wA<%;ZY~tK5LR>rVy%pu;aE&)>6he?*!}_@`g>P)5GHP;Hgodi!=G)fPURIeCph zN5(dI*ZW^F<}AWcnxlRmrW?I!vqi4o#o2oUa|!1&d52mkE2A=n({*Sx3m{^qO?66U_S4JpG6iqPWHPJi-&b#6_uU%yK!-boXizHZ znL@R;{F!c|qFItpmn2Gw?;!sR=zsiGV@tsBt@0{fX4@Wf?1z`q9Wtuu6OL@zFsI}k zrlJx~qdj+$B->DOnR6A8$QAzFu98jxJjLKpvc@9(9G5J-zeGbW&Y(;@4@-7#mdzK> zA5XWEwo;?Zx*naWgM^RYdw|>1D|ihDm1lb{_NK{tfLNN8@}`WEN!tJ8Qgvxhk{+@i znUuudlh9{UiEY+iFkH)1OHEfM>G`@|47%poKF{Re0=diCmiDcn@5a6NJxSccSC-{G zm>6=_u7A38Y2mb)PN66q8rLD4y>jd}V5(uHYDuwN$+P{HP^_aUiXE*PX&-ntHAhQ0 z%r#3AU#)$MzujZZ>6H-zD;P4(t8|cR5YqLGE)_lX+CMQ?E zY5lmDWr++pn9DpiKn_C4>}5F*ckgL-;k4S*Tp6QG%H9bZcpO_MD|M*4Z4&F#I$F~* zi!Jf2SD>$_RCQ8ZY{RIoU)?HA9-7lY{Ck1tK;@~t_z^E=KS(rxK9^V3&KrB=B28AT zqMmf&r(;P9ECM%Sv$|+0l$@!Mq^}^ zB159DuduE%$}}9Gy*AGX)u7h2QQUI|w=vX1{g=SiDFs~&M^2{IX^`FW2MVi|V*=cv!%IU@#) z_@&U%XPR1Qk>TGV;fLCU+Y+gCGfs+hqG?2m4`adQsl{U+7rV&>La z5UElu8pw{pqS3>qECOzs)S3*8RU*6A3}Mk>ZqZrL9l%lvbE 
zcox#i8;ZuVr7D;cN}-3&=x9uyPlk(MSSeG(YQ0>^En+md&7cQc>DjW8t>!uB2f}kh zZ+)7plE$$^ZohnM3o(hUQwuJfOZ~aZ_DLx7Kqz7fd1g?FZIO|r%z``+KsWGslw)83 ze|GXpx_da$>}yVw_qZ_iOKZ?+aGq@RDw!MBQ+fE6>H1@a>P1EDo z?qd~?l;xV9`&5F(R6%vTe*sq<2Re(;ZX$3$_UgY){W$ZhrZ`lW`n4|8qd<6*7?4mx zYxOU{Q~cp}L#mv{ln|^Z*Lzu{f%vUiRz85n?Y_3l*(`G6-hw%rT_!NUeu=}5ts#r2 zs(G8=xN^*|pl!$Dz3ia)H&v*;V6w+221_GZy?A{B8MnpGt7Q_WsEen>+$AT@94_II zQMSqZu`-&l$rRjEN$euTu(8rW$MgLMt}(tUU?;KlbK#=h@@V5nzy3B(9Jh%Ew(BtXn)Tz%`CG~d zc_}5fd4R@(>zQx%w2O1%FusL4FNBmIce$|deLjtudOU5pyoGJoVmr95jb?=`-sc`h z?~S07nkXtoHJX-FuNPh0nVLA{4#0KMclj4kO?rQWx9Na0WMiCklxu|6f5+YdYpm2A zwDz^(265PO{91mWBp6xqjaHMnrt$d-;U3RjkW!HE-5rg*yT{;o zC8u!Mhfcua8qQFD`OjpTqGX=DSPs8TMJK(FetqlAXDgr68u}1tmVh8V8yitbZ25p z8<&4VK=;0Ct_@f=Q*+BQ+7sk>Jl0AW#4LBFBvG+)6XJwOW3qo=kWp6Kr3z6Y znVekEl$7A>0-J+s2W+(2h=1Tjzr}2QD#<6r7J0b3&<*E7<3OgijW#nAR?pu)rr_vFxgMfk08WK`bRuZ~cVFN8&`y%{tCm&aZ?r#dQ;MZUZt{xplznUA(v$ z=j->NyEN_!8IqZ{^=PC{A$~qZNShipZDW&}j~asrmVG!*%LvJx`R6Q5byTH1D4 zm2B2PK;1#G1o#YpRJg5}6$lSbVk)?m3?vqGO}67$4$r~&3&{F1Vs!8C`I@W9=c_dayLVc&0Et|* zqlSXRK~2mUFQg{!`pWKF{ky{dxVe5XFPL?h%q&>JWojeqb}D?%Nb*=wnyDD)JXy}X zxm^sbx`D=oui+BR`X_rKZ&tB9loXcFBK`S(he5N_eO3~fRE$cI6k<<=()wXLFDDxW zeKGhK;HId3sp*vJ&59fr6dTO8EA5{vTGn`3SzoQOEw0?jw^+1I%c}i@!*tiR2H#uo z+D$ZQK{50bMSCm8!J+ZsmtTnRXg*fAt1Mi~bW2%jzZ~a1lb++}=qpLSjT=Kr$372cN-~H{AxRtxhM_LJU|8OT(FLmsNxJ* zn=dN*5ou600iBuPILn5SAjD!hjHsHRT`9PcvypG5i)|6PYeF0K3=?GwG;V6pJLrTS z_+7an1wSey=`Q2XUEAw(pvqx350|psK*jfFwi4gI)3~iWrGu8*vIWu)8&YJK`SQfh z%Bvq}qGlb7=pEO^EILvWW%NJ)0#ZRSl?Dkx0dRNi`i*$b17Mk3Idlo*eSD~!ME*>? 
zc}{Qk_@?Ih+CVaE%`R~g`%doUO{`y|d33+|yFVXRV>EDQCzBjtnQc%SED62+X@(XI zf_ao(QF*6m1j1-^gqG~=I&477>Siy|)ZmDn#M1e@q9-mZ0KL((M$uU}n>glrp(CL= zVPZ0@dG9XT6X!gd%5G9F!5XV#(TVgaikMn}6+_y@5x>s-L?i%x!xgQpaku_+1!y!lsg-S4o#Nu@ zNK81}oXH>$2<7(+hoWFZJyZ!_dwzJpK{&Y95Fb%1KTF!u6)yoUf~6`~;M)xO7H$44XBsj8b`q znZBeAMc3O$CYOZA%6@`xEvpCzuFGNnPVgvV!K0aK=ATVIo8VP?sg55~8#}q4<$@!n zN=Lt~k#4M}ub^jI?()5nJW=7NYw;`V(f6hRy+sAlNta-1u!;;yEG*co0Ai3te=e3n z{Wh^7J9b;2gV1^}>Rht^>7RESF;+>~G-Z7;y?z_DQFBuFn z_Wp#l@BWBK|9Obp+y$w&Xh7+hdK=Xpzbrhz1GKSQ-{-?LUOON7!34I&&Ww+FYN!GE zE74)}=l4g_g+=84Jq8Nz-DZWZ){%Q45wahm#>}*(#!Z;gqPCuGvkdOKQpyokrU*{o zG58>1#foz=q`H7tGllvjz_jI2CWpDJ3yml72f=b=v1n z+S-I7L<%(&5lUfLUR#-7x+E=y;)fjPbx6_&luS&wp{vhy?Qc%&{d9@qWHK$CKfd3~ z-RzT)B58JD;>^Y1SF@phlfCFA#rzq@H?rc`_|h(jiLl8)xrbCA;l`YG8T9fH(>ILZl&@n>75la_E||EwD>AD-ToQZ$clFGdd3K zhXhsGQhnHNS&}JxwCn^-GNJIB(~r-E7)7RcVM2iV(=<_=VvIFD2V$NafB1&JYu|XsVnhmoQuZXJ$uKBPx zl7|Hui89Yvk^pO9BtwT3!X$Q_xqCbGPNo%x4_5V|rasAYk##|0IZzD@JJQR-uQ8>z zI!uxp%5^b_j*KaMrwy`4_;-yZ9-5>RO-+QV_qtMF=GF0RNypjPbTknHDPC&!i1DK` zox{_8&lizTS}WAfZ+)*6*#+|e7c|pSq996%ZKy?M^?Gb*mzkh0{x8#EAH!Z}HrWk+z;(?>aP=CN(Z>wdlu@@UiTsZ?$Yl!HjGp2SiL#72ThW)YB zAAX#!bmwXn8aBjD02;oNp_El_uehiU;&RC%W{zx#GvIGK*W;V>-ogga)dVKGvger? z*c7>T1smv;odgIocB%>AAPpVK1M5>TUpvEM@!=hE?nEtGezvmHhN&!=EE>K7)id&0 zHq6FBaHexWmz8?{)(7m#aFEi;q_omPw=eLJ%j?R0ZlndDZZ}vvCW2>eT`rL|Or}9? 
zHQT{jcz)MrMGEYJPI|3%;vZXaY0o-LQB{;djr)x-fI=4UC@EHQg$Cg{!jHsNnD1aJ zFRT#jdh|o164fauGKXb2Yn`%!s1+SThoTh`*_D!f>5C$2T#3e!&`^8-C>A-K9KW~nH z&$aaXozGH6K;IWDeG|x2@IHCVLAv?t;1cbZbcw^EXIUQ0GC+@udpY3F(C+^&73WZjjF*N ze2K;Q0$Fj!fGHS2n^=ubz=iTuBGxD$%u;1)TbMaSFx;h{xLyYt5_NeLCDnaYpMZyr z{1rdkINgB0deW*)=y4Niz%&sfS)+A$;}v<~mR{0DF!NZFIzFYibpVsh1=fpYv!9zd z`boIh=LL18VQ;#$agxn-R;rvM0|QcOM;Oce^(}M9#p_qOyyU&s%q6ZfOR{J`{Iwvz z)vVT5S3W75@*3+F-wWWsew8AV@OfE=ds(dERlip7#2|QzwLW{5jN!Q^VAmynd2cm#iOo~| z_)~%!Eps?6a&qBCWMd!gn6+))0~!DVNOq`8i}~|Hypxu92h;?rZb{J(#X_2#J4@5+ zj;SfqL?ye!XSz)6%yv1(1I!2`S+a|I=wV@n&koFE!!9P}1X^TlcwRUR)8?-R3%N?o zn_FTpy11Xq)Lhoo-?#(12Hgj_*LV-+PTJZ?(AmmKD3Hm71K)i=)z5J1eRO2qxZ|(B zPkUrSh4Ma#1(o>{V6*Do&aBD-RO@-hjFS0ZdqbShZgwe6AB`eMb%;3jquH;UR99Q2(FzNW>BtswaYkeK@F>XQ5Xwulaof~&2` zZNBcOmT4%prz2^N`1tdH8#Dh0#H4%nSJ`Yg5iqS)@1Su$5|_8YOnsRS@@CuHOx?^v zJ>RV#0c}WrYP)qFSyCf7C95uzBSjkB;k{@$m|2k)XhW$lYBi2LF ztWK3G-Q{*#tE(ck3gn^pJ1^0{&X#<~_$YY^)R4u06b9CNXyr%ln(i%KsQ5DI&I`CnrcD>y;k!P>mi>?v z>x<`yje3<8c2qh$i^Y$-!*%91`rgf}6VXDl0gcAc6IuIVcCu}Ri!`EatJiRf)Dtg) zRiUvp4Sk$7;D>lDrtlxNL^Vc+b7G5|>lIqbDvM|y?;#w&b ziTsPruSKs`VWl_j+SuTI;8obug+A|mVJ(!VSli5BK&*8TW%*E?OCubKCSZSNpQ1E= z={+?|>y;(9@l-xHuX7cIZ8@&XogkAzJk$#Y(r$&R*O6tvY2|CbIa#|)(CJggMetm=HIzwM*gOaWc)O#P(Mdq z1dW^|Z#Fwh#>OBn@Uo(};_`QpGPOII%LEbA|Dk+yo+T=sHAmK{+ zV5<|yotWN4qyE_5+vYN;Y*B^ci;)UcgNb~==^j1yWpVw9pJuwHjC}<7?EnxY)K@f* zk8nqn>c~6%rh3)0#^63aQowNeV=XwrsJ_xPi71KKu>gAX7mVl{9`VcWZm2Q5deCeGhK{h%=8JnH5sCqY8)BlJTUn0-!&RAgXR^cUGg&2qH z8QV-c@entfv)_D6KITdss|z2=1wIVvxgd(9V48`k-l3urT(fYK&d`)!{wkGrUxbh) z8N`*YYLNvYwlo~^Tjmh@2s})E$d%mwBX-YC8T@O5jAaK2LwQtn7{zthh-N>@N4U@2 z`}tMOvkKfE-WP=m8#Z|55%z8wS^bn)TGe-CFHV1_uhGAg3y(LwWLtmbYFnsaCnW>f zrCm!Ll>|3jAa^}f(J9s5*+}`GtF85 zkwtN>ow2Vig(@47yQEA)(p((i1me{vmAkN8XG>MinU$ z?mGZ(t3ozb?$V1QSC;6rG2z7g>)Kxcm(DzpMoO51U5wuWR#)b(l4|+Um~^H5=Zso| z8nfwd*?pl6m$yZloLE>|a->{m5#jr4yf2K_b>R}%z~`;)-FhOcZn|aLXbpZUn^%(- zNXy;NBI;B0{-D=7V>(G7JNx)RA6={JC}WCx9#Uwl=>ELa^=W`ck_22M zmOfU%9J?Mf#|pHWqrn>+=$aqG&m$WfqGYJA`0B?tpIm&tY9SvL-_$ 
z7^!xuGY_i-=AKlLsDWcUttv98s&uFBGZnGTZ0p1cHA@if8x62(Fmq8vlUic zbAg5a-TweOnw1RVAub{e{y7nNL=x@B0rfuEJ?S1)yC2(r0tnvD*$R`Hj@_!f=XcV} z_NzIi{E*_=DK#5JM!^i|8I~m=vnq0Sk6dP&IZ(2LGR+g?wT8R%T7p_UHg+>CQbmVp z(OOrJ#F-Rj1{54){i<1ICAUSf<4;DhYkLNvrmf5jOj9OQ_CKXw85H4YgrMOnWJw`u zUfmZ5V~HPn^T7Nk(S=H`kxT8CZ6E-)cpdAZ$bv*jbd5GCq`8I!NvGyn0P{KbX6{WV zwI0X!CD9XBeA~PLuxC=EZV!Gt8s^Q_9T=AE$8{UcLfc%Mr}cX&Ak(!J@I~0$Dzkz7 zw~T@U{z9A5G^5_f^$V8F>r~RD)34^TmnGq1ry~c7?6ZrB>sS|ZpHb^ml!uBgVe*U` zgD((%=F(6?O=P4A7QZ5$$*3wp0o+(*;@*IGxk(``%;-~E~`J&#-SGK&FVz-bN;2;EX z?rB~(V=1EY-J@OBmf#m(Gl9<1+~ofNnA0fI;)HJY$&jdx3~F+z{v&(0u20&j2-2nC zoSnNRJA()<g@gqEyb?QAgGZ^BSbOCWz2#Dv}o} zLb8^Pm~K%bA3SaCnw(@&z^UpEppFgG&YQd$qe-G+k>JV*JCA&AZKpk}Nn5`rjT)=?TI5qZcwoPjtD2xkz#0fB*-?iNqN*W>1nEx(oH1TjkXk>o3suH z{6A9mVNE5)T>>>N8ZSlsGKWZ&@9tzom*?G-d(!H(FiAde7=0m8OA^(jN-XtlI63a zSn4`E*ZoZ&Pw1b-d}yK@)nsW78)-N@N8(}Wil$cUU5SZreHWS*scot0cK1+Bv!tjP zOoxW!zuKD&`A2<=mK<9Ge1Q?J(g>w& zD6m!qN4-e08*5b{am{-UN1C8}-g!2L7T~2vvC2menpcNF7DJ_>t60BeWki-qgx%DQfps1Uc#xKZgZQp?8 zf$lp}B@#tIWKbC5`Xq`03mn6c{ zI;awmNr?7hzxS--8!9@TZ0~f|cz>5Xv4)1}PvQfD+0XiU5zlUFbxgC`8~B-J)IC{e9-E~| ztin}!VRc7~YMdWA&cXSQ)KPM+ix?Uo!|eiX3#M&!`B*%WFPVX3`gy&+)Nn=B{ewJ{ zWuH*!HahN?Err`U#UT2lJ6I9TQgBjRCkeQ{6dEkoHriv`&doTte+eZBz%WyR!O1_J zdHCI0M|9QGD84D#E}_%=fLQ0#rCBzd=fjXSo)9Y&!udS zLqKb)<2lbL0s%{8`i>4kclF8Tz0GOmbZFyNYPwCGwTuzO2rnlt z!?!*E0CQRqOTQqLo9MG^z-AWceC@f3K7a~-YUv1}s}Sz8*~X)Q8*k_iD>rdLN~F$6 z0g12<2vR-rzQ5YFS_SO$7-qF%uZY__TpVpDpX_L=XxmI3*L4fC0E@tB9CMUyg&)@k z)P1Tm$jd_-?4{FgHQNIuliQ1pH^eMehD>POGPwNv=ZaM^oGdi4T^3h~cf;XIRCquJ z%Mq0xY@hVgIY;{vsW0R@u#yOqz>63d8<#L{SY%Q zu+${CY2aqfqBkHe?0|lfGoC)ip%_y-d0H~wq4 z8mTO8flVV#=&fGX_TD>>7A8qEw`1jx*dN7GZo3_?qe-M&B zeV0u(k}7luZlmdY<(@6K4;#m|yyq+Q`G@IUn4P1WGNxh|D(LSVfcS_?xIT=h29t@b zgz}U$$|CYC8dlwpYO4x5_d{kIy&^-|Up>%cfmKAL$iawg`Wky6(IHq6ImIhsqDVPX z0l@d~`_frNf_|U*NOZ=O!mTi3OZm3{0P-H6rDVz^6504Ji6S=B_>oRf9{3=4tmG7o z!{a8MdvkFF>e5dV;4Djoj1Y5@N0{T-aY-559#PSvbZ^PY({?3X6XB=RTT$p1s&@=agL0qDQr-*jgw&nxg7rh 
z;+uk@d9sFiE*9=ah~fy210Kb_$JuL(m&wuCqNtaqTm)5SVqb31Tk72GPCrHOPmR{Z zn($BF2Aw0PISk4NY%e^1wQKk?Rq{wyrFgc{!*HZ{Qh+jj8Mz<%s!CL>NJiZiYjV*% zhCem2VC4tYtbIQ!=5nX5i>Tjyli?7_m6R^m7>_(;`+bcxUG^lEbWgXl5u>ETd`t-5 z!<~)Cn;pegD!U~qUqlT<;n}}dwt`l+oJJN4nA;5)`hqq)rhV#f1v*_6_tje~biT0F zx>u-U(E6s`-(+A7QUxxK2h!4DamVdT7&yw!DJjQ6=Bd>-7Mg6Dht0;8%BtCB45eF; z!H2jt(e7;L_WsNlt2(yQey@T%Rq?H6;mbC{Bqt(8@c~&BjxY#5^;+@(Q{o3sHuSl$ zhH0Xe%Us%AC|!VZd?p<22a$~PPwrW`PnDH&7OKGAlD=dnm1miuv{r>=J zMMJjHMQzqPJ(a}GEZ-?-Xu>5>xp^78?H$MMMvNTWYBA4jE@R@|z$omcM?JVcW12?T zQbKLDOL?wLR{4@h_;wf|A8p@n#qCg1rd6v%)q+_8X;`l8fTOTlDl(GT9FpjZKAaeX z-8W%Z9)hOv6j=z@@tb&=lx4P%iaT!q0O_Tg;)jRHj>I^E4CQ<>j5~K+pYKecNLS^I z-@84s#NK4yRC{}h6K{=w^iheoizo;0+18VH_G<)R#U2+Kvr3Gl`+NhEVriWRVUTwLmXUY5T(w&pk5WWi|P zwhjlAc0Z7Z{-IqhS~y>36)+jBjc5Pa Date: Thu, 16 Mar 2023 05:01:34 -0700 Subject: [PATCH 60/93] cleanup --- .../captions.json | 1 - ...92e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg | Bin 20552 -> 0 bytes 2 files changed, 1 deletion(-) delete mode 100644 a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json delete mode 100644 a554f15b-8611-4651-a2f6-2c5445d2b1b8/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg diff --git a/a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json b/a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json deleted file mode 100644 index 38097716e1d5a..0000000000000 --- a/a554f15b-8611-4651-a2f6-2c5445d2b1b8/captions.json +++ /dev/null @@ -1 +0,0 @@ -{"C:\\Users\\islam\\dev\\gradio-repos\\gradio\\a554f15b-8611-4651-a2f6-2c5445d2b1b8\\cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg": null} \ No newline at end of file diff --git a/a554f15b-8611-4651-a2f6-2c5445d2b1b8/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg b/a554f15b-8611-4651-a2f6-2c5445d2b1b8/cheetah1d0a3c81692e072d119e2c665defbd47ce4d3b89a5hla3tv0.jpg deleted file mode 100644 index c510ff30e09c1ce410afa499f0bfc3a63c751134..0000000000000000000000000000000000000000 GIT binary patch literal 0 HcmV?d00001 literal 20552 
zcma%ib8ux(^yiChy%_Jsw(VqM+qUfqC(b09cw%#6O>Eot#P;lbe_Ok?f9~nJRk!Ng zKIe0*x~o42{deW>4gg(FN>&O00RaF&{2PG3>i}^8EHpGU3^XhZ3@ifdzXu5(78V`} z84(c)5fK?3`F{i*85Ip34HX#+3mY2?i-?erkcjmE%6~>6;Naj;kWql>=s*G-3>NJi{vUybgo1{Fg8;z)(+Z&jAR(Zj zAphb2?+>89 z&By=G+y86)f6s^cr(*gC;-5f3|K|Yaf1E==VnC5&vWh}uQK+z)xQK zcMX6H1@R9x6b3*TfM!=Lu)q^%W_oP}y;P_(>Zpu*uZm=ja16)koli*CHjZgKl7i`S zstpHUWzxV~WwxP=urY&(u3&}$q6k*3xRdA)@vau(&s;Ez?6PC+D_n|_S`)S%BBYfV z4qXB?0kOqs2zvM>fwC9c)R5+TVSvjGRn|ylW!5F}7}{W2V4)?tVSRjM-h?LW+)EW~ zG@5shv#o%_i1Z}x6Ko5VE1%&0(dSUx<`R5N0em>~jR8W00*EX6-?H;)C_ZZ06h3STfrFg4gWuKO6XSxs|DlNHy078>(#3?4!3%P;L#_+-@cO(@f)WtCYuBNSOeB$3B~w6^m%n0 z*ohlDxO)6$YZzdLT$XLr-TpFrN44h&QS2pK3O1k{DIA8`m*1}B(pCli*y`c8sEfUk#0 z<0n>;CY76maW<03zz4rvYj-ZcH+JV-oy@+zBk#G(g*>f_%+O?|;xi6xSRiMdn8cDh zO(7cx%@G&ncPQi-_C)PtXU=JXxWWQ-+0Wqw10x&!urD+2hjo!r4n<-U0bvRc|B(5H zW(Q71;*PC5MFntKX+H$U%zq!5uHU1=-lSeY;)W9THD6rAi!V>7-W~4GZ8)7KEGbI) z(wZpHJW6k&`UP;dIs7B!&l8T!kzGA?wD4z882z7w2x67lTgi7Za>nX@hn9}WsaVg; zUNUFtt#%MUZ!zM@w;~(xmQA`8NU-`mVr!4F=a#E{<@n<2kFL`fYvyF;;uE{&WSAKA z8@5eRRwyBfM8H*S;za8P+Oh1o_|1ywY#th(a-J&p&yj`!b%{btbMrNIhz~N_(T5Kk zy;s6gdzQ9mdCMmf%FaNBx~PMgeF&MgEpwC0@r{|ApRS$;r+)#c?jJ(x=tVXtN$Cfa zWb}hh?ycUi!o~0X91SHAu^C2g*T=G%2&!um5lb=|{^*c>DWqHv_pi}_-2NCte@||t z8G^q6c+91j+=D{hHJNYDYz8@Qt$kG_sw?#A_YTs4JyWLpsO!XcCqzb{ER=ls*6l4i}Xtb8G7i6d# zi@Cb1oH1iae*qDEb;*;uETg8=45Eam=F%Rqs8YjpVNl6-3^#wyt->Dxc3qTSPN0z) zU{PKl#Wn!PM~i1a_Hr!gBIYz+V#+Hnb>7;|5m6+;+*F5BfEG+^VMzp$i+8111P2Lj z))Pl=MM|fSEt^Kfquwx{t>Z~tW5ed+vA+;L<>44OQHtyc*{YMS;>eNuG{Fh4YEi|k zYydO44nwuH6}!sUm9@;s+FbWnGodun7Hu*vE>zXiONnua1o$NYVBvT#yK+OA zYJz`&H-@k@$kZINxqEezG{B>fIHlds1g8=kgtAE@HKQ|;rdD?$%1-(rH3272B8;`x zUk97StuAxTcsWMO@>KB%88gl&Xu?TwU9RBO`GVv>=3~=%h~1_@Pp8xMt@JR`Ot=Rm zKbs{QuISDGl!qggcciqeIA+(s6Q3$HMn$dE}v;YVYy&R z`)Hmq8H289R%YKtd-TphC>v8M@S$TiZrW4bCSVO(-8R4=lEbMO@aM=jPW&yY6TLyY!<4P~*U z6tHV{eq{GEhmG{>4r<+Pci_PVy_8y1Gv1>}SdC<=wS!$l?Qr||x>f~&V zlcUJ|n!<7hHOr-yk}I4cbsRFHgLpKKsck)D6{+eX4=w5Yf(c(W#Y_WPyhBp0b|;XN 
z@x&18G@*Y%=zT*_B8hcze)Dtj5+3_R!x=1xCBw}S01%ec@+$qC<1gUUv;)WLglbL* z%v90*il^L5{83X>f}L`+^NPtIroUQuqiKYE@c|)>6^L)sZd?5rfA*+4-ptrpC2nIG zm$jTLD(tK8qdh;6`O2oOsCXXAzpOGu9|OxHa-lsUdm&;<9UZnE<7y=l#sMkQ;Oe-y z9(xHj!Pc4WDkupe5X>@dN7C*jL!w}?E)_6_Xhahcq@T1s@YRwDV@Lp3h4jOK1x8rp zpUB-#U<|~2g`@@G{mtXKR_SrIf24#Xj5iA#-V-T|BzZ74QcxKixYZ@|5_ZO{JRW<1 zqDEJhs`T^*NbpSVB&wti;uZ0NSvU6A(xaB(3`TBtemYE`MeYSq{u%|`HhI%iQ~8cz zXPW!mzzf8)=>*~8wGHZ}$VD|~G1tVM(_`bVLsr7-{TNwsWkVyC75u@1!}XpZwlreY z=P=`~T#<*36|V3XpeeZQLFW*j&uvz8Zy-cdQ!_Dl(I*o*;h9~r<8oL*<2Z1*lQ-vE z5_7Hp4gTPTMtPS+M8l{9S2xBClr2c ziMC2oKOQo`q5*RbI^nF5ql9(Mut4ydN{mU)xK0G>`O~+Z4uIk zF<^E4=-i+>hv*N|+z->g8#+7bBsgWsMY1l5Mya03+!9T5^qjH znQ4r;MEZRgj@<7IbpqnSC;b{ur2gDmw`Pt0oHu{7y-xX11h(axz+c2;1Z%IU=k;%& zU1fHYDyOU@4_Z%^)XgtiApvD@WO70%zfqyWJs2b7K5$<8`X|}w9KA-#8ep%Lq?<3< zI9NEBf!7U+Kl zd<`@kf0}gljT<=YJIyS+z{I{fc=`smT+|a_I6@0S1{ZQJm?J z$ZC9X$^Hrl%DqZ(`4gif3q#=szJovdr5SZ$r3$eTTZ#{TJ+ctvKC~y_9`PE$xf`km z-N2&4t6x!16b-;%KYe^#BxgLArKprW=@N9x4^Wiu&aqHS$|mMX*v=Q7=N&VR6jO}{EGodE)c z46gg;RO|~aq>VkTwIxwgSr@PlFRMpF z_ej*s?Z10a*9pg@_040H4=+|ZfPW2chA(_otm^qe=&0n>_dLRZy$BaE#EHaD_`@>Z zTSCcMWmf`fY@g03J2p1yW+CyH$gJ(b3XyI17$GGpD)Ah3IKO5{e7!CYqJ$1Y zVpP4=sdhBYT|-A|JZ3`)eo?bZe|!)3_5QG^P(@72qsJ35xbBer1*pATyZkXgA1g`; zl$}D+mCC;^%ulANp6aUv%ULE#L&tj_r2Nr0tXzDSRwPxb|A>lV*ogNjK{q7Qz#a`FCcwiqj`rASqsCe0 z{aQqRkC1G`AmjKA&p+q^v9IVCIWmq-X-oA1L6toy!K79&_SL(Hy(~^WCrf z%R!3c+$eT^1QcTSKHLqxaoB}?rnp)Bq{0>rBR7o?_0S!V*Ks}c=1MKLkt0dm%yOyk zkCvZD{E^69HZp79ZO{gy_0(#%om7Bbkb90(p^?m&AZ`cRBv+-R>|}V-`C*E3=E0N% z%kc6OU55kwPUS7I{zz$J)Ij0m9Xjwc6^E?`I?KRJNd=}8rrQvln2!|NtA_E*Y#oln zhs@c6TEoHOrEEn)m!sWISuU~Iv*NC2{eiVqZ6zKWErQ@{d)LR}M6aHGF-4SlVfS3j zz5tBGg*4S93Spl7I5Jxs33mgT(O$2V<=#K`;F?!ru2$j#=2v*8k@JoP(7L+}Y ziQf|bg>yDWq5kvk6Iq6*OjVuTK0-dLAbt~9F`}@%0cU<^{$+JRbyb|5bel9m*6T{c zrjD0yInhvcX_TI98TeBc4Db)ryuinz(X;h9Nm!t&=Vq3cg0-(kc*UR-{&61n?`kxK zIKi$4om=@~k>5k>Z|bKJbvB+|y?+7gk=NrDPoqE{;z?h0`Q#6xT>RUkQsTW0nzoD* zuQ+YY!MY-2nuABkj`?57av4BM2+6NcAAg`Vr)Yv>q^tp#+fBxW+X{dZf_s#E#J>P) 
zUX6H9LIl!SPtw|A*Vi9D`~e(IqJfpLy-fu-AX1r8t?38jY$A1R4vlf++(Pe+AnbZc zLarTJOHiy|W|f&3mx`Y50Cyu+lvPT5Dm7t3De>onU4e%^%`68}S#?6xIAbq=jG0|P zhTg?lNgH)4mwP{il<^MMVCoDtIz#Ebdcwh1T(#~?l&0U$?-DO$c2(mg zBH3`$OZ1MRbs@9eD~yNx zu&Yqon=-GE3NP#j^Xa>sq3x$K{Gn_43y6NN;Cb37dtD|4)kaO`vrG{iD;MkR0g|4d z?41LqLF+i*gZHEsnNiN#EK#rv?Rb>uQlUu4w?h(i1I1iu`KYE0*i}2lNa$_Q-aBO2 zCMI}x{c?rr8gdE*XfrLp1??#6`E`EXT}dbe36Q`EHSzW~3RqUnbF`UTpfS^lsFPhn zdu_;)q)zvi)^DWf66XB0iFs+^Chub58A3Dpd>Blk=QDAai!e7RlcheTDFlJKg|>aY z$I8AJY>lmd;Lg-_hHg@(d-EHm!7V$w!V1&PgD)DKap}rfx;p4gE_7|XH zpiPl}t3)5P0%f6aQ7>0pKO}u&0LN0SXbtq19E1YJ{wOe|2+@W^W!3KY;eAse|a)8#o+OJy}cG6jOJ*sB}T?h*|k1e7Fq_NP%0Pv6)Y&sH*V=mx)2Ce1OU{+HeYucNj0N@DJQ&wm{0`q$1G5|c3bQpl(YCKcJV`oPU@p((p78zP72!bo zV$0#b09Vk(^l9dAQGbVdDkS7(L!!%GT-rns`1fb&JIKqzari=Y9F3-k_z`b(2^SBq zQZQ#Xu4FnIE0)H7C!`}O=Ekj@R_AnAu8e7(3Dg$p>JzY0-%r|>AFug!oP?nv)SCqY zX2YdP%30{SCff(}{wvej3VTD;84(1(V#ORR6#&35)xy_>DrZdCw$*g8nU{Bd14)=( zgbtXs^+gB-R5r6Uy78z9eR)nmlwaUh69-R4IUnhe828T%f_$`tN@LD@iyL@7d!hBw zx(o_HH~Z8a_|fp=a%K|{Rgh(zECn4gy@V!Vlg9CqUKBh67CvwQqE9nyT}1auQgIf2 zZ4vX7BT@BDer8um=fin^lSa>DZyT^kE3XknG8B~$zk!+Z*EL1he z1iZ79AU5*oVwwdsrK#j*Qy;jM&Xzn2-ZW#SIchX!Xg1(HkVSX3fVEOAvweqMS&`y~ z?%(-fxqV(|1(A^k8R>Qm()0;*6wp(AFCMN$gC?&bzGlowXu_|N z#UFeOyeB;zrig5*>-=hO3GtyAS zo0Y9Tcfnr1@55S*;NsvJCY0S`z%JU{+|?YjuL2KB9B8w2efL>;sp~eBbbhT>Axic) z0SR0#+qxl>|;qNB%6#(8I2r4HpiYU?t@UTU05P{=6lDm+DvTx6)NGRd5}8kQ98 z<|f*oKX-Yog~QgjFfv4w=U4F9PK=%AGl{hg6Y~GZ#(z}YrY{n`tKAABDY$TrVq+W2 zWA-R6E;XNK8$UwWW%pdLL*?PxN24DuG4LV-Bfr*WRV*s$GceB$%e(`-8Is%79-f zhB^9X=U97wnzX=ih=CkfzX{^0x76i;QwE&To_z>iR7VLVU8a60SycW7pr?r<;DbQ) z;mW-KHtBROR|kK7l2Ei4w{^k#OUS-|Sa7ij@e~~I`>DFL8*xWB%c&HhhxxZXG-$I-0ufD@ip zX~y*Zx>2o-(qAm*mIv6&14zAu@#V3w!qxtaJ;PcgretkO8clp*m742KjDZ~HFYDG5J zL@cHy-P)01x>h<)Fw{x&Kn7d<;O155>s>MXb1eh(HAC(a{POD_`K2MCGLu@~LgV8A zjs9mkT4OuzPVDQBOP+2i22t!I;X`0?nzTL58iHYe_iT&@fmpM(uVHaBd$KdE*A6m~ zbI;@On_52bVxn#u-7WbUqTRE0fls|av&qLS>e*68BSMbI@!Ydsuy<$nfk$xp?ma#4mQ*Qq--vXeHNC<& 
zOgqHk-hk~4jX$Exd1_2A4IT~AHLy@+@WeRx(TKV#MR!JNJ_jE*vb5hVa0~kus&Rzv zQv0^YTsZy4Z&~w83|+jn=r5Vm%cL;nU$g>MkG*n3B9v_Vxv<=3Yn=%ySysg{@J9U- z0dPiLGfWmb6jOh=kb~iShrocG-?n#xVhZJ;7D9i0g$N$gOt$OP+egrp6n`ihX|*QL z_te^3H#rABD*>4~5qy7X5{Jm~gz>)tm>Zr7iChQq&6n)%h5|&Qmx-thE+QKJhK`g|*|)X>3t(#a5mIO#b7CYRkfC)T4vFrijqb=%!4q4oEIL zPKCK{fmH#z<9VUh8vB#ep7B55fQPLu$ui0G>ai(Alwnwx$HZM7TGVvXT7)zQ%ef@g z5o_>k)~2z^N#*1Tf%w1iN^H71!b1+qRSMU)9$=0mvGkB+vS~Ffjj)-xF^0>^OOnqm ztHKuehzP9AOT#6k~w_ilE+Jr26JlV#P(@BG@**FJN-W;DE* zY~|@iicKXcgles&UgS?YF+Q0&`X0UtZ#iwu;xY|J9VQtniqU3X$Si0)1Hk%3fF*c( zNmQoo_j2KNSQP!FvG7$ym{!cf+NJ_}_NtO_`*P@Y8k09fsR78iPJ=@%g1jYrNbnb% z1h^=N-zyTO1O&>?(TE43@P7~bT#?OQWUkh2vqigW0sDPYvxK^{HC$w)!9R@V0{BZe zPwJvmrY`pVhoS6}Lfei(7lxUmepY!jO+|XsIHgK($wNaRhM#cROsuh=2bqVHDJLiW zNayAA?$y%D3;VxQ!x|k@z*Qt_d3{}>5NoZJRF6|9l8hj$Sq{6l#ysTk(8aq>mQf3k zkg7}L@lxkeG50IqMj`3J)P*k>Q_cc&Q&yH5<-nx`R?lQJr{%aai@;VB@~>l+E@EIo z;cu$++2v_xugaX9Og5pEbHb&_lGHCo8aM+j%Y_|!1pBxUx~+UhV>=jyL^0JD;Sn@4;ZCSEcUhe0hOo=nm7+&}=Q&fty2 zKILepz86<|y*TB>6PFSMIg)fco6D3pdC1cwV+EnLn<^KhqhpH0LX6bOtT#hfmrK%C zQ=wA<%;ZY~tK5LR>rVy%pu;aE&)>6he?*!}_@`g>P)5GHP;Hgodi!=G)fPURIeCph zN5(dI*ZW^F<}AWcnxlRmrW?I!vqi4o#o2oUa|!1&d52mkE2A=n({*Sx3m{^qO?66U_S4JpG6iqPWHPJi-&b#6_uU%yK!-boXizHZ znL@R;{F!c|qFItpmn2Gw?;!sR=zsiGV@tsBt@0{fX4@Wf?1z`q9Wtuu6OL@zFsI}k zrlJx~qdj+$B->DOnR6A8$QAzFu98jxJjLKpvc@9(9G5J-zeGbW&Y(;@4@-7#mdzK> zA5XWEwo;?Zx*naWgM^RYdw|>1D|ihDm1lb{_NK{tfLNN8@}`WEN!tJ8Qgvxhk{+@i znUuudlh9{UiEY+iFkH)1OHEfM>G`@|47%poKF{Re0=diCmiDcn@5a6NJxSccSC-{G zm>6=_u7A38Y2mb)PN66q8rLD4y>jd}V5(uHYDuwN$+P{HP^_aUiXE*PX&-ntHAhQ0 z%r#3AU#)$MzujZZ>6H-zD;P4(t8|cR5YqLGE)_lX+CMQ?E zY5lmDWr++pn9DpiKn_C4>}5F*ckgL-;k4S*Tp6QG%H9bZcpO_MD|M*4Z4&F#I$F~* zi!Jf2SD>$_RCQ8ZY{RIoU)?HA9-7lY{Ck1tK;@~t_z^E=KS(rxK9^V3&KrB=B28AT zqMmf&r(;P9ECM%Sv$|+0l$@!Mq^}^ zB159DuduE%$}}9Gy*AGX)u7h2QQUI|w=vX1{g=SiDFs~&M^2{IX^`FW2MVi|V*=cv!%IU@#) z_@&U%XPR1Qk>TGV;fLCU+Y+gCGfs+hqG?2m4`adQsl{U+7rV&>La z5UElu8pw{pqS3>qECOzs)S3*8RU*6A3}Mk>ZqZrL9l%lvbE 
zcox#i8;ZuVr7D;cN}-3&=x9uyPlk(MSSeG(YQ0>^En+md&7cQc>DjW8t>!uB2f}kh zZ+)7plE$$^ZohnM3o(hUQwuJfOZ~aZ_DLx7Kqz7fd1g?FZIO|r%z``+KsWGslw)83 ze|GXpx_da$>}yVw_qZ_iOKZ?+aGq@RDw!MBQ+fE6>H1@a>P1EDo z?qd~?l;xV9`&5F(R6%vTe*sq<2Re(;ZX$3$_UgY){W$ZhrZ`lW`n4|8qd<6*7?4mx zYxOU{Q~cp}L#mv{ln|^Z*Lzu{f%vUiRz85n?Y_3l*(`G6-hw%rT_!NUeu=}5ts#r2 zs(G8=xN^*|pl!$Dz3ia)H&v*;V6w+221_GZy?A{B8MnpGt7Q_WsEen>+$AT@94_II zQMSqZu`-&l$rRjEN$euTu(8rW$MgLMt}(tUU?;KlbK#=h@@V5nzy3B(9Jh%Ew(BtXn)Tz%`CG~d zc_}5fd4R@(>zQx%w2O1%FusL4FNBmIce$|deLjtudOU5pyoGJoVmr95jb?=`-sc`h z?~S07nkXtoHJX-FuNPh0nVLA{4#0KMclj4kO?rQWx9Na0WMiCklxu|6f5+YdYpm2A zwDz^(265PO{91mWBp6xqjaHMnrt$d-;U3RjkW!HE-5rg*yT{;o zC8u!Mhfcua8qQFD`OjpTqGX=DSPs8TMJK(FetqlAXDgr68u}1tmVh8V8yitbZ25p z8<&4VK=;0Ct_@f=Q*+BQ+7sk>Jl0AW#4LBFBvG+)6XJwOW3qo=kWp6Kr3z6Y znVekEl$7A>0-J+s2W+(2h=1Tjzr}2QD#<6r7J0b3&<*E7<3OgijW#nAR?pu)rr_vFxgMfk08WK`bRuZ~cVFN8&`y%{tCm&aZ?r#dQ;MZUZt{xplznUA(v$ z=j->NyEN_!8IqZ{^=PC{A$~qZNShipZDW&}j~asrmVG!*%LvJx`R6Q5byTH1D4 zm2B2PK;1#G1o#YpRJg5}6$lSbVk)?m3?vqGO}67$4$r~&3&{F1Vs!8C`I@W9=c_dayLVc&0Et|* zqlSXRK~2mUFQg{!`pWKF{ky{dxVe5XFPL?h%q&>JWojeqb}D?%Nb*=wnyDD)JXy}X zxm^sbx`D=oui+BR`X_rKZ&tB9loXcFBK`S(he5N_eO3~fRE$cI6k<<=()wXLFDDxW zeKGhK;HId3sp*vJ&59fr6dTO8EA5{vTGn`3SzoQOEw0?jw^+1I%c}i@!*tiR2H#uo z+D$ZQK{50bMSCm8!J+ZsmtTnRXg*fAt1Mi~bW2%jzZ~a1lb++}=qpLSjT=Kr$372cN-~H{AxRtxhM_LJU|8OT(FLmsNxJ* zn=dN*5ou600iBuPILn5SAjD!hjHsHRT`9PcvypG5i)|6PYeF0K3=?GwG;V6pJLrTS z_+7an1wSey=`Q2XUEAw(pvqx350|psK*jfFwi4gI)3~iWrGu8*vIWu)8&YJK`SQfh z%Bvq}qGlb7=pEO^EILvWW%NJ)0#ZRSl?Dkx0dRNi`i*$b17Mk3Idlo*eSD~!ME*>? 
zc}{Qk_@?Ih+CVaE%`R~g`%doUO{`y|d33+|yFVXRV>EDQCzBjtnQc%SED62+X@(XI zf_ao(QF*6m1j1-^gqG~=I&477>Siy|)ZmDn#M1e@q9-mZ0KL((M$uU}n>glrp(CL= zVPZ0@dG9XT6X!gd%5G9F!5XV#(TVgaikMn}6+_y@5x>s-L?i%x!xgQpaku_+1!y!lsg-S4o#Nu@ zNK81}oXH>$2<7(+hoWFZJyZ!_dwzJpK{&Y95Fb%1KTF!u6)yoUf~6`~;M)xO7H$44XBsj8b`q znZBeAMc3O$CYOZA%6@`xEvpCzuFGNnPVgvV!K0aK=ATVIo8VP?sg55~8#}q4<$@!n zN=Lt~k#4M}ub^jI?()5nJW=7NYw;`V(f6hRy+sAlNta-1u!;;yEG*co0Ai3te=e3n z{Wh^7J9b;2gV1^}>Rht^>7RESF;+>~G-Z7;y?z_DQFBuFn z_Wp#l@BWBK|9Obp+y$w&Xh7+hdK=Xpzbrhz1GKSQ-{-?LUOON7!34I&&Ww+FYN!GE zE74)}=l4g_g+=84Jq8Nz-DZWZ){%Q45wahm#>}*(#!Z;gqPCuGvkdOKQpyokrU*{o zG58>1#foz=q`H7tGllvjz_jI2CWpDJ3yml72f=b=v1n z+S-I7L<%(&5lUfLUR#-7x+E=y;)fjPbx6_&luS&wp{vhy?Qc%&{d9@qWHK$CKfd3~ z-RzT)B58JD;>^Y1SF@phlfCFA#rzq@H?rc`_|h(jiLl8)xrbCA;l`YG8T9fH(>ILZl&@n>75la_E||EwD>AD-ToQZ$clFGdd3K zhXhsGQhnHNS&}JxwCn^-GNJIB(~r-E7)7RcVM2iV(=<_=VvIFD2V$NafB1&JYu|XsVnhmoQuZXJ$uKBPx zl7|Hui89Yvk^pO9BtwT3!X$Q_xqCbGPNo%x4_5V|rasAYk##|0IZzD@JJQR-uQ8>z zI!uxp%5^b_j*KaMrwy`4_;-yZ9-5>RO-+QV_qtMF=GF0RNypjPbTknHDPC&!i1DK` zox{_8&lizTS}WAfZ+)*6*#+|e7c|pSq996%ZKy?M^?Gb*mzkh0{x8#EAH!Z}HrWk+z;(?>aP=CN(Z>wdlu@@UiTsZ?$Yl!HjGp2SiL#72ThW)YB zAAX#!bmwXn8aBjD02;oNp_El_uehiU;&RC%W{zx#GvIGK*W;V>-ogga)dVKGvger? z*c7>T1smv;odgIocB%>AAPpVK1M5>TUpvEM@!=hE?nEtGezvmHhN&!=EE>K7)id&0 zHq6FBaHexWmz8?{)(7m#aFEi;q_omPw=eLJ%j?R0ZlndDZZ}vvCW2>eT`rL|Or}9? 
zHQT{jcz)MrMGEYJPI|3%;vZXaY0o-LQB{;djr)x-fI=4UC@EHQg$Cg{!jHsNnD1aJ zFRT#jdh|o164fauGKXb2Yn`%!s1+SThoTh`*_D!f>5C$2T#3e!&`^8-C>A-K9KW~nH z&$aaXozGH6K;IWDeG|x2@IHCVLAv?t;1cbZbcw^EXIUQ0GC+@udpY3F(C+^&73WZjjF*N ze2K;Q0$Fj!fGHS2n^=ubz=iTuBGxD$%u;1)TbMaSFx;h{xLyYt5_NeLCDnaYpMZyr z{1rdkINgB0deW*)=y4Niz%&sfS)+A$;}v<~mR{0DF!NZFIzFYibpVsh1=fpYv!9zd z`boIh=LL18VQ;#$agxn-R;rvM0|QcOM;Oce^(}M9#p_qOyyU&s%q6ZfOR{J`{Iwvz z)vVT5S3W75@*3+F-wWWsew8AV@OfE=ds(dERlip7#2|QzwLW{5jN!Q^VAmynd2cm#iOo~| z_)~%!Eps?6a&qBCWMd!gn6+))0~!DVNOq`8i}~|Hypxu92h;?rZb{J(#X_2#J4@5+ zj;SfqL?ye!XSz)6%yv1(1I!2`S+a|I=wV@n&koFE!!9P}1X^TlcwRUR)8?-R3%N?o zn_FTpy11Xq)Lhoo-?#(12Hgj_*LV-+PTJZ?(AmmKD3Hm71K)i=)z5J1eRO2qxZ|(B zPkUrSh4Ma#1(o>{V6*Do&aBD-RO@-hjFS0ZdqbShZgwe6AB`eMb%;3jquH;UR99Q2(FzNW>BtswaYkeK@F>XQ5Xwulaof~&2` zZNBcOmT4%prz2^N`1tdH8#Dh0#H4%nSJ`Yg5iqS)@1Su$5|_8YOnsRS@@CuHOx?^v zJ>RV#0c}WrYP)qFSyCf7C95uzBSjkB;k{@$m|2k)XhW$lYBi2LF ztWK3G-Q{*#tE(ck3gn^pJ1^0{&X#<~_$YY^)R4u06b9CNXyr%ln(i%KsQ5DI&I`CnrcD>y;k!P>mi>?v z>x<`yje3<8c2qh$i^Y$-!*%91`rgf}6VXDl0gcAc6IuIVcCu}Ri!`EatJiRf)Dtg) zRiUvp4Sk$7;D>lDrtlxNL^Vc+b7G5|>lIqbDvM|y?;#w&b ziTsPruSKs`VWl_j+SuTI;8obug+A|mVJ(!VSli5BK&*8TW%*E?OCubKCSZSNpQ1E= z={+?|>y;(9@l-xHuX7cIZ8@&XogkAzJk$#Y(r$&R*O6tvY2|CbIa#|)(CJggMetm=HIzwM*gOaWc)O#P(Mdq z1dW^|Z#Fwh#>OBn@Uo(};_`QpGPOII%LEbA|Dk+yo+T=sHAmK{+ zV5<|yotWN4qyE_5+vYN;Y*B^ci;)UcgNb~==^j1yWpVw9pJuwHjC}<7?EnxY)K@f* zk8nqn>c~6%rh3)0#^63aQowNeV=XwrsJ_xPi71KKu>gAX7mVl{9`VcWZm2Q5deCeGhK{h%=8JnH5sCqY8)BlJTUn0-!&RAgXR^cUGg&2qH z8QV-c@entfv)_D6KITdss|z2=1wIVvxgd(9V48`k-l3urT(fYK&d`)!{wkGrUxbh) z8N`*YYLNvYwlo~^Tjmh@2s})E$d%mwBX-YC8T@O5jAaK2LwQtn7{zthh-N>@N4U@2 z`}tMOvkKfE-WP=m8#Z|55%z8wS^bn)TGe-CFHV1_uhGAg3y(LwWLtmbYFnsaCnW>f zrCm!Ll>|3jAa^}f(J9s5*+}`GtF85 zkwtN>ow2Vig(@47yQEA)(p((i1me{vmAkN8XG>MinU$ z?mGZ(t3ozb?$V1QSC;6rG2z7g>)Kxcm(DzpMoO51U5wuWR#)b(l4|+Um~^H5=Zso| z8nfwd*?pl6m$yZloLE>|a->{m5#jr4yf2K_b>R}%z~`;)-FhOcZn|aLXbpZUn^%(- zNXy;NBI;B0{-D=7V>(G7JNx)RA6={JC}WCx9#Uwl=>ELa^=W`ck_22M zmOfU%9J?Mf#|pHWqrn>+=$aqG&m$WfqGYJA`0B?tpIm&tY9SvL-_$ 
z7^!xuGY_i-=AKlLsDWcUttv98s&uFBGZnGTZ0p1cHA@if8x62(Fmq8vlUic zbAg5a-TweOnw1RVAub{e{y7nNL=x@B0rfuEJ?S1)yC2(r0tnvD*$R`Hj@_!f=XcV} z_NzIi{E*_=DK#5JM!^i|8I~m=vnq0Sk6dP&IZ(2LGR+g?wT8R%T7p_UHg+>CQbmVp z(OOrJ#F-Rj1{54){i<1ICAUSf<4;DhYkLNvrmf5jOj9OQ_CKXw85H4YgrMOnWJw`u zUfmZ5V~HPn^T7Nk(S=H`kxT8CZ6E-)cpdAZ$bv*jbd5GCq`8I!NvGyn0P{KbX6{WV zwI0X!CD9XBeA~PLuxC=EZV!Gt8s^Q_9T=AE$8{UcLfc%Mr}cX&Ak(!J@I~0$Dzkz7 zw~T@U{z9A5G^5_f^$V8F>r~RD)34^TmnGq1ry~c7?6ZrB>sS|ZpHb^ml!uBgVe*U` zgD((%=F(6?O=P4A7QZ5$$*3wp0o+(*;@*IGxk(``%;-~E~`J&#-SGK&FVz-bN;2;EX z?rB~(V=1EY-J@OBmf#m(Gl9<1+~ofNnA0fI;)HJY$&jdx3~F+z{v&(0u20&j2-2nC zoSnNRJA()<g@gqEyb?QAgGZ^BSbOCWz2#Dv}o} zLb8^Pm~K%bA3SaCnw(@&z^UpEppFgG&YQd$qe-G+k>JV*JCA&AZKpk}Nn5`rjT)=?TI5qZcwoPjtD2xkz#0fB*-?iNqN*W>1nEx(oH1TjkXk>o3suH z{6A9mVNE5)T>>>N8ZSlsGKWZ&@9tzom*?G-d(!H(FiAde7=0m8OA^(jN-XtlI63a zSn4`E*ZoZ&Pw1b-d}yK@)nsW78)-N@N8(}Wil$cUU5SZreHWS*scot0cK1+Bv!tjP zOoxW!zuKD&`A2<=mK<9Ge1Q?J(g>w& zD6m!qN4-e08*5b{am{-UN1C8}-g!2L7T~2vvC2menpcNF7DJ_>t60BeWki-qgx%DQfps1Uc#xKZgZQp?8 zf$lp}B@#tIWKbC5`Xq`03mn6c{ zI;awmNr?7hzxS--8!9@TZ0~f|cz>5Xv4)1}PvQfD+0XiU5zlUFbxgC`8~B-J)IC{e9-E~| ztin}!VRc7~YMdWA&cXSQ)KPM+ix?Uo!|eiX3#M&!`B*%WFPVX3`gy&+)Nn=B{ewJ{ zWuH*!HahN?Err`U#UT2lJ6I9TQgBjRCkeQ{6dEkoHriv`&doTte+eZBz%WyR!O1_J zdHCI0M|9QGD84D#E}_%=fLQ0#rCBzd=fjXSo)9Y&!udS zLqKb)<2lbL0s%{8`i>4kclF8Tz0GOmbZFyNYPwCGwTuzO2rnlt z!?!*E0CQRqOTQqLo9MG^z-AWceC@f3K7a~-YUv1}s}Sz8*~X)Q8*k_iD>rdLN~F$6 z0g12<2vR-rzQ5YFS_SO$7-qF%uZY__TpVpDpX_L=XxmI3*L4fC0E@tB9CMUyg&)@k z)P1Tm$jd_-?4{FgHQNIuliQ1pH^eMehD>POGPwNv=ZaM^oGdi4T^3h~cf;XIRCquJ z%Mq0xY@hVgIY;{vsW0R@u#yOqz>63d8<#L{SY%Q zu+${CY2aqfqBkHe?0|lfGoC)ip%_y-d0H~wq4 z8mTO8flVV#=&fGX_TD>>7A8qEw`1jx*dN7GZo3_?qe-M&B zeV0u(k}7luZlmdY<(@6K4;#m|yyq+Q`G@IUn4P1WGNxh|D(LSVfcS_?xIT=h29t@b zgz}U$$|CYC8dlwpYO4x5_d{kIy&^-|Up>%cfmKAL$iawg`Wky6(IHq6ImIhsqDVPX z0l@d~`_frNf_|U*NOZ=O!mTi3OZm3{0P-H6rDVz^6504Ji6S=B_>oRf9{3=4tmG7o z!{a8MdvkFF>e5dV;4Djoj1Y5@N0{T-aY-559#PSvbZ^PY({?3X6XB=RTT$p1s&@=agL0qDQr-*jgw&nxg7rh 
z;+uk@d9sFiE*9=ah~fy210Kb_$JuL(m&wuCqNtaqTm)5SVqb31Tk72GPCrHOPmR{Z zn($BF2Aw0PISk4NY%e^1wQKk?Rq{wyrFgc{!*HZ{Qh+jj8Mz<%s!CL>NJiZiYjV*% zhCem2VC4tYtbIQ!=5nX5i>Tjyli?7_m6R^m7>_(;`+bcxUG^lEbWgXl5u>ETd`t-5 z!<~)Cn;pegD!U~qUqlT<;n}}dwt`l+oJJN4nA;5)`hqq)rhV#f1v*_6_tje~biT0F zx>u-U(E6s`-(+A7QUxxK2h!4DamVdT7&yw!DJjQ6=Bd>-7Mg6Dht0;8%BtCB45eF; z!H2jt(e7;L_WsNlt2(yQey@T%Rq?H6;mbC{Bqt(8@c~&BjxY#5^;+@(Q{o3sHuSl$ zhH0Xe%Us%AC|!VZd?p<22a$~PPwrW`PnDH&7OKGAlD=dnm1miuv{r>=J zMMJjHMQzqPJ(a}GEZ-?-Xu>5>xp^78?H$MMMvNTWYBA4jE@R@|z$omcM?JVcW12?T zQbKLDOL?wLR{4@h_;wf|A8p@n#qCg1rd6v%)q+_8X;`l8fTOTlDl(GT9FpjZKAaeX z-8W%Z9)hOv6j=z@@tb&=lx4P%iaT!q0O_Tg;)jRHj>I^E4CQ<>j5~K+pYKecNLS^I z-@84s#NK4yRC{}h6K{=w^iheoizo;0+18VH_G<)R#U2+Kvr3Gl`+NhEVriWRVUTwLmXUY5T(w&pk5WWi|P zwhjlAc0Z7Z{-IqhS~y>36)+jBjc5Pa Date: Thu, 16 Mar 2023 05:08:21 -0700 Subject: [PATCH 61/93] added tests --- client/python/test/test_client.py | 18 ++++++++++++++++++ 1 file changed, 18 insertions(+) diff --git a/client/python/test/test_client.py b/client/python/test/test_client.py index a1ace313b166f..c1685ae7aee4f 100644 --- a/client/python/test/test_client.py +++ b/client/python/test/test_client.py @@ -1,2 +1,20 @@ +import json + import pytest +from gradio_client import Client + + +class TestPredictionsFromSpaces: + @pytest.mark.flaky + def test_numerical_to_label_space(self): + client = Client(space="abidlabs/titanic-survival") + output = client.predict("male", 77, 10).result() + assert json.load(open(output))["label"] == "Perishes" + + @pytest.mark.flaky + def test_private_space(self): + access_token = "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" # Intentionally revealing this key for testing purposes + client = Client(space="gradio-tests/not-actually-private-space", access_token=access_token) + output = client.predict("abc").result() + assert output == "abc" From ed4d85138a7b9b45ec2dfff13ff98bd91c1a875c Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 05:16:26 -0700 Subject: [PATCH 62/93] adding scripts --- 
client/python/scripts/backend_ci.sh | 13 +++++++++++++ client/python/test/test_utils.py | 5 +++-- 2 files changed, 16 insertions(+), 2 deletions(-) create mode 100644 client/python/scripts/backend_ci.sh diff --git a/client/python/scripts/backend_ci.sh b/client/python/scripts/backend_ci.sh new file mode 100644 index 0000000000000..87a0eb2329387 --- /dev/null +++ b/client/python/scripts/backend_ci.sh @@ -0,0 +1,13 @@ +#!/bin/bash + +cd "$(dirname ${0})/.." + +echo "Linting..." + +python -m black --check test gradio_client +python -m isort --profile=black --check-only test gradio_client +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 test gradio_client --exclude gradio_client/__init__.py + +echo "Testing..." + +python -m pytest test \ No newline at end of file diff --git a/client/python/test/test_utils.py b/client/python/test/test_utils.py index 73279b1e455e2..24a37062a3039 100644 --- a/client/python/test/test_utils.py +++ b/client/python/test/test_utils.py @@ -2,6 +2,7 @@ import tempfile from unittest.mock import MagicMock import json +from pathlib import Path import pytest from gradio import media_data @@ -11,13 +12,13 @@ def test_encode_url_or_file_to_base64(): output_base64 = utils.encode_url_or_file_to_base64( - "gradio/test_data/test_image.png" + Path(__file__).parent / "../../../gradio/test_data/test_image.png" ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) def test_encode_file_to_base64(): output_base64 = utils.encode_file_to_base64( - "gradio/test_data/test_image.png" + Path(__file__).parent / "../../../gradio/test_data/test_image.png" ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) From a60585e12e1049f9b628723843b991bd8d6bf701 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 05:18:02 -0700 Subject: [PATCH 63/93] formatting --- client/python/gradio_client/client.py | 3 ++- .../python/scripts/{backend_ci.sh => ci.sh} | 0 client/python/scripts/format.sh | 8 +++++++ client/python/test/test_client.py 
| 4 +++- client/python/test/test_utils.py | 23 ++++++++----------- 5 files changed, 23 insertions(+), 15 deletions(-) rename client/python/scripts/{backend_ci.sh => ci.sh} (100%) create mode 100644 client/python/scripts/format.sh diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 9888b1b1d60fe..a8493ad327d99 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -10,9 +10,10 @@ import requests import websockets +from packaging import version + from gradio_client import serializing, utils from gradio_client.serializing import Serializable -from packaging import version class Client: diff --git a/client/python/scripts/backend_ci.sh b/client/python/scripts/ci.sh similarity index 100% rename from client/python/scripts/backend_ci.sh rename to client/python/scripts/ci.sh diff --git a/client/python/scripts/format.sh b/client/python/scripts/format.sh new file mode 100644 index 0000000000000..a46d57d5ea573 --- /dev/null +++ b/client/python/scripts/format.sh @@ -0,0 +1,8 @@ +#!/bin/bash + +cd "$(dirname ${0})/.." + +echo "Formatting the backend... Our style follows the Black code style." 
+python -m black test gradio_client +python -m isort --profile=black test gradio_client +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 test gradio_client --exclude gradio/__init__.py client/python/gradio_client/__init__.py diff --git a/client/python/test/test_client.py b/client/python/test/test_client.py index c1685ae7aee4f..fff6b6ebe23d0 100644 --- a/client/python/test/test_client.py +++ b/client/python/test/test_client.py @@ -15,6 +15,8 @@ def test_numerical_to_label_space(self): @pytest.mark.flaky def test_private_space(self): access_token = "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" # Intentionally revealing this key for testing purposes - client = Client(space="gradio-tests/not-actually-private-space", access_token=access_token) + client = Client( + space="gradio-tests/not-actually-private-space", access_token=access_token + ) output = client.predict("abc").result() assert output == "abc" diff --git a/client/python/test/test_utils.py b/client/python/test/test_utils.py index 24a37062a3039..014a8acc3789e 100644 --- a/client/python/test/test_utils.py +++ b/client/python/test/test_utils.py @@ -1,8 +1,8 @@ -from copy import deepcopy -import tempfile -from unittest.mock import MagicMock import json +import tempfile +from copy import deepcopy from pathlib import Path +from unittest.mock import MagicMock import pytest from gradio import media_data @@ -16,12 +16,14 @@ def test_encode_url_or_file_to_base64(): ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + def test_encode_file_to_base64(): output_base64 = utils.encode_file_to_base64( Path(__file__).parent / "../../../gradio/test_data/test_image.png" ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + @pytest.mark.flaky def test_encode_url_to_base64(): output_base64 = utils.encode_url_to_base64( @@ -29,25 +31,21 @@ def test_encode_url_to_base64(): ) assert output_base64 == deepcopy(media_data.BASE64_IMAGE) + def test_decode_base64_to_binary(): - binary = 
utils.decode_base64_to_binary( - deepcopy(media_data.BASE64_IMAGE) - ) + binary = utils.decode_base64_to_binary(deepcopy(media_data.BASE64_IMAGE)) assert deepcopy(media_data.BINARY_IMAGE) == binary + def test_decode_base64_to_file(): - temp_file = utils.decode_base64_to_file( - deepcopy(media_data.BASE64_IMAGE) - ) + temp_file = utils.decode_base64_to_file(deepcopy(media_data.BASE64_IMAGE)) assert isinstance(temp_file, tempfile._TemporaryFileWrapper) def test_download_private_file(): url_path = "https://gradio-tests-not-actually-private-space.hf.space/file=lion.jpg" access_token = "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" # Intentionally revealing this key for testing purposes - file = utils.download_tmp_copy_of_file( - url_path=url_path, access_token=access_token - ) + file = utils.download_tmp_copy_of_file(url_path=url_path, access_token=access_token) assert file.name.endswith(".jpg") @@ -100,4 +98,3 @@ async def test_get_pred_from_ws_raises_if_queue_full(): hash_data = json.dumps({"session_hash": "daslskdf", "fn_index": "foo"}) with pytest.raises(utils.QueueError, match="Queue is full!"): await utils.get_pred_from_ws(mock_ws, data, hash_data) - From 17db4ec966a7bc7071e60accd28eb0762bedc5bd Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 05:28:33 -0700 Subject: [PATCH 64/93] address review comments --- client/python/gradio_client/client.py | 11 +++++++++-- client/python/gradio_client/version.txt | 2 +- client/python/scripts/upload_pypi.sh | 0 3 files changed, 10 insertions(+), 3 deletions(-) create mode 100644 client/python/scripts/upload_pypi.sh diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index a8493ad327d99..640557eafc5dd 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -3,6 +3,7 @@ import concurrent.futures import json +import pkgutil import re import uuid from concurrent.futures import Future @@ -10,6 +11,7 @@ import requests import 
websockets +from huggingface_hub.utils import build_hf_headers from packaging import version from gradio_client import serializing, utils @@ -25,8 +27,13 @@ def __init__( max_workers: int = 40, ): self.access_token = access_token - self.headers = ( - {"Authorization": f"Bearer {access_token}"} if access_token else {} + library_version = ( + (pkgutil.get_data(__name__, "version.txt") or b"").decode("ascii").strip() + ) + self.headers = build_hf_headers( + token=access_token, + library_name="gradio_client", + library_version=library_version, ) if space is None and src is None: diff --git a/client/python/gradio_client/version.txt b/client/python/gradio_client/version.txt index 8a9ecc2ea99d6..7bcd0e3612da7 100644 --- a/client/python/gradio_client/version.txt +++ b/client/python/gradio_client/version.txt @@ -1 +1 @@ -0.0.1 \ No newline at end of file +0.0.2 \ No newline at end of file diff --git a/client/python/scripts/upload_pypi.sh b/client/python/scripts/upload_pypi.sh new file mode 100644 index 0000000000000..e69de29bb2d1d From 4b7c9d4df478b43a6a9aba86d617c54652f00840 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 05:32:32 -0700 Subject: [PATCH 65/93] readme --- client/python/README.md | 79 +++++++++++++++++++++++---- client/python/gradio_client/client.py | 22 -------- 2 files changed, 67 insertions(+), 34 deletions(-) diff --git a/client/python/README.md b/client/python/README.md index ec4048d5526d6..c802a258ae8e2 100644 --- a/client/python/README.md +++ b/client/python/README.md @@ -1,6 +1,6 @@ # `gradio_client`: Use any Gradio app as an API -- in 3 lines of Python -This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API. This library is +This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API. This library is CURRENTLY IN ALPHA. APIS MAY CHANGE. 
WE DO NOT RECOMMEND USING IT EXCEPT IN EXPERIMENTAL PROJECTS. As an example, consider the Stable Diffusion Gradio app, which is hosted on Hugging Face Spaces, and which generates images given a text prompt. Using the `gradio_client` library, we can easily use the Gradio as an API to generates images programmatically. @@ -19,7 +19,9 @@ job.result() ## Installation -If you already have a recent version of `gradio`, then the `gradio_client` is included as a dependency. Otherwise, the lightweight `gradio_client` package can be installed from pip (or pip3) and works with Python versions 3.7 or higher: +If you already have a recent version of `gradio`, then the `gradio_client` is included as a dependency. + +Otherwise, the lightweight `gradio_client` package can be installed from pip (or pip3) and works with Python versions 3.7 or higher: ```bash $ pip install gradio_client @@ -29,22 +31,75 @@ $ pip install gradio_client ### Connecting to a Space or a Gradio app -1. Connecting to a Space -2. Connecting a general Gradio app +Start by connecting instantiating a `Client` object and connecting it to a Gradio app +that is running on Spaces (or anywhere else)! + +**Connecting to a Space** + +```python +import gradio_client as grc + +client = grc.Client(space="abidlabs/en2fr") +``` + +**Connecting a general Gradio app** -### Inspecting the API +If your app is running somewhere else, provide the full URL instead to the `src` argument. Here's an example of making predictions to a Gradio app that is running on a share URL: + +```python +import gradio_client as grc -1. Listing all of the available APIs -2. Getting more info about the parameters for a speciic API +client = grc.Client(src="btd372-js72hd.gradio.app") +``` ### Making a prediction -1. 
client.run +The simplest way to make a prediction is simply to call the `.predict()` function with the appropriate arguments and then immediately calling `.result()`, like this: + + +```python +import gradio_client as grc + +client = grc.Client(space="abidlabs/en2fr") + +client.predict("Hello").result() + +>> Bonjour +``` + +**Running jobs asyncronously** + +Oe should note that `.result()` is a *blocking* operation as it waits for the operation to complete before returning the prediction. + +In many cases, you may be better off letting the job run asynchronously and waiting to call `.result()` when you need the results of the prediction. For example: + + +```python +import gradio_client as grc + +client = grc.Client(space="abidlabs/en2fr") + +job = client.predict("Hello") + +# Do something else -### Submitting a job (for asynchronous worklows) +job.result() + +>> Bonjour +``` + +**Adding callbacks** + +Alternatively, one can add callbacks to perform actions after the job has completed running, like this: -1. job = client.submit -2. job.status -3. 
callbacks +```python +import gradio_client as grc + +def print_result(x): + print(x"The translated result is: {x}") +client = grc.Client(space="abidlabs/en2fr") + +job = client.predict("Hello", callbacks=[print_result]) +``` diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 640557eafc5dd..cad4bed623222 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -90,28 +90,6 @@ def fn(future): return job - def info(self, api_name: str | None = None) -> Dict: - if api_name: - fn_index = self._infer_fn_index(api_name) - dependency = self.config["dependencies"][fn_index] - return { - api_name: { - "input_parameters": ["(str) value"], - "output_values": ["(str) value"], - } - } - else: - api_info = {"named_endpoints": {}} - for dependency in self.config["dependencies"]: - if dependency.get("api_name") and dependency["backend_fn"]: - api_name = dependency["api_name"] - api_info["named_endpoints"] = self.info(api_name) - api_info["num_named_endpoints"] = len(api_info) # type: ignore - return api_info - - def pprint(self, api_name: str | None = None) -> None: - print(json.dumps(self.info(api_name), indent=2)) - ################################## # Private helper methods ################################## From ae2a7404c675bb01cfb15c6a08ea73d061e83ef4 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 05:33:19 -0700 Subject: [PATCH 66/93] serialize info --- gradio/components.py | 18 ------------------ 1 file changed, 18 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index 869ce3c5f1a84..5cc7c21c13a1e 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -157,12 +157,6 @@ def style( self.parent.variant = "compact" return self - def serialize_info(self): - return { - "input": "value", - "output": "value", - } - class IOComponent(Component): """ @@ -338,12 +332,6 @@ def __init__( self.test_input = value self.type = type - def 
serialize_info(self): - return { - "input": "(str) value", - "output": "(str) value", - } - def get_config(self): return { "lines": self.lines, @@ -1574,12 +1562,6 @@ def get_config(self): **IOComponent.get_config(self), } - def serialize_info(self): - return { - "input": "(str) filepath or URL to image", - "output": "(str) filepath or URL to image", - } - @staticmethod def update( value: Any | Literal[_Keywords.NO_VALUE] | None = _Keywords.NO_VALUE, From 7c27d7a67dfbff2958984bf1435427933353fbe3 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 05:57:06 -0700 Subject: [PATCH 67/93] remove from changelog --- CHANGELOG.md | 2 -- 1 file changed, 2 deletions(-) diff --git a/CHANGELOG.md b/CHANGELOG.md index eac066da347b1..151f05578c6b6 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -76,8 +76,6 @@ with gr.Blocks() as demo: By [@aliabid94](https://github.com/aliabid94) in [PR 3399](https://github.com/gradio-app/gradio/pull/3399) -- Adds a new lightweight library `gradio_client` which can be used to make predictions via API to -hosted Gradio apps. See `client/python/README.md` for more info. By [@abidlabs](https://github.com/abidlabs) in [PR 3300](https://github.com/gradio-app/gradio/pull/3300) - The `Textbox` component now includes a copy button by [@abidlabs](https://github.com/abidlabs) in [PR 3452](https://github.com/gradio-app/gradio/pull/3452) From fd6692474980a7d194f2d20be20c63a1d0769d95 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 06:10:49 -0700 Subject: [PATCH 68/93] version 0.0.2 released --- client/python/scripts/upload_pypi.sh | 7 +++++++ 1 file changed, 7 insertions(+) diff --git a/client/python/scripts/upload_pypi.sh b/client/python/scripts/upload_pypi.sh index e69de29bb2d1d..403fe8cd0daa3 100644 --- a/client/python/scripts/upload_pypi.sh +++ b/client/python/scripts/upload_pypi.sh @@ -0,0 +1,7 @@ +#!/bin/bash + +cd "$(dirname ${0})/.." 
+ +python -m pip install build twine +python -m build +twine upload dist/* \ No newline at end of file From ce8d66f9a8198fc7c191c33b3282231d4092c98c Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 16 Mar 2023 06:11:49 -0700 Subject: [PATCH 69/93] lint --- client/python/gradio_client/client.py | 5 ++--- 1 file changed, 2 insertions(+), 3 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index cad4bed623222..b48232ee74611 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,11 +11,10 @@ import requests import websockets -from huggingface_hub.utils import build_hf_headers -from packaging import version - from gradio_client import serializing, utils from gradio_client.serializing import Serializable +from huggingface_hub.utils import build_hf_headers +from packaging import version class Client: From 83fa33ef13ad9db4a1dfb842faaf851687730f7f Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 14:09:25 -0700 Subject: [PATCH 70/93] type fix --- gradio/components.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradio/components.py b/gradio/components.py index 49f980233ca32..017259be4bae8 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -315,7 +315,7 @@ def base64_to_temp_file_if_needed( full_temp_file_path = str(utils.abspath(temp_dir / f.name)) if not Path(full_temp_file_path).exists(): - data, _ = processing_utils.decode_base64_to_binary(base64_encoding) + data, _ = client_utils.decode_base64_to_binary(base64_encoding) with open(full_temp_file_path, "wb") as fb: fb.write(data) From 36eecd0313d90be556f226f1b08c1c7854fbe34e Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 16:49:30 -0700 Subject: [PATCH 71/93] check --- gradio/components.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/gradio/components.py b/gradio/components.py index 017259be4bae8..437d9e268908c 100644 --- 
a/gradio/components.py +++ b/gradio/components.py @@ -312,7 +312,7 @@ def base64_to_temp_file_if_needed( file_name = "file" f = tempfile.NamedTemporaryFile(delete=False, dir=temp_dir) f.name = file_name - full_temp_file_path = str(utils.abspath(temp_dir / f.name)) + full_temp_file_path = str(utils.abspath(temp_dir / file_name)) if not Path(full_temp_file_path).exists(): data, _ = client_utils.decode_base64_to_binary(base64_encoding) From 02d6f727a31d33e939c6de091d491ec6ab9d52cc Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 16:54:09 -0700 Subject: [PATCH 72/93] type issues --- gradio/components.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index 437d9e268908c..7d806d1ed4850 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -311,8 +311,8 @@ def base64_to_temp_file_if_needed( else: file_name = "file" f = tempfile.NamedTemporaryFile(delete=False, dir=temp_dir) - f.name = file_name - full_temp_file_path = str(utils.abspath(temp_dir / file_name)) + f.name = file_name # type: ignore + full_temp_file_path = str(utils.abspath(temp_dir / file_name)) # type: ignore if not Path(full_temp_file_path).exists(): data, _ = client_utils.decode_base64_to_binary(base64_encoding) From 78caf50d57ad2b0e81555653eefcdcf2c50524aa Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 16:55:26 -0700 Subject: [PATCH 73/93] hf_token --- gradio/components.py | 4 +++- gradio/processing_utils.py | 1 + test/test_components.py | 10 +++++++--- 3 files changed, 11 insertions(+), 4 deletions(-) diff --git a/gradio/components.py b/gradio/components.py index 7d806d1ed4850..fb5b87db9fd93 100644 --- a/gradio/components.py +++ b/gradio/components.py @@ -262,7 +262,9 @@ async def save_uploaded_file(self, file: UploadFile, upload_dir: str) -> str: if file.filename: file_name = Path(file.filename).name - output_file_obj.name = client_utils.strip_invalid_filename_characters(file_name) + 
output_file_obj.name = client_utils.strip_invalid_filename_characters( + file_name + ) full_temp_file_path = str(utils.abspath(temp_dir / output_file_obj.name)) diff --git a/gradio/processing_utils.py b/gradio/processing_utils.py index 0ea77a5599e4e..ad7d027a9bb3f 100644 --- a/gradio/processing_utils.py +++ b/gradio/processing_utils.py @@ -202,6 +202,7 @@ def convert_to_16_bit_wav(data): # OUTPUT ################## + def _convert(image, dtype, force_copy=False, uniform=False): """ Adapted from: https://github.com/scikit-image/scikit-image/blob/main/skimage/util/dtype.py#L510-L531 diff --git a/test/test_components.py b/test/test_components.py index f1539e6b2bc66..bfee1c769b7f5 100644 --- a/test/test_components.py +++ b/test/test_components.py @@ -2597,11 +2597,15 @@ def test_component_functions(self): assert code.preprocess("# hello friends") == "# hello friends" assert code.preprocess("def fn(a):\n return a") == "def fn(a):\n return a" - assert code.postprocess( - """ + assert ( + code.postprocess( + """ def fn(a): return a - """) == "def fn(a):\n return a" + """ + ) + == "def fn(a):\n return a" + ) test_file_dir = Path(Path(__file__).parent, "test_files") path = str(Path(test_file_dir, "test_label_json.json")) From 5ac8870aed47bec70dc4b23ea02703fa893c2603 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 16:57:43 -0700 Subject: [PATCH 74/93] update hf token --- client/python/gradio_client/client.py | 12 +++++------ client/python/gradio_client/serializing.py | 24 +++++++++++----------- client/python/gradio_client/utils.py | 4 ++-- client/python/test/test_client.py | 4 ++-- client/python/test/test_utils.py | 4 ++-- 5 files changed, 24 insertions(+), 24 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index b48232ee74611..4067dca19a826 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -22,15 +22,15 @@ def __init__( self, space: str | None = None, 
src: str | None = None, - access_token: str | None = None, + hf_token: str | None = None, max_workers: int = 40, ): - self.access_token = access_token + self.hf_token = hf_token library_version = ( (pkgutil.get_data(__name__, "version.txt") or b"").decode("ascii").strip() ) self.headers = build_hf_headers( - token=access_token, + token=hf_token, library_name="gradio_client", library_version=library_version, ) @@ -42,7 +42,7 @@ def __init__( self.src = src or self._space_name_to_src(space) if self.src is None: raise ValueError( - f"Could not find Space: {space}. If it is a private Space, please provide an access_token." + f"Could not find Space: {space}. If it is a private Space, please provide an hf_token." ) else: print(f"Loaded as API: {self.src} ✔") @@ -140,7 +140,7 @@ def __init__(self, client: Client, fn_index: int, dependency: Dict): self.headers = client.headers self.config = client.config self.use_ws = self._use_websocket(self.dependency) - self.access_token = client.access_token + self.hf_token = client.hf_token try: self.serializers, self.deserializers = self._setup_serializers() self.is_valid = self.dependency[ @@ -201,7 +201,7 @@ def deserialize(self, *data) -> Tuple: ), f"Expected {len(self.deserializers)} outputs, got {len(data)}" return tuple( [ - s.deserialize(d, access_token=self.access_token) + s.deserialize(d, hf_token=self.hf_token) for s, d in zip(self.deserializers, data) ] ) diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 1741f1bee2140..8f1d1e85436bd 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -24,7 +24,7 @@ def deserialize( x: Any, save_dir: str | Path | None = None, root_url: str | None = None, - access_token: str | None = None, + hf_token: str | None = None, ): """ Convert data from serialized format for a browser to human-readable format. 
@@ -47,7 +47,7 @@ def deserialize( x: Any, save_dir: str | Path | None = None, root_url: str | None = None, - access_token: str | None = None, + hf_token: str | None = None, ): """ Convert data from serialized format to human-readable format. For SimpleSerializable components, this is a no-op. @@ -55,7 +55,7 @@ def deserialize( x: Input data to deserialize save_dir: Ignored root_url: Ignored - access_token: Ignored + hf_token: Ignored """ return x @@ -84,7 +84,7 @@ def deserialize( x: str | None, save_dir: str | Path | None = None, root_url: str | None = None, - access_token: str | None = None, + hf_token: str | None = None, ) -> str | None: """ Convert from serialized representation of a file (base64) to a human-friendly @@ -93,7 +93,7 @@ def deserialize( x: Base64 representation of image to deserialize into a string filepath save_dir: Path to directory to save the deserialized image to root_url: Ignored - access_token: Ignored + hf_token: Ignored """ if x is None or x == "": return None @@ -129,7 +129,7 @@ def deserialize( x: str | Dict | None, save_dir: Path | str | None = None, root_url: str | None = None, - access_token: str | None = None, + hf_token: str | None = None, ) -> str | None: """ Convert from serialized representation of a file (base64) to a human-friendly @@ -138,7 +138,7 @@ def deserialize( x: Base64 representation of file to deserialize into a string filepath save_dir: Path to directory to save the deserialized file to root_url: If this component is loaded from an external Space, this is the URL of the Space - access_token: If this component is loaded from an external private Space, this is the access token for the Space + hf_token: If this component is loaded from an external private Space, this is the access token for the Space """ if x is None: return None @@ -151,7 +151,7 @@ def deserialize( if root_url is not None: file_name = utils.download_tmp_copy_of_file( root_url + "file=" + x["name"], - access_token=access_token, + hf_token=hf_token, 
dir=save_dir, ).name else: @@ -189,7 +189,7 @@ def deserialize( x: str | Dict, save_dir: str | Path | None = None, root_url: str | None = None, - access_token: str | None = None, + hf_token: str | None = None, ) -> str | None: """ Convert from serialized representation (json string) to a human-friendly @@ -198,7 +198,7 @@ def deserialize( x: Json string save_dir: Path to save the deserialized json file to root_url: Ignored - access_token: Ignored + hf_token: Ignored """ if x is None: return None @@ -225,7 +225,7 @@ def deserialize( x: Any, save_dir: str = "", root_url: str | None = None, - access_token: str | None = None, + hf_token: str | None = None, ) -> None | str: if x is None: return None @@ -238,7 +238,7 @@ def deserialize( else: caption = None name = FileSerializable().deserialize( - img_data, gallery_path, root_url=root_url, access_token=access_token + img_data, gallery_path, root_url=root_url, hf_token=hf_token ) captions[name] = caption captions_file = gallery_path / "captions.json" diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index 6bb83bf9c7c26..38fc0b25a2fb7 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -75,11 +75,11 @@ async def get_pred_from_ws( def download_tmp_copy_of_file( - url_path: str, access_token: str | None = None, dir: str | None = None + url_path: str, hf_token: str | None = None, dir: str | None = None ) -> tempfile._TemporaryFileWrapper: if dir is not None: os.makedirs(dir, exist_ok=True) - headers = {"Authorization": "Bearer " + access_token} if access_token else {} + headers = {"Authorization": "Bearer " + hf_token} if hf_token else {} prefix = Path(url_path).stem suffix = Path(url_path).suffix file_obj = tempfile.NamedTemporaryFile( diff --git a/client/python/test/test_client.py b/client/python/test/test_client.py index fff6b6ebe23d0..cdc028cfcaee1 100644 --- a/client/python/test/test_client.py +++ b/client/python/test/test_client.py @@ 
-14,9 +14,9 @@ def test_numerical_to_label_space(self): @pytest.mark.flaky def test_private_space(self): - access_token = "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" # Intentionally revealing this key for testing purposes + hf_token = "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" # Intentionally revealing this key for testing purposes client = Client( - space="gradio-tests/not-actually-private-space", access_token=access_token + space="gradio-tests/not-actually-private-space", hf_token=hf_token ) output = client.predict("abc").result() assert output == "abc" diff --git a/client/python/test/test_utils.py b/client/python/test/test_utils.py index 014a8acc3789e..5dca48b0f3ed9 100644 --- a/client/python/test/test_utils.py +++ b/client/python/test/test_utils.py @@ -44,8 +44,8 @@ def test_decode_base64_to_file(): def test_download_private_file(): url_path = "https://gradio-tests-not-actually-private-space.hf.space/file=lion.jpg" - access_token = "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" # Intentionally revealing this key for testing purposes - file = utils.download_tmp_copy_of_file(url_path=url_path, access_token=access_token) + hf_token = "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" # Intentionally revealing this key for testing purposes + file = utils.download_tmp_copy_of_file(url_path=url_path, hf_token=hf_token) assert file.name.endswith(".jpg") From 566ff8fc30b492300131e25c8e3d1f04692cc45c Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 17:24:17 -0700 Subject: [PATCH 75/93] telemetry --- client/python/gradio_client/client.py | 31 ++++++++++++++++++++------- 1 file changed, 23 insertions(+), 8 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 4067dca19a826..33c8eb659a879 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -3,17 +3,17 @@ import concurrent.futures import json -import pkgutil import re import uuid from concurrent.futures import 
Future +import threading from typing import Any, Callable, Dict, List, Tuple import requests import websockets -from gradio_client import serializing, utils +from gradio_client import serializing, utils, __version__ from gradio_client.serializing import Serializable -from huggingface_hub.utils import build_hf_headers +from huggingface_hub.utils import build_hf_headers, send_telemetry from packaging import version @@ -26,13 +26,10 @@ def __init__( max_workers: int = 40, ): self.hf_token = hf_token - library_version = ( - (pkgutil.get_data(__name__, "version.txt") or b"").decode("ascii").strip() - ) self.headers = build_hf_headers( token=hf_token, library_name="gradio_client", - library_version=library_version, + library_version=__version__, ) if space is None and src is None: @@ -58,7 +55,10 @@ def __init__( # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - + + # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1 + threading.Thread(target=self._telemetry_thread).start() + def predict( self, *args, @@ -92,6 +92,21 @@ def fn(future): ################################## # Private helper methods ################################## + + def _telemetry_thread(self) -> None: + # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1 + data = { + "src": self.src, + } + try: + send_telemetry( + topic="py_client/initiated", + library_name="gradio_client", + library_version=__version__, + user_agent=data, + ) + except Exception: + pass def _infer_fn_index(self, api_name: str) -> int: for i, d in enumerate(self.config["dependencies"]): From d7df04cea65f15442826379b3e02dc1682d89839 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 17:38:28 -0700 Subject: [PATCH 76/93] docs, circle dependency --- client/python/gradio_client/__init__.py | 5 +---- client/python/gradio_client/client.py | 6 +++--- client/python/gradio_client/utils.py | 3 +++ 
gradio/external.py | 7 ++++++- 4 files changed, 13 insertions(+), 8 deletions(-) diff --git a/client/python/gradio_client/__init__.py b/client/python/gradio_client/__init__.py index 3f51421edef3e..fcf00b0e45fad 100644 --- a/client/python/gradio_client/__init__.py +++ b/client/python/gradio_client/__init__.py @@ -1,5 +1,2 @@ -import pkgutil - from gradio_client.client import Client - -__version__ = (pkgutil.get_data(__name__, "version.txt") or b"").decode("ascii").strip() +from gradio_client.utils import __version__ \ No newline at end of file diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 33c8eb659a879..20f228b20446e 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,7 +11,7 @@ import requests import websockets -from gradio_client import serializing, utils, __version__ +from gradio_client import serializing, utils from gradio_client.serializing import Serializable from huggingface_hub.utils import build_hf_headers, send_telemetry from packaging import version @@ -29,7 +29,7 @@ def __init__( self.headers = build_hf_headers( token=hf_token, library_name="gradio_client", - library_version=__version__, + library_version=utils.__version__, ) if space is None and src is None: @@ -102,7 +102,7 @@ def _telemetry_thread(self) -> None: send_telemetry( topic="py_client/initiated", library_name="gradio_client", - library_version=__version__, + library_version=utils.__version__, user_agent=data, ) except Exception: diff --git a/client/python/gradio_client/utils.py b/client/python/gradio_client/utils.py index 38fc0b25a2fb7..423009fcf6e2a 100644 --- a/client/python/gradio_client/utils.py +++ b/client/python/gradio_client/utils.py @@ -4,6 +4,7 @@ import json import mimetypes import os +import pkgutil import shutil import tempfile from pathlib import Path @@ -16,6 +17,8 @@ API_URL = "{}/api/predict/" WS_URL = "{}/queue/join" +__version__ = (pkgutil.get_data(__name__, "version.txt") 
or b"").decode("ascii").strip() + class TooManyRequestsError(Exception): """Raised when the API returns a 429 status code.""" diff --git a/gradio/external.py b/gradio/external.py index 64c23ce725daf..0cef78e30f5d5 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -14,6 +14,7 @@ import gradio from gradio import components, utils from gradio.context import Context +from gradio.documentation import document, set_documentation_group from gradio.exceptions import Error, TooManyRequestsError from gradio.external_utils import ( cols_to_rows, @@ -30,6 +31,10 @@ from gradio.interface import Interface +set_documentation_group("helpers") + + +@document() def load( name: str, src: str | None = None, @@ -38,7 +43,7 @@ def load( **kwargs, ) -> Blocks: """ - Class method that constructs an Interface from a Hugging Face repo. Can accept + Class method that constructs a Blocks from a Hugging Face repo. Can accept model repos (if src is "models") or Space repos (if src is "spaces"). The input and output components are automatically loaded from the repo. Parameters: From 47aa8582d388aa44b2c433ab98ebd5828a2ff4ce Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 17:41:35 -0700 Subject: [PATCH 77/93] hf token --- gradio/external.py | 15 +++++++++------ 1 file changed, 9 insertions(+), 6 deletions(-) diff --git a/gradio/external.py b/gradio/external.py index 0cef78e30f5d5..98c35f341b17b 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -39,6 +39,7 @@ def load( name: str, src: str | None = None, api_key: str | None = None, + hf_token: str | None = None, alias: str | None = None, **kwargs, ) -> Blocks: @@ -49,19 +50,21 @@ def load( Parameters: name: the name of the model (e.g. "gpt2" or "facebook/bart-base") or space (e.g. "flax-community/spanish-gpt2"), can include the `src` as prefix (e.g. 
"models/facebook/bart-base") src: the source of the model: `models` or `spaces` (or leave empty if source is provided as a prefix in `name`) - api_key: optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens + api_key: Deprecated. Please use the `hf_token` parameter instead. + hf_token: optional access token for loading private Hugging Face Hub models or spaces. Find your token here: https://huggingface.co/settings/tokens alias: optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x) Returns: - a Gradio Interface object for the given model + a Gradio Blocks object for the given model Example: import gradio as gr - description = "Story generation with GPT" - examples = [["An adventurer is approached by a mysterious stranger in the tavern for a new quest."]] - demo = gr.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) + demo = gr.load("models/EleutherAI/gpt-neo-1.3B") demo.launch() """ + if hf_token is None and api_key: + warnings.warn("The `api_key` parameter will be deprecated. 
Please use the `hf_token` parameter going forward.") + hf_token = api_key return load_blocks_from_repo( - name=name, src=src, api_key=api_key, alias=alias, **kwargs + name=name, src=src, api_key=hf_token, alias=alias, **kwargs ) From 3c2f2866ab8bd487410c5eea11ae295fa7c08587 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 17:44:03 -0700 Subject: [PATCH 78/93] formatting --- client/python/gradio_client/__init__.py | 2 +- client/python/gradio_client/client.py | 10 +++++----- gradio/external.py | 4 +++- 3 files changed, 9 insertions(+), 7 deletions(-) diff --git a/client/python/gradio_client/__init__.py b/client/python/gradio_client/__init__.py index fcf00b0e45fad..e0e1c94e23ff8 100644 --- a/client/python/gradio_client/__init__.py +++ b/client/python/gradio_client/__init__.py @@ -1,2 +1,2 @@ from gradio_client.client import Client -from gradio_client.utils import __version__ \ No newline at end of file +from gradio_client.utils import __version__ diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 20f228b20446e..f3321eef850c5 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -4,9 +4,9 @@ import concurrent.futures import json import re +import threading import uuid from concurrent.futures import Future -import threading from typing import Any, Callable, Dict, List, Tuple import requests @@ -55,10 +55,10 @@ def __init__( # Create a pool of threads to handle the requests self.executor = concurrent.futures.ThreadPoolExecutor(max_workers=max_workers) - + # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1 threading.Thread(target=self._telemetry_thread).start() - + def predict( self, *args, @@ -92,7 +92,7 @@ def fn(future): ################################## # Private helper methods ################################## - + def _telemetry_thread(self) -> None: # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1 data = { 
@@ -106,7 +106,7 @@ def _telemetry_thread(self) -> None: user_agent=data, ) except Exception: - pass + pass def _infer_fn_index(self, api_name: str) -> int: for i, d in enumerate(self.config["dependencies"]): diff --git a/gradio/external.py b/gradio/external.py index 98c35f341b17b..9c009faa0c7ed 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -61,7 +61,9 @@ def load( demo.launch() """ if hf_token is None and api_key: - warnings.warn("The `api_key` parameter will be deprecated. Please use the `hf_token` parameter going forward.") + warnings.warn( + "The `api_key` parameter will be deprecated. Please use the `hf_token` parameter going forward." + ) hf_token = api_key return load_blocks_from_repo( name=name, src=src, api_key=hf_token, alias=alias, **kwargs From 8f3fcd6629682398eecf41dfc7c3a6b1215b3c20 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 17:46:07 -0700 Subject: [PATCH 79/93] updates --- client/python/gradio_client/serializing.py | 2 +- client/python/gradio_client/version.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/client/python/gradio_client/serializing.py b/client/python/gradio_client/serializing.py index 8f1d1e85436bd..6c8814e58cbd3 100644 --- a/client/python/gradio_client/serializing.py +++ b/client/python/gradio_client/serializing.py @@ -271,7 +271,7 @@ def deserialize( "highlightedtext": JSONSerializable, "json": JSONSerializable, "html": SimpleSerializable, - "gallery": GallerySerializable, # TODO: Make this a proper Serializable class + "gallery": GallerySerializable, "chatbot": JSONSerializable, "model3d": FileSerializable, "plot": JSONSerializable, diff --git a/client/python/gradio_client/version.txt b/client/python/gradio_client/version.txt index 7bcd0e3612da7..6812f8122ef3f 100644 --- a/client/python/gradio_client/version.txt +++ b/client/python/gradio_client/version.txt @@ -1 +1 @@ -0.0.2 \ No newline at end of file +0.0.3 \ No newline at end of file From 
e21158a3234759f0cc38f8076983436a3a44904a Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 17:48:04 -0700 Subject: [PATCH 80/93] sort --- client/python/gradio_client/client.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index f3321eef850c5..9dfed76e94c23 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,11 +11,12 @@ import requests import websockets -from gradio_client import serializing, utils -from gradio_client.serializing import Serializable from huggingface_hub.utils import build_hf_headers, send_telemetry from packaging import version +from gradio_client import serializing, utils +from gradio_client.serializing import Serializable + class Client: def __init__( From cc35b572dfd175d28d1a5bc1a88551513c512e3e Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 17:52:22 -0700 Subject: [PATCH 81/93] script --- client/python/scripts/upload_pypi.sh | 1 + 1 file changed, 1 insertion(+) diff --git a/client/python/scripts/upload_pypi.sh b/client/python/scripts/upload_pypi.sh index 403fe8cd0daa3..02b7ea57ca147 100644 --- a/client/python/scripts/upload_pypi.sh +++ b/client/python/scripts/upload_pypi.sh @@ -3,5 +3,6 @@ cd "$(dirname ${0})/.." python -m pip install build twine +rm -rf dist/* python -m build twine upload dist/* \ No newline at end of file From 76df75e028fbba3103eb0ad52a9768299528dd40 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:00:07 -0700 Subject: [PATCH 82/93] external --- gradio/external.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/gradio/external.py b/gradio/external.py index 9c009faa0c7ed..32185d867396a 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -44,7 +44,7 @@ def load( **kwargs, ) -> Blocks: """ - Class method that constructs a Blocks from a Hugging Face repo. 
Can accept + Method that constructs a Blocks from a Hugging Face repo. Can accept model repos (if src is "models") or Space repos (if src is "spaces"). The input and output components are automatically loaded from the repo. Parameters: @@ -57,7 +57,7 @@ def load( a Gradio Blocks object for the given model Example: import gradio as gr - demo = gr.load("models/EleutherAI/gpt-neo-1.3B") + demo = gr.load("gradio/question-answering", src="spaces") demo.launch() """ if hf_token is None and api_key: From 6a87b70c686545bc2e20c3001b70a2cf14a9c9f4 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:04:52 -0700 Subject: [PATCH 83/93] docs --- gradio/blocks.py | 4 ++-- gradio/interface.py | 15 +++++---------- 2 files changed, 7 insertions(+), 12 deletions(-) diff --git a/gradio/blocks.py b/gradio/blocks.py index 1f831542f818d..f314489bd2480 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -1192,7 +1192,7 @@ def load( method, the two of which, confusingly, do two completely different things. - Class method: loads a demo from a Hugging Face Spaces repo and creates it locally and returns a block instance. Equivalent to gradio.load() + Class method: loads a demo from a Hugging Face Spaces repo and creates it locally and returns a block instance. Warning: this method will be deprecated. Use the equivalent `gradio.load()` instead. Instance method: adds event that runs as soon as the demo loads in the browser. Example usage below. 
@@ -1230,7 +1230,7 @@ def get_time(): raise ValueError( "Blocks.load() requires passing parameters as keyword arguments" ) - return external.load_blocks_from_repo(name, src, api_key, alias, **kwargs) + return external.load(name=name, src=src, hf_token=api_key, alias=alias, **kwargs) else: return self_or_cls.set_event_trigger( event_name="load", diff --git a/gradio/interface.py b/gradio/interface.py index d997c0fc2aa44..4c5346ceffdcf 100644 --- a/gradio/interface.py +++ b/gradio/interface.py @@ -13,7 +13,7 @@ import weakref from typing import TYPE_CHECKING, Any, Callable, List, Tuple -from gradio import Examples, interpretation, utils +from gradio import Examples, external, interpretation, utils from gradio.blocks import Blocks from gradio.components import ( Button, @@ -77,9 +77,10 @@ def load( api_key: str | None = None, alias: str | None = None, **kwargs, - ) -> Interface: + ) -> Blocks: """ - Class method that constructs an Interface from a Hugging Face repo. Can accept + Warning: this method will be deprecated. Use the equivalent `gradio.load()` instead. This is a class + method that constructs a Blocks from a Hugging Face repo. Can accept model repos (if src is "models") or Space repos (if src is "spaces"). The input and output components are automatically loaded from the repo. Parameters: @@ -89,15 +90,9 @@ def load( alias: optional string used as the name of the loaded model instead of the default name (only applies if loading a Space running Gradio 2.x) Returns: a Gradio Interface object for the given model - Example: - import gradio as gr - description = "Story generation with GPT" - examples = [["An adventurer is approached by a mysterious stranger in the tavern for a new quest."]] - demo = gr.load("models/EleutherAI/gpt-neo-1.3B", description=description, examples=examples) - demo.launch() """ warnings.warn("gr.Intrerface.load() will be deprecated. 
Use gr.load() instead.") - return super().load(name=name, src=src, api_key=api_key, alias=alias, **kwargs) + return external.load(name=name, src=src, hf_token=api_key, alias=alias, **kwargs) @classmethod def from_pipeline(cls, pipeline: Pipeline, **kwargs) -> Interface: From 96247fed54e2e10384dd5472bdeb59ba2cde75f8 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:09:28 -0700 Subject: [PATCH 84/93] formatting --- client/python/gradio_client/client.py | 5 ++--- gradio/blocks.py | 4 +++- gradio/interface.py | 6 ++++-- 3 files changed, 9 insertions(+), 6 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 9dfed76e94c23..f3321eef850c5 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,11 +11,10 @@ import requests import websockets -from huggingface_hub.utils import build_hf_headers, send_telemetry -from packaging import version - from gradio_client import serializing, utils from gradio_client.serializing import Serializable +from huggingface_hub.utils import build_hf_headers, send_telemetry +from packaging import version class Client: diff --git a/gradio/blocks.py b/gradio/blocks.py index f314489bd2480..c4add889537f4 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -1230,7 +1230,9 @@ def get_time(): raise ValueError( "Blocks.load() requires passing parameters as keyword arguments" ) - return external.load(name=name, src=src, hf_token=api_key, alias=alias, **kwargs) + return external.load( + name=name, src=src, hf_token=api_key, alias=alias, **kwargs + ) else: return self_or_cls.set_event_trigger( event_name="load", diff --git a/gradio/interface.py b/gradio/interface.py index 4c5346ceffdcf..e2a4a98023823 100644 --- a/gradio/interface.py +++ b/gradio/interface.py @@ -79,7 +79,7 @@ def load( **kwargs, ) -> Blocks: """ - Warning: this method will be deprecated. Use the equivalent `gradio.load()` instead. 
This is a class + Warning: this method will be deprecated. Use the equivalent `gradio.load()` instead. This is a class method that constructs a Blocks from a Hugging Face repo. Can accept model repos (if src is "models") or Space repos (if src is "spaces"). The input and output components are automatically loaded from the repo. @@ -92,7 +92,9 @@ def load( a Gradio Interface object for the given model """ warnings.warn("gr.Intrerface.load() will be deprecated. Use gr.load() instead.") - return external.load(name=name, src=src, hf_token=api_key, alias=alias, **kwargs) + return external.load( + name=name, src=src, hf_token=api_key, alias=alias, **kwargs + ) @classmethod def from_pipeline(cls, pipeline: Pipeline, **kwargs) -> Interface: From 66872db6bcb5f6cf9c1defb696453ec7846c02c7 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:24:49 -0700 Subject: [PATCH 85/93] fixes --- client/python/gradio_client/client.py | 5 +++-- client/python/gradio_client/version.txt | 2 +- 2 files changed, 4 insertions(+), 3 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index f3321eef850c5..9dfed76e94c23 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,11 +11,12 @@ import requests import websockets -from gradio_client import serializing, utils -from gradio_client.serializing import Serializable from huggingface_hub.utils import build_hf_headers, send_telemetry from packaging import version +from gradio_client import serializing, utils +from gradio_client.serializing import Serializable + class Client: def __init__( diff --git a/client/python/gradio_client/version.txt b/client/python/gradio_client/version.txt index 6812f8122ef3f..05b19b1f76ec5 100644 --- a/client/python/gradio_client/version.txt +++ b/client/python/gradio_client/version.txt @@ -1 +1 @@ -0.0.3 \ No newline at end of file +0.0.4 \ No newline at end of file From 
dfb468eb36a2bfd51eee14ab8417db456204c45f Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:27:20 -0700 Subject: [PATCH 86/93] scripts --- client/python/scripts/ci.sh | 2 -- 1 file changed, 2 deletions(-) diff --git a/client/python/scripts/ci.sh b/client/python/scripts/ci.sh index 87a0eb2329387..13541e8a3fb5c 100644 --- a/client/python/scripts/ci.sh +++ b/client/python/scripts/ci.sh @@ -3,11 +3,9 @@ cd "$(dirname ${0})/.." echo "Linting..." - python -m black --check test gradio_client python -m isort --profile=black --check-only test gradio_client python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 test gradio_client --exclude gradio_client/__init__.py echo "Testing..." - python -m pytest test \ No newline at end of file From 1f018cfc3195e31400ce294c5fc53e2594f47b56 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:37:19 -0700 Subject: [PATCH 87/93] requirements --- client/python/requirements.txt | 3 ++- requirements.txt | 2 +- 2 files changed, 3 insertions(+), 2 deletions(-) diff --git a/client/python/requirements.txt b/client/python/requirements.txt index 50d8962759dc7..eaad6afbd2e12 100644 --- a/client/python/requirements.txt +++ b/client/python/requirements.txt @@ -1,4 +1,5 @@ requests websockets packaging -fsspec \ No newline at end of file +fsspec +huggingface_hub diff --git a/requirements.txt b/requirements.txt index f1647504e3460..9a71de09051ea 100644 --- a/requirements.txt +++ b/requirements.txt @@ -3,7 +3,7 @@ aiohttp altair>=4.2.0 fastapi ffmpy -gradio_client +gradio_client>=0.0.4 httpx huggingface_hub>=0.13.0 Jinja2 From bce41b3d371c7415c462219037f4ff8b6f083fa7 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:49:19 -0700 Subject: [PATCH 88/93] fix tests --- client/python/gradio_client/client.py | 5 ++--- gradio/blocks.py | 2 +- gradio/external.py | 6 +++--- gradio/routes.py | 4 ++-- test/test_external.py | 2 +- 5 files changed, 9 insertions(+), 10 deletions(-) diff --git 
a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 9dfed76e94c23..f3321eef850c5 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,11 +11,10 @@ import requests import websockets -from huggingface_hub.utils import build_hf_headers, send_telemetry -from packaging import version - from gradio_client import serializing, utils from gradio_client.serializing import Serializable +from huggingface_hub.utils import build_hf_headers, send_telemetry +from packaging import version class Client: diff --git a/gradio/blocks.py b/gradio/blocks.py index c4add889537f4..d4ff533b64c0b 100644 --- a/gradio/blocks.py +++ b/gradio/blocks.py @@ -942,7 +942,7 @@ def deserialize_data(self, fn_index: int, outputs: List[Any]) -> List[Any]: block, components.IOComponent ), f"{block.__class__} Component with id {output_id} not a valid output component." deserialized = block.deserialize( - outputs[o], root_url=block.root_url, access_token=Context.access_token + outputs[o], root_url=block.root_url, hf_token=Context.hf_token ) predictions.append(deserialized) diff --git a/gradio/external.py b/gradio/external.py index 32185d867396a..2f1a12536214b 100644 --- a/gradio/external.py +++ b/gradio/external.py @@ -98,11 +98,11 @@ def load_blocks_from_repo( ) if api_key is not None: - if Context.access_token is not None and Context.access_token != api_key: + if Context.hf_token is not None and Context.hf_token != api_key: warnings.warn( """You are loading a model/Space with a different access token than the one you used to load a previous model/Space. 
This is not recommended, as it may cause unexpected behavior.""" ) - Context.access_token = api_key + Context.hf_token = api_key blocks: gradio.Blocks = factory_methods[src](name, api_key, alias, **kwargs) return blocks @@ -452,7 +452,7 @@ def from_spaces( def from_spaces_blocks(space: str, api_key: str | None) -> Blocks: - client = Client(space=space, access_token=api_key) + client = Client(space=space, hf_token=api_key) predict_fns = [endpoint._predict_resolve for endpoint in client.endpoints] return gradio.Blocks.from_config(client.config, predict_fns, client.src) diff --git a/gradio/routes.py b/gradio/routes.py index b27cb8f9017d9..3a017983c5694 100644 --- a/gradio/routes.py +++ b/gradio/routes.py @@ -282,8 +282,8 @@ async def reverse_proxy(url_path: str): # Adapted from: https://github.com/tiangolo/fastapi/issues/1788 url = httpx.URL(url_path) headers = {} - if Context.access_token is not None: - headers["Authorization"] = f"Bearer {Context.access_token}" + if Context.hf_token is not None: + headers["Authorization"] = f"Bearer {Context.hf_token}" rp_req = client.build_request("GET", url, headers=headers) rp_resp = await client.send(rp_req, stream=True) return StreamingResponse( diff --git a/test/test_external.py b/test/test_external.py index d195b1ce11697..6eae11f0ff9c4 100644 --- a/test/test_external.py +++ b/test/test_external.py @@ -305,7 +305,7 @@ def test_multiple_spaces_one_private(self): gr.load( "spaces/gradio/test-loading-examples", ) - assert Context.access_token == api_key + assert Context.hf_token == api_key def test_loading_files_via_proxy_works(self): api_key = "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" # Intentionally revealing this key for testing purposes From f0dc386e319e90bc016ffa5a14d1aace8ad5ffc7 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Tue, 21 Mar 2023 18:51:19 -0700 Subject: [PATCH 89/93] context --- gradio/context.py | 4 +--- 1 file changed, 1 insertion(+), 3 deletions(-) diff --git a/gradio/context.py b/gradio/context.py 
index 6048312eb0179..18eecbc84ec8f 100644 --- a/gradio/context.py +++ b/gradio/context.py @@ -13,6 +13,4 @@ class Context: block: BlockContext | None = None # The current block that children are added to. id: int = 0 # Running id to uniquely refer to any block that gets defined ip_address: str | None = None # The IP address of the user. - access_token: str | None = ( - None # The HF token that is provided when loading private models or Spaces - ) + hf_token: str | None = None # The token provided when loading private HF repos From d85365fe1694c22af7330381ff4d6137344c73fb Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 23 Mar 2023 13:11:13 -0700 Subject: [PATCH 90/93] changes --- client/python/README.md | 4 ++-- client/python/gradio_client/client.py | 11 ++++------- 2 files changed, 6 insertions(+), 9 deletions(-) diff --git a/client/python/README.md b/client/python/README.md index c802a258ae8e2..f27d88f198d8c 100644 --- a/client/python/README.md +++ b/client/python/README.md @@ -1,6 +1,6 @@ # `gradio_client`: Use any Gradio app as an API -- in 3 lines of Python -This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API. This library is CURRENTLY IN ALPHA. APIS MAY CHANGE. WE DO NOT RECOMMEND USING IT EXCEPT IN EXPERIMENTAL PROJECTS. +This directory contains the source code for `gradio_client`, a lightweight Python library that makes it very easy to use any Gradio app as an API. Warning: This library is **currently in alpha, and APIs may change**. As an example, consider the Stable Diffusion Gradio app, which is hosted on Hugging Face Spaces, and which generates images given a text prompt. Using the `gradio_client` library, we can easily use the Gradio as an API to generates images programmatically. 
@@ -13,7 +13,7 @@ client = grc.Client(space="stability-ai/stable-diffusion") job = client.predict("a hyperrealistic portrait of a cat wearing cyberpunk armor") job.result() ->> URL +>> https://stabilityai-stable-diffusion.hf.space/kjbcxadsk3ada9k/image.png # URL to generated image ``` diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index f3321eef850c5..c9a1f458f87ed 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -64,7 +64,7 @@ def predict( *args, api_name: str | None = None, fn_index: int = 0, - result_callbacks: List[Callable] | None = None, + result_callbacks: Callable | List[Callable] | None = None, ) -> Future: if api_name: fn_index = self._infer_fn_index(api_name) @@ -74,14 +74,15 @@ def predict( job = Job(future) if result_callbacks: - + if isinstance(result_callbacks, Callable): + result_callbacks = [result_callbacks] + def create_fn(callback) -> Callable: def fn(future): if isinstance(future.result(), tuple): callback(*future.result()) else: callback(future.result()) - return fn for callback in result_callbacks: @@ -89,10 +90,6 @@ def fn(future): return job - ################################## - # Private helper methods - ################################## - def _telemetry_thread(self) -> None: # Disable telemetry by setting the env variable HF_HUB_DISABLE_TELEMETRY=1 data = { From d411500db632fbe8906361b3416a310937e37e77 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 23 Mar 2023 13:11:30 -0700 Subject: [PATCH 91/93] formatting --- client/python/gradio_client/client.py | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index c9a1f458f87ed..78d3c6b16359b 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -75,14 +75,15 @@ def predict( if result_callbacks: if isinstance(result_callbacks, Callable): - result_callbacks 
= [result_callbacks] - + result_callbacks = [result_callbacks] + def create_fn(callback) -> Callable: def fn(future): if isinstance(future.result(), tuple): callback(*future.result()) else: callback(future.result()) + return fn for callback in result_callbacks: From da7d62a2f7295eee1d6e34001be0d7730e1e0444 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 23 Mar 2023 15:02:22 -0700 Subject: [PATCH 92/93] fixes --- client/python/gradio_client/client.py | 19 +++++++------------ client/python/requirements.txt | 2 +- client/python/scripts/ci.sh | 3 ++- client/python/scripts/format.sh | 2 +- test/test_blocks.py | 1 - 5 files changed, 11 insertions(+), 16 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 78d3c6b16359b..1b430530bf0ba 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -11,11 +11,13 @@ import requests import websockets -from gradio_client import serializing, utils -from gradio_client.serializing import Serializable +import huggingface_hub from huggingface_hub.utils import build_hf_headers, send_telemetry from packaging import version +from gradio_client import serializing, utils +from gradio_client.serializing import Serializable + class Client: def __init__( @@ -45,7 +47,7 @@ def __init__( print(f"Loaded as API: {self.src} ✔") self.api_url = utils.API_URL.format(self.src) - self.ws_url = utils.WS_URL.format(self.src).replace("https", "wss") + self.ws_url = utils.WS_URL.format(self.src).replace("http", "ws", 1) self.config = self._get_config() self.endpoints = [ @@ -117,14 +119,7 @@ def __del__(self): self.executor.shutdown(wait=True) def _space_name_to_src(self, space) -> str | None: - return ( - requests.get( - f"https://huggingface.co/api/spaces/{space}/host", - headers=self.headers, - ) - .json() - .get("host") - ) + return huggingface_hub.space_info(space, token=self.hf_token).host # type: ignore def _get_config(self) -> Dict: assert 
self.src is not None @@ -137,7 +132,7 @@ def _get_config(self) -> Dict: raise ValueError(f"Could not get Gradio config from: {self.src}") if "allow_flagging" in config: raise ValueError( - f"Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x." + "Gradio 2.x is not supported by this client. Please upgrade this app to Gradio 3.x." ) return config diff --git a/client/python/requirements.txt b/client/python/requirements.txt index eaad6afbd2e12..83587c7dea8af 100644 --- a/client/python/requirements.txt +++ b/client/python/requirements.txt @@ -2,4 +2,4 @@ requests websockets packaging fsspec -huggingface_hub +huggingface_hub>=0.13.0 diff --git a/client/python/scripts/ci.sh b/client/python/scripts/ci.sh index 13541e8a3fb5c..0d97a227c6e44 100644 --- a/client/python/scripts/ci.sh +++ b/client/python/scripts/ci.sh @@ -1,4 +1,5 @@ #!/bin/bash +set -e cd "$(dirname ${0})/.." @@ -8,4 +9,4 @@ python -m isort --profile=black --check-only test gradio_client python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403,F541 test gradio_client --exclude gradio_client/__init__.py echo "Testing..." -python -m pytest test \ No newline at end of file +python -m pytest test diff --git a/client/python/scripts/format.sh b/client/python/scripts/format.sh index a46d57d5ea573..e37f54f8a8233 100644 --- a/client/python/scripts/format.sh +++ b/client/python/scripts/format.sh @@ -5,4 +5,4 @@ cd "$(dirname ${0})/.." echo "Formatting the backend... Our style follows the Black code style." 
python -m black test gradio_client python -m isort --profile=black test gradio_client -python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 test gradio_client --exclude gradio/__init__.py client/python/gradio_client/__init__.py +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 test gradio_client --exclude gradio_client/__init__.py diff --git a/test/test_blocks.py b/test/test_blocks.py index 2141f49d652b7..92de5f9f2dc06 100644 --- a/test/test_blocks.py +++ b/test/test_blocks.py @@ -126,7 +126,6 @@ def fake_func(): demo.load(fake_func, [], [textbox]) config = demo.get_config_file() - print("config\n", config) assert assert_configs_are_equivalent_besides_ids(XRAY_CONFIG, config) assert config["show_api"] is True _ = demo.launch(prevent_thread_lock=True, show_api=False) From 71cbd738dd5d0b91177132676596f524a04bd852 Mon Sep 17 00:00:00 2001 From: Abubakar Abid Date: Thu, 23 Mar 2023 15:04:07 -0700 Subject: [PATCH 93/93] format fix --- client/python/gradio_client/client.py | 7 +++---- scripts/format_backend.sh | 2 +- 2 files changed, 4 insertions(+), 5 deletions(-) diff --git a/client/python/gradio_client/client.py b/client/python/gradio_client/client.py index 1b430530bf0ba..0447f24952f9f 100644 --- a/client/python/gradio_client/client.py +++ b/client/python/gradio_client/client.py @@ -9,14 +9,13 @@ from concurrent.futures import Future from typing import Any, Callable, Dict, List, Tuple +import huggingface_hub import requests import websockets -import huggingface_hub -from huggingface_hub.utils import build_hf_headers, send_telemetry -from packaging import version - from gradio_client import serializing, utils from gradio_client.serializing import Serializable +from huggingface_hub.utils import build_hf_headers, send_telemetry +from packaging import version class Client: diff --git a/scripts/format_backend.sh b/scripts/format_backend.sh index aec3b882e003d..d3eea12b628b1 100755 --- a/scripts/format_backend.sh +++ b/scripts/format_backend.sh @@ -5,4 
+5,4 @@ cd "$(dirname ${0})/.." echo "Formatting the backend... Our style follows the Black code style." python -m black gradio test client/python/gradio_client python -m isort --profile=black gradio test client/python/gradio_client -python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 gradio test client/python/gradio_client --exclude gradio/__init__.py client/python/gradio_client/__init__.py +python -m flake8 --ignore=E731,E501,E722,W503,E126,E203,F403 gradio test client/python/gradio_client --exclude gradio/__init__.py,client/python/gradio_client/__init__.py