HF transfer enable (#790)
* make HF_HUB_ENABLE_HF_TRANSFER a setting
defaults to True

* remove from makefile
pascal-pfeiffer authored Jul 30, 2024
1 parent 2e095cd commit a305bae
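
For background: HF_HUB_ENABLE_HF_TRANSFER tells huggingface_hub to route large file downloads through the optional Rust-based hf_transfer backend (installed separately with pip install hf_transfer). A minimal sketch of enabling it directly in Python, outside of LLM Studio; the repo id is only an example:

    import os

    # Set the flag before importing huggingface_hub, which reads it at import time.
    os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "1"

    from huggingface_hub import snapshot_download

    snapshot_download(repo_id="gpt2")  # any repo id works; gpt2 is just an example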
Showing 4 changed files with 26 additions and 3 deletions.
2 changes: 0 additions & 2 deletions Makefile
@@ -165,7 +165,6 @@ test-ui-github-actions: reports setup-ui
 
 .PHONY: wave
 wave:
-	HF_HUB_ENABLE_HF_TRANSFER=True \
 	H2O_WAVE_MAX_REQUEST_SIZE=25MB \
 	H2O_WAVE_NO_LOG=true \
 	H2O_WAVE_PRIVATE_DIR="/download/@$(WORKDIR)/output/download" \
@@ -182,7 +181,6 @@ llmstudio:
 llmstudio-conda:
 	CONDA_ACTIVATE="source $$(conda info --base)/etc/profile.d/conda.sh ; conda activate llmstudio" && \
 	bash -c "$$CONDA_ACTIVATE && \
-	HF_HUB_ENABLE_HF_TRANSFER=True \
 	H2O_WAVE_MAX_REQUEST_SIZE=25MB \
 	H2O_WAVE_NO_LOG=true \
 	H2O_WAVE_PRIVATE_DIR="/download/@$(WORKDIR)/output/download" \
3 changes: 3 additions & 0 deletions llm_studio/app_utils/config.py
@@ -103,6 +103,9 @@ def get_size(x):
     "default_neptune_project": os.getenv("NEPTUNE_PROJECT", ""),
     "default_neptune_api_token": os.getenv("NEPTUNE_API_TOKEN", ""),
     "default_huggingface_api_token": os.getenv("HUGGINGFACE_TOKEN", ""),
+    "default_hf_hub_enable_hf_transfer": os.getenv(
+        "HF_HUB_ENABLE_HF_TRANSFER", True
+    ),
     "default_openai_azure": os.getenv("OPENAI_API_TYPE", "open_ai") == "azure",
     "default_openai_api_token": os.getenv("OPENAI_API_KEY", ""),
     "default_openai_api_base": os.getenv(
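
One detail worth noting about the new default: os.getenv only falls back to the Python default (True here) when the variable is unset; when it is set, the raw string is returned, and any non-empty string is truthy. A quick illustration of that standard-library behaviour (not LLM Studio code):

    import os

    os.environ.pop("HF_HUB_ENABLE_HF_TRANSFER", None)
    print(os.getenv("HF_HUB_ENABLE_HF_TRANSFER", True))   # True  (the bool default)

    os.environ["HF_HUB_ENABLE_HF_TRANSFER"] = "0"
    value = os.getenv("HF_HUB_ENABLE_HF_TRANSFER", True)
    print(value, bool(value))                             # 0 True  (non-empty strings are truthy)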
23 changes: 22 additions & 1 deletion llm_studio/app_utils/sections/settings.py
@@ -12,7 +12,7 @@ async def settings(q: Q) -> None:
     await clean_dashboard(q, mode="full")
     q.client["nav/active"] = "settings"
 
-    label_width = "250px"
+    label_width = "280px"
     textbox_width = "350px"
 
     q.page["settings/content"] = ui.form_card(
@@ -276,6 +276,27 @@ async def settings(q: Q) -> None:
                     ),
                 ]
             ),
+            ui.inline(
+                items=[
+                    ui.label("Huggingface Hub Enable HF Transfer", width=label_width),
+                    ui.toggle(
+                        name="default_hf_hub_enable_hf_transfer",
+                        value=(
+                            True
+                            if q.client["default_hf_hub_enable_hf_transfer"]
+                            else False
+                        ),
+                        tooltip=(
+                            "Toggle to enable \
+                            <a href='https://github.com/huggingface/hf_transfer' \
+                            target='_blank'>HF Transfer</a> for faster \
+                            downloads. Toggle, if you are experiencing issues on down-\
+                            or upload. EXPERIMENTAL."
+                        ),
+                        trigger=False,
+                    ),
+                ]
+            ),
             ui.inline(
                 items=[
                     ui.label("OpenAI API Token", width=label_width),
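
For orientation only (not part of this commit): in a Wave app, a toggle's submitted state comes back as a bool in q.args, which is the usual way a setting like the one above ends up in q.client. A minimal, self-contained sketch with illustrative names:

    from h2o_wave import Q, app, main, ui  # noqa: F401  (main is needed so `wave run` finds the app)

    @app("/toggle_demo")
    async def serve(q: Q) -> None:
        # After a form submit, the toggle's state arrives as a bool in q.args.
        if "hf_transfer_toggle" in q.args:
            q.client["hf_transfer_enabled"] = bool(q.args["hf_transfer_toggle"])
        enabled = q.client["hf_transfer_enabled"]
        if enabled is None:  # first visit: mirror the new default of True
            enabled = True
        q.page["form"] = ui.form_card(
            box="1 1 4 4",
            items=[
                ui.toggle(name="hf_transfer_toggle", label="Enable HF Transfer", value=enabled),
                ui.button(name="save", label="Save", primary=True),
            ],
        )
        await q.page.save()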
1 change: 1 addition & 0 deletions llm_studio/app_utils/utils.py
@@ -1994,6 +1994,7 @@ def start_experiment(
         "NEPTUNE_API_TOKEN": q.client["default_neptune_api_token"],
         "OPENAI_API_KEY": q.client["default_openai_api_token"],
         "GPT_EVAL_MAX": str(q.client["default_gpt_eval_max"]),
+        "HF_HUB_ENABLE_HF_TRANSFER": str(q.client["default_hf_hub_enable_hf_transfer"]),
     }
     if q.client["default_openai_azure"]:
         env_vars.update(
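
A reminder of why str(...) is applied above: environment variables handed to a child process must be strings. The sketch below shows the general pattern for passing such a dict to a subprocess; it is illustrative only and not necessarily how LLM Studio launches experiments:

    import os
    import subprocess

    env_vars = {
        "HF_HUB_ENABLE_HF_TRANSFER": str(True),  # becomes the string "True"
        "GPT_EVAL_MAX": str(100),
    }

    # Merge with the parent environment so PATH, HOME, etc. stay intact.
    subprocess.run(
        ["python", "-c", "import os; print(os.environ['HF_HUB_ENABLE_HF_TRANSFER'])"],
        env={**os.environ, **env_vars},
        check=True,
    )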
