From 429bf1ffebece6936639c560432fe02f8827cec3 Mon Sep 17 00:00:00 2001
From: Shaojun Liu <61072813+liu-shaojun@users.noreply.github.com>
Date: Tue, 17 Dec 2024 14:22:50 +0800
Subject: [PATCH] Change: Use cn mirror for PyTorch extension installation to
 resolve network issues (#12559)

* Update Dockerfile

* Update Dockerfile

* Update Dockerfile
---
 docker/llm/inference/cpu/docker/Dockerfile | 2 +-
 docker/llm/inference/xpu/docker/Dockerfile | 4 ++--
 docker/llm/serving/xpu/docker/Dockerfile   | 4 ++--
 3 files changed, 5 insertions(+), 5 deletions(-)

diff --git a/docker/llm/inference/cpu/docker/Dockerfile b/docker/llm/inference/cpu/docker/Dockerfile
index a21feea0fe8..1e82f50ca74 100644
--- a/docker/llm/inference/cpu/docker/Dockerfile
+++ b/docker/llm/inference/cpu/docker/Dockerfile
@@ -65,7 +65,7 @@ RUN env DEBIAN_FRONTEND=noninteractive apt-get update && \
     pip install Jinja2==3.1.3 && \
     pip install torch==2.2.0 torchvision==0.17.0 torchaudio==2.2.0 --index-url https://download.pytorch.org/whl/cpu && \
     pip install intel-extension-for-pytorch==2.2.0 && \
-    pip install oneccl_bind_pt==2.2.0 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/us/ && \
+    pip install oneccl_bind_pt==2.2.0 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/cpu/cn/ && \
     pip install transformers==4.36.2

 ENTRYPOINT ["/bin/bash"]
diff --git a/docker/llm/inference/xpu/docker/Dockerfile b/docker/llm/inference/xpu/docker/Dockerfile
index 0a6221f14aa..b96f9203f90 100644
--- a/docker/llm/inference/xpu/docker/Dockerfile
+++ b/docker/llm/inference/xpu/docker/Dockerfile
@@ -54,7 +54,7 @@ RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRO
     python3 get-pip.py && \
     rm get-pip.py && \
     pip install --upgrade requests argparse urllib3 && \
-    pip install --pre --upgrade ipex-llm[xpu_arc] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ && \
+    pip install --pre --upgrade ipex-llm[xpu_arc] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/ && \
     pip install --pre pytorch-triton-xpu==3.0.0+1b2f15840e --index-url https://download.pytorch.org/whl/nightly/xpu && \
     # Fix Trivy CVE Issues
     pip install transformers_stream_generator einops tiktoken && \
@@ -83,7 +83,7 @@ RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRO
     # Download Deepspeed-AutoTP
     cp -r ./ipex-llm/python/llm/example/GPU/Deepspeed-AutoTP/ ./Deepspeed-AutoTP && \
     # Install related library of Deepspeed-AutoTP
-    pip install oneccl_bind_pt==2.3.100 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ && \
+    pip install oneccl_bind_pt==2.3.100 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/ && \
     pip install git+https://github.com/microsoft/DeepSpeed.git@ed8aed5 && \
     pip install git+https://github.com/intel/intel-extension-for-deepspeed.git@0eb734b && \
     pip install mpi4py && \
diff --git a/docker/llm/serving/xpu/docker/Dockerfile b/docker/llm/serving/xpu/docker/Dockerfile
index dd3979200fa..0b1e7267047 100644
--- a/docker/llm/serving/xpu/docker/Dockerfile
+++ b/docker/llm/serving/xpu/docker/Dockerfile
@@ -39,7 +39,7 @@ RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRO
     python3 get-pip.py && \
     rm get-pip.py && \
     pip install --upgrade requests argparse urllib3 && \
-    pip install --pre --upgrade ipex-llm[xpu,serving] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ && \
+    pip install --pre --upgrade ipex-llm[xpu,serving] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/ && \
     pip install transformers_stream_generator einops tiktoken && \
     pip install --upgrade colorama && \
     # Download all-in-one benchmark and examples
@@ -62,7 +62,7 @@ RUN wget -O- https://apt.repos.intel.com/intel-gpg-keys/GPG-PUB-KEY-INTEL-SW-PRO
     rm -rf ./ipex-llm && \
     # Install torch-ccl
     cd /tmp/ && \
-    pip install torch==2.1.0.post2 torchvision==0.16.0.post2 torchaudio==2.1.0.post2 intel-extension-for-pytorch==2.1.30.post0 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/us/ && \
+    pip install torch==2.1.0.post2 torchvision==0.16.0.post2 torchaudio==2.1.0.post2 intel-extension-for-pytorch==2.1.30.post0 --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/ && \
     # Internal oneccl
     wget https://sourceforge.net/projects/oneccl-wks/files/2024.0.0.6.2-release/oneccl_wks_installer_2024.0.0.6.2.sh && \
     bash oneccl_wks_installer_2024.0.0.6.2.sh && \
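
For reference, a minimal standalone sketch of the updated torch-ccl install line outside of Docker, using the cn mirror introduced by this patch (package versions copied from the serving Dockerfile hunk above; swap cn back to us in the index URL to use the default region mirror):

    # install the XPU torch stack from Intel's cn wheel mirror (versions as pinned in the serving Dockerfile)
    pip install torch==2.1.0.post2 torchvision==0.16.0.post2 torchaudio==2.1.0.post2 \
        intel-extension-for-pytorch==2.1.30.post0 \
        --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/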