From f03c029914386902095e1b28945f767344b7f809 Mon Sep 17 00:00:00 2001
From: Zhicun <59141989+ivy-lv11@users.noreply.github.com>
Date: Tue, 9 Apr 2024 09:48:42 +0800
Subject: [PATCH] pydantic version>=2.0.0 for llamaindex (#10694)

* pydantic version

* pydantic version

* upgrade version

---
 .github/workflows/llm_unit_tests.yml | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/.github/workflows/llm_unit_tests.yml b/.github/workflows/llm_unit_tests.yml
index f1b762d9bb4..ff97daec213 100644
--- a/.github/workflows/llm_unit_tests.yml
+++ b/.github/workflows/llm_unit_tests.yml
@@ -224,6 +224,7 @@ jobs:
         run: |
           pip install llama-index-readers-file llama-index-vector-stores-postgres llama-index-embeddings-huggingface
           pip install transformers==4.31.0
+          pip install "pydantic>=2.0.0"
           bash python/llm/test/run-llm-llamaindex-tests.sh
   llm-unit-test-on-arc:
     needs: [setup-python-version, llm-cpp-build]
@@ -398,4 +399,5 @@ jobs:
             pip install --pre --upgrade ipex-llm[xpu_2.0] --extra-index-url https://pytorch-extension.intel.com/release-whl/stable/xpu/cn/
             source /home/arda/intel/oneapi/setvars.sh
           fi
+          pip install "pydantic>=2.0.0"
           bash python/llm/test/run-llm-llamaindex-tests-gpu.sh
\ No newline at end of file
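
Note (not part of the patch): both hunks add the same pip install "pydantic>=2.0.0" line immediately before the llama-index test scripts run, so the environment satisfies llama-index's pydantic v2 requirement. Below is a minimal sketch, not taken from this repository, of a sanity check a test script could run before the suite starts; it assumes the packaging library is available in the CI environment (it normally ships alongside pip).

# Hypothetical pre-flight check, not part of the patch or the test scripts.
# Verifies that the installed pydantic meets the >=2.0.0 constraint the
# llama-index tests depend on, and fails fast with a readable message.
from importlib.metadata import version

from packaging.version import Version  # assumed available in the CI Python env

installed = Version(version("pydantic"))
required = Version("2.0.0")

if installed < required:
    raise SystemExit(
        f"pydantic {installed} is installed, but the llama-index tests need >= {required}"
    )
print(f"pydantic {installed} satisfies the >= {required} requirement")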