build_llamacpp.sh
#!/bin/bash
set -e -x

llamacpp_tag=$1   # llama.cpp git tag (or branch) to build
hw_platform=$2    # target hardware platform: cuda, intel, or openblas
model_url=$3      # URL of the GGUF model to download
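# Example invocation (the tag and model URL below are placeholders, not values from this repo):
#   ./build_llamacpp.sh <llama.cpp-tag> cuda https://example.com/model.gguf
# If hw_platform is empty or unrecognized, the OpenBLAS build is used as the fallback.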
# Build with NVIDIA CUDA (cuBLAS) acceleration
build_cuda() {
    export PATH=/usr/local/cuda-12.3/bin${PATH:+:${PATH}}
    export LD_LIBRARY_PATH=/usr/local/cuda-12.3/lib64${LD_LIBRARY_PATH:+:${LD_LIBRARY_PATH}}
    cmake -B build -DLLAMA_CUBLAS=ON
    cmake --build build --config Release -- -j8
}
# Build with Intel oneAPI MKL acceleration (icx/icpx compilers)
build_intel() {
    source /opt/intel/oneapi/mkl/latest/env/vars.sh
    source /opt/intel/oneapi/setvars.sh
    cmake -B build -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=Intel10_64lp -DCMAKE_C_COMPILER=icx -DCMAKE_CXX_COMPILER=icpx -DLLAMA_NATIVE=ON
    cmake --build build --config Release -- -j8
}
# Build with OpenBLAS (CPU) acceleration
build_openblas() {
    cmake -B build -DLLAMA_BLAS=ON -DLLAMA_BLAS_VENDOR=OpenBLAS
    cmake --build build --config Release -- -j8
}
# Stop the service if it is running
if systemctl list-units --type=service --all | grep -q "llamacpp"; then
    systemctl stop "llamacpp"
fi
cd /opt/llamaup/app
if [ -d "llama.cpp" ]; then
    # Existing checkout: discard local changes and update to the requested tag
    cd llama.cpp
    git reset --hard
    git pull origin "$llamacpp_tag"
else
    # Fresh shallow clone of the requested tag
    git clone --depth=1 --branch "$llamacpp_tag" https://github.com/ggerganov/llama.cpp
    git config --global --add safe.directory /opt/llamaup/app/llama.cpp
    cd llama.cpp
fi
# Always start from a clean build directory
rm -rf build

echo "building for hardware platform $hw_platform"
if [ "$hw_platform" == "cuda" ]; then
build_cuda
elif [ "$hw_platform" == "intel" ]; then
build_intel
elif [ "$hw_platform" == "openblas" ]; then
build_openblas
else
# build openblas by default if no platform specified or is unknown
build_openblas
fi
# Download the model, fix ownership, and restart the service
wget "$model_url" -O /opt/llamaup/data/model.gguf
chown -R llamaup:llamaup /opt/llamaup
systemctl restart llamacpp
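
# Note: this script assumes a "llamacpp" systemd unit already exists on the host.
# A minimal sketch of such a unit is shown below; the ExecStart binary path and
# flags are assumptions for illustration, not part of this repository:
#
#   [Unit]
#   Description=llama.cpp server
#   After=network.target
#
#   [Service]
#   User=llamaup
#   WorkingDirectory=/opt/llamaup/app/llama.cpp
#   ExecStart=/opt/llamaup/app/llama.cpp/build/bin/server -m /opt/llamaup/data/model.gguf
#   Restart=on-failure
#
#   [Install]
#   WantedBy=multi-user.target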