#!/bin/bash
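# Installs the tlm CLI: detects OS and architecture, downloads the matching
# release binary, installs it to /usr/local/bin, and points it at a running
# Ollama instance.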
set -e
status() { echo ">>> $*" >&2; }
error() { echo "ERROR: $*" >&2; }
warning() { echo "WARNING: $*" >&2; }
available() { command -v "$1" >/dev/null; }
print_message() {
    local message="$1"
    local color="$2"
    echo -e "\e[${color}m${message}\e[0m"
}
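# Example: print_message "Done" "32" prints "Done" in green; "31" used below
# is red (standard ANSI SGR color codes).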
# OS and Architecture Detection
if [[ "$OSTYPE" == "linux-gnu"* ]]; then
os="linux"
elif [[ "$OSTYPE" == "darwin"* ]]; then
os="darwin"
else
error "Unsupported operating system. Only Linux and macOS are currently supported."
exit 1
fi
if [[ "$(uname -m)" == "x86_64" ]]; then
arch="amd64"
elif [[ "$(uname -m)" == "aarch64" || "$(uname -m)" == "arm64" ]]; then
arch="arm64"
else
error "Unsupported architecture. tlm requires a 64-bit system (x86_64 or arm64)."
exit 1
fi
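# Supported targets after detection: linux/amd64, linux/arm64, darwin/amd64,
# and darwin/arm64.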
# Download URL Construction
version="1.1"
base_url="https://github.com/yusufcanb/tlm/releases/download"
download_url="${base_url}/${version}/tlm_${version}_${os}_${arch}"
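# Example resolved URL for version 1.1 on Linux/amd64:
#   https://github.com/yusufcanb/tlm/releases/download/1.1/tlm_1.1_linux_amd64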
# Use OLLAMA_HOST if set (and non-empty), otherwise fall back to the default
ollama_host="${OLLAMA_HOST:-http://localhost:11434}"
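# e.g. to target a remote Ollama instance, export OLLAMA_HOST before running
# this script (hostname below is only an illustrative placeholder):
#   export OLLAMA_HOST=http://ollama.internal:11434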
# Ollama check: probe the host to verify an Ollama server is reachable
if ! curl -fsSL "${ollama_host}" &>/dev/null; then
    print_message "ERR: Ollama not found." "31" # Red color
    print_message "If you have Ollama installed, please make sure it's running and accessible at ${ollama_host}," "31"
    print_message "or configure the OLLAMA_HOST environment variable." "31"

    # The install hints below differ only in the platform label
    if [[ "$os" == "darwin" ]]; then
        platform="macOS"
    else
        platform="Linux"
    fi

    echo "
>>> If you have Ollama on your system or network, you can set OLLAMA_HOST like below:

    $ export OLLAMA_HOST=http://localhost:11434

>>> If you don't have Ollama installed, you can install it using one of the following methods:

$(print_message "*** ${platform}: ***" "32")

    Download instructions can be found at the following link: https://ollama.com/download

$(print_message "*** Official Docker Images: ***" "32")

    Ollama can run with GPU acceleration inside Docker containers for Nvidia GPUs.
    To get started with the Docker image, follow these steps:

    $(print_message "1. *** CPU only: ***" "32")

    $ docker run -d -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama

    $(print_message "2. *** Nvidia GPU: ***" "32")

    $ docker run -d --gpus=all -v ollama:/root/.ollama -p 11434:11434 --name ollama ollama/ollama
"
    print_message "Installation aborted..." "31"
    print_message "Please install or configure Ollama using the methods above and try again." "31"
    exit 1
fi
# Download the binary
status "Downloading tlm version ${version} for ${os}/${arch}..."
if ! curl -fsSL -o tlm "${download_url}"; then
    error "Download failed. Please check your internet connection and try again."
    exit 1
fi
# Make executable
chmod +x tlm

# Move to installation directory
status "Installing tlm..."
SUDO=
if [ "$(id -u)" -ne 0 ]; then
    # Not running as root, so sudo is needed for the install step
    if ! available sudo; then
        error "This script requires superuser permissions. Please re-run as root."
        exit 1
    fi
    SUDO="sudo"
fi
$SUDO mv tlm /usr/local/bin/
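# /usr/local/bin is assumed to be on PATH; if your system uses a different
# prefix, adjust the destination above accordingly.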
# set ollama host
if ! tlm config set llm.host "${ollama_host}" &>/dev/null; then
    error "tlm config set llm.host <${ollama_host}> failed."
    exit 1
fi
# set shell auto
if ! tlm config set shell auto &>/dev/null; then
    error "tlm config set shell <auto> failed."
    exit 1
fi
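# "auto" lets tlm pick the target shell on its own; an explicit value can
# presumably be set the same way, e.g. tlm config set shell bash (an
# assumption -- only the "auto" value is exercised by this script).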
# deploy tlm modelfiles
if ! tlm deploy; then
    error "tlm deploy failed."
    exit 1
fi
echo ""
# change ownership of the tlm config file back to the invoking user
# (only relevant when the script itself was run via sudo)
if [ -n "$SUDO_USER" ]; then
    $SUDO chown "$SUDO_USER" ~/.tlm.yaml
fi
status "Type 'tlm' to get started."
exit 0