-
Notifications
You must be signed in to change notification settings - Fork 0
/
local_llm.sh
34 lines (24 loc) · 981 Bytes
/
local_llm.sh
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
#!/usr/bin/env bash
# Provision a local LLM stack: ollama as a systemd service plus the
# open-webui frontend in a docker container (host networking).
set -euo pipefail

# default port 11434 sometimes not working
# export so the ollama CLI below actually targets this address
export OLLAMA_HOST="127.0.0.1:11435"

echo "install ollama https://github.com/ollama/ollama"
curl -fsSL https://ollama.com/install.sh | sh

echo "create ollama user"
# the install script may already have created the account; guard so
# set -e does not abort on "user exists"
id ollama >/dev/null 2>&1 \
  || sudo useradd -r -s /bin/false -m -d /usr/share/ollama ollama

echo "installing ollama as a systemd service"
# -fsSL: fail loudly on HTTP errors instead of writing an error page
# into the unit file; the download itself needs no root, only the tee
curl -fsSL https://raw.githubusercontent.com/tbs1-bo/provision/refs/heads/main/ollama.service \
  | sudo tee /etc/systemd/system/ollama.service >/dev/null
sudo systemctl daemon-reload
sudo systemctl enable ollama
sudo systemctl start ollama

echo "pull a ollama model for tests"
# pull only after the service is running, so the CLI can reach $OLLAMA_HOST
ollama pull llama3.2:1b

echo "installing docker"
# -y: keep the provision non-interactive
sudo apt install -y docker.io

echo "adding current user to docker group (maybe restart required)"
sudo usermod -a -G docker "$USER"

echo "starting docker container"
docker run -d --network=host \
  -v open-webui:/app/backend/data \
  -e OLLAMA_BASE_URL="http://$OLLAMA_HOST" \
  --name open-webui \
  --restart always \
  ghcr.io/open-webui/open-webui:main