Fix docker-compose.yaml #2073

Changes from all commits to docker-compose.yaml:
```diff
@@ -1,13 +1,11 @@
-version: '3.9'
-
-#-----------------------------------
-#---- Private-GPT services ---------
-#-----------------------------------
-services:
+services:
 
   # Private-GPT service for the Ollama CPU and GPU modes
   # This service builds from an external Dockerfile and runs the Ollama mode.
   private-gpt-ollama:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama # x-release-please-version
+    user: root
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-ollama
     build:
       context: .
       dockerfile: Dockerfile.ollama
```
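A note on the `image:` line, not part of the diff itself: Compose expands `${VAR:-default}` at parse time, so the image name and tag fall back to `zylonai/private-gpt` and `0.6.2` unless the environment overrides them. A minimal sketch, assuming the Docker Compose v2 CLI (`myregistry` and `dev` are placeholder values):

```sh
# With no overrides, the service resolves to zylonai/private-gpt:0.6.2-ollama:
docker compose config | grep 'image:'

# Override registry and tag via the environment (or a .env file):
PGPT_IMAGE=myregistry/private-gpt PGPT_TAG=dev docker compose config | grep 'image:'
```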
```diff
@@ -27,11 +25,14 @@ services:
       - ollama-cpu
       - ollama-cuda
       - ollama-api
+    depends_on:
+      - ollama
+    networks:
+      - ollama-net
```
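A note on the `depends_on:` added here (the hunk continues below): the short-form list only controls start ordering; it does not wait for Ollama to be ready to serve requests. A hedged sketch of readiness gating with the long form plus a healthcheck; the `ollama list` probe is an assumption, not something this PR adds:

```yaml
services:
  ollama:
    image: ollama/ollama:latest
    healthcheck:
      test: ["CMD", "ollama", "list"]  # assumes the ollama CLI exists in the image
      interval: 10s
      timeout: 5s
      retries: 12

  private-gpt-ollama:
    depends_on:
      ollama:
        condition: service_healthy  # start only once the probe passes
```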
```diff
 
   # Private-GPT service for the local mode
   # This service builds from a local Dockerfile and runs the application in local mode.
   private-gpt-llamacpp-cpu:
-    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
+    image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu
     build:
       context: .
       dockerfile: Dockerfile.llamacpp-cpu
```

Review comment on the `image:` change: Please don't remove `# x-release-please-version`.
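Context for the comment above: release-please rewrites the version on lines tagged with its marker comment when cutting a release, so the annotation is load-bearing even though Docker Compose ignores it. The line the reviewer wants restored is:

```yaml
image: ${PGPT_IMAGE:-zylonai/private-gpt}:${PGPT_TAG:-0.6.2}-llamacpp-cpu # x-release-please-version
```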
```diff
@@ -47,45 +48,34 @@ services:
       HF_TOKEN: ${HF_TOKEN}
     profiles:
       - llamacpp-cpu
-
-  #-----------------------------------
-  #---- Ollama services --------------
-  #-----------------------------------
+    networks:
+      - ollama-net
 
   # Traefik reverse proxy for the Ollama service
   # This will route requests to the Ollama service based on the profile.
-  ollama:
+  traefik:
     image: traefik:v2.10
```

Review comment on the rename: If you change the service name, you cannot connect to ollama using …
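The objection comes down to Compose DNS: containers on a shared user-defined network resolve each other by service name, so anything pointed at an `ollama` host stops resolving once the service answering to that name changes. A quick check, assuming the stack is up and `curl` exists in the private-gpt image (an assumption):

```sh
# From a container on ollama-net, the service name should resolve;
# /api/version is a standard Ollama endpoint:
docker compose exec private-gpt-ollama curl -s http://ollama:11434/api/version
```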
```diff
     ports:
       - "8081:8080"
+      - "11434:11434" # Exposing port 11434 for Ollama
     command:
-      - "--providers.file.filename=/etc/router.yml"
-      - "--log.level=ERROR"
-      - "--api.insecure=true"
+      - "--providers.docker=true"
+      - "--providers.docker.exposedbydefault=false"
+      - "--entrypoints.web.address=:11434"
+      - "--api.insecure=true"
+      - "--log.level=ERROR"
     volumes:
       - /var/run/docker.sock:/var/run/docker.sock:ro
-      - ./.docker/router.yml:/etc/router.yml:ro
     extra_hosts:
       - "host.docker.internal:host-gateway"
     profiles:
       - ""
       - ollama-cpu
       - ollama-cuda
       - ollama-api
+    networks:
+      - ollama-net
```
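For reference, the deleted `--providers.file.filename=/etc/router.yml` flag and the matching volume mount pointed Traefik at a dynamic-configuration file. The repository's actual `.docker/router.yml` is not part of this diff; a generic Traefik v2 file-provider config of that shape looks roughly like this (illustrative only, every name below is assumed):

```yaml
# Illustrative sketch, not the repository's actual .docker/router.yml.
http:
  routers:
    ollama-router:
      rule: "PathPrefix(`/`)"       # forward everything arriving on the entrypoint
      entryPoints:
        - web                       # an assumed entrypoint name
      service: ollama-svc
  services:
    ollama-svc:
      loadBalancer:
        servers:
          - url: "http://host.docker.internal:11434"  # via the host-gateway mapping
```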
```diff
 
-  # Ollama service for the CPU mode
-  ollama-cpu:
+  # Ollama service
+  ollama:
     image: ollama/ollama:latest
```

Review comment on the rename: Please go back to …

```diff
     volumes:
       - ./models:/root/.ollama
     profiles:
       - ""
       - ollama-cpu
+    networks:
+      - ollama-net
 
-  # Ollama service for the CUDA mode
+  # Ollama service for CUDA mode
   ollama-cuda:
     image: ollama/ollama:latest
     volumes:
```
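A usage note on the `profiles:` keys throughout the file: they decide which services a given invocation starts, and the empty-string profile marks services that also run when no profile is requested. Assuming the Compose v2 CLI:

```sh
# Default stack: services carrying the "" profile (or no profiles key at all):
docker compose up -d

# GPU stack: additionally start services tagged with the ollama-cuda profile:
docker compose --profile ollama-cuda up -d
```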
```diff
@@ -98,4 +88,10 @@ services:
           count: 1
           capabilities: [gpu]
     profiles:
-      - ollama-cuda
+      - ollama-cuda
+    networks:
+      - ollama-net
+
+networks:
+  ollama-net:
+    driver: bridge
```
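One way to confirm the explicit network after bringing the stack up; the created network name is prefixed with the Compose project name, usually the directory name, so `<project>` below is a placeholder:

```sh
# Expect a network whose name ends in ollama-net:
docker network ls --filter name=ollama-net

# Show which containers are attached (replace <project> with the real prefix):
docker network inspect <project>_ollama-net --format '{{range .Containers}}{{.Name}} {{end}}'
```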