---
# Docker Compose definition for the ollamamq proxy service.
# Publishes ollamamq on host port 11435 and forwards to an Ollama
# instance (host-installed by default, or the optional container below).

services:
  # Optional: Uncomment if you want to run Ollama in Docker as well
  # ollama:
  #   image: ollama/ollama:latest
  #   container_name: ollama
  #   volumes:
  #     - ollama_data:/root/.ollama
  #   ports:
  #     - "11434:11434"
  #   restart: unless-stopped

  ollamamq:
    build: .
    image: chlebon/ollamamq:latest
    container_name: ollamamq
    ports:
      # Quoted on purpose: unquoted HOST:CONTAINER mappings can hit
      # YAML 1.1 sexagesimal parsing.
      - "11435:11435"
    environment:
      # If using host Ollama on Linux:
      #   1. Ensure Ollama is listening on 0.0.0.0 (export OLLAMA_HOST=0.0.0.0)
      #   2. Use http://host.docker.internal:11434
      # If using the 'ollama' service above, use http://ollama:11434
      - OLLAMA_URLS=http://host.docker.internal:11434
      - PORT=11435
      - TIMEOUT=300
      - RUST_LOG=info
    # NOTE(review): the original file contained a bare `command:` key with no
    # value, which parses as null and overrides the image's CMD with nothing.
    # Removed so the image's default entrypoint/command is used — restore with
    # an explicit value if a custom command was intended.
    extra_hosts:
      # Lets the container reach the host's Ollama via host.docker.internal
      # on Linux (built in on Docker Desktop).
      - "host.docker.internal:host-gateway"
    restart: unless-stopped
    healthcheck:
      # NOTE(review): the original had a bare `test:` (parses as null, which
      # Compose rejects). Probing the proxy's own listen port instead —
      # confirm the image ships curl, or swap for wget/a compiled healthcheck
      # binary if it does not.
      test: ["CMD-SHELL", "curl -fsS http://localhost:11435/ || exit 1"]
      interval: 30s
      timeout: 10s
      retries: 3
      start_period: 10s

# Named volume for the optional containerized Ollama service above.
# volumes:
#   ollama_data: