docker compose up -d
docker compose exec ollama ollama pull deepseek-r1:1.5b
docker compose exec ollama ollama pull llama3.2:3b
docker compose exec ollama ollama pull qwen2.5:0.5b
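Once the pulls finish, a quick sanity check (a minimal sketch, assuming the 11434:11434 port mapping from the compose file below) is to hit Ollama's tags endpoint from the host, or list the models from inside the container:

# Confirm the Ollama API is reachable and the pulled models are registered
curl http://localhost:11434/api/tags

# Or list the models directly inside the container
docker compose exec ollama ollama list

All of the commands above run against the following docker-compose.yml, which docker compose up -d brings up: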
services:
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    restart: unless-stopped
    ports:
      - "11434:11434"
    volumes:
      - ollama_data:/root/.ollama
    command: ["serve"]

  webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    restart: unless-stopped
    ports:
      - "3000:8080"
    environment:
      - OLLAMA_API_BASE_URL=http://ollama:11434
    depends_on:
      - ollama

volumes:
  ollama_data:
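With both containers running, Open WebUI is served at http://localhost:3000 (mapped to the container's port 8080), and the Ollama API is exposed directly on port 11434. A minimal end-to-end check, assuming one of the models pulled above:

# Send a single non-streaming prompt to one of the pulled models
curl http://localhost:11434/api/generate -d '{
  "model": "qwen2.5:0.5b",
  "prompt": "Reply with one short sentence.",
  "stream": false
}'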