# Source: Studio-IA-Modulaire/docker-compose.yml
# Exported 2026-03-03 13:43:20 +01:00 — 102 lines, 2.3 KiB, YAML
# (pasted file-viewer metadata preserved as comments so the file parses)
services:
  # --- THE GATEKEEPER (reverse proxy) ---
  # Nginx is the single published entry point (host port 80); it routes
  # traffic to the internal services over studio-net.
  gateway:
    image: nginx:latest
    container_name: studio-gateway-1
    ports:
      - "80:80"  # quoted: port mappings must stay strings
    volumes:
      # Site routing lives in a single server config mounted read-only by path.
      - ./nginx/default.conf:/etc/nginx/conf.d/default.conf
    networks:
      - studio-net
    depends_on:
      # Wait for the UI's healthcheck so nginx does not proxy to a dead upstream.
      open-webui:
        condition: service_healthy
      # audio-api has no healthcheck, so "started" is the strongest condition available.
      audio-api:
        condition: service_started
    restart: always
# --- LE TUNNEL HTTPS ---
cloudflared:
image: cloudflare/cloudflared:latest
container_name: studio-cloudflared
command: tunnel --no-autoupdate run --token ${CF_TUNNEL_TOKEN}
environment:
- CF_TUNNEL_TOKEN=${CF_TUNNEL_TOKEN}
networks:
- studio-net
depends_on:
- gateway
restart: always
# --- L'INTERFACE (Open WebUI) ---
open-webui:
image: ghcr.io/open-webui/open-webui:main
container_name: open-webui
environment:
- OLLAMA_BASE_URL=http://ollama:11434
- WEBUI_SECRET_KEY=${WEBUI_SECRET_KEY}
- WEBUI_URL=${WEBUI_URL}
- ENABLE_WEBSOCKETS=True
volumes:
- webui_data:/app/backend/data
networks:
- studio-net
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8080/health"]
interval: 30s
timeout: 10s
retries: 3
restart: always
# --- LE CERVEAU (Ollama) ---
ollama:
image: ollama/ollama:latest
container_name: ollama
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu]
volumes:
- ollama_data:/root/.ollama
networks:
- studio-net
expose:
- "11434"
restart: always
# --- LE SERVICE AUDIO ---
audio-api:
build:
context: ./services/audio-api
dockerfile: Dockerfile
container_name: audio-api
environment:
- NVIDIA_VISIBLE_DEVICES=all
- PYTHONUNBUFFERED=1
entrypoint: ["python3.11", "/app/server.py"]
volumes:
- ./services/audio-api:/app
- ./models:/app/models
- ./outputs:/app/outputs
deploy:
resources:
reservations:
devices:
- driver: nvidia
count: all
capabilities: [gpu]
ports:
- "7860:7860"
networks:
- studio-net
restart: always
# Shared internal network; every service joins it so containers resolve each
# other by service name (e.g. http://ollama:11434).
networks:
  studio-net:
    driver: bridge
# Named volumes (empty value = default local driver): persistent state for
# Open WebUI data and Ollama model downloads.
volumes:
  webui_data:
  ollama_data: