# Docker Compose stack: Open WebUI frontend + Ollama LLM backend + audio API,
# all joined on a private bridge network (studio-net).
# NOTE: the top-level `version` key is accepted but ignored by Compose v2+.
version: '3.8'

services:
  # --- THE INTERFACE (the glue) ---
  # Web UI that talks to the Ollama backend over the internal network.
  open-webui:
    image: ghcr.io/open-webui/open-webui:main
    container_name: open-webui
    ports:
      - "3000:8080"  # host 3000 -> container 8080
    environment:
      # Service-name DNS resolution on studio-net; 11434 is Ollama's API port.
      - OLLAMA_BASE_URL=http://ollama:11434
      # NOTE(review): weak fallback secret — set WEBUI_SECRET_KEY in .env for
      # any non-local deployment.
      - WEBUI_SECRET_KEY=${WEBUI_SECRET_KEY:-supersecretkey}
    volumes:
      - webui_data:/app/backend/data  # persists users/chats across restarts
    networks:
      - studio-net
    restart: always

  # --- THE BRAIN (LLM) ---
  ollama:
    image: ollama/ollama:latest
    container_name: ollama
    deploy:
      resources:
        reservations:
          devices:
            # Expose all NVIDIA GPUs to the container (requires the
            # NVIDIA Container Toolkit on the host).
            - driver: nvidia
              count: all
              capabilities: [gpu]
    volumes:
      - ollama_data:/root/.ollama  # model cache; avoids re-downloading models
    networks:
      - studio-net
    # NOTE(review): no restart policy here, unlike open-webui — confirm
    # whether `restart: always` is wanted for the backend too.

  # --- THE FIRST AUDIO SERVICE (example) ---
  audio-api:
    build:
      context: ./services/audio-api  # this directory is created right after
    container_name: audio-api
    ports:
      - "7860:7860"
    deploy:
      resources:
        limits:
          # Fixed: was `memroy`, an unknown key Compose rejects — the 16G cap
          # was never applied. Prevents one service from eating all system RAM.
          memory: 16G
        reservations:
          devices:
            - driver: nvidia
              count: all
              capabilities: [gpu]
    volumes:
      - ./models:/app/models    # bind mounts: models/outputs shared with host
      - ./outputs:/app/outputs
    networks:
      - studio-net

networks:
  studio-net:
    driver: bridge

# Named volumes managed by Docker (see per-service mounts above).
volumes:
  webui_data:
  ollama_data: