Remove ollama service for prod

This commit is contained in:
YannAhlgrim
2025-10-08 15:32:40 +02:00
parent c493a99b1d
commit ea421a2d14
2 changed files with 10 additions and 10 deletions
+9 -9
View File
@@ -16,15 +16,15 @@ services:
- "8002:8002"
restart: unless-stopped
ollama:
# This is a placeholder image; ensure you have an Ollama-compatible image and models available.
image: ollama/ollama:latest
container_name: ollama
ports:
- "11434:11434"
restart: unless-stopped
volumes:
- ./ollama-data:/root/.ollama
# ollama:
# # This is a placeholder image; ensure you have an Ollama-compatible image and models available.
# image: ollama/ollama:latest
# container_name: ollama
# ports:
# - "11434:11434"
# restart: unless-stopped
# volumes:
# - ./ollama-data:/root/.ollama
middleware:
build: ./middleware
+1 -1
View File
@@ -26,7 +26,7 @@ logger = logging.getLogger("middleware")
WHISPER_URL = "http://whisper:8001/transcribe"
COQUITTS_URL = "http://coquitts:8002/speak"
OLLAMA_URL = "http://ollama:11434/api/generate"
OLLAMA_URL = "http://ollama.ahlgrim.net:11434/api/generate"
LLM_MODEL = os.getenv("LLM_MODEL", "gemma3:270m")
logger.info("Using LLM model: %s", LLM_MODEL)