diff --git a/docker-compose.yml b/docker-compose.yml
index 23b86fb..7ccfd99 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -16,15 +16,15 @@ services:
       - "8002:8002"
     restart: unless-stopped
 
-  ollama:
-    # This is a placeholder image; ensure you have an Ollama-compatible image and models available.
-    image: ollama/ollama:latest
-    container_name: ollama
-    ports:
-      - "11434:11434"
-    restart: unless-stopped
-    volumes:
-      - ./ollama-data:/root/.ollama
+  # ollama:
+  #   # This is a placeholder image; ensure you have an Ollama-compatible image and models available.
+  #   image: ollama/ollama:latest
+  #   container_name: ollama
+  #   ports:
+  #     - "11434:11434"
+  #   restart: unless-stopped
+  #   volumes:
+  #     - ./ollama-data:/root/.ollama
 
   middleware:
     build: ./middleware
diff --git a/middleware/server.py b/middleware/server.py
index 86ccd43..b25f2d3 100644
--- a/middleware/server.py
+++ b/middleware/server.py
@@ -26,7 +26,7 @@ logger = logging.getLogger("middleware")
 
 WHISPER_URL = "http://whisper:8001/transcribe"
 COQUITTS_URL = "http://coquitts:8002/speak"
-OLLAMA_URL = "http://ollama:11434/api/generate"
+OLLAMA_URL = "http://ollama.ahlgrim.net:11434/api/generate"
 
 LLM_MODEL = os.getenv("LLM_MODEL", "gemma3:270m")
 logger.info("Using LLM model: %s", LLM_MODEL)