diff --git a/.env.development b/.env.development index 8f4e69db1..8c3af6ca2 100644 --- a/.env.development +++ b/.env.development @@ -169,8 +169,12 @@ OLLAMA_URL=http://localhost:11434 # mana-llm (OpenAI-compatible gateway, port 3025 locally / llm.mana.how prod) # Used by server-side voice quick-add proxies (parse-task, parse-habit). -# API key is required when pointing at the GPU LLM proxy (gpu-llm.mana.how). -MANA_LLM_URL=http://localhost:3025 +# Defaults to the shared hosted gateway (llm.mana.how) because nobody +# runs mana-llm in local Docker — same convention as STT_URL above. +# For a fully offline local stack, override this to +# http://localhost:3025 and run `docker compose up mana-llm`. API key +# is required when pointing at the GPU LLM proxy (gpu-llm.mana.how). +MANA_LLM_URL=https://llm.mana.how MANA_LLM_API_KEY= # ============================================