mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 20:21:09 +02:00
- Fix telegram_user_id column type (integer -> bigint) for large user IDs
- Add local STT support via mana-stt service (Whisper MLX + Voxtral)
- Add STT provider config (local/openai) with fallback support
- Add Grafana dashboard for mana-stt service metrics
- Add ollama-metrics-proxy for LLM metrics collection
- Add Grafana dashboard for Ollama LLM metrics

Services added/updated:
- telegram-project-doc-bot: local STT integration
- mana-stt: Grafana dashboard
- ollama-metrics-proxy: new service for Ollama metrics

Co-Authored-By: Claude Opus 4.5 <noreply@anthropic.com>
29 lines
836 B
Text
# Server
PORT=3302

# Telegram
TELEGRAM_BOT_TOKEN=your-bot-token-from-botfather
TELEGRAM_ALLOWED_USERS= # Optional: comma-separated user IDs

# Database
DATABASE_URL=postgresql://postgres:postgres@localhost:5432/projectdoc

# Storage (MinIO)
S3_ENDPOINT=http://localhost:9000
S3_REGION=us-east-1
S3_ACCESS_KEY=minioadmin
S3_SECRET_KEY=minioadmin
S3_BUCKET=projectdoc-storage

# AI - Transcription (STT)
STT_PROVIDER=local # local | openai
STT_LOCAL_URL=http://localhost:3020 # mana-stt service URL
STT_MODEL=whisper # whisper | voxtral

# OpenAI (optional fallback for STT, required if STT_PROVIDER=openai)
OPENAI_API_KEY=sk-your-openai-key

# AI - Generation
LLM_PROVIDER=ollama # ollama | openai
OLLAMA_URL=http://localhost:11435 # Use :11435 for metrics proxy, :11434 for direct
OLLAMA_MODEL=gemma3:4b