From 677f6b799d1615d486a0de087155e5f726a5d81e Mon Sep 17 00:00:00 2001
From: Till JS
Date: Mon, 13 Apr 2026 23:45:37 +0200
Subject: [PATCH] feat(brain): add NudgeToast, server LLM fallback,
 trigger-event bridge
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Three remaining TODOs resolved:

1. NudgeToast (in-app nudge display):
   - New NudgeToast.svelte in the bottom stack alongside SuggestionToast
   - Evaluates Pulse Rules every 60s and shows nudges as toasts
   - Action button navigates to the module route; dismiss records the outcome
   - Badge shows a count when multiple nudges are queued

2. Server LLM fallback:
   - Companion engine now tries the local LLM (Gemma/WebGPU) first
   - Falls back to mana-api /api/v1/chat/completions if WebGPU is unavailable
   - isCompanionAvailable() returns true if either path works
   - Graceful error messages when neither is available

3. Trigger-Event bridge (legacy automation migration):
   - event-bridge.ts maps 13 domain event types to the legacy
     (appId, collection, op) format
   - Existing user automations now fire on domain events too
   - Domain events carry decrypted data → condition matching on
     encrypted fields (title, etc.) works correctly
   - Bridge wired into layout startup/cleanup

Co-Authored-By: Claude Opus 4.6 (1M context)
---
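
Notes for review (everything between the --- above and the first diff is
ignored by git am):

NudgeToast.svelte's body is elided in this excerpt, so below is a minimal
sketch of the 60s Pulse-Rule polling described in point 1 of the message.
evaluatePulseRules(), queueNudge() and the Nudge shape are illustrative
names, not the actual module API:

    import { onMount } from 'svelte';

    // Hypothetical APIs, declared only so the sketch type-checks:
    interface Nudge { id: string; title: string; moduleRoute: string }
    declare function evaluatePulseRules(): Promise<Nudge[]>;
    declare function queueNudge(nudge: Nudge): void;

    const EVAL_INTERVAL_MS = 60_000; // re-evaluate Pulse Rules every 60s

    onMount(() => {
        const tick = async () => {
            for (const nudge of await evaluatePulseRules()) queueNudge(nudge);
        };
        tick(); // evaluate once on mount, then on the interval
        const timer = setInterval(tick, EVAL_INTERVAL_MS);
        return () => clearInterval(timer); // cleanup when the toast stack unmounts
    });
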
 .../web/src/lib/components/NudgeToast.svelte  | 201 ++++++++++
 .../web/src/lib/modules/companion/engine.ts   | 109 ++++--
 .../web/src/lib/modules/mood/ListView.svelte  |  46 ++-
 .../modules/mood/components/QuickLog.svelte   | 349 ++++++++----------
 .../apps/web/src/lib/triggers/event-bridge.ts |  57 +++
 .../apps/web/src/routes/(app)/+layout.svelte  |  10 +
 6 files changed, 521 insertions(+), 251 deletions(-)
 create mode 100644 apps/mana/apps/web/src/lib/components/NudgeToast.svelte
 create mode 100644 apps/mana/apps/web/src/lib/triggers/event-bridge.ts

diff --git a/apps/mana/apps/web/src/lib/components/NudgeToast.svelte b/apps/mana/apps/web/src/lib/components/NudgeToast.svelte
new file mode 100644
index 000000000..87750d7e5
--- /dev/null
+++ b/apps/mana/apps/web/src/lib/components/NudgeToast.svelte
@@ -0,0 +1,201 @@
+
+
+
+{#if currentNudge}
+
+{/if}
+
+
diff --git a/apps/mana/apps/web/src/lib/modules/companion/engine.ts b/apps/mana/apps/web/src/lib/modules/companion/engine.ts
index 106896bc0..b43c0079e 100644
--- a/apps/mana/apps/web/src/lib/modules/companion/engine.ts
+++ b/apps/mana/apps/web/src/lib/modules/companion/engine.ts
@@ -1,28 +1,78 @@
 /**
  * Companion Chat Engine — Orchestrates LLM + Context Document + Tool Calling.
  *
- * Flow:
- * 1. Build system prompt from Context Document (projections + streaks)
- * 2. Collect conversation history
- * 3. Send to LLM with tool schemas
- * 4. If LLM returns tool_use → execute tool → feed result back → repeat
- * 5. Return final assistant message
- *
- * Currently uses @mana/local-llm directly (Gemma, browser-local).
- * Tool calling is simulated via JSON extraction since Gemma doesn't
- * natively support function calling — the system prompt instructs the
- * model to output JSON when it wants to call a tool.
+ * Tries local LLM (Gemma via @mana/local-llm) first. If WebGPU is not
+ * available, falls back to the mana-llm server endpoint. Tool calling
+ * uses JSON extraction from the LLM output.
  */
-import { generate, getLocalLlmStatus, loadLocalLlm } from '@mana/local-llm';
+import { generate, getLocalLlmStatus, loadLocalLlm, isLocalLlmSupported } from '@mana/local-llm';
 import { generateContextDocument } from '$lib/data/projections/context-document';
 import { getToolsForLlm, executeTool } from '$lib/data/tools';
+import { authStore } from '$lib/stores/auth.svelte';
 import type { DaySnapshot, StreakInfo } from '$lib/data/projections/types';
 import type { LocalMessage } from './types';
 import type { ToolResult } from '$lib/data/tools/types';
 
 const MAX_TOOL_ROUNDS = 3;
 
+type LlmMessage = { role: 'user' | 'assistant' | 'system'; content: string };
+
+/** Try local LLM, fall back to server if WebGPU unavailable. */
+async function callLlm(messages: LlmMessage[], onToken?: (token: string) => void): Promise<string> {
+	// Try local first (WebGPU + Gemma)
+	if (isLocalLlmSupported()) {
+		const status = getLocalLlmStatus();
+		if (status.current.state !== 'ready') {
+			try {
+				await loadLocalLlm();
+			} catch {
+				// Fall through to server
+				return callServerLlm(messages);
+			}
+		}
+		const result = await generate({ messages, temperature: 0.7, maxTokens: 1024, onToken });
+		return result.content;
+	}
+
+	// Fallback: server-side LLM via mana-api
+	return callServerLlm(messages);
+}
+
+async function callServerLlm(messages: LlmMessage[]): Promise<string> {
+	const apiUrl =
+		(typeof window !== 'undefined' &&
+			(window as unknown as Record<string, string | undefined>).__PUBLIC_MANA_API_URL__) ||
+		import.meta.env.PUBLIC_MANA_API_URL ||
+		'';
+
+	if (!apiUrl) {
+		return 'LLM not available: neither WebGPU nor a server endpoint is configured.';
+	}
+
+	const headers: Record<string, string> = { 'Content-Type': 'application/json' };
+	try {
+		const token = await authStore.getValidToken();
+		if (token) headers['Authorization'] = `Bearer ${token}`;
+	} catch {
+		// Continue without auth — server will decide
+	}
+
+	const response = await fetch(`${apiUrl}/api/v1/chat/completions`, {
+		method: 'POST',
+		headers,
+		body: JSON.stringify({ messages, model: 'companion' }),
+	});
+
+	if (!response.ok) {
+		const err = await response.text().catch(() => response.statusText);
+		return `Server error: ${err}`;
+	}
+
+	const data = (await response.json()) as { choices?: { message?: { content?: string } }[] };
+	return data.choices?.[0]?.message?.content ?? 'No response from the server.';
+}
+
 interface EngineResult {
 	content: string;
 	toolCalls: { name: string; params: Record<string, unknown>; result: ToolResult }[];
@@ -107,17 +157,10 @@ export async function runCompanionChat(
 	streaks: StreakInfo[],
 	onToken?: (token: string) => void
 ): Promise<EngineResult> {
-	// Ensure local LLM is loaded
-	const status = getLocalLlmStatus();
-	if (status.current.state !== 'ready') {
-		await loadLocalLlm();
-	}
-
 	const systemPrompt = buildSystemPrompt(day, streaks);
 	const toolCalls: EngineResult['toolCalls'] = [];
 
-	// Build message chain
-	const llmMessages: { role: 'user' | 'assistant' | 'system'; content: string }[] = [
+	const llmMessages: LlmMessage[] = [
 		{ role: 'system', content: systemPrompt },
 		...messagesToLlm(history),
 		{ role: 'user', content: userMessage },
@@ -126,14 +169,7 @@ let finalContent = '';
 	for (let round = 0; round <= MAX_TOOL_ROUNDS; round++) {
-		const result = await generate({
-			messages: llmMessages,
-			temperature: 0.7,
-			maxTokens: 1024,
-			onToken: round === 0 ? onToken : undefined, // Only stream first round
-		});
-
-		const text = result.content;
+		const text = await callLlm(llmMessages, round === 0 ? onToken : undefined);
 
 		const toolCall = extractToolCall(text);
 
 		if (!toolCall) {
@@ -168,9 +204,20 @@ }
 
 /**
- * Check if the Companion Chat is available (LLM loaded or loadable).
+ * Check if the Companion Chat is available.
+ * Returns true if either local LLM or server endpoint is usable.
  */
 export function isCompanionAvailable(): boolean {
-	const status = getLocalLlmStatus();
-	return status.current.state === 'ready' || status.current.state === 'idle';
+	// Local LLM available?
+	if (isLocalLlmSupported()) {
+		const status = getLocalLlmStatus();
+		if (status.current.state === 'ready' || status.current.state === 'idle') return true;
+	}
+	// Server fallback configured?
+	const apiUrl =
+		(typeof window !== 'undefined' &&
+			(window as unknown as Record<string, string | undefined>).__PUBLIC_MANA_API_URL__) ||
+		import.meta.env.PUBLIC_MANA_API_URL ||
+		'';
+	return !!apiUrl;
 }
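
Reviewer note (not part of the diff): a minimal usage sketch for the
dual-path engine above. runCompanionChat's leading parameters are cut
off in the hunk context, so the call shape is an assumption based on
the function body; `day` (DaySnapshot) and `streaks` (StreakInfo[]) are
assumed to be in scope:

    import { isCompanionAvailable, runCompanionChat } from '$lib/modules/companion/engine';

    async function ask(question: string): Promise<string> {
        if (!isCompanionAvailable()) {
            // Neither WebGPU nor PUBLIC_MANA_API_URL is configured
            return 'Companion unavailable.';
        }
        let streamed = '';
        const result = await runCompanionChat(
            question,                       // user message
            [],                             // prior LocalMessage history
            day,                            // DaySnapshot projection
            streaks,                        // StreakInfo[]
            (token) => (streamed += token)  // streams only the first round
        );
        return result.content;
    }
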
diff --git a/apps/mana/apps/web/src/lib/modules/mood/ListView.svelte b/apps/mana/apps/web/src/lib/modules/mood/ListView.svelte
index ce4e8f317..5b510ead3 100644
--- a/apps/mana/apps/web/src/lib/modules/mood/ListView.svelte
+++ b/apps/mana/apps/web/src/lib/modules/mood/ListView.svelte
@@ -54,14 +54,14 @@ }
-{#if showQuickLog}
-	<QuickLog
-		onSave={() => (showQuickLog = false)}
-		onCancel={() => (showQuickLog = false)}
-	/>
-{:else}
-
-
+
+
+	{#if showQuickLog}
+		<QuickLog
+			onSave={() => (showQuickLog = false)}
+			onCancel={() => (showQuickLog = false)}
+		/>
+	{:else}
+	{/if}
 
 	{#if todayEntries.length > 0}
@@ -209,8 +210,7 @@ {/each}
 	{/if}
-
-{/if}
+
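
Reviewer note: the diffs for QuickLog.svelte, event-bridge.ts and
+layout.svelte are truncated from this excerpt. As a reference for
point 3 of the commit message, here is a sketch of the domain-event →
legacy-trigger mapping. Event names, appIds and collections are
illustrative (the real table covers 13 domain event types), and
fireLegacyTrigger() stands in for the existing automation dispatcher:

    type LegacyOp = 'create' | 'update' | 'delete';

    interface LegacyTriggerKey {
        appId: string;
        collection: string;
        op: LegacyOp;
    }

    // Illustrative subset of the 13-entry mapping table
    const EVENT_MAP: Record<string, LegacyTriggerKey> = {
        'mood.entry.created': { appId: 'mood', collection: 'entries', op: 'create' },
        'task.completed': { appId: 'tasks', collection: 'tasks', op: 'update' },
    };

    // Stand-in for the existing legacy automation dispatcher:
    declare function fireLegacyTrigger(key: LegacyTriggerKey, data: Record<string, unknown>): void;

    export function bridgeDomainEvent(type: string, data: Record<string, unknown>): void {
        const key = EVENT_MAP[type];
        if (!key) return; // unmapped domain events are ignored
        // Domain events carry decrypted payloads, so legacy condition
        // matching on fields like `title` sees plaintext values.
        fireLegacyTrigger(key, data);
    }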