mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 21:21:10 +02:00
fix(llm): user-friendly messages + settings link for all LLM errors
Move getUserMessage() to the base LlmError class so every error type gets a German explanation with a clickable settings deep-link:

- TierTooLowError: "Kein KI-Modell aktiviert. Mindestens X benötigt."
- ProviderBlockedError: "… hat die Anfrage blockiert (Inhaltsfilter)."
- BackendUnreachableError: "… ist nicht erreichbar."
- EdgeLoadFailedError: "Browser-Modell konnte nicht geladen werden."
- Generic fallback: also includes the settings link now

The companion engine now catches LlmError (the base class) instead of only NoTierAvailableError, covering all failure modes.

Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
fa31fa0caf
commit
1cfd05939e
2 changed files with 28 additions and 6 deletions
|
|
@@ -11,7 +11,7 @@
|
|||
* routes through text-completion).
|
||||
*/
|
||||
|
||||
import { llmOrchestrator, NoTierAvailableError } from '@mana/shared-llm';
|
||||
import { llmOrchestrator, LlmError } from '@mana/shared-llm';
|
||||
import { isLocalLlmSupported, getLocalLlmStatus, loadLocalLlm } from '@mana/local-llm';
|
||||
import { companionChatTask } from '$lib/llm-tasks/companion-chat';
|
||||
import { generateContextDocument } from '$lib/data/projections/context-document';
|
||||
|
|
@@ -54,11 +54,11 @@ async function callLlm(messages: LlmMessage[], onToken?: (token: string) => void
|
|||
});
|
||||
return result.value.content;
|
||||
} catch (err) {
|
||||
if (err instanceof NoTierAvailableError) {
|
||||
if (err instanceof LlmError) {
|
||||
return err.getUserMessage();
|
||||
}
|
||||
const msg = err instanceof Error ? err.message : String(err);
|
||||
return `LLM nicht verfügbar: ${msg}`;
|
||||
return `LLM nicht verfügbar: ${msg}\n\n[KI-Einstellungen öffnen](/?app=settings#ai-options)`;
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
|||
|
|
@@ -6,11 +6,18 @@
|
|||
|
||||
import type { LlmTier } from './tiers';
|
||||
|
||||
const SETTINGS_LINK = '[KI-Einstellungen öffnen](/?app=settings#ai-options)';
|
||||
|
||||
export class LlmError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'LlmError';
|
||||
}
|
||||
|
||||
/** User-friendly German explanation with settings deep-link (Markdown). */
|
||||
getUserMessage(): string {
|
||||
return `${this.message}\n\n${SETTINGS_LINK}`;
|
||||
}
|
||||
}
|
||||
|
||||
/** Why a specific tier was skipped. */
|
||||
|
|
@@ -88,11 +95,14 @@ export class TierTooLowError extends LlmError {
|
|||
public readonly requiredTier: LlmTier,
|
||||
public readonly userTier: LlmTier
|
||||
) {
|
||||
super(
|
||||
`Task '${taskName}' requires tier '${requiredTier}' but user is on '${userTier}'. Activate the higher tier in settings.`
|
||||
);
|
||||
super(`Task '${taskName}' requires tier '${requiredTier}' but user is on '${userTier}'.`);
|
||||
this.name = 'TierTooLowError';
|
||||
}
|
||||
|
||||
/** German user-facing message: no sufficiently capable AI model is active for this task. */
getUserMessage(): string {
  const requiredLabel = tierLabel(this.requiredTier);
  return `Kein KI-Modell aktiviert. Mindestens **${requiredLabel}** wird benötigt.\n\n${SETTINGS_LINK}`;
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@@ -109,6 +119,10 @@ export class ProviderBlockedError extends LlmError {
|
|||
super(`Provider '${tier}' blocked the request: ${providerMessage}`);
|
||||
this.name = 'ProviderBlockedError';
|
||||
}
|
||||
|
||||
/** German user-facing message: the provider's content filter rejected the request. */
getUserMessage(): string {
  const providerName = tierLabel(this.tier);
  return `**${providerName}** hat die Anfrage blockiert (Inhaltsfilter). Versuche es mit einer anderen Formulierung oder wechsle den Anbieter.\n\n${SETTINGS_LINK}`;
}
|
||||
}
|
||||
|
||||
/** Network/server error from a remote tier (mana-server, cloud). */
|
||||
|
|
@@ -123,6 +137,10 @@ export class BackendUnreachableError extends LlmError {
|
|||
);
|
||||
this.name = 'BackendUnreachableError';
|
||||
}
|
||||
|
||||
/** German user-facing message: the remote LLM backend could not be reached. */
getUserMessage(): string {
  const backendName = tierLabel(this.tier);
  return `**${backendName}** ist nicht erreichbar. Prüfe ob der Service läuft.\n\n${SETTINGS_LINK}`;
}
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@@ -134,4 +152,8 @@ export class EdgeLoadFailedError extends LlmError {
|
|||
super(`Edge LLM failed to load: ${cause}`);
|
||||
this.name = 'EdgeLoadFailedError';
|
||||
}
|
||||
|
||||
/** German user-facing message: the in-browser (edge) model failed to load. */
getUserMessage(): string {
  const explanation = `Browser-Modell konnte nicht geladen werden: ${this.cause}`;
  return [explanation, SETTINGS_LINK].join('\n\n');
}
|
||||
}
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue