diff --git a/apps/chat/apps/web/src/lib/components/compare/CompareInput.svelte b/apps/chat/apps/web/src/lib/components/compare/CompareInput.svelte new file mode 100644 index 000000000..79ffb05bd --- /dev/null +++ b/apps/chat/apps/web/src/lib/components/compare/CompareInput.svelte @@ -0,0 +1,116 @@ + + +
+ + + + +
+ +
+ + onTemperatureChange(parseFloat(e.currentTarget.value))} + disabled={isRunning || disabled} + class="w-24 h-2 bg-muted rounded-full appearance-none cursor-pointer + [&::-webkit-slider-thumb]:appearance-none + [&::-webkit-slider-thumb]:w-4 + [&::-webkit-slider-thumb]:h-4 + [&::-webkit-slider-thumb]:rounded-full + [&::-webkit-slider-thumb]:bg-primary + [&::-webkit-slider-thumb]:cursor-pointer + disabled:opacity-50" + /> +
+ + +
+ + +
+ + +
+ + + +
+
diff --git a/apps/chat/apps/web/src/lib/components/compare/CompareProgress.svelte b/apps/chat/apps/web/src/lib/components/compare/CompareProgress.svelte new file mode 100644 index 000000000..820c8faeb --- /dev/null +++ b/apps/chat/apps/web/src/lib/components/compare/CompareProgress.svelte @@ -0,0 +1,43 @@ + + +
+ +
+

+ Verarbeite Modell {currentIndex + 1} von + {totalModels}: + {currentModelName} +

+ +
+ + +
+
+
+ + +

+ {Math.round(progress)}% +

+
diff --git a/apps/chat/apps/web/src/lib/components/compare/ModelResponseCard.svelte b/apps/chat/apps/web/src/lib/components/compare/ModelResponseCard.svelte new file mode 100644 index 000000000..06d10dcfb --- /dev/null +++ b/apps/chat/apps/web/src/lib/components/compare/ModelResponseCard.svelte @@ -0,0 +1,185 @@ + + +
+ +
+

{result.modelName}

+ + {#if result.status === 'loading'} + + + {statusConfig().label} + + {:else if result.status === 'complete' && formattedDuration()} + {formattedDuration()} + {:else} + {statusConfig().label} + {/if} + +
+ + +
+ {#if result.status === 'pending'} +

Wartet auf Verarbeitung...

+ {:else if result.status === 'loading'} +
+ + + +
+ {:else if result.status === 'error'} +

{result.error || 'Ein Fehler ist aufgetreten'}

+ {:else if result.response} +
+ {@html htmlContent} +
+ {/if} +
+ + + {#if result.status === 'complete' && result.usage} +
+ {result.usage.total_tokens} tokens + {#if tokensPerSecond()} + | + {tokensPerSecond()} t/s + {/if} +
+ {/if} +
+ + diff --git a/apps/chat/apps/web/src/lib/components/compare/ModelResponseGrid.svelte b/apps/chat/apps/web/src/lib/components/compare/ModelResponseGrid.svelte new file mode 100644 index 000000000..cd35c7d85 --- /dev/null +++ b/apps/chat/apps/web/src/lib/components/compare/ModelResponseGrid.svelte @@ -0,0 +1,27 @@ + + +{#if results.length === 0} +
+

Keine Ergebnisse vorhanden.

+

Gib einen Prompt ein und starte den Vergleich.

+
+{:else} +
+ {#each results as result, index (result.modelId)} + + {/each} +
+{/if} diff --git a/apps/chat/apps/web/src/lib/stores/compare.svelte.ts b/apps/chat/apps/web/src/lib/stores/compare.svelte.ts new file mode 100644 index 000000000..90fa2574b --- /dev/null +++ b/apps/chat/apps/web/src/lib/stores/compare.svelte.ts @@ -0,0 +1,183 @@ +/** + * Compare Store - Manages model comparison state using Svelte 5 runes + */ + +import { chatService } from '$lib/services/chat'; +import type { AIModel, CompareModelResult, CompareModelStatus, ChatMessage } from '@chat/types'; + +// State +let results = $state([]); +let prompt = $state(''); +let temperature = $state(0.7); +let maxTokens = $state(1024); +let isRunning = $state(false); +let currentIndex = $state(0); +let abortController = $state(null); + +export const compareStore = { + // Getters + get results() { + return results; + }, + get prompt() { + return prompt; + }, + get temperature() { + return temperature; + }, + get maxTokens() { + return maxTokens; + }, + get isRunning() { + return isRunning; + }, + get currentIndex() { + return currentIndex; + }, + get totalModels() { + return results.length; + }, + get completedCount() { + return results.filter((r) => r.status === 'complete' || r.status === 'error').length; + }, + get currentModelName() { + const current = results[currentIndex]; + return current?.modelName || ''; + }, + get progress() { + if (results.length === 0) return 0; + return (this.completedCount / results.length) * 100; + }, + + // Setters + setPrompt(value: string) { + prompt = value; + }, + setTemperature(value: number) { + temperature = value; + }, + setMaxTokens(value: number) { + maxTokens = value; + }, + + // Actions + async startComparison(models: AIModel[]) { + if (isRunning || !prompt.trim() || models.length === 0) return; + + isRunning = true; + currentIndex = 0; + abortController = new AbortController(); + + // Initialize results with pending status + results = models.map((model) => ({ + modelId: model.id, + modelName: model.name, + status: 'pending' as 
CompareModelStatus, + })); + + // Process models sequentially + for (let i = 0; i < models.length; i++) { + if (abortController?.signal.aborted) break; + + currentIndex = i; + const model = models[i]; + + // Update status to loading + results = results.map((r, idx) => + idx === i ? { ...r, status: 'loading' as CompareModelStatus } : r + ); + + const startTime = Date.now(); + + try { + const messages: ChatMessage[] = [{ role: 'user', content: prompt }]; + + const response = await chatService.createCompletion({ + messages, + modelId: model.id, + temperature, + maxTokens, + }); + + const duration = Date.now() - startTime; + + if (abortController?.signal.aborted) break; + + if (response) { + results = results.map((r, idx) => + idx === i + ? { + ...r, + status: 'complete' as CompareModelStatus, + response: response.content, + duration, + usage: response.usage, + } + : r + ); + } else { + results = results.map((r, idx) => + idx === i + ? { + ...r, + status: 'error' as CompareModelStatus, + error: 'Keine Antwort erhalten', + duration, + } + : r + ); + } + } catch (e) { + const duration = Date.now() - startTime; + if (!abortController?.signal.aborted) { + results = results.map((r, idx) => + idx === i + ? { + ...r, + status: 'error' as CompareModelStatus, + error: e instanceof Error ? e.message : 'Unbekannter Fehler', + duration, + } + : r + ); + } + } + } + + isRunning = false; + abortController = null; + }, + + cancelComparison() { + if (abortController) { + abortController.abort(); + abortController = null; + } + isRunning = false; + + // Mark remaining pending/loading items as cancelled + results = results.map((r) => + r.status === 'pending' || r.status === 'loading' + ? 
{ ...r, status: 'error' as CompareModelStatus, error: 'Abgebrochen' } + : r + ); + }, + + reset() { + results = []; + prompt = ''; + temperature = 0.7; + maxTokens = 1024; + isRunning = false; + currentIndex = 0; + if (abortController) { + abortController.abort(); + abortController = null; + } + }, + + clearResults() { + results = []; + currentIndex = 0; + }, +}; diff --git a/apps/chat/apps/web/src/routes/(protected)/compare/+page.svelte b/apps/chat/apps/web/src/routes/(protected)/compare/+page.svelte new file mode 100644 index 000000000..de99b5427 --- /dev/null +++ b/apps/chat/apps/web/src/routes/(protected)/compare/+page.svelte @@ -0,0 +1,146 @@ + + + + Modell-Vergleich | ManaChat + + +
+
+ +
+

Modell-Vergleich

+

+ Vergleiche Antworten verschiedener lokaler Ollama-Modelle nebeneinander. +

+
+ + {#if isLoading} + +
+
+
+

Lade Modelle...

+
+
+ {:else if error} + +
+

{error}

+ +
+ {:else if ollamaModels.length === 0} + +
+
🤖
+

Keine Ollama-Modelle gefunden

+

+ Es sind keine lokalen Ollama-Modelle verfügbar. Stelle sicher, dass Ollama läuft und + Modelle installiert sind. +

+
+

Verfügbare Modelle ({models.length}):

+
    + {#each models as model} +
  • + {model.provider}: + {model.name} +
  • + {/each} +
+
+
+ {:else} + +
+ + compareStore.setPrompt(v)} + onTemperatureChange={(v) => compareStore.setTemperature(v)} + onMaxTokensChange={(v) => compareStore.setMaxTokens(v)} + onCompare={handleCompare} + /> + + +

+ {ollamaModels.length} Ollama-Modelle verfügbar: + {ollamaModels.map((m) => m.name).join(', ')} +

+ + + {#if compareStore.isRunning} + compareStore.cancelComparison()} + /> + {/if} + + + + + + {#if compareStore.results.length > 0 && !compareStore.isRunning} +
+ +
+ {/if} +
+ {/if} +
+
diff --git a/apps/chat/packages/chat-types/src/index.ts b/apps/chat/packages/chat-types/src/index.ts index fec5145e5..c35e3d12a 100644 --- a/apps/chat/packages/chat-types/src/index.ts +++ b/apps/chat/packages/chat-types/src/index.ts @@ -45,7 +45,7 @@ export interface AIModel { id: string; name: string; description?: string; - provider: 'gemini' | 'azure' | 'openai'; + provider: 'gemini' | 'azure' | 'openai' | 'ollama' | 'openrouter'; parameters: AIModelParameters; isActive: boolean; isDefault: boolean; @@ -125,3 +125,16 @@ export interface Document { export interface DocumentWithConversation extends Document { conversationTitle: string; } + +// Model Comparison Types +export type CompareModelStatus = 'pending' | 'loading' | 'complete' | 'error'; + +export interface CompareModelResult { + modelId: string; + modelName: string; + status: CompareModelStatus; + response?: string; + error?: string; + duration?: number; + usage?: TokenUsage; +} diff --git a/packages/shared-ui/src/navigation/PillNavigation.svelte b/packages/shared-ui/src/navigation/PillNavigation.svelte index 2eee4ccff..12b961930 100644 --- a/packages/shared-ui/src/navigation/PillNavigation.svelte +++ b/packages/shared-ui/src/navigation/PillNavigation.svelte @@ -53,6 +53,7 @@ CreditCard, Buildings, User, + Scales, } from '@manacore/shared-icons'; // Map icon names to Phosphor components @@ -103,6 +104,7 @@ palette: Palette, creditCard: CreditCard, building: Buildings, + scale: Scales, }; // Convert app items to dropdown items (will be computed as derived)