mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 18:41:08 +02:00
feat(articles): bulk-import AI tool wiring (Phase 6)
Adds import_articles_from_urls tool to the articles module so the AI Workbench can kick off a bulk-import job in one call. Auto-policy: the job itself is the unit of approval, no per-article propose card. - shared-ai schemas: declare the tool name + propose/auto policy - articles/tools.ts: implement parseUrls + articleImportsStore.createJob - consume-pickup.ts: handle the new event type - events/catalog.ts: register article-import lifecycle events - imports.svelte.ts: minor polish Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
5f0a1b5053
commit
0fc16d1bfd
5 changed files with 130 additions and 3 deletions
|
|
@ -502,7 +502,19 @@ export interface ArticleSavedPayload {
|
|||
articleId: string;
|
||||
title: string;
|
||||
}
|
||||
export type NewsEventType = 'ArticleSaved';
|
||||
export interface ArticleImportStartedPayload {
|
||||
jobId: string;
|
||||
totalUrls: number;
|
||||
}
|
||||
export interface ArticleImportFinishedPayload {
|
||||
jobId: string;
|
||||
totalUrls: number;
|
||||
savedCount: number;
|
||||
duplicateCount: number;
|
||||
errorCount: number;
|
||||
warningCount: number;
|
||||
}
|
||||
export type NewsEventType = 'ArticleSaved' | 'ArticleImportStarted' | 'ArticleImportFinished';
|
||||
|
||||
// ── Recipes ─────────────────────────────────────────
|
||||
|
||||
|
|
@ -773,6 +785,8 @@ export type ManaEvent =
|
|||
| DomainEvent<'PlantDeleted', PlantDeletedPayload>
|
||||
// News
|
||||
| DomainEvent<'ArticleSaved', ArticleSavedPayload>
|
||||
| DomainEvent<'ArticleImportStarted', ArticleImportStartedPayload>
|
||||
| DomainEvent<'ArticleImportFinished', ArticleImportFinishedPayload>
|
||||
// Recipes
|
||||
| DomainEvent<'RecipeCreated', RecipeCreatedPayload>
|
||||
| DomainEvent<'RecipeDeleted', RecipeDeletedPayload>
|
||||
|
|
|
|||
|
|
@ -22,9 +22,18 @@
|
|||
*/
|
||||
|
||||
import { liveQuery, type Subscription } from 'dexie';
|
||||
import { articleExtractPickupTable, articleImportItemTable } from './collections';
|
||||
import { emitDomainEvent } from '$lib/data/events';
|
||||
import {
|
||||
articleExtractPickupTable,
|
||||
articleImportItemTable,
|
||||
articleImportJobTable,
|
||||
} from './collections';
|
||||
import { articlesStore } from './stores/articles.svelte';
|
||||
import type { ArticleImportItemState, LocalArticleExtractPickup } from './types';
|
||||
import type {
|
||||
ArticleImportItemState,
|
||||
LocalArticleExtractPickup,
|
||||
LocalArticleImportJob,
|
||||
} from './types';
|
||||
|
||||
const LOCK_NAME = 'mana:articles:pickup';
|
||||
|
||||
|
|
@ -33,6 +42,11 @@ const LOCK_NAME = 'mana:articles:pickup';
|
|||
const inFlight = new Set<string>();
|
||||
|
||||
let subscription: Subscription | null = null;
|
||||
let jobWatchSubscription: Subscription | null = null;
|
||||
|
||||
/** Track which jobs we've already emitted ArticleImportFinished for so a
|
||||
* liveQuery re-tick doesn't double-fire when other rows change. */
|
||||
const finishedEmitted = new Set<string>();
|
||||
|
||||
/**
|
||||
* Start watching the pickup inbox. Idempotent — second call returns
|
||||
|
|
@ -61,13 +75,43 @@ export function startArticlePickupConsumer(): () => void {
|
|||
console.error('[articles-import] pickup liveQuery error:', err);
|
||||
},
|
||||
});
|
||||
|
||||
// Independently watch the jobs table for status='done' flips so we
|
||||
// can emit `ArticleImportFinished` once per job. Server-worker
|
||||
// flips the status; this is the only client-side observer for the
|
||||
// terminal transition.
|
||||
const jobsQuery = liveQuery(async () =>
|
||||
articleImportJobTable.filter((j) => j.status === 'done' && !j.deletedAt).toArray()
|
||||
);
|
||||
jobWatchSubscription = jobsQuery.subscribe({
|
||||
next: (jobs: LocalArticleImportJob[]) => {
|
||||
for (const j of jobs) {
|
||||
if (finishedEmitted.has(j.id)) continue;
|
||||
finishedEmitted.add(j.id);
|
||||
emitDomainEvent('ArticleImportFinished', 'articles', 'articleImportJobs', j.id, {
|
||||
jobId: j.id,
|
||||
totalUrls: j.totalUrls,
|
||||
savedCount: j.savedCount ?? 0,
|
||||
duplicateCount: j.duplicateCount ?? 0,
|
||||
errorCount: j.errorCount ?? 0,
|
||||
warningCount: j.warningCount ?? 0,
|
||||
});
|
||||
}
|
||||
},
|
||||
error: (err) => {
|
||||
console.error('[articles-import] job-watch liveQuery error:', err);
|
||||
},
|
||||
});
|
||||
return stopArticlePickupConsumer;
|
||||
}
|
||||
|
||||
export function stopArticlePickupConsumer(): void {
|
||||
subscription?.unsubscribe();
|
||||
subscription = null;
|
||||
jobWatchSubscription?.unsubscribe();
|
||||
jobWatchSubscription = null;
|
||||
inFlight.clear();
|
||||
finishedEmitted.clear();
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
|
|||
|
|
@ -13,6 +13,7 @@
|
|||
* Plan: docs/plans/articles-bulk-import.md.
|
||||
*/
|
||||
|
||||
import { emitDomainEvent } from '$lib/data/events';
|
||||
import { articleImportJobTable, articleImportItemTable } from '../collections';
|
||||
import type {
|
||||
ArticleImportItemState,
|
||||
|
|
@ -119,6 +120,11 @@ export const articleImportsStore = {
|
|||
await articleImportItemTable.bulkAdd(items);
|
||||
await articleImportJobTable.add(job);
|
||||
|
||||
emitDomainEvent('ArticleImportStarted', 'articles', 'articleImportJobs', jobId, {
|
||||
jobId,
|
||||
totalUrls: urls.length,
|
||||
});
|
||||
|
||||
return jobId;
|
||||
},
|
||||
|
||||
|
|
|
|||
|
|
@ -23,6 +23,7 @@ import { scopedForModule, scopedGet } from '$lib/data/scope';
|
|||
import { tagMutations, useAllTags } from '@mana/shared-stores';
|
||||
import type { ModuleTool } from '$lib/data/tools/types';
|
||||
import { articlesStore } from './stores/articles.svelte';
|
||||
import { articleImportsStore, parseUrls } from './stores/imports.svelte';
|
||||
import { highlightsStore } from './stores/highlights.svelte';
|
||||
import { articleTagOps } from './stores/tags.svelte';
|
||||
import { toArticle } from './queries';
|
||||
|
|
@ -305,4 +306,51 @@ export const articlesTools: ModuleTool[] = [
|
|||
};
|
||||
},
|
||||
},
|
||||
|
||||
// ─── Bulk-Import (docs/plans/articles-bulk-import.md) ───
|
||||
{
|
||||
name: 'import_articles_from_urls',
|
||||
module: 'articles',
|
||||
description:
|
||||
'Erstellt einen Bulk-Import-Job für mehrere URLs. Server extrahiert sie nacheinander im Hintergrund. Auto-policy: kein Approval pro Artikel, der Job ist ein einziger Task.',
|
||||
parameters: [
|
||||
{
|
||||
name: 'urls',
|
||||
type: 'array',
|
||||
description: 'Liste der Artikel-URLs (max 50)',
|
||||
required: true,
|
||||
},
|
||||
],
|
||||
execute: async (params: Record<string, unknown>) => {
|
||||
const rawUrls = params.urls;
|
||||
if (!Array.isArray(rawUrls) || rawUrls.length === 0) {
|
||||
return { success: false, message: 'urls muss ein nicht-leeres Array sein' };
|
||||
}
|
||||
if (rawUrls.length > 50) {
|
||||
return {
|
||||
success: false,
|
||||
message: 'Maximal 50 URLs pro Job. Splitte in mehrere Aufrufe.',
|
||||
};
|
||||
}
|
||||
const blob = rawUrls.filter((u): u is string => typeof u === 'string').join('\n');
|
||||
const parsed = parseUrls(blob);
|
||||
if (parsed.valid.length === 0) {
|
||||
return {
|
||||
success: false,
|
||||
message: `Keine gültigen URLs (alle ${rawUrls.length} verworfen)`,
|
||||
};
|
||||
}
|
||||
const jobId = await articleImportsStore.createJob(parsed.valid);
|
||||
return {
|
||||
success: true,
|
||||
message: `Bulk-Import gestartet (${parsed.valid.length} URLs${parsed.duplicates.length ? `, ${parsed.duplicates.length} Duplikate übersprungen` : ''}${parsed.invalid.length ? `, ${parsed.invalid.length} ungültig` : ''})`,
|
||||
data: {
|
||||
jobId,
|
||||
accepted: parsed.valid.length,
|
||||
duplicates: parsed.duplicates.length,
|
||||
invalid: parsed.invalid.length,
|
||||
},
|
||||
};
|
||||
},
|
||||
},
|
||||
];
|
||||
|
|
|
|||
|
|
@ -436,6 +436,21 @@ export const AI_TOOL_CATALOG: readonly ToolSchema[] = [
|
|||
},
|
||||
],
|
||||
},
|
||||
// Schema for the bulk-import entry point. defaultPolicy 'auto': per the
// tool description, the job itself is the unit of approval, so no
// per-article propose card is generated.
{
  name: 'import_articles_from_urls',
  module: 'articles',
  description:
    'Erstellt einen Bulk-Import-Job für mehrere URLs. Server extrahiert sie nacheinander im Hintergrund (Concurrency 3, Retries pro URL). Auto-Policy: kein Approval pro Artikel, der Job wird als ein einziger Task angelegt. Returns die jobId zum Tracking.',
  defaultPolicy: 'auto',
  parameters: [
    // Single required parameter: up to 50 URLs per call; larger batches
    // must be split by the caller (enforced by the tool implementation).
    {
      name: 'urls',
      type: 'array',
      description: 'Liste der Artikel-URLs (max 50)',
      required: true,
    },
  ],
},
|
||||
{
|
||||
name: 'archive_article',
|
||||
module: 'articles',
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue