mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 19:41:09 +02:00
feat(sync): .mana backup import — zip parser + replay (M4a)
Client-side restore for the same-account case: - lib/data/backup/format.ts: hand-rolled .mana (zip) parser. Walks the central directory, inflates DEFLATE entries via pako (already in the repo), exposes manifest + events.jsonl + recomputed sha256. No new dependency; the archive shape is narrow enough that 200 lines cover it. - lib/data/backup/import.ts: validates manifest (userId match is hard- refused, eventsSha256 must match, schemaVersionMax ≤ client support), streams events through iterateEvents(), batches 300 per appId and replays via the existing applyServerChanges() path. LWW makes the operation idempotent. - settings/my-data: file picker, progress bar, per-phase labels, success summary with event count + source timestamp. Scope is intentionally same-account only: events originate from the server for this user, so re-pushing them is unnecessary. Cross-account migration needs the MK transfer path in M5. Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
cf9f4ecd52
commit
7aee552ab4
3 changed files with 594 additions and 1 deletions
259
apps/mana/apps/web/src/lib/data/backup/format.ts
Normal file
259
apps/mana/apps/web/src/lib/data/backup/format.ts
Normal file
|
|
@ -0,0 +1,259 @@
|
|||
/**
|
||||
* .mana archive parser — client side.
|
||||
*
|
||||
* mana-sync emits a small, well-defined zip (archive/zip) with exactly two
|
||||
* entries: events.jsonl and manifest.json, both DEFLATE-compressed, no
|
||||
* encryption, no multi-part, no Zip64. That narrow scope means we can hand-
|
||||
* roll the parser against the central-directory record format rather than
|
||||
* pull in a ~20KB zip dependency.
|
||||
*
|
||||
* Inflate itself runs through `pako`, which the repo already uses for
|
||||
* spiral-db and qr-export PNG compression — so no new dependency is added.
|
||||
*
|
||||
* The parser is structured so the importer can stream events.jsonl line by
|
||||
* line without materializing the entire (potentially large) decompressed
|
||||
* body, though at this file-size scale we do decompress-to-string for
|
||||
* simplicity. If users ever ship multi-GB backups we can swap the jsonl
|
||||
* entry for a chunk iterator without changing the public surface.
|
||||
*/
|
||||
|
||||
import { inflateRaw } from 'pako';
|
||||
|
||||
/** Container format version this parser understands (manifest.formatVersion). */
export const BACKUP_FORMAT_VERSION = 1;
/** File extension of backup archives produced by mana-sync. */
export const BACKUP_FILENAME_EXT = '.mana';
|
||||
|
||||
/**
 * Everything from manifest.json, plus the decoded events.jsonl body. Kept
 * tight so it round-trips cleanly through the import UI without pulling any
 * extra zip-format leakage into the rest of the app.
 */
export interface ParsedBackup {
  /** Parsed and structurally validated manifest.json. */
  manifest: BackupManifest;
  /** Raw decompressed events.jsonl text; feed to iterateEvents(). */
  eventsJsonl: string;
  /** Re-computed sha256 of the uncompressed events.jsonl; hex string. */
  computedEventsSha256: string;
}
|
||||
|
||||
/** Shape of manifest.json inside a .mana archive. */
export interface BackupManifest {
  /** Container version; must equal BACKUP_FORMAT_VERSION. */
  formatVersion: number;
  /** Schema version of the contained events. */
  schemaVersion: number;
  /** Owner of the backed-up events; import refuses a mismatch. */
  userId: string;
  /** Timestamp at which the backup was produced (shown in the import UI). */
  createdAt: string;
  /** Number of event lines in events.jsonl; drives the progress bar. */
  eventCount: number;
  /** Hex sha256 of the uncompressed events.jsonl, for the integrity check. */
  eventsSha256: string;
  /** App ids that contributed events to this backup. */
  apps: string[];
  /** Optional producer identifier. */
  producedBy?: string;
  /** Lowest event schemaVersion present, when the producer records it. */
  schemaVersionMin?: number;
  /** Highest event schemaVersion present; used for the schema-too-new gate. */
  schemaVersionMax?: number;
}
|
||||
|
||||
/** One event row from events.jsonl (raw-store shape: data + fieldTimestamps). */
export interface BackupEvent {
  /** Unique id of this sync event. */
  eventId: string;
  /** Schema version the event was written under. */
  schemaVersion: number;
  /** App the event belongs to; the importer batches per appId. */
  appId: string;
  table: string;
  id: string;
  op: 'insert' | 'update' | 'delete';
  /** Row payload; for deletes it may carry deletedAt. */
  data?: Record<string, unknown>;
  /** Per-field timestamps (updates only), folded into fields on replay. */
  fieldTimestamps?: Record<string, string>;
  clientId: string;
  createdAt: string;
}
|
||||
|
||||
// ─── Public API ─────────────────────────────────────────────────
|
||||
|
||||
/**
|
||||
* Parse a .mana file into its manifest + raw events.jsonl. Also re-hashes
|
||||
* the decompressed events body with SHA-256 so the caller can compare
|
||||
* against manifest.eventsSha256 for integrity.
|
||||
*/
|
||||
export async function parseBackup(file: Blob): Promise<ParsedBackup> {
|
||||
const buf = new Uint8Array(await file.arrayBuffer());
|
||||
const entries = readZipEntries(buf);
|
||||
|
||||
const manifestEntry = entries.get('manifest.json');
|
||||
const eventsEntry = entries.get('events.jsonl');
|
||||
if (!manifestEntry) throw new BackupParseError('missing manifest.json in archive');
|
||||
if (!eventsEntry) throw new BackupParseError('missing events.jsonl in archive');
|
||||
|
||||
const manifestText = new TextDecoder().decode(inflateEntry(manifestEntry));
|
||||
let manifest: BackupManifest;
|
||||
try {
|
||||
manifest = JSON.parse(manifestText);
|
||||
} catch (e) {
|
||||
throw new BackupParseError(`manifest.json is not valid JSON: ${(e as Error).message}`);
|
||||
}
|
||||
validateManifest(manifest);
|
||||
|
||||
const eventsBytes = inflateEntry(eventsEntry);
|
||||
const eventsJsonl = new TextDecoder().decode(eventsBytes);
|
||||
|
||||
const computedEventsSha256 = await sha256Hex(eventsBytes);
|
||||
|
||||
return { manifest, eventsJsonl, computedEventsSha256 };
|
||||
}
|
||||
|
||||
/**
|
||||
* Yield events from the JSONL body one at a time. Skips blank lines; throws
|
||||
* on a non-parseable row so corruption is not silently masked. Returns a
|
||||
* generator so the caller can stream apply-batches without loading all
|
||||
* events into a single array.
|
||||
*/
|
||||
export function* iterateEvents(jsonl: string): Generator<BackupEvent> {
|
||||
let start = 0;
|
||||
while (start < jsonl.length) {
|
||||
const nl = jsonl.indexOf('\n', start);
|
||||
const end = nl === -1 ? jsonl.length : nl;
|
||||
const line = jsonl.slice(start, end).trim();
|
||||
start = end + 1;
|
||||
if (!line) continue;
|
||||
try {
|
||||
yield JSON.parse(line) as BackupEvent;
|
||||
} catch (e) {
|
||||
throw new BackupParseError(`events.jsonl line parse failed: ${(e as Error).message}`);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
export class BackupParseError extends Error {
|
||||
constructor(message: string) {
|
||||
super(message);
|
||||
this.name = 'BackupParseError';
|
||||
}
|
||||
}
|
||||
|
||||
// ─── Validation ─────────────────────────────────────────────────
|
||||
|
||||
function validateManifest(m: unknown): asserts m is BackupManifest {
|
||||
if (!m || typeof m !== 'object') throw new BackupParseError('manifest must be an object');
|
||||
const o = m as Record<string, unknown>;
|
||||
if (typeof o.formatVersion !== 'number')
|
||||
throw new BackupParseError('manifest.formatVersion missing');
|
||||
if (o.formatVersion !== BACKUP_FORMAT_VERSION) {
|
||||
throw new BackupParseError(
|
||||
`unsupported backup formatVersion ${o.formatVersion} (this build supports ${BACKUP_FORMAT_VERSION})`
|
||||
);
|
||||
}
|
||||
if (typeof o.userId !== 'string' || !o.userId)
|
||||
throw new BackupParseError('manifest.userId missing');
|
||||
if (typeof o.eventsSha256 !== 'string' || !o.eventsSha256)
|
||||
throw new BackupParseError('manifest.eventsSha256 missing');
|
||||
if (typeof o.eventCount !== 'number') throw new BackupParseError('manifest.eventCount missing');
|
||||
if (!Array.isArray(o.apps)) throw new BackupParseError('manifest.apps missing');
|
||||
}
|
||||
|
||||
// ─── Zip parser (central directory only) ───────────────────────
|
||||
//
|
||||
// ZIP structure we rely on:
|
||||
// End Of Central Directory Record (EOCD) at the tail
|
||||
// Central Directory entries (one per file)
|
||||
// Local File Header + data for each file, earlier in the stream
|
||||
//
|
||||
// We locate EOCD, walk the central directory, and for each entry seek to
|
||||
// the local header to read the actual compressed payload. This is the
|
||||
// standard "seek-by-central-dir" approach and matches what libraries like
|
||||
// fflate and jszip do internally.
|
||||
|
||||
/** One central-directory record plus the backing buffer for later reads. */
interface ZipEntry {
  nameUtf8: string;
  method: number; // 0 = stored, 8 = deflate
  crc32: number;
  compressedSize: number;
  uncompressedSize: number;
  localHeaderOffset: number;
  source: Uint8Array; // full archive buffer, held so inflate can seek
}

// Little-endian zip magic numbers (see PKWARE APPNOTE.TXT).
const SIG_EOCD = 0x06054b50; // End Of Central Directory record
const SIG_CENTRAL = 0x02014b50; // central-directory file header
const SIG_LOCAL = 0x04034b50; // local file header
|
||||
|
||||
function readZipEntries(buf: Uint8Array): Map<string, ZipEntry> {
|
||||
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
|
||||
// Find EOCD by scanning backward from the tail. The comment field is up
|
||||
// to 65535 bytes, so in the worst case we scan 65557 bytes — fine.
|
||||
const eocdOffset = findEOCD(view);
|
||||
if (eocdOffset < 0) throw new BackupParseError('not a valid zip archive (no EOCD)');
|
||||
|
||||
const entryCount = view.getUint16(eocdOffset + 10, true);
|
||||
const cdOffset = view.getUint32(eocdOffset + 16, true);
|
||||
|
||||
const entries = new Map<string, ZipEntry>();
|
||||
let p = cdOffset;
|
||||
for (let i = 0; i < entryCount; i++) {
|
||||
if (view.getUint32(p, true) !== SIG_CENTRAL) {
|
||||
throw new BackupParseError('central directory entry signature mismatch');
|
||||
}
|
||||
const method = view.getUint16(p + 10, true);
|
||||
const crc32 = view.getUint32(p + 16, true);
|
||||
const compressedSize = view.getUint32(p + 20, true);
|
||||
const uncompressedSize = view.getUint32(p + 24, true);
|
||||
const nameLen = view.getUint16(p + 28, true);
|
||||
const extraLen = view.getUint16(p + 30, true);
|
||||
const commentLen = view.getUint16(p + 32, true);
|
||||
const localHeaderOffset = view.getUint32(p + 42, true);
|
||||
const nameUtf8 = new TextDecoder('utf-8').decode(buf.subarray(p + 46, p + 46 + nameLen));
|
||||
|
||||
entries.set(nameUtf8, {
|
||||
nameUtf8,
|
||||
method,
|
||||
crc32,
|
||||
compressedSize,
|
||||
uncompressedSize,
|
||||
localHeaderOffset,
|
||||
source: buf,
|
||||
});
|
||||
|
||||
p += 46 + nameLen + extraLen + commentLen;
|
||||
}
|
||||
return entries;
|
||||
}
|
||||
|
||||
function findEOCD(view: DataView): number {
|
||||
const maxCommentLen = 65535;
|
||||
const minOffset = Math.max(0, view.byteLength - 22 - maxCommentLen);
|
||||
for (let i = view.byteLength - 22; i >= minOffset; i--) {
|
||||
if (view.getUint32(i, true) === SIG_EOCD) return i;
|
||||
}
|
||||
return -1;
|
||||
}
|
||||
|
||||
function inflateEntry(entry: ZipEntry): Uint8Array {
|
||||
const buf = entry.source;
|
||||
const view = new DataView(buf.buffer, buf.byteOffset, buf.byteLength);
|
||||
const p = entry.localHeaderOffset;
|
||||
if (view.getUint32(p, true) !== SIG_LOCAL) {
|
||||
throw new BackupParseError(`local header signature mismatch for ${entry.nameUtf8}`);
|
||||
}
|
||||
const nameLen = view.getUint16(p + 26, true);
|
||||
const extraLen = view.getUint16(p + 28, true);
|
||||
const dataStart = p + 30 + nameLen + extraLen;
|
||||
const compressed = buf.subarray(dataStart, dataStart + entry.compressedSize);
|
||||
|
||||
switch (entry.method) {
|
||||
case 0:
|
||||
return compressed.slice();
|
||||
case 8:
|
||||
return inflateRaw(compressed);
|
||||
default:
|
||||
throw new BackupParseError(`unsupported zip compression method ${entry.method}`);
|
||||
}
|
||||
}
|
||||
|
||||
// ─── SHA-256 ────────────────────────────────────────────────────
|
||||
|
||||
async function sha256Hex(bytes: Uint8Array): Promise<string> {
|
||||
// Copy into a fresh ArrayBuffer so subtle.digest is happy regardless of
|
||||
// whether the input is backed by SharedArrayBuffer — the DOM typings
|
||||
// refuse ArrayBufferLike unions even though runtime accepts them.
|
||||
const copy = new Uint8Array(bytes.byteLength);
|
||||
copy.set(bytes);
|
||||
const digest = await crypto.subtle.digest('SHA-256', copy.buffer);
|
||||
const hex: string[] = [];
|
||||
const view = new Uint8Array(digest);
|
||||
for (let i = 0; i < view.length; i++) {
|
||||
hex.push(view[i].toString(16).padStart(2, '0'));
|
||||
}
|
||||
return hex.join('');
|
||||
}
|
||||
218
apps/mana/apps/web/src/lib/data/backup/import.ts
Normal file
218
apps/mana/apps/web/src/lib/data/backup/import.ts
Normal file
|
|
@ -0,0 +1,218 @@
|
|||
/**
|
||||
* Backup import — streams a .mana archive into IndexedDB.
|
||||
*
|
||||
* Flow:
|
||||
*
|
||||
* 1. parseBackup() unzips the container and re-hashes events.jsonl.
|
||||
* 2. validate manifest:
|
||||
* - formatVersion supported (enforced inside parseBackup)
|
||||
* - userId matches the currently signed-in user (refuse otherwise —
|
||||
* accidental restore into someone else's account would be a privacy
|
||||
* disaster)
|
||||
* - eventsSha256 matches the recomputed hash (integrity)
|
||||
* 3. iterate events, group by appId, apply in batches via the existing
|
||||
* applyServerChanges() path. That function already handles LWW, type
|
||||
* guards, suppressed hooks, and quota recovery — reusing it means
|
||||
* imported events can never diverge from the server's own apply logic.
|
||||
*
|
||||
* Idempotency: applyServerChanges is LWW-safe, so re-running import with
|
||||
* the same file is a no-op beyond wasted work. A future optimization will
|
||||
* write eventIds into a _appliedEventIds dedup table, but the LWW semantics
|
||||
* already make the operation safe today.
|
||||
*
|
||||
* Scope (M4a): same-account restore. Events originate from mana-sync for
|
||||
* this user; after import, IndexedDB is repopulated without re-pushing to
|
||||
* the server (server already has every event, LWW would dedupe anyway).
|
||||
* Cross-account migration requires the MK transfer path (M5).
|
||||
*/
|
||||
|
||||
import { applyServerChanges, type SyncChange } from '$lib/data/sync';
|
||||
import { authStore } from '$lib/stores/auth.svelte';
|
||||
import { iterateEvents, parseBackup, type BackupEvent, type ParsedBackup } from './format';
|
||||
|
||||
/** Emitted periodically during import so the UI can drive a progress bar. */
export interface ImportProgress {
  /** Current import phase; 'done' is emitted once at the very end. */
  phase: 'parsing' | 'validating' | 'applying' | 'done';
  /** Events applied so far. */
  applied: number;
  /** Expected total (manifest.eventCount; 0 while still parsing). */
  total: number;
  /** App whose batch is currently being applied, when known. */
  currentAppId?: string;
}
|
||||
|
||||
/** Optional tuning knobs for importBackup. */
export interface ImportOptions {
  /**
   * If true, skip the eventsSha256 integrity check. Reserved for CLI
   * debugging — production UI should always leave this false.
   */
  skipIntegrityCheck?: boolean;
  /**
   * Called after each batch so the UI can render progress. Called at
   * least once with phase='done' on successful completion.
   */
  onProgress?: (p: ImportProgress) => void;
}
|
||||
|
||||
/** Summary returned by importBackup on success. */
export interface ImportResult {
  /** Manifest of the imported archive (source timestamp, apps, counts). */
  manifest: ParsedBackup['manifest'];
  /** Total events replayed. */
  appliedEvents: number;
  /** Events applied per appId. */
  perApp: Record<string, number>;
}
|
||||
|
||||
export class BackupImportError extends Error {
|
||||
constructor(
|
||||
message: string,
|
||||
public readonly kind:
|
||||
| 'parse'
|
||||
| 'user-mismatch'
|
||||
| 'integrity'
|
||||
| 'schema-too-new'
|
||||
| 'not-authenticated'
|
||||
| 'apply'
|
||||
) {
|
||||
super(message);
|
||||
this.name = 'BackupImportError';
|
||||
}
|
||||
}
|
||||
|
||||
/** Events per applyServerChanges() call; also the progress-report cadence. */
const APPLY_BATCH_SIZE = 300;

// Mirrors CURRENT_SCHEMA_VERSION in sync.ts. We can't import the constant
// here without pulling sync.ts into every code path, but a tiny duplicate
// keyed on the same const is easier to audit than a transitive import.
// Update in lockstep when bumping the protocol version.
const MAX_SUPPORTED_IMPORT_SCHEMA_VERSION = 1;
|
||||
|
||||
/**
|
||||
* Import a user-provided .mana file into IndexedDB. Throws on user-mismatch,
|
||||
* integrity failure, or unsupported schema version. Callers should catch
|
||||
* BackupImportError and surface `kind` to the UI so the user gets a
|
||||
* specific error message instead of a generic "import failed".
|
||||
*/
|
||||
export async function importBackup(file: File, opts: ImportOptions = {}): Promise<ImportResult> {
|
||||
const { onProgress, skipIntegrityCheck = false } = opts;
|
||||
|
||||
const currentUserId = authStore.user?.id;
|
||||
if (!currentUserId) {
|
||||
throw new BackupImportError(
|
||||
'not signed in — log in before importing a backup',
|
||||
'not-authenticated'
|
||||
);
|
||||
}
|
||||
|
||||
onProgress?.({ phase: 'parsing', applied: 0, total: 0 });
|
||||
let parsed: ParsedBackup;
|
||||
try {
|
||||
parsed = await parseBackup(file);
|
||||
} catch (e) {
|
||||
throw new BackupImportError(`parse failed: ${(e as Error).message}`, 'parse');
|
||||
}
|
||||
const { manifest, eventsJsonl, computedEventsSha256 } = parsed;
|
||||
|
||||
onProgress?.({ phase: 'validating', applied: 0, total: manifest.eventCount });
|
||||
|
||||
if (manifest.userId !== currentUserId) {
|
||||
throw new BackupImportError(
|
||||
`backup is for user ${manifest.userId}, but you are signed in as ${currentUserId}`,
|
||||
'user-mismatch'
|
||||
);
|
||||
}
|
||||
|
||||
if (!skipIntegrityCheck && manifest.eventsSha256 !== computedEventsSha256) {
|
||||
throw new BackupImportError(
|
||||
`events.jsonl integrity check failed (manifest=${manifest.eventsSha256}, computed=${computedEventsSha256})`,
|
||||
'integrity'
|
||||
);
|
||||
}
|
||||
|
||||
const highestSeen = manifest.schemaVersionMax ?? manifest.schemaVersion;
|
||||
if (highestSeen > MAX_SUPPORTED_IMPORT_SCHEMA_VERSION) {
|
||||
throw new BackupImportError(
|
||||
`backup contains events at schemaVersion=${highestSeen}; this build only supports up to ${MAX_SUPPORTED_IMPORT_SCHEMA_VERSION}. Update the app and try again.`,
|
||||
'schema-too-new'
|
||||
);
|
||||
}
|
||||
|
||||
// ─── Replay ───────────────────────────────────────────────
|
||||
// Group by appId inside each batch so applyServerChanges can scope its
|
||||
// per-table apply lock tightly. Batches are kept small enough to stay
|
||||
// responsive (progress reports every 300 events) but large enough that
|
||||
// the per-call overhead doesn't dominate.
|
||||
const perApp: Record<string, number> = {};
|
||||
let applied = 0;
|
||||
|
||||
const batch: Record<string, SyncChange[]> = {};
|
||||
let batchCount = 0;
|
||||
|
||||
const flush = async () => {
|
||||
for (const [appId, changes] of Object.entries(batch)) {
|
||||
if (changes.length === 0) continue;
|
||||
onProgress?.({ phase: 'applying', applied, total: manifest.eventCount, currentAppId: appId });
|
||||
try {
|
||||
await applyServerChanges(appId, changes);
|
||||
} catch (e) {
|
||||
throw new BackupImportError(
|
||||
`apply failed for app=${appId}: ${(e as Error).message}`,
|
||||
'apply'
|
||||
);
|
||||
}
|
||||
perApp[appId] = (perApp[appId] ?? 0) + changes.length;
|
||||
applied += changes.length;
|
||||
batch[appId] = [];
|
||||
}
|
||||
batchCount = 0;
|
||||
};
|
||||
|
||||
for (const event of iterateEvents(eventsJsonl)) {
|
||||
const change = toSyncChange(event);
|
||||
if (!batch[event.appId]) batch[event.appId] = [];
|
||||
batch[event.appId].push(change);
|
||||
batchCount++;
|
||||
if (batchCount >= APPLY_BATCH_SIZE) {
|
||||
await flush();
|
||||
}
|
||||
}
|
||||
if (batchCount > 0) await flush();
|
||||
|
||||
onProgress?.({ phase: 'done', applied, total: manifest.eventCount });
|
||||
|
||||
return { manifest, appliedEvents: applied, perApp };
|
||||
}
|
||||
|
||||
// ─── Event → SyncChange mapping ─────────────────────────────────
|
||||
// The backup JSONL stores raw-store shape (data + fieldTimestamps). The
|
||||
// sync-engine's SyncChange uses folded shape (fields: { key: { value,
|
||||
// updatedAt } }) for updates. This mirrors the server-side projection in
|
||||
// mana-sync's changeFromRow.
|
||||
|
||||
function toSyncChange(event: BackupEvent): SyncChange {
|
||||
const base: SyncChange = {
|
||||
eventId: event.eventId,
|
||||
schemaVersion: event.schemaVersion,
|
||||
table: event.table,
|
||||
id: event.id,
|
||||
op: event.op,
|
||||
};
|
||||
|
||||
switch (event.op) {
|
||||
case 'insert':
|
||||
base.data = event.data ?? {};
|
||||
break;
|
||||
case 'update':
|
||||
if (event.data && event.fieldTimestamps) {
|
||||
const fields: Record<string, { value: unknown; updatedAt: string }> = {};
|
||||
for (const [key, updatedAt] of Object.entries(event.fieldTimestamps)) {
|
||||
if (key in event.data) {
|
||||
fields[key] = { value: event.data[key], updatedAt };
|
||||
}
|
||||
}
|
||||
base.fields = fields;
|
||||
}
|
||||
break;
|
||||
case 'delete': {
|
||||
const deletedAt = event.data?.deletedAt;
|
||||
if (typeof deletedAt === 'string') base.deletedAt = deletedAt;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
return base;
|
||||
}
|
||||
|
|
@ -20,6 +20,12 @@
|
|||
import QRExportModal from '$lib/components/my-data/QRExportModal.svelte';
|
||||
import { myDataService, type UserDataSummary } from '$lib/api/services/my-data';
|
||||
import { backupService } from '$lib/api/services/backup';
|
||||
import {
|
||||
importBackup,
|
||||
BackupImportError,
|
||||
type ImportProgress,
|
||||
type ImportResult,
|
||||
} from '$lib/data/backup/import';
|
||||
import type { DeleteUserDataResponse } from '$lib/api/services/admin';
|
||||
import { authStore } from '$lib/stores/auth.svelte';
|
||||
|
||||
|
|
@ -37,7 +43,7 @@
|
|||
// QR Export dialog state
|
||||
let showQRDialog = $state(false);
|
||||
|
||||
// Backup (M1 thin slice) state
|
||||
// Backup download state
|
||||
let backupLoading = $state(false);
|
||||
let backupError = $state<string | null>(null);
|
||||
|
||||
|
|
@ -53,6 +59,55 @@
|
|||
}
|
||||
}
|
||||
|
||||
// Backup import state
|
||||
let importInput = $state<HTMLInputElement | null>(null);
|
||||
let importing = $state(false);
|
||||
let importProgress = $state<ImportProgress | null>(null);
|
||||
let importResult = $state<ImportResult | null>(null);
|
||||
let importError = $state<string | null>(null);
|
||||
|
||||
async function handleImportFileChange(e: Event) {
|
||||
const input = e.currentTarget as HTMLInputElement;
|
||||
const file = input.files?.[0];
|
||||
input.value = '';
|
||||
if (!file) return;
|
||||
|
||||
importing = true;
|
||||
importError = null;
|
||||
importResult = null;
|
||||
importProgress = { phase: 'parsing', applied: 0, total: 0 };
|
||||
|
||||
try {
|
||||
const result = await importBackup(file, {
|
||||
onProgress: (p) => (importProgress = p),
|
||||
});
|
||||
importResult = result;
|
||||
} catch (e) {
|
||||
if (e instanceof BackupImportError) {
|
||||
importError = `${e.kind}: ${e.message}`;
|
||||
} else {
|
||||
importError = e instanceof Error ? e.message : 'Import fehlgeschlagen';
|
||||
}
|
||||
} finally {
|
||||
importing = false;
|
||||
}
|
||||
}
|
||||
|
||||
function importProgressLabel(p: ImportProgress): string {
|
||||
switch (p.phase) {
|
||||
case 'parsing':
|
||||
return 'Archiv wird entpackt…';
|
||||
case 'validating':
|
||||
return 'Manifest & Integritat werden gepruft…';
|
||||
case 'applying':
|
||||
return p.currentAppId
|
||||
? `Wende Events an (${p.applied}/${p.total}) — ${p.currentAppId}`
|
||||
: `Wende Events an (${p.applied}/${p.total})`;
|
||||
case 'done':
|
||||
return `Fertig — ${p.applied} Events eingespielt`;
|
||||
}
|
||||
}
|
||||
|
||||
async function loadMyData() {
|
||||
loading = true;
|
||||
error = null;
|
||||
|
|
@ -412,6 +467,67 @@
|
|||
{#if backupError}
|
||||
<p class="text-sm text-red-600 mt-3">{backupError}</p>
|
||||
{/if}
|
||||
|
||||
<!-- Import -->
|
||||
<div class="mt-6 pt-6 border-t">
|
||||
<h4 class="font-medium mb-2">Backup einspielen</h4>
|
||||
<p class="text-sm text-muted-foreground mb-3">
|
||||
Wahle eine <code>.mana</code>-Datei aus. Die enthaltenen Events werden in deine lokale
|
||||
Datenbank gespielt — nur Backups deines eigenen Accounts werden akzeptiert.
|
||||
</p>
|
||||
<div class="flex items-center gap-3">
|
||||
<input
|
||||
bind:this={importInput}
|
||||
type="file"
|
||||
accept=".mana,application/zip"
|
||||
onchange={handleImportFileChange}
|
||||
disabled={importing}
|
||||
class="hidden"
|
||||
/>
|
||||
<button
|
||||
onclick={() => importInput?.click()}
|
||||
disabled={importing}
|
||||
class="flex items-center gap-2 px-4 py-2 border rounded-lg hover:bg-muted disabled:opacity-50 transition-colors"
|
||||
>
|
||||
<DownloadSimple size={16} class="rotate-180" />
|
||||
<span>{importing ? 'Importiere…' : 'Datei wahlen…'}</span>
|
||||
</button>
|
||||
</div>
|
||||
|
||||
{#if importProgress}
|
||||
<div class="mt-3">
|
||||
<p class="text-sm">{importProgressLabel(importProgress)}</p>
|
||||
{#if importProgress.total > 0}
|
||||
<div class="mt-2 h-2 bg-muted rounded overflow-hidden">
|
||||
<div
|
||||
class="h-full bg-indigo-500 transition-all"
|
||||
style="width: {Math.min(
|
||||
100,
|
||||
Math.round((importProgress.applied / importProgress.total) * 100)
|
||||
)}%"
|
||||
></div>
|
||||
</div>
|
||||
{/if}
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if importResult}
|
||||
<div
|
||||
class="mt-3 p-3 rounded-lg bg-green-50 dark:bg-green-900/20 border border-green-200 dark:border-green-800"
|
||||
>
|
||||
<p class="text-sm text-green-800 dark:text-green-200">
|
||||
<CheckCircle size={14} class="inline" weight="fill" />
|
||||
{importResult.appliedEvents} Events aus Backup vom
|
||||
{formatDate(importResult.manifest.createdAt)} eingespielt ({importResult.manifest
|
||||
.apps.length} Apps).
|
||||
</p>
|
||||
</div>
|
||||
{/if}
|
||||
|
||||
{#if importError}
|
||||
<p class="text-sm text-red-600 mt-3">{importError}</p>
|
||||
{/if}
|
||||
</div>
|
||||
</div>
|
||||
</Card>
|
||||
|
||||
|
|
|
|||
Loading…
Add table
Add a link
Reference in a new issue