mirror of https://github.com/Memo-2023/mana-monorepo.git (synced 2026-05-14 20:01:09 +02:00)
fix(workbench): dedup duplicate Home scenes accumulated by seeding race
The Home-seeder in workbench-scenes.svelte.ts writes new scenes without spaceId, so the creating-hook stamps them with the _personal:<userId> sentinel. The per-space dedup check filters by the real space UUID and never finds them — every login adds another Home row, and every visit to a non-personal Space (Brand/Family/Team) drops yet another seed into the personal Space.

This is layer D-soft of the broader cleanup plan (docs/plans/workbench-seeding-cleanup.md): a one-shot dedup pass that collapses duplicate "Home" rows per spaceId, merging openApps from the losers into the survivor (most apps wins, ties broken by most-recent updatedAt) and soft-deleting the rest so mana-sync propagates the cleanup to other devices. It touches only rows that look like fresh default seeds; anything customized (description, wallpaper, agent binding, scope tags, a non-Home name) is left alone.

Wired in two places: a Dexie v48 upgrade so it runs once per device on schema bump, and a belt-and-suspenders pass in (app)/+layout.svelte right after reconcileSentinels() to catch the edge case where sentinel-stamped rows have just collapsed into the same UUID group as already-reconciled rows.

The structural fix that prevents new duplicates from ever forming (per-space-seeds registry + deterministic seed ids + creating-hook hardening) ships in follow-up commits per the plan.

Co-Authored-By: Claude Opus 4.7 (1M context) <noreply@anthropic.com>
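For orientation, a minimal sketch of that race, distilled from the description above; the helper names (stampOnCreate, maybeSeedHome, hasHomeIn) and the exact filter shape are illustrative assumptions, not the app's actual code:

// Hypothetical miniature of the seeding race. Names and shapes are
// assumptions for illustration; the real logic lives in
// stores/workbench-scenes.svelte.ts and the Dexie creating-hook.
type Scene = { id: string; name: string; spaceId?: string };

const sentinelFor = (userId: string): string => `_personal:${userId}`;

// Creating-hook behaviour: rows written without a spaceId get the sentinel.
function stampOnCreate(row: Scene, userId: string): void {
  if (!row.spaceId) row.spaceId = sentinelFor(userId);
}

// Per-space dedup check: compares against the real space UUID, so
// sentinel-stamped rows never match it.
function hasHomeIn(scenes: Scene[], spaceUuid: string): boolean {
  return scenes.some((s) => s.name === 'Home' && s.spaceId === spaceUuid);
}

// Which means the seeder re-fires on every login:
function maybeSeedHome(scenes: Scene[], spaceUuid: string, userId: string): void {
  if (hasHomeIn(scenes, spaceUuid)) return; // never true for sentinel-stamped rows
  const fresh: Scene = { id: crypto.randomUUID(), name: 'Home' };
  stampOnCreate(fresh, userId); // spaceId becomes '_personal:<userId>', not the UUID
  scenes.push(fresh); // so every login appends another Home
}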
This commit is contained in:
parent ac12b61de2
commit d62ae8f1e3
5 changed files with 507 additions and 0 deletions
@@ -1079,6 +1079,29 @@ db.version(46).stores({
  _scopeCursor: null,
});

// v48 — One-shot dedup of duplicate "Home" scenes that the seeding race
// in `stores/workbench-scenes.svelte.ts` has been accumulating since the
// Spaces-Foundation migration shipped 2026-04-22. The seeder writes new
// scenes without `spaceId`, so the creating-hook stamps them with the
// `_personal:<userId>` sentinel. The dedup check in
// `onActiveSpaceChanged` filters by the *real* space UUID and never
// finds them — every login adds another Home row.
//
// This upgrade is the soft cleanup. The structural fix (per-space-seeds
// registry + deterministic ids + creating-hook hardening) ships in
// follow-up commits — see docs/plans/workbench-seeding-cleanup.md.
//
// No schema/index change. The upgrade only soft-deletes the loser rows
// (sets `deletedAt`) so mana-sync propagates the cleanup to other
// devices instead of resurrecting them on next pull.
db.version(48).upgrade(async (tx) => {
  const { dedupHomeScenesOn } = await import('./scope/dedup-workbench-scenes');
  const removed = await dedupHomeScenesOn(tx.table('workbenchScenes'));
  if (removed > 0) {
    console.info(`[workbench-scenes v48] deduped ${removed} duplicate Home scenes`);
  }
});

// ─── Sync Routing ──────────────────────────────────────────
// SYNC_APP_MAP, TABLE_TO_SYNC_NAME, TABLE_TO_APP, SYNC_NAME_TO_TABLE,
// toSyncName() and fromSyncName() are now derived from per-module
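Worth noting for reviewers: Dexie runs an upgrade() callback at most once per device, and only when the locally persisted schema version is still below 48; it executes inside the version-change transaction, which is why the pass above can use tx.table() directly instead of opening a transaction of its own.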
@@ -0,0 +1,219 @@
/**
 * Unit tests for `dedupHomeScenesOn` — the soft-cleanup pass that
 * collapses duplicate "Home" scenes accumulated by the seeding race
 * (see docs/plans/workbench-seeding-cleanup.md).
 *
 * Uses an isolated Dexie db with just a `workbenchScenes` table so the
 * test doesn't drag in `database.ts`'s side-effect imports (auth store,
 * triggers, funnel tracking, …) — the function under test only needs a
 * Table reference, so a one-table fixture is enough.
 */

import 'fake-indexeddb/auto';
import { afterEach, beforeEach, describe, expect, it } from 'vitest';
import Dexie, { type Table } from 'dexie';
import type { LocalWorkbenchScene } from '$lib/types/workbench-scenes';
import { dedupHomeScenesOn } from './dedup-workbench-scenes';

// Public LocalWorkbenchScene doesn't carry the runtime-stamped scope
// fields (spaceId/authorId/visibility) — they're added by the creating
// hook. Tests need to set spaceId explicitly to drive grouping, so we
// model the row as the public shape plus an optional spaceId override.
type SceneRow = LocalWorkbenchScene & { spaceId?: string };

interface FixtureDb extends Dexie {
  workbenchScenes: Table<SceneRow, string>;
}

let db: FixtureDb;

function makeDb(): FixtureDb {
  const fresh = new Dexie(`dedup-test-${crypto.randomUUID()}`) as FixtureDb;
  fresh.version(1).stores({ workbenchScenes: 'id, order' });
  return fresh;
}

function makeScene(overrides: Partial<SceneRow>): SceneRow {
  return {
    id: 'scene-default',
    name: 'Home',
    openApps: [{ appId: 'todo' }, { appId: 'calendar' }, { appId: 'notes' }],
    order: 0,
    createdAt: '2026-04-25T10:00:00.000Z',
    updatedAt: '2026-04-25T10:00:00.000Z',
    spaceId: 'space-personal',
    ...overrides,
  };
}

beforeEach(async () => {
  db = makeDb();
  await db.open();
});

afterEach(async () => {
  db.close();
  await Dexie.delete(db.name);
});

describe('dedupHomeScenesOn', () => {
  it('returns 0 and changes nothing when there are no duplicates', async () => {
    await db.workbenchScenes.add(makeScene({ id: 's1' }));
    await db.workbenchScenes.add(makeScene({ id: 's2', spaceId: 'space-other' }));

    const removed = await dedupHomeScenesOn(db.workbenchScenes);

    expect(removed).toBe(0);
    const remaining = await db.workbenchScenes
      .toArray()
      .then((rows) => rows.filter((r) => !r.deletedAt));
    expect(remaining).toHaveLength(2);
  });

  it('keeps one survivor per (spaceId) group and soft-deletes the rest', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({ id: 's1', updatedAt: '2026-04-25T09:00:00.000Z' }),
      makeScene({ id: 's2', updatedAt: '2026-04-25T10:00:00.000Z' }),
      makeScene({ id: 's3', updatedAt: '2026-04-25T11:00:00.000Z' }),
    ]);

    const removed = await dedupHomeScenesOn(db.workbenchScenes);

    expect(removed).toBe(2);
    const all = await db.workbenchScenes.toArray();
    const alive = all.filter((r) => !r.deletedAt);
    const dead = all.filter((r) => r.deletedAt);
    expect(alive).toHaveLength(1);
    expect(dead).toHaveLength(2);
  });

  it('picks the survivor with the most openApps, then most recent updatedAt', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({
        id: 'older-richer',
        openApps: [{ appId: 'todo' }, { appId: 'calendar' }, { appId: 'notes' }],
        updatedAt: '2026-04-25T09:00:00.000Z',
      }),
      makeScene({
        id: 'newer-leaner',
        openApps: [{ appId: 'todo' }],
        updatedAt: '2026-04-25T11:00:00.000Z',
      }),
    ]);

    await dedupHomeScenesOn(db.workbenchScenes);

    const alive = await db.workbenchScenes
      .toArray()
      .then((rows) => rows.filter((r) => !r.deletedAt));
    expect(alive.map((r) => r.id)).toEqual(['older-richer']);
  });

  it('merges openApps from losers into the survivor (dedup by appId)', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({
        id: 'survivor',
        openApps: [{ appId: 'todo' }, { appId: 'calendar' }, { appId: 'notes' }],
      }),
      makeScene({
        id: 'loser-extra',
        openApps: [{ appId: 'notes' }, { appId: 'mood' }],
      }),
    ]);

    await dedupHomeScenesOn(db.workbenchScenes);

    const survivor = await db.workbenchScenes.get('survivor');
    expect(survivor?.openApps?.map((a) => a.appId).sort()).toEqual([
      'calendar',
      'mood',
      'notes',
      'todo',
    ]);
  });

  it('keeps groups separate by spaceId — no cross-space merging', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({ id: 'a1', spaceId: 'space-A' }),
      makeScene({ id: 'a2', spaceId: 'space-A' }),
      makeScene({ id: 'b1', spaceId: 'space-B' }),
    ]);

    const removed = await dedupHomeScenesOn(db.workbenchScenes);

    expect(removed).toBe(1);
    const alive = await db.workbenchScenes
      .toArray()
      .then((rows) => rows.filter((r) => !r.deletedAt));
    expect(alive).toHaveLength(2);
    expect(alive.map((r) => r.spaceId).sort()).toEqual(['space-A', 'space-B']);
  });

  it('leaves user-customized scenes alone (description / wallpaper / agent / scope)', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({ id: 's1' }),
      makeScene({ id: 's2', description: 'Mein Workspace' }),
      makeScene({ id: 's3', viewingAsAgentId: 'agent-1' }),
      makeScene({ id: 's4', scopeTagIds: ['tag-1'] }),
    ]);

    const removed = await dedupHomeScenesOn(db.workbenchScenes);

    // s1 is the only mergeable row in its group of 1 → no removal.
    expect(removed).toBe(0);
    const alive = await db.workbenchScenes
      .toArray()
      .then((rows) => rows.filter((r) => !r.deletedAt));
    expect(alive).toHaveLength(4);
  });

  it('leaves non-Home scenes alone even when duplicated by name', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({ id: 'd1', name: 'Deep Work' }),
      makeScene({ id: 'd2', name: 'Deep Work' }),
    ]);

    const removed = await dedupHomeScenesOn(db.workbenchScenes);

    expect(removed).toBe(0);
  });

  it('skips already-tombstoned rows', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({ id: 's1' }),
      makeScene({ id: 's2', deletedAt: '2026-04-24T10:00:00.000Z' }),
    ]);

    const removed = await dedupHomeScenesOn(db.workbenchScenes);

    // Only one live row in the group → no removal.
    expect(removed).toBe(0);
    const stillDeleted = await db.workbenchScenes.get('s2');
    expect(stillDeleted?.deletedAt).toBe('2026-04-24T10:00:00.000Z');
  });

  it('is idempotent — running twice produces the same end state', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({ id: 's1' }),
      makeScene({ id: 's2' }),
      makeScene({ id: 's3' }),
    ]);

    const firstRemoved = await dedupHomeScenesOn(db.workbenchScenes);
    const secondRemoved = await dedupHomeScenesOn(db.workbenchScenes);

    expect(firstRemoved).toBe(2);
    expect(secondRemoved).toBe(0);
  });

  it('skips rows without a string spaceId (ambiguous group key)', async () => {
    await db.workbenchScenes.bulkAdd([
      makeScene({ id: 's1', spaceId: undefined }),
      makeScene({ id: 's2', spaceId: undefined }),
    ]);

    const removed = await dedupHomeScenesOn(db.workbenchScenes);

    expect(removed).toBe(0);
  });
});
123  apps/mana/apps/web/src/lib/data/scope/dedup-workbench-scenes.ts  Normal file

@@ -0,0 +1,123 @@
/**
 * Dedup pass for the `workbenchScenes` table — collapses the duplicate
 * "Home" scenes the seeding race in `workbench-scenes.svelte.ts` has been
 * accumulating since the Spaces-Foundation migration shipped 2026-04-22.
 *
 * Background: the seeder writes rows without `spaceId`, so the Dexie
 * creating-hook stamps `_personal:<userId>` (sentinel). The dedup check
 * in `onActiveSpaceChanged` filters by the *real* space UUID and never
 * finds them — every login adds duplicates. Full root-cause + the
 * upcoming structural fix (per-space-seeds registry + deterministic
 * ids + creating-hook hardening) live in
 * `docs/plans/workbench-seeding-cleanup.md`.
 *
 * This file is the soft cleanup: idempotent, content-aware, takes
 * `name === 'Home'` rows that look like default seeds (no description /
 * wallpaper / viewingAsAgentId / scopeTagIds — i.e. nothing the user
 * has customised), groups them by `spaceId`, picks one survivor per
 * group, merges every loser's `openApps` into it, and soft-deletes the
 * rest so mana-sync propagates the cleanup to other devices.
 *
 * Pure: takes a Dexie Table reference, never reaches into the live
 * `db`. That keeps it import-cycle-free so it can run inside a
 * `db.version(N).upgrade()` callback (where it gets `tx.table(...)`)
 * AND from app-runtime callers (where they pass `db.table(...)`).
 */

import type { Table } from 'dexie';
import type { LocalWorkbenchScene, WorkbenchSceneApp } from '$lib/types/workbench-scenes';

const HOME_NAME = 'Home';

/**
 * A scene is a candidate for merging when it looks like a fresh default
 * "Home" seed — anything the user might have set themselves disqualifies
 * the row so we never destroy custom layouts.
 */
function isDefaultHomeSeed(row: LocalWorkbenchScene): boolean {
  if (row.deletedAt) return false;
  if (row.name !== HOME_NAME) return false;
  if (row.description) return false;
  if (row.wallpaper) return false;
  if (row.viewingAsAgentId) return false;
  if (row.scopeTagIds && row.scopeTagIds.length > 0) return false;
  return true;
}

/**
 * Run dedup on the given `workbenchScenes` table. Returns the number of
 * rows soft-deleted. Idempotent — safe to invoke repeatedly.
 *
 * The caller is expected to wrap this in a transaction when called
 * outside of a Dexie `upgrade()` callback (upgrade callbacks already
 * give a transaction-bound `tx.table()` reference).
 */
export async function dedupHomeScenesOn(
  table: Table<LocalWorkbenchScene, string>
): Promise<number> {
  const rows = await table.toArray();

  // Bucket by spaceId. Rows without a spaceId can't be safely grouped
  // (their target space is ambiguous) — skip them. Rows that look like
  // user-customised scenes are also out, even if they happen to be
  // named "Home", so a deliberate two-Home setup stays intact.
  const groups = new Map<string, LocalWorkbenchScene[]>();
  for (const row of rows) {
    if (!isDefaultHomeSeed(row)) continue;
    const spaceId = (row as { spaceId?: unknown }).spaceId;
    if (typeof spaceId !== 'string' || !spaceId) continue;
    let group = groups.get(spaceId);
    if (!group) {
      group = [];
      groups.set(spaceId, group);
    }
    group.push(row);
  }

  const now = new Date().toISOString();
  let removed = 0;

  for (const group of groups.values()) {
    if (group.length <= 1) continue;

    // Survivor pick: the row with the most openApps wins (it's the
    // most likely to carry the user's accumulated app additions),
    // breaking ties by most-recent updatedAt.
    group.sort((a, b) => {
      const aLen = a.openApps?.length ?? 0;
      const bLen = b.openApps?.length ?? 0;
      if (aLen !== bLen) return bLen - aLen;
      const aTime = a.updatedAt ?? '';
      const bTime = b.updatedAt ?? '';
      return bTime.localeCompare(aTime);
    });
    const [survivor, ...losers] = group;

    // Merge every loser's openApps into the survivor, dedupe by
    // appId so the user doesn't end up with two `todo` panels.
    const merged: WorkbenchSceneApp[] = [...(survivor.openApps ?? [])];
    const seen = new Set(merged.map((a) => a.appId));
    for (const loser of losers) {
      for (const app of loser.openApps ?? []) {
        if (!seen.has(app.appId)) {
          seen.add(app.appId);
          merged.push(app);
        }
      }
    }
    const survivorAppCount = survivor.openApps?.length ?? 0;
    if (merged.length !== survivorAppCount) {
      await table.update(survivor.id, { openApps: merged, updatedAt: now });
    }

    // Soft-delete the losers via deletedAt so the unified sync engine
    // propagates the dedup to other devices instead of resurrecting
    // the rows on next pull.
    for (const loser of losers) {
      await table.update(loser.id, { deletedAt: now, updatedAt: now });
      removed++;
    }
  }

  return removed;
}
@@ -614,6 +614,23 @@
  if (rewritten > 0) {
    console.info(`[spaces] reconciled ${rewritten} sentinel records to active space`);
  }

  // Belt-and-suspenders dedup of duplicate "Home" workbench
  // scenes. The Dexie v48 upgrade already does one pass at
  // schema-bump time; this second pass covers the edge case
  // where reconcileSentinels just collapsed sentinel-stamped
  // rows into the same space-id as already-reconciled rows,
  // producing fresh duplicates. Idempotent — a no-op when
  // nothing matches. The structural fix that prevents new
  // duplicates ships separately, see
  // docs/plans/workbench-seeding-cleanup.md.
  const { dedupHomeScenesOn } = await import('$lib/data/scope/dedup-workbench-scenes');
  const dedupedCount = await db.transaction('rw', 'workbenchScenes', () =>
    dedupHomeScenesOn(db.table('workbenchScenes'))
  );
  if (dedupedCount > 0) {
    console.info(`[workbench-scenes] deduped ${dedupedCount} duplicate Home scenes`);
  }
} catch (err) {
  console.warn('[spaces] active-space boot failed — sync will use sentinel scope', err);
}
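One design note on the call above: dedupHomeScenesOn takes a bare Table reference on purpose. The v48 upgrade hands it a transaction-bound tx.table(), while this boot-time pass has no ambient transaction and therefore wraps the call in an explicit db.transaction('rw', 'workbenchScenes', ...), matching the contract spelled out in the function's docblock.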