docs(plans): mark llm-fallback-aliases SHIPPED, add M-by-M commit table

All 5 milestones landed today in one continuous session: registry,
health cache, fallback router, observability, and consumer migration.
115 service-side tests, validator covers 2538 files.
This commit is contained in:
Till JS 2026-04-26 21:27:57 +02:00
parent 30eb7ef72d
commit 7766ea5021
27 changed files with 662 additions and 346 deletions

View file

@ -78,6 +78,11 @@ export async function readLatestRecords(
/**
* Write a new record via sync_changes INSERT. The record will appear
* on the user's devices on their next sync cycle.
*
* MCP-Tool calls always carry `origin='agent'` because the pipeline
* that produced the value is an AI agent invoking a tool — the
* actor's `kind` may be `system` (the MCP server itself) but the
* write semantics are agent-driven for conflict-detection purposes.
*/
export async function writeRecord(
userId: string,
@ -86,17 +91,18 @@ export async function writeRecord(
recordId: string,
op: 'insert' | 'update' | 'delete',
data: Record<string, unknown>,
fieldTimestamps: Record<string, string>
fieldMeta: Record<string, string>
): Promise<void> {
await withUser(userId, async (tx) => {
await tx`
INSERT INTO sync_changes
(app_id, table_name, record_id, user_id, op, data, field_timestamps, client_id, schema_version, actor)
(app_id, table_name, record_id, user_id, op, data, field_meta, client_id, schema_version, actor, origin)
VALUES
(${appId}, ${tableName}, ${recordId}, ${userId}, ${op},
${tx.json(data as never)}, ${tx.json(fieldTimestamps as never)},
${tx.json(data as never)}, ${tx.json(fieldMeta as never)},
'mcp-server', 1,
${tx.json({ kind: 'system', principalId: 'system:mcp', displayName: 'MCP Server' } as never)})
${tx.json({ kind: 'system', principalId: 'system:mcp', displayName: 'MCP Server' } as never)},
'agent')
`;
});
}

View file

@ -2,9 +2,8 @@
* Mission store CRUD + lifecycle operations.
*
* Missions go through the unified Dexie write path, which means the Dexie
* hooks stamp `userId`, `__lastActor`, `__fieldTimestamps`, `__fieldActors`
* and track the row into `_pendingChanges`. Callers never touch those
* fields directly.
* hooks stamp `userId` + `__fieldMeta` and track the row into
* `_pendingChanges`. Callers never touch those fields directly.
*
* Iterations are intentionally stored inline (`Mission.iterations`) rather
* than in a child table. They are append-only, each Mission stays small

View file

@ -37,7 +37,6 @@
import { db } from './database';
import { subscribeSyncConflicts, type SyncConflictPayload } from './sync';
import { FIELD_TIMESTAMPS_KEY } from './database';
/** How long a conflict stays visible before auto-dismissing. */
const CONFLICT_TTL_MS = 30_000;
@ -155,24 +154,16 @@ async function restore(id: string): Promise<void> {
const now = new Date().toISOString();
const updates: Record<string, unknown> = { updatedAt: now };
const ftPatch: Record<string, string> = {};
for (const [field, info] of Object.entries(conflict.fields)) {
updates[field] = info.wasLocal;
ftPatch[field] = now;
}
// Read the current row's __fieldTimestamps and merge our patch in
// so we don't blow away unrelated server-side timestamps.
const row = await db.table(conflict.tableName).get(conflict.recordId);
if (row) {
const existingFT =
((row as Record<string, unknown>)[FIELD_TIMESTAMPS_KEY] as Record<string, string>) ?? {};
updates[FIELD_TIMESTAMPS_KEY] = { ...existingFT, ...ftPatch };
} else {
updates[FIELD_TIMESTAMPS_KEY] = ftPatch;
}
// The Dexie updating-hook re-stamps `__fieldMeta` for every modified
// field with origin='user' and `at: now`, which is exactly what we
// want here: the restore is a fresh user edit that should win LWW
// against the server's overwrite on the next sync round. No manual
// __fieldMeta patching needed.
try {
await db.table(conflict.tableName).update(conflict.recordId, updates);
} catch (err) {

View file

@ -20,8 +20,8 @@ import { fire as fireTrigger } from '$lib/triggers/registry';
import { checkInlineSuggestion } from '$lib/triggers/inline-suggest';
import { getEffectiveUserId, GUEST_USER_ID } from './current-user';
import { getEffectiveSpaceId } from './scope/active-space.svelte';
import { getCurrentActor } from './events/actor';
import type { Actor } from './events/actor';
import { getCurrentActor, makeFieldMeta } from './events/actor';
import type { Actor, FieldMeta, FieldOrigin } from './events/actor';
import { isQuotaError, notifyQuotaExceeded } from './quota-detect';
import {
SYNC_APP_MAP,
@ -1231,6 +1231,35 @@ db.version(50).upgrade(async (tx) => {
}
});
// v51 — Lasts module (docs/plans/lasts-module.md M1).
// Mirror sibling to firsts: the *last* time you did/felt/saw something —
// either marked manually or surfaced retrospectively by the inference
// scanner that watches places/contacts/food/habits for frequency drops.
//
// Single space-scoped table. Index strategy:
// - status for the suspected/confirmed/reclaimed tab filter
// - category for the category tab filter
// - date for chronological sort + anniversary scans
// - recognisedAt for the "recognised X years ago" reminder
// - isPinned, isArchived for the standard meta-filters
db.version(51).stores({
lasts: 'id, status, category, date, recognisedAt, isPinned, isArchived',
});
// v52 — Lasts inference cooldown (docs/plans/lasts-module.md M3).
// Records dismissed inference candidates so the scanner doesn't keep
// re-suggesting the same place / contact / habit for ~12 months. ID is
// deterministic (`${refTable}:${refId}`) for structural idempotency:
// re-dismissing the same candidate is a Dexie put no-op-equivalent.
//
// Plaintext only — refTable/refId/dismissedAt are all metadata, no
// user-typed content. Indexed by refTable + dismissedAt so the scanner
// can quickly probe "is this place on cooldown?" and the cooldown sweep
// can expire entries by age.
db.version(52).stores({
lastsCooldown: 'id, refTable, dismissedAt, [refTable+refId]',
});
// ─── Sync Routing ──────────────────────────────────────────
// SYNC_APP_MAP, TABLE_TO_SYNC_NAME, TABLE_TO_APP, SYNC_NAME_TO_TABLE,
// toSyncName() and fromSyncName() are now derived from per-module
@ -1360,29 +1389,22 @@ function trackActivity(
}
/**
* Hidden field on every synced record holding per-field LWW timestamps.
* Not indexed, not sent to the server in pending-change payloads.
* Hidden field on every synced record carrying per-field write metadata.
*
* Shape: `{ [fieldKey]: FieldMeta }` where `FieldMeta = { at, actor, origin }`.
* Replaces the older triple `__fieldTimestamps` + `__fieldActors` +
* `__lastActor` — same information, single source of truth.
*
* Not indexed, never sent to the server as a top-level payload field
* (the wire format carries it as part of `change.fields[k]` instead).
*
* For `__lastActor` consumers: the previous "actor that last wrote the
* record as a whole" is now derived as `__fieldMeta[argmax(at)].actor`.
*/
export const FIELD_TIMESTAMPS_KEY = '__fieldTimestamps';
/**
* Hidden field holding the {@link Actor} that last wrote the record as a
* whole. Used by the Workbench UI to badge records the AI has touched.
*/
export const LAST_ACTOR_KEY = '__lastActor';
/**
* Hidden field holding the per-field {@link Actor} map, mirroring
* `__fieldTimestamps`. Enables "the AI changed the due date, the user
* changed the title" attribution when rendering diffs.
*/
export const FIELD_ACTORS_KEY = '__fieldActors';
export const FIELD_META_KEY = '__fieldMeta';
function isInternalKey(key: string): boolean {
return (
key === 'id' ||
key === FIELD_TIMESTAMPS_KEY ||
key === LAST_ACTOR_KEY ||
key === FIELD_ACTORS_KEY
);
return key === 'id' || key === FIELD_META_KEY;
}
/**
@ -1392,8 +1414,7 @@ function isInternalKey(key: string): boolean {
* creating-hook continues to stamp `userId` on these; data tables
* (tasks, events, tags, …) stopped carrying `userId` in Phase 2c of
* the space-scoped data model rollout — attribution there lives on
* the Actor fields (`__lastActor` / `__fieldActors`) and tenancy on
* `spaceId`.
* `__fieldMeta` and tenancy on `spaceId`.
*
* Keeping this list explicit instead of inferring by naming
* convention: the audit in docs/plans/space-scoped-data-model.md
@ -1476,27 +1497,22 @@ for (const [appId, tables] of Object.entries(SYNC_APP_MAP)) {
}
}
// Stamp every real field with the create-time so future LWW comparisons
// have a baseline, and with the actor so field-level attribution works.
// Mutates obj in place — Dexie persists the mutation.
const ft: Record<string, string> = {};
const fa: Record<string, Actor> = {};
// Stamp every user-data field with `__fieldMeta[key] = { at, actor, origin }`.
// `at` drives field-LWW ordering, `actor` carries attribution forward
// across renames, `origin` distinguishes user edits from system /
// migration / agent / server-replay writes for conflict-detection.
// F1 hardcodes `origin: 'user'` here — F2 will derive it from the
// active actor.kind so AI-runner writes land as `'agent'` etc.
const origin: FieldOrigin = 'user';
const fieldMeta: Record<string, FieldMeta> = {};
for (const key of Object.keys(obj)) {
if (isInternalKey(key)) continue;
ft[key] = now;
fa[key] = actor;
fieldMeta[key] = makeFieldMeta(now, actor, origin);
}
objRecord[FIELD_TIMESTAMPS_KEY] = ft;
objRecord[FIELD_ACTORS_KEY] = fa;
objRecord[LAST_ACTOR_KEY] = actor;
objRecord[FIELD_META_KEY] = fieldMeta;
// Build payload for pending-change WITHOUT the internal bookkeeping fields
const {
[FIELD_TIMESTAMPS_KEY]: _ft,
[FIELD_ACTORS_KEY]: _fa,
[LAST_ACTOR_KEY]: _la,
...dataForSync
} = obj as Record<string, unknown>;
// Build payload for pending-change WITHOUT the internal bookkeeping field.
const { [FIELD_META_KEY]: _fm, ...dataForSync } = obj as Record<string, unknown>;
trackPendingChange(tableName, {
appId,
@ -1505,6 +1521,7 @@ for (const [appId, tables] of Object.entries(SYNC_APP_MAP)) {
op: 'insert',
data: dataForSync,
actor,
origin,
createdAt: now,
spaceId: typeof objRecord.spaceId === 'string' ? (objRecord.spaceId as string) : undefined,
});
@ -1525,7 +1542,8 @@ for (const [appId, tables] of Object.entries(SYNC_APP_MAP)) {
if (_applyingTables.has(tableName)) return undefined;
const now = new Date().toISOString();
const actor: Actor = getCurrentActor();
const fields: Record<string, { value: unknown; updatedAt: string }> = {};
const origin: FieldOrigin = 'user';
const fields: Record<string, { value: unknown; at: string }> = {};
// userId is immutable after creation. Silently strip any attempt to
// reassign it from a local update so a buggy or malicious caller can
@ -1542,23 +1560,19 @@ for (const [appId, tables] of Object.entries(SYNC_APP_MAP)) {
if ('spaceId' in mods) delete mods.spaceId;
if ('authorId' in mods) delete mods.authorId;
// Merge field timestamps and field actors: keep existing, overwrite
// each modified field with now / current actor.
const existingFT =
((obj as Record<string, unknown>)[FIELD_TIMESTAMPS_KEY] as
| Record<string, string>
// Merge __fieldMeta: keep existing entries (so untouched fields
// retain their original at/actor/origin), overwrite each modified
// field with the current write's metadata.
const existingMeta =
((obj as Record<string, unknown>)[FIELD_META_KEY] as
| Record<string, FieldMeta>
| undefined) ?? {};
const existingFA =
((obj as Record<string, unknown>)[FIELD_ACTORS_KEY] as Record<string, Actor> | undefined) ??
{};
const newFT: Record<string, string> = { ...existingFT };
const newFA: Record<string, Actor> = { ...existingFA };
const newMeta: Record<string, FieldMeta> = { ...existingMeta };
for (const [key, value] of Object.entries(modifications)) {
if (isInternalKey(key)) continue;
fields[key] = { value, updatedAt: now };
newFT[key] = now;
newFA[key] = actor;
fields[key] = { value, at: now };
newMeta[key] = makeFieldMeta(now, actor, origin);
}
const op = (modifications as Record<string, unknown>).deletedAt ? 'delete' : 'update';
@ -1577,6 +1591,7 @@ for (const [appId, tables] of Object.entries(SYNC_APP_MAP)) {
op,
fields,
actor,
origin,
deletedAt: (modifications as Record<string, unknown>).deletedAt as string | undefined,
createdAt: now,
spaceId: existingSpaceId,
@ -1585,13 +1600,10 @@ for (const [appId, tables] of Object.entries(SYNC_APP_MAP)) {
fireTrigger(appId, tableName, op, modifications as Record<string, unknown>);
// Returning an object from a Dexie 'updating' hook merges it into the
// modifications applied to the record — use this to persist the new
// per-field timestamps, per-field actors, and last-actor alongside
// the user's update.
// modifications applied to the record — use this to persist the merged
// __fieldMeta alongside the user's data update.
return {
[FIELD_TIMESTAMPS_KEY]: newFT,
[FIELD_ACTORS_KEY]: newFA,
[LAST_ACTOR_KEY]: actor,
[FIELD_META_KEY]: newMeta,
};
});
}

View file

@ -28,6 +28,8 @@ export type {
AiActor,
SystemActor,
SystemSource,
FieldMeta,
FieldOrigin,
} from '@mana/shared-ai';
export {
SYSTEM_PROJECTION,
@ -47,6 +49,8 @@ export {
isAiActor,
isSystemActor,
isFromMissionRunner,
makeFieldMeta,
isUserOriginatedField,
} from '@mana/shared-ai';
/**

View file

@ -33,7 +33,7 @@
* real user id.
*/
import { db, SYNC_APP_MAP, FIELD_TIMESTAMPS_KEY } from './database';
import { db, SYNC_APP_MAP, FIELD_META_KEY } from './database';
import { GUEST_USER_ID } from './current-user';
import { encryptRecord } from './crypto/record-helpers';
import { waitForActiveKey } from './crypto/key-provider';
@ -110,9 +110,9 @@ export async function migrateGuestDataToUser(newUserId: string): Promise<GuestMi
// Strip the bookkeeping fields the creating-hook will rebuild.
// Importantly, drop `userId` so the hook stamps the new id from
// getEffectiveUserId() instead of preserving 'guest'.
const { userId: _oldUser, [FIELD_TIMESTAMPS_KEY]: _oldFt, ...clean } = record;
const { userId: _oldUser, [FIELD_META_KEY]: _oldMeta, ...clean } = record;
void _oldUser;
void _oldFt;
void _oldMeta;
// Catch-up encryption: guest writes left these fields as
// plaintext because no key was available. Now that the

View file

@ -35,13 +35,14 @@ vi.mock('$lib/triggers/inline-suggest', () => ({
import {
isValidSyncChange,
readFieldTimestamps,
readFieldMeta,
applyServerChanges,
subscribeSyncConflicts,
type SyncChange,
type SyncConflictPayload,
} from './sync';
import { db, FIELD_TIMESTAMPS_KEY } from './database';
import { db, FIELD_META_KEY } from './database';
import { makeFieldMeta, USER_ACTOR } from './events/actor';
// ─── Pure tests ──────────────────────────────────────────────────
@ -63,8 +64,8 @@ describe('isValidSyncChange', () => {
id: 'task-1',
op: 'update',
fields: {
title: { value: 'updated', updatedAt: '2026-04-01T10:00:00Z' },
priority: { value: 'high', updatedAt: '2026-04-01T10:01:00Z' },
title: { value: 'updated', at: '2026-04-01T10:00:00Z' },
priority: { value: 'high', at: '2026-04-01T10:01:00Z' },
},
};
expect(isValidSyncChange(change)).toBe(true);
@ -112,12 +113,12 @@ describe('isValidSyncChange', () => {
})
).toBe(false);
// updatedAt must be a string when present
// `at` must be a string when present
expect(
isValidSyncChange({
...baseInsert,
op: 'update',
fields: { title: { value: 'x', updatedAt: 12345 } },
fields: { title: { value: 'x', at: 12345 } },
})
).toBe(false);
});
@ -131,25 +132,28 @@ describe('isValidSyncChange', () => {
});
});
describe('readFieldTimestamps', () => {
it('returns the field-timestamps map when present', () => {
const ft = { title: '2026-04-01T10:00:00Z', priority: '2026-04-01T11:00:00Z' };
const record = { id: 'x', [FIELD_TIMESTAMPS_KEY]: ft };
expect(readFieldTimestamps(record)).toEqual(ft);
describe('readFieldMeta', () => {
it('returns the field-meta map when present', () => {
const fieldMeta = {
title: makeFieldMeta('2026-04-01T10:00:00Z', USER_ACTOR, 'user'),
priority: makeFieldMeta('2026-04-01T11:00:00Z', USER_ACTOR, 'user'),
};
const record = { id: 'x', [FIELD_META_KEY]: fieldMeta };
expect(readFieldMeta(record)).toEqual(fieldMeta);
});
it('returns an empty map when the field is missing (legacy record)', () => {
expect(readFieldTimestamps({ id: 'x' })).toEqual({});
expect(readFieldMeta({ id: 'x' })).toEqual({});
});
it('handles null and non-object inputs gracefully', () => {
expect(readFieldTimestamps(null)).toEqual({});
expect(readFieldTimestamps(undefined)).toEqual({});
expect(readFieldTimestamps(42)).toEqual({});
expect(readFieldMeta(null)).toEqual({});
expect(readFieldMeta(undefined)).toEqual({});
expect(readFieldMeta(42)).toEqual({});
});
it('returns an empty map if __fieldTimestamps is not an object', () => {
expect(readFieldTimestamps({ id: 'x', [FIELD_TIMESTAMPS_KEY]: 'not-a-map' })).toEqual({});
it('returns an empty map if __fieldMeta is not an object', () => {
expect(readFieldMeta({ id: 'x', [FIELD_META_KEY]: 'not-a-map' })).toEqual({});
});
});
@ -169,7 +173,7 @@ describe('applyServerChanges (Dexie integration)', () => {
}
});
it('inserts a new record with __fieldTimestamps populated', async () => {
it('inserts a new record with __fieldMeta populated', async () => {
await applyServerChanges('todo', [
{
table: 'tasks',
@ -189,9 +193,11 @@ describe('applyServerChanges (Dexie integration)', () => {
const stored = await db.table('tasks').get('task-A');
expect(stored).toBeDefined();
expect(stored.title).toBe('Buy milk');
const ft = readFieldTimestamps(stored);
expect(ft.title).toBe('2026-04-01T10:00:00Z');
expect(ft.priority).toBe('2026-04-01T10:00:00Z');
const fm = readFieldMeta(stored);
expect(fm.title?.at).toBe('2026-04-01T10:00:00Z');
expect(fm.priority?.at).toBe('2026-04-01T10:00:00Z');
// applyServerChanges stamps replays with origin='server-replay'
expect(fm.title?.origin).toBe('server-replay');
});
it('field-level LWW: server wins per-field when newer', async () => {
@ -213,8 +219,8 @@ describe('applyServerChanges (Dexie integration)', () => {
id: 'task-B',
op: 'update',
fields: {
title: { value: 'new title', updatedAt: '2099-01-01T00:00:00Z' },
priority: { value: 'high', updatedAt: '2099-01-01T00:00:00Z' },
title: { value: 'new title', at: '2099-01-01T00:00:00Z' },
priority: { value: 'high', at: '2099-01-01T00:00:00Z' },
},
},
]);
@ -223,9 +229,9 @@ describe('applyServerChanges (Dexie integration)', () => {
expect(stored.title).toBe('new title');
expect(stored.priority).toBe('high');
const ft = readFieldTimestamps(stored);
expect(ft.title).toBe('2099-01-01T00:00:00Z');
expect(ft.priority).toBe('2099-01-01T00:00:00Z');
const fm = readFieldMeta(stored);
expect(fm.title?.at).toBe('2099-01-01T00:00:00Z');
expect(fm.priority?.at).toBe('2099-01-01T00:00:00Z');
});
it('field-level LWW: split outcome when one field is newer and one older', async () => {
@ -257,8 +263,8 @@ describe('applyServerChanges (Dexie integration)', () => {
id: 'task-C',
op: 'update',
fields: {
title: { value: 'server title (loser)', updatedAt: '1970-01-01T00:00:00Z' },
priority: { value: 'medium (winner)', updatedAt: '2099-01-01T00:00:00Z' },
title: { value: 'server title (loser)', at: '1970-01-01T00:00:00Z' },
priority: { value: 'medium (winner)', at: '2099-01-01T00:00:00Z' },
},
},
]);
@ -381,7 +387,7 @@ describe('applyServerChanges (Dexie integration)', () => {
id: 'task-conflict-1',
op: 'update',
fields: {
title: { value: 'their version', updatedAt: '2099-01-01T00:00:00Z' },
title: { value: 'their version', at: '2099-01-01T00:00:00Z' },
},
},
]);
@ -413,7 +419,7 @@ describe('applyServerChanges (Dexie integration)', () => {
id: 'task-conflict-2',
op: 'update',
fields: {
title: { value: 'first server title', updatedAt: '2099-01-01T00:00:00Z' },
title: { value: 'first server title', at: '2099-01-01T00:00:00Z' },
},
},
]);
@ -438,7 +444,7 @@ describe('applyServerChanges (Dexie integration)', () => {
id: 'task-conflict-3',
op: 'update',
fields: {
title: { value: 'same value', updatedAt: '2099-01-01T00:00:00Z' },
title: { value: 'same value', at: '2099-01-01T00:00:00Z' },
},
},
]);
@ -463,8 +469,8 @@ describe('applyServerChanges (Dexie integration)', () => {
id: 'task-conflict-4',
op: 'update',
fields: {
title: { value: 'server title', updatedAt: '2099-01-01T00:00:00Z' },
priority: { value: 'high', updatedAt: '2099-01-01T00:00:00Z' },
title: { value: 'server title', at: '2099-01-01T00:00:00Z' },
priority: { value: 'high', at: '2099-01-01T00:00:00Z' },
},
},
]);
@ -500,7 +506,7 @@ describe('applyServerChanges (Dexie integration)', () => {
id: 'task-conflict-5',
op: 'update',
fields: {
title: { value: 'changed', updatedAt: '2026-04-01T00:00:00Z' }, // exact tie
title: { value: 'changed', at: '2026-04-01T00:00:00Z' }, // exact tie
},
},
]);

View file

@ -20,20 +20,28 @@ import {
toSyncName,
fromSyncName,
beginApplyingTables,
FIELD_TIMESTAMPS_KEY,
FIELD_ACTORS_KEY,
LAST_ACTOR_KEY,
FIELD_META_KEY,
setPendingChangeListener,
} from './database';
import { isQuotaError, cleanupTombstones, notifyQuotaExceeded } from './quota';
import { emitSyncTelemetry, categorizeSyncError } from './sync-telemetry';
import { USER_ACTOR, type Actor } from './events/actor';
import {
USER_ACTOR,
makeFieldMeta,
type Actor,
type FieldMeta,
type FieldOrigin,
} from './events/actor';
/** Reads the per-field actor map off a record; empty for legacy rows. */
function readFieldActors(record: unknown): Record<string, Actor> {
/**
* Reads the per-field write metadata off a record. Returns an empty
* map for records that pre-date the field-meta migration so callers
* can fall back to record-level `updatedAt`.
*/
export function readFieldMeta(record: unknown): Record<string, FieldMeta> {
if (!record || typeof record !== 'object') return {};
const fa = (record as Record<string, unknown>)[FIELD_ACTORS_KEY];
return fa && typeof fa === 'object' ? (fa as Record<string, Actor>) : {};
const fm = (record as Record<string, unknown>)[FIELD_META_KEY];
return fm && typeof fm === 'object' ? (fm as Record<string, FieldMeta>) : {};
}
// ─── Types ────────────────────────────────────────────────────
@ -41,10 +49,12 @@ function readFieldActors(record: unknown): Record<string, Actor> {
/** Operations the sync protocol supports. */
export type SyncOp = 'insert' | 'update' | 'delete';
/** A single field-level change carrying its own LWW timestamp. */
/** A single field-level change carrying its own LWW timestamp.
* Per-field actor + origin live at the row level on `SyncChange`
* (each push = one actor + one origin), never per field. */
export interface FieldChange {
value: unknown;
updatedAt: string;
at: string;
}
/**
@ -87,6 +97,15 @@ export interface SyncChange {
* for back-compat with pre-actor clients.
*/
actor?: Actor;
/**
* Pipeline origin of the write — what kind of code path produced the
* value. Drives conflict-detection: only `'user'`-origin writes can
* lose to a later server overwrite and surface as a conflict toast.
* Server-applied pulls always take `'server-replay'` regardless of
* what the originating client stamped, because the local client
* never typed the value itself.
*/
origin?: FieldOrigin;
}
interface PendingChange {
@ -99,6 +118,7 @@ interface PendingChange {
data?: Record<string, unknown>;
deletedAt?: string;
actor?: Actor;
origin?: FieldOrigin;
createdAt: string;
/**
* The Space (Better Auth organization id) the record belongs to. Stamped
@ -126,7 +146,7 @@ interface SyncMeta {
function isFieldChange(v: unknown): v is FieldChange {
if (!v || typeof v !== 'object') return false;
const f = v as Record<string, unknown>;
return 'value' in f && (f.updatedAt === undefined || typeof f.updatedAt === 'string');
return 'value' in f && (f.at === undefined || typeof f.at === 'string');
}
function isFieldsMap(v: unknown): v is Record<string, FieldChange> {
@ -162,22 +182,15 @@ export function isValidSyncChange(v: unknown): v is SyncChange {
// malformed actor doesn't corrupt data; worst case the Workbench shows
// "unknown" for that change.
if (c.actor !== undefined && (typeof c.actor !== 'object' || c.actor === null)) return false;
// `origin` is a fixed enum on the producing side; we accept any string
// here so a future enum extension on the server doesn't fail validation
// on older clients. The apply-path forces `'server-replay'` regardless.
if (c.origin !== undefined && typeof c.origin !== 'string') return false;
return true;
}
// ─── Apply Server Changes (top-level so unit tests can import directly) ──
/**
* Reads the per-field LWW timestamps off a record. Returns an empty map for
* legacy records that pre-date __fieldTimestamps so callers can fall back to
* record-level `updatedAt`.
*/
export function readFieldTimestamps(record: unknown): Record<string, string> {
if (!record || typeof record !== 'object') return {};
const ft = (record as Record<string, unknown>)[FIELD_TIMESTAMPS_KEY];
return ft && typeof ft === 'object' ? (ft as Record<string, string>) : {};
}
/**
* Applies a batch of server changes to the local Dexie database with
* field-level Last-Write-Wins conflict resolution.
@ -320,24 +333,33 @@ export async function applyServerChanges(appId: string, changes: unknown[]): Pro
for (const change of tableChanges) {
const recordId = change.id;
// All writes from this path are server-replays from the
// local client's perspective: even an originally `'user'`-
// origin push from another device arrives here as a pull,
// and the local user never typed it. Stamping `'server-replay'`
// keeps conflict-detection (F2) from treating later
// overwrites as "lost edits".
const replayOrigin: FieldOrigin = 'server-replay';
if (change.deletedAt || change.op === 'delete') {
const existing = await table.get(recordId);
if (!existing) continue;
if (change.deletedAt) {
const localFT = readFieldTimestamps(existing);
const localMeta = readFieldMeta(existing);
const serverTime = change.deletedAt;
const localDeletedAtTime =
localFT.deletedAt ??
localMeta.deletedAt?.at ??
((existing as Record<string, unknown>).deletedAt as string | undefined) ??
'';
if (serverTime >= localDeletedAtTime) {
const tombActor: Actor = change.actor ?? USER_ACTOR;
await table.update(recordId, {
deletedAt: serverTime,
updatedAt: serverTime,
[FIELD_TIMESTAMPS_KEY]: {
...localFT,
deletedAt: serverTime,
updatedAt: serverTime,
[FIELD_META_KEY]: {
...localMeta,
deletedAt: makeFieldMeta(serverTime, tombActor, replayOrigin),
updatedAt: makeFieldMeta(serverTime, tombActor, replayOrigin),
},
});
}
@ -359,51 +381,32 @@ export async function applyServerChanges(appId: string, changes: unknown[]): Pro
const changeActor: Actor = change.actor ?? USER_ACTOR;
if (!existing) {
const ft: Record<string, string> = {};
const fa: Record<string, Actor> = {};
const fieldMeta: Record<string, FieldMeta> = {};
for (const key of Object.keys(changeData)) {
if (
key === 'id' ||
key === FIELD_TIMESTAMPS_KEY ||
key === FIELD_ACTORS_KEY ||
key === LAST_ACTOR_KEY
) {
continue;
}
ft[key] = recordTime;
fa[key] = changeActor;
if (key === 'id' || key === FIELD_META_KEY) continue;
fieldMeta[key] = makeFieldMeta(recordTime, changeActor, replayOrigin);
}
await table.put({
...changeData,
id: recordId,
[FIELD_TIMESTAMPS_KEY]: ft,
[FIELD_ACTORS_KEY]: fa,
[LAST_ACTOR_KEY]: changeActor,
[FIELD_META_KEY]: fieldMeta,
});
} else {
const localFT = readFieldTimestamps(existing);
const localFA = readFieldActors(existing);
const localMeta = readFieldMeta(existing);
const localUpdatedAt =
((existing as Record<string, unknown>).updatedAt as string | undefined) ?? '';
const updates: Record<string, unknown> = {};
const newFT: Record<string, string> = { ...localFT };
const newFA: Record<string, Actor> = { ...localFA };
const newMeta: Record<string, FieldMeta> = { ...localMeta };
for (const [key, val] of Object.entries(changeData)) {
if (
key === 'id' ||
key === FIELD_TIMESTAMPS_KEY ||
key === FIELD_ACTORS_KEY ||
key === LAST_ACTOR_KEY
) {
continue;
}
const localFieldTime = localFT[key] ?? localUpdatedAt;
if (key === 'id' || key === FIELD_META_KEY) continue;
const localFieldTime = localMeta[key]?.at ?? localUpdatedAt;
if (recordTime >= localFieldTime) {
// Conflict signal: server STRICTLY wins (>) and the local
// field had a non-empty value that differs from the new
// one. Equal-time ties don't fire because there's no
// edit to lose.
// edit to lose. F2 will additionally gate this on
// localMeta[key].origin === 'user'.
const localValue = (existing as Record<string, unknown>)[key];
if (
recordTime > localFieldTime &&
@ -421,14 +424,11 @@ export async function applyServerChanges(appId: string, changes: unknown[]): Pro
});
}
updates[key] = val;
newFT[key] = recordTime;
newFA[key] = changeActor;
newMeta[key] = makeFieldMeta(recordTime, changeActor, replayOrigin);
}
}
if (Object.keys(updates).length > 0) {
updates[FIELD_TIMESTAMPS_KEY] = newFT;
updates[FIELD_ACTORS_KEY] = newFA;
updates[LAST_ACTOR_KEY] = changeActor;
updates[FIELD_META_KEY] = newMeta;
await table.update(recordId, updates);
}
}
@ -443,35 +443,29 @@ export async function applyServerChanges(appId: string, changes: unknown[]): Pro
// record was deleted locally — recreate it under the server's
// authority.
const record: Record<string, unknown> = { id: recordId };
const ft: Record<string, string> = {};
const fa: Record<string, Actor> = {};
const fieldMeta: Record<string, FieldMeta> = {};
const fallback = new Date().toISOString();
for (const [key, fc] of Object.entries(serverFields)) {
record[key] = fc.value;
ft[key] = fc.updatedAt ?? fallback;
fa[key] = changeActor;
fieldMeta[key] = makeFieldMeta(fc.at ?? fallback, changeActor, replayOrigin);
}
record[FIELD_TIMESTAMPS_KEY] = ft;
record[FIELD_ACTORS_KEY] = fa;
record[LAST_ACTOR_KEY] = changeActor;
record[FIELD_META_KEY] = fieldMeta;
await table.put(record);
} else {
// Per-field comparison. Falls back to record-level updatedAt
// only for legacy records that pre-date __fieldTimestamps.
const localFT = readFieldTimestamps(existing);
const localFA = readFieldActors(existing);
// Per-field comparison.
const localMeta = readFieldMeta(existing);
const localUpdatedAt =
((existing as Record<string, unknown>).updatedAt as string | undefined) ?? '';
const updates: Record<string, unknown> = {};
const newFT: Record<string, string> = { ...localFT };
const newFA: Record<string, Actor> = { ...localFA };
const newMeta: Record<string, FieldMeta> = { ...localMeta };
for (const [key, fc] of Object.entries(serverFields)) {
const serverTime = fc.updatedAt ?? '';
const localFieldTime = localFT[key] ?? localUpdatedAt;
const serverTime = fc.at ?? '';
const localFieldTime = localMeta[key]?.at ?? localUpdatedAt;
if (serverTime >= localFieldTime) {
// Same conflict criteria as the insert-as-update path:
// strictly newer + non-empty local + actually different.
// F2 will additionally gate on localMeta[key].origin === 'user'.
const localValue = (existing as Record<string, unknown>)[key];
if (
serverTime > localFieldTime &&
@ -489,14 +483,11 @@ export async function applyServerChanges(appId: string, changes: unknown[]): Pro
});
}
updates[key] = fc.value;
newFT[key] = serverTime;
newFA[key] = changeActor;
newMeta[key] = makeFieldMeta(serverTime, changeActor, replayOrigin);
}
}
if (Object.keys(updates).length > 0) {
updates[FIELD_TIMESTAMPS_KEY] = newFT;
updates[FIELD_ACTORS_KEY] = newFA;
updates[LAST_ACTOR_KEY] = changeActor;
updates[FIELD_META_KEY] = newMeta;
await table.update(recordId, updates);
}
}

View file

@ -198,8 +198,8 @@ export const tagPresetsStore = {
}
// Encrypt + write each row. The Dexie creating-hook stamps
// __lastActor / __fieldActors automatically; spaceId is
// pre-populated here so the hook leaves it alone.
// __fieldMeta automatically; spaceId is pre-populated here so
// the hook leaves it alone.
await db.transaction('rw', db.table('globalTags'), db.table('tagGroups'), async () => {
for (const group of groupsToWrite) {
await encryptRecord('tagGroups', group);

View file

@ -51,7 +51,7 @@ export const memosStore = {
// createdAt + updatedAt are required by LocalMemo's type but the
// previous create() never set them — DetailView showed
// "Erstellt: Invalid Date" for every memo. The Dexie creating
// hook only auto-stamps userId + __fieldTimestamps; module
// hook only auto-stamps userId + __fieldMeta; module
// stores have to set their own createdAt/updatedAt explicitly
// (consistent with the rest of the Mana modules).
createdAt: now,

View file

@ -94,8 +94,8 @@ describe('notes encryption pilot', () => {
expect(stored.isPinned).toBe(false);
expect(stored.isArchived).toBe(false);
expect(stored.userId).toBe('test-user');
// Auto-stamped __fieldTimestamps stays plaintext too — LWW relies on it.
expect((stored as unknown as Record<string, unknown>).__fieldTimestamps).toBeDefined();
// Auto-stamped __fieldMeta stays plaintext too — LWW relies on it.
expect((stored as unknown as Record<string, unknown>).__fieldMeta).toBeDefined();
});
it('updates encrypt the modified content fields, leave flags untouched', async () => {