mirror of
https://github.com/Memo-2023/mana-monorepo.git
synced 2026-05-14 18:41:08 +02:00
fix(spiral-db): add test suite and fix critical bugs
Add comprehensive test suite (174 tests) covering encoding, schema, image, database CRUD, and PNG round-trip. Fix critical bugs: - PNG compression: replace non-functional zlibCompress with pako.deflate - PNG import: add CRC validation, support all filter types (Sub/Up/Avg/Paeth) - Input validation: validate records against schema before insert - Index overflow: dynamic dataStartRing prevents index/data ring overlap - Image expansion: expand before writes instead of after to prevent OOB - update() read bug: search index from end to find latest entry, not deleted one - String encoding: enforce 511-byte max length - Index ring count: use 6 bits (2 pixels) instead of 3 bits for >7 ring support Co-Authored-By: Claude Opus 4.6 (1M context) <noreply@anthropic.com>
This commit is contained in:
parent
3f5c17adbc
commit
d4d08cc68b
9 changed files with 2227 additions and 163 deletions
|
|
@ -138,7 +138,7 @@ if (existsSync(pngPath)) {
|
|||
console.log(` Loaded back: ${loadedImage.width}×${loadedImage.height} pixels`);
|
||||
|
||||
// Verify data integrity
|
||||
const loadedDb = SpiralDB.fromImage(loadedImage, createTodoSchema());
|
||||
const loadedDb = SpiralDB.fromImage<Record<string, unknown>>(loadedImage, createTodoSchema());
|
||||
const loadedTodos = loadedDb.getAll();
|
||||
console.log(` Verified: ${loadedTodos.length} todos recovered`);
|
||||
|
||||
|
|
|
|||
660
packages/spiral-db/src/database.test.ts
Normal file
660
packages/spiral-db/src/database.test.ts
Normal file
|
|
@ -0,0 +1,660 @@
|
|||
/**
|
||||
* Database Tests — CRUD, edge cases, fromImage, compact
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { SpiralDB } from './database.js';
|
||||
import { createTodoSchema } from './schema.js';
|
||||
import { getPixelByIndex } from './image.js';
|
||||
import { MAGIC_VALID } from './constants.js';
|
||||
import type { SchemaDefinition } from './types.js';
|
||||
|
||||
// =============================================================================
|
||||
// HELPERS
|
||||
// =============================================================================
|
||||
|
||||
interface TodoData {
|
||||
id: number;
|
||||
status: number;
|
||||
priority: number;
|
||||
createdAt: Date;
|
||||
dueDate: Date | null;
|
||||
completedAt: Date | null;
|
||||
title: string;
|
||||
description: string | null;
|
||||
tags: number[];
|
||||
}
|
||||
|
||||
function makeTodo(overrides: Partial<TodoData> = {}): TodoData {
|
||||
return {
|
||||
id: 0,
|
||||
status: 0,
|
||||
priority: 1,
|
||||
createdAt: new Date('2025-01-15'),
|
||||
dueDate: null,
|
||||
completedAt: null,
|
||||
title: 'Test Todo',
|
||||
description: null,
|
||||
tags: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
function createDb(opts?: { compression?: boolean }) {
|
||||
return new SpiralDB<TodoData>({
|
||||
schema: createTodoSchema(),
|
||||
compression: opts?.compression,
|
||||
});
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// INITIALIZATION
|
||||
// =============================================================================
|
||||
|
||||
describe('Database Initialization', () => {
|
||||
it('should create empty database with magic byte', () => {
|
||||
const db = createDb();
|
||||
const image = db.getImage();
|
||||
expect(getPixelByIndex(image, 0)).toBe(MAGIC_VALID);
|
||||
});
|
||||
|
||||
it('should start with 0 records', () => {
|
||||
const db = createDb();
|
||||
const stats = db.getStats();
|
||||
expect(stats.totalRecords).toBe(0);
|
||||
expect(stats.activeRecords).toBe(0);
|
||||
expect(stats.deletedRecords).toBe(0);
|
||||
});
|
||||
|
||||
it('should have odd-sized square image', () => {
|
||||
const db = createDb();
|
||||
const image = db.getImage();
|
||||
expect(image.width).toBe(image.height);
|
||||
expect(image.width % 2).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// INSERT
|
||||
// =============================================================================
|
||||
|
||||
describe('Insert', () => {
|
||||
it('should insert a record and return success', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo());
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.recordId).toBe(0);
|
||||
});
|
||||
|
||||
it('should assign incremental IDs', () => {
|
||||
const db = createDb();
|
||||
expect(db.insert(makeTodo()).recordId).toBe(0);
|
||||
expect(db.insert(makeTodo({ title: 'Second' })).recordId).toBe(1);
|
||||
expect(db.insert(makeTodo({ title: 'Third' })).recordId).toBe(2);
|
||||
});
|
||||
|
||||
it('should update stats after insert', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
db.insert(makeTodo({ title: 'Second' }));
|
||||
const stats = db.getStats();
|
||||
expect(stats.totalRecords).toBe(2);
|
||||
expect(stats.activeRecords).toBe(2);
|
||||
});
|
||||
|
||||
it('should handle insert with all fields populated', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(
|
||||
makeTodo({
|
||||
priority: 5,
|
||||
dueDate: new Date('2025-12-31'),
|
||||
completedAt: new Date('2025-06-15'),
|
||||
title: 'Full Todo',
|
||||
description: 'A detailed description with special chars: <>&"',
|
||||
tags: [1, 2, 3, 4],
|
||||
})
|
||||
);
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
|
||||
it('should handle insert with empty string title', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo({ title: '' }));
|
||||
expect(result.success).toBe(true);
|
||||
const read = db.read(result.recordId!);
|
||||
expect(read.record?.data.title).toBe('');
|
||||
});
|
||||
|
||||
it('should handle insert with max tags', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo({ tags: [1, 2, 3, 4, 5, 6, 7, 8] }));
|
||||
expect(result.success).toBe(true);
|
||||
const read = db.read(result.recordId!);
|
||||
expect(read.record?.data.tags).toEqual([1, 2, 3, 4, 5, 6, 7, 8]);
|
||||
});
|
||||
|
||||
it('should auto-expand image when needed', () => {
|
||||
const db = createDb();
|
||||
const initialSize = db.getImage().width;
|
||||
|
||||
// Insert enough records to trigger expansion
|
||||
for (let i = 0; i < 20; i++) {
|
||||
db.insert(makeTodo({ title: `Todo ${i} with a longer title to use more pixels` }));
|
||||
}
|
||||
|
||||
expect(db.getImage().width).toBeGreaterThanOrEqual(initialSize);
|
||||
expect(db.getStats().totalRecords).toBe(20);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// READ
|
||||
// =============================================================================
|
||||
|
||||
describe('Read', () => {
|
||||
it('should read back inserted data correctly', () => {
|
||||
const db = createDb();
|
||||
db.insert(
|
||||
makeTodo({
|
||||
title: 'Read Test',
|
||||
priority: 3,
|
||||
tags: [10, 20],
|
||||
})
|
||||
);
|
||||
|
||||
const result = db.read(0);
|
||||
expect(result.success).toBe(true);
|
||||
expect(result.record?.data.title).toBe('Read Test');
|
||||
expect(result.record?.data.priority).toBe(3);
|
||||
expect(result.record?.data.tags).toEqual([10, 20]);
|
||||
});
|
||||
|
||||
it('should return metadata with record', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
const result = db.read(0);
|
||||
expect(result.record?.meta.id).toBe(0);
|
||||
expect(result.record?.meta.status).toBe('active');
|
||||
expect(result.record?.meta.length).toBeGreaterThan(0);
|
||||
});
|
||||
|
||||
it('should fail for non-existent ID', () => {
|
||||
const db = createDb();
|
||||
const result = db.read(999);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toBe('Record not found');
|
||||
});
|
||||
|
||||
it('should fail for deleted record', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
db.delete(0);
|
||||
const result = db.read(0);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toBe('Record has been deleted');
|
||||
});
|
||||
|
||||
it('should read nullable fields correctly', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ dueDate: null, description: null }));
|
||||
const result = db.read(0);
|
||||
expect(result.record?.data.dueDate).toBeNull();
|
||||
expect(result.record?.data.description).toBeNull();
|
||||
});
|
||||
|
||||
it('should read populated nullable fields', () => {
|
||||
const db = createDb();
|
||||
const dueDate = new Date('2025-12-31');
|
||||
db.insert(makeTodo({ dueDate, description: 'Has description' }));
|
||||
const result = db.read(0);
|
||||
expect(result.record?.data.description).toBe('Has description');
|
||||
expect(result.record?.data.dueDate).not.toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// DELETE
|
||||
// =============================================================================
|
||||
|
||||
describe('Delete', () => {
|
||||
it('should soft-delete a record', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
const result = db.delete(0);
|
||||
expect(result.success).toBe(true);
|
||||
expect(db.getStats().deletedRecords).toBe(1);
|
||||
expect(db.getStats().activeRecords).toBe(0);
|
||||
});
|
||||
|
||||
it('should fail to delete non-existent record', () => {
|
||||
const db = createDb();
|
||||
const result = db.delete(999);
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toBe('Record not found');
|
||||
});
|
||||
|
||||
it('should fail to delete already-deleted record', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
db.delete(0);
|
||||
const result = db.delete(0);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should not affect other records', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'Keep' }));
|
||||
db.insert(makeTodo({ title: 'Delete' }));
|
||||
db.insert(makeTodo({ title: 'Also Keep' }));
|
||||
db.delete(1);
|
||||
|
||||
expect(db.read(0).record?.data.title).toBe('Keep');
|
||||
expect(db.read(1).success).toBe(false);
|
||||
expect(db.read(2).record?.data.title).toBe('Also Keep');
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// COMPLETE
|
||||
// =============================================================================
|
||||
|
||||
describe('Complete', () => {
|
||||
it('should mark record as completed', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
const result = db.complete(0);
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
const read = db.read(0);
|
||||
expect(read.record?.meta.status).toBe('completed');
|
||||
});
|
||||
|
||||
it('should fail for non-active record', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
db.delete(0);
|
||||
const result = db.complete(0);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail for already-completed record', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
db.complete(0);
|
||||
const result = db.complete(0);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail for non-existent record', () => {
|
||||
const db = createDb();
|
||||
const result = db.complete(999);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// UPDATE
|
||||
// =============================================================================
|
||||
|
||||
describe('Update', () => {
|
||||
it('should update record data', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'Original', priority: 1 }));
|
||||
const result = db.update(0, { title: 'Updated' } as Partial<TodoData>);
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
const read = db.read(0);
|
||||
expect(read.record?.data.title).toBe('Updated');
|
||||
expect(read.record?.data.priority).toBe(1); // unchanged
|
||||
});
|
||||
|
||||
it('should fail for non-existent record', () => {
|
||||
const db = createDb();
|
||||
const result = db.update(999, { title: 'Nope' } as Partial<TodoData>);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
|
||||
it('should fail for deleted record', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo());
|
||||
db.delete(0);
|
||||
const result = db.update(0, { title: 'Nope' } as Partial<TodoData>);
|
||||
expect(result.success).toBe(false);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// GETALL
|
||||
// =============================================================================
|
||||
|
||||
describe('getAll', () => {
|
||||
it('should return all active records', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'A' }));
|
||||
db.insert(makeTodo({ title: 'B' }));
|
||||
|
||||
const all = db.getAll();
|
||||
expect(all.length).toBe(2);
|
||||
});
|
||||
|
||||
it('should exclude deleted records', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'A' }));
|
||||
db.insert(makeTodo({ title: 'B' }));
|
||||
db.delete(0);
|
||||
|
||||
const all = db.getAll();
|
||||
expect(all.length).toBe(1);
|
||||
expect(all[0].data.title).toBe('B');
|
||||
});
|
||||
|
||||
it('should filter by status', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'Active' }));
|
||||
db.insert(makeTodo({ title: 'Completed' }));
|
||||
db.complete(1);
|
||||
|
||||
expect(db.getAll('active').length).toBe(1);
|
||||
expect(db.getAll('completed').length).toBe(1);
|
||||
expect(db.getAll('active')[0].data.title).toBe('Active');
|
||||
});
|
||||
|
||||
it('should return empty array for empty database', () => {
|
||||
const db = createDb();
|
||||
expect(db.getAll()).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// COMPACT
|
||||
// =============================================================================
|
||||
|
||||
describe('Compact', () => {
|
||||
it('should remove deleted records', () => {
|
||||
const db = createDb();
|
||||
for (let i = 0; i < 10; i++) {
|
||||
db.insert(makeTodo({ title: `Todo ${i}` }));
|
||||
}
|
||||
for (let i = 0; i < 5; i++) {
|
||||
db.delete(i);
|
||||
}
|
||||
|
||||
expect(db.getStats().deletedRecords).toBe(5);
|
||||
|
||||
db.compact();
|
||||
|
||||
expect(db.getStats().deletedRecords).toBe(0);
|
||||
expect(db.getStats().activeRecords).toBe(5);
|
||||
});
|
||||
|
||||
it('should preserve completed records', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'Active' }));
|
||||
db.insert(makeTodo({ title: 'Done' }));
|
||||
db.complete(1);
|
||||
db.insert(makeTodo({ title: 'Deleted' }));
|
||||
db.delete(2);
|
||||
|
||||
db.compact();
|
||||
|
||||
const all = db.getAll();
|
||||
expect(all.length).toBe(2);
|
||||
const titles = all.map((r) => r.data.title);
|
||||
expect(titles).toContain('Active');
|
||||
expect(titles).toContain('Done');
|
||||
});
|
||||
|
||||
it('should reduce image size after compaction', () => {
|
||||
const db = createDb();
|
||||
for (let i = 0; i < 20; i++) {
|
||||
db.insert(makeTodo({ title: `A longer todo title number ${i}` }));
|
||||
}
|
||||
const sizeBefore = db.getImage().width;
|
||||
|
||||
for (let i = 0; i < 15; i++) {
|
||||
db.delete(i);
|
||||
}
|
||||
|
||||
db.compact();
|
||||
|
||||
// Compacted image should be smaller or equal
|
||||
expect(db.getImage().width).toBeLessThanOrEqual(sizeBefore);
|
||||
});
|
||||
|
||||
it('should handle compacting empty database', () => {
|
||||
const db = createDb();
|
||||
db.compact();
|
||||
expect(db.getStats().totalRecords).toBe(0);
|
||||
});
|
||||
|
||||
it('should still work after compaction (insert/read)', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'Before' }));
|
||||
db.delete(0);
|
||||
db.compact();
|
||||
|
||||
const result = db.insert(makeTodo({ title: 'After Compact' }));
|
||||
expect(result.success).toBe(true);
|
||||
const read = db.read(result.recordId!);
|
||||
expect(read.record?.data.title).toBe('After Compact');
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// FROM IMAGE (ROUND-TRIP)
|
||||
// =============================================================================
|
||||
|
||||
describe('fromImage', () => {
|
||||
it('should reconstruct database from image', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'Persist Me', priority: 5, tags: [1, 2] }));
|
||||
db.insert(makeTodo({ title: 'Me Too', description: 'With desc' }));
|
||||
db.complete(1);
|
||||
|
||||
const image = db.getImage();
|
||||
|
||||
const restored = SpiralDB.fromImage<TodoData>(image, createTodoSchema());
|
||||
const all = restored.getAll();
|
||||
expect(all.length).toBe(2);
|
||||
|
||||
const first = restored.read(0);
|
||||
expect(first.record?.data.title).toBe('Persist Me');
|
||||
expect(first.record?.data.priority).toBe(5);
|
||||
expect(first.record?.data.tags).toEqual([1, 2]);
|
||||
|
||||
const second = restored.read(1);
|
||||
expect(second.record?.data.title).toBe('Me Too');
|
||||
expect(second.record?.meta.status).toBe('completed');
|
||||
});
|
||||
|
||||
it('should throw for invalid magic byte', () => {
|
||||
const db = createDb();
|
||||
const image = db.getImage();
|
||||
// Corrupt magic byte
|
||||
image.pixels[Math.floor(image.width / 2) * image.width * 3 + Math.floor(image.width / 2) * 3] =
|
||||
0;
|
||||
image.pixels[
|
||||
Math.floor(image.width / 2) * image.width * 3 + Math.floor(image.width / 2) * 3 + 1
|
||||
] = 0;
|
||||
image.pixels[
|
||||
Math.floor(image.width / 2) * image.width * 3 + Math.floor(image.width / 2) * 3 + 2
|
||||
] = 0;
|
||||
|
||||
expect(() => SpiralDB.fromImage(image, createTodoSchema())).toThrow('magic byte mismatch');
|
||||
});
|
||||
|
||||
it('should allow continued inserts after fromImage', () => {
|
||||
const db = createDb();
|
||||
db.insert(makeTodo({ title: 'First' }));
|
||||
|
||||
const restored = SpiralDB.fromImage<TodoData>(db.getImage(), createTodoSchema());
|
||||
const result = restored.insert(makeTodo({ title: 'Second' }));
|
||||
expect(result.success).toBe(true);
|
||||
expect(restored.getStats().totalRecords).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// COMPRESSION
|
||||
// =============================================================================
|
||||
|
||||
describe('Compression', () => {
|
||||
it('should work with compression enabled', () => {
|
||||
const db = createDb({ compression: true });
|
||||
const result = db.insert(
|
||||
makeTodo({
|
||||
title: 'Compressed todo with a somewhat longer title for testing',
|
||||
description: 'This is a longer description that should benefit from compression',
|
||||
})
|
||||
);
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
const read = db.read(result.recordId!);
|
||||
expect(read.record?.data.title).toBe(
|
||||
'Compressed todo with a somewhat longer title for testing'
|
||||
);
|
||||
expect(read.record?.data.description).toBe(
|
||||
'This is a longer description that should benefit from compression'
|
||||
);
|
||||
});
|
||||
|
||||
it('should produce fewer record pixels with compression for repetitive data', () => {
|
||||
const dbUncompressed = createDb({ compression: false });
|
||||
const dbCompressed = createDb({ compression: true });
|
||||
|
||||
// Use a string > 20 bytes (compression threshold) but not so large it triggers overflow
|
||||
const longTitle = 'ab'.repeat(40); // 80 chars, compressible
|
||||
|
||||
const r1 = dbUncompressed.insert(makeTodo({ title: longTitle }));
|
||||
const r2 = dbCompressed.insert(makeTodo({ title: longTitle }));
|
||||
|
||||
expect(r1.success).toBe(true);
|
||||
expect(r2.success).toBe(true);
|
||||
|
||||
const uncompressedRecord = dbUncompressed.read(r1.recordId!);
|
||||
const compressedRecord = dbCompressed.read(r2.recordId!);
|
||||
|
||||
expect(uncompressedRecord.success).toBe(true);
|
||||
expect(compressedRecord.success).toBe(true);
|
||||
|
||||
// Compressed record should use fewer pixels
|
||||
expect(compressedRecord.record!.meta.length).toBeLessThan(
|
||||
uncompressedRecord.record!.meta.length
|
||||
);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// STRESS / EDGE CASES
|
||||
// =============================================================================
|
||||
|
||||
describe('Stress Tests', () => {
|
||||
it('should handle 100 inserts', () => {
|
||||
const db = createDb();
|
||||
for (let i = 0; i < 100; i++) {
|
||||
const result = db.insert(makeTodo({ title: `Todo #${i}` }));
|
||||
expect(result.success).toBe(true);
|
||||
}
|
||||
expect(db.getStats().activeRecords).toBe(100);
|
||||
});
|
||||
|
||||
it('should handle interleaved operations (small scale)', () => {
|
||||
const db = createDb();
|
||||
|
||||
db.insert(makeTodo({ title: 'T0' }));
|
||||
db.insert(makeTodo({ title: 'T1' }));
|
||||
db.insert(makeTodo({ title: 'T2' }));
|
||||
|
||||
db.delete(1);
|
||||
db.complete(2);
|
||||
|
||||
const stats = db.getStats();
|
||||
expect(stats.activeRecords).toBe(1); // T0
|
||||
expect(stats.deletedRecords).toBe(1); // T1
|
||||
|
||||
const all = db.getAll();
|
||||
expect(all.length).toBe(2); // T0 (active) + T2 (completed)
|
||||
});
|
||||
|
||||
it('should handle UTF-8 in titles', () => {
|
||||
const db = createDb();
|
||||
const titles = ['日本語テスト', 'Ünïcödë', '🎉🚀✨', 'مرحبا'];
|
||||
for (const title of titles) {
|
||||
const result = db.insert(makeTodo({ title }));
|
||||
expect(result.success).toBe(true);
|
||||
const read = db.read(result.recordId!);
|
||||
expect(read.record?.data.title).toBe(title);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// INPUT VALIDATION
|
||||
// =============================================================================
|
||||
|
||||
describe('Input Validation on Insert', () => {
|
||||
it('should reject record with wrong type for int field', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo({ priority: 'high' as unknown as number }));
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('Validation failed');
|
||||
});
|
||||
|
||||
it('should reject record with out-of-range int', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo({ id: 5000 })); // max 4095
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('out of range');
|
||||
});
|
||||
|
||||
it('should reject record with string too long', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo({ title: 'x'.repeat(256) })); // max 255
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('too long');
|
||||
});
|
||||
|
||||
it('should reject record with wrong type for timestamp', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo({ createdAt: 'not-a-date' as unknown as Date }));
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('Validation failed');
|
||||
});
|
||||
|
||||
it('should reject record with too many array items', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo({ tags: [1, 2, 3, 4, 5, 6, 7, 8, 9] }));
|
||||
expect(result.success).toBe(false);
|
||||
expect(result.error).toContain('too many');
|
||||
});
|
||||
|
||||
it('should accept valid record after validation', () => {
|
||||
const db = createDb();
|
||||
const result = db.insert(makeTodo());
|
||||
expect(result.success).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// CUSTOM SCHEMAS
|
||||
// =============================================================================
|
||||
|
||||
describe('Custom Schema', () => {
|
||||
it('should work with a minimal schema', () => {
|
||||
const schema: SchemaDefinition = {
|
||||
version: 1,
|
||||
name: 'minimal',
|
||||
fields: [
|
||||
{ name: 'id', type: 'int', maxLength: 8 },
|
||||
{ name: 'name', type: 'string', maxLength: 50 },
|
||||
],
|
||||
};
|
||||
|
||||
const db = new SpiralDB<{ id: number; name: string }>({ schema });
|
||||
const result = db.insert({ id: 42, name: 'Test' });
|
||||
expect(result.success).toBe(true);
|
||||
|
||||
const read = db.read(0);
|
||||
expect(read.record?.data.name).toBe('Test');
|
||||
});
|
||||
});
|
||||
|
|
@ -54,12 +54,13 @@ import {
|
|||
expandImage,
|
||||
} from './image.js';
|
||||
import { getRingInfo, findSpaceForRecord, getTotalPixelsForRing } from './spiral.js';
|
||||
import { encodeSchema } from './schema.js';
|
||||
import { encodeSchema, validateRecord } from './schema.js';
|
||||
|
||||
export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown>> {
|
||||
export class SpiralDB<T extends object = Record<string, unknown>> {
|
||||
private image: SpiralImage;
|
||||
private schema: SchemaDefinition;
|
||||
private index: MasterIndex;
|
||||
private dataStartRing: number;
|
||||
private currentRing: number;
|
||||
private currentOffset: number;
|
||||
private compression: boolean;
|
||||
|
|
@ -69,7 +70,8 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
this.compression = options.compression ?? false;
|
||||
|
||||
// Initialize with minimum size for header + schema + index
|
||||
const initialRing = Math.max(RING_DATA_START, options.initialSize ?? RING_DATA_START);
|
||||
this.dataStartRing = RING_DATA_START;
|
||||
const initialRing = Math.max(this.dataStartRing, options.initialSize ?? this.dataStartRing);
|
||||
this.image = createImageForRing(initialRing);
|
||||
|
||||
// Initialize empty index
|
||||
|
|
@ -80,7 +82,7 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
};
|
||||
|
||||
// Start writing data after index ring
|
||||
this.currentRing = RING_DATA_START;
|
||||
this.currentRing = this.dataStartRing;
|
||||
this.currentOffset = 0;
|
||||
|
||||
// Write initial structure
|
||||
|
|
@ -133,6 +135,85 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
writePixelRange(this.image, ringInfo.startIndex, schemaPixels);
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate how many rings the index needs for a given record count.
|
||||
*/
|
||||
private getIndexRingsNeeded(recordCount: number): number {
|
||||
// Header: 12 bits (count) + 12 bits (nextId) = 24 bits
|
||||
// Per entry: 12+8+8+9+3 = 40 bits
|
||||
const totalBits = 24 + recordCount * 40;
|
||||
const totalPixels = Math.ceil(totalBits / 3);
|
||||
|
||||
let ringsNeeded = 0;
|
||||
let pixelsAvailable = 0;
|
||||
let ring = RING_INDEX;
|
||||
while (pixelsAvailable < totalPixels) {
|
||||
pixelsAvailable += getRingInfo(ring).pixelCount;
|
||||
ring++;
|
||||
ringsNeeded++;
|
||||
}
|
||||
return ringsNeeded;
|
||||
}
|
||||
|
||||
/**
|
||||
* Ensure there's room for additional index entries without overlapping data.
|
||||
* If the index would overflow into the data region, rebuild the database
|
||||
* with a higher dataStartRing.
|
||||
*/
|
||||
private ensureIndexCapacity(additionalEntries: number): void {
|
||||
const futureCount = this.index.records.length + additionalEntries;
|
||||
const ringsNeeded = this.getIndexRingsNeeded(futureCount);
|
||||
const requiredDataStart = RING_INDEX + ringsNeeded;
|
||||
|
||||
if (requiredDataStart > this.dataStartRing) {
|
||||
// Need to rebuild: collect all current records, recreate with more index space
|
||||
const activeRecords = this.getAll('active');
|
||||
const completedRecords = this.getAll('completed');
|
||||
const allRecords = [...activeRecords, ...completedRecords];
|
||||
|
||||
// Reset with new data start
|
||||
this.dataStartRing = requiredDataStart;
|
||||
this.image = createImageForRing(this.dataStartRing);
|
||||
this.index = { records: [], deletedIds: new Set(), nextId: 0 };
|
||||
this.currentRing = this.dataStartRing;
|
||||
this.currentOffset = 0;
|
||||
this.initializeDatabase();
|
||||
|
||||
// Re-insert all records
|
||||
for (const record of allRecords) {
|
||||
const id = this.index.nextId++;
|
||||
const pixels = this.serializeRecord(id, record.meta.status, record.data);
|
||||
const space = findSpaceForRecord(this.currentRing, this.currentOffset, pixels.length);
|
||||
|
||||
if (space.ring > Math.floor(this.image.width / 2)) {
|
||||
this.image = expandImage(this.image, space.ring);
|
||||
}
|
||||
|
||||
const ringInfo = getRingInfo(space.ring);
|
||||
const startIndex = ringInfo.startIndex + space.offset;
|
||||
writePixelRange(this.image, startIndex, pixels);
|
||||
|
||||
this.index.records.push({
|
||||
id,
|
||||
ring: space.ring,
|
||||
offset: space.offset,
|
||||
length: pixels.length,
|
||||
status: record.meta.status,
|
||||
});
|
||||
|
||||
this.currentRing = space.ring;
|
||||
this.currentOffset = space.offset + pixels.length;
|
||||
if (this.currentOffset >= ringInfo.pixelCount) {
|
||||
this.currentRing++;
|
||||
this.currentOffset = 0;
|
||||
}
|
||||
}
|
||||
|
||||
this.writeHeader();
|
||||
this.writeIndex();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Write the master index to Ring 3+
|
||||
* Index can span multiple rings if needed
|
||||
|
|
@ -158,11 +239,16 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
const pixels = bitsToPixels(stream.bits);
|
||||
|
||||
// Write index pixels starting at Ring 3
|
||||
// May span multiple rings if needed
|
||||
let pixelIndex = 0;
|
||||
let currentRing = RING_INDEX;
|
||||
|
||||
while (pixelIndex < pixels.length) {
|
||||
// Expand image if this ring doesn't fit
|
||||
const maxRing = Math.floor(this.image.width / 2);
|
||||
if (currentRing > maxRing) {
|
||||
this.image = expandImage(this.image, currentRing);
|
||||
}
|
||||
|
||||
const ringInfo = getRingInfo(currentRing);
|
||||
const pixelsInRing = Math.min(pixels.length - pixelIndex, ringInfo.pixelCount);
|
||||
|
||||
|
|
@ -177,11 +263,13 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
}
|
||||
|
||||
// Store how many rings the index spans (for loading)
|
||||
// We use the last pixel of Ring 2 (schema ring) to store this
|
||||
// Encode as 2 pixels (6 bits, max 63 rings) at end of Ring 2
|
||||
const indexRingCount = currentRing - RING_INDEX;
|
||||
const ring2Info = getRingInfo(RING_SCHEMA);
|
||||
const countPixelIndex = ring2Info.startIndex + ring2Info.pixelCount - 1;
|
||||
setPixelByIndex(this.image, countPixelIndex, indexRingCount as ColorIndex);
|
||||
const countPixelIndex = ring2Info.startIndex + ring2Info.pixelCount - 2;
|
||||
// High 3 bits in second-to-last pixel, low 3 bits in last pixel
|
||||
setPixelByIndex(this.image, countPixelIndex, ((indexRingCount >> 3) & 0x7) as ColorIndex);
|
||||
setPixelByIndex(this.image, countPixelIndex + 1, (indexRingCount & 0x7) as ColorIndex);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -198,7 +286,7 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
|
||||
// Encode each field according to schema
|
||||
for (const field of this.schema.fields) {
|
||||
const value = data[field.name];
|
||||
const value = (data as Record<string, unknown>)[field.name];
|
||||
|
||||
// Null flag for nullable fields
|
||||
if (field.nullable) {
|
||||
|
|
@ -305,6 +393,15 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
*/
|
||||
insert(data: T): WriteResult {
|
||||
try {
|
||||
// Validate record against schema before writing
|
||||
const validation = validateRecord(this.schema, data as unknown as Record<string, unknown>);
|
||||
if (!validation.valid) {
|
||||
return { success: false, error: `Validation failed: ${validation.errors.join('; ')}` };
|
||||
}
|
||||
|
||||
// Ensure index has room for one more entry without overlapping data
|
||||
this.ensureIndexCapacity(1);
|
||||
|
||||
const id = this.index.nextId++;
|
||||
const pixels = this.serializeRecord(id, 'active', data);
|
||||
|
||||
|
|
@ -315,8 +412,9 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
// Find space for the record
|
||||
const space = findSpaceForRecord(this.currentRing, this.currentOffset, pixels.length);
|
||||
|
||||
// Expand image if needed
|
||||
if (space.needsExpansion) {
|
||||
// Expand image if needed (check both ring advancement and image bounds)
|
||||
const maxRing = Math.floor(this.image.width / 2);
|
||||
if (space.ring > maxRing) {
|
||||
this.image = expandImage(this.image, space.ring);
|
||||
}
|
||||
|
||||
|
|
@ -365,7 +463,15 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
* Read a record by ID
|
||||
*/
|
||||
read(id: number): ReadResult<T> {
|
||||
const entry = this.index.records.find((r) => r.id === id);
|
||||
// Find the latest non-deleted entry for this ID
|
||||
// (update creates a new entry with the same ID, so we search from the end)
|
||||
let entry: IndexEntry | undefined;
|
||||
for (let i = this.index.records.length - 1; i >= 0; i--) {
|
||||
if (this.index.records[i].id === id) {
|
||||
entry = this.index.records[i];
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (!entry) {
|
||||
return { success: false, error: 'Record not found' };
|
||||
|
|
@ -421,7 +527,7 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
|
||||
const space = findSpaceForRecord(this.currentRing, this.currentOffset, pixels.length);
|
||||
|
||||
if (space.needsExpansion) {
|
||||
if (space.ring > Math.floor(this.image.width / 2)) {
|
||||
this.image = expandImage(this.image, space.ring);
|
||||
}
|
||||
|
||||
|
|
@ -539,7 +645,7 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
const deletedRecords = this.index.records.filter((r) => r.status === 'deleted').length;
|
||||
|
||||
const usedPixels = this.index.records.reduce((sum, r) => sum + r.length, 0);
|
||||
const headerPixels = getTotalPixelsForRing(RING_INDEX);
|
||||
const headerPixels = getTotalPixelsForRing(this.dataStartRing - 1);
|
||||
|
||||
return {
|
||||
imageSize: this.image.width,
|
||||
|
|
@ -576,6 +682,7 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
|
||||
this.image = newDb.image;
|
||||
this.index = newDb.index;
|
||||
this.dataStartRing = newDb.dataStartRing;
|
||||
this.currentRing = newDb.currentRing;
|
||||
this.currentOffset = newDb.currentOffset;
|
||||
|
||||
|
|
@ -585,10 +692,7 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
/**
|
||||
* Load database from an existing image
|
||||
*/
|
||||
static fromImage<T extends Record<string, unknown>>(
|
||||
image: SpiralImage,
|
||||
schema: SchemaDefinition
|
||||
): SpiralDB<T> {
|
||||
static fromImage<T extends object>(image: SpiralImage, schema: SchemaDefinition): SpiralDB<T> {
|
||||
const db = new SpiralDB<T>({ schema });
|
||||
db.image = image;
|
||||
|
||||
|
|
@ -608,10 +712,15 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
* Load index from image
|
||||
*/
|
||||
private loadIndex(): void {
|
||||
// Read index ring count from last pixel of Ring 2
|
||||
// Read index ring count from last 2 pixels of Ring 2 (6 bits, max 63)
|
||||
const ring2Info = getRingInfo(RING_SCHEMA);
|
||||
const countPixelIndex = ring2Info.startIndex + ring2Info.pixelCount - 1;
|
||||
const indexRingCount = getPixelByIndex(this.image, countPixelIndex) || 1;
|
||||
const countPixelIndex = ring2Info.startIndex + ring2Info.pixelCount - 2;
|
||||
const highBits = getPixelByIndex(this.image, countPixelIndex);
|
||||
const lowBits = getPixelByIndex(this.image, countPixelIndex + 1);
|
||||
const indexRingCount = (highBits << 3) | lowBits || 1;
|
||||
|
||||
// Set data start ring based on stored index size
|
||||
this.dataStartRing = RING_INDEX + indexRingCount;
|
||||
|
||||
// Read pixels from all index rings
|
||||
const allPixels: ColorIndex[] = [];
|
||||
|
|
@ -659,6 +768,9 @@ export class SpiralDB<T extends Record<string, unknown> = Record<string, unknown
|
|||
this.currentRing++;
|
||||
this.currentOffset = 0;
|
||||
}
|
||||
} else {
|
||||
this.currentRing = this.dataStartRing;
|
||||
this.currentOffset = 0;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
|
|
|||
446
packages/spiral-db/src/encoding.test.ts
Normal file
446
packages/spiral-db/src/encoding.test.ts
Normal file
|
|
@ -0,0 +1,446 @@
|
|||
/**
|
||||
* Encoding/Decoding Tests
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
bitsToColor,
|
||||
colorToBits,
|
||||
colorToRGB,
|
||||
rgbToColor,
|
||||
createBitStream,
|
||||
writeBits,
|
||||
readBits,
|
||||
peekBits,
|
||||
hasMoreBits,
|
||||
bitsToPixels,
|
||||
pixelsToBits,
|
||||
encodeInt,
|
||||
decodeInt,
|
||||
encodeBool,
|
||||
decodeBool,
|
||||
encodeString,
|
||||
decodeString,
|
||||
encodeTimestamp,
|
||||
decodeTimestamp,
|
||||
encodeIntArray,
|
||||
decodeIntArray,
|
||||
encodeStringArray,
|
||||
decodeStringArray,
|
||||
} from './encoding.js';
|
||||
import type { ColorIndex } from './types.js';
|
||||
|
||||
// =============================================================================
|
||||
// COLOR ↔ BITS
|
||||
// =============================================================================
|
||||
|
||||
describe('Color ↔ Bits Conversion', () => {
|
||||
it('should convert all 8 colors to bits and back', () => {
|
||||
for (let i = 0; i < 8; i++) {
|
||||
const color = i as ColorIndex;
|
||||
const bits = colorToBits(color);
|
||||
const back = bitsToColor(bits[0], bits[1], bits[2]);
|
||||
expect(back).toBe(color);
|
||||
}
|
||||
});
|
||||
|
||||
it('should map black to [0,0,0] and white to [1,1,1]', () => {
|
||||
expect(colorToBits(0)).toEqual([0, 0, 0]);
|
||||
expect(colorToBits(7)).toEqual([1, 1, 1]);
|
||||
});
|
||||
|
||||
it('should map red (4) to [1,0,0]', () => {
|
||||
expect(colorToBits(4)).toEqual([1, 0, 0]);
|
||||
});
|
||||
});
|
||||
|
||||
describe('Color ↔ RGB Conversion', () => {
|
||||
it('should convert all 8 colors to RGB', () => {
|
||||
expect(colorToRGB(0)).toEqual({ r: 0, g: 0, b: 0 }); // black
|
||||
expect(colorToRGB(1)).toEqual({ r: 0, g: 0, b: 255 }); // blue
|
||||
expect(colorToRGB(2)).toEqual({ r: 0, g: 255, b: 0 }); // green
|
||||
expect(colorToRGB(4)).toEqual({ r: 255, g: 0, b: 0 }); // red
|
||||
expect(colorToRGB(7)).toEqual({ r: 255, g: 255, b: 255 }); // white
|
||||
});
|
||||
|
||||
it('should round-trip exact RGB values', () => {
|
||||
for (let i = 0; i < 8; i++) {
|
||||
const color = i as ColorIndex;
|
||||
const rgb = colorToRGB(color);
|
||||
const back = rgbToColor(rgb.r, rgb.g, rgb.b);
|
||||
expect(back).toBe(color);
|
||||
}
|
||||
});
|
||||
|
||||
it('should threshold at 128 for non-exact values', () => {
|
||||
expect(rgbToColor(127, 127, 127)).toBe(0); // all below → black
|
||||
expect(rgbToColor(128, 128, 128)).toBe(7); // all above → white
|
||||
expect(rgbToColor(200, 50, 50)).toBe(4); // red-ish → red
|
||||
expect(rgbToColor(50, 200, 50)).toBe(2); // green-ish → green
|
||||
});
|
||||
|
||||
it('should handle boundary values (0 and 255)', () => {
|
||||
expect(rgbToColor(0, 0, 0)).toBe(0);
|
||||
expect(rgbToColor(255, 255, 255)).toBe(7);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// BIT STREAM
|
||||
// =============================================================================
|
||||
|
||||
describe('BitStream', () => {
|
||||
it('should create empty stream', () => {
|
||||
const stream = createBitStream();
|
||||
expect(stream.bits).toEqual([]);
|
||||
expect(stream.position).toBe(0);
|
||||
});
|
||||
|
||||
it('should write and read bits', () => {
|
||||
const stream = createBitStream();
|
||||
writeBits(stream, 0b101, 3);
|
||||
stream.position = 0;
|
||||
expect(readBits(stream, 3)).toBe(0b101);
|
||||
});
|
||||
|
||||
it('should write multiple values', () => {
|
||||
const stream = createBitStream();
|
||||
writeBits(stream, 5, 4); // 0101
|
||||
writeBits(stream, 3, 3); // 011
|
||||
stream.position = 0;
|
||||
expect(readBits(stream, 4)).toBe(5);
|
||||
expect(readBits(stream, 3)).toBe(3);
|
||||
});
|
||||
|
||||
it('should throw on read past end', () => {
|
||||
const stream = createBitStream();
|
||||
writeBits(stream, 1, 1);
|
||||
stream.position = 0;
|
||||
readBits(stream, 1);
|
||||
expect(() => readBits(stream, 1)).toThrow('Unexpected end of bit stream');
|
||||
});
|
||||
|
||||
it('should peek without consuming', () => {
|
||||
const stream = createBitStream();
|
||||
writeBits(stream, 0b110, 3);
|
||||
stream.position = 0;
|
||||
expect(peekBits(stream, 3)).toBe(0b110);
|
||||
expect(stream.position).toBe(0); // not consumed
|
||||
expect(readBits(stream, 3)).toBe(0b110);
|
||||
expect(stream.position).toBe(3); // now consumed
|
||||
});
|
||||
|
||||
it('should check hasMoreBits', () => {
|
||||
const stream = createBitStream();
|
||||
writeBits(stream, 0b11, 2);
|
||||
stream.position = 0;
|
||||
expect(hasMoreBits(stream, 2)).toBe(true);
|
||||
expect(hasMoreBits(stream, 3)).toBe(false);
|
||||
readBits(stream, 1);
|
||||
expect(hasMoreBits(stream, 1)).toBe(true);
|
||||
expect(hasMoreBits(stream, 2)).toBe(false);
|
||||
});
|
||||
|
||||
it('should handle writing 0 bits correctly', () => {
|
||||
const stream = createBitStream();
|
||||
writeBits(stream, 0, 8);
|
||||
stream.position = 0;
|
||||
expect(readBits(stream, 8)).toBe(0);
|
||||
});
|
||||
|
||||
it('should handle max value for bit width', () => {
|
||||
const stream = createBitStream();
|
||||
writeBits(stream, 0xfff, 12); // max 12-bit value
|
||||
stream.position = 0;
|
||||
expect(readBits(stream, 12)).toBe(4095);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// BITS ↔ PIXELS
|
||||
// =============================================================================
|
||||
|
||||
describe('Bits ↔ Pixels', () => {
|
||||
it('should convert bits to pixels (3 bits per pixel)', () => {
|
||||
const pixels = bitsToPixels([1, 0, 1, 0, 1, 0]);
|
||||
expect(pixels).toEqual([5, 2]); // 101 = 5 (magenta), 010 = 2 (green)
|
||||
});
|
||||
|
||||
it('should pad to 3-bit boundary', () => {
|
||||
const pixels = bitsToPixels([1, 0]); // only 2 bits → padded to 100
|
||||
expect(pixels).toHaveLength(1);
|
||||
expect(pixels[0]).toBe(4); // 100 = red
|
||||
});
|
||||
|
||||
it('should round-trip pixels ↔ bits', () => {
|
||||
const original: ColorIndex[] = [0, 3, 5, 7, 1, 6];
|
||||
const bits = pixelsToBits(original);
|
||||
const back = bitsToPixels(bits);
|
||||
expect(back).toEqual(original);
|
||||
});
|
||||
|
||||
it('should handle empty input', () => {
|
||||
expect(bitsToPixels([])).toEqual([]);
|
||||
expect(pixelsToBits([])).toEqual([]);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// VALUE ENCODING: INT
|
||||
// =============================================================================
|
||||
|
||||
describe('Int Encoding', () => {
|
||||
it('should encode and decode 0', () => {
|
||||
const stream = createBitStream();
|
||||
encodeInt(stream, 0, 12);
|
||||
stream.position = 0;
|
||||
expect(decodeInt(stream, 12)).toBe(0);
|
||||
});
|
||||
|
||||
it('should encode and decode max value', () => {
|
||||
const stream = createBitStream();
|
||||
encodeInt(stream, 4095, 12);
|
||||
stream.position = 0;
|
||||
expect(decodeInt(stream, 12)).toBe(4095);
|
||||
});
|
||||
|
||||
it('should reject negative integers', () => {
|
||||
const stream = createBitStream();
|
||||
expect(() => encodeInt(stream, -1, 12)).toThrow('Negative integers not supported');
|
||||
});
|
||||
|
||||
it('should reject values too large for bit width', () => {
|
||||
const stream = createBitStream();
|
||||
expect(() => encodeInt(stream, 4096, 12)).toThrow('too large for 12 bits');
|
||||
});
|
||||
|
||||
it('should handle various bit widths', () => {
|
||||
for (const bits of [1, 3, 8, 12, 24]) {
|
||||
const maxVal = 2 ** bits - 1;
|
||||
const stream = createBitStream();
|
||||
encodeInt(stream, maxVal, bits);
|
||||
stream.position = 0;
|
||||
expect(decodeInt(stream, bits)).toBe(maxVal);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// VALUE ENCODING: BOOL
|
||||
// =============================================================================
|
||||
|
||||
describe('Bool Encoding', () => {
|
||||
it('should encode and decode true', () => {
|
||||
const stream = createBitStream();
|
||||
encodeBool(stream, true);
|
||||
stream.position = 0;
|
||||
expect(decodeBool(stream)).toBe(true);
|
||||
});
|
||||
|
||||
it('should encode and decode false', () => {
|
||||
const stream = createBitStream();
|
||||
encodeBool(stream, false);
|
||||
stream.position = 0;
|
||||
expect(decodeBool(stream)).toBe(false);
|
||||
});
|
||||
|
||||
it('should use exactly 1 bit', () => {
|
||||
const stream = createBitStream();
|
||||
encodeBool(stream, true);
|
||||
expect(stream.bits.length).toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// VALUE ENCODING: STRING
|
||||
// =============================================================================
|
||||
|
||||
describe('String Encoding', () => {
|
||||
it('should encode and decode simple ASCII', () => {
|
||||
const stream = createBitStream();
|
||||
encodeString(stream, 'Hello');
|
||||
stream.position = 0;
|
||||
expect(decodeString(stream)).toBe('Hello');
|
||||
});
|
||||
|
||||
it('should encode and decode empty string', () => {
|
||||
const stream = createBitStream();
|
||||
encodeString(stream, '');
|
||||
stream.position = 0;
|
||||
expect(decodeString(stream)).toBe('');
|
||||
});
|
||||
|
||||
it('should encode and decode UTF-8 (emoji, umlauts)', () => {
|
||||
const stream = createBitStream();
|
||||
encodeString(stream, 'Hëllo 🌍');
|
||||
stream.position = 0;
|
||||
expect(decodeString(stream)).toBe('Hëllo 🌍');
|
||||
});
|
||||
|
||||
it('should encode and decode with compression (long strings)', () => {
|
||||
const longString = 'a'.repeat(100);
|
||||
const stream = createBitStream();
|
||||
encodeString(stream, longString, true);
|
||||
stream.position = 0;
|
||||
expect(decodeString(stream)).toBe(longString);
|
||||
});
|
||||
|
||||
it('should skip compression for short strings even when enabled', () => {
|
||||
const stream = createBitStream();
|
||||
encodeString(stream, 'short', true); // < 20 bytes, won't compress
|
||||
stream.position = 0;
|
||||
expect(decodeString(stream)).toBe('short');
|
||||
});
|
||||
|
||||
it('should handle max-length string (511 bytes UTF-8)', () => {
|
||||
const str = 'x'.repeat(511);
|
||||
const stream = createBitStream();
|
||||
encodeString(stream, str);
|
||||
stream.position = 0;
|
||||
expect(decodeString(stream)).toBe(str);
|
||||
});
|
||||
|
||||
it('should handle string with only whitespace', () => {
|
||||
const stream = createBitStream();
|
||||
encodeString(stream, ' \t\n');
|
||||
stream.position = 0;
|
||||
expect(decodeString(stream)).toBe(' \t\n');
|
||||
});
|
||||
|
||||
it('should reject string exceeding 511 bytes', () => {
|
||||
const stream = createBitStream();
|
||||
const tooLong = 'x'.repeat(512); // 512 bytes > 511 max
|
||||
expect(() => encodeString(stream, tooLong)).toThrow('String too long');
|
||||
});
|
||||
|
||||
it('should accept string of exactly 511 bytes', () => {
|
||||
const stream = createBitStream();
|
||||
const maxStr = 'x'.repeat(511);
|
||||
expect(() => encodeString(stream, maxStr)).not.toThrow();
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// VALUE ENCODING: TIMESTAMP
|
||||
// =============================================================================
|
||||
|
||||
describe('Timestamp Encoding', () => {
|
||||
it('should encode and decode a date', () => {
|
||||
const date = new Date('2025-06-15');
|
||||
const stream = createBitStream();
|
||||
encodeTimestamp(stream, date);
|
||||
stream.position = 0;
|
||||
const decoded = decodeTimestamp(stream);
|
||||
expect(decoded).not.toBeNull();
|
||||
// Compare as days since epoch (precision is days)
|
||||
const expectedDays = Math.floor(date.getTime() / (1000 * 60 * 60 * 24));
|
||||
const decodedDays = Math.floor(decoded!.getTime() / (1000 * 60 * 60 * 24));
|
||||
expect(decodedDays).toBe(expectedDays);
|
||||
});
|
||||
|
||||
it('should encode null as 0 and decode back to null', () => {
|
||||
const stream = createBitStream();
|
||||
encodeTimestamp(stream, null);
|
||||
stream.position = 0;
|
||||
expect(decodeTimestamp(stream)).toBeNull();
|
||||
});
|
||||
|
||||
it('should use exactly 24 bits', () => {
|
||||
const stream = createBitStream();
|
||||
encodeTimestamp(stream, new Date());
|
||||
expect(stream.bits.length).toBe(24);
|
||||
});
|
||||
|
||||
it('should handle epoch date (1970-01-01) — decodes to null since days=0', () => {
|
||||
const stream = createBitStream();
|
||||
encodeTimestamp(stream, new Date(0));
|
||||
stream.position = 0;
|
||||
// days since epoch = 0 → decoded as null (ambiguity!)
|
||||
expect(decodeTimestamp(stream)).toBeNull();
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// VALUE ENCODING: ARRAYS
|
||||
// =============================================================================
|
||||
|
||||
describe('Int Array Encoding', () => {
|
||||
it('should encode and decode int array', () => {
|
||||
const stream = createBitStream();
|
||||
encodeIntArray(stream, [1, 2, 3], 12);
|
||||
stream.position = 0;
|
||||
expect(decodeIntArray(stream, 12)).toEqual([1, 2, 3]);
|
||||
});
|
||||
|
||||
it('should handle empty array', () => {
|
||||
const stream = createBitStream();
|
||||
encodeIntArray(stream, [], 12);
|
||||
stream.position = 0;
|
||||
expect(decodeIntArray(stream, 12)).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle single-element array', () => {
|
||||
const stream = createBitStream();
|
||||
encodeIntArray(stream, [42], 8);
|
||||
stream.position = 0;
|
||||
expect(decodeIntArray(stream, 8)).toEqual([42]);
|
||||
});
|
||||
|
||||
it('should handle max array count (255)', () => {
|
||||
const arr = Array.from({ length: 255 }, (_, i) => i % 256);
|
||||
const stream = createBitStream();
|
||||
encodeIntArray(stream, arr, 8);
|
||||
stream.position = 0;
|
||||
expect(decodeIntArray(stream, 8)).toEqual(arr);
|
||||
});
|
||||
});
|
||||
|
||||
describe('String Array Encoding', () => {
|
||||
it('should encode and decode string array', () => {
|
||||
const stream = createBitStream();
|
||||
encodeStringArray(stream, ['hello', 'world']);
|
||||
stream.position = 0;
|
||||
expect(decodeStringArray(stream)).toEqual(['hello', 'world']);
|
||||
});
|
||||
|
||||
it('should handle empty string array', () => {
|
||||
const stream = createBitStream();
|
||||
encodeStringArray(stream, []);
|
||||
stream.position = 0;
|
||||
expect(decodeStringArray(stream)).toEqual([]);
|
||||
});
|
||||
|
||||
it('should handle array with empty strings', () => {
|
||||
const stream = createBitStream();
|
||||
encodeStringArray(stream, ['', '', '']);
|
||||
stream.position = 0;
|
||||
expect(decodeStringArray(stream)).toEqual(['', '', '']);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// MULTI-FIELD ROUND-TRIP
|
||||
// =============================================================================
|
||||
|
||||
describe('Multi-field Round-trip', () => {
|
||||
it('should encode and decode multiple fields in sequence', () => {
|
||||
const stream = createBitStream();
|
||||
|
||||
encodeInt(stream, 42, 12);
|
||||
encodeInt(stream, 3, 3);
|
||||
encodeBool(stream, true);
|
||||
encodeTimestamp(stream, new Date('2025-01-01'));
|
||||
encodeString(stream, 'Test todo');
|
||||
encodeIntArray(stream, [1, 2], 12);
|
||||
|
||||
stream.position = 0;
|
||||
|
||||
expect(decodeInt(stream, 12)).toBe(42);
|
||||
expect(decodeInt(stream, 3)).toBe(3);
|
||||
expect(decodeBool(stream)).toBe(true);
|
||||
const ts = decodeTimestamp(stream);
|
||||
expect(ts).not.toBeNull();
|
||||
expect(decodeString(stream)).toBe('Test todo');
|
||||
expect(decodeIntArray(stream, 12)).toEqual([1, 2]);
|
||||
});
|
||||
});
|
||||
|
|
@ -185,6 +185,11 @@ export function decodeBool(stream: BitStream): boolean {
|
|||
export function encodeString(stream: BitStream, value: string, compress = false): void {
|
||||
const bytes = new TextEncoder().encode(value);
|
||||
|
||||
// 9-bit length field can hold max 511 bytes
|
||||
if (bytes.length > 511) {
|
||||
throw new Error(`String too long: ${bytes.length} bytes (max 511)`);
|
||||
}
|
||||
|
||||
if (compress && bytes.length > 20) {
|
||||
const compressed = pako.deflate(bytes);
|
||||
if (compressed.length < bytes.length) {
|
||||
|
|
|
|||
251
packages/spiral-db/src/image.test.ts
Normal file
251
packages/spiral-db/src/image.test.ts
Normal file
|
|
@ -0,0 +1,251 @@
|
|||
/**
|
||||
* Image Tests
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
createImage,
|
||||
createImageForRing,
|
||||
getPixelByIndex,
|
||||
setPixelByIndex,
|
||||
getPixelByXY,
|
||||
setPixelByXY,
|
||||
readPixelRange,
|
||||
writePixelRange,
|
||||
expandImage,
|
||||
getMaxRingForImage,
|
||||
imageToRGBA,
|
||||
rgbaToImage,
|
||||
imageToColorGrid,
|
||||
visualizeSpiralOrder,
|
||||
visualizeImageEmoji,
|
||||
} from './image.js';
|
||||
import type { ColorIndex } from './types.js';
|
||||
|
||||
// =============================================================================
|
||||
// CREATE IMAGE
|
||||
// =============================================================================
|
||||
|
||||
describe('createImage', () => {
|
||||
it('should create a square image with correct dimensions', () => {
|
||||
const image = createImage(5);
|
||||
expect(image.width).toBe(5);
|
||||
expect(image.height).toBe(5);
|
||||
expect(image.pixels.length).toBe(5 * 5 * 3);
|
||||
});
|
||||
|
||||
it('should initialize all pixels to black (0)', () => {
|
||||
const image = createImage(3);
|
||||
for (let i = 0; i < image.pixels.length; i++) {
|
||||
expect(image.pixels[i]).toBe(0);
|
||||
}
|
||||
});
|
||||
|
||||
it('should reject even sizes', () => {
|
||||
expect(() => createImage(4)).toThrow('Image size must be odd');
|
||||
});
|
||||
|
||||
it('should create 1x1 image', () => {
|
||||
const image = createImage(1);
|
||||
expect(image.width).toBe(1);
|
||||
expect(image.pixels.length).toBe(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('createImageForRing', () => {
|
||||
it('should create correct size for ring 0', () => {
|
||||
const image = createImageForRing(0);
|
||||
expect(image.width).toBe(1);
|
||||
});
|
||||
|
||||
it('should create correct size for ring 2', () => {
|
||||
const image = createImageForRing(2);
|
||||
expect(image.width).toBe(5);
|
||||
});
|
||||
|
||||
it('should create correct size for ring 5', () => {
|
||||
const image = createImageForRing(5);
|
||||
expect(image.width).toBe(11);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// PIXEL ACCESS
|
||||
// =============================================================================
|
||||
|
||||
describe('Pixel Access by XY', () => {
|
||||
it('should set and get pixel', () => {
|
||||
const image = createImage(3);
|
||||
setPixelByXY(image, 1, 1, 7); // white at center
|
||||
expect(getPixelByXY(image, 1, 1)).toBe(7);
|
||||
});
|
||||
|
||||
it('should set all 8 colors', () => {
|
||||
const image = createImage(3);
|
||||
for (let i = 0; i < 8; i++) {
|
||||
setPixelByXY(image, i % 3, Math.floor(i / 3), i as ColorIndex);
|
||||
}
|
||||
for (let i = 0; i < 8; i++) {
|
||||
expect(getPixelByXY(image, i % 3, Math.floor(i / 3))).toBe(i);
|
||||
}
|
||||
});
|
||||
|
||||
it('should throw on out-of-bounds access', () => {
|
||||
const image = createImage(3);
|
||||
expect(() => getPixelByXY(image, -1, 0)).toThrow('out of bounds');
|
||||
expect(() => getPixelByXY(image, 3, 0)).toThrow('out of bounds');
|
||||
expect(() => getPixelByXY(image, 0, 3)).toThrow('out of bounds');
|
||||
expect(() => setPixelByXY(image, 0, -1, 0)).toThrow('out of bounds');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pixel Access by Index', () => {
|
||||
it('should set and get center pixel (index 0)', () => {
|
||||
const image = createImage(5);
|
||||
setPixelByIndex(image, 0, 7);
|
||||
expect(getPixelByIndex(image, 0)).toBe(7);
|
||||
});
|
||||
|
||||
it('should set and get ring 1 pixels', () => {
|
||||
const image = createImage(5);
|
||||
for (let i = 1; i <= 8; i++) {
|
||||
setPixelByIndex(image, i, (i % 8) as ColorIndex);
|
||||
}
|
||||
for (let i = 1; i <= 8; i++) {
|
||||
expect(getPixelByIndex(image, i)).toBe(i % 8);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
describe('Pixel Range Operations', () => {
|
||||
it('should write and read a range', () => {
|
||||
const image = createImage(5);
|
||||
const colors: ColorIndex[] = [1, 2, 3, 4, 5];
|
||||
writePixelRange(image, 0, colors);
|
||||
const read = readPixelRange(image, 0, 5);
|
||||
expect(read).toEqual(colors);
|
||||
});
|
||||
|
||||
it('should handle range of 1', () => {
|
||||
const image = createImage(3);
|
||||
writePixelRange(image, 0, [7]);
|
||||
expect(readPixelRange(image, 0, 1)).toEqual([7]);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// IMAGE EXPANSION
|
||||
// =============================================================================
|
||||
|
||||
describe('expandImage', () => {
|
||||
it('should grow image to accommodate new ring', () => {
|
||||
const image = createImage(3); // ring 1
|
||||
setPixelByIndex(image, 0, 7); // white center
|
||||
|
||||
const expanded = expandImage(image, 3);
|
||||
expect(expanded.width).toBe(7); // ring 3 → 2*3+1
|
||||
expect(expanded.height).toBe(7);
|
||||
|
||||
// Center pixel should be preserved
|
||||
expect(getPixelByIndex(expanded, 0)).toBe(7);
|
||||
});
|
||||
|
||||
it('should not expand if already large enough', () => {
|
||||
const image = createImage(7);
|
||||
const same = expandImage(image, 2); // ring 2 needs 5, we have 7
|
||||
expect(same).toBe(image); // same reference
|
||||
});
|
||||
|
||||
it('should preserve all existing pixels', () => {
|
||||
const image = createImage(3);
|
||||
// Set all 9 pixels
|
||||
for (let i = 0; i < 9; i++) {
|
||||
setPixelByIndex(image, i, (i % 8) as ColorIndex);
|
||||
}
|
||||
|
||||
const expanded = expandImage(image, 3);
|
||||
|
||||
// Verify all original pixels preserved
|
||||
for (let i = 0; i < 9; i++) {
|
||||
expect(getPixelByIndex(expanded, i)).toBe(i % 8);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// FORMAT CONVERSIONS
|
||||
// =============================================================================
|
||||
|
||||
describe('RGBA Conversion', () => {
|
||||
it('should convert to RGBA and back', () => {
|
||||
const image = createImage(3);
|
||||
setPixelByIndex(image, 0, 7); // white center
|
||||
setPixelByIndex(image, 1, 4); // red
|
||||
|
||||
const rgba = imageToRGBA(image);
|
||||
expect(rgba.length).toBe(3 * 3 * 4); // 4 bytes per pixel
|
||||
|
||||
const back = rgbaToImage(rgba, 3, 3);
|
||||
expect(getPixelByIndex(back, 0)).toBe(7);
|
||||
expect(getPixelByIndex(back, 1)).toBe(4);
|
||||
});
|
||||
|
||||
it('should set alpha to 255 in RGBA', () => {
|
||||
const image = createImage(1);
|
||||
const rgba = imageToRGBA(image);
|
||||
expect(rgba[3]).toBe(255); // alpha
|
||||
});
|
||||
|
||||
it('should reject non-square RGBA', () => {
|
||||
const rgba = new Uint8Array(2 * 3 * 4);
|
||||
expect(() => rgbaToImage(rgba, 2, 3)).toThrow('must be square');
|
||||
});
|
||||
|
||||
it('should reject even-sized RGBA', () => {
|
||||
const rgba = new Uint8Array(4 * 4 * 4);
|
||||
expect(() => rgbaToImage(rgba, 4, 4)).toThrow('must be odd');
|
||||
});
|
||||
});
|
||||
|
||||
describe('Color Grid', () => {
|
||||
it('should return 2D grid of color indices', () => {
|
||||
const image = createImage(3);
|
||||
setPixelByXY(image, 0, 0, 4);
|
||||
setPixelByXY(image, 2, 2, 2);
|
||||
|
||||
const grid = imageToColorGrid(image);
|
||||
expect(grid.length).toBe(3);
|
||||
expect(grid[0].length).toBe(3);
|
||||
expect(grid[0][0]).toBe(4);
|
||||
expect(grid[2][2]).toBe(2);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getMaxRingForImage', () => {
|
||||
it('should return correct max ring', () => {
|
||||
expect(getMaxRingForImage(createImage(1))).toBe(0);
|
||||
expect(getMaxRingForImage(createImage(3))).toBe(1);
|
||||
expect(getMaxRingForImage(createImage(5))).toBe(2);
|
||||
expect(getMaxRingForImage(createImage(11))).toBe(5);
|
||||
});
|
||||
});
|
||||
|
||||
// =============================================================================
|
||||
// VISUALIZATION
|
||||
// =============================================================================
|
||||
|
||||
describe('Visualization', () => {
|
||||
it('should produce emoji visualization with correct dimensions', () => {
|
||||
const image = createImage(3);
|
||||
setPixelByIndex(image, 0, 7);
|
||||
const emoji = visualizeImageEmoji(image);
|
||||
const lines = emoji.split('\n');
|
||||
expect(lines.length).toBe(3);
|
||||
});
|
||||
|
||||
it('should produce spiral order visualization', () => {
|
||||
const viz = visualizeSpiralOrder(3);
|
||||
expect(viz).toContain('0'); // center
|
||||
expect(viz.split('\n')).toHaveLength(3);
|
||||
});
|
||||
});
|
||||
335
packages/spiral-db/src/png.test.ts
Normal file
335
packages/spiral-db/src/png.test.ts
Normal file
|
|
@ -0,0 +1,335 @@
|
|||
/**
|
||||
* PNG Export/Import Tests — round-trip, validation, edge cases
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import { exportToPngBytes, exportToPngBytesCompressed, importFromPngBytes } from './png.js';
|
||||
import { SpiralDB } from './database.js';
|
||||
import { createTodoSchema } from './schema.js';
|
||||
import { createImage, setPixelByIndex, getPixelByIndex } from './image.js';
|
||||
import type { ColorIndex } from './types.js';
|
||||
|
||||
// CRC32 helper for test — mirrors the one in png.ts
|
||||
const CRC_TABLE: number[] = [];
|
||||
for (let n = 0; n < 256; n++) {
|
||||
let c = n;
|
||||
for (let k = 0; k < 8; k++) {
|
||||
c = c & 1 ? 0xedb88320 ^ (c >>> 1) : c >>> 1;
|
||||
}
|
||||
CRC_TABLE[n] = c;
|
||||
}
|
||||
function crc32ForTest(data: Uint8Array): number {
|
||||
let crc = 0xffffffff;
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
crc = CRC_TABLE[(crc ^ data[i]) & 0xff] ^ (crc >>> 8);
|
||||
}
|
||||
return crc ^ 0xffffffff;
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// HELPERS
|
||||
// =============================================================================
|
||||
|
||||
interface TodoData {
|
||||
id: number;
|
||||
status: number;
|
||||
priority: number;
|
||||
createdAt: Date;
|
||||
dueDate: Date | null;
|
||||
completedAt: Date | null;
|
||||
title: string;
|
||||
description: string | null;
|
||||
tags: number[];
|
||||
}
|
||||
|
||||
function makeTodo(overrides: Partial<TodoData> = {}): TodoData {
|
||||
return {
|
||||
id: 0,
|
||||
status: 0,
|
||||
priority: 1,
|
||||
createdAt: new Date('2025-01-15'),
|
||||
dueDate: null,
|
||||
completedAt: null,
|
||||
title: 'Test Todo',
|
||||
description: null,
|
||||
tags: [],
|
||||
...overrides,
|
||||
};
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// PNG SIGNATURE
|
||||
// =============================================================================
|
||||
|
||||
// Verifies the fixed 8-byte PNG file signature on export, and that import
// rejects data that does not begin with it.
describe('PNG Signature', () => {
  it('should produce valid PNG signature', () => {
    const image = createImage(3);
    const bytes = exportToPngBytes(image);
    // PNG magic bytes: \x89 'P' 'N' 'G' \r \n \x1a \n
    expect(bytes[0]).toBe(137);
    expect(bytes[1]).toBe(80); // P
    expect(bytes[2]).toBe(78); // N
    expect(bytes[3]).toBe(71); // G
    expect(bytes[4]).toBe(13);
    expect(bytes[5]).toBe(10);
    expect(bytes[6]).toBe(26);
    expect(bytes[7]).toBe(10);
  });

  it('should reject invalid signature on import', async () => {
    // 8 zero bytes: long enough to pass the length check, wrong signature
    const badData = new Uint8Array([0, 0, 0, 0, 0, 0, 0, 0]);
    await expect(importFromPngBytes(badData)).rejects.toThrow('Invalid PNG signature');
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// ROUND-TRIP: UNCOMPRESSED
|
||||
// =============================================================================
|
||||
|
||||
// Export via exportToPngBytes then re-import; pixel values must survive
// unchanged for minimal, full-palette, and larger images.
describe('PNG Round-trip (uncompressed)', () => {
  it('should round-trip a minimal 1x1 image', async () => {
    const image = createImage(1);
    setPixelByIndex(image, 0, 7); // white

    const bytes = exportToPngBytes(image);
    const imported = await importFromPngBytes(bytes);

    expect(imported.width).toBe(1);
    expect(imported.height).toBe(1);
    expect(getPixelByIndex(imported, 0)).toBe(7);
  });

  it('should round-trip a 3x3 image with all colors', async () => {
    const image = createImage(3);
    // One pixel per palette entry (color indices 0..7)
    for (let i = 0; i < 8; i++) {
      setPixelByIndex(image, i, i as ColorIndex);
    }

    const bytes = exportToPngBytes(image);
    const imported = await importFromPngBytes(bytes);

    for (let i = 0; i < 8; i++) {
      expect(getPixelByIndex(imported, i)).toBe(i);
    }
  });

  it('should round-trip a larger image (11x11)', async () => {
    const image = createImage(11);
    // Set various pixels
    setPixelByIndex(image, 0, 7);
    setPixelByIndex(image, 10, 4);
    setPixelByIndex(image, 50, 2);

    const bytes = exportToPngBytes(image);
    const imported = await importFromPngBytes(bytes);

    expect(imported.width).toBe(11);
    expect(getPixelByIndex(imported, 0)).toBe(7);
    expect(getPixelByIndex(imported, 10)).toBe(4);
    expect(getPixelByIndex(imported, 50)).toBe(2);
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// ROUND-TRIP: COMPRESSED (pako)
|
||||
// =============================================================================
|
||||
|
||||
// Same round-trip guarantees when exporting through the pako-compressed path.
describe('PNG Round-trip (compressed)', () => {
  it('should round-trip with pako compression', async () => {
    const image = createImage(5);
    setPixelByIndex(image, 0, 7);
    setPixelByIndex(image, 5, 3);

    const bytes = await exportToPngBytesCompressed(image);
    const imported = await importFromPngBytes(bytes);

    expect(imported.width).toBe(5);
    expect(getPixelByIndex(imported, 0)).toBe(7);
    expect(getPixelByIndex(imported, 5)).toBe(3);
  });

  it('should produce smaller output than uncompressed', async () => {
    const image = createImage(11);
    // All black = very compressible
    const uncompressed = exportToPngBytes(image);
    const compressed = await exportToPngBytesCompressed(image);

    // <= rather than <: tiny images may not shrink, but must never grow
    expect(compressed.length).toBeLessThanOrEqual(uncompressed.length);
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// DATABASE PNG ROUND-TRIP
|
||||
// =============================================================================
|
||||
|
||||
// Full-stack persistence: SpiralDB -> image -> PNG bytes -> image -> SpiralDB.
// Records, field values, and record metadata (e.g. completion status) must survive.
describe('Database PNG Round-trip', () => {
  it('should persist and restore database via PNG', async () => {
    const db = new SpiralDB<TodoData>({
      schema: createTodoSchema(),
    });

    db.insert(makeTodo({ title: 'PNG Test', priority: 3, tags: [1, 2] }));
    db.insert(makeTodo({ title: 'Second', description: 'Desc' }));
    db.complete(1);

    const image = db.getImage();
    const pngBytes = await exportToPngBytesCompressed(image);
    const importedImage = await importFromPngBytes(pngBytes);

    const restored = SpiralDB.fromImage<TodoData>(importedImage, createTodoSchema());
    const all = restored.getAll();
    expect(all.length).toBe(2);

    const first = restored.read(0);
    expect(first.record?.data.title).toBe('PNG Test');
    expect(first.record?.data.tags).toEqual([1, 2]);

    // Record 1 was completed above; status must survive the round-trip
    const second = restored.read(1);
    expect(second.record?.data.title).toBe('Second');
    expect(second.record?.meta.status).toBe('completed');
  });

  it('should round-trip empty database via PNG', async () => {
    const db = new SpiralDB<TodoData>({ schema: createTodoSchema() });
    const pngBytes = await exportToPngBytesCompressed(db.getImage());
    const importedImage = await importFromPngBytes(pngBytes);

    const restored = SpiralDB.fromImage<TodoData>(importedImage, createTodoSchema());
    expect(restored.getAll()).toHaveLength(0);
  });

  it('should round-trip database with many records', async () => {
    const db = new SpiralDB<TodoData>({ schema: createTodoSchema() });

    for (let i = 0; i < 50; i++) {
      db.insert(makeTodo({ title: `Todo ${i}`, priority: i % 8 }));
    }

    const pngBytes = await exportToPngBytesCompressed(db.getImage());
    const importedImage = await importFromPngBytes(pngBytes);
    const restored = SpiralDB.fromImage<TodoData>(importedImage, createTodoSchema());

    expect(restored.getStats().totalRecords).toBe(50);

    // Spot check
    const record25 = restored.read(25);
    expect(record25.record?.data.title).toBe('Todo 25');
    expect(record25.record?.data.priority).toBe(25 % 8);
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// VALIDATION / ERROR HANDLING
|
||||
// =============================================================================
|
||||
|
||||
// Corruption and malformed-input handling in importFromPngBytes: truncation,
// CRC damage, and dimensions SpiralDB cannot represent (must be odd squares).
describe('PNG Import Validation', () => {
  it('should reject truncated PNG', async () => {
    const image = createImage(3);
    const bytes = exportToPngBytes(image);
    const truncated = bytes.slice(0, 20);
    await expect(importFromPngBytes(truncated)).rejects.toThrow();
  });

  it('should reject empty buffer', async () => {
    await expect(importFromPngBytes(new Uint8Array(0))).rejects.toThrow('data too short');
  });

  it('should reject buffer shorter than 8 bytes', async () => {
    await expect(importFromPngBytes(new Uint8Array(5))).rejects.toThrow('data too short');
  });

  it('should detect CRC corruption in IHDR', async () => {
    const image = createImage(3);
    const bytes = exportToPngBytes(image);
    const corrupted = new Uint8Array(bytes);
    // IHDR CRC is at offset 8 (sig) + 4 (len) + 4 (type) + 13 (data) = 29
    corrupted[29] ^= 0xff; // flip CRC bits
    await expect(importFromPngBytes(corrupted)).rejects.toThrow('CRC mismatch');
  });

  it('should detect CRC corruption in IDAT', async () => {
    const image = createImage(3);
    const bytes = exportToPngBytes(image);
    const corrupted = new Uint8Array(bytes);
    // Find IDAT CRC (it's the last 4 bytes before IEND)
    // IEND chunk is 12 bytes at the end
    const idatCrcOffset = corrupted.length - 12 - 4;
    corrupted[idatCrcOffset] ^= 0xff;
    await expect(importFromPngBytes(corrupted)).rejects.toThrow('CRC mismatch');
  });

  it('should reject non-square image', async () => {
    const image = createImage(3);
    const bytes = exportToPngBytes(image);
    const corrupted = new Uint8Array(bytes);
    // Change width to 5 in IHDR (offset 16)
    const view = new DataView(corrupted.buffer);
    view.setUint32(16, 5, false);
    // Recalculate IHDR CRC so it doesn't fail on CRC first
    const ihdrTypeAndData = corrupted.slice(12, 12 + 4 + 13);
    const newCrc = crc32ForTest(ihdrTypeAndData);
    view.setUint32(12 + 4 + 13, newCrc >>> 0, false);
    await expect(importFromPngBytes(corrupted)).rejects.toThrow('odd square');
  });

  it('should reject even-sized image', async () => {
    const image = createImage(3);
    const bytes = exportToPngBytes(image);
    const corrupted = new Uint8Array(bytes);
    const view = new DataView(corrupted.buffer);
    // Set width and height to 4 (even)
    view.setUint32(16, 4, false);
    view.setUint32(20, 4, false);
    // Recalculate IHDR CRC
    const ihdrTypeAndData = corrupted.slice(12, 12 + 4 + 13);
    const newCrc = crc32ForTest(ihdrTypeAndData);
    view.setUint32(12 + 4 + 13, newCrc >>> 0, false);
    await expect(importFromPngBytes(corrupted)).rejects.toThrow('odd square');
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// IHDR CHUNK
|
||||
// =============================================================================
|
||||
|
||||
// Byte-level check of the IHDR header fields written by exportToPngBytes.
describe('PNG IHDR', () => {
  it('should encode correct width and height', async () => {
    const image = createImage(7);
    const bytes = exportToPngBytes(image);

    // IHDR data starts at offset 16 (8 signature + 4 length + 4 type)
    const view = new DataView(bytes.buffer, bytes.byteOffset);
    const width = view.getUint32(16, false);
    const height = view.getUint32(20, false);
    expect(width).toBe(7);
    expect(height).toBe(7);

    // Bit depth = 8, color type = 2 (RGB)
    expect(bytes[24]).toBe(8);
    expect(bytes[25]).toBe(2);
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// PIXEL FIDELITY
|
||||
// =============================================================================
|
||||
|
||||
// Beyond color indices: the raw RGB byte buffer must be bit-identical after a
// compressed export/import cycle.
describe('Pixel Fidelity', () => {
  it('should preserve exact RGB values through PNG round-trip', async () => {
    const image = createImage(3);

    // Set all 8 possible colors
    const colors: ColorIndex[] = [0, 1, 2, 3, 4, 5, 6, 7];
    for (let i = 0; i < colors.length; i++) {
      setPixelByIndex(image, i, colors[i]);
    }

    const bytes = await exportToPngBytesCompressed(image);
    const imported = await importFromPngBytes(bytes);

    // Verify each pixel has exact same RGB values
    for (let i = 0; i < image.pixels.length; i++) {
      expect(imported.pixels[i]).toBe(image.pixels[i]);
    }
  });
});
|
||||
|
|
@ -5,6 +5,7 @@
|
|||
|
||||
import type { SpiralImage } from './types.js';
|
||||
import { createImage } from './image.js';
|
||||
import pako from 'pako';
|
||||
|
||||
// =============================================================================
|
||||
// PNG ENCODING (Pure JavaScript - works everywhere)
|
||||
|
|
@ -60,72 +61,10 @@ function createChunk(type: string, data: Uint8Array): Uint8Array {
|
|||
}
|
||||
|
||||
/**
|
||||
* Adler-32 checksum for zlib
|
||||
*/
|
||||
function adler32(data: Uint8Array): number {
|
||||
let a = 1;
|
||||
let b = 0;
|
||||
for (let i = 0; i < data.length; i++) {
|
||||
a = (a + data[i]) % 65521;
|
||||
b = (b + a) % 65521;
|
||||
}
|
||||
return (b << 16) | a;
|
||||
}
|
||||
|
||||
/**
|
||||
* Simple zlib compression (store only, no actual compression)
|
||||
* For small images this is fine; for larger ones, use pako
|
||||
* Compress data using pako (zlib deflate)
|
||||
*/
|
||||
function zlibCompress(data: Uint8Array): Uint8Array {
|
||||
// For simplicity, we use uncompressed deflate blocks
|
||||
// This works but doesn't actually compress
|
||||
const maxBlockSize = 65535;
|
||||
const blocks: Uint8Array[] = [];
|
||||
|
||||
for (let i = 0; i < data.length; i += maxBlockSize) {
|
||||
const blockData = data.slice(i, Math.min(i + maxBlockSize, data.length));
|
||||
const isLast = i + maxBlockSize >= data.length;
|
||||
|
||||
// Block header: 1 byte (BFINAL=1 for last, BTYPE=00 for no compression)
|
||||
const header = isLast ? 0x01 : 0x00;
|
||||
|
||||
// Length and complement
|
||||
const len = blockData.length;
|
||||
const nlen = len ^ 0xffff;
|
||||
|
||||
const block = new Uint8Array(5 + blockData.length);
|
||||
block[0] = header;
|
||||
block[1] = len & 0xff;
|
||||
block[2] = (len >> 8) & 0xff;
|
||||
block[3] = nlen & 0xff;
|
||||
block[4] = (nlen >> 8) & 0xff;
|
||||
block.set(blockData, 5);
|
||||
|
||||
blocks.push(block);
|
||||
}
|
||||
|
||||
// Calculate total size
|
||||
const totalBlockSize = blocks.reduce((sum, b) => sum + b.length, 0);
|
||||
|
||||
// zlib header (2 bytes) + blocks + adler32 (4 bytes)
|
||||
const result = new Uint8Array(2 + totalBlockSize + 4);
|
||||
const view = new DataView(result.buffer);
|
||||
|
||||
// zlib header: CMF=0x78 (deflate, 32K window), FLG=0x01 (no dict, check bits)
|
||||
result[0] = 0x78;
|
||||
result[1] = 0x01;
|
||||
|
||||
// Copy blocks
|
||||
let offset = 2;
|
||||
for (const block of blocks) {
|
||||
result.set(block, offset);
|
||||
offset += block.length;
|
||||
}
|
||||
|
||||
// Adler-32 checksum (big-endian)
|
||||
view.setUint32(offset, adler32(data), false);
|
||||
|
||||
return result;
|
||||
return pako.deflate(data);
|
||||
}
|
||||
|
||||
/**
|
||||
|
|
@ -190,76 +129,166 @@ export function exportToPngBytes(image: SpiralImage): Uint8Array {
|
|||
}
|
||||
|
||||
/**
|
||||
* Export SpiralImage to PNG with pako compression (smaller files)
|
||||
* Export SpiralImage to PNG with best compression (smaller files)
|
||||
* Uses pako.deflate with maximum compression level.
|
||||
*/
|
||||
export async function exportToPngBytesCompressed(image: SpiralImage): Promise<Uint8Array> {
|
||||
// Try to use pako for better compression
|
||||
try {
|
||||
const pakoModule = await import('pako');
|
||||
const pako = pakoModule.default || pakoModule;
|
||||
const { width, height } = image;
|
||||
|
||||
const { width, height } = image;
|
||||
const signature = new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10]);
|
||||
|
||||
const signature = new Uint8Array([137, 80, 78, 71, 13, 10, 26, 10]);
|
||||
// IHDR
|
||||
const ihdrData = new Uint8Array(13);
|
||||
const ihdrView = new DataView(ihdrData.buffer);
|
||||
ihdrView.setUint32(0, width, false);
|
||||
ihdrView.setUint32(4, height, false);
|
||||
ihdrData[8] = 8;
|
||||
ihdrData[9] = 2;
|
||||
ihdrData[10] = 0;
|
||||
ihdrData[11] = 0;
|
||||
ihdrData[12] = 0;
|
||||
const ihdrChunk = createChunk('IHDR', ihdrData);
|
||||
|
||||
// IHDR
|
||||
const ihdrData = new Uint8Array(13);
|
||||
const ihdrView = new DataView(ihdrData.buffer);
|
||||
ihdrView.setUint32(0, width, false);
|
||||
ihdrView.setUint32(4, height, false);
|
||||
ihdrData[8] = 8;
|
||||
ihdrData[9] = 2;
|
||||
ihdrData[10] = 0;
|
||||
ihdrData[11] = 0;
|
||||
ihdrData[12] = 0;
|
||||
const ihdrChunk = createChunk('IHDR', ihdrData);
|
||||
|
||||
// Raw data with filter bytes
|
||||
const rawData = new Uint8Array(height * (1 + width * 3));
|
||||
let rawOffset = 0;
|
||||
for (let y = 0; y < height; y++) {
|
||||
rawData[rawOffset++] = 0; // Filter byte
|
||||
for (let x = 0; x < width; x++) {
|
||||
const pixelOffset = (y * width + x) * 3;
|
||||
rawData[rawOffset++] = image.pixels[pixelOffset];
|
||||
rawData[rawOffset++] = image.pixels[pixelOffset + 1];
|
||||
rawData[rawOffset++] = image.pixels[pixelOffset + 2];
|
||||
}
|
||||
// Raw data with filter bytes
|
||||
const rawData = new Uint8Array(height * (1 + width * 3));
|
||||
let rawOffset = 0;
|
||||
for (let y = 0; y < height; y++) {
|
||||
rawData[rawOffset++] = 0; // Filter byte
|
||||
for (let x = 0; x < width; x++) {
|
||||
const pixelOffset = (y * width + x) * 3;
|
||||
rawData[rawOffset++] = image.pixels[pixelOffset];
|
||||
rawData[rawOffset++] = image.pixels[pixelOffset + 1];
|
||||
rawData[rawOffset++] = image.pixels[pixelOffset + 2];
|
||||
}
|
||||
|
||||
// Use pako.deflate which returns zlib-wrapped data (header + compressed + adler32)
|
||||
const zlibData = pako.deflate(rawData);
|
||||
|
||||
const idatChunk = createChunk('IDAT', zlibData);
|
||||
const iendChunk = createChunk('IEND', new Uint8Array(0));
|
||||
|
||||
const png = new Uint8Array(
|
||||
signature.length + ihdrChunk.length + idatChunk.length + iendChunk.length
|
||||
);
|
||||
let offset = 0;
|
||||
png.set(signature, offset);
|
||||
offset += signature.length;
|
||||
png.set(ihdrChunk, offset);
|
||||
offset += ihdrChunk.length;
|
||||
png.set(idatChunk, offset);
|
||||
offset += idatChunk.length;
|
||||
png.set(iendChunk, offset);
|
||||
|
||||
return png;
|
||||
} catch {
|
||||
// Fall back to uncompressed
|
||||
return exportToPngBytes(image);
|
||||
}
|
||||
|
||||
const zlibData = pako.deflate(rawData, { level: 9 });
|
||||
|
||||
const idatChunk = createChunk('IDAT', zlibData);
|
||||
const iendChunk = createChunk('IEND', new Uint8Array(0));
|
||||
|
||||
const png = new Uint8Array(
|
||||
signature.length + ihdrChunk.length + idatChunk.length + iendChunk.length
|
||||
);
|
||||
let offset = 0;
|
||||
png.set(signature, offset);
|
||||
offset += signature.length;
|
||||
png.set(ihdrChunk, offset);
|
||||
offset += ihdrChunk.length;
|
||||
png.set(idatChunk, offset);
|
||||
offset += idatChunk.length;
|
||||
png.set(iendChunk, offset);
|
||||
|
||||
return png;
|
||||
}
|
||||
|
||||
// =============================================================================
|
||||
// PNG DECODING
|
||||
// =============================================================================
|
||||
|
||||
/**
 * Verify the CRC of a PNG chunk against the value stored in the file.
 * The CRC covers the 4-byte chunk type plus the chunk data (not the length).
 * Returns normally when valid; throws an Error naming the chunk on mismatch.
 * (Fixed doc: previously claimed "returns true", but the function is void.)
 */
function verifyChunkCrc(pngData: Uint8Array, chunkStart: number, dataLength: number): void {
  // CRC is computed over type (4 bytes) + data
  const crcDataStart = chunkStart + 4; // skip length field
  const crcDataLength = 4 + dataLength; // type + data
  const crcData = pngData.slice(crcDataStart, crcDataStart + crcDataLength);
  const computed = crc32(crcData) >>> 0;

  // Stored CRC sits right after the data: length(4) + type(4) + data
  const view = new DataView(pngData.buffer, pngData.byteOffset + chunkStart + 8 + dataLength);
  const stored = view.getUint32(0, false) >>> 0;

  if (computed !== stored) {
    // Decode the 4-char chunk type for a readable error message
    const type = String.fromCharCode(
      pngData[chunkStart + 4],
      pngData[chunkStart + 5],
      pngData[chunkStart + 6],
      pngData[chunkStart + 7]
    );
    throw new Error(
      `PNG CRC mismatch in ${type} chunk (expected ${stored.toString(16)}, got ${computed.toString(16)})`
    );
  }
}
|
||||
|
||||
/**
|
||||
* Apply PNG row filter to reconstruct original pixel data.
|
||||
* Supports filter types 0 (None), 1 (Sub), 2 (Up), 3 (Average), 4 (Paeth).
|
||||
*/
|
||||
function unfilterRow(
|
||||
filterType: number,
|
||||
currentRow: Uint8Array,
|
||||
previousRow: Uint8Array | null,
|
||||
bytesPerPixel: number
|
||||
): Uint8Array {
|
||||
const result = new Uint8Array(currentRow.length);
|
||||
|
||||
switch (filterType) {
|
||||
case 0: // None
|
||||
result.set(currentRow);
|
||||
break;
|
||||
|
||||
case 1: // Sub
|
||||
for (let i = 0; i < currentRow.length; i++) {
|
||||
const a = i >= bytesPerPixel ? result[i - bytesPerPixel] : 0;
|
||||
result[i] = (currentRow[i] + a) & 0xff;
|
||||
}
|
||||
break;
|
||||
|
||||
case 2: // Up
|
||||
for (let i = 0; i < currentRow.length; i++) {
|
||||
const b = previousRow ? previousRow[i] : 0;
|
||||
result[i] = (currentRow[i] + b) & 0xff;
|
||||
}
|
||||
break;
|
||||
|
||||
case 3: // Average
|
||||
for (let i = 0; i < currentRow.length; i++) {
|
||||
const a = i >= bytesPerPixel ? result[i - bytesPerPixel] : 0;
|
||||
const b = previousRow ? previousRow[i] : 0;
|
||||
result[i] = (currentRow[i] + Math.floor((a + b) / 2)) & 0xff;
|
||||
}
|
||||
break;
|
||||
|
||||
case 4: // Paeth
|
||||
for (let i = 0; i < currentRow.length; i++) {
|
||||
const a = i >= bytesPerPixel ? result[i - bytesPerPixel] : 0;
|
||||
const b = previousRow ? previousRow[i] : 0;
|
||||
const c = i >= bytesPerPixel && previousRow ? previousRow[i - bytesPerPixel] : 0;
|
||||
result[i] = (currentRow[i] + paethPredictor(a, b, c)) & 0xff;
|
||||
}
|
||||
break;
|
||||
|
||||
default:
|
||||
throw new Error(`Unknown PNG filter type: ${filterType}`);
|
||||
}
|
||||
|
||||
return result;
|
||||
}
|
||||
|
||||
/**
|
||||
* Paeth predictor function used in PNG filter type 4
|
||||
*/
|
||||
function paethPredictor(a: number, b: number, c: number): number {
|
||||
const p = a + b - c;
|
||||
const pa = Math.abs(p - a);
|
||||
const pb = Math.abs(p - b);
|
||||
const pc = Math.abs(p - c);
|
||||
if (pa <= pb && pa <= pc) return a;
|
||||
if (pb <= pc) return b;
|
||||
return c;
|
||||
}
|
||||
|
||||
/**
|
||||
* Parse PNG bytes to SpiralImage
|
||||
*/
|
||||
export async function importFromPngBytes(pngData: Uint8Array): Promise<SpiralImage> {
|
||||
if (pngData.length < 8) {
|
||||
throw new Error('Invalid PNG: data too short');
|
||||
}
|
||||
|
||||
// Verify PNG signature
|
||||
const signature = [137, 80, 78, 71, 13, 10, 26, 10];
|
||||
for (let i = 0; i < 8; i++) {
|
||||
|
|
@ -270,15 +299,19 @@ export async function importFromPngBytes(pngData: Uint8Array): Promise<SpiralIma
|
|||
|
||||
let width = 0;
|
||||
let height = 0;
|
||||
let bitDepth = 0;
|
||||
let colorType = 0;
|
||||
const idatChunks: Uint8Array[] = [];
|
||||
|
||||
// Parse chunks
|
||||
// Parse chunks with CRC validation
|
||||
let offset = 8;
|
||||
while (offset < pngData.length) {
|
||||
while (offset + 12 <= pngData.length) {
|
||||
const view = new DataView(pngData.buffer, pngData.byteOffset + offset);
|
||||
const length = view.getUint32(0, false);
|
||||
|
||||
// Validate chunk boundaries
|
||||
if (offset + 12 + length > pngData.length) {
|
||||
throw new Error('PNG chunk extends beyond file boundary');
|
||||
}
|
||||
|
||||
const type = String.fromCharCode(
|
||||
pngData[offset + 4],
|
||||
pngData[offset + 5],
|
||||
|
|
@ -287,12 +320,17 @@ export async function importFromPngBytes(pngData: Uint8Array): Promise<SpiralIma
|
|||
);
|
||||
const data = pngData.slice(offset + 8, offset + 8 + length);
|
||||
|
||||
// Validate CRC for critical chunks
|
||||
if (type === 'IHDR' || type === 'IDAT' || type === 'IEND') {
|
||||
verifyChunkCrc(pngData, offset, length);
|
||||
}
|
||||
|
||||
if (type === 'IHDR') {
|
||||
const ihdrView = new DataView(data.buffer, data.byteOffset);
|
||||
width = ihdrView.getUint32(0, false);
|
||||
height = ihdrView.getUint32(4, false);
|
||||
bitDepth = data[8];
|
||||
colorType = data[9];
|
||||
const bitDepth = data[8];
|
||||
const colorType = data[9];
|
||||
|
||||
if (bitDepth !== 8 || colorType !== 2) {
|
||||
throw new Error('Only 8-bit RGB PNGs are supported');
|
||||
|
|
@ -310,6 +348,14 @@ export async function importFromPngBytes(pngData: Uint8Array): Promise<SpiralIma
|
|||
throw new Error('Invalid PNG: no IHDR chunk');
|
||||
}
|
||||
|
||||
if (width !== height || width % 2 === 0) {
|
||||
throw new Error('SpiralDB requires odd square images');
|
||||
}
|
||||
|
||||
if (idatChunks.length === 0) {
|
||||
throw new Error('Invalid PNG: no IDAT chunks');
|
||||
}
|
||||
|
||||
// Combine IDAT chunks
|
||||
const compressedLength = idatChunks.reduce((sum, c) => sum + c.length, 0);
|
||||
const compressed = new Uint8Array(compressedLength);
|
||||
|
|
@ -322,33 +368,38 @@ export async function importFromPngBytes(pngData: Uint8Array): Promise<SpiralIma
|
|||
// Decompress using pako
|
||||
let rawData: Uint8Array;
|
||||
try {
|
||||
const pakoModule = await import('pako');
|
||||
const pako = pakoModule.default || pakoModule;
|
||||
// Decompress the zlib data (includes header)
|
||||
rawData = pako.inflate(compressed);
|
||||
} catch (e) {
|
||||
throw new Error(`PNG decompression failed: ${e}`);
|
||||
}
|
||||
|
||||
// Parse raw data (with filter bytes)
|
||||
const image = createImage(width);
|
||||
if (width !== height || width % 2 === 0) {
|
||||
throw new Error('SpiralDB requires odd square images');
|
||||
// Validate decompressed data size
|
||||
const expectedSize = height * (1 + width * 3); // filter byte + RGB per row
|
||||
if (rawData.length !== expectedSize) {
|
||||
throw new Error(
|
||||
`PNG data size mismatch: expected ${expectedSize} bytes, got ${rawData.length}`
|
||||
);
|
||||
}
|
||||
|
||||
// Parse raw data with filter support
|
||||
const image = createImage(width);
|
||||
const bytesPerPixel = 3; // RGB
|
||||
const rowBytes = width * 3;
|
||||
let previousRow: Uint8Array | null = null;
|
||||
let rawOffset = 0;
|
||||
for (let y = 0; y < height; y++) {
|
||||
const filterByte = rawData[rawOffset++];
|
||||
if (filterByte !== 0) {
|
||||
throw new Error(`Unsupported PNG filter: ${filterByte}`);
|
||||
}
|
||||
|
||||
for (let x = 0; x < width; x++) {
|
||||
const pixelOffset = (y * width + x) * 3;
|
||||
image.pixels[pixelOffset] = rawData[rawOffset++];
|
||||
image.pixels[pixelOffset + 1] = rawData[rawOffset++];
|
||||
image.pixels[pixelOffset + 2] = rawData[rawOffset++];
|
||||
}
|
||||
for (let y = 0; y < height; y++) {
|
||||
const filterType = rawData[rawOffset++];
|
||||
const filteredRow = rawData.slice(rawOffset, rawOffset + rowBytes);
|
||||
rawOffset += rowBytes;
|
||||
|
||||
const unfilteredRow = unfilterRow(filterType, filteredRow, previousRow, bytesPerPixel);
|
||||
|
||||
// Copy to image
|
||||
const pixelStart = y * width * 3;
|
||||
image.pixels.set(unfilteredRow, pixelStart);
|
||||
|
||||
previousRow = unfilteredRow;
|
||||
}
|
||||
|
||||
return image;
|
||||
|
|
|
|||
204
packages/spiral-db/src/schema.test.ts
Normal file
204
packages/spiral-db/src/schema.test.ts
Normal file
|
|
@ -0,0 +1,204 @@
|
|||
/**
|
||||
* Schema Tests
|
||||
*/
|
||||
|
||||
import { describe, it, expect } from 'vitest';
|
||||
import {
|
||||
encodeSchema,
|
||||
decodeSchema,
|
||||
getSchemaPixelCount,
|
||||
createTodoSchema,
|
||||
validateRecord,
|
||||
getFieldNames,
|
||||
} from './schema.js';
|
||||
|
||||
// =============================================================================
|
||||
// SCHEMA ENCODE / DECODE
|
||||
// =============================================================================
|
||||
|
||||
// encodeSchema/decodeSchema round-trips: field names are NOT stored in the
// pixels, so decodeSchema takes them separately and must reattach them in order.
describe('Schema Encode/Decode', () => {
  it('should round-trip the todo schema', () => {
    const schema = createTodoSchema();
    const pixels = encodeSchema(schema);
    const fieldNames = getFieldNames(schema);
    const decoded = decodeSchema(pixels, fieldNames);

    expect(decoded.version).toBe(schema.version);
    expect(decoded.fields.length).toBe(schema.fields.length);

    // Every field property must survive; absent `nullable` decodes as false
    for (let i = 0; i < schema.fields.length; i++) {
      expect(decoded.fields[i].name).toBe(schema.fields[i].name);
      expect(decoded.fields[i].type).toBe(schema.fields[i].type);
      expect(decoded.fields[i].maxLength).toBe(schema.fields[i].maxLength);
      expect(decoded.fields[i].nullable).toBe(schema.fields[i].nullable ?? false);
    }
  });

  it('should encode a minimal schema (1 field)', () => {
    const schema = {
      version: 1,
      name: 'minimal',
      fields: [{ name: 'id', type: 'int' as const, maxLength: 8 }],
    };
    const pixels = encodeSchema(schema);
    expect(pixels.length).toBeGreaterThan(0);

    const decoded = decodeSchema(pixels, ['id']);
    expect(decoded.fields).toHaveLength(1);
    expect(decoded.fields[0].type).toBe('int');
    expect(decoded.fields[0].maxLength).toBe(8);
  });

  it('should handle nullable fields correctly', () => {
    const schema = {
      version: 2,
      name: 'nullable_test',
      fields: [
        { name: 'required', type: 'string' as const, maxLength: 100 },
        { name: 'optional', type: 'string' as const, maxLength: 100, nullable: true },
      ],
    };
    const pixels = encodeSchema(schema);
    const decoded = decodeSchema(pixels, ['required', 'optional']);

    expect(decoded.fields[0].nullable).toBe(false);
    expect(decoded.fields[1].nullable).toBe(true);
  });

  it('should preserve version number', () => {
    // 511 presumably the max encodable version (9 bits) — TODO confirm against encodeSchema
    const schema = { version: 511, name: 'max_ver', fields: [] };
    const pixels = encodeSchema(schema);
    const decoded = decodeSchema(pixels, []);
    expect(decoded.version).toBe(511);
  });
});
|
||||
|
||||
// getSchemaPixelCount must agree exactly with what encodeSchema actually emits.
describe('getSchemaPixelCount', () => {
  it('should calculate pixel count for todo schema', () => {
    const schema = createTodoSchema();
    const count = getSchemaPixelCount(schema);
    const actualPixels = encodeSchema(schema);
    expect(count).toBe(actualPixels.length);
  });

  it('should return at least 1 for empty schema', () => {
    // Even a field-less schema needs at least one pixel (header/version)
    const schema = { version: 0, name: 'empty', fields: [] };
    const count = getSchemaPixelCount(schema);
    expect(count).toBeGreaterThanOrEqual(1);
  });
});
|
||||
|
||||
// =============================================================================
|
||||
// VALIDATE RECORD
|
||||
// =============================================================================
|
||||
|
||||
describe('validateRecord', () => {
|
||||
const schema = createTodoSchema();
|
||||
|
||||
const validTodo = {
|
||||
id: 0,
|
||||
status: 0,
|
||||
priority: 1,
|
||||
createdAt: new Date(),
|
||||
dueDate: null,
|
||||
completedAt: null,
|
||||
title: 'Test',
|
||||
description: null,
|
||||
tags: [],
|
||||
};
|
||||
|
||||
it('should accept a valid record', () => {
|
||||
const result = validateRecord(schema, validTodo);
|
||||
expect(result.valid).toBe(true);
|
||||
expect(result.errors).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('should reject missing required field', () => {
|
||||
const record = { ...validTodo };
|
||||
delete (record as Record<string, unknown>).title;
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors).toContain("Field 'title' is required");
|
||||
});
|
||||
|
||||
it('should allow null for nullable fields', () => {
|
||||
const record = { ...validTodo, dueDate: null, completedAt: null, description: null };
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject non-integer for int field', () => {
|
||||
const record = { ...validTodo, priority: 1.5 };
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('priority'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject out-of-range int', () => {
|
||||
const record = { ...validTodo, id: 5000 }; // max 4095 for 12-bit
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('out of range'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject negative int', () => {
|
||||
const record = { ...validTodo, priority: -1 };
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
});
|
||||
|
||||
it('should reject string too long', () => {
|
||||
const record = { ...validTodo, title: 'x'.repeat(256) }; // max 255
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('too long'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject wrong type for timestamp', () => {
|
||||
const record = { ...validTodo, createdAt: '2025-01-01' };
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('must be a Date'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject non-array for array field', () => {
|
||||
const record = { ...validTodo, tags: 'not-array' };
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('must be an array'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should reject array with too many items', () => {
|
||||
const record = { ...validTodo, tags: [1, 2, 3, 4, 5, 6, 7, 8, 9] }; // max 8
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.some((e) => e.includes('too many items'))).toBe(true);
|
||||
});
|
||||
|
||||
it('should collect multiple errors', () => {
|
||||
const record = {
|
||||
id: -1,
|
||||
status: 'invalid',
|
||||
priority: 100,
|
||||
createdAt: 'not-a-date',
|
||||
dueDate: null,
|
||||
completedAt: null,
|
||||
title: 123,
|
||||
description: null,
|
||||
tags: 'not-array',
|
||||
};
|
||||
const result = validateRecord(schema, record);
|
||||
expect(result.valid).toBe(false);
|
||||
expect(result.errors.length).toBeGreaterThan(3);
|
||||
});
|
||||
});
|
||||
|
||||
describe('getFieldNames', () => {
|
||||
it('should return field names in order', () => {
|
||||
const schema = createTodoSchema();
|
||||
const names = getFieldNames(schema);
|
||||
expect(names[0]).toBe('id');
|
||||
expect(names[names.length - 1]).toBe('tags');
|
||||
expect(names.length).toBe(schema.fields.length);
|
||||
});
|
||||
});
|
||||