fix(calendar,contacts,todo): pre-launch architecture audit fixes

Critical bugs: fix contacts delete() inverted logic, fix photo URLs
hardcoded to localhost:9000. Add missing DB indexes across all three
apps (27 indexes total). Add data integrity constraints: cascade delete
on tasks.projectId, unique label names per user, unique default calendar
per user with race condition handling. Wrap 12 multi-step operations in
transactions (todo app). Replace contacts duplicate detection full-table scan
with targeted SQL GROUP BY queries. Fix calendar N+1 event tag queries
with batch loading. Fix contacts tagId filter not being applied. Add
proper RRULE error logging. Clear calendar auth store on sign-out.

Co-Authored-By: Claude Opus 4.6 <noreply@anthropic.com>
This commit is contained in:
Till JS 2026-03-19 10:00:08 +01:00
parent 7a56699d45
commit 8e4b331cb3
24 changed files with 796 additions and 426 deletions

View file

@ -104,17 +104,44 @@ export class CalendarService {
.limit(1);
if (anyCalendar.length > 0) {
// Make it the default
const [updated] = await this.db
.update(calendars)
.set({ isDefault: true, updatedAt: new Date() })
.where(eq(calendars.id, anyCalendar[0].id))
.returning();
return updated;
// Make it the default — unique partial index prevents duplicates
try {
const [updated] = await this.db
.update(calendars)
.set({ isDefault: true, updatedAt: new Date() })
.where(eq(calendars.id, anyCalendar[0].id))
.returning();
return updated;
} catch (error: any) {
// Unique constraint violation — another request already set a default
if (error?.code === '23505') {
const [defaultCal] = await this.db
.select()
.from(calendars)
.where(and(eq(calendars.userId, userId), eq(calendars.isDefault, true)));
return defaultCal;
}
throw error;
}
}
// Create default calendars for new user
await this.createDefaultCalendars(userId);
// Create default calendars for new user — unique partial index prevents
// concurrent requests from creating duplicate defaults
try {
await this.createDefaultCalendars(userId);
} catch (error: any) {
// Unique constraint violation — another request already created defaults
if (error?.code === '23505') {
const [defaultCal] = await this.db
.select()
.from(calendars)
.where(and(eq(calendars.userId, userId), eq(calendars.isDefault, true)));
if (defaultCal) {
return defaultCal;
}
}
throw error;
}
// Return the default one
const defaultCal = await this.db

View file

@ -1,4 +1,4 @@
import { pgTable, uuid, timestamp, varchar, unique, text } from 'drizzle-orm/pg-core';
import { pgTable, uuid, timestamp, varchar, unique, text, index } from 'drizzle-orm/pg-core';
import { calendars } from './calendars.schema';
/**
@ -35,6 +35,9 @@ export const calendarShares = pgTable(
(table) => ({
uniqueUserShare: unique().on(table.calendarId, table.sharedWithUserId),
uniqueEmailShare: unique().on(table.calendarId, table.sharedWithEmail),
calendarIdx: index('calendar_shares_calendar_idx').on(table.calendarId),
sharedWithUserIdx: index('calendar_shares_shared_with_user_idx').on(table.sharedWithUserId),
shareTokenIdx: index('calendar_shares_token_idx').on(table.shareToken),
})
);

View file

@ -1,4 +1,15 @@
import { pgTable, uuid, text, timestamp, varchar, boolean, jsonb } from 'drizzle-orm/pg-core';
import {
pgTable,
uuid,
text,
timestamp,
varchar,
boolean,
jsonb,
index,
uniqueIndex,
} from 'drizzle-orm/pg-core';
import { sql } from 'drizzle-orm';
/**
* Calendar settings stored in JSONB
@ -14,19 +25,28 @@ export interface CalendarSettings {
/**
* Calendars table - stores user calendars
*/
export const calendars = pgTable('calendars', {
id: uuid('id').primaryKey().defaultRandom(),
userId: text('user_id').notNull(),
name: varchar('name', { length: 255 }).notNull(),
description: text('description'),
color: varchar('color', { length: 7 }).default('#3B82F6'),
isDefault: boolean('is_default').default(false),
isVisible: boolean('is_visible').default(true),
timezone: varchar('timezone', { length: 100 }).default('Europe/Berlin'),
settings: jsonb('settings').$type<CalendarSettings>(),
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
});
/**
 * Calendars table — one row per user-owned calendar.
 * A user may mark at most one calendar as the default; this is enforced
 * at the database level by the partial unique index below.
 */
export const calendars = pgTable(
  'calendars',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    userId: text('user_id').notNull(),
    name: varchar('name', { length: 255 }).notNull(),
    description: text('description'),
    color: varchar('color', { length: 7 }).default('#3B82F6'),
    isDefault: boolean('is_default').default(false),
    isVisible: boolean('is_visible').default(true),
    timezone: varchar('timezone', { length: 100 }).default('Europe/Berlin'),
    settings: jsonb('settings').$type<CalendarSettings>(),
    createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
  },
  (table) => ({
    // Speeds up the common "all calendars for this user" lookup.
    userIdx: index('calendars_user_idx').on(table.userId),
    // Partial unique index: only rows with is_default = true participate,
    // so each user can have at most one default calendar. Concurrent
    // writers that race on this raise a unique violation (Postgres 23505),
    // which callers are expected to catch and resolve by re-reading.
    uniqueDefaultPerUser: uniqueIndex('calendars_unique_default_per_user')
      .on(table.userId)
      .where(sql`${table.isDefault} = true`),
  })
);
export type Calendar = typeof calendars.$inferSelect;
export type NewCalendar = typeof calendars.$inferInsert;

View file

@ -7,6 +7,7 @@ import {
boolean,
jsonb,
integer,
index,
} from 'drizzle-orm/pg-core';
/**
@ -25,41 +26,52 @@ export interface ExternalCalendarProviderData {
/**
* External calendars table - stores CalDAV/iCal connections
*/
export const externalCalendars = pgTable('external_calendars', {
id: uuid('id').primaryKey().defaultRandom(),
userId: text('user_id').notNull(),
export const externalCalendars = pgTable(
'external_calendars',
{
id: uuid('id').primaryKey().defaultRandom(),
userId: text('user_id').notNull(),
// Calendar identification
name: varchar('name', { length: 255 }).notNull(),
provider: varchar('provider', { length: 50 }).notNull(), // google, apple, caldav, ical_url
// Calendar identification
name: varchar('name', { length: 255 }).notNull(),
provider: varchar('provider', { length: 50 }).notNull(), // google, apple, caldav, ical_url
// Connection details
calendarUrl: text('calendar_url').notNull(),
username: varchar('username', { length: 255 }),
encryptedPassword: text('encrypted_password'),
// Connection details
calendarUrl: text('calendar_url').notNull(),
username: varchar('username', { length: 255 }),
encryptedPassword: text('encrypted_password'),
// OAuth tokens (for Google, etc.)
accessToken: text('access_token'),
refreshToken: text('refresh_token'),
tokenExpiresAt: timestamp('token_expires_at', { withTimezone: true }),
// OAuth tokens (for Google, etc.)
accessToken: text('access_token'),
refreshToken: text('refresh_token'),
tokenExpiresAt: timestamp('token_expires_at', { withTimezone: true }),
// Sync settings
syncEnabled: boolean('sync_enabled').default(true),
syncDirection: varchar('sync_direction', { length: 20 }).default('both'), // import, export, both
syncInterval: integer('sync_interval').default(15), // Minutes between syncs
lastSyncAt: timestamp('last_sync_at', { withTimezone: true }),
lastSyncError: text('last_sync_error'),
// Sync settings
syncEnabled: boolean('sync_enabled').default(true),
syncDirection: varchar('sync_direction', { length: 20 }).default('both'), // import, export, both
syncInterval: integer('sync_interval').default(15), // Minutes between syncs
lastSyncAt: timestamp('last_sync_at', { withTimezone: true }),
lastSyncError: text('last_sync_error'),
// Display settings
color: varchar('color', { length: 7 }).default('#6B7280'),
isVisible: boolean('is_visible').default(true),
// Display settings
color: varchar('color', { length: 7 }).default('#6B7280'),
isVisible: boolean('is_visible').default(true),
// Provider-specific metadata
providerData: jsonb('provider_data').$type<ExternalCalendarProviderData>(),
// Provider-specific metadata
providerData: jsonb('provider_data').$type<ExternalCalendarProviderData>(),
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
});
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
},
(table) => ({
userIdx: index('external_calendars_user_idx').on(table.userId),
providerIdx: index('external_calendars_provider_idx').on(table.provider, table.userId),
syncEnabledIdx: index('external_calendars_sync_enabled_idx').on(
table.syncEnabled,
table.lastSyncAt
),
})
);
export type ExternalCalendar = typeof externalCalendars.$inferSelect;
export type NewExternalCalendar = typeof externalCalendars.$inferInsert;

View file

@ -78,6 +78,25 @@ export class EventTagService {
return results.map((r) => r.tag);
}
/**
 * Batch-load the tags for many events at once.
 *
 * Returns a map from event id to its tags; events without tags are simply
 * absent from the map. Issues a single joined query regardless of how many
 * ids are passed, so callers can avoid per-event queries.
 */
async getTagsForEvents(eventIds: string[]): Promise<Map<string, EventTag[]>> {
  const byEvent = new Map<string, EventTag[]>();
  if (eventIds.length === 0) return byEvent;
  const rows = await this.db
    .select({ eventId: eventToTags.eventId, tag: eventTags })
    .from(eventToTags)
    .innerJoin(eventTags, eq(eventToTags.tagId, eventTags.id))
    .where(inArray(eventToTags.eventId, eventIds));
  for (const row of rows) {
    const bucket = byEvent.get(row.eventId);
    if (bucket) {
      bucket.push(row.tag);
    } else {
      byEvent.set(row.eventId, [row.tag]);
    }
  }
  return byEvent;
}
async getTagIdsForEvent(eventId: string): Promise<string[]> {
const results = await this.db
.select({ tagId: eventToTags.tagId })

View file

@ -226,17 +226,15 @@ export class EventService {
const result = await qb;
// Load tags for all events
const eventsWithCalendar = await Promise.all(
result.map(async (r) => {
const tags = await this.eventTagService.getTagsForEvent(r.event.id);
return {
...r.event,
calendar: r.calendar,
tags,
};
})
);
// Load tags for all events in a single batch query
const eventIds = result.map((r) => r.event.id);
const tagMap = await this.eventTagService.getTagsForEvents(eventIds);
const eventsWithCalendar = result.map((r) => ({
...r.event,
calendar: r.calendar,
tags: tagMap.get(r.event.id) || [],
}));
return eventsWithCalendar;
}

View file

@ -182,16 +182,18 @@ export const authStore = {
const authService = getAuthService();
if (!authService) {
user = null;
initialized = false;
return;
}
try {
await authService.signOut();
user = null;
} catch (error) {
console.error('Sign out error:', error);
// Clear user even if sign out fails
} finally {
// Always clear auth state, even if the remote sign-out fails
user = null;
initialized = false;
}
},

View file

@ -216,12 +216,19 @@ describe('ContactService', () => {
describe('delete', () => {
it('should delete a contact successfully', async () => {
// findById check (contact exists)
mockDb.where.mockResolvedValueOnce([mockContact]);
// delete call
mockDb.where.mockResolvedValueOnce(undefined);
// findById check (contact no longer exists)
mockDb.where.mockResolvedValueOnce([]);
await expect(service.delete('contact-1', 'user-1')).resolves.toBeUndefined();
});
it('should throw NotFoundException if contact does not exist', async () => {
// findById returns nothing
mockDb.where.mockResolvedValueOnce([]);
await expect(service.delete('contact-1', 'user-1')).rejects.toThrow(NotFoundException);
});
});
});

View file

@ -1,8 +1,8 @@
import { Injectable, Inject, NotFoundException } from '@nestjs/common';
import { eq, and, or, ilike, desc, sql, isNotNull } from 'drizzle-orm';
import { eq, and, or, ilike, desc, sql, isNotNull, inArray } from 'drizzle-orm';
import { DATABASE_CONNECTION } from '../db/database.module';
import { Database } from '../db/connection';
import { contacts } from '../db/schema';
import { contacts, contactToTags } from '../db/schema';
import type { Contact, NewContact } from '../db/schema';
export interface ContactBirthdaySummary {
@ -28,7 +28,20 @@ export class ContactService {
constructor(@Inject(DATABASE_CONNECTION) private db: Database) {}
async findByUserId(userId: string, filters: ContactFilters = {}): Promise<Contact[]> {
const { search, isFavorite, isArchived = false, limit = 50, offset = 0 } = filters;
const { search, isFavorite, isArchived = false, tagId, limit = 50, offset = 0 } = filters;
// If tagId is provided, get the set of contact IDs that have this tag
let tagContactIds: string[] | undefined;
if (tagId) {
const taggedContacts = await this.db
.select({ contactId: contactToTags.contactId })
.from(contactToTags)
.where(eq(contactToTags.tagId, tagId));
tagContactIds = taggedContacts.map((tc) => tc.contactId);
if (tagContactIds.length === 0) {
return [];
}
}
// When searching, use relevance-based sorting (name matches first, then company/email)
if (search) {
@ -57,6 +70,7 @@ export class ContactService {
eq(contacts.userId, userId),
eq(contacts.isArchived, isArchived),
isFavorite !== undefined ? eq(contacts.isFavorite, isFavorite) : undefined,
tagContactIds ? inArray(contacts.id, tagContactIds) : undefined,
or(
ilike(contacts.firstName, `%${search}%`),
ilike(contacts.lastName, `%${search}%`),
@ -82,7 +96,8 @@ export class ContactService {
and(
eq(contacts.userId, userId),
eq(contacts.isArchived, isArchived),
isFavorite !== undefined ? eq(contacts.isFavorite, isFavorite) : undefined
isFavorite !== undefined ? eq(contacts.isFavorite, isFavorite) : undefined,
tagContactIds ? inArray(contacts.id, tagContactIds) : undefined
)
)
.orderBy(desc(contacts.updatedAt))
@ -120,15 +135,12 @@ export class ContactService {
}
async delete(id: string, userId: string): Promise<void> {
const result = await this.db
.delete(contacts)
.where(and(eq(contacts.id, id), eq(contacts.userId, userId)));
// Drizzle doesn't return affected rows easily, so we check manually
const existing = await this.findById(id, userId);
if (existing) {
if (!existing) {
throw new NotFoundException('Contact not found');
}
await this.db.delete(contacts).where(and(eq(contacts.id, id), eq(contacts.userId, userId)));
}
async toggleFavorite(id: string, userId: string): Promise<Contact> {

View file

@ -1,18 +1,26 @@
import { pgTable, uuid, timestamp, varchar, text, jsonb } from 'drizzle-orm/pg-core';
import { pgTable, uuid, timestamp, varchar, text, jsonb, index } from 'drizzle-orm/pg-core';
import { contacts } from './contacts.schema';
export const contactActivities = pgTable('contact_activities', {
id: uuid('id').primaryKey().defaultRandom(),
contactId: uuid('contact_id')
.references(() => contacts.id, { onDelete: 'cascade' })
.notNull(),
userId: varchar('user_id', { length: 255 }).notNull(),
activityType: varchar('activity_type', { length: 50 }).notNull(),
// Types: 'created' | 'updated' | 'called' | 'emailed' | 'met' | 'note_added'
description: text('description'),
metadata: jsonb('metadata').$type<Record<string, unknown>>(),
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
});
/**
 * Activity log entries attached to a contact.
 * Rows are removed automatically when the parent contact is deleted
 * (onDelete: 'cascade' on contactId).
 */
export const contactActivities = pgTable(
  'contact_activities',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    contactId: uuid('contact_id')
      .references(() => contacts.id, { onDelete: 'cascade' })
      .notNull(),
    userId: varchar('user_id', { length: 255 }).notNull(),
    // Types: 'created' | 'updated' | 'called' | 'emailed' | 'met' | 'note_added'
    activityType: varchar('activity_type', { length: 50 }).notNull(),
    description: text('description'),
    metadata: jsonb('metadata').$type<Record<string, unknown>>(),
    createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
  },
  (t) => ({
    // Timeline lookups by contact, by user, and ordered by recency.
    contactIdx: index('contact_activities_contact_idx').on(t.contactId),
    userIdx: index('contact_activities_user_idx').on(t.userId),
    createdAtIdx: index('contact_activities_created_at_idx').on(t.createdAt),
  })
);
export type ContactActivity = typeof contactActivities.$inferSelect;
export type NewContactActivity = typeof contactActivities.$inferInsert;

View file

@ -1,4 +1,4 @@
import { pgTable, uuid, timestamp, varchar, text, jsonb } from 'drizzle-orm/pg-core';
import { pgTable, uuid, timestamp, varchar, text, jsonb, index } from 'drizzle-orm/pg-core';
export interface GoogleContactsProviderData {
syncToken?: string;
@ -9,28 +9,35 @@ export interface GoogleContactsProviderData {
export type ProviderData = GoogleContactsProviderData;
export const connectedAccounts = pgTable('connected_accounts', {
id: uuid('id').primaryKey().defaultRandom(),
userId: varchar('user_id', { length: 255 }).notNull(),
export const connectedAccounts = pgTable(
'connected_accounts',
{
id: uuid('id').primaryKey().defaultRandom(),
userId: varchar('user_id', { length: 255 }).notNull(),
// Provider identification
provider: varchar('provider', { length: 50 }).notNull(), // 'google'
providerAccountId: varchar('provider_account_id', { length: 255 }),
providerEmail: varchar('provider_email', { length: 255 }),
// Provider identification
provider: varchar('provider', { length: 50 }).notNull(), // 'google'
providerAccountId: varchar('provider_account_id', { length: 255 }),
providerEmail: varchar('provider_email', { length: 255 }),
// OAuth tokens
accessToken: text('access_token').notNull(),
refreshToken: text('refresh_token'),
tokenExpiresAt: timestamp('token_expires_at', { withTimezone: true }),
scope: text('scope'),
// OAuth tokens
accessToken: text('access_token').notNull(),
refreshToken: text('refresh_token'),
tokenExpiresAt: timestamp('token_expires_at', { withTimezone: true }),
scope: text('scope'),
// Provider-specific metadata
providerData: jsonb('provider_data').$type<ProviderData>(),
// Provider-specific metadata
providerData: jsonb('provider_data').$type<ProviderData>(),
// Timestamps
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
});
// Timestamps
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
},
(table) => ({
userIdx: index('connected_accounts_user_idx').on(table.userId),
userProviderIdx: index('connected_accounts_user_provider_idx').on(table.userId, table.provider),
})
);
export type ConnectedAccount = typeof connectedAccounts.$inferSelect;
export type NewConnectedAccount = typeof connectedAccounts.$inferInsert;

View file

@ -1,68 +1,87 @@
import { pgTable, uuid, timestamp, varchar, text, boolean, date, jsonb } from 'drizzle-orm/pg-core';
import {
pgTable,
uuid,
timestamp,
varchar,
text,
boolean,
date,
jsonb,
index,
} from 'drizzle-orm/pg-core';
export const contacts = pgTable('contacts', {
id: uuid('id').primaryKey().defaultRandom(),
userId: varchar('user_id', { length: 255 }).notNull(),
export const contacts = pgTable(
'contacts',
{
id: uuid('id').primaryKey().defaultRandom(),
userId: varchar('user_id', { length: 255 }).notNull(),
// Basic Info
firstName: varchar('first_name', { length: 100 }),
lastName: varchar('last_name', { length: 100 }),
displayName: varchar('display_name', { length: 200 }),
nickname: varchar('nickname', { length: 100 }),
// Basic Info
firstName: varchar('first_name', { length: 100 }),
lastName: varchar('last_name', { length: 100 }),
displayName: varchar('display_name', { length: 200 }),
nickname: varchar('nickname', { length: 100 }),
// Contact Details
email: varchar('email', { length: 255 }),
phone: varchar('phone', { length: 50 }),
mobile: varchar('mobile', { length: 50 }),
// Contact Details
email: varchar('email', { length: 255 }),
phone: varchar('phone', { length: 50 }),
mobile: varchar('mobile', { length: 50 }),
// Address
street: varchar('street', { length: 255 }),
city: varchar('city', { length: 100 }),
postalCode: varchar('postal_code', { length: 20 }),
country: varchar('country', { length: 100 }),
// Address
street: varchar('street', { length: 255 }),
city: varchar('city', { length: 100 }),
postalCode: varchar('postal_code', { length: 20 }),
country: varchar('country', { length: 100 }),
// Organization
company: varchar('company', { length: 200 }),
jobTitle: varchar('job_title', { length: 200 }),
department: varchar('department', { length: 200 }),
// Organization
company: varchar('company', { length: 200 }),
jobTitle: varchar('job_title', { length: 200 }),
department: varchar('department', { length: 200 }),
// Additional
website: varchar('website', { length: 500 }),
birthday: date('birthday'),
notes: text('notes'),
photoUrl: varchar('photo_url', { length: 500 }),
customDates: jsonb('custom_dates').$type<CustomDate[]>().default([]),
// Additional
website: varchar('website', { length: 500 }),
birthday: date('birthday'),
notes: text('notes'),
photoUrl: varchar('photo_url', { length: 500 }),
customDates: jsonb('custom_dates').$type<CustomDate[]>().default([]),
// Social Media
linkedin: varchar('linkedin', { length: 255 }),
twitter: varchar('twitter', { length: 100 }),
facebook: varchar('facebook', { length: 255 }),
instagram: varchar('instagram', { length: 100 }),
xing: varchar('xing', { length: 255 }),
github: varchar('github', { length: 100 }),
youtube: varchar('youtube', { length: 255 }),
tiktok: varchar('tiktok', { length: 100 }),
telegram: varchar('telegram', { length: 100 }),
whatsapp: varchar('whatsapp', { length: 50 }),
signal: varchar('signal', { length: 50 }),
discord: varchar('discord', { length: 100 }),
bluesky: varchar('bluesky', { length: 100 }),
// Social Media
linkedin: varchar('linkedin', { length: 255 }),
twitter: varchar('twitter', { length: 100 }),
facebook: varchar('facebook', { length: 255 }),
instagram: varchar('instagram', { length: 100 }),
xing: varchar('xing', { length: 255 }),
github: varchar('github', { length: 100 }),
youtube: varchar('youtube', { length: 255 }),
tiktok: varchar('tiktok', { length: 100 }),
telegram: varchar('telegram', { length: 100 }),
whatsapp: varchar('whatsapp', { length: 50 }),
signal: varchar('signal', { length: 50 }),
discord: varchar('discord', { length: 100 }),
bluesky: varchar('bluesky', { length: 100 }),
// Flags
isFavorite: boolean('is_favorite').default(false),
isArchived: boolean('is_archived').default(false),
// Flags
isFavorite: boolean('is_favorite').default(false),
isArchived: boolean('is_archived').default(false),
// Manacore Integration
organizationId: uuid('organization_id'),
teamId: uuid('team_id'),
visibility: varchar('visibility', { length: 20 }).default('private'),
createdBy: varchar('created_by', { length: 255 }).notNull(),
sharedWith: jsonb('shared_with').$type<string[]>().default([]),
// Manacore Integration
organizationId: uuid('organization_id'),
teamId: uuid('team_id'),
visibility: varchar('visibility', { length: 20 }).default('private'),
createdBy: varchar('created_by', { length: 255 }).notNull(),
sharedWith: jsonb('shared_with').$type<string[]>().default([]),
// Metadata
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
});
// Metadata
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
},
(table) => ({
userIdx: index('contacts_user_idx').on(table.userId),
createdAtIdx: index('contacts_created_at_idx').on(table.createdAt),
organizationIdx: index('contacts_organization_idx').on(table.organizationId),
teamIdx: index('contacts_team_idx').on(table.teamId),
})
);
export type Contact = typeof contacts.$inferSelect;
export type NewContact = typeof contacts.$inferInsert;

View file

@ -1,17 +1,25 @@
import { pgTable, uuid, timestamp, varchar, text, boolean } from 'drizzle-orm/pg-core';
import { pgTable, uuid, timestamp, varchar, text, boolean, index } from 'drizzle-orm/pg-core';
import { contacts } from './contacts.schema';
export const contactNotes = pgTable('contact_notes', {
id: uuid('id').primaryKey().defaultRandom(),
contactId: uuid('contact_id')
.references(() => contacts.id, { onDelete: 'cascade' })
.notNull(),
userId: varchar('user_id', { length: 255 }).notNull(),
content: text('content').notNull(),
isPinned: boolean('is_pinned').default(false),
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
});
/**
 * Free-form notes attached to a contact; a note may be pinned.
 * Cascades on contact deletion via the contactId foreign key.
 */
export const contactNotes = pgTable(
  'contact_notes',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    contactId: uuid('contact_id')
      .references(() => contacts.id, { onDelete: 'cascade' })
      .notNull(),
    userId: varchar('user_id', { length: 255 }).notNull(),
    content: text('content').notNull(),
    isPinned: boolean('is_pinned').default(false),
    createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
    updatedAt: timestamp('updated_at', { withTimezone: true }).defaultNow().notNull(),
  },
  (t) => ({
    // Note lists are fetched per contact / per user and sorted by date.
    contactIdx: index('contact_notes_contact_idx').on(t.contactId),
    userIdx: index('contact_notes_user_idx').on(t.userId),
    createdAtIdx: index('contact_notes_created_at_idx').on(t.createdAt),
  })
);
export type ContactNote = typeof contactNotes.$inferSelect;
export type NewContactNote = typeof contactNotes.$inferInsert;

View file

@ -1,13 +1,19 @@
import { pgTable, uuid, timestamp, varchar, primaryKey } from 'drizzle-orm/pg-core';
import { pgTable, uuid, timestamp, varchar, primaryKey, index } from 'drizzle-orm/pg-core';
import { contacts } from './contacts.schema';
export const contactTags = pgTable('contact_tags', {
id: uuid('id').primaryKey().defaultRandom(),
userId: varchar('user_id', { length: 255 }).notNull(),
name: varchar('name', { length: 50 }).notNull(),
color: varchar('color', { length: 20 }),
createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
});
/**
 * User-defined tags that can be attached to contacts
 * (the contact↔tag mapping lives in contactToTags).
 */
export const contactTags = pgTable(
  'contact_tags',
  {
    id: uuid('id').primaryKey().defaultRandom(),
    userId: varchar('user_id', { length: 255 }).notNull(),
    name: varchar('name', { length: 50 }).notNull(),
    color: varchar('color', { length: 20 }),
    createdAt: timestamp('created_at', { withTimezone: true }).defaultNow().notNull(),
  },
  (t) => ({
    // Tags are always listed per user.
    userIdx: index('contact_tags_user_idx').on(t.userId),
  })
);
export const contactToTags = pgTable(
'contact_to_tags',
@ -21,6 +27,8 @@ export const contactToTags = pgTable(
},
(table) => ({
pk: primaryKey({ columns: [table.contactId, table.tagId] }),
contactIdx: index('contact_to_tags_contact_idx').on(table.contactId),
tagIdx: index('contact_to_tags_tag_idx').on(table.tagId),
})
);

View file

@ -1,5 +1,5 @@
import { Injectable, Inject, NotFoundException } from '@nestjs/common';
import { eq, and, or, ne, sql } from 'drizzle-orm';
import { eq, and, or, sql } from 'drizzle-orm';
import { DATABASE_CONNECTION } from '../db/database.module';
import { Database } from '../db/connection';
import { contacts } from '../db/schema';
@ -21,116 +21,268 @@ export interface MergeResult {
export class DuplicatesService {
constructor(@Inject(DATABASE_CONNECTION) private db: Database) {}
/** Maximum number of duplicate groups to return per match type */
private static readonly MAX_GROUPS_PER_TYPE = 50;
/**
* Find all potential duplicate groups for a user
* Find all potential duplicate groups for a user.
*
* Uses database-level grouping for email, phone, and name matches
* instead of loading all contacts into memory.
*/
async findDuplicates(userId: string): Promise<DuplicateGroup[]> {
const duplicateGroups: DuplicateGroup[] = [];
// Get all contacts for this user
const allContacts = await this.db
.select()
.from(contacts)
.where(and(eq(contacts.userId, userId), eq(contacts.isArchived, false)));
// Build lookup maps
const emailMap = new Map<string, Contact[]>();
const phoneMap = new Map<string, Contact[]>();
const nameMap = new Map<string, Contact[]>();
const processedIds = new Set<string>();
for (const contact of allContacts) {
// Group by email
if (contact.email) {
const normalizedEmail = this.normalizeEmail(contact.email);
if (!emailMap.has(normalizedEmail)) {
emailMap.set(normalizedEmail, []);
}
emailMap.get(normalizedEmail)!.push(contact);
// 1. Find email duplicates via SQL grouping
const emailDups = await this.findEmailDuplicates(userId);
for (const group of emailDups) {
const ids = group.contacts
.map((c) => c.id)
.sort()
.join('-');
if (!processedIds.has(ids)) {
processedIds.add(ids);
duplicateGroups.push(group);
}
}
// Group by phone (check both phone and mobile)
// 2. Find phone duplicates via SQL grouping
const phoneDups = await this.findPhoneDuplicates(userId);
for (const group of phoneDups) {
const ids = group.contacts
.map((c) => c.id)
.sort()
.join('-');
if (!processedIds.has(ids)) {
processedIds.add(ids);
duplicateGroups.push(group);
}
}
// 3. Find name duplicates via SQL grouping
const nameDups = await this.findNameDuplicates(userId);
for (const group of nameDups) {
const ids = group.contacts
.map((c) => c.id)
.sort()
.join('-');
if (!processedIds.has(ids)) {
processedIds.add(ids);
duplicateGroups.push(group);
}
}
return duplicateGroups;
}
/**
* Find contacts with duplicate emails using database-level grouping.
* Only fetches contacts that actually have duplicates.
*/
private async findEmailDuplicates(userId: string): Promise<DuplicateGroup[]> {
  // Step 1: let the database find normalized (lowercased, trimmed) emails
  // that appear more than once — avoids loading the whole contact table.
  const dupEmails = await this.db
    .select({
      normalizedEmail: sql<string>`LOWER(TRIM(${contacts.email}))`.as('normalized_email'),
    })
    .from(contacts)
    .where(
      and(
        eq(contacts.userId, userId),
        eq(contacts.isArchived, false),
        sql`${contacts.email} IS NOT NULL AND TRIM(${contacts.email}) != ''`
      )
    )
    .groupBy(sql`LOWER(TRIM(${contacts.email}))`)
    .having(sql`COUNT(*) > 1`)
    // Cap the number of duplicate groups considered in one pass.
    .limit(DuplicatesService.MAX_GROUPS_PER_TYPE);
  if (dupEmails.length === 0) return [];
  // Step 2: fetch only the contacts whose normalized email is in the
  // duplicate set.
  // NOTE(review): `= ANY(${emailValues})` relies on the driver binding the
  // string[] parameter as a Postgres array — confirm with the drizzle/
  // postgres driver in use.
  const emailValues = dupEmails.map((d) => d.normalizedEmail);
  const dupContacts = await this.db
    .select()
    .from(contacts)
    .where(
      and(
        eq(contacts.userId, userId),
        eq(contacts.isArchived, false),
        sql`LOWER(TRIM(${contacts.email})) = ANY(${emailValues})`
      )
    );
  // Step 3: group the fetched rows in memory by normalized email.
  // NOTE(review): assumes this.normalizeEmail() matches the SQL
  // LOWER(TRIM(...)) normalization exactly — verify, otherwise groups can
  // split and be dropped by the length filter below.
  const emailMap = new Map<string, Contact[]>();
  for (const contact of dupContacts) {
    // email is non-null here because the WHERE clause excluded NULL/blank.
    const key = this.normalizeEmail(contact.email!);
    if (!emailMap.has(key)) emailMap.set(key, []);
    emailMap.get(key)!.push(contact);
  }
  // Build stable group ids from the sorted member ids so the same group
  // always gets the same id across runs.
  return Array.from(emailMap.entries())
    .filter(([, list]) => list.length > 1)
    .map(([email, contactList]) => ({
      id: `email-${contactList
        .map((c) => c.id)
        .sort()
        .join('-')}`,
      contacts: contactList,
      matchType: 'email' as const,
      matchValue: email,
    }));
}
/**
* Find contacts with duplicate phone numbers using database-level grouping.
* Normalizes phone numbers by stripping non-digit characters (preserving leading +).
*/
private async findPhoneDuplicates(userId: string): Promise<DuplicateGroup[]> {
// Normalize phone: strip non-digits but preserve leading +
// We check both phone and mobile columns by unioning them
const phoneNormExpr = sql`
CASE
WHEN LEFT(phone_val, 1) = '+' THEN '+' || REGEXP_REPLACE(phone_val, '[^0-9]', '', 'g')
ELSE REGEXP_REPLACE(phone_val, '[^0-9]', '', 'g')
END
`;
// Use a CTE to union phone and mobile into a single column, then group
const dupPhones: { normalizedPhone: string }[] = await this.db.execute(sql`
WITH phone_values AS (
SELECT id, user_id, is_archived, phone AS phone_val FROM ${contacts}
WHERE user_id = ${userId} AND is_archived = false AND phone IS NOT NULL AND TRIM(phone) != ''
UNION ALL
SELECT id, user_id, is_archived, mobile AS phone_val FROM ${contacts}
WHERE user_id = ${userId} AND is_archived = false AND mobile IS NOT NULL AND TRIM(mobile) != ''
),
normalized AS (
SELECT
id,
${phoneNormExpr} AS normalized_phone
FROM phone_values
WHERE LENGTH(REGEXP_REPLACE(phone_val, '[^0-9]', '', 'g')) >= 6
)
SELECT normalized_phone AS "normalizedPhone"
FROM normalized
GROUP BY normalized_phone
HAVING COUNT(DISTINCT id) > 1
LIMIT ${DuplicatesService.MAX_GROUPS_PER_TYPE}
`);
if (dupPhones.length === 0) return [];
// Fetch contacts that have any of these duplicate phone numbers
const phoneValues = dupPhones.map((d) => d.normalizedPhone);
const dupContacts = await this.db
.select()
.from(contacts)
.where(
and(
eq(contacts.userId, userId),
eq(contacts.isArchived, false),
sql`(
(${contacts.phone} IS NOT NULL AND
CASE WHEN LEFT(${contacts.phone}, 1) = '+' THEN '+' || REGEXP_REPLACE(${contacts.phone}, '[^0-9]', '', 'g')
ELSE REGEXP_REPLACE(${contacts.phone}, '[^0-9]', '', 'g') END = ANY(${phoneValues}))
OR
(${contacts.mobile} IS NOT NULL AND
CASE WHEN LEFT(${contacts.mobile}, 1) = '+' THEN '+' || REGEXP_REPLACE(${contacts.mobile}, '[^0-9]', '', 'g')
ELSE REGEXP_REPLACE(${contacts.mobile}, '[^0-9]', '', 'g') END = ANY(${phoneValues}))
)`
)
);
// Group contacts by their matching normalized phone number
const phoneMap = new Map<string, Contact[]>();
for (const contact of dupContacts) {
for (const phone of [contact.phone, contact.mobile].filter(Boolean) as string[]) {
const normalizedPhone = this.normalizePhone(phone);
if (normalizedPhone.length >= 6) {
if (!phoneMap.has(normalizedPhone)) {
phoneMap.set(normalizedPhone, []);
}
const existing = phoneMap.get(normalizedPhone)!;
const normalized = this.normalizePhone(phone);
if (normalized.length >= 6 && phoneValues.includes(normalized)) {
if (!phoneMap.has(normalized)) phoneMap.set(normalized, []);
const existing = phoneMap.get(normalized)!;
if (!existing.some((c) => c.id === contact.id)) {
existing.push(contact);
}
}
}
}
// Group by name (first + last)
return Array.from(phoneMap.entries())
.filter(([, list]) => list.length > 1)
.map(([phone, contactList]) => ({
id: `phone-${contactList
.map((c) => c.id)
.sort()
.join('-')}`,
contacts: contactList,
matchType: 'phone' as const,
matchValue: phone,
}));
}
/**
* Find contacts with duplicate names using database-level grouping.
* Only considers contacts that have both first and last names.
*/
private async findNameDuplicates(userId: string): Promise<DuplicateGroup[]> {
const dupNames = await this.db
.select({
normalizedName:
sql<string>`LOWER(TRIM(${contacts.firstName})) || ' ' || LOWER(TRIM(${contacts.lastName}))`.as(
'normalized_name'
),
})
.from(contacts)
.where(
and(
eq(contacts.userId, userId),
eq(contacts.isArchived, false),
sql`${contacts.firstName} IS NOT NULL AND TRIM(${contacts.firstName}) != ''`,
sql`${contacts.lastName} IS NOT NULL AND TRIM(${contacts.lastName}) != ''`
)
)
.groupBy(sql`LOWER(TRIM(${contacts.firstName})) || ' ' || LOWER(TRIM(${contacts.lastName}))`)
.having(sql`COUNT(*) > 1`)
.limit(DuplicatesService.MAX_GROUPS_PER_TYPE);
if (dupNames.length === 0) return [];
// Fetch the actual contacts for those duplicate names
const nameValues = dupNames.map((d) => d.normalizedName);
const dupContacts = await this.db
.select()
.from(contacts)
.where(
and(
eq(contacts.userId, userId),
eq(contacts.isArchived, false),
sql`LOWER(TRIM(${contacts.firstName})) || ' ' || LOWER(TRIM(${contacts.lastName})) = ANY(${nameValues})`
)
);
// Group by normalized name
const nameMap = new Map<string, Contact[]>();
for (const contact of dupContacts) {
if (contact.firstName && contact.lastName) {
const normalizedName = this.normalizeName(contact.firstName, contact.lastName);
if (!nameMap.has(normalizedName)) {
nameMap.set(normalizedName, []);
}
nameMap.get(normalizedName)!.push(contact);
const key = this.normalizeName(contact.firstName, contact.lastName);
if (!nameMap.has(key)) nameMap.set(key, []);
nameMap.get(key)!.push(contact);
}
}
// Create duplicate groups from email matches
for (const [email, contactList] of emailMap) {
if (contactList.length > 1) {
const ids = contactList
return Array.from(nameMap.entries())
.filter(([, list]) => list.length > 1)
.map(([name, contactList]) => ({
id: `name-${contactList
.map((c) => c.id)
.sort()
.join('-');
if (!processedIds.has(ids)) {
processedIds.add(ids);
duplicateGroups.push({
id: `email-${ids}`,
contacts: contactList,
matchType: 'email',
matchValue: email,
});
}
}
}
// Create duplicate groups from phone matches
for (const [phone, contactList] of phoneMap) {
if (contactList.length > 1) {
const ids = contactList
.map((c) => c.id)
.sort()
.join('-');
if (!processedIds.has(ids)) {
processedIds.add(ids);
duplicateGroups.push({
id: `phone-${ids}`,
contacts: contactList,
matchType: 'phone',
matchValue: phone,
});
}
}
}
// Create duplicate groups from name matches (only if not already matched by email/phone)
for (const [name, contactList] of nameMap) {
if (contactList.length > 1) {
const ids = contactList
.map((c) => c.id)
.sort()
.join('-');
if (!processedIds.has(ids)) {
processedIds.add(ids);
duplicateGroups.push({
id: `name-${ids}`,
contacts: contactList,
matchType: 'name',
matchValue: name,
});
}
}
}
return duplicateGroups;
.join('-')}`,
contacts: contactList,
matchType: 'name' as const,
matchValue: name,
}));
}
/**

View file

@ -10,6 +10,8 @@ import {
validateFileSize,
validateFileExtension,
IMAGE_EXTENSIONS,
getStorageConfig,
BUCKETS,
} from '@manacore/shared-storage';
const MAX_FILE_SIZE = 5 * 1024 * 1024; // 5MB
@ -77,8 +79,9 @@ export class PhotoService {
public: true,
});
// Generate the URL (for MinIO, construct it manually)
const photoUrl = `http://localhost:9000/contacts-storage/${key}`;
// Generate the URL from S3 endpoint configuration
const { endpoint } = getStorageConfig();
const photoUrl = `${endpoint}/${BUCKETS.CONTACTS}/${key}`;
// Update contact with photo URL
await this.db
@ -125,7 +128,7 @@ export class PhotoService {
}
private extractKeyFromUrl(url: string): string | null {
// Extract key from URLs like http://localhost:9000/contacts-storage/users/xxx/file.jpg
// Extract key from URLs like {endpoint}/contacts-storage/users/xxx/file.jpg
const match = url.match(/contacts-storage\/(.+)$/);
return match ? match[1] : null;
}

View file

@ -1,4 +1,4 @@
import { pgTable, uuid, text, timestamp, varchar, index } from 'drizzle-orm/pg-core';
import { pgTable, uuid, text, timestamp, varchar, index, unique } from 'drizzle-orm/pg-core';
export const labels = pgTable(
'labels',
@ -12,6 +12,7 @@ export const labels = pgTable(
},
(table) => ({
userIdx: index('labels_user_idx').on(table.userId),
uniqueUserName: unique('labels_user_name_unique').on(table.userId, table.name),
})
);

View file

@ -30,6 +30,12 @@ export const reminders = pgTable(
taskIdx: index('reminders_task_idx').on(table.taskId),
userIdx: index('reminders_user_idx').on(table.userId),
pendingIdx: index('reminders_pending_idx').on(table.status, table.reminderTime),
// Composite indexes for user-scoped queries
userPendingIdx: index('reminders_user_pending_idx').on(
table.userId,
table.status,
table.reminderTime
),
})
);

View file

@ -44,7 +44,7 @@ export const tasks = pgTable(
'tasks',
{
id: uuid('id').primaryKey().defaultRandom(),
projectId: uuid('project_id').references(() => projects.id, { onDelete: 'set null' }),
projectId: uuid('project_id').references(() => projects.id, { onDelete: 'cascade' }),
userId: text('user_id').notNull(),
parentTaskId: uuid('parent_task_id'),
@ -101,6 +101,15 @@ export const tasks = pgTable(
parentIdx: index('tasks_parent_idx').on(table.parentTaskId),
orderIdx: index('tasks_order_idx').on(table.projectId, table.order),
columnIdx: index('tasks_column_idx').on(table.columnId, table.columnOrder),
// Composite indexes for common query patterns
userProjectIdx: index('tasks_user_project_idx').on(table.userId, table.projectId),
userStatusIdx: index('tasks_user_status_idx').on(table.userId, table.status),
userDueDateIdx: index('tasks_user_due_date_idx').on(table.userId, table.dueDate),
userCompletedIdx: index('tasks_user_completed_idx').on(table.userId, table.isCompleted),
userScheduledDateIdx: index('tasks_user_scheduled_date_idx').on(
table.userId,
table.scheduledDate
),
})
);

View file

@ -84,22 +84,29 @@ export class KanbanService {
return existingGlobal;
}
// Create global board
const [globalBoard] = await this.db
.insert(kanbanBoards)
.values({
// Create global board and default columns atomically
return this.db.transaction(async (tx) => {
const [globalBoard] = await tx
.insert(kanbanBoards)
.values({
userId,
name: 'Alle Aufgaben',
color: '#8b5cf6',
order: 0,
isGlobal: true,
})
.returning();
// Create default columns inline (can't call initializeDefaultColumns since it uses this.db)
const columnsToCreate: NewKanbanColumn[] = DEFAULT_COLUMNS.map((col) => ({
...col,
userId,
name: 'Alle Aufgaben',
color: '#8b5cf6',
order: 0,
isGlobal: true,
})
.returning();
boardId: globalBoard.id,
}));
await tx.insert(kanbanColumns).values(columnsToCreate);
// Initialize default columns for the global board
await this.initializeDefaultColumns(globalBoard.id, userId);
return globalBoard;
return globalBoard;
});
}
async createBoard(userId: string, dto: CreateBoardDto): Promise<KanbanBoard> {
@ -117,12 +124,19 @@ export class KanbanService {
isGlobal: false,
};
const [created] = await this.db.insert(kanbanBoards).values(newBoard).returning();
// Create board and default columns atomically
return this.db.transaction(async (tx) => {
const [created] = await tx.insert(kanbanBoards).values(newBoard).returning();
// Initialize default columns for the new board
await this.initializeDefaultColumns(created.id, userId);
const columnsToCreate: NewKanbanColumn[] = DEFAULT_COLUMNS.map((col) => ({
...col,
userId,
boardId: created.id,
}));
await tx.insert(kanbanColumns).values(columnsToCreate);
return created;
return created;
});
}
async updateBoard(id: string, userId: string, dto: UpdateBoardDto): Promise<KanbanBoard> {
@ -152,37 +166,41 @@ export class KanbanService {
const globalColumns = await this.findAllColumns(globalBoard.id, userId);
const firstGlobalColumn = globalColumns[0];
if (firstGlobalColumn) {
// Get all columns for this board
const boardColumns = await this.findAllColumns(id, userId);
// Move tasks and delete board atomically
await this.db.transaction(async (tx) => {
if (firstGlobalColumn) {
// Get all columns for this board
const boardColumns = await this.findAllColumns(id, userId);
// Move tasks from board columns to first global column
for (const column of boardColumns) {
await this.db
.update(tasks)
.set({
columnId: firstGlobalColumn.id,
updatedAt: new Date(),
})
.where(eq(tasks.columnId, column.id));
// Move tasks from board columns to first global column
for (const column of boardColumns) {
await tx
.update(tasks)
.set({
columnId: firstGlobalColumn.id,
updatedAt: new Date(),
})
.where(eq(tasks.columnId, column.id));
}
}
}
// Delete the board (columns will cascade delete)
await this.db
.delete(kanbanBoards)
.where(and(eq(kanbanBoards.id, id), eq(kanbanBoards.userId, userId)));
// Delete the board (columns will cascade delete)
await tx
.delete(kanbanBoards)
.where(and(eq(kanbanBoards.id, id), eq(kanbanBoards.userId, userId)));
});
}
async reorderBoards(userId: string, boardIds: string[]): Promise<KanbanBoard[]> {
const updates = boardIds.map((id, index) =>
this.db
.update(kanbanBoards)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(kanbanBoards.id, id), eq(kanbanBoards.userId, userId)))
);
await Promise.all(updates);
// Update order for each board atomically
await this.db.transaction(async (tx) => {
for (const [index, id] of boardIds.entries()) {
await tx
.update(kanbanBoards)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(kanbanBoards.id, id), eq(kanbanBoards.userId, userId)));
}
});
return this.findAllBoards(userId);
}
@ -262,30 +280,34 @@ export class KanbanService {
throw new BadRequestException('Cannot delete the last column');
}
// Move all tasks from this column to the first column
await this.db
.update(tasks)
.set({
columnId: firstColumn.id,
updatedAt: new Date(),
})
.where(eq(tasks.columnId, id));
// Move tasks and delete column atomically
await this.db.transaction(async (tx) => {
// Move all tasks from this column to the first column
await tx
.update(tasks)
.set({
columnId: firstColumn.id,
updatedAt: new Date(),
})
.where(eq(tasks.columnId, id));
// Delete the column
await this.db
.delete(kanbanColumns)
.where(and(eq(kanbanColumns.id, id), eq(kanbanColumns.userId, userId)));
// Delete the column
await tx
.delete(kanbanColumns)
.where(and(eq(kanbanColumns.id, id), eq(kanbanColumns.userId, userId)));
});
}
async reorderColumns(userId: string, columnIds: string[]): Promise<KanbanColumn[]> {
const updates = columnIds.map((id, index) =>
this.db
.update(kanbanColumns)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(kanbanColumns.id, id), eq(kanbanColumns.userId, userId)))
);
await Promise.all(updates);
// Update order for each column atomically
await this.db.transaction(async (tx) => {
for (const [index, id] of columnIds.entries()) {
await tx
.update(kanbanColumns)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(kanbanColumns.id, id), eq(kanbanColumns.userId, userId)));
}
});
// Determine boardId from first column
const firstColumn = await this.findColumnById(columnIds[0], userId);
@ -436,19 +458,19 @@ export class KanbanService {
// Verify column exists
await this.findColumnByIdOrThrow(columnId, userId);
// Update order for each task
const updates = taskIds.map((id, index) =>
this.db
.update(tasks)
.set({
columnId,
columnOrder: index,
updatedAt: new Date(),
})
.where(and(eq(tasks.id, id), eq(tasks.userId, userId)))
);
await Promise.all(updates);
// Update order for each task atomically
await this.db.transaction(async (tx) => {
for (const [index, id] of taskIds.entries()) {
await tx
.update(tasks)
.set({
columnId,
columnOrder: index,
updatedAt: new Date(),
})
.where(and(eq(tasks.id, id), eq(tasks.userId, userId)));
}
});
// Return updated tasks
return this.db.query.tasks.findMany({

View file

@ -3,7 +3,7 @@ import { NotFoundException } from '@nestjs/common';
import { ProjectService } from '../project.service';
import { DATABASE_CONNECTION } from '../../db/database.module';
const mockDb = {
const mockDb: any = {
query: {
projects: {
findMany: jest.fn(),
@ -17,8 +17,12 @@ const mockDb = {
set: jest.fn().mockReturnThis(),
where: jest.fn().mockReturnThis(),
returning: jest.fn(),
transaction: jest.fn(),
};
// Make transaction execute callback with mockDb as tx
mockDb.transaction.mockImplementation((cb: any) => cb(mockDb));
describe('ProjectService', () => {
let service: ProjectService;

View file

@ -39,11 +39,6 @@ export class ProjectService {
// If this is the first project, make it default
const isDefault = dto.isDefault ?? existingProjects.length === 0;
// If this project is default, clear other defaults
if (isDefault) {
await this.clearDefaultProject(userId);
}
const newProject: NewProject = {
userId,
name: dto.name,
@ -55,26 +50,42 @@ export class ProjectService {
settings: dto.settings,
};
const [created] = await this.db.insert(projects).values(newProject).returning();
// Clear default and insert atomically to prevent multiple defaults
const [created] = await this.db.transaction(async (tx) => {
if (isDefault) {
await tx
.update(projects)
.set({ isDefault: false, updatedAt: new Date() })
.where(and(eq(projects.userId, userId), eq(projects.isDefault, true)));
}
return tx.insert(projects).values(newProject).returning();
});
return created;
}
async update(id: string, userId: string, dto: UpdateProjectDto): Promise<Project> {
await this.findByIdOrThrow(id, userId);
// If setting as default, clear other defaults first
if (dto.isDefault) {
await this.clearDefaultProject(userId);
}
// Clear default and update atomically to prevent multiple defaults
const [updated] = await this.db.transaction(async (tx) => {
if (dto.isDefault) {
await tx
.update(projects)
.set({ isDefault: false, updatedAt: new Date() })
.where(and(eq(projects.userId, userId), eq(projects.isDefault, true)));
}
const [updated] = await this.db
.update(projects)
.set({
...dto,
updatedAt: new Date(),
})
.where(and(eq(projects.id, id), eq(projects.userId, userId)))
.returning();
return tx
.update(projects)
.set({
...dto,
updatedAt: new Date(),
})
.where(and(eq(projects.id, id), eq(projects.userId, userId)))
.returning();
});
return updated;
}
@ -99,15 +110,15 @@ export class ProjectService {
}
async reorder(userId: string, projectIds: string[]): Promise<Project[]> {
// Update order for each project
const updates = projectIds.map((id, index) =>
this.db
.update(projects)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(projects.id, id), eq(projects.userId, userId)))
);
await Promise.all(updates);
// Update order for each project atomically
await this.db.transaction(async (tx) => {
for (const [index, id] of projectIds.entries()) {
await tx
.update(projects)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(projects.id, id), eq(projects.userId, userId)));
}
});
return this.findAll(userId);
}
@ -130,11 +141,4 @@ export class ProjectService {
isDefault: true,
});
}
private async clearDefaultProject(userId: string): Promise<void> {
await this.db
.update(projects)
.set({ isDefault: false, updatedAt: new Date() })
.where(and(eq(projects.userId, userId), eq(projects.isDefault, true)));
}
}

View file

@ -50,7 +50,11 @@ const mockDb = {
set: jest.fn().mockReturnThis(),
where: jest.fn().mockReturnThis(),
returning: jest.fn(),
};
transaction: jest.fn(),
} as any;
// Make transaction execute callback with mockDb as tx
mockDb.transaction.mockImplementation((cb: any) => cb(mockDb));
// Mock ProjectService
const mockProjectService = {

View file

@ -1,4 +1,4 @@
import { Injectable, Inject, NotFoundException } from '@nestjs/common';
import { Injectable, Inject, NotFoundException, Logger } from '@nestjs/common';
import { eq, and, or, gte, lte, ilike, asc, desc, isNull, SQL, sql, inArray } from 'drizzle-orm';
import { RRule, RRuleSet, rrulestr } from 'rrule';
import { DATABASE_CONNECTION } from '../db/database.module';
@ -12,6 +12,8 @@ type TaskWithLabels = Task & { labels: (typeof labels.$inferSelect)[] };
@Injectable()
export class TaskService {
private readonly logger = new Logger(TaskService.name);
constructor(
@Inject(DATABASE_CONNECTION) private db: Database,
private projectService: ProjectService
@ -264,12 +266,15 @@ export class TaskService {
// Reject if too many occurrences (prevents hourly/minutely abuse)
if (occurrences.length >= maxOccurrences) {
console.warn(`RRULE rejected: too many occurrences (${occurrences.length})`);
this.logger.warn(`RRULE rejected: too many occurrences (${occurrences.length})`);
return false;
}
return true;
} catch {
} catch (error) {
this.logger.error(
`RRULE validation failed for rule "${rruleString}": ${error instanceof Error ? error.message : error}`
);
return false;
}
}
@ -286,7 +291,9 @@ export class TaskService {
// Validate RRULE complexity before parsing
if (!this.validateRRule(task.recurrenceRule)) {
console.warn(`Invalid or too complex RRULE for task ${task.id}`);
this.logger.warn(
`Invalid or too complex RRULE for task ${task.id}: "${task.recurrenceRule}"`
);
return null;
}
@ -341,22 +348,29 @@ export class TaskService {
columnOrder: task.columnOrder,
};
const [created] = await this.db.insert(tasks).values(newTask).returning();
// Create task and copy labels atomically
const created = await this.db.transaction(async (tx) => {
const [newCreated] = await tx.insert(tasks).values(newTask).returning();
// Copy labels from original task
if (task.labels && task.labels.length > 0) {
await this.db.insert(taskLabels).values(
task.labels.map((label) => ({
taskId: created.id,
labelId: label.id,
}))
);
}
// Copy labels from original task
if (task.labels && task.labels.length > 0) {
await tx.insert(taskLabels).values(
task.labels.map((label) => ({
taskId: newCreated.id,
labelId: label.id,
}))
);
}
return newCreated;
});
return this.loadTaskLabels(created);
} catch (error) {
// If RRULE parsing fails, log and return null
console.error('Failed to parse recurrence rule:', error);
this.logger.error(
`Failed to parse recurrence rule for task ${task.id}: ${error instanceof Error ? error.message : error}`
);
return null;
}
}
@ -412,18 +426,19 @@ export class TaskService {
async updateTaskLabels(taskId: string, userId: string, labelIds: string[]): Promise<void> {
await this.findByIdOrThrow(taskId, userId);
// Delete existing labels
await this.db.delete(taskLabels).where(eq(taskLabels.taskId, taskId));
// Delete existing labels and insert new ones atomically
await this.db.transaction(async (tx) => {
await tx.delete(taskLabels).where(eq(taskLabels.taskId, taskId));
// Insert new labels
if (labelIds.length > 0) {
await this.db.insert(taskLabels).values(
labelIds.map((labelId) => ({
taskId,
labelId,
}))
);
}
if (labelIds.length > 0) {
await tx.insert(taskLabels).values(
labelIds.map((labelId) => ({
taskId,
labelId,
}))
);
}
});
}
async getInboxTasks(userId: string): Promise<TaskWithLabels[]> {
@ -545,15 +560,15 @@ export class TaskService {
taskIds: string[],
projectId?: string | null
): Promise<TaskWithLabels[]> {
// Update order for each task
const updates = taskIds.map((id, index) =>
this.db
.update(tasks)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(tasks.id, id), eq(tasks.userId, userId)))
);
await Promise.all(updates);
// Update order for each task atomically
await this.db.transaction(async (tx) => {
for (const [index, id] of taskIds.entries()) {
await tx
.update(tasks)
.set({ order: index, updatedAt: new Date() })
.where(and(eq(tasks.id, id), eq(tasks.userId, userId)));
}
});
return this.findAll(userId, { projectId: projectId ?? undefined });
}