fix(sync): perform archive flush synchronously to prevent DB lock error

Previously, flushYoungToOld was dispatched as an action and handled by
an NgRx effect. This caused a race condition during finish day:
1. Action dispatched, effect queued
2. Method returned, sync started, DB locked
3. Effect ran, tried to write, blocked by DB lock

Fix follows the same pattern as moveToArchive:
- Perform the flush synchronously in ArchiveService before dispatching
- Dispatch action for op-log capture only (syncs to other clients)
- Handler skips local operations (only runs for remote)

Also adds comprehensive unit tests and an e2e test for this scenario.
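For illustration, a minimal self-contained TypeScript sketch of the pattern (the names here - db, opLog, dispatch, finishDayFlush, handleFlushYoungToOld and the action type string - are hypothetical stand-ins, not the app's real APIs; the actual implementation is in ArchiveService and ArchiveOperationHandler in the diff below):

// Sketch only - hypothetical stand-ins for the real persistence layer and store.
type PersistentAction = {
  type: string;
  timestamp: number;
  meta?: { isPersistent: boolean; isRemote: boolean };
};

const db = {
  locked: false,
  async flushYoungToOld(opts?: { isIgnoreDBLock?: boolean }): Promise<void> {
    if (this.locked && !opts?.isIgnoreDBLock) {
      throw new Error('Attempting to write DB while locked');
    }
    // ...move all entries from archiveYoung to archiveOld...
  },
};

const opLog: PersistentAction[] = [];
const dispatch = (a: PersistentAction): void => {
  opLog.push(a); // op-log capture: other clients replay this as a remote op
};

// After the fix: flush synchronously, then dispatch for op-log capture only.
const finishDayFlush = async (): Promise<void> => {
  await db.flushYoungToOld(); // completes before sync can lock the DB
  dispatch({
    type: '[Archive] flushYoungToOld', // illustrative action type
    timestamp: Date.now(),
    meta: { isPersistent: true, isRemote: false },
  });
};

// Handler: replays the flush only for remote operations.
const handleFlushYoungToOld = async (action: PersistentAction): Promise<void> => {
  if (!action.meta?.isRemote) {
    return; // local: already written synchronously before dispatch
  }
  await db.flushYoungToOld({ isIgnoreDBLock: true }); // DB is locked during sync
};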
Johannes Millan 2025-12-17 16:40:16 +01:00
parent 02d80249af
commit 5992fad51b
8 changed files with 405 additions and 45 deletions

docker-compose.e2e.yaml (new file)

@@ -0,0 +1,6 @@
# Docker Compose override for E2E tests
# Uses port 1901 so tests can run while dev server uses 1900
services:
supersync:
ports:
- '1901:1900'


@@ -1,4 +1,4 @@
- import { test as base, expect } from '@playwright/test';
+ import { test as base, expect, type ConsoleMessage } from '@playwright/test';
import {
createTestUser,
getSuperSyncConfig,
@@ -16,6 +16,15 @@ const generateTestRunId = (workerIndex: number): string => {
return `${Date.now()}-${workerIndex}`;
};
/**
* Checks if a console message indicates a DB lock error.
* These errors would indicate that the synchronous-flush fix is broken.
*/
const isDbLockError = (msg: ConsoleMessage): boolean => {
const text = msg.text();
return text.includes('Attempting to write DB') && text.includes('while locked');
};
base.describe('@supersync Daily Summary Sync', () => {
let serverHealthy: boolean | null = null;
@@ -175,4 +184,85 @@ base.describe('@supersync Daily Summary Sync', () => {
}
},
);
/**
* Scenario: Finish day completes without DB lock errors
*
* This tests the race condition fix where the archive flush
* (flushYoungToOld) was being executed by an NgRx effect AFTER
* sync started and locked the database. The fix ensures the flush
* happens synchronously BEFORE the action is dispatched.
*
* Actions:
* 1. Client creates multiple tasks and marks them done.
* 2. Client clicks finish day button.
* 3. Client completes daily summary (archives tasks, triggers sync).
* 4. Verify NO "DB lock" console errors occurred.
*/
base(
'Finish day completes without DB lock errors',
async ({ browser, baseURL }, testInfo) => {
testInfo.setTimeout(60000);
const testRunId = generateTestRunId(testInfo.workerIndex);
const uniqueId = Date.now();
let client: SimulatedE2EClient | null = null;
const dbLockErrors: string[] = [];
try {
const user = await createTestUser(testRunId);
const syncConfig = getSuperSyncConfig(user);
// Create client and set up sync
client = await createSimulatedClient(browser, baseURL!, 'A', testRunId);
await client.sync.setupSuperSync(syncConfig);
// Monitor for DB lock errors
client.page.on('console', (msg) => {
if (isDbLockError(msg)) {
dbLockErrors.push(msg.text());
}
});
// Create multiple tasks to increase the chance of triggering the flush
const tasks = [`Task1-${uniqueId}`, `Task2-${uniqueId}`, `Task3-${uniqueId}`];
for (const taskName of tasks) {
await client.workView.addTask(taskName);
}
// Mark all tasks as done
for (const taskName of tasks) {
const taskLocator = client.page.locator(`task:has-text("${taskName}")`);
await taskLocator.hover();
await taskLocator.locator('.task-done-btn').click();
}
// Click finish day
const finishDayBtn = client.page.locator('.e2e-finish-day');
await finishDayBtn.click();
// Wait for Daily Summary
await client.page.waitForURL(/daily-summary/);
// Click "Save and go home" to archive and trigger sync
const saveAndGoHomeBtn = client.page.locator(
'daily-summary button[mat-flat-button]:has(mat-icon:has-text("wb_sunny"))',
);
await saveAndGoHomeBtn.waitFor({ state: 'visible' });
await saveAndGoHomeBtn.click();
// Wait for navigation back to work view
await client.page.waitForURL(/(active\/tasks|tag\/TODAY\/tasks)/);
// Wait a moment for any async effects to complete
await client.page.waitForTimeout(1000);
// Verify no DB lock errors occurred
expect(dbLockErrors).toEqual([]);
console.log('✓ Finish day completed without DB lock errors');
} finally {
if (client) await closeClient(client);
}
},
);
});


@@ -6,8 +6,10 @@ import { waitForAppReady } from './waits';
/**
* SuperSync server URL for E2E tests.
* Server must be running with TEST_MODE=true.
* Defaults to port 1901 for e2e tests (dev server uses 1900).
*/
- export const SUPERSYNC_BASE_URL = 'http://localhost:1900';
+ export const SUPERSYNC_BASE_URL =
+   process.env.SUPERSYNC_E2E_URL || 'http://localhost:1901';
/**
* Test user credentials returned from the server.


@@ -59,9 +59,9 @@
"e2e:show-report": "npx playwright show-report .tmp/e2e-test-results/playwright-report",
"e2e:report": "PLAYWRIGHT_HTML_REPORT=1 npx playwright test --config e2e/playwright.config.ts",
"e2e:webdav": "docker compose up -d webdav && ./scripts/wait-for-webdav.sh && npm run e2e -- --grep webdav; docker compose down",
"e2e:supersync": "docker compose up -d --build supersync && echo 'Waiting for SuperSync server...' && until curl -s http://localhost:1900/health > /dev/null 2>&1; do sleep 1; done && echo 'Server ready!' && npm run e2e -- --grep @supersync; docker compose down supersync",
"e2e:supersync:file": "docker compose up -d --build supersync && echo 'Waiting for SuperSync server...' && until curl -s http://localhost:1900/health > /dev/null 2>&1; do sleep 1; done && echo 'Server ready!' && E2E_VERBOSE=true npx playwright test --config e2e/playwright.config.ts --reporter=list",
"e2e:supersync:down": "docker compose down supersync",
"e2e:supersync": "docker compose -f docker-compose.yaml -f docker-compose.e2e.yaml up -d --build supersync && echo 'Waiting for SuperSync server...' && until curl -s http://localhost:1901/health > /dev/null 2>&1; do sleep 1; done && echo 'Server ready!' && npm run e2e -- --grep @supersync; docker compose -f docker-compose.yaml -f docker-compose.e2e.yaml down supersync",
"e2e:supersync:file": "docker compose -f docker-compose.yaml -f docker-compose.e2e.yaml up -d --build supersync && echo 'Waiting for SuperSync server...' && until curl -s http://localhost:1901/health > /dev/null 2>&1; do sleep 1; done && echo 'Server ready!' && E2E_VERBOSE=true npx playwright test --config e2e/playwright.config.ts --reporter=list",
"e2e:supersync:down": "docker compose -f docker-compose.yaml -f docker-compose.e2e.yaml down supersync",
"electron": "NODE_ENV=PROD electron .",
"electron:build": "tsc -p electron/tsconfig.electron.json",
"electron:watch": "tsc -p electron/tsconfig.electron.json --watch",


@@ -1103,7 +1103,7 @@ describe('ArchiveOperationHandler', () => {
});
describe('flushYoungToOld', () => {
- it('should NOT pass isIgnoreDBLock for local operations', async () => {
+ it('should skip local operations (flush done by ArchiveService before dispatch)', async () => {
const timestamp = Date.now();
const action = {
type: flushYoungToOld.type,
@@ -1111,20 +1111,19 @@
meta: { isPersistent: true, isRemote: false },
} as unknown as PersistentAction;
- try {
-   await service.handleOperation(action);
- } catch {
-   // Expected - sort function returns undefined in tests
- }
+ // Reset spies to verify nothing is called
+ (mockPfapiService.m.archiveYoung.load as jasmine.Spy).calls.reset();
+ (mockPfapiService.m.archiveYoung.save as jasmine.Spy).calls.reset();
+ (mockPfapiService.m.archiveOld.load as jasmine.Spy).calls.reset();
+ (mockPfapiService.m.archiveOld.save as jasmine.Spy).calls.reset();
- // Verify save was called without isIgnoreDBLock: true
- const saveCall = (
-   mockPfapiService.m.archiveYoung.save as jasmine.Spy
- ).calls.mostRecent();
- if (saveCall) {
-   const options = saveCall.args[1];
-   expect(options.isIgnoreDBLock).toBeUndefined();
- }
+ await service.handleOperation(action);
+ // Verify NO archive operations were performed for local actions
+ expect(mockPfapiService.m.archiveYoung.load).not.toHaveBeenCalled();
+ expect(mockPfapiService.m.archiveYoung.save).not.toHaveBeenCalled();
+ expect(mockPfapiService.m.archiveOld.load).not.toHaveBeenCalled();
+ expect(mockPfapiService.m.archiveOld.save).not.toHaveBeenCalled();
});
it('should pass isIgnoreDBLock: true for remote operations', async () => {


@@ -215,14 +215,19 @@ export class ArchiveOperationHandler {
/**
* Executes the flush from archiveYoung to archiveOld.
* Called for both local and remote flushYoungToOld operations.
+ * REMOTE ONLY: For local operations, the flush is already performed by
+ * ArchiveService.moveTasksToArchiveAndFlushArchiveIfDue() before action dispatch.
+ * This prevents race conditions with sync (see that method for details).
*
* This operation is deterministic - given the same timestamp and archive state,
* it will produce the same result on all clients.
*/
private async _handleFlushYoungToOld(action: PersistentAction): Promise<void> {
+ if (!action.meta?.isRemote) {
+   return; // Local: already written by ArchiveService before dispatch
+ }
const timestamp = (action as ReturnType<typeof flushYoungToOld>).timestamp;
- const isRemote = !!action.meta?.isRemote;
const pfapi = this._getPfapiService();
const archiveYoung = await pfapi.m.archiveYoung.load();
@@ -242,7 +247,7 @@
},
{
isUpdateRevAndLastUpdate: true,
- isIgnoreDBLock: isRemote ? true : undefined,
+ isIgnoreDBLock: true, // Remote ops: DB is locked during sync processing
},
);
@@ -253,12 +258,12 @@
},
{
isUpdateRevAndLastUpdate: true,
- isIgnoreDBLock: isRemote ? true : undefined,
+ isIgnoreDBLock: true, // Remote ops: DB is locked during sync processing
},
);
Log.log(
- `______________________\nFLUSHED ALL FROM ARCHIVE YOUNG TO OLD (via ${isRemote ? 'remote' : 'local'} op handler)\n_______________________`,
+ '______________________\nFLUSHED ALL FROM ARCHIVE YOUNG TO OLD (via remote op handler)\n_______________________',
);
}


@@ -1,16 +1,236 @@
// import { TestBed } from '@angular/core/testing';
//
// import { ArchiveService } from './archive.service';
//
// describe('ArchiveService', () => {
// let service: ArchiveService;
//
// beforeEach(() => {
// TestBed.configureTestingModule({});
// service = TestBed.inject(ArchiveService);
// });
//
// it('should be created', () => {
// expect(service).toBeTruthy();
// });
// });
import { TestBed } from '@angular/core/testing';
import { Store } from '@ngrx/store';
import { ArchiveService, ARCHIVE_ALL_YOUNG_TO_OLD_THRESHOLD } from './archive.service';
import { PfapiService } from '../../pfapi/pfapi.service';
import { flushYoungToOld } from './store/archive.actions';
import { TimeTrackingActions } from './store/time-tracking.actions';
import { TaskWithSubTasks } from '../tasks/task.model';
describe('ArchiveService', () => {
let service: ArchiveService;
let mockStore: jasmine.SpyObj<Store>;
let mockPfapiService: {
m: {
archiveYoung: {
load: jasmine.Spy;
save: jasmine.Spy;
};
archiveOld: {
load: jasmine.Spy;
save: jasmine.Spy;
};
timeTracking: {
load: jasmine.Spy;
};
};
};
const createEmptyArchive = (
lastTimeTrackingFlush: number = 0,
): {
task: { ids: string[]; entities: Record<string, unknown> };
timeTracking: { project: Record<string, unknown>; tag: Record<string, unknown> };
lastTimeTrackingFlush: number;
lastFlush: number;
} => ({
task: { ids: [], entities: {} },
timeTracking: { project: {}, tag: {} },
lastTimeTrackingFlush,
lastFlush: 0,
});
const ONE_DAY_MS = 1000 * 60 * 60 * 24;
const createMockTask = (
id: string,
overrides: Partial<TaskWithSubTasks> = {},
): TaskWithSubTasks =>
({
id,
title: `Task ${id}`,
isDone: true,
doneOn: Date.now() - ONE_DAY_MS,
subTaskIds: [],
tagIds: [],
timeSpentOnDay: {},
timeSpent: 0,
timeEstimate: 0,
...overrides,
}) as TaskWithSubTasks;
beforeEach(() => {
mockStore = jasmine.createSpyObj('Store', ['dispatch']);
mockPfapiService = {
m: {
archiveYoung: {
load: jasmine.createSpy('archiveYoung.load'),
save: jasmine.createSpy('archiveYoung.save'),
},
archiveOld: {
load: jasmine.createSpy('archiveOld.load'),
save: jasmine.createSpy('archiveOld.save'),
},
timeTracking: {
load: jasmine.createSpy('timeTracking.load'),
},
},
};
TestBed.configureTestingModule({
providers: [
ArchiveService,
{ provide: Store, useValue: mockStore },
{ provide: PfapiService, useValue: mockPfapiService },
],
});
service = TestBed.inject(ArchiveService);
// Default mock returns
mockPfapiService.m.archiveYoung.load.and.returnValue(
Promise.resolve(createEmptyArchive()),
);
mockPfapiService.m.archiveOld.load.and.returnValue(
Promise.resolve(createEmptyArchive()),
);
mockPfapiService.m.timeTracking.load.and.returnValue(
Promise.resolve({ project: {}, tag: {} }),
);
mockPfapiService.m.archiveYoung.save.and.returnValue(Promise.resolve());
mockPfapiService.m.archiveOld.save.and.returnValue(Promise.resolve());
});
it('should be created', () => {
expect(service).toBeTruthy();
});
describe('moveTasksToArchiveAndFlushArchiveIfDue', () => {
it('should save tasks to archiveYoung', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
expect(mockPfapiService.m.archiveYoung.save).toHaveBeenCalled();
const saveCall = mockPfapiService.m.archiveYoung.save.calls.first();
const savedData = saveCall.args[0];
expect(savedData.task.ids).toContain('task-1');
});
it('should dispatch updateWholeState for time tracking', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
expect(mockStore.dispatch).toHaveBeenCalledWith(
jasmine.objectContaining({ type: TimeTrackingActions.updateWholeState.type }),
);
});
describe('when flush is NOT due', () => {
beforeEach(() => {
// Set lastTimeTrackingFlush to recent time (less than threshold)
mockPfapiService.m.archiveOld.load.and.returnValue(
Promise.resolve(createEmptyArchive(Date.now() - 1000)), // 1 second ago
);
});
it('should NOT perform flush to archiveOld', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
// Should only save once (for the tasks), not for the flush
expect(mockPfapiService.m.archiveYoung.save).toHaveBeenCalledTimes(1);
expect(mockPfapiService.m.archiveOld.save).not.toHaveBeenCalled();
});
it('should NOT dispatch flushYoungToOld action', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
expect(mockStore.dispatch).not.toHaveBeenCalledWith(
jasmine.objectContaining({ type: flushYoungToOld.type }),
);
});
});
describe('when flush IS due', () => {
const oldFlushTime = Date.now() - ARCHIVE_ALL_YOUNG_TO_OLD_THRESHOLD - 1000;
beforeEach(() => {
// Set lastTimeTrackingFlush to old time (more than threshold)
mockPfapiService.m.archiveOld.load.and.returnValue(
Promise.resolve(createEmptyArchive(oldFlushTime)),
);
});
it('should save to archiveOld during flush (before dispatch can happen)', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
// The key behavior: archiveOld.save was called, meaning flush happened
// synchronously before the method returned (and before dispatch could
// cause any async effects)
expect(mockPfapiService.m.archiveOld.save).toHaveBeenCalledTimes(1);
});
it('should save to both archiveYoung and archiveOld during flush', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
// archiveYoung.save called twice: once for tasks, once for flush
expect(mockPfapiService.m.archiveYoung.save).toHaveBeenCalledTimes(2);
expect(mockPfapiService.m.archiveOld.save).toHaveBeenCalledTimes(1);
});
it('should dispatch flushYoungToOld action AFTER saves complete', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
expect(mockStore.dispatch).toHaveBeenCalledWith(
jasmine.objectContaining({ type: flushYoungToOld.type }),
);
});
it('should dispatch flushYoungToOld action with timestamp', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
// Verify flushYoungToOld was dispatched with a timestamp property
expect(mockStore.dispatch).toHaveBeenCalledWith(
jasmine.objectContaining({
type: flushYoungToOld.type,
timestamp: jasmine.any(Number),
}),
);
});
it('should set lastTimeTrackingFlush on saved archives', async () => {
const tasks = [createMockTask('task-1')];
await service.moveTasksToArchiveAndFlushArchiveIfDue(tasks);
// Check archiveYoung flush save (second call)
const archiveYoungFlushCall = mockPfapiService.m.archiveYoung.save.calls.all()[1];
expect(archiveYoungFlushCall.args[0].lastTimeTrackingFlush).toBeDefined();
// Check archiveOld save
const archiveOldCall = mockPfapiService.m.archiveOld.save.calls.first();
expect(archiveOldCall.args[0].lastTimeTrackingFlush).toBeDefined();
});
});
it('should do nothing if no tasks provided', async () => {
await service.moveTasksToArchiveAndFlushArchiveIfDue([]);
expect(mockPfapiService.m.archiveYoung.save).not.toHaveBeenCalled();
expect(mockStore.dispatch).not.toHaveBeenCalled();
});
});
});


@@ -4,7 +4,10 @@ import { flattenTasks } from '../tasks/store/task.selectors';
import { createEmptyEntity } from '../../util/create-empty-entity';
import { taskAdapter } from '../tasks/store/task.adapter';
import { PfapiService } from '../../pfapi/pfapi.service';
- import { sortTimeTrackingDataToArchiveYoung } from './sort-data-to-flush';
+ import {
+   sortTimeTrackingAndTasksFromArchiveYoungToOld,
+   sortTimeTrackingDataToArchiveYoung,
+ } from './sort-data-to-flush';
import { Store } from '@ngrx/store';
import { TimeTrackingActions } from './store/time-tracking.actions';
import { flushYoungToOld } from './store/archive.actions';
@@ -138,10 +141,45 @@ export class ArchiveService {
return;
}
- // Dispatch the flush action - this will be persisted and synced to other clients
- // The actual flush operation is handled by ArchiveEffects.flushYoungToOld$
- // This ensures other clients receive the operation and replay the same flush,
- // maintaining deterministic archive state without syncing large archiveOld files.
// Perform the flush BEFORE dispatching the action.
// This prevents a race condition where sync starts before the effect completes:
// 1. Action dispatch -> effect queued
// 2. Method returns -> daily summary starts sync -> DB locked
// 3. Effect runs -> tries to write -> blocked by DB lock
//
// By doing the flush here, we ensure it completes before this method returns.
// The action is still dispatched for op-log capture (syncs to other clients).
// ArchiveOperationHandler._handleFlushYoungToOld skips local operations.
const updatedArchiveYoung = await this.pfapiService.m.archiveYoung.load();
const newSorted = sortTimeTrackingAndTasksFromArchiveYoungToOld({
archiveYoung: updatedArchiveYoung,
archiveOld,
threshold: ARCHIVE_TASK_YOUNG_TO_OLD_THRESHOLD,
now,
});
await this.pfapiService.m.archiveYoung.save(
{
...newSorted.archiveYoung,
lastTimeTrackingFlush: now,
},
{ isUpdateRevAndLastUpdate: true },
);
await this.pfapiService.m.archiveOld.save(
{
...newSorted.archiveOld,
lastTimeTrackingFlush: now,
},
{ isUpdateRevAndLastUpdate: true },
);
Log.log(
'______________________\nFLUSHED ALL FROM ARCHIVE YOUNG TO OLD (via ArchiveService)\n_______________________',
);
// Dispatch for op-log capture - syncs to other clients
// The handler skips local operations since we already did the flush above
this._store.dispatch(flushYoungToOld({ timestamp: now }));
}