diff --git a/src/app/op-log/backup/state-snapshot.service.spec.ts b/src/app/op-log/backup/state-snapshot.service.spec.ts
new file mode 100644
index 000000000..e1417d036
--- /dev/null
+++ b/src/app/op-log/backup/state-snapshot.service.spec.ts
@@ -0,0 +1,248 @@
+/* eslint-disable @typescript-eslint/no-explicit-any */
+import { TestBed } from '@angular/core/testing';
+import { provideMockStore, MockStore } from '@ngrx/store/testing';
+import { StateSnapshotService } from './state-snapshot.service';
+import { ArchiveDbAdapter } from '../../core/persistence/archive-db-adapter.service';
+import { selectTaskFeatureState } from '../../features/tasks/store/task.selectors';
+import { selectProjectFeatureState } from '../../features/project/store/project.selectors';
+import { selectTagFeatureState } from '../../features/tag/store/tag.reducer';
+import { selectConfigFeatureState } from '../../features/config/store/global-config.reducer';
+import { selectNoteFeatureState } from '../../features/note/store/note.reducer';
+import { selectIssueProviderState } from '../../features/issue/store/issue-provider.selectors';
+import { selectPlannerState } from '../../features/planner/store/planner.selectors';
+import { selectBoardsState } from '../../features/boards/store/boards.selectors';
+import { selectMetricFeatureState } from '../../features/metric/store/metric.selectors';
+import { selectSimpleCounterFeatureState } from '../../features/simple-counter/store/simple-counter.reducer';
+import { selectTaskRepeatCfgFeatureState } from '../../features/task-repeat-cfg/store/task-repeat-cfg.selectors';
+import { selectMenuTreeState } from '../../features/menu-tree/store/menu-tree.selectors';
+import { selectTimeTrackingState } from '../../features/time-tracking/store/time-tracking.selectors';
+import { selectPluginUserDataFeatureState } from '../../plugins/store/plugin-user-data.reducer';
+import { selectPluginMetadataFeatureState } from '../../plugins/store/plugin-metadata.reducer';
+import { selectReminderFeatureState } from '../../features/reminder/store/reminder.reducer';
+import { ArchiveModel } from '../../features/time-tracking/time-tracking.model';
+import { initialTimeTrackingState } from '../../features/time-tracking/store/time-tracking.reducer';
+
+describe('StateSnapshotService', () => {
+  let service: StateSnapshotService;
+  let store: MockStore;
+  let archiveDbAdapterSpy: jasmine.SpyObj<ArchiveDbAdapter>;
+
+  // Sample mock states for selectors
+  const mockTaskState = {
+    ids: ['task1'],
+    entities: { task1: { id: 'task1', title: 'Test Task' } },
+    selectedTaskId: 'task1',
+    currentTaskId: 'task1',
+  };
+  const mockProjectState = { ids: [], entities: {} };
+  const mockTagState = { ids: [], entities: {} };
+  const mockConfigState = { misc: {} };
+  const mockNoteState = { ids: [], entities: {} };
+  const mockIssueProviderState = { ids: [], entities: {} };
+  const mockPlannerState = { days: {} };
+  const mockBoardsState = { ids: [], entities: {} };
+  const mockMetricState = { ids: [], entities: {} };
+  const mockSimpleCounterState = { ids: [], entities: {} };
+  const mockTaskRepeatCfgState = { ids: [], entities: {} };
+  const mockMenuTreeState = { root: [] };
+  const mockTimeTrackingState = initialTimeTrackingState;
+  const mockPluginUserDataState = {};
+  const mockPluginMetadataState = {};
+  const mockReminderState = { ids: [], entities: {} };
+
+  const DEFAULT_ARCHIVE: ArchiveModel = {
+    task: { ids: [], entities: {} },
+    timeTracking: initialTimeTrackingState,
+    lastTimeTrackingFlush: 0,
+  };
+
+  const mockArchiveYoung: ArchiveModel = {
+    task: {
+      ids: ['archived1'],
+      entities: { archived1: { id: 'archived1', title: 'Archived Young' } as any },
+    },
+    timeTracking: initialTimeTrackingState,
+    lastTimeTrackingFlush: 1000,
+  };
+
+  const mockArchiveOld: ArchiveModel = {
+    task: {
+      ids: ['archivedOld1'],
+      entities: {
+        archivedOld1: { id: 'archivedOld1', title: 'Archived Old' } as any,
+      },
+    },
+    timeTracking: initialTimeTrackingState,
+    lastTimeTrackingFlush: 500,
+  };
+
+  beforeEach(() => {
+    archiveDbAdapterSpy = jasmine.createSpyObj('ArchiveDbAdapter', [
+      'loadArchiveYoung',
+      'loadArchiveOld',
+    ]);
+    archiveDbAdapterSpy.loadArchiveYoung.and.returnValue(
+      Promise.resolve(mockArchiveYoung),
+    );
+    archiveDbAdapterSpy.loadArchiveOld.and.returnValue(Promise.resolve(mockArchiveOld));
+
+    TestBed.configureTestingModule({
+      providers: [
+        StateSnapshotService,
+        provideMockStore(),
+        { provide: ArchiveDbAdapter, useValue: archiveDbAdapterSpy },
+      ],
+    });
+
+    service = TestBed.inject(StateSnapshotService);
+    store = TestBed.inject(MockStore);
+
+    // Override selectors with mock values
+    store.overrideSelector(selectTaskFeatureState, mockTaskState as any);
+    store.overrideSelector(selectProjectFeatureState, mockProjectState as any);
+    store.overrideSelector(selectTagFeatureState, mockTagState as any);
+    store.overrideSelector(selectConfigFeatureState, mockConfigState as any);
+    store.overrideSelector(selectNoteFeatureState, mockNoteState as any);
+    store.overrideSelector(selectIssueProviderState, mockIssueProviderState as any);
+    store.overrideSelector(selectPlannerState, mockPlannerState as any);
+    store.overrideSelector(selectBoardsState, mockBoardsState as any);
+    store.overrideSelector(selectMetricFeatureState, mockMetricState as any);
+    store.overrideSelector(
+      selectSimpleCounterFeatureState,
+      mockSimpleCounterState as any,
+    );
+    store.overrideSelector(
+      selectTaskRepeatCfgFeatureState,
+      mockTaskRepeatCfgState as any,
+    );
+    store.overrideSelector(selectMenuTreeState, mockMenuTreeState as any);
+    store.overrideSelector(selectTimeTrackingState, mockTimeTrackingState as any);
+    store.overrideSelector(
+      selectPluginUserDataFeatureState,
+      mockPluginUserDataState as any,
+    );
+    store.overrideSelector(
+      selectPluginMetadataFeatureState,
+      mockPluginMetadataState as any,
+    );
+    store.overrideSelector(selectReminderFeatureState, mockReminderState as any);
+  });
+
+  afterEach(() => {
+    store.resetSelectors();
+  });
+
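+  // The specs below exercise two snapshot variants: the synchronous
+  // getStateSnapshot() is expected to fall back to DEFAULT_ARCHIVE for both
+  // archives (presumably because it cannot await the ArchiveDbAdapter), while
+  // getStateSnapshotAsync() is expected to load archiveYoung/archiveOld via the
+  // adapter. This describes the behaviour asserted here, not the implementation.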
+  describe('getStateSnapshot (sync)', () => {
+    it('should return all feature states from NgRx store', () => {
+      const snapshot = service.getStateSnapshot();
+
+      expect(snapshot.project).toEqual(mockProjectState);
+      expect(snapshot.tag).toEqual(mockTagState);
+      expect(snapshot.globalConfig).toEqual(mockConfigState);
+      expect(snapshot.note).toEqual(mockNoteState);
+      expect(snapshot.issueProvider).toEqual(mockIssueProviderState);
+      expect(snapshot.planner).toEqual(mockPlannerState);
+      expect(snapshot.boards).toEqual(mockBoardsState);
+      expect(snapshot.metric).toEqual(mockMetricState);
+      expect(snapshot.simpleCounter).toEqual(mockSimpleCounterState);
+      expect(snapshot.taskRepeatCfg).toEqual(mockTaskRepeatCfgState);
+      expect(snapshot.menuTree).toEqual(mockMenuTreeState);
+      expect(snapshot.timeTracking).toEqual(mockTimeTrackingState);
+      expect(snapshot.pluginUserData).toEqual(mockPluginUserDataState);
+      expect(snapshot.pluginMetadata).toEqual(mockPluginMetadataState);
+      expect(snapshot.reminders).toEqual(mockReminderState);
+    });
+
+    it('should return default empty archives', () => {
+      const snapshot = service.getStateSnapshot();
+
+      expect(snapshot.archiveYoung).toEqual(DEFAULT_ARCHIVE);
+      expect(snapshot.archiveOld).toEqual(DEFAULT_ARCHIVE);
+    });
+
+    it('should clear currentTaskId', () => {
+      const snapshot = service.getStateSnapshot();
+
+      expect((snapshot.task as any).currentTaskId).toBeNull();
+    });
+
+    it('should include task state with ids and entities', () => {
+      const snapshot = service.getStateSnapshot();
+
+      expect((snapshot.task as any).ids).toEqual(['task1']);
+      expect((snapshot.task as any).entities).toBeDefined();
+    });
+  });
+
+  describe('getStateSnapshotAsync', () => {
+    it('should return all feature states from NgRx store', async () => {
+      const snapshot = await service.getStateSnapshotAsync();
+
+      expect(snapshot.project).toEqual(mockProjectState);
+      expect(snapshot.tag).toEqual(mockTagState);
+      expect(snapshot.globalConfig).toEqual(mockConfigState);
+    });
+
+    it('should load archiveYoung from ArchiveDbAdapter', async () => {
+      const snapshot = await service.getStateSnapshotAsync();
+
+      expect(archiveDbAdapterSpy.loadArchiveYoung).toHaveBeenCalled();
+      expect(snapshot.archiveYoung).toEqual(mockArchiveYoung);
+    });
+
+    it('should load archiveOld from ArchiveDbAdapter', async () => {
+      const snapshot = await service.getStateSnapshotAsync();
+
+      expect(archiveDbAdapterSpy.loadArchiveOld).toHaveBeenCalled();
+      expect(snapshot.archiveOld).toEqual(mockArchiveOld);
+    });
+
+    it('should return default archive when adapter returns null for archiveYoung', async () => {
+      archiveDbAdapterSpy.loadArchiveYoung.and.returnValue(Promise.resolve(null as any));
+
+      const snapshot = await service.getStateSnapshotAsync();
+
+      expect(snapshot.archiveYoung).toEqual(DEFAULT_ARCHIVE);
+    });
+
+    it('should return default archive when adapter returns null for archiveOld', async () => {
+      archiveDbAdapterSpy.loadArchiveOld.and.returnValue(Promise.resolve(null as any));
+
+      const snapshot = await service.getStateSnapshotAsync();
+
+      expect(snapshot.archiveOld).toEqual(DEFAULT_ARCHIVE);
+    });
+
+    it('should clear currentTaskId in async version', async () => {
+      const snapshot = await service.getStateSnapshotAsync();
+
+      expect((snapshot.task as any).currentTaskId).toBeNull();
+    });
+
+    it('should load both archives in parallel', async () => {
+      // Both should be called
+      await service.getStateSnapshotAsync();
+
+      expect(archiveDbAdapterSpy.loadArchiveYoung).toHaveBeenCalledTimes(1);
+      expect(archiveDbAdapterSpy.loadArchiveOld).toHaveBeenCalledTimes(1);
+    });
+  });
+
+  describe('backward compatibility aliases', () => {
+    it('getAllSyncModelDataFromStore should call getStateSnapshot', () => {
+      spyOn(service, 'getStateSnapshot').and.callThrough();
+
+      service.getAllSyncModelDataFromStore();
+
+      expect(service.getStateSnapshot).toHaveBeenCalled();
+    });
+
+    it('getAllSyncModelDataFromStoreAsync should call getStateSnapshotAsync', async () => {
+      spyOn(service, 'getStateSnapshotAsync').and.callThrough();
+
+      await service.getAllSyncModelDataFromStoreAsync();
+
+      expect(service.getStateSnapshotAsync).toHaveBeenCalled();
+    });
+  });
+});
diff --git a/src/app/op-log/sync/lww-operation-factory.service.spec.ts b/src/app/op-log/sync/lww-operation-factory.service.spec.ts
new file mode 100644
index 000000000..d1dd69668
--- /dev/null
+++ b/src/app/op-log/sync/lww-operation-factory.service.spec.ts
@@ -0,0 +1,249 @@
+import { TestBed } from '@angular/core/testing';
+import { LWWOperationFactory } from './lww-operation-factory.service';
+import { EntityType, OpType, VectorClock } from '../core/operation.types';
+import { CURRENT_SCHEMA_VERSION } from '../persistence/schema-migration.service';
+
+describe('LWWOperationFactory', () => {
+  let service: LWWOperationFactory;
+
+  beforeEach(() => {
+    TestBed.configureTestingModule({
+      providers: [LWWOperationFactory],
+    });
+    service = TestBed.inject(LWWOperationFactory);
+  });
+
+  describe('createLWWUpdateOp', () => {
+    const entityType: EntityType = 'TASK';
+    const entityId = 'task-123';
+    const entityState = { id: entityId, title: 'Test Task', done: false };
+    const clientId = 'client_abc';
+    const vectorClock: VectorClock = { client_abc: 5, client_xyz: 3 };
+    const timestamp = 1700000000000;
+
+    it('should create operation with correct action type format [ENTITY] LWW Update', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.actionType).toBe('[TASK] LWW Update');
+    });
+
+    it('should create action type for different entity types', () => {
+      const projectOp = service.createLWWUpdateOp(
+        'PROJECT',
+        'proj-1',
+        {},
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+      expect(projectOp.actionType).toBe('[PROJECT] LWW Update');
+
+      const tagOp = service.createLWWUpdateOp(
+        'TAG',
+        'tag-1',
+        {},
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+      expect(tagOp.actionType).toBe('[TAG] LWW Update');
+    });
+
+    it('should assign correct opType (Update)', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.opType).toBe(OpType.Update);
+    });
+
+    it('should use provided entityType and entityId', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.entityType).toBe(entityType);
+      expect(op.entityId).toBe(entityId);
+    });
+
+    it('should include entityState as payload', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.payload).toEqual(entityState);
+    });
+
+    it('should generate UUIDv7 ID', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      // UUIDv7 format: 8-4-4-4-12 hex characters
+      expect(op.id).toMatch(
+        /^[0-9a-f]{8}-[0-9a-f]{4}-7[0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}$/i,
+      );
+    });
+
+    it('should generate unique IDs for each call', () => {
+      const op1 = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+      const op2 = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op1.id).not.toBe(op2.id);
+    });
+
+    it('should include CURRENT_SCHEMA_VERSION', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.schemaVersion).toBe(CURRENT_SCHEMA_VERSION);
+    });
+
+    it('should preserve provided vectorClock', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.vectorClock).toEqual(vectorClock);
+    });
+
+    it('should preserve provided timestamp', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.timestamp).toBe(timestamp);
+    });
+
+    it('should preserve provided clientId', () => {
+      const op = service.createLWWUpdateOp(
+        entityType,
+        entityId,
+        entityState,
+        clientId,
+        vectorClock,
+        timestamp,
+      );
+
+      expect(op.clientId).toBe(clientId);
+    });
+  });
+
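+  // Expected merge semantics exercised below (a sketch of the behaviour these
+  // specs assert, not necessarily the exact implementation): the merged clock
+  // takes the per-client maximum across all input clocks (missing entries count
+  // as 0), and the entry for the given clientId is then incremented by one,
+  // starting from 0 if it was absent.
+  // E.g. mergeAndIncrementClocks([{ a: 5, b: 3 }, { a: 3, b: 7 }], 'a') -> { a: 6, b: 7 }.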
+  describe('mergeAndIncrementClocks', () => {
+    it('should return incremented clock for single input clock', () => {
+      const clock: VectorClock = { clientA: 5 };
+      const result = service.mergeAndIncrementClocks([clock], 'clientA');
+
+      expect(result).toEqual({ clientA: 6 });
+    });
+
+    it('should merge two clocks taking max of each component', () => {
+      const clock1: VectorClock = { clientA: 5, clientB: 3 };
+      const clock2: VectorClock = { clientA: 3, clientB: 7 };
+      const result = service.mergeAndIncrementClocks([clock1, clock2], 'clientA');
+
+      expect(result['clientA']).toBe(6); // max(5,3) + 1
+      expect(result['clientB']).toBe(7); // max(3,7)
+    });
+
+    it('should merge three+ clocks correctly', () => {
+      const clock1: VectorClock = { clientA: 1 };
+      const clock2: VectorClock = { clientA: 5, clientB: 2 };
+      const clock3: VectorClock = { clientB: 8, clientC: 3 };
+      const result = service.mergeAndIncrementClocks([clock1, clock2, clock3], 'clientA');
+
+      expect(result['clientA']).toBe(6); // max(1,5,0) + 1
+      expect(result['clientB']).toBe(8); // max(0,2,8)
+      expect(result['clientC']).toBe(3); // max(0,0,3)
+    });
+
+    it('should handle empty clocks array', () => {
+      const result = service.mergeAndIncrementClocks([], 'clientA');
+
+      expect(result).toEqual({ clientA: 1 });
+    });
+
+    it('should increment merged clock for given clientId', () => {
+      const clock: VectorClock = { clientA: 10, clientB: 5 };
+      const result = service.mergeAndIncrementClocks([clock], 'clientB');
+
+      expect(result['clientA']).toBe(10); // unchanged
+      expect(result['clientB']).toBe(6); // incremented
+    });
+
+    it('should add new clientId if not in any input clocks', () => {
+      const clock: VectorClock = { clientA: 5 };
+      const result = service.mergeAndIncrementClocks([clock], 'clientNew');
+
+      expect(result['clientA']).toBe(5); // preserved
+      expect(result['clientNew']).toBe(1); // new entry, starts at 1
+    });
+
+    it('should handle clocks with non-overlapping clients', () => {
+      const clock1: VectorClock = { clientA: 3 };
+      const clock2: VectorClock = { clientB: 7 };
+      const result = service.mergeAndIncrementClocks([clock1, clock2], 'clientC');
+
+      expect(result['clientA']).toBe(3);
+      expect(result['clientB']).toBe(7);
+      expect(result['clientC']).toBe(1);
+    });
+  });
+});
diff --git a/src/app/op-log/sync/remote-ops-processing.service.spec.ts b/src/app/op-log/sync/remote-ops-processing.service.spec.ts
index 5dc61def7..60d09f63c 100644
--- a/src/app/op-log/sync/remote-ops-processing.service.spec.ts
+++ b/src/app/op-log/sync/remote-ops-processing.service.spec.ts
@@ -52,6 +52,7 @@ describe('RemoteOpsProcessingService', () => {
       'getUnsynced',
       'hasOp',
       'append',
+      'appendBatch',
       'appendWithVectorClockUpdate',
       'markApplied',
       'markFailed',
@@ -65,6 +66,10 @@ describe('RemoteOpsProcessingService', () => {
     ]);
     // By default, treat all ops as new (return them as-is)
     opLogStoreSpy.filterNewOps.and.callFake((ops: any[]) => Promise.resolve(ops));
+    // By default, appendBatch returns sequential seq numbers starting from 1
+    opLogStoreSpy.appendBatch.and.callFake((ops: any[]) =>
+      Promise.resolve(ops.map((_: any, i: number) => i + 1)),
+    );
     // By default, no full-state ops in store
     opLogStoreSpy.getLatestFullStateOp.and.returnValue(Promise.resolve(undefined));
     // By default, mergeRemoteOpClocks succeeds
@@ -309,13 +314,11 @@ describe('RemoteOpsProcessingService', () => {
       vectorClockServiceSpy.getEntityFrontier.and.returnValue(Promise.resolve(new Map()));
       vectorClockServiceSpy.getSnapshotVectorClock.and.returnValue(Promise.resolve({}));
       opLogStoreSpy.hasOp.and.returnValue(Promise.resolve(false));
-      opLogStoreSpy.append.and.returnValue(Promise.resolve(1));
 
       await service.processRemoteOps(remoteOps);
 
-      // Only op1 should be applied
-      expect(opLogStoreSpy.append).toHaveBeenCalledTimes(1);
-      expect(opLogStoreSpy.append).toHaveBeenCalledWith(remoteOps[0], 'remote', {
+      // Only op1 should be applied (appendBatch called with single-element array)
+      expect(opLogStoreSpy.appendBatch).toHaveBeenCalledWith([remoteOps[0]], 'remote', {
         pendingApply: true,
       });
     });
@@ -338,18 +341,15 @@ describe('RemoteOpsProcessingService', () => {
       vectorClockServiceSpy.getEntityFrontier.and.returnValue(Promise.resolve(new Map()));
       vectorClockServiceSpy.getSnapshotVectorClock.and.returnValue(Promise.resolve({}));
       opLogStoreSpy.hasOp.and.returnValue(Promise.resolve(false));
-      opLogStoreSpy.append.and.returnValue(Promise.resolve(1));
 
       await service.processRemoteOps(remoteOps);
 
-      // op1 and op3 should be processed, 'throws' is skipped
-      expect(opLogStoreSpy.append).toHaveBeenCalledTimes(2);
-      expect(opLogStoreSpy.append).toHaveBeenCalledWith(remoteOps[0], 'remote', {
-        pendingApply: true,
-      });
-      expect(opLogStoreSpy.append).toHaveBeenCalledWith(remoteOps[2], 'remote', {
-        pendingApply: true,
-      });
+      // op1 and op3 should be processed (appendBatch called with array of both)
+      expect(opLogStoreSpy.appendBatch).toHaveBeenCalledWith(
+        [remoteOps[0], remoteOps[2]],
+        'remote',
+        { pendingApply: true },
+      );
     });
 
     it('should return early when all ops fail migration', async () => {
@@ -395,8 +395,10 @@ describe('RemoteOpsProcessingService', () => {
       // (used for potential dependency warnings in future enhancements)
       await service.processRemoteOps(remoteOps);
 
-      // Only op1 should be applied
-      expect(opLogStoreSpy.append).toHaveBeenCalledTimes(1);
+      // Only op1 should be applied (appendBatch called with single-element array)
+      expect(opLogStoreSpy.appendBatch).toHaveBeenCalledWith([remoteOps[0]], 'remote', {
+        pendingApply: true,
+      });
     });
 
     it('should show error snackbar and abort if version is too new', async () => {
@@ -462,8 +464,10 @@ describe('RemoteOpsProcessingService', () => {
         }),
       );
 
-      // Should still process the ops (ops are applied)
-      expect(opLogStoreSpy.append).toHaveBeenCalledTimes(2);
+      // Should still process the ops (appendBatch called with both ops)
+      expect(opLogStoreSpy.appendBatch).toHaveBeenCalledWith(remoteOps, 'remote', {
+        pendingApply: true,
+      });
     });
 
     it('should not show newer version warning again in same session', async () => {
diff --git a/src/app/op-log/sync/remote-ops-processing.service.ts b/src/app/op-log/sync/remote-ops-processing.service.ts
index a752a2070..ab4329968 100644
--- a/src/app/op-log/sync/remote-ops-processing.service.ts
+++ b/src/app/op-log/sync/remote-ops-processing.service.ts
@@ -297,11 +297,13 @@ export class RemoteOpsProcessingService {
       );
     }
 
-    // Store operations with pending status before applying
+    // Store operations with pending status before applying (single transaction for performance)
     // If we crash after storing but before applying, these will be retried on startup
-    for (const op of opsToApply) {
-      const seq = await this.opLogStore.append(op, 'remote', { pendingApply: true });
-      opIdToSeq.set(op.id, seq);
+    if (opsToApply.length > 0) {
+      const seqs = await this.opLogStore.appendBatch(opsToApply, 'remote', {
+        pendingApply: true,
+      });
+      opsToApply.forEach((op, i) => opIdToSeq.set(op.id, seqs[i]));
     }
 
     // Apply only NON-duplicate ops to NgRx store