mirror of https://github.com/johannesjo/super-productivity.git
synced 2026-01-23 02:36:05 +00:00
fix(sync): address code review findings from 2025-12-28 changes
- Add unit tests for extracted hydrator services (60 new tests):
  - operation-log-recovery.service.spec.ts (17 tests)
  - operation-log-snapshot.service.spec.ts (20 tests)
  - sync-hydration.service.spec.ts (23 tests)
- Add compressed size pre-check in sync.routes.ts:
  - New MAX_COMPRESSED_SIZE (10MB) to prevent memory exhaustion
  - Two-stage protection: pre-check compressed, post-check decompressed
  - Applied to both /ops and /snapshot upload endpoints
- Remove tombstone references from sync-server-architecture-diagrams.md:
  - Database schema, ER diagram, cleanup tasks
- Replace console.warn with Logger.warn in sync.types.ts
This commit is contained in:
parent 80b925ef2f
commit f780099ef8
6 changed files with 973 additions and 18 deletions
sync.routes.ts

@@ -22,7 +22,11 @@ const gunzipAsync = promisify(zlib.gunzip);
 // Validation constants
 const CLIENT_ID_REGEX = /^[a-zA-Z0-9_-]+$/;
 const MAX_CLIENT_ID_LENGTH = 255;
-const MAX_DECOMPRESSED_SIZE = 100 * 1024 * 1024; // 100MB - prevents zip bombs
+// Two-stage protection against zip bombs:
+// 1. Pre-check: Reject compressed data > 10MB (typical ratio ~10:1, so protects against ~100MB)
+// 2. Post-check: Reject decompressed data > 100MB (catches edge cases)
+const MAX_COMPRESSED_SIZE = 10 * 1024 * 1024; // 10MB - prevents memory exhaustion during decompression
+const MAX_DECOMPRESSED_SIZE = 100 * 1024 * 1024; // 100MB - catches malicious high-ratio compression
 
 // Zod Schemas
 const ClientIdSchema = z
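For context, the decompressed cap can also be enforced during inflation itself: Node's zlib accepts a maxOutputLength option that rejects as soon as the limit is crossed, instead of checking after the full buffer has been allocated. A minimal sketch, not part of this commit (the real decompressBody may be shaped differently):

import { promisify } from 'util';
import * as zlib from 'zlib';

const gunzipAsync = promisify(zlib.gunzip);
const MAX_DECOMPRESSED_SIZE = 100 * 1024 * 1024; // 100MB

// Rejects with an error once output would exceed the cap, so memory for the
// full decompressed payload is never allocated.
const safeGunzip = (rawBody: Buffer): Promise<Buffer> =>
  gunzipAsync(rawBody, { maxOutputLength: MAX_DECOMPRESSED_SIZE });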
@@ -156,7 +160,20 @@ export const syncRoutes = async (fastify: FastifyInstance): Promise<void> => {
     const contentTransferEncoding = req.headers['content-transfer-encoding'] as
       | string
       | undefined;
 
+    // Pre-check: reject if compressed size exceeds limit (prevents memory exhaustion)
+    if (rawBody.length > MAX_COMPRESSED_SIZE) {
+      Logger.warn(
+        `[user:${userId}] Compressed upload too large: ${rawBody.length} bytes (max ${MAX_COMPRESSED_SIZE})`,
+      );
+      return reply.status(413).send({
+        error: 'Compressed payload too large',
+      });
+    }
+
     const decompressed = await decompressBody(rawBody, contentTransferEncoding);
 
+    // Post-check: reject if decompressed size exceeds limit (catches high-ratio attacks)
     if (decompressed.length > MAX_DECOMPRESSED_SIZE) {
       Logger.warn(
         `[user:${userId}] Decompressed upload too large: ${decompressed.length} bytes (max ${MAX_DECOMPRESSED_SIZE})`,
@@ -541,7 +558,20 @@ export const syncRoutes = async (fastify: FastifyInstance): Promise<void> => {
     const contentTransferEncoding = req.headers['content-transfer-encoding'] as
       | string
       | undefined;
 
+    // Pre-check: reject if compressed size exceeds limit (prevents memory exhaustion)
+    if (rawBody.length > MAX_COMPRESSED_SIZE) {
+      Logger.warn(
+        `[user:${userId}] Compressed snapshot too large: ${rawBody.length} bytes (max ${MAX_COMPRESSED_SIZE})`,
+      );
+      return reply.status(413).send({
+        error: 'Compressed payload too large',
+      });
+    }
+
     const decompressed = await decompressBody(rawBody, contentTransferEncoding);
 
+    // Post-check: reject if decompressed size exceeds limit (catches high-ratio attacks)
     if (decompressed.length > MAX_DECOMPRESSED_SIZE) {
       Logger.warn(
         `[user:${userId}] Decompressed snapshot too large: ${decompressed.length} bytes (max ${MAX_DECOMPRESSED_SIZE})`,
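A sketch of how the pre-check could be exercised end to end; illustrative only, with `app` an assumed Fastify instance, the /ops path taken from the commit message, and auth omitted. Random bytes are incompressible, so an 11MB payload stays above the 10MB limit even after gzip and should draw an immediate 413:

import { randomBytes } from 'crypto';
import * as zlib from 'zlib';
import type { FastifyInstance } from 'fastify';

const expect413ForOversizedBody = async (app: FastifyInstance): Promise<void> => {
  // gzip of random data stays slightly above the 11MB input size
  const oversized = zlib.gzipSync(randomBytes(11 * 1024 * 1024));
  const res = await app.inject({
    method: 'POST',
    url: '/ops', // assumed route path
    headers: { 'content-transfer-encoding': 'gzip' },
    payload: oversized,
  });
  if (res.statusCode !== 413) {
    throw new Error(`expected 413, got ${res.statusCode}`);
  }
};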
sync.types.ts

@@ -1,3 +1,5 @@
+import { Logger } from '../logger';
+
 // Structured error codes for client handling
 export const SYNC_ERROR_CODES = {
   // Validation errors (400)

@@ -112,7 +114,7 @@ export const sanitizeVectorClock = (
   }
 
   if (strippedCount > 0) {
-    console.warn(
+    Logger.warn(
       `sanitizeVectorClock: Stripped ${strippedCount} invalid entries from vector clock`,
     );
   }
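For orientation, the surrounding function (only partially visible in the hunk) plausibly looks like the following; this is a hypothetical reconstruction, not the committed code. Entries whose counter is not a non-negative integer are dropped, and a single warning summarizes how many were stripped:

import { Logger } from '../logger';

type VectorClock = Record<string, number>;

export const sanitizeVectorClock = (clock: Record<string, unknown>): VectorClock => {
  const sanitized: VectorClock = {};
  let strippedCount = 0;
  for (const [clientId, value] of Object.entries(clock)) {
    if (typeof value === 'number' && Number.isInteger(value) && value >= 0) {
      sanitized[clientId] = value;
    } else {
      strippedCount++;
    }
  }
  if (strippedCount > 0) {
    Logger.warn(
      `sanitizeVectorClock: Stripped ${strippedCount} invalid entries from vector clock`,
    );
  }
  return sanitized;
};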
sync-server-architecture-diagrams.md

@@ -54,7 +54,7 @@ flowchart TB
     subgraph Services
         AuthS[Auth Service<br/>- Register/Login<br/>- Email verification<br/>- Account lockout]
         SyncS[Sync Service<br/>- Upload/Download ops<br/>- Conflict detection<br/>- Snapshot generation]
-        CleanupS[Cleanup Service<br/>- Tombstone expiry<br/>- Old ops deletion<br/>- Stale device removal]
+        CleanupS[Cleanup Service<br/>- Old ops deletion<br/>- Stale device removal]
     end
 
     subgraph Database["PostgreSQL Database"]
@@ -62,7 +62,6 @@ flowchart TB
         Ops[(operations)]
         SyncState[(user_sync_state)]
         Devices[(sync_devices)]
-        Tombstones[(tombstones)]
     end
 
     Routes --> Middleware
@@ -71,10 +70,8 @@ flowchart TB
     SyncS --> Ops
     SyncS --> SyncState
     SyncS --> Devices
-    SyncS --> Tombstones
     CleanupS --> Ops
     CleanupS --> Devices
-    CleanupS --> Tombstones
 ```
 
 ## 1.3 Database Schema
@@ -84,7 +81,6 @@ erDiagram
     users ||--o{ operations : has
     users ||--o| user_sync_state : has
     users ||--o{ sync_devices : owns
-    users ||--o{ tombstones : has
 
     users {
         int id PK
@@ -131,15 +127,6 @@ erDiagram
         int last_seen_at
         int created_at
     }
-
-    tombstones {
-        int user_id PK
-        text entity_type PK
-        text entity_id PK
-        int deleted_at
-        text deleted_by_op_id
-        int expires_at
-    }
 ```
 
 **Schema Notes:**
@@ -511,12 +498,10 @@ flowchart TB
     subgraph Cleanup["Cleanup Tasks"]
         StaleDevices[Remove stale devices<br/>not seen in 50 days]
         OldOps[Delete old operations<br/>older than 45 days<br/>AND covered by snapshot]
-        ExpiredTombstones[Delete expired tombstones<br/>older than 45 days]
     end
 
     Hourly --> StaleDevices
     Daily --> OldOps
-    Daily --> ExpiredTombstones
 ```
 
 **Operation deletion constraint:**
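The "older than 45 days AND covered by snapshot" guard from the cleanup diagram could be expressed as a single parameterized query; illustrative only, since the diagram fixes just the two ANDed conditions, and the column names (created_at, seq, snapshot_covers_seq) and the Db interface are assumptions:

interface Db {
  query(sql: string, params: unknown[]): Promise<unknown>;
}

const FORTY_FIVE_DAYS_MS = 45 * 24 * 60 * 60 * 1000;

// Delete a user's operations only when they are both old enough AND already
// covered by the latest snapshot, so no unsnapshotted history is ever lost.
const deleteOldOps = (db: Db, userId: number): Promise<unknown> =>
  db.query(
    `DELETE FROM operations
      WHERE user_id = $1
        AND created_at < $2
        AND seq <= (SELECT snapshot_covers_seq
                      FROM user_sync_state
                     WHERE user_id = $1)`,
    [userId, Date.now() - FORTY_FIVE_DAYS_MS],
  );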
src/app/op-log/store/operation-log-recovery.service.spec.ts (new file, 297 lines)

@@ -0,0 +1,297 @@
|
|||
import { TestBed } from '@angular/core/testing';
|
||||
import { Store } from '@ngrx/store';
|
||||
import { OperationLogRecoveryService } from './operation-log-recovery.service';
|
||||
import { OperationLogStoreService } from './operation-log-store.service';
|
||||
import { PfapiService } from '../../pfapi/pfapi.service';
|
||||
import { ClientIdService } from '../../core/util/client-id.service';
|
||||
import { ActionType, OpType } from '../core/operation.types';
|
||||
import { PENDING_OPERATION_EXPIRY_MS } from '../core/operation-log.const';
|
||||
|
||||
describe('OperationLogRecoveryService', () => {
|
||||
let service: OperationLogRecoveryService;
|
||||
let mockStore: jasmine.SpyObj<Store>;
|
||||
let mockOpLogStore: jasmine.SpyObj<OperationLogStoreService>;
|
||||
let mockPfapiService: {
|
||||
pf: {
|
||||
getAllSyncModelDataFromModelCtrls: jasmine.Spy;
|
||||
metaModel: { syncVectorClock: jasmine.Spy };
|
||||
};
|
||||
};
|
||||
let mockClientIdService: jasmine.SpyObj<ClientIdService>;
|
||||
|
||||
beforeEach(() => {
|
||||
mockStore = jasmine.createSpyObj('Store', ['dispatch']);
|
||||
mockOpLogStore = jasmine.createSpyObj('OperationLogStoreService', [
|
||||
'append',
|
||||
'getLastSeq',
|
||||
'saveStateCache',
|
||||
'getPendingRemoteOps',
|
||||
'markRejected',
|
||||
'markApplied',
|
||||
]);
|
||||
mockPfapiService = {
|
||||
pf: {
|
||||
getAllSyncModelDataFromModelCtrls: jasmine.createSpy().and.resolveTo({}),
|
||||
metaModel: { syncVectorClock: jasmine.createSpy().and.resolveTo(undefined) },
|
||||
},
|
||||
};
|
||||
mockClientIdService = jasmine.createSpyObj('ClientIdService', ['loadClientId']);
|
||||
|
||||
TestBed.configureTestingModule({
|
||||
providers: [
|
||||
OperationLogRecoveryService,
|
||||
{ provide: Store, useValue: mockStore },
|
||||
{ provide: OperationLogStoreService, useValue: mockOpLogStore },
|
||||
{ provide: PfapiService, useValue: mockPfapiService },
|
||||
{ provide: ClientIdService, useValue: mockClientIdService },
|
||||
],
|
||||
});
|
||||
service = TestBed.inject(OperationLogRecoveryService);
|
||||
});
|
||||
|
||||
describe('hasUsableData', () => {
|
||||
it('should return true when tasks exist', () => {
|
||||
const data = { task: { ids: ['task1'] } };
|
||||
expect(service.hasUsableData(data)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when task ids are empty', () => {
|
||||
const data = { task: { ids: [] } };
|
||||
expect(service.hasUsableData(data)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when more than one project exists', () => {
|
||||
const data = { task: { ids: [] }, project: { ids: ['proj1', 'proj2'] } };
|
||||
expect(service.hasUsableData(data)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when only default project exists', () => {
|
||||
const data = { task: { ids: [] }, project: { ids: ['defaultProject'] } };
|
||||
expect(service.hasUsableData(data)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when globalConfig has entries', () => {
|
||||
const data = {
|
||||
task: { ids: [] },
|
||||
project: { ids: [] },
|
||||
globalConfig: { lang: 'en' },
|
||||
};
|
||||
expect(service.hasUsableData(data)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false for completely empty data', () => {
|
||||
const data = {};
|
||||
expect(service.hasUsableData(data)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when task state is undefined', () => {
|
||||
const data = { project: { ids: [] } };
|
||||
expect(service.hasUsableData(data)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false for empty globalConfig', () => {
|
||||
const data = { task: { ids: [] }, project: { ids: [] }, globalConfig: {} };
|
||||
expect(service.hasUsableData(data)).toBe(false);
|
||||
});
|
||||
});
|
||||
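  // Not part of the spec file - an inferred sketch of the method under test,
  // pinned down by the cases above: data is "usable" when any real task exists,
  // when more than the single default project exists, or when globalConfig has
  // at least one entry. Shapes are assumptions; only the behavior is from the spec.
  //
  // hasUsableData(data: Record<string, any>): boolean {
  //   if ((data.task?.ids?.length ?? 0) > 0) return true;
  //   if ((data.project?.ids?.length ?? 0) > 1) return true;
  //   return Object.keys(data.globalConfig ?? {}).length > 0;
  // }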

  describe('attemptRecovery', () => {
    it('should recover from legacy data when available', async () => {
      const legacyData = { task: { ids: ['task1'] } };
      mockPfapiService.pf.getAllSyncModelDataFromModelCtrls.and.resolveTo(legacyData);
      mockClientIdService.loadClientId.and.resolveTo('testClient');
      mockOpLogStore.append.and.resolveTo(undefined);
      mockOpLogStore.getLastSeq.and.resolveTo(1);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);

      await service.attemptRecovery();

      expect(mockOpLogStore.append).toHaveBeenCalledWith(
        jasmine.objectContaining({
          actionType: ActionType.RECOVERY_DATA_IMPORT,
          opType: OpType.Batch,
          entityType: 'RECOVERY',
          payload: legacyData,
        }),
      );
      expect(mockStore.dispatch).toHaveBeenCalled();
    });

    it('should not recover when no usable legacy data exists', async () => {
      mockPfapiService.pf.getAllSyncModelDataFromModelCtrls.and.resolveTo({
        task: { ids: [] },
      });

      await service.attemptRecovery();

      expect(mockOpLogStore.append).not.toHaveBeenCalled();
      expect(mockStore.dispatch).not.toHaveBeenCalled();
    });

    it('should handle database access errors gracefully', async () => {
      mockPfapiService.pf.getAllSyncModelDataFromModelCtrls.and.rejectWith(
        new Error('Database error'),
      );

      // Should not throw
      await expectAsync(service.attemptRecovery()).toBeResolved();
      expect(mockOpLogStore.append).not.toHaveBeenCalled();
    });
  });

  describe('recoverFromLegacyData', () => {
    it('should create recovery operation with correct properties', async () => {
      const legacyData = {
        task: { ids: ['task1'], entities: { task1: { id: 'task1' } } },
      };
      mockClientIdService.loadClientId.and.resolveTo('testClient');
      mockOpLogStore.append.and.resolveTo(undefined);
      mockOpLogStore.getLastSeq.and.resolveTo(1);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);

      await service.recoverFromLegacyData(legacyData);

      expect(mockOpLogStore.append).toHaveBeenCalledWith(
        jasmine.objectContaining({
          actionType: ActionType.RECOVERY_DATA_IMPORT,
          opType: OpType.Batch,
          entityType: 'RECOVERY',
          entityId: '*',
          payload: legacyData,
          clientId: 'testClient',
          vectorClock: { testClient: 1 },
        }),
      );
    });

    it('should throw when clientId cannot be loaded', async () => {
      mockClientIdService.loadClientId.and.resolveTo(null);

      await expectAsync(service.recoverFromLegacyData({})).toBeRejectedWithError(
        /Failed to load clientId/,
      );
    });

    it('should save state cache after recovery', async () => {
      const legacyData = { task: { ids: ['task1'] } };
      mockClientIdService.loadClientId.and.resolveTo('testClient');
      mockOpLogStore.append.and.resolveTo(undefined);
      mockOpLogStore.getLastSeq.and.resolveTo(5);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);

      await service.recoverFromLegacyData(legacyData);

      expect(mockOpLogStore.saveStateCache).toHaveBeenCalledWith(
        jasmine.objectContaining({
          state: legacyData,
          lastAppliedOpSeq: 5,
          vectorClock: { testClient: 1 },
        }),
      );
    });

    it('should sync PFAPI vector clock after recovery', async () => {
      const legacyData = { task: { ids: ['task1'] } };
      mockClientIdService.loadClientId.and.resolveTo('testClient');
      mockOpLogStore.append.and.resolveTo(undefined);
      mockOpLogStore.getLastSeq.and.resolveTo(1);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);

      await service.recoverFromLegacyData(legacyData);

      expect(mockPfapiService.pf.metaModel.syncVectorClock).toHaveBeenCalledWith({
        testClient: 1,
      });
    });
  });

  describe('recoverPendingRemoteOps', () => {
    it('should do nothing when no pending ops exist', async () => {
      mockOpLogStore.getPendingRemoteOps.and.resolveTo([]);

      await service.recoverPendingRemoteOps();

      expect(mockOpLogStore.markApplied).not.toHaveBeenCalled();
      expect(mockOpLogStore.markRejected).not.toHaveBeenCalled();
    });

    it('should mark valid pending ops as applied', async () => {
      const now = Date.now();
      const pendingOps = [
        { seq: 1, op: { id: 'op1' }, appliedAt: now - 1000, source: 'remote' },
        { seq: 2, op: { id: 'op2' }, appliedAt: now - 2000, source: 'remote' },
      ] as any;
      mockOpLogStore.getPendingRemoteOps.and.resolveTo(pendingOps);
      mockOpLogStore.markApplied.and.resolveTo(undefined);

      await service.recoverPendingRemoteOps();

      expect(mockOpLogStore.markApplied).toHaveBeenCalledWith([1, 2]);
    });

    it('should reject ops that exceed PENDING_OPERATION_EXPIRY_MS', async () => {
      const now = Date.now();
      const pendingOps = [
        { seq: 1, op: { id: 'valid' }, appliedAt: now - 1000, source: 'remote' }, // Valid
        {
          seq: 2,
          op: { id: 'expired' },
          appliedAt: now - PENDING_OPERATION_EXPIRY_MS - 1,
          source: 'remote',
        }, // Expired
      ] as any;
      mockOpLogStore.getPendingRemoteOps.and.resolveTo(pendingOps);
      mockOpLogStore.markApplied.and.resolveTo(undefined);
      mockOpLogStore.markRejected.and.resolveTo(undefined);

      await service.recoverPendingRemoteOps();

      expect(mockOpLogStore.markApplied).toHaveBeenCalledWith([1]);
      expect(mockOpLogStore.markRejected).toHaveBeenCalledWith(['expired']);
    });

    it('should reject all expired ops when all are stale', async () => {
      const now = Date.now();
      const expiredTime = now - PENDING_OPERATION_EXPIRY_MS - 100000;
      const pendingOps = [
        { seq: 1, op: { id: 'old1' }, appliedAt: expiredTime, source: 'remote' },
        { seq: 2, op: { id: 'old2' }, appliedAt: expiredTime - 1000, source: 'remote' },
      ] as any;
      mockOpLogStore.getPendingRemoteOps.and.resolveTo(pendingOps);
      mockOpLogStore.markRejected.and.resolveTo(undefined);

      await service.recoverPendingRemoteOps();

      expect(mockOpLogStore.markRejected).toHaveBeenCalledWith(['old1', 'old2']);
      expect(mockOpLogStore.markApplied).not.toHaveBeenCalled();
    });

    it('should handle mixed valid and expired ops correctly', async () => {
      const now = Date.now();
      const pendingOps = [
        { seq: 1, op: { id: 'valid1' }, appliedAt: now - 1000, source: 'remote' },
        {
          seq: 2,
          op: { id: 'expired1' },
          appliedAt: now - PENDING_OPERATION_EXPIRY_MS - 1,
          source: 'remote',
        },
        { seq: 3, op: { id: 'valid2' }, appliedAt: now - 5000, source: 'remote' },
        {
          seq: 4,
          op: { id: 'expired2' },
          appliedAt: now - PENDING_OPERATION_EXPIRY_MS - 2,
          source: 'remote',
        },
      ] as any;
      mockOpLogStore.getPendingRemoteOps.and.resolveTo(pendingOps);
      mockOpLogStore.markApplied.and.resolveTo(undefined);
      mockOpLogStore.markRejected.and.resolveTo(undefined);

      await service.recoverPendingRemoteOps();

      expect(mockOpLogStore.markApplied).toHaveBeenCalledWith([1, 3]);
      expect(mockOpLogStore.markRejected).toHaveBeenCalledWith(['expired1', 'expired2']);
    });
  });
});
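The expiry behavior tested above suggests an implementation along these lines; an inferred sketch, not the committed code, with the PendingOp shape assumed from the fixtures:

import { PENDING_OPERATION_EXPIRY_MS } from '../core/operation-log.const';

interface PendingOp {
  seq: number;
  op: { id: string };
  appliedAt: number;
  source: 'remote';
}

// Partition pending remote ops by age: fresh ones are marked applied by seq,
// ones older than PENDING_OPERATION_EXPIRY_MS are rejected by op id.
async function recoverPendingRemoteOps(opLogStore: {
  getPendingRemoteOps(): Promise<PendingOp[]>;
  markApplied(seqs: number[]): Promise<void>;
  markRejected(ids: string[]): Promise<void>;
}): Promise<void> {
  const pending = await opLogStore.getPendingRemoteOps();
  if (pending.length === 0) {
    return;
  }
  const cutoff = Date.now() - PENDING_OPERATION_EXPIRY_MS;
  const expired = pending.filter((p) => p.appliedAt < cutoff);
  const valid = pending.filter((p) => p.appliedAt >= cutoff);
  if (expired.length > 0) {
    await opLogStore.markRejected(expired.map((p) => p.op.id));
  }
  if (valid.length > 0) {
    await opLogStore.markApplied(valid.map((p) => p.seq));
  }
}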
src/app/op-log/store/operation-log-snapshot.service.spec.ts (new file, 297 lines)

@@ -0,0 +1,297 @@
|
|||
import { TestBed } from '@angular/core/testing';
|
||||
import { OperationLogSnapshotService } from './operation-log-snapshot.service';
|
||||
import { OperationLogStoreService } from './operation-log-store.service';
|
||||
import {
|
||||
CURRENT_SCHEMA_VERSION,
|
||||
MigratableStateCache,
|
||||
SchemaMigrationService,
|
||||
} from './schema-migration.service';
|
||||
import { VectorClockService } from '../sync/vector-clock.service';
|
||||
import { PfapiStoreDelegateService } from '../../pfapi/pfapi-store-delegate.service';
|
||||
|
||||
describe('OperationLogSnapshotService', () => {
|
||||
let service: OperationLogSnapshotService;
|
||||
let mockOpLogStore: jasmine.SpyObj<OperationLogStoreService>;
|
||||
let mockVectorClockService: jasmine.SpyObj<VectorClockService>;
|
||||
let mockStoreDelegateService: jasmine.SpyObj<PfapiStoreDelegateService>;
|
||||
let mockSchemaMigrationService: jasmine.SpyObj<SchemaMigrationService>;
|
||||
|
||||
beforeEach(() => {
|
||||
mockOpLogStore = jasmine.createSpyObj('OperationLogStoreService', [
|
||||
'saveStateCache',
|
||||
'saveStateCacheBackup',
|
||||
'clearStateCacheBackup',
|
||||
'restoreStateCacheFromBackup',
|
||||
'getLastSeq',
|
||||
]);
|
||||
mockVectorClockService = jasmine.createSpyObj('VectorClockService', [
|
||||
'getCurrentVectorClock',
|
||||
]);
|
||||
mockStoreDelegateService = jasmine.createSpyObj('PfapiStoreDelegateService', [
|
||||
'getAllSyncModelDataFromStore',
|
||||
]);
|
||||
mockSchemaMigrationService = jasmine.createSpyObj('SchemaMigrationService', [
|
||||
'migrateStateIfNeeded',
|
||||
]);
|
||||
|
||||
TestBed.configureTestingModule({
|
||||
providers: [
|
||||
OperationLogSnapshotService,
|
||||
{ provide: OperationLogStoreService, useValue: mockOpLogStore },
|
||||
{ provide: VectorClockService, useValue: mockVectorClockService },
|
||||
{ provide: PfapiStoreDelegateService, useValue: mockStoreDelegateService },
|
||||
{ provide: SchemaMigrationService, useValue: mockSchemaMigrationService },
|
||||
],
|
||||
});
|
||||
service = TestBed.inject(OperationLogSnapshotService);
|
||||
});
|
||||
|
||||
describe('isValidSnapshot', () => {
|
||||
const createValidSnapshot = (
|
||||
overrides: Partial<MigratableStateCache> = {},
|
||||
): MigratableStateCache => ({
|
||||
state: { task: {}, project: {}, globalConfig: {} },
|
||||
lastAppliedOpSeq: 1,
|
||||
vectorClock: { client1: 1 },
|
||||
compactedAt: Date.now(),
|
||||
schemaVersion: CURRENT_SCHEMA_VERSION,
|
||||
...overrides,
|
||||
});
|
||||
|
||||
it('should return true for valid snapshot with all core models', () => {
|
||||
const snapshot = createValidSnapshot();
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when state is missing', () => {
|
||||
const snapshot = createValidSnapshot({ state: undefined as any });
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when lastAppliedOpSeq is missing', () => {
|
||||
const snapshot = createValidSnapshot({ lastAppliedOpSeq: undefined as any });
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when state is null', () => {
|
||||
const snapshot = createValidSnapshot({ state: null as any });
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when state is not an object', () => {
|
||||
const snapshot = createValidSnapshot({ state: 'invalid' as any });
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when task model is missing', () => {
|
||||
const snapshot = createValidSnapshot({
|
||||
state: { project: {}, globalConfig: {} },
|
||||
});
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when project model is missing', () => {
|
||||
const snapshot = createValidSnapshot({
|
||||
state: { task: {}, globalConfig: {} },
|
||||
});
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return false when globalConfig model is missing', () => {
|
||||
const snapshot = createValidSnapshot({
|
||||
state: { task: {}, project: {} },
|
||||
});
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
|
||||
it('should return true when additional models beyond core exist', () => {
|
||||
const snapshot = createValidSnapshot({
|
||||
state: { task: {}, project: {}, globalConfig: {}, tag: {}, note: {} },
|
||||
});
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(true);
|
||||
});
|
||||
|
||||
it('should return false when lastAppliedOpSeq is not a number', () => {
|
||||
const snapshot = createValidSnapshot({ lastAppliedOpSeq: '5' as any });
|
||||
expect(service.isValidSnapshot(snapshot)).toBe(false);
|
||||
});
|
||||
});
|
||||
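  // Not part of the spec file - an inferred sketch of the validator exercised
  // above: a snapshot is valid when `state` is a real object containing the
  // three core models (task, project, globalConfig) and `lastAppliedOpSeq` is
  // a number. Only the behavior is fixed by the spec; the code is hypothetical.
  //
  // isValidSnapshot(cache: MigratableStateCache): boolean {
  //   const { state, lastAppliedOpSeq } = cache;
  //   if (!state || typeof state !== 'object') return false;
  //   if (typeof lastAppliedOpSeq !== 'number') return false;
  //   return ['task', 'project', 'globalConfig'].every((m) => m in state);
  // }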

  describe('saveCurrentStateAsSnapshot', () => {
    it('should save snapshot with current state data', async () => {
      const stateData = {
        task: { ids: ['t1'] },
        project: { ids: ['p1'] },
        globalConfig: {},
      };
      const vectorClock = { client1: 5, client2: 3 };
      mockStoreDelegateService.getAllSyncModelDataFromStore.and.resolveTo(
        stateData as any,
      );
      mockVectorClockService.getCurrentVectorClock.and.resolveTo(vectorClock);
      mockOpLogStore.getLastSeq.and.resolveTo(10);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);

      await service.saveCurrentStateAsSnapshot();

      expect(mockOpLogStore.saveStateCache).toHaveBeenCalledWith(
        jasmine.objectContaining({
          state: stateData,
          lastAppliedOpSeq: 10,
          vectorClock: vectorClock,
          schemaVersion: CURRENT_SCHEMA_VERSION,
        }),
      );
    });

    it('should not throw when save fails', async () => {
      mockStoreDelegateService.getAllSyncModelDataFromStore.and.resolveTo({} as any);
      mockVectorClockService.getCurrentVectorClock.and.resolveTo({});
      mockOpLogStore.getLastSeq.and.resolveTo(1);
      mockOpLogStore.saveStateCache.and.rejectWith(new Error('Save failed'));

      // Should not throw - errors are caught internally
      await expectAsync(service.saveCurrentStateAsSnapshot()).toBeResolved();
    });

    it('should include compactedAt timestamp', async () => {
      const beforeTime = Date.now();
      mockStoreDelegateService.getAllSyncModelDataFromStore.and.resolveTo({} as any);
      mockVectorClockService.getCurrentVectorClock.and.resolveTo({});
      mockOpLogStore.getLastSeq.and.resolveTo(1);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);

      await service.saveCurrentStateAsSnapshot();
      const afterTime = Date.now();

      const savedCache = mockOpLogStore.saveStateCache.calls.mostRecent().args[0];
      expect(savedCache.compactedAt).toBeGreaterThanOrEqual(beforeTime);
      expect(savedCache.compactedAt).toBeLessThanOrEqual(afterTime);
    });
  });

  describe('migrateSnapshotWithBackup', () => {
    const createSnapshot = (): MigratableStateCache => ({
      state: { task: {}, project: {}, globalConfig: {} },
      lastAppliedOpSeq: 5,
      vectorClock: { client1: 3 },
      compactedAt: Date.now(),
      schemaVersion: 1,
    });

    it('should create backup before migration', async () => {
      const snapshot = createSnapshot();
      const migratedSnapshot = { ...snapshot, schemaVersion: CURRENT_SCHEMA_VERSION };
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.returnValue(migratedSnapshot);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);
      mockOpLogStore.clearStateCacheBackup.and.resolveTo(undefined);

      await service.migrateSnapshotWithBackup(snapshot);

      expect(mockOpLogStore.saveStateCacheBackup).toHaveBeenCalled();
      expect(mockOpLogStore.saveStateCacheBackup).toHaveBeenCalledBefore(
        mockSchemaMigrationService.migrateStateIfNeeded,
      );
    });

    it('should save migrated snapshot after successful migration', async () => {
      const snapshot = createSnapshot();
      const migratedSnapshot = { ...snapshot, schemaVersion: CURRENT_SCHEMA_VERSION };
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.returnValue(migratedSnapshot);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);
      mockOpLogStore.clearStateCacheBackup.and.resolveTo(undefined);

      await service.migrateSnapshotWithBackup(snapshot);

      expect(mockOpLogStore.saveStateCache).toHaveBeenCalledWith(migratedSnapshot);
    });

    it('should clear backup after successful migration', async () => {
      const snapshot = createSnapshot();
      const migratedSnapshot = { ...snapshot, schemaVersion: CURRENT_SCHEMA_VERSION };
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.returnValue(migratedSnapshot);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);
      mockOpLogStore.clearStateCacheBackup.and.resolveTo(undefined);

      await service.migrateSnapshotWithBackup(snapshot);

      expect(mockOpLogStore.clearStateCacheBackup).toHaveBeenCalled();
    });

    it('should return migrated snapshot on success', async () => {
      const snapshot = createSnapshot();
      const migratedSnapshot = { ...snapshot, schemaVersion: CURRENT_SCHEMA_VERSION };
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.returnValue(migratedSnapshot);
      mockOpLogStore.saveStateCache.and.resolveTo(undefined);
      mockOpLogStore.clearStateCacheBackup.and.resolveTo(undefined);

      const result = await service.migrateSnapshotWithBackup(snapshot);

      expect(result).toBe(migratedSnapshot);
    });

    it('should restore backup when migration fails', async () => {
      const snapshot = createSnapshot();
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.throwError(
        new Error('Migration failed'),
      );
      mockOpLogStore.restoreStateCacheFromBackup.and.resolveTo(undefined);

      await expectAsync(
        service.migrateSnapshotWithBackup(snapshot),
      ).toBeRejectedWithError('Migration failed');

      expect(mockOpLogStore.restoreStateCacheFromBackup).toHaveBeenCalled();
    });

    it('should throw combined error when both migration and restore fail', async () => {
      const snapshot = createSnapshot();
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.throwError(
        new Error('Migration failed'),
      );
      mockOpLogStore.restoreStateCacheFromBackup.and.rejectWith(
        new Error('Restore failed'),
      );

      await expectAsync(
        service.migrateSnapshotWithBackup(snapshot),
      ).toBeRejectedWithError(
        /Schema migration failed and backup restore also failed.*Migration failed.*Restore failed/,
      );
    });

    it('should not clear backup when migration fails', async () => {
      const snapshot = createSnapshot();
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.throwError(
        new Error('Migration failed'),
      );
      mockOpLogStore.restoreStateCacheFromBackup.and.resolveTo(undefined);

      await expectAsync(service.migrateSnapshotWithBackup(snapshot)).toBeRejected();

      expect(mockOpLogStore.clearStateCacheBackup).not.toHaveBeenCalled();
    });

    it('should restore backup when saveStateCache fails after migration', async () => {
      const snapshot = createSnapshot();
      const migratedSnapshot = { ...snapshot, schemaVersion: CURRENT_SCHEMA_VERSION };
      mockOpLogStore.saveStateCacheBackup.and.resolveTo(undefined);
      mockSchemaMigrationService.migrateStateIfNeeded.and.returnValue(migratedSnapshot);
      mockOpLogStore.saveStateCache.and.rejectWith(new Error('Save failed'));
      mockOpLogStore.restoreStateCacheFromBackup.and.resolveTo(undefined);

      await expectAsync(
        service.migrateSnapshotWithBackup(snapshot),
      ).toBeRejectedWithError('Save failed');

      expect(mockOpLogStore.restoreStateCacheFromBackup).toHaveBeenCalled();
    });
  });
});
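The backup/restore choreography these tests pin down is roughly the following; an inferred sketch, not the committed implementation, with the dependencies passed explicitly for self-containment:

// Back up first, then migrate, persist, and clear the backup. On any failure
// restore the backup, and surface a combined error if the restore fails too.
async function migrateSnapshotWithBackup<T>(
  snapshot: T,
  deps: {
    saveStateCacheBackup(s: T): Promise<void>;
    saveStateCache(s: T): Promise<void>;
    clearStateCacheBackup(): Promise<void>;
    restoreStateCacheFromBackup(): Promise<void>;
    migrateStateIfNeeded(s: T): T;
  },
): Promise<T> {
  await deps.saveStateCacheBackup(snapshot);
  try {
    const migrated = deps.migrateStateIfNeeded(snapshot);
    await deps.saveStateCache(migrated);
    await deps.clearStateCacheBackup();
    return migrated;
  } catch (originalError) {
    try {
      await deps.restoreStateCacheFromBackup();
    } catch (restoreError) {
      throw new Error(
        `Schema migration failed and backup restore also failed: ${originalError}; ${restoreError}`,
      );
    }
    throw originalError;
  }
}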
src/app/op-log/store/sync-hydration.service.spec.ts (new file, 344 lines)

@@ -0,0 +1,344 @@
|
|||
import { TestBed } from '@angular/core/testing';
|
||||
import { Store } from '@ngrx/store';
|
||||
import { SyncHydrationService } from './sync-hydration.service';
|
||||
import { OperationLogStoreService } from './operation-log-store.service';
|
||||
import { PfapiService } from '../../pfapi/pfapi.service';
|
||||
import { ClientIdService } from '../../core/util/client-id.service';
|
||||
import { VectorClockService } from '../sync/vector-clock.service';
|
||||
import { ValidateStateService } from '../validation/validate-state.service';
|
||||
import { loadAllData } from '../../root-store/meta/load-all-data.action';
|
||||
import { ActionType, OpType } from '../core/operation.types';
|
||||
|
||||
describe('SyncHydrationService', () => {
|
||||
let service: SyncHydrationService;
|
||||
let mockStore: jasmine.SpyObj<Store>;
|
||||
let mockOpLogStore: jasmine.SpyObj<OperationLogStoreService>;
|
||||
let mockPfapiService: {
|
||||
pf: {
|
||||
getAllSyncModelDataFromModelCtrls: jasmine.Spy;
|
||||
metaModel: { load: jasmine.Spy };
|
||||
};
|
||||
};
|
||||
let mockClientIdService: jasmine.SpyObj<ClientIdService>;
|
||||
let mockVectorClockService: jasmine.SpyObj<VectorClockService>;
|
||||
let mockValidateStateService: jasmine.SpyObj<ValidateStateService>;
|
||||
|
||||
beforeEach(() => {
|
||||
mockStore = jasmine.createSpyObj('Store', ['dispatch']);
|
||||
mockOpLogStore = jasmine.createSpyObj('OperationLogStoreService', [
|
||||
'append',
|
||||
'getLastSeq',
|
||||
'saveStateCache',
|
||||
'setVectorClock',
|
||||
]);
|
||||
mockPfapiService = {
|
||||
pf: {
|
||||
getAllSyncModelDataFromModelCtrls: jasmine.createSpy().and.resolveTo({}),
|
||||
metaModel: { load: jasmine.createSpy().and.resolveTo(null) },
|
||||
},
|
||||
};
|
||||
mockClientIdService = jasmine.createSpyObj('ClientIdService', ['loadClientId']);
|
||||
mockVectorClockService = jasmine.createSpyObj('VectorClockService', [
|
||||
'getCurrentVectorClock',
|
||||
]);
|
||||
mockValidateStateService = jasmine.createSpyObj('ValidateStateService', [
|
||||
'validateAndRepair',
|
||||
]);
|
||||
|
||||
TestBed.configureTestingModule({
|
||||
providers: [
|
||||
SyncHydrationService,
|
||||
{ provide: Store, useValue: mockStore },
|
||||
{ provide: OperationLogStoreService, useValue: mockOpLogStore },
|
||||
{ provide: PfapiService, useValue: mockPfapiService },
|
||||
{ provide: ClientIdService, useValue: mockClientIdService },
|
||||
{ provide: VectorClockService, useValue: mockVectorClockService },
|
||||
{ provide: ValidateStateService, useValue: mockValidateStateService },
|
||||
],
|
||||
});
|
||||
service = TestBed.inject(SyncHydrationService);
|
||||
});
|
||||
|
||||
const setupDefaultMocks = (): void => {
|
||||
mockClientIdService.loadClientId.and.resolveTo('localClient');
|
||||
mockVectorClockService.getCurrentVectorClock.and.resolveTo({ localClient: 5 });
|
||||
mockOpLogStore.append.and.resolveTo(undefined);
|
||||
mockOpLogStore.getLastSeq.and.resolveTo(10);
|
||||
mockOpLogStore.saveStateCache.and.resolveTo(undefined);
|
||||
mockOpLogStore.setVectorClock.and.resolveTo(undefined);
|
||||
mockValidateStateService.validateAndRepair.and.returnValue({
|
||||
isValid: true,
|
||||
wasRepaired: false,
|
||||
});
|
||||
};
|
||||
|
||||
describe('hydrateFromRemoteSync', () => {
|
||||
beforeEach(setupDefaultMocks);
|
||||
|
||||
it('should merge downloaded data with archive data from DB', async () => {
|
||||
const downloadedData = { task: { ids: ['t1'] }, project: { ids: ['p1'] } };
|
||||
const archiveData = {
|
||||
archiveYoung: { data: 'young' },
|
||||
archiveOld: { data: 'old' },
|
||||
};
|
||||
mockPfapiService.pf.getAllSyncModelDataFromModelCtrls.and.resolveTo(archiveData);
|
||||
|
||||
await service.hydrateFromRemoteSync(downloadedData);
|
||||
|
||||
// Verify the merged data was used
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const payload = appendCall.args[0].payload as Record<string, unknown>;
|
||||
expect(payload['task']).toEqual({ ids: ['t1'] });
|
||||
expect(payload['project']).toEqual({ ids: ['p1'] });
|
||||
expect(payload['archiveYoung']).toEqual({ data: 'young' });
|
||||
expect(payload['archiveOld']).toEqual({ data: 'old' });
|
||||
});
|
||||
|
||||
it('should create SYNC_IMPORT operation with correct properties', async () => {
|
||||
await service.hydrateFromRemoteSync({ task: {} });
|
||||
|
||||
expect(mockOpLogStore.append).toHaveBeenCalledWith(
|
||||
jasmine.objectContaining({
|
||||
actionType: ActionType.LOAD_ALL_DATA,
|
||||
opType: OpType.SyncImport,
|
||||
entityType: 'ALL',
|
||||
clientId: 'localClient',
|
||||
}),
|
||||
'remote',
|
||||
);
|
||||
});
|
||||
|
||||
it('should merge local and PFAPI vector clocks', async () => {
|
||||
const localClock = { localClient: 5 };
|
||||
const pfapiClock = { remoteClient: 10, otherClient: 3 };
|
||||
mockVectorClockService.getCurrentVectorClock.and.resolveTo(localClock);
|
||||
mockPfapiService.pf.metaModel.load.and.resolveTo({ vectorClock: pfapiClock });
|
||||
|
||||
await service.hydrateFromRemoteSync({});
|
||||
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const vectorClock = appendCall.args[0].vectorClock;
|
||||
// Should have all clients with incremented local client
|
||||
expect(vectorClock['localClient']).toBe(6);
|
||||
expect(vectorClock['remoteClient']).toBe(10);
|
||||
expect(vectorClock['otherClient']).toBe(3);
|
||||
});
|
||||
|
||||
it('should handle missing PFAPI meta model gracefully', async () => {
|
||||
mockPfapiService.pf.metaModel.load.and.resolveTo(null);
|
||||
|
||||
await service.hydrateFromRemoteSync({});
|
||||
|
||||
// Should still work with just local clock
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const vectorClock = appendCall.args[0].vectorClock;
|
||||
expect(vectorClock['localClient']).toBe(6);
|
||||
});
|
||||
|
||||
it('should handle PFAPI meta model with missing vectorClock', async () => {
|
||||
mockPfapiService.pf.metaModel.load.and.resolveTo({ someOtherProp: 'value' });
|
||||
|
||||
await service.hydrateFromRemoteSync({});
|
||||
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const vectorClock = appendCall.args[0].vectorClock;
|
||||
expect(vectorClock['localClient']).toBe(6);
|
||||
});
|
||||
|
||||
it('should strip syncProvider from globalConfig.sync', async () => {
|
||||
const downloadedData = {
|
||||
task: {},
|
||||
globalConfig: {
|
||||
sync: { syncProvider: 'dropbox', someOther: 'setting' },
|
||||
otherSetting: 'value',
|
||||
},
|
||||
};
|
||||
|
||||
await service.hydrateFromRemoteSync(downloadedData);
|
||||
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const payload = appendCall.args[0].payload as Record<string, unknown>;
|
||||
const globalConfig = payload['globalConfig'] as Record<string, unknown>;
|
||||
const sync = globalConfig['sync'] as Record<string, unknown>;
|
||||
expect(sync['syncProvider']).toBeNull();
|
||||
expect(sync['someOther']).toBe('setting');
|
||||
expect(globalConfig['otherSetting']).toBe('value');
|
||||
});
|
||||
|
||||
it('should not modify data without globalConfig', async () => {
|
||||
const downloadedData = { task: { ids: ['t1'] } };
|
||||
|
||||
await service.hydrateFromRemoteSync(downloadedData);
|
||||
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const payload = appendCall.args[0].payload as Record<string, unknown>;
|
||||
expect(payload['task']).toEqual({ ids: ['t1'] });
|
||||
});
|
||||
|
||||
it('should not modify globalConfig without sync property', async () => {
|
||||
const downloadedData = {
|
||||
task: {},
|
||||
globalConfig: { lang: 'en' },
|
||||
};
|
||||
|
||||
await service.hydrateFromRemoteSync(downloadedData);
|
||||
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const payload = appendCall.args[0].payload as Record<string, unknown>;
|
||||
const globalConfig = payload['globalConfig'] as Record<string, unknown>;
|
||||
expect(globalConfig['lang']).toBe('en');
|
||||
});
|
||||
|
||||
it('should throw when clientId cannot be loaded', async () => {
|
||||
mockClientIdService.loadClientId.and.resolveTo(null);
|
||||
|
||||
await expectAsync(service.hydrateFromRemoteSync({})).toBeRejectedWithError(
|
||||
/Failed to load clientId/,
|
||||
);
|
||||
});
|
||||
|
||||
it('should save state cache after appending operation', async () => {
|
||||
mockOpLogStore.getLastSeq.and.resolveTo(42);
|
||||
|
||||
await service.hydrateFromRemoteSync({});
|
||||
|
||||
expect(mockOpLogStore.saveStateCache).toHaveBeenCalledWith(
|
||||
jasmine.objectContaining({
|
||||
lastAppliedOpSeq: 42,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should update vector clock store after sync', async () => {
|
||||
mockVectorClockService.getCurrentVectorClock.and.resolveTo({ localClient: 5 });
|
||||
mockPfapiService.pf.metaModel.load.and.resolveTo({ vectorClock: { remote: 3 } });
|
||||
|
||||
await service.hydrateFromRemoteSync({});
|
||||
|
||||
expect(mockOpLogStore.setVectorClock).toHaveBeenCalledWith(
|
||||
jasmine.objectContaining({
|
||||
localClient: 6,
|
||||
remote: 3,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should dispatch loadAllData with synced data', async () => {
|
||||
const downloadedData = { task: { ids: ['t1'] }, project: {} };
|
||||
mockPfapiService.pf.getAllSyncModelDataFromModelCtrls.and.resolveTo({});
|
||||
|
||||
await service.hydrateFromRemoteSync(downloadedData);
|
||||
|
||||
expect(mockStore.dispatch).toHaveBeenCalledWith(
|
||||
loadAllData({
|
||||
appDataComplete: jasmine.objectContaining({
|
||||
task: { ids: ['t1'] },
|
||||
}) as any,
|
||||
}),
|
||||
);
|
||||
});
|
||||
|
||||
it('should use repaired state when validation detects issues', async () => {
|
||||
const downloadedData = { task: { ids: ['t1'] } };
|
||||
const repairedState = { task: { ids: ['t1'], repaired: true } } as any;
|
||||
mockValidateStateService.validateAndRepair.and.returnValue({
|
||||
isValid: true,
|
||||
wasRepaired: true,
|
||||
repairedState,
|
||||
});
|
||||
|
||||
await service.hydrateFromRemoteSync(downloadedData);
|
||||
|
||||
expect(mockStore.dispatch).toHaveBeenCalledWith(
|
||||
loadAllData({
|
||||
appDataComplete: repairedState as any,
|
||||
}),
|
||||
);
|
||||
// State cache should also use repaired state
|
||||
const saveCacheCall = mockOpLogStore.saveStateCache.calls.mostRecent();
|
||||
expect(saveCacheCall.args[0].state).toBe(repairedState);
|
||||
});
|
||||
|
||||
it('should use original data when no repair needed', async () => {
|
||||
const downloadedData = { task: { ids: ['t1'] } };
|
||||
mockValidateStateService.validateAndRepair.and.returnValue({
|
||||
isValid: true,
|
||||
wasRepaired: false,
|
||||
});
|
||||
|
||||
await service.hydrateFromRemoteSync(downloadedData);
|
||||
|
||||
// Should dispatch with the original (merged, stripped) data, not null
|
||||
expect(mockStore.dispatch).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should handle null downloadedMainModelData by using only DB data', async () => {
|
||||
const dbData = { archiveYoung: { data: 'archive' } };
|
||||
mockPfapiService.pf.getAllSyncModelDataFromModelCtrls.and.resolveTo(dbData);
|
||||
|
||||
await service.hydrateFromRemoteSync(undefined);
|
||||
|
||||
const appendCall = mockOpLogStore.append.calls.mostRecent();
|
||||
const payload = appendCall.args[0].payload as Record<string, unknown>;
|
||||
expect(payload['archiveYoung']).toEqual({ data: 'archive' });
|
||||
});
|
||||
|
||||
it('should propagate errors from append', async () => {
|
||||
mockOpLogStore.append.and.rejectWith(new Error('Append failed'));
|
||||
|
||||
await expectAsync(service.hydrateFromRemoteSync({})).toBeRejectedWithError(
|
||||
'Append failed',
|
||||
);
|
||||
});
|
||||
|
||||
it('should propagate errors from saveStateCache', async () => {
|
||||
mockOpLogStore.saveStateCache.and.rejectWith(new Error('Save failed'));
|
||||
|
||||
await expectAsync(service.hydrateFromRemoteSync({})).toBeRejectedWithError(
|
||||
'Save failed',
|
||||
);
|
||||
});
|
||||
});
|
||||
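  // Not part of the spec file - the merge rule the vector clock tests above
  // pin down, sketched under assumed shapes: entry-wise maximum of the local
  // and PFAPI clocks, then one increment for the local client.
  //
  // const merged: Record<string, number> = { ...pfapiClock };
  // for (const [clientId, count] of Object.entries(localClock)) {
  //   merged[clientId] = Math.max(merged[clientId] ?? 0, count);
  // }
  // merged[localClientId] = (merged[localClientId] ?? 0) + 1;
  // // e.g. { localClient: 5 } merged with { remoteClient: 10, otherClient: 3 }
  // //   -> { localClient: 6, remoteClient: 10, otherClient: 3 }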

  describe('_stripLocalOnlySettings (via hydrateFromRemoteSync)', () => {
    beforeEach(setupDefaultMocks);

    it('should handle non-object data gracefully', async () => {
      // Pass null - the merged data should still work
      mockPfapiService.pf.getAllSyncModelDataFromModelCtrls.and.resolveTo(null as any);

      // Should not throw when calling hydrateFromRemoteSync with data that gets
      // merged with null from DB
      await service.hydrateFromRemoteSync({ task: {} });

      // If it didn't throw, the stripping handled the edge case
      expect(mockOpLogStore.append).toHaveBeenCalled();
    });

    it('should preserve all other globalConfig properties', async () => {
      const downloadedData = {
        globalConfig: {
          lang: 'de',
          theme: 'dark',
          sync: {
            syncProvider: 'webdav',
            syncInterval: 300,
            isEnabled: true,
          },
        },
      };

      await service.hydrateFromRemoteSync(downloadedData);

      const appendCall = mockOpLogStore.append.calls.mostRecent();
      const payload = appendCall.args[0].payload as Record<string, unknown>;
      const globalConfig = payload['globalConfig'] as Record<string, unknown>;
      expect(globalConfig['lang']).toBe('de');
      expect(globalConfig['theme']).toBe('dark');
      const sync = globalConfig['sync'] as Record<string, unknown>;
      expect(sync['syncInterval']).toBe(300);
      expect(sync['isEnabled']).toBe(true);
      expect(sync['syncProvider']).toBeNull();
    });
  });
});
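Finally, the stripping behavior covered by the last two specs amounts to nulling out the provider choice while copying everything else through; an inferred sketch, since the real helper is private to the service:

// Null out syncProvider so one device never inherits another device's
// provider choice; all other settings pass through untouched.
const stripLocalOnlySettings = (data: Record<string, any>): Record<string, any> => {
  const sync = data?.globalConfig?.sync;
  if (!sync || typeof sync !== 'object') {
    return data;
  }
  return {
    ...data,
    globalConfig: {
      ...data.globalConfig,
      sync: { ...sync, syncProvider: null },
    },
  };
};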