diff --git a/e2e/tests/sync/supersync-backup-import-id-mismatch.spec.ts b/e2e/tests/sync/supersync-backup-import-id-mismatch.spec.ts new file mode 100644 index 000000000..952d8f952 --- /dev/null +++ b/e2e/tests/sync/supersync-backup-import-id-mismatch.spec.ts @@ -0,0 +1,509 @@ +import { test, expect } from '../../fixtures/supersync.fixture'; +import { + createTestUser, + getSuperSyncConfig, + createSimulatedClient, + closeClient, + waitForTask, + hasTask, + type SimulatedE2EClient, +} from '../../utils/supersync-helpers'; +import * as fs from 'fs'; +import * as path from 'path'; +import * as os from 'os'; + +/** + * SuperSync Backup Import ID Mismatch Bug Reproduction Test + * + * BUG: When a backup is imported and synced, the client creates a BACKUP_IMPORT + * operation with a local ID. However, uploadSnapshot() does NOT send this ID to + * the server - the server generates its own ID. When the client later downloads + * operations, it doesn't recognize the server's BACKUP_IMPORT (different ID) as + * a duplicate, so it RE-APPLIES the old backup state, overwriting any tasks + * created after the import. + * + * ROOT CAUSE: Missing op.id parameter in uploadSnapshot() interface at + * src/app/op-log/sync-providers/provider.interface.ts:277-284 + * + * EXPECTED: Tasks created after backup import should survive subsequent syncs. + * ACTUAL (BUG): Tasks created after backup import are LOST because the old + * BACKUP_IMPORT state is re-applied. + * + * Run with: npm run e2e:supersync:file e2e/tests/sync/supersync-backup-import-id-mismatch.spec.ts + */ + +/** + * Helper to export backup by triggering the UI export button. + * This ensures the backup data is in the correct format for import. 
+ */ +const exportBackup = async (page: SimulatedE2EClient['page']): Promise<string> => { + // Navigate to settings page + await page.goto('/#/config'); + await page.waitForLoadState('networkidle'); + await page.waitForTimeout(1000); + + // Expand Import/Export section + const importExportSection = page.locator('collapsible:has-text("Import/Export")'); + await importExportSection.scrollIntoViewIfNeeded(); + await page.waitForTimeout(300); + + const collapsibleHeader = importExportSection.locator('.collapsible-header, .header'); + await collapsibleHeader.click(); + await page.waitForTimeout(500); + + // Set up download handler before clicking export + const tempDir = os.tmpdir(); + const backupPath = path.join(tempDir, `sp-backup-idmismatch-${Date.now()}.json`); + + // Wait for download and save to temp file + const downloadPromise = page.waitForEvent('download'); + + // Click the export button (export current data) + const exportBtn = page.locator('file-imex button:has-text("Export")'); + await exportBtn.waitFor({ state: 'visible', timeout: 5000 }); + await exportBtn.click(); + + const download = await downloadPromise; + await download.saveAs(backupPath); + + // Navigate back to tasks + await page.goto('/#/tag/TODAY/tasks'); + await page.waitForLoadState('networkidle'); + await page.waitForTimeout(500); + + return backupPath; +}; + +/** + * Helper to import backup file. + * Returns true if import succeeded, false if it failed. 
+ */ +const importBackup = async ( + page: SimulatedE2EClient['page'], + backupPath: string, +): Promise<boolean> => { + // Track console errors during import + let importFailed = false; + const errorHandler = (msg: { text: () => string }): void => { + if (msg.text().includes('Import process failed')) { + importFailed = true; + } + }; + page.on('console', errorHandler); + + await page.goto('/#/config'); + await page.waitForLoadState('networkidle'); + await page.waitForTimeout(1000); + + const importExportSection = page.locator('collapsible:has-text("Import/Export")'); + await importExportSection.scrollIntoViewIfNeeded(); + await page.waitForTimeout(300); + + const collapsibleHeader = importExportSection.locator('.collapsible-header, .header'); + await collapsibleHeader.click(); + await page.waitForTimeout(500); + + const fileInput = page.locator('file-imex input[type="file"]'); + await fileInput.setInputFiles(backupPath); + + // Wait for import to complete (app navigates to TODAY tag) or error + const startTime = Date.now(); + const timeout = 30000; + while (Date.now() - startTime < timeout) { + if (importFailed) { + page.off('console', errorHandler); + return false; + } + const url = page.url(); + if (url.includes('tag') && url.includes('TODAY')) { + break; + } + await page.waitForTimeout(500); + } + + await page.waitForLoadState('networkidle'); + await page.waitForTimeout(1000); + + page.off('console', errorHandler); + return !importFailed; +}; + +test.describe('@supersync Backup Import ID Mismatch Bug', () => { + /** + * CRITICAL BUG VERIFICATION TEST + * + * This test verifies that the BACKUP_IMPORT operation ID mismatch bug exists. + * + * BUG: When uploading a BACKUP_IMPORT via uploadSnapshot(), the client's op.id + * is NOT sent to the server. The server generates its own ID for the operation. + * + * This test: + * 1. Client A imports backup (creates BACKUP_IMPORT with LOCAL ID) + * 2. Client A syncs (uploads BACKUP_IMPORT - server generates DIFFERENT ID) + * 3. 
Query IndexedDB for local BACKUP_IMPORT op ID + * 4. Query server API for the operation ID it stored + * 5. VERIFY: The IDs are DIFFERENT (this proves the bug exists) + * + * When the bug is FIXED, the IDs should match and this test should FAIL. + */ + test('BUG: Server stores BACKUP_IMPORT with different ID than client (ID mismatch)', async ({ + browser, + baseURL, + testRunId, + }) => { + let clientA: SimulatedE2EClient | null = null; + let backupPath: string | null = null; + + try { + const user = await createTestUser(testRunId + '-idmismatch'); + const syncConfig = getSuperSyncConfig(user); + + // ============ PHASE 1: Setup - Connect and sync ============ + console.log('[ID Mismatch Test] Phase 1: Setting up client'); + + clientA = await createSimulatedClient(browser, baseURL!, 'A', testRunId); + await clientA.sync.setupSuperSync(syncConfig); + await clientA.sync.syncAndWait(); + console.log('[ID Mismatch Test] Client A connected and synced'); + + // Create initial task that will be in the backup + const initialTaskName = `InitialTask-${testRunId}`; + await clientA.workView.addTask(initialTaskName); + await waitForTask(clientA.page, initialTaskName); + await clientA.sync.syncAndWait(); + console.log(`[ID Mismatch Test] Created and synced initial task`); + + // ============ PHASE 2: Export and import backup ============ + console.log('[ID Mismatch Test] Phase 2: Export and import backup'); + + backupPath = await exportBackup(clientA.page); + const importSuccess = await importBackup(clientA.page, backupPath); + if (!importSuccess) { + throw new Error('Backup import failed'); + } + console.log( + '[ID Mismatch Test] Backup imported (BACKUP_IMPORT created with local ID)', + ); + + // ============ PHASE 3: Get local BACKUP_IMPORT op ID from IndexedDB ============ + console.log('[ID Mismatch Test] Phase 3: Getting local BACKUP_IMPORT op ID'); + + // Wait for IndexedDB writes to complete + await clientA.page.waitForTimeout(1000); + + const localOpData = await 
clientA.page.evaluate(async () => { + // Open SUP_OPS database + const db = await new Promise<IDBDatabase>((resolve, reject) => { + const request = indexedDB.open('SUP_OPS'); + request.onsuccess = () => resolve(request.result); + request.onerror = () => reject(request.error); + }); + + // Compact operation format uses short keys: + // o = opType, a = actionType, e = entityType, etc. + interface CompactOp { + id: string; + o: string; // opType + a: string; // actionType short code + e: string; // entityType + } + + // Get all operations from the 'ops' store + const ops = await new Promise<Array<{ seq: number; op: CompactOp }>>( + (resolve, reject) => { + const tx = db.transaction('ops', 'readonly'); + const store = tx.objectStore('ops'); + const request = store.getAll(); + request.onsuccess = () => resolve(request.result || []); + request.onerror = () => reject(request.error); + }, + ); + + db.close(); + + // Debug: Log all operations + const debugInfo = ops.map((entry) => ({ + seq: entry.seq, + opType: entry.op?.o, + entityType: entry.op?.e, + id: entry.op?.id?.substring(0, 20), + })); + + // Find BACKUP_IMPORT operation (opType 'BACKUP_IMPORT') + const backupImportOp = ops.find((entry) => entry.op?.o === 'BACKUP_IMPORT'); + + return { + opId: backupImportOp?.op.id || null, + debugInfo, + totalOps: ops.length, + }; + }); + + console.log( + `[ID Mismatch Test] Debug: Found ${localOpData.totalOps} ops:`, + JSON.stringify(localOpData.debugInfo), + ); + const localOpId = localOpData.opId; + + console.log(`[ID Mismatch Test] Local BACKUP_IMPORT op ID: ${localOpId}`); + expect(localOpId).toBeTruthy(); + + // ============ PHASE 4: Sync to upload BACKUP_IMPORT ============ + console.log('[ID Mismatch Test] Phase 4: Syncing to upload BACKUP_IMPORT'); + + await clientA.sync.setupSuperSync(syncConfig); + await clientA.sync.syncAndWait(); + console.log('[ID Mismatch Test] Sync completed - BACKUP_IMPORT uploaded to server'); + + // ============ PHASE 5: Query server for the operation ID it stored ============ + console.log('[ID 
Mismatch Test] Phase 5: Querying server for stored operation ID'); + + // Query the PostgreSQL database directly to get the server's operation ID + // The download API doesn't return the SYNC_IMPORT due to server optimization + // (it skips to the SYNC_IMPORT's seq but doesn't include the operation itself) + const { execSync } = await import('child_process'); + + // Query the database for SYNC_IMPORT operations for this user + // Database uses snake_case column names: user_id, op_type, server_seq + const dbQuery = + `SELECT id, op_type FROM operations ` + + `WHERE user_id = '${user.userId}' ` + + `AND op_type IN ('SYNC_IMPORT', 'BACKUP_IMPORT', 'REPAIR') ` + + `ORDER BY server_seq DESC LIMIT 1;`; + + let serverOpId: string | null = null; + try { + const dbResult = execSync( + `docker exec super-productivity-db-1 psql -U supersync -d supersync_db -t -A -c "${dbQuery}"`, + { encoding: 'utf8' }, + ).trim(); + + console.log(`[ID Mismatch Test] Database query result: ${dbResult}`); + + if (dbResult) { + // Result format: "uuid|op_type" + const [opId] = dbResult.split('|'); + serverOpId = opId || null; + } + } catch (err) { + console.error(`[ID Mismatch Test] Database query failed:`, err); + } + + console.log(`[ID Mismatch Test] Server operation ID from database: ${serverOpId}`); + + // ============ PHASE 6: CRITICAL ASSERTION - IDs should match ============ + console.log('[ID Mismatch Test] Phase 6: Comparing local and server op IDs'); + console.log(`[ID Mismatch Test] Local ID: ${localOpId}`); + console.log(`[ID Mismatch Test] Server ID: ${serverOpId}`); + + // BUG: The IDs are DIFFERENT because uploadSnapshot doesn't send op.id + // When the bug is FIXED, this assertion should PASS (IDs match) + // Currently, this test FAILS because the IDs are different + expect(serverOpId).toBeTruthy(); + expect( + localOpId, + 'BUG: Local BACKUP_IMPORT op ID should match server op ID. ' + + 'The server generated a different ID because uploadSnapshot() does not send op.id. 
' + + `Local: ${localOpId}, Server: ${serverOpId}`, + ).toBe(serverOpId); + + console.log('[ID Mismatch Test] ✓ IDs MATCH - Bug is fixed!'); + } finally { + if (clientA) await closeClient(clientA); + if (backupPath && fs.existsSync(backupPath)) { + fs.unlinkSync(backupPath); + } + } + }); + + /** + * CRITICAL BUG REPRODUCTION TEST - With concurrent client causing rejection + * + * This test more closely reproduces the actual bug scenario: + * 1. Client A imports backup → BACKUP_IMPORT with local ID X + * 2. Client A syncs → server creates BACKUP_IMPORT with ID Y + * 3. Client A creates new task + * 4. Client B syncs (causes concurrent operations on server) + * 5. Client A syncs → some ops may be rejected, triggering re-download + * 6. Re-download returns BACKUP_IMPORT with ID Y + * 7. Client A doesn't recognize ID Y → re-applies old state → DATA LOST + */ + test('Tasks should survive when concurrent client causes operation rejection', async ({ + browser, + baseURL, + testRunId, + }) => { + let clientA: SimulatedE2EClient | null = null; + let clientB: SimulatedE2EClient | null = null; + let backupPath: string | null = null; + + try { + const user = await createTestUser(testRunId + '-concurrent'); + const syncConfig = getSuperSyncConfig(user); + + // ============ Setup: Client A connects first ============ + console.log('[Concurrent Test] Phase 1: Client A setup'); + + clientA = await createSimulatedClient(browser, baseURL!, 'A', testRunId); + await clientA.sync.setupSuperSync(syncConfig); + await clientA.sync.syncAndWait(); + + // Create initial task + const initialTaskName = `InitialConcurrent-${testRunId}`; + await clientA.workView.addTask(initialTaskName); + await waitForTask(clientA.page, initialTaskName); + await clientA.sync.syncAndWait(); + console.log(`[Concurrent Test] Created initial task: ${initialTaskName}`); + + // ============ Client A exports and imports backup ============ + console.log('[Concurrent Test] Phase 2: Export and import backup'); + + 
backupPath = await exportBackup(clientA.page); + const importSuccess = await importBackup(clientA.page, backupPath); + if (!importSuccess) { + throw new Error('Backup import failed'); + } + console.log('[Concurrent Test] Backup imported'); + + // Re-setup sync and sync (uploads BACKUP_IMPORT) + await clientA.sync.setupSuperSync(syncConfig); + await clientA.sync.syncAndWait(); + console.log( + '[Concurrent Test] BACKUP_IMPORT uploaded (server assigns different ID)', + ); + + // ============ Client A creates task AFTER backup import ============ + console.log('[Concurrent Test] Phase 3: Create task after import'); + + const taskAfterImportName = `TaskAfterConcurrent-${testRunId}`; + await clientA.workView.addTask(taskAfterImportName); + await waitForTask(clientA.page, taskAfterImportName); + console.log(`[Concurrent Test] Created post-import task: ${taskAfterImportName}`); + + // ============ Client B joins and creates concurrent operations ============ + console.log('[Concurrent Test] Phase 4: Client B creates concurrent operations'); + + clientB = await createSimulatedClient(browser, baseURL!, 'B', testRunId); + await clientB.sync.setupSuperSync(syncConfig); + await clientB.sync.syncAndWait(); + + // Client B creates a task (this creates concurrent ops on server) + const clientBTask = `ClientBTask-${testRunId}`; + await clientB.workView.addTask(clientBTask); + await waitForTask(clientB.page, clientBTask); + await clientB.sync.syncAndWait(); + console.log(`[Concurrent Test] Client B created task: ${clientBTask}`); + + // ============ Client A syncs (may trigger rejection/re-download) ============ + console.log( + '[Concurrent Test] Phase 5: Client A syncs with concurrent server state', + ); + + // Verify task exists before sync + const hasBefore = await hasTask(clientA.page, taskAfterImportName); + console.log(`[Concurrent Test] Before sync - TaskAfterImport exists: ${hasBefore}`); + expect(hasBefore).toBe(true); + + await clientA.sync.syncAndWait(); + 
console.log('[Concurrent Test] Client A sync completed'); + + // Wait for state to settle + await clientA.page.waitForTimeout(2000); + + // ============ Verify task survived ============ + console.log('[Concurrent Test] Phase 6: Verify task survived'); + + await clientA.page.goto('/#/tag/TODAY/tasks'); + await clientA.page.waitForLoadState('networkidle'); + await clientA.page.waitForTimeout(1000); + + const hasAfter = await hasTask(clientA.page, taskAfterImportName); + console.log(`[Concurrent Test] After sync - TaskAfterImport exists: ${hasAfter}`); + + // CRITICAL ASSERTION + expect( + hasAfter, + 'TaskAfterImport should survive concurrent sync. ' + + 'Bug: BACKUP_IMPORT was re-applied with server ID.', + ).toBe(true); + + console.log('[Concurrent Test] ✓ Test PASSED'); + } finally { + if (clientA) await closeClient(clientA); + if (clientB) await closeClient(clientB); + if (backupPath && fs.existsSync(backupPath)) { + fs.unlinkSync(backupPath); + } + } + }); + + /** + * Additional verification: Check that a second sync also doesn't lose data + * + * This ensures the fix is robust - even multiple syncs after backup import + * should not lose data. 
+ */ + test('Multiple syncs after backup import should not lose data', async ({ + browser, + baseURL, + testRunId, + }) => { + let clientA: SimulatedE2EClient | null = null; + let backupPath: string | null = null; + + try { + const user = await createTestUser(testRunId + '-multisync'); + const syncConfig = getSuperSyncConfig(user); + + // Setup + clientA = await createSimulatedClient(browser, baseURL!, 'A', testRunId); + await clientA.sync.setupSuperSync(syncConfig); + await clientA.sync.syncAndWait(); + + // Create initial task + const initialTaskName = `InitialMulti-${testRunId}`; + await clientA.workView.addTask(initialTaskName); + await waitForTask(clientA.page, initialTaskName); + await clientA.sync.syncAndWait(); + + // Export and import backup + backupPath = await exportBackup(clientA.page); + const importSuccess = await importBackup(clientA.page, backupPath); + if (!importSuccess) { + throw new Error('Backup import failed - cannot test multi-sync scenario'); + } + + // Re-setup sync and sync after import + await clientA.sync.setupSuperSync(syncConfig); + await clientA.sync.syncAndWait(); + + // Create task after import + const taskAfterImportName = `TaskAfterMulti-${testRunId}`; + await clientA.workView.addTask(taskAfterImportName); + await waitForTask(clientA.page, taskAfterImportName); + + // Multiple syncs - each could potentially trigger the bug + console.log('[Multi-Sync Test] Performing multiple syncs...'); + for (let i = 1; i <= 3; i++) { + await clientA.sync.syncAndWait(); + console.log(`[Multi-Sync Test] Sync ${i} completed`); + + await clientA.page.waitForTimeout(500); + + // Check after each sync + const hasNew = await hasTask(clientA.page, taskAfterImportName); + expect( + hasNew, + `TaskAfterImport should exist after sync ${i}. 
Bug: BACKUP_IMPORT re-applied.`, + ).toBe(true); + } + + console.log('[Multi-Sync Test] ✓ All syncs completed, task survived'); + } finally { + if (clientA) await closeClient(clientA); + if (backupPath && fs.existsSync(backupPath)) { + fs.unlinkSync(backupPath); + } + } + }); +}); diff --git a/packages/super-sync-server/src/sync/sync.routes.ts b/packages/super-sync-server/src/sync/sync.routes.ts index 1aee6e373..35411bbbd 100644 --- a/packages/super-sync-server/src/sync/sync.routes.ts +++ b/packages/super-sync-server/src/sync/sync.routes.ts @@ -90,6 +90,8 @@ const UploadSnapshotSchema = z.object({ vectorClock: z.record(z.string(), z.number()), schemaVersion: z.number().optional(), isPayloadEncrypted: z.boolean().optional(), + // Client's operation ID - server MUST use this to prevent ID mismatch bugs + opId: z.string().uuid().optional(), }); // Error helper @@ -643,6 +645,7 @@ export const syncRoutes = async (fastify: FastifyInstance): Promise => { vectorClock, schemaVersion, isPayloadEncrypted, + opId, } = parseResult.data; const syncService = getSyncService(); @@ -734,8 +737,10 @@ export const syncRoutes = async (fastify: FastifyInstance): Promise => { // Create a SYNC_IMPORT operation // Use the correct NgRx action type so the operation can be replayed on other clients + // FIX: Use client's opId if provided to prevent ID mismatch bugs + // When client doesn't send opId (legacy clients), fall back to server-generated UUID const op = { - id: uuidv7(), + id: opId ?? 
uuidv7(), clientId, actionType: '[SP_ALL] Load(import) all data', opType: 'SYNC_IMPORT' as const, diff --git a/src/app/op-log/sync-providers/file-based/file-based-sync-adapter.service.ts b/src/app/op-log/sync-providers/file-based/file-based-sync-adapter.service.ts index 7a0b75f53..0157efb06 100644 --- a/src/app/op-log/sync-providers/file-based/file-based-sync-adapter.service.ts +++ b/src/app/op-log/sync-providers/file-based/file-based-sync-adapter.service.ts @@ -336,6 +336,7 @@ export class FileBasedSyncAdapterService { vectorClock: Record<string, number>, schemaVersion: number, isPayloadEncrypted?: boolean, + _opId?: string, // Not used in file-based sync (operation IDs are client-local) ): Promise => { return this._uploadSnapshot( provider, diff --git a/src/app/op-log/sync-providers/provider.interface.ts b/src/app/op-log/sync-providers/provider.interface.ts index d014ca1de..c7a6a9180 100644 --- a/src/app/op-log/sync-providers/provider.interface.ts +++ b/src/app/op-log/sync-providers/provider.interface.ts @@ -273,6 +273,7 @@ export interface OperationSyncCapable { * @param vectorClock Current vector clock state * @param schemaVersion Schema version of the state * @param isPayloadEncrypted Whether the state payload is E2E encrypted + * @param opId Client's operation ID - server MUST use this ID to prevent ID mismatch bugs */ uploadSnapshot( state: unknown, @@ -281,6 +282,7 @@ vectorClock: Record<string, number>, schemaVersion: number, isPayloadEncrypted?: boolean, + opId?: string, ): Promise; /** diff --git a/src/app/op-log/sync-providers/super-sync/super-sync.ts b/src/app/op-log/sync-providers/super-sync/super-sync.ts index 228398a33..b96176f27 100644 --- a/src/app/op-log/sync-providers/super-sync/super-sync.ts +++ b/src/app/op-log/sync-providers/super-sync/super-sync.ts @@ -207,12 +207,14 @@ export class SuperSyncProvider vectorClock: Record<string, number>, schemaVersion: number, isPayloadEncrypted?: boolean, + opId?: string, ): Promise { SyncLog.debug(this.logLabel, 
'uploadSnapshot', { clientId, reason, schemaVersion, isPayloadEncrypted, + opId, }); const cfg = await this._cfgOrError(); @@ -224,6 +226,7 @@ export class SuperSyncProvider vectorClock, schemaVersion, isPayloadEncrypted, + opId, // CRITICAL: Server must use this ID to prevent ID mismatch bugs }); // On Android, use CapacitorHttp with base64-encoded gzip diff --git a/src/app/op-log/sync/operation-log-upload.service.spec.ts b/src/app/op-log/sync/operation-log-upload.service.spec.ts index 0c148049b..29e1345f7 100644 --- a/src/app/op-log/sync/operation-log-upload.service.spec.ts +++ b/src/app/op-log/sync/operation-log-upload.service.spec.ts @@ -606,6 +606,7 @@ describe('OperationLogUploadService', () => { jasmine.anything(), jasmine.anything(), false, // isPayloadEncrypted + 'op-1', // op.id ); }); @@ -622,6 +623,7 @@ describe('OperationLogUploadService', () => { jasmine.anything(), jasmine.anything(), false, // isPayloadEncrypted + 'op-1', // op.id ); }); @@ -638,6 +640,7 @@ describe('OperationLogUploadService', () => { jasmine.anything(), jasmine.anything(), false, // isPayloadEncrypted + 'op-1', // op.id ); }); @@ -782,6 +785,46 @@ describe('OperationLogUploadService', () => { expect(mockApiProvider.setLastServerSeq).toHaveBeenCalledWith(42); }); + /** + * FIX VERIFIED: uploadSnapshot now receives op.id to prevent ID mismatch + * + * BACKGROUND: Previously uploadSnapshot() was called WITHOUT the client's op.id. + * The server would generate its own ID, causing filterNewOps() to not recognize + * the server's operation as the same one the client uploaded. This caused data + * loss when the old state was re-applied. + * + * FIX: op.id is now passed as the 7th argument to uploadSnapshot. + * Server uses this ID instead of generating a new one, ensuring client and + * server have matching operation IDs. 
+ */ + it('uploadSnapshot receives op.id to prevent ID mismatch', async () => { + const entry = createFullStateEntry( + 1, + 'my-backup-import-id', + 'client-1', + OpType.BackupImport, + ); + mockOpLogStore.getUnsynced.and.returnValue(Promise.resolve([entry])); + + await service.uploadPendingOps(mockApiProvider); + + // Verify uploadSnapshot was called + expect(mockApiProvider.uploadSnapshot).toHaveBeenCalled(); + + // Get the call arguments + const callArgs = mockApiProvider.uploadSnapshot.calls.mostRecent().args; + + // Verify all 7 args are passed including op.id + expect(callArgs.length).toBe(7); + + // Verify specific args + expect(callArgs[1]).toBe('client-1'); // clientId + expect(callArgs[2]).toBe('recovery'); // reason + + // CRITICAL: Verify op.id is passed as 7th argument + expect(callArgs[6]).toBe('my-backup-import-id'); + }); + it('should pass vectorClock and schemaVersion to snapshot upload', async () => { const vectorClock: Record<string, number> = {}; vectorClock['client-1'] = 5; @@ -818,6 +861,7 @@ describe('OperationLogUploadService', () => { vectorClock, 42, false, // isPayloadEncrypted + 'op-1', // op.id ); }); diff --git a/src/app/op-log/sync/operation-log-upload.service.ts b/src/app/op-log/sync/operation-log-upload.service.ts index 3ba8c562f..047a58871 100644 --- a/src/app/op-log/sync/operation-log-upload.service.ts +++ b/src/app/op-log/sync/operation-log-upload.service.ts @@ -384,6 +384,7 @@ export class OperationLogUploadService { op.vectorClock, op.schemaVersion, isPayloadEncrypted, + op.id, // CRITICAL: Pass op.id to prevent ID mismatch bugs ); return response; } catch (err) {