Mirror of https://github.com/johannesjo/super-productivity.git, synced 2026-01-22 18:30:09 +00:00
refactor: replace SyncLog with PFLog in pfapi directory
- Replace all SyncLog calls with PFLog throughout pfapi
- Modified 24 files with 246 total changes
- All pfapi logs now consistently use PFLog with [pf] context prefix
- Fixed duplicate import in model-ctrl.ts
This commit is contained in:
parent 1bac5f9280
commit 9c20096bb8
25 changed files with 310 additions and 268 deletions
64  scripts/replace-synclog-with-pflog.ts  (executable file)
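Across the pfapi files below, the change is a mechanical one-for-one swap of the logger at each call site; as a quick orientation (this exact pair appears in one of the Database hunks further down):

// before
SyncLog.normal(`${Database.L}.${this.lock.name}()`);
// after
PFLog.normal(`${Database.L}.${this.lock.name}()`);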
@@ -0,0 +1,64 @@
#!/usr/bin/env ts-node

import * as fs from 'fs';
import * as glob from 'glob';

const pfapiFiles = glob.sync('src/app/pfapi/**/*.ts', {
  ignore: ['**/*.spec.ts', '**/node_modules/**'],
});

console.log(`Found ${pfapiFiles.length} pfapi files to check`);

let totalFiles = 0;
let totalChanges = 0;

function migrateFile(filePath: string): void {
  let content = fs.readFileSync(filePath, 'utf8');
  let modified = false;
  let localChanges = 0;

  // Replace SyncLog with PFLog in code
  const syncLogPattern = /\bSyncLog\b/g;
  const matches = content.match(syncLogPattern);
  if (matches) {
    content = content.replace(syncLogPattern, 'PFLog');
    modified = true;
    localChanges = matches.length;
  }

  // Update imports - replace SyncLog with PFLog
  if (modified) {
    // Handle imports that have both SyncLog and PFLog
    content = content.replace(
      /import\s*{\s*([^}]*)\bSyncLog\b([^}]*)\}\s*from\s*['"][^'"]*core\/log['"]/g,
      (match, before, after) => {
        const imports = (before + after)
          .split(',')
          .map((s) => s.trim())
          .filter((s) => s && s !== 'SyncLog');
        // Only add PFLog if it's not already there
        if (!imports.includes('PFLog')) {
          imports.push('PFLog');
        }
        const importPath = match.includes('"')
          ? match.split('"')[1]
          : match.split("'")[1];
        return `import { ${imports.join(', ')} } from '${importPath}'`;
      },
    );
  }

  if (modified) {
    fs.writeFileSync(filePath, content);
    console.log(`✓ ${filePath} (${localChanges} changes)`);
    totalFiles++;
    totalChanges += localChanges;
  }
}

// Process all files
pfapiFiles.forEach(migrateFile);

console.log(
  `\nMigration complete! Modified ${totalFiles} files with ${totalChanges} total changes`,
);
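As a rough sketch of what the import-rewriting block above does when it matches (illustrative only; the sample input line and the standalone run are made up, and in the committed script the same callback is applied to whole file contents rather than a single line):

// Hypothetical standalone check of the same regex + callback used in the script above.
const sample = "import { SyncLog, PFLog } from '../../../core/log';";
const rewritten = sample.replace(
  /import\s*{\s*([^}]*)\bSyncLog\b([^}]*)\}\s*from\s*['"][^'"]*core\/log['"]/g,
  (match, before, after) => {
    // Drop SyncLog from the import list and de-duplicate PFLog.
    const imports = (before + after)
      .split(',')
      .map((s) => s.trim())
      .filter((s) => s && s !== 'SyncLog');
    if (!imports.includes('PFLog')) {
      imports.push('PFLog');
    }
    const importPath = match.includes('"') ? match.split('"')[1] : match.split("'")[1];
    return `import { ${imports.join(', ')} } from '${importPath}'`;
  },
);
console.log(rewritten); // import { PFLog } from '../../../core/log';

The script itself is presumably run from the repository root with ts-node (for example `npx ts-node scripts/replace-synclog-with-pflog.ts`, with the glob package available); the commit does not document the exact invocation.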
@@ -1,6 +1,6 @@
import { DBNames } from '../pfapi.const';
import { Database } from '../db/database';
import { SyncLog } from '../../../core/log';
import { PFLog } from '../../../core/log';

export class TmpBackupService<BD extends Record<string, any>> {
  private static readonly L = 'TmpBackupService';
@@ -14,7 +14,7 @@ export class TmpBackupService<BD extends Record<string, any>> {
   * @returns The backup data or null if not found.
   */
  async load(): Promise<BD | null> {
    SyncLog.verbose(`${TmpBackupService.L}.${this.load.name}()`);
    PFLog.verbose(`${TmpBackupService.L}.${this.load.name}()`);
    return (
      this._inMemoryBackup ||
      ((await this._db.load(TmpBackupService.DB_KEY)) as BD) ||
@@ -28,7 +28,7 @@ export class TmpBackupService<BD extends Record<string, any>> {
   * @returns A promise resolving when the save is complete.
   */
  async save(backup: BD): Promise<unknown> {
    SyncLog.normal(
    PFLog.normal(
      `${TmpBackupService.L}.${this.save.name}()`,
      TmpBackupService.DB_KEY,
      backup,
@@ -41,7 +41,7 @@ export class TmpBackupService<BD extends Record<string, any>> {
   * Clears the backup from memory and database.
   */
  async clear(): Promise<void> {
    SyncLog.normal(`${TmpBackupService.L}.${this.clear.name}()`);
    PFLog.normal(`${TmpBackupService.L}.${this.clear.name}()`);
    this._inMemoryBackup = undefined;
    await this._db.remove(TmpBackupService.DB_KEY, true);
  }
@@ -17,7 +17,7 @@ export async function compressWithGzipToString(input: string): Promise<string> {
  }
  const base64 = btoa(binary);

  // SyncLog.normal( 'Compression stats', {
  // PFLog.normal( 'Compression stats', {
  // inputLength: input.length,
  // compressedSize: compressed.byteLength,
  // base64Length: base64.length,
@@ -49,7 +49,7 @@ export async function decompressGzipFromString(

    const decompressed = await new Response(stream.readable).arrayBuffer();
    const decoded = new TextDecoder().decode(decompressed);
    // SyncLog.normal( 'Decompression stats', { decompressedLength: decoded.length });
    // PFLog.normal( 'Decompression stats', { decompressedLength: decoded.length });
    return decoded;
  } catch (error) {
    PFLog.err(error);
@@ -1,5 +1,5 @@
import { DatabaseAdapter } from './database-adapter.model';
import { SyncLog } from '../../../core/log';
import { PFLog } from '../../../core/log';
import { devError } from '../../../util/dev-error';
import { PFLog } from '../../../core/log';

@@ -18,12 +18,12 @@ export class Database {
  }

  lock(): void {
    SyncLog.normal(`${Database.L}.${this.lock.name}()`);
    PFLog.normal(`${Database.L}.${this.lock.name}()`);
    this._isLocked = true;
  }

  unlock(): void {
    SyncLog.normal(`${Database.L}.${this.unlock.name}()`);
    PFLog.normal(`${Database.L}.${this.unlock.name}()`);
    this._isLocked = false;
  }

@@ -32,7 +32,7 @@ export class Database {
    try {
      return await this._adapter.load<T>(key);
    } catch (e) {
      SyncLog.critical('DB Load Error', { lastParams: this._lastParams, error: e });
      PFLog.critical('DB Load Error', { lastParams: this._lastParams, error: e });
      return this._errorHandler(e as Error, this.load, [key]);
    }
  }
@@ -42,7 +42,7 @@ export class Database {
    try {
      return await this._adapter.loadAll<T>();
    } catch (e) {
      SyncLog.critical('DB LoadAll Error', { lastParams: this._lastParams, error: e });
      PFLog.critical('DB LoadAll Error', { lastParams: this._lastParams, error: e });
      return this._errorHandler(e as Error, this.loadAll, []);
    }
  }
@@ -52,7 +52,7 @@ export class Database {
    if (this._isLocked && !isIgnoreDBLock) {
      console.trace();
      devError(`Attempting to write DB for ${key} while locked`);
      SyncLog.critical(`${Database.L}.save() BLOCKED!!! - Database is locked!`, {
      PFLog.critical(`${Database.L}.save() BLOCKED!!! - Database is locked!`, {
        key,
        isLocked: this._isLocked,
        isIgnoreDBLock,
@@ -69,7 +69,7 @@ export class Database {
    try {
      return await this._adapter.save(key, data);
    } catch (e) {
      SyncLog.critical('DB Save Error', { lastParams: this._lastParams, error: e });
      PFLog.critical('DB Save Error', { lastParams: this._lastParams, error: e });
      return this._errorHandler(e as Error, this.save, [key, data]);
    }
  }
@@ -107,7 +107,7 @@ export class Database {
      await this._adapter.init();
    } catch (e) {
      PFLog.err(e);
      SyncLog.critical('Database initialization failed', {
      PFLog.critical('Database initialization failed', {
        lastParams: this._lastParams,
        error: e,
      });
@@ -120,7 +120,7 @@ export class Database {
    fn: (...args: any[]) => Promise<any>,
    args: any[],
  ): Promise<void> {
    SyncLog.critical(`${Database.L}.${this._errorHandler.name}()`, e, fn.name, args);
    PFLog.critical(`${Database.L}.${this._errorHandler.name}()`, e, fn.name, args);
    this._onError(e);
    throw e; // Rethrow to allow caller to handle
  }
@@ -10,7 +10,7 @@ class AdditionalLogErrorBase<T = unknown[]> extends Error {
    super(typeof additional[0] === 'string' ? additional[0] : new.target.name);

    if (additional.length > 0) {
      // SyncLog.critical( this.name, ...additional);
      // PFLog.critical( this.name, ...additional);
      PFLog.log(this.name, ...additional);
      try {
        PFLog.log('additional error log: ' + JSON.stringify(additional));
@@ -1,5 +1,5 @@
import { AllSyncModels, ModelCfgs } from '../pfapi.model';
import { SyncLog } from '../../../core/log';
import { PFLog } from '../../../core/log';
import {
  CanNotMigrateMajorDownError,
  ImpossibleError,
@@ -15,7 +15,7 @@ export class MigrationService<MD extends ModelCfgs> {

  async checkAndMigrateLocalDB(): Promise<void> {
    const meta = await this._pfapiMain.metaModel.load();
    SyncLog.normal(`${MigrationService.L}.${this.checkAndMigrateLocalDB.name}()`, {
    PFLog.normal(`${MigrationService.L}.${this.checkAndMigrateLocalDB.name}()`, {
      meta,
    });

@@ -41,11 +41,9 @@ export class MigrationService<MD extends ModelCfgs> {
        crossModelVersion: versionAfter,
        lastUpdate: Date.now(),
      });
      SyncLog.normal(
        `Migration successful: ${meta.crossModelVersion} → ${versionAfter}`,
      );
      PFLog.normal(`Migration successful: ${meta.crossModelVersion} → ${versionAfter}`);
    } catch (error) {
      SyncLog.critical(`Migration failed`, {
      PFLog.critical(`Migration failed`, {
        error,
        fromVersion: meta.crossModelVersion,
        toVersion: versionAfter,
@@ -69,7 +67,7 @@ export class MigrationService<MD extends ModelCfgs> {
      typeof codeModelVersion !== 'number' ||
      dataInCrossModelVersion === codeModelVersion
    ) {
      SyncLog.normal(`${MigrationService.L}.${this.migrate.name}() no migration needed`, {
      PFLog.normal(`${MigrationService.L}.${this.migrate.name}() no migration needed`, {
        dataInCrossModelVersion,
        codeModelVersion,
      });
@@ -112,7 +110,7 @@ export class MigrationService<MD extends ModelCfgs> {
    const migrationsKeysToRun = migrationKeys.filter((v) => v > dataInCrossModelVersion);
    const migrationsToRun = migrationsKeysToRun.map((v) => cfg!.crossModelMigrations![v]);

    SyncLog.normal(
    PFLog.normal(
      `${MigrationService.L}.${this.migrate.name}() migrate ${dataInCrossModelVersion} to ${codeModelVersion}`,
      {
        migrationKeys,
@@ -142,7 +140,7 @@ export class MigrationService<MD extends ModelCfgs> {
        wasMigrated: true,
      };
    } catch (error) {
      SyncLog.critical(`Migration functions failed to execute`, { error });
      PFLog.critical(`Migration functions failed to execute`, { error });
      throw new ModelMigrationError('Error running migration functions', error);
    }
  }
@@ -1,6 +1,6 @@
import { Database } from '../db/database';
import { LocalMeta, ModelBase, ModelCfg } from '../pfapi.model';
import { SyncLog } from '../../../core/log';
import { PFLog } from '../../../core/log';
import { getEnvironmentId } from '../util/get-environment-id';
import { DBNames } from '../pfapi.const';
import {
@@ -70,7 +70,7 @@ export class MetaModelCtrl {
    modelCfg: ModelCfg<MT>,
    isIgnoreDBLock = false,
  ): Promise<void> {
    SyncLog.normal(`${MetaModelCtrl.L}.${this.updateRevForModel.name}()`, modelId, {
    PFLog.normal(`${MetaModelCtrl.L}.${this.updateRevForModel.name}()`, modelId, {
      modelCfg,
      inMemory: this._metaModelInMemory,
    });
@@ -126,7 +126,7 @@ export class MetaModelCtrl {
   * @throws {InvalidMetaError} When metamodel is invalid
   */
  save(metaModel: LocalMeta, isIgnoreDBLock = false): Promise<unknown> {
    SyncLog.normal(`${MetaModelCtrl.L}.${this.save.name}()`, {
    PFLog.normal(`${MetaModelCtrl.L}.${this.save.name}()`, {
      metaModel,
      lastSyncedUpdate: metaModel.lastSyncedUpdate,
      lastUpdate: metaModel.lastUpdate,
@@ -139,7 +139,7 @@ export class MetaModelCtrl {
    this._ev.emit('syncStatusChange', 'UNKNOWN_OR_CHANGED');

    // Add detailed logging before saving
    SyncLog.normal(`${MetaModelCtrl.L}.${this.save.name}() about to save to DB:`, {
    PFLog.normal(`${MetaModelCtrl.L}.${this.save.name}() about to save to DB:`, {
      id: MetaModelCtrl.META_MODEL_ID,
      lastSyncedUpdate: metaModel.lastSyncedUpdate,
      lastUpdate: metaModel.lastUpdate,
@@ -155,14 +155,14 @@ export class MetaModelCtrl {
    // Log after save completes
    savePromise
      .then(() => {
        SyncLog.normal(
        PFLog.normal(
          `${MetaModelCtrl.L}.${this.save.name}() DB save completed successfully`,
          metaModel,
        );
      })
      .catch((error) => {
        devError('DB save for meta file failed');
        SyncLog.critical(`${MetaModelCtrl.L}.${this.save.name}() DB save failed`, error);
        PFLog.critical(`${MetaModelCtrl.L}.${this.save.name}() DB save failed`, error);
      });

    return savePromise;
@@ -175,7 +175,7 @@ export class MetaModelCtrl {
   * @throws {InvalidMetaError} When loaded data is invalid
   */
  async load(): Promise<LocalMeta> {
    SyncLog.verbose(`${MetaModelCtrl.L}.${this.load.name}()`, this._metaModelInMemory);
    PFLog.verbose(`${MetaModelCtrl.L}.${this.load.name}()`, this._metaModelInMemory);

    if (this._metaModelInMemory) {
      return this._metaModelInMemory;
@@ -184,7 +184,7 @@ export class MetaModelCtrl {
    const data = (await this._db.load(MetaModelCtrl.META_MODEL_ID)) as LocalMeta;

    // Add debug logging
    SyncLog.normal(`${MetaModelCtrl.L}.${this.load.name}() loaded from DB:`, {
    PFLog.normal(`${MetaModelCtrl.L}.${this.load.name}() loaded from DB:`, {
      data,
      hasData: !!data,
      lastSyncedUpdate: data?.lastSyncedUpdate,
@@ -197,7 +197,7 @@ export class MetaModelCtrl {
        ...DEFAULT_META_MODEL,
        crossModelVersion: this.crossModelVersion,
      };
      SyncLog.normal(`${MetaModelCtrl.L}.${this.load.name}() initialized with defaults`);
      PFLog.normal(`${MetaModelCtrl.L}.${this.load.name}() initialized with defaults`);
      return this._metaModelInMemory;
    }
    if (!data.revMap) {
@@ -205,7 +205,7 @@ export class MetaModelCtrl {
    }

    // Log the loaded data
    SyncLog.normal(`${MetaModelCtrl.L}.${this.load.name}() loaded valid data:`, {
    PFLog.normal(`${MetaModelCtrl.L}.${this.load.name}() loaded valid data:`, {
      lastUpdate: data.lastUpdate,
      lastSyncedUpdate: data.lastSyncedUpdate,
      metaRev: data.metaRev,
@@ -220,13 +220,13 @@ export class MetaModelCtrl {
    // Ensure vector clock fields are initialized for old data
    if (data.vectorClock === undefined) {
      data.vectorClock = {};
      SyncLog.normal(
      PFLog.normal(
        `${MetaModelCtrl.L}.${this.load.name}() initialized missing vectorClock`,
      );
    }
    if (data.lastSyncedVectorClock === undefined) {
      data.lastSyncedVectorClock = null;
      SyncLog.normal(
      PFLog.normal(
        `${MetaModelCtrl.L}.${this.load.name}() initialized missing lastSyncedVectorClock`,
      );
    }
@@ -264,10 +264,10 @@ export class MetaModelCtrl {
    } catch (e) {
      if (e instanceof ClientIdNotFoundError) {
        const clientId = this._generateClientId();
        SyncLog.normal(`${MetaModelCtrl.L} Create clientId ${clientId}`);
        PFLog.normal(`${MetaModelCtrl.L} Create clientId ${clientId}`);
        await this._saveClientId(clientId);
      } else {
        SyncLog.critical(`${MetaModelCtrl.L} Error initializing clientId:`, e);
        PFLog.critical(`${MetaModelCtrl.L} Error initializing clientId:`, e);
      }
    }
  }
@@ -293,7 +293,7 @@ export class MetaModelCtrl {
   * @returns Promise that resolves when the save completes
   */
  private _saveClientId(clientId: string): Promise<unknown> {
    SyncLog.normal(`${MetaModelCtrl.L}.${this._saveClientId.name}()`, clientId);
    PFLog.normal(`${MetaModelCtrl.L}.${this._saveClientId.name}()`, clientId);
    this._clientIdInMemory = clientId;
    return this._db.save(MetaModelCtrl.CLIENT_ID, clientId, true);
  }
@@ -307,14 +307,14 @@ export class MetaModelCtrl {
    const newClientId = this._generateClientId();
    // Save the new client ID
    await this._db.save(MetaModelCtrl.CLIENT_ID, newClientId, true);
    SyncLog.error(`${MetaModelCtrl.L}.generateNewClientId() generated new client ID`, {
    PFLog.error(`${MetaModelCtrl.L}.generateNewClientId() generated new client ID`, {
      newClientId,
    });
    return newClientId;
  }

  private _generateClientId(): string {
    SyncLog.normal(`${MetaModelCtrl.L}.${this._generateClientId.name}()`);
    PFLog.normal(`${MetaModelCtrl.L}.${this._generateClientId.name}()`);
    return getEnvironmentId() + '_' + Date.now();
  }
}
@@ -1,9 +1,8 @@
import { ModelBase, ModelCfg } from '../pfapi.model';
import { Database } from '../db/database';
import { MetaModelCtrl } from './meta-model-ctrl';
import { SyncLog } from '../../../core/log';
import { ModelValidationError } from '../errors/errors';
import { PFLog } from '../../../core/log';
import { ModelValidationError } from '../errors/errors';

// type ExtractModelType<T extends ModelCfg<unknown>> = T extends ModelCfg<infer U> ? U : never;

@@ -44,7 +43,7 @@ export class ModelCtrl<MT extends ModelBase> {
    p?: { isUpdateRevAndLastUpdate: boolean; isIgnoreDBLock?: boolean },
  ): Promise<unknown> {
    this._inMemoryData = data;
    SyncLog.normal(`___ ${ModelCtrl.L}.${this.save.name}()`, this.modelId, p, data);
    PFLog.normal(`___ ${ModelCtrl.L}.${this.save.name}()`, this.modelId, p, data);

    // Validate data if validator is available
    if (this.modelCfg.validate) {
@@ -108,7 +107,7 @@ export class ModelCtrl<MT extends ModelBase> {
   * @returns Promise resolving to model data
   */
  async load(): Promise<MT> {
    SyncLog.verbose(`${ModelCtrl.L}.${this.load.name}()`, {
    PFLog.verbose(`${ModelCtrl.L}.${this.load.name}()`, {
      inMemoryData: this._inMemoryData,
    });
    return (
@@ -123,7 +122,7 @@ export class ModelCtrl<MT extends ModelBase> {
   * @returns Promise resolving after remove operation
   */
  async remove(): Promise<unknown> {
    SyncLog.normal(`${ModelCtrl.L}.${this.remove.name}()`, this.modelId);
    PFLog.normal(`${ModelCtrl.L}.${this.remove.name}()`, this.modelId);
    this._inMemoryData = null;
    return this._db.remove(this.modelId);
  }
@@ -18,7 +18,7 @@ import { MetaModelCtrl } from './model-ctrl/meta-model-ctrl';
import { ModelCtrl } from './model-ctrl/model-ctrl';
import { MiniObservable } from './util/mini-observable';
import { SyncProviderServiceInterface } from './sync/sync-provider.interface';
import { SyncLog } from '../../core/log';
import { PFLog } from '../../core/log';
import { SyncProviderId, SyncStatus } from './pfapi.const';
import { EncryptAndCompressHandlerService } from './sync/encrypt-and-compress-handler.service';
import { SyncProviderPrivateCfgStore } from './sync/sync-provider-private-cfg-store';
@@ -95,7 +95,7 @@ export class Pfapi<const MD extends ModelCfgs> {
      this.cfg?.crossModelVersion || 0,
    );
    this.m = this._createModels(modelCfgs);
    SyncLog.normal(`m`, this.m);
    PFLog.normal(`m`, this.m);

    this.syncProviders = syncProviders;
    this.syncProviders.forEach((sp) => {
@@ -137,7 +137,7 @@ export class Pfapi<const MD extends ModelCfgs> {
  private async _wrapSyncAction<T>(logPrefix: string, fn: () => Promise<T>): Promise<T> {
    // Check if sync is already in progress
    if (this._isSyncInProgress) {
      SyncLog.normal(`${logPrefix} SKIPPED - sync already in progress`);
      PFLog.normal(`${logPrefix} SKIPPED - sync already in progress`);
      throw new Error('Sync already in progress');
    }

@@ -148,10 +148,10 @@
    this.db.lock();

    try {
      SyncLog.normal(`${logPrefix}`);
      PFLog.normal(`${logPrefix}`);
      this.ev.emit('syncStatusChange', 'SYNCING');
      const result = await fn();
      SyncLog.normal(`${logPrefix} result:`, result);
      PFLog.normal(`${logPrefix} result:`, result);
      this.ev.emit('syncDone', result);
      // Keep lock until after status change to prevent race conditions
      this.ev.emit('syncStatusChange', 'IN_SYNC');
@@ -169,7 +169,7 @@ export class Pfapi<const MD extends ModelCfgs> {
  }

  setActiveSyncProvider(activeProviderId: SyncProviderId | null): void {
    SyncLog.normal(
    PFLog.normal(
      `${this.setActiveSyncProvider.name}()`,
      activeProviderId,
      activeProviderId,
@@ -197,7 +197,7 @@ export class Pfapi<const MD extends ModelCfgs> {
  async getSyncProviderById<T extends SyncProviderId>(
    providerId: T,
  ): Promise<SyncProviderServiceInterface<T>> {
    SyncLog.normal(`${this.getSyncProviderById.name}()`, providerId);
    PFLog.normal(`${this.getSyncProviderById.name}()`, providerId);
    const provider = this.syncProviders.find((sp) => sp.id === providerId);
    if (!provider) {
      throw new InvalidSyncProviderError();
@@ -209,7 +209,7 @@ export class Pfapi<const MD extends ModelCfgs> {
  async getSyncProviderPrivateCfg<T extends SyncProviderId>(
    providerId: T,
  ): Promise<PrivateCfgByProviderId<T>> {
    SyncLog.normal(`${this.getSyncProviderPrivateCfg.name}()`, providerId);
    PFLog.normal(`${this.getSyncProviderPrivateCfg.name}()`, providerId);
    const provider = this.syncProviders.find((sp) => sp.id === providerId);
    if (!provider) {
      throw new InvalidSyncProviderError();
@@ -223,7 +223,7 @@
    providerId: T,
    privateCfg: PrivateCfgByProviderId<T>,
  ): Promise<void> {
    SyncLog.normal(`${this.setPrivateCfgForSyncProvider.name}()`, providerId, privateCfg);
    PFLog.normal(`${this.setPrivateCfgForSyncProvider.name}()`, providerId, privateCfg);
    const provider = this.syncProviders.find((sp) => sp.id === providerId);
    if (!provider) {
      throw new InvalidSyncProviderError();
@@ -236,7 +236,7 @@
  }

  setEncryptAndCompressCfg(cfg: EncryptAndCompressCfg): void {
    SyncLog.normal(`${this.setEncryptAndCompressCfg.name}()`, cfg);
    PFLog.normal(`${this.setEncryptAndCompressCfg.name}()`, cfg);
    this._encryptAndCompressCfg$.next(cfg);
  }

@@ -244,7 +244,7 @@

  // TODO improve naming with validity check
  async getAllSyncModelData(isSkipValidityCheck = false): Promise<AllSyncModels<MD>> {
    SyncLog.normal(`${this.getAllSyncModelData.name}()`);
    PFLog.normal(`${this.getAllSyncModelData.name}()`);
    const modelIds = Object.keys(this.m);
    const promises = modelIds.map((modelId) => {
      const modelCtrl = this.m[modelId];
@@ -263,9 +263,9 @@
      this.cfg?.validate &&
      this.cfg.validate(allData as AllSyncModels<MD>);
    if (validationResultIfNeeded && !validationResultIfNeeded.success) {
      SyncLog.error('ACTUALLY GOT ONE!!', validationResultIfNeeded);
      PFLog.error('ACTUALLY GOT ONE!!', validationResultIfNeeded);
      if (this._getAllSyncModelDataRetryCount >= 1) {
        SyncLog.error('ACTUALLY GOT ONE 2!! ERROR', validationResultIfNeeded);
        PFLog.error('ACTUALLY GOT ONE 2!! ERROR', validationResultIfNeeded);
        this._getAllSyncModelDataRetryCount = 0;
        throw new DataValidationFailedError(validationResultIfNeeded);
      }
@@ -341,7 +341,7 @@
    isSkipLegacyWarnings?: boolean;
    isBackupImport?: boolean;
  }): Promise<void> {
    SyncLog.normal(`${this.importAllSycModelData.name}()`, { data, cfg: this.cfg });
    PFLog.normal(`${this.importAllSycModelData.name}()`, { data, cfg: this.cfg });

    const { dataAfter } = await this.migrationService.migrate(crossModelVersion, data);
    data = dataAfter;
@@ -349,12 +349,12 @@
    if (this.cfg?.validate) {
      const validationResult = this.cfg.validate(data);
      if (!validationResult.success) {
        SyncLog.critical(
        PFLog.critical(
          `${this.importAllSycModelData.name}() data not valid`,
          validationResult,
        );
        if (isAttemptRepair && this.cfg.repair) {
          SyncLog.critical(`${this.importAllSycModelData.name}() attempting repair`);
          PFLog.critical(`${this.importAllSycModelData.name}() attempting repair`);
          data = this.cfg.repair(data, (validationResult as IValidation.IFailure).errors);

          const r2 = this.cfg.validate(data);
@@ -371,7 +371,7 @@
    try {
      await this.tmpBackupService.save(await this.getAllSyncModelData());
    } catch (error) {
      SyncLog.critical(this.importAllSycModelData.name, error);
      PFLog.critical(this.importAllSycModelData.name, error);
      PFLog.err(
        'Could not create valid backup. Onwards on the highway throug the Danger Zone!',
      );
@@ -432,7 +432,7 @@
  }

  isValidateComplete(data: AllSyncModels<MD>): boolean {
    SyncLog.normal(`${this.isValidateComplete.name}()`, { data });
    PFLog.normal(`${this.isValidateComplete.name}()`, { data });
    if (!this.cfg?.validate) {
      throw new NoValidateFunctionProvidedError();
    }
@@ -441,7 +441,7 @@
  }

  repairCompleteData(data: unknown, errors: IValidation.IError[]): AllSyncModels<MD> {
    SyncLog.normal(`${this.repairCompleteData.name}()`, { data });
    PFLog.normal(`${this.repairCompleteData.name}()`, { data });
    if (!this.cfg?.repair) {
      throw new NoRepairFunctionProvidedError();
    }
@@ -449,7 +449,7 @@
  }

  validate(data: unknown): IValidation<AllSyncModels<MD>> {
    SyncLog.normal(`${this.validate.name}()`, { data });
    PFLog.normal(`${this.validate.name}()`, { data });
    if (!this.cfg?.validate) {
      throw new NoValidateFunctionProvidedError();
    }
@@ -2,7 +2,7 @@ import {
  extractSyncFileStateFromPrefix,
  getSyncFilePrefix,
} from '../util/sync-file-prefix';
import { SyncLog } from '../../../core/log';
import { PFLog } from '../../../core/log';
import { decrypt, encrypt } from '../encryption/encryption';
import { DecryptError, DecryptNoPasswordError } from '../errors/errors';
import {
@@ -62,7 +62,7 @@ export class EncryptAndCompressHandlerService {
      isEncrypt,
      modelVersion,
    });
    SyncLog.normal(
    PFLog.normal(
      `${EncryptAndCompressHandlerService.L}.${this.compressAndEncrypt.name}()`,
      {
        prefix,
@@ -99,7 +99,7 @@ export class EncryptAndCompressHandlerService {
  }> {
    const { isCompressed, isEncrypted, modelVersion, cleanDataStr } =
      extractSyncFileStateFromPrefix(dataStr);
    SyncLog.normal(
    PFLog.normal(
      `${EncryptAndCompressHandlerService.L}.${this.decompressAndDecrypt.name}()`,
      { isCompressed, isEncrypted, modelVersion },
    );
@@ -7,7 +7,7 @@ import {
  NoRemoteMetaFile,
  RemoteFileNotFoundAPIError,
} from '../errors/errors';
import { SyncLog } from '../../../core/log';
import { PFLog } from '../../../core/log';
import { MetaModelCtrl } from '../model-ctrl/meta-model-ctrl';
import { EncryptAndCompressHandlerService } from './encrypt-and-compress-handler.service';
import { validateMetaBase } from '../util/validate-meta-base';
@@ -32,7 +32,7 @@ export class MetaSyncService {
   * @returns Promise resolving when save is complete
   */
  async saveLocal(localMetaFileContent: LocalMeta): Promise<unknown> {
    SyncLog.normal(`${MetaSyncService.L}.${this.saveLocal.name}()`, {
    PFLog.normal(`${MetaSyncService.L}.${this.saveLocal.name}()`, {
      localMetaFileContent,
      lastUpdate: localMetaFileContent.lastUpdate,
      lastSyncedUpdate: localMetaFileContent.lastSyncedUpdate,
@@ -56,7 +56,7 @@ export class MetaSyncService {
    localRev: string | null = null,
  ): Promise<{ remoteMeta: RemoteMeta; remoteMetaRev: string }> {
    // return {} as any as MetaFileContent;
    SyncLog.normal(`${MetaSyncService.L}.${this.download.name}()`, { localRev });
    PFLog.normal(`${MetaSyncService.L}.${this.download.name}()`, { localRev });
    const syncProvider = this._currentSyncProvider$.getOrError();

    try {
@@ -115,7 +115,7 @@ export class MetaSyncService {
      meta.crossModelVersion,
    );

    SyncLog.normal(`${MetaSyncService.L}.${this.upload.name}()`, { meta });
    PFLog.normal(`${MetaSyncService.L}.${this.upload.name}()`, { meta });

    // Upload the data
    return (
@@ -135,7 +135,7 @@ export class MetaSyncService {
   * @throws NoRemoteMetaFile if the remote file doesn't exist
   */
  async getRev(localRev: string | null): Promise<string> {
    SyncLog.normal(`${MetaSyncService.L}.${this.getRev.name}()`, { localRev });
    PFLog.normal(`${MetaSyncService.L}.${this.getRev.name}()`, { localRev });
    const syncProvider = this._currentSyncProvider$.getOrError();

    try {
@@ -158,7 +158,7 @@ export class MetaSyncService {
   * @returns Promise resolving to the new revision string
   */
  async lock(revToMatch: string | null = null): Promise<string> {
    SyncLog.normal(`${MetaSyncService.L}.${this.lock.name}()`, { revToMatch });
    PFLog.normal(`${MetaSyncService.L}.${this.lock.name}()`, { revToMatch });
    const syncProvider = this._currentSyncProvider$.getOrError();
    const clientId = await this._metaModelCtrl.loadClientId();
@@ -1,4 +1,4 @@
import { SyncLog } from '../../../core/log';
import { PFLog } from '../../../core/log';
import { MiniObservable } from '../util/mini-observable';
import { SyncProviderServiceInterface } from './sync-provider.interface';
import {
@@ -56,7 +56,7 @@ export class ModelSyncService<MD extends ModelCfgs> {
    }

    const modelVersion = this._getModelVersion(modelId);
    SyncLog.normal(`${ModelSyncService.L}.${this.upload.name}()`, modelId, {
    PFLog.normal(`${ModelSyncService.L}.${this.upload.name}()`, modelId, {
      modelVersion,
      data,
      localRev,
@@ -94,7 +94,7 @@
      throw new ImpossibleError('Model ID is required for download');
    }

    SyncLog.normal(`${ModelSyncService.L}.${this.download.name}()`, {
    PFLog.normal(`${ModelSyncService.L}.${this.download.name}()`, {
      modelId,
      expectedRev,
    });
@@ -107,7 +107,7 @@
    );
    if (expectedRev) {
      if (!rev || !this._isSameRev(rev, expectedRev)) {
        SyncLog.normal('Rev mismatch', rev, expectedRev);
        PFLog.normal('Rev mismatch', rev, expectedRev);
        throw new RevMismatchForModelError(modelId, { rev, expectedRev });
      }
    }
@@ -146,7 +146,7 @@
      throw new ImpossibleError('Model ID is required for removal');
    }

    SyncLog.normal(`${ModelSyncService.L}.${this.remove.name}()`, {
    PFLog.normal(`${ModelSyncService.L}.${this.remove.name}()`, {
      modelId,
    });
    const syncProvider = this._currentSyncProvider$.getOrError();
@@ -183,7 +183,7 @@
  async updateLocalMainModelsFromRemoteMetaFile(remote: RemoteMeta): Promise<void> {
    const mainModelData = remote.mainModelData;
    if (typeof mainModelData === 'object' && mainModelData !== null) {
      SyncLog.normal(
      PFLog.normal(
        `${ModelSyncService.L}.${this.updateLocalMainModelsFromRemoteMetaFile.name}() updating (main) models`,
        Object.keys(mainModelData),
      );
@@ -222,7 +222,7 @@
    const mainModelData: MainModelData = Object.fromEntries(
      mainFileModelIds.map((modelId) => [modelId, completeModel[modelId]]),
    );
    SyncLog.normal(`${ModelSyncService.L}.${this.getMainFileModelDataForUpload.name}()`, {
    PFLog.normal(`${ModelSyncService.L}.${this.getMainFileModelDataForUpload.name}()`, {
      mainModelData,
      mainFileModelIds,
    });
@@ -299,7 +299,7 @@
   * @private
   */
  private async _removeLocal<T extends keyof MD>(modelId: T): Promise<void> {
    SyncLog.normal(
    PFLog.normal(
      `${ModelSyncService.L}.${this._removeLocal.name}: Delete local model ${String(modelId)}`,
    );
    await this.m[modelId].remove();
@@ -12,7 +12,7 @@ import {
  RemoteFileNotFoundAPIError,
  TooManyRequestsAPIError,
} from '../../../errors/errors';
import { SyncLog } from '../../../../../core/log';
import { PFLog } from '../../../../../core/log';
import { SyncProviderServiceInterface } from '../../sync-provider.interface';
import { SyncProviderId } from '../../../pfapi.const';
import { tryCatchInlineAsync } from '../../../../../util/try-catch-inline';
@@ -68,7 +68,7 @@ export class DropboxApi {
      });
      return response.json();
    } catch (e) {
      SyncLog.critical(`${DropboxApi.L}.getMetaData() error for path: ${path}`, e);
      PFLog.critical(`${DropboxApi.L}.getMetaData() error for path: ${path}`, e);
      this._checkCommonErrors(e, path);
      throw e;
    }
@@ -115,7 +115,7 @@

      return { meta, data: data as unknown as T };
    } catch (e) {
      SyncLog.critical(`${DropboxApi.L}.download() error for path: ${path}`, e);
      PFLog.critical(`${DropboxApi.L}.download() error for path: ${path}`, e);
      this._checkCommonErrors(e, path);
      throw e;
    }
@@ -167,7 +167,7 @@

      return result;
    } catch (e) {
      SyncLog.critical(`${DropboxApi.L}.upload() error for path: ${path}`, e);
      PFLog.critical(`${DropboxApi.L}.upload() error for path: ${path}`, e);
      this._checkCommonErrors(e, path);
      throw e;
    }
@@ -186,7 +186,7 @@
      });
      return response.json();
    } catch (e) {
      SyncLog.critical(`${DropboxApi.L}.remove() error for path: ${path}`, e);
      PFLog.critical(`${DropboxApi.L}.remove() error for path: ${path}`, e);
      this._checkCommonErrors(e, path);
      throw e;
    }
@@ -209,7 +209,7 @@
      });
      return response.json();
    } catch (e) {
      SyncLog.critical(`${DropboxApi.L}.checkUser() error`, e);
      PFLog.critical(`${DropboxApi.L}.checkUser() error`, e);
      this._checkCommonErrors(e, 'check/user');
      throw e;
    }
@@ -219,13 +219,13 @@
   * Refresh access token using refresh token
   */
  async updateAccessTokenFromRefreshTokenIfAvailable(): Promise<void> {
    SyncLog.normal(`${DropboxApi.L}.updateAccessTokenFromRefreshTokenIfAvailable()`);
    PFLog.normal(`${DropboxApi.L}.updateAccessTokenFromRefreshTokenIfAvailable()`);

    const privateCfg = await this._parent.privateCfg.load();
    const refreshToken = privateCfg?.refreshToken;

    if (!refreshToken) {
      SyncLog.critical('Dropbox: No refresh token available');
      PFLog.critical('Dropbox: No refresh token available');
      throw new MissingRefreshTokenAPIError();
    }

@@ -247,14 +247,14 @@
      }

      const data = (await response.json()) as TokenResponse;
      SyncLog.normal('Dropbox: Refresh access token Response', data);
      PFLog.normal('Dropbox: Refresh access token Response', data);

      await this._parent.privateCfg.save({
        accessToken: data.access_token,
        refreshToken: data.refresh_token || privateCfg?.refreshToken,
      });
    } catch (e) {
      SyncLog.critical('Failed to refresh Dropbox access token', e);
      PFLog.critical('Failed to refresh Dropbox access token', e);
      throw e;
    }
  }
@@ -308,7 +308,7 @@
        expiresAt: +data.expires_in * 1000 + Date.now(),
      };
    } catch (e) {
      SyncLog.critical(`${DropboxApi.L}.getTokensFromAuthCode() error`, e);
      PFLog.critical(`${DropboxApi.L}.getTokensFromAuthCode() error`, e);
      throw e;
    }
  }
@@ -398,7 +398,7 @@

      return response;
    } catch (e) {
      SyncLog.critical(`${DropboxApi.L}._request() error for ${url}`, e);
      PFLog.critical(`${DropboxApi.L}._request() error for ${url}`, e);
      this._checkCommonErrors(e, url);
      throw e;
    }
@@ -471,7 +471,7 @@
    return new Promise((resolve, reject) => {
      setTimeout(
        () => {
          SyncLog.normal(`Too many requests ${path}, retrying in ${retryAfter}s...`);
          PFLog.normal(`Too many requests ${path}, retrying in ${retryAfter}s...`);
          originalRequestExecutor().then(resolve).catch(reject);
        },
        (retryAfter + EXTRA_WAIT) * 1000,
@@ -9,7 +9,7 @@ import {
  RemoteFileNotFoundAPIError,
  NoRevAPIError,
} from '../../../errors/errors';
import { SyncLog } from '../../../../../core/log';
import { PFLog } from '../../../../../core/log';
import { DropboxApi } from './dropbox-api';
import { generatePKCECodes } from './generate-pkce-codes';
import { SyncProviderPrivateCfgStore } from '../../sync-provider-private-cfg-store';
@@ -86,7 +86,7 @@
      };
    } catch (e) {
      if (this._isTokenError(e)) {
        SyncLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        PFLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        await this._api.updateAccessTokenFromRefreshTokenIfAvailable();
        return this.getFileRev(targetPath, localRev);
      }
@@ -131,7 +131,7 @@
      }

      if (typeof r.data !== 'string') {
        SyncLog.critical(`${Dropbox.L}.${this.downloadFile.name}() data`, r.data);
        PFLog.critical(`${Dropbox.L}.${this.downloadFile.name}() data`, r.data);
        throw new InvalidDataSPError(r.data);
      }

@@ -141,7 +141,7 @@
      };
    } catch (e) {
      if (this._isTokenError(e)) {
        SyncLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        PFLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        await this._api.updateAccessTokenFromRefreshTokenIfAvailable();
        return this.downloadFile(targetPath, localRev);
      }
@@ -181,7 +181,7 @@
      };
    } catch (e) {
      if (this._isTokenError(e)) {
        SyncLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        PFLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        await this._api.updateAccessTokenFromRefreshTokenIfAvailable();
        return this.uploadFile(targetPath, dataStr, revToMatch, isForceOverwrite);
      }
@@ -200,7 +200,7 @@
      await this._api.remove(this._getPath(targetPath));
    } catch (e) {
      if (this._isTokenError(e)) {
        SyncLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        PFLog.critical('EXPIRED or INVALID TOKEN, trying to refresh');
        await this._api.updateAccessTokenFromRefreshTokenIfAvailable();
        return this.removeFile(targetPath);
      }
@@ -50,7 +50,7 @@ export class ElectronFileAdapter implements FileAdapter {
  // }
  // return result;
  // } catch (e) {
  // SyncLog.critical( `ElectronFileAdapter.checkDirExists() error`, e);
  // PFLog.critical( `ElectronFileAdapter.checkDirExists() error`, e);
  // return false;
  // }
  // }
@@ -59,7 +59,7 @@ export class ElectronFileAdapter implements FileAdapter {
  // try {
  // return await this.ea.pickDirectory();
  // } catch (e) {
  // SyncLog.critical( `ElectronFileAdapter.pickDirectory() error`, e);
  // PFLog.critical( `ElectronFileAdapter.pickDirectory() error`, e);
  // throw e;
  // }
  // }
@@ -12,7 +12,7 @@ import {
  WebCryptoNotAvailableError,
} from '../../../errors/errors';
import { md5HashPromise } from '../../../../../util/md5-hash';
import { SyncLog } from '../../../../../core/log';
import { PFLog } from '../../../../../core/log';
import { PrivateCfgByProviderId } from '../../../pfapi.model';

export abstract class LocalFileSyncBase
@@ -39,7 +39,7 @@ export abstract class LocalFileSyncBase
  protected abstract getFilePath(targetPath: string): Promise<string>;

  async getFileRev(targetPath: string, localRev: string): Promise<{ rev: string }> {
    SyncLog.normal(`${LocalFileSyncBase.LB}.${this.getFileRev.name}`, {
    PFLog.normal(`${LocalFileSyncBase.LB}.${this.getFileRev.name}`, {
      targetPath,
      localRev,
    });
@@ -47,7 +47,7 @@ export abstract class LocalFileSyncBase
      const r = await this.downloadFile(targetPath, localRev);
      return { rev: r.rev };
    } catch (e) {
      SyncLog.critical(`${LocalFileSyncBase.LB}.${this.getFileRev.name} error`, e);
      PFLog.critical(`${LocalFileSyncBase.LB}.${this.getFileRev.name} error`, e);
      throw e;
    }
  }
@@ -56,7 +56,7 @@ export abstract class LocalFileSyncBase
    targetPath: string,
    localRev: string,
  ): Promise<{ rev: string; dataStr: string }> {
    SyncLog.normal(`${LocalFileSyncBase.LB}.${this.downloadFile.name}()`, {
    PFLog.normal(`${LocalFileSyncBase.LB}.${this.downloadFile.name}()`, {
      targetPath,
      localRev,
    });
@@ -87,7 +87,7 @@ export abstract class LocalFileSyncBase
        throw new RemoteFileNotFoundAPIError(targetPath);
      }

      SyncLog.critical(`${LocalFileSyncBase.LB}.${this.downloadFile.name}() error`, e);
      PFLog.critical(`${LocalFileSyncBase.LB}.${this.downloadFile.name}() error`, e);
      throw e;
    }
  }
@@ -98,7 +98,7 @@ export abstract class LocalFileSyncBase
    revToMatch: string | null,
    isForceOverwrite: boolean = false,
  ): Promise<{ rev: string }> {
    SyncLog.normal(`${LocalFileSyncBase.LB}.${this.uploadFile.name}()`, {
    PFLog.normal(`${LocalFileSyncBase.LB}.${this.uploadFile.name}()`, {
      targetPath,
      dataLength: dataStr?.length,
      revToMatch,
@@ -111,7 +111,7 @@ export abstract class LocalFileSyncBase
      try {
        const existingFile = await this.downloadFile(targetPath, revToMatch);
        if (existingFile.rev !== revToMatch) {
          SyncLog.critical(
          PFLog.critical(
            `${LocalFileSyncBase.LB}.${this.uploadFile.name}() rev mismatch`,
            existingFile.rev,
            revToMatch,
@@ -132,13 +132,13 @@ export abstract class LocalFileSyncBase
      const newRev = await this._getLocalRev(dataStr);
      return { rev: newRev };
    } catch (e) {
      SyncLog.critical(`${LocalFileSyncBase.LB}.${this.uploadFile.name}() error`, e);
      PFLog.critical(`${LocalFileSyncBase.LB}.${this.uploadFile.name}() error`, e);
      throw e;
    }
  }

  async removeFile(targetPath: string): Promise<void> {
    SyncLog.normal(`${LocalFileSyncBase.LB}.${this.removeFile.name}`, { targetPath });
    PFLog.normal(`${LocalFileSyncBase.LB}.${this.removeFile.name}`, { targetPath });
    try {
      const filePath = await this.getFilePath(targetPath);
      await this.fileAdapter.deleteFile(filePath);
@@ -149,7 +149,7 @@ export abstract class LocalFileSyncBase
        e?.toString?.().includes('File does not exist') ||
        e?.toString?.().includes('ENOENT')
      ) {
        SyncLog.normal(
        PFLog.normal(
          `${LocalFileSyncBase.LB}.${this.removeFile.name} - file doesn't exist`,
          {
            targetPath,
@@ -158,7 +158,7 @@ export abstract class LocalFileSyncBase
        return;
      }

      SyncLog.critical(`${LocalFileSyncBase.LB}.${this.removeFile.name} error`, e);
      PFLog.critical(`${LocalFileSyncBase.LB}.${this.removeFile.name} error`, e);
      throw e;
    }
  }
@@ -1,7 +1,7 @@
// src/app/pfapi/api/sync/providers/local-file-sync/electron-file-adapter.ts
import { LocalFileSyncBase } from './local-file-sync-base';
import { IS_ELECTRON } from '../../../../../app.constants';
import { SyncLog } from '../../../../../core/log';
import { PFLog } from '../../../../../core/log';
import { ElectronFileAdapter } from './electron-file-adapter';
import { LocalFileSyncPrivateCfg } from '../../../pfapi.model';

@@ -34,7 +34,7 @@ export class LocalFileSyncElectron extends LocalFileSyncBase {
  }

  private async _checkDirAndOpenPickerIfNotExists(): Promise<void> {
    SyncLog.normal(
    PFLog.normal(
      `${LocalFileSyncElectron.L}.${this._checkDirAndOpenPickerIfNotExists.name}`,
    );

@@ -43,13 +43,11 @@ export class LocalFileSyncElectron extends LocalFileSyncBase {
      const isDirExists = await this._checkDirExists(folderPath);

      if (!isDirExists) {
        SyncLog.critical(
          `${LocalFileSyncElectron.L} - No valid directory, opening picker`,
        );
        PFLog.critical(`${LocalFileSyncElectron.L} - No valid directory, opening picker`);
        await this.pickDirectory();
      }
    } catch (err) {
      SyncLog.error(
      PFLog.error(
        `${LocalFileSyncElectron.L}.${this._checkDirAndOpenPickerIfNotExists.name}() error`,
        err,
      );
@@ -76,7 +74,7 @@ export class LocalFileSyncElectron extends LocalFileSyncBase {
      }
      return r;
    } catch (e) {
      SyncLog.critical(
      PFLog.critical(
        `${LocalFileSyncElectron.L}.${this._checkDirExists.name}() error`,
        e,
      );
@@ -85,7 +83,7 @@
  }

  async pickDirectory(): Promise<string | void> {
    SyncLog.normal(`${LocalFileSyncElectron.L}.pickDirectory()`);
    PFLog.normal(`${LocalFileSyncElectron.L}.pickDirectory()`);

    try {
      const dir = await (window as any).ea.pickDirectory();
@@ -94,7 +92,7 @@
      }
      return dir;
    } catch (e) {
      SyncLog.critical(`${LocalFileSyncElectron.L}.pickDirectory() error`, e);
      PFLog.critical(`${LocalFileSyncElectron.L}.pickDirectory() error`, e);
      throw e;
    }
  }
@@ -6,7 +6,7 @@ import {
  NoEtagAPIError,
  RemoteFileNotFoundAPIError,
} from '../../../errors/errors';
import { SyncLog } from '../../../../../core/log';
import { PFLog } from '../../../../../core/log';
import { IS_ANDROID_WEB_VIEW } from '../../../../../util/is-android-web-view';
import { CapacitorHttp } from '@capacitor/core';

@@ -83,7 +83,7 @@ export class WebdavApi {

  private _handleWebDavError(error: any, operation: string, path: string): void {
    const status = error?.status;
    SyncLog.critical(`${WebdavApi.L}.${operation}() error`, { path, error });
    PFLog.critical(`${WebdavApi.L}.${operation}() error`, { path, error });

    switch (status) {
      case 401:
@@ -136,7 +136,7 @@ export class WebdavApi {
      const etag = this._findEtagInHeaders(responseHeaderObj);

      if (!etag) {
        SyncLog.error(`${WebdavApi.L}.upload() no etag in response headers`, {
        PFLog.error(`${WebdavApi.L}.upload() no etag in response headers`, {
          path,
          headers: responseHeaderObj,
        });
@@ -145,19 +145,19 @@ export class WebdavApi {
          const meta = await this.getFileMeta(path, null);
          return meta.etag;
        } catch (metaError) {
          SyncLog.critical(
          PFLog.critical(
            `${WebdavApi.L}.upload() failed to get etag via PROPFIND`,
            metaError,
          );
          // Last resort: try GET request to retrieve ETag
          try {
            SyncLog.error(
            PFLog.error(
              `${WebdavApi.L}.upload() attempting GET request fallback for etag`,
            );
            const { rev } = await this.download({ path });
            return rev;
          } catch (getError) {
            SyncLog.critical(
            PFLog.critical(
              `${WebdavApi.L}.upload() GET request fallback also failed`,
              getError,
            );
@@ -172,12 +172,12 @@ export class WebdavApi {

      return etag;
    } catch (e: any) {
      SyncLog.critical(`${WebdavApi.L}.upload() error`, { path, error: e });
      PFLog.critical(`${WebdavApi.L}.upload() error`, { path, error: e });

      // Check if it's a RemoteFileNotFoundAPIError (404)
      if (e instanceof RemoteFileNotFoundAPIError || e?.status === 404) {
        // Not found - parent directory might not exist (some WebDAV servers return 404 instead of 409)
        SyncLog.error(
        PFLog.error(
          `${WebdavApi.L}.upload() 404 not found, attempting to create parent directories`,
          { path },
        );
@@ -200,19 +200,19 @@ export class WebdavApi {
          const meta = await this.getFileMeta(path, null);
          return meta.etag;
        } catch (metaError) {
          SyncLog.critical(
          PFLog.critical(
            `${WebdavApi.L}.upload() failed to get etag after retry`,
            metaError,
          );
          // Last resort: try GET request to retrieve ETag
          try {
            SyncLog.error(
            PFLog.error(
              `${WebdavApi.L}.upload() attempting GET request fallback for etag after retry`,
            );
            const { rev } = await this.download({ path });
            return rev;
          } catch (getError) {
            SyncLog.critical(
            PFLog.critical(
              `${WebdavApi.L}.upload() GET request fallback also failed after retry`,
              getError,
            );
@@ -226,7 +226,7 @@ export class WebdavApi {

          return retryEtag;
        } catch (retryError: any) {
          SyncLog.critical(`${WebdavApi.L}.upload() retry after 404 failed`, retryError);
          PFLog.critical(`${WebdavApi.L}.upload() retry after 404 failed`, retryError);
          if (retryError instanceof RemoteFileNotFoundAPIError) {
            throw retryError;
          }
@@ -240,7 +240,7 @@ export class WebdavApi {
      switch (e?.status) {
        case 409:
          // Conflict - parent directory doesn't exist
          SyncLog.error(
          PFLog.error(
            `${WebdavApi.L}.upload() 409 conflict, attempting to create parent directories`,
            { path },
          );
@@ -263,19 +263,19 @@ export class WebdavApi {
            const meta = await this.getFileMeta(path, null);
            return meta.etag;
          } catch (metaError) {
            SyncLog.critical(
            PFLog.critical(
              `${WebdavApi.L}.upload() failed to get etag after retry`,
              metaError,
            );
            // Last resort: try GET request to retrieve ETag
            try {
              SyncLog.error(
              PFLog.error(
                `${WebdavApi.L}.upload() attempting GET request fallback for etag after 409 retry`,
              );
              const { rev } = await this.download({ path });
              return rev;
            } catch (getError) {
              SyncLog.critical(
              PFLog.critical(
                `${WebdavApi.L}.upload() GET request fallback also failed after 409 retry`,
                getError,
              );
@@ -289,10 +289,7 @@ export class WebdavApi {

            return retryEtag;
          } catch (retryError: any) {
            SyncLog.critical(
              `${WebdavApi.L}.upload() retry after 409 failed`,
              retryError,
            );
            PFLog.critical(`${WebdavApi.L}.upload() retry after 409 failed`, retryError);
            throw new Error(
              `Upload failed: ${retryError?.message || 'Directory creation or upload conflict'}`,
            );
@@ -342,7 +339,7 @@ export class WebdavApi {

      // Check if response is HTML instead of XML
      if (this._isHtmlResponse(xmlText)) {
        SyncLog.error(
        PFLog.error(
          `${WebdavApi.L}.getFileMeta() received HTML response instead of XML`,
          {
            path,
@@ -402,9 +399,7 @@ export class WebdavApi {
      // If HEAD also fails and useGetFallback is enabled, try GET as last resort
      if (useGetFallback) {
        try {
          SyncLog.error(
            `${WebdavApi.L}.getFileMeta() attempting GET request fallback`,
          );
          PFLog.error(`${WebdavApi.L}.getFileMeta() attempting GET request fallback`);
          const { rev } = await this.download({ path });

          // Since we only have the ETag from GET, create minimal metadata
@@ -421,7 +416,7 @@ export class WebdavApi {
            },
          };
        } catch (getError: any) {
          SyncLog.critical(
          PFLog.critical(
            `${WebdavApi.L}.getFileMeta() GET fallback also failed`,
            getError,
          );
@@ -492,7 +487,7 @@ export class WebdavApi {

      if (response.status === 206) {
        // Partial Content - range request successful
        SyncLog.normal(`${WebdavApi.L}.download() received partial content for ${path}`);
        PFLog.normal(`${WebdavApi.L}.download() received partial content for ${path}`);
      }

      // Get response data
@@ -500,7 +495,7 @@ export class WebdavApi {

      // Check if response is HTML instead of file content
      if (this._isHtmlResponse(dataStr)) {
        SyncLog.error(
        PFLog.error(
          `${WebdavApi.L}.download() received HTML error page instead of file content`,
          {
            path,
@@ -512,7 +507,7 @@ export class WebdavApi {

      // Validate response content
      if (!dataStr && response.status === 200) {
        SyncLog.error(`${WebdavApi.L}.download() received empty content for ${path}`);
        PFLog.error(`${WebdavApi.L}.download() received empty content for ${path}`);
        // Empty file is valid in some cases, but log it
      }

@@ -523,7 +518,7 @@ export class WebdavApi {
      const rev = this._findEtagInHeaders(headerObj);

      if (!rev) {
        SyncLog.error(`${WebdavApi.L}.download() no etag in response headers`, {
        PFLog.error(`${WebdavApi.L}.download() no etag in response headers`, {
          path,
          headers: headerObj,
        });
@@ -539,10 +534,7 @@ export class WebdavApi {
          if (metaError instanceof RemoteFileNotFoundAPIError) {
            throw metaError;
          }
          SyncLog.critical(
            `${WebdavApi.L}.download() PROPFIND fallback failed`,
            metaError,
          );
          PFLog.critical(`${WebdavApi.L}.download() PROPFIND fallback failed`, metaError);
          // Use content-based hash as last resort
          const crypto = globalThis.crypto || (globalThis as any).msCrypto;
          if (crypto && crypto.subtle) {
@@ -559,7 +551,7 @@ export class WebdavApi {
              dataStr,
            };
          } catch (hashError) {
            SyncLog.critical(
            PFLog.critical(
              `${WebdavApi.L}.download() hash generation failed`,
              hashError,
            );
@@ -574,7 +566,7 @@ export class WebdavApi {
        dataStr,
      };
    } catch (e: any) {
      SyncLog.critical(`${WebdavApi.L}.download() error`, { path, error: e });
      PFLog.critical(`${WebdavApi.L}.download() error`, { path, error: e });

      // Enhanced error handling
      switch (e?.status) {
@@ -614,13 +606,11 @@ export class WebdavApi {
    } catch (checkError: any) {
      if (checkError?.status === 404) {
        // Resource doesn't exist, consider deletion successful
        SyncLog.normal(
          `${WebdavApi.L}.remove() resource already doesn't exist: ${path}`,
        );
        PFLog.normal(`${WebdavApi.L}.remove() resource already doesn't exist: ${path}`);
        return;
      }
      // If we can't check, proceed with deletion anyway
      SyncLog.error(
      PFLog.error(
        `${WebdavApi.L}.remove() couldn't check resource before deletion`,
        checkError,
      );
@@ -629,7 +619,7 @@ export class WebdavApi {
    // Add Depth header for collections (directories)
    if (resourceType === 'directory') {
      headers['Depth'] = 'infinity'; // Delete directory and all contents
      SyncLog.normal(
      PFLog.normal(
        `${WebdavApi.L}.remove() deleting directory with infinity depth: ${path}`,
      );
    }
@@ -649,14 +639,14 @@ export class WebdavApi {
        }
      }

      SyncLog.normal(`${WebdavApi.L}.remove() successfully deleted: ${path}`);
      PFLog.normal(`${WebdavApi.L}.remove() successfully deleted: ${path}`);
    } catch (e: any) {
      SyncLog.critical(`${WebdavApi.L}.remove() error`, { path, error: e });
      PFLog.critical(`${WebdavApi.L}.remove() error`, { path, error: e });

      // Enhanced error handling for WebDAV DELETE
      if (e instanceof RemoteFileNotFoundAPIError || e?.status === 404) {
        // Not Found - resource doesn't exist, consider this success
        SyncLog.normal(
        PFLog.normal(
          `${WebdavApi.L}.remove() resource not found (already deleted): ${path}`,
        );
        return;
@@ -702,7 +692,7 @@ export class WebdavApi {
      const status = response.querySelector('status')?.textContent;

      if (href && status && !status.includes('200') && !status.includes('204')) {
        SyncLog.critical(
        PFLog.critical(
          `${WebdavApi.L}._checkDeleteMultiStatusResponse() deletion failed for`,
          {
            href,
@@ -716,7 +706,7 @@ export class WebdavApi {

      return hasErrors;
    } catch (parseError) {
      SyncLog.critical(
      PFLog.critical(
        `${WebdavApi.L}._checkDeleteMultiStatusResponse() XML parsing error`,
        parseError,
      );
@@ -756,7 +746,7 @@ export class WebdavApi {
|
|||
const cfg = await this._getCfgOrError();
|
||||
try {
|
||||
// Try to check if root exists
|
||||
SyncLog.error(`${WebdavApi.L}.testConnection() testing WebDAV connection`, {
|
||||
PFLog.error(`${WebdavApi.L}.testConnection() testing WebDAV connection`, {
|
||||
baseUrl: cfg.baseUrl,
|
||||
});
|
||||
|
||||
|
|
@ -780,7 +770,7 @@ export class WebdavApi {
|
|||
},
|
||||
};
|
||||
} catch (error: any) {
|
||||
SyncLog.critical(`${WebdavApi.L}.testConnection() failed`, { error });
|
||||
PFLog.critical(`${WebdavApi.L}.testConnection() failed`, { error });
|
||||
return {
|
||||
success: false,
|
||||
message: `WebDAV connection failed: ${error?.message || 'Unknown error'}`,
|
||||
|
|
@ -802,7 +792,7 @@ export class WebdavApi {
|
|||
return this._cleanRev(d[etagKey]);
|
||||
}
|
||||
|
||||
SyncLog.critical(`${WebdavApi.L}.getRevFromMeta() No etag found in metadata`, {
|
||||
PFLog.critical(`${WebdavApi.L}.getRevFromMeta() No etag found in metadata`, {
|
||||
availableKeys: Object.keys(d),
|
||||
metadata: d,
|
||||
});
|
||||
|
|
@ -810,7 +800,7 @@ export class WebdavApi {
|
|||
}
|
||||
|
||||
async createFolder({ folderPath }: { folderPath: string }): Promise<void> {
|
||||
SyncLog.normal(`${WebdavApi.L}.createFolder() attempting to create folder`, {
|
||||
PFLog.normal(`${WebdavApi.L}.createFolder() attempting to create folder`, {
|
||||
folderPath,
|
||||
});
|
||||
|
||||
|
|
@ -819,14 +809,14 @@ export class WebdavApi {
|
|||
method: 'MKCOL',
|
||||
path: folderPath,
|
||||
});
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}.createFolder() successfully created folder with MKCOL`,
|
||||
{
|
||||
folderPath,
|
||||
},
|
||||
);
|
||||
} catch (e: any) {
|
||||
SyncLog.critical(`${WebdavApi.L}.createFolder() MKCOL error`, {
|
||||
PFLog.critical(`${WebdavApi.L}.createFolder() MKCOL error`, {
|
||||
folderPath,
|
||||
error: e,
|
||||
status: e?.status,
|
||||
|
|
@ -843,12 +833,12 @@ export class WebdavApi {
|
|||
e?.message?.includes('MKCOL') ||
|
||||
e instanceof RemoteFileNotFoundAPIError
|
||||
) {
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}.createFolder() MKCOL failed with status ${e?.status}, trying PUT fallback`,
|
||||
);
|
||||
try {
|
||||
const putPath = `${folderPath}/.folder`;
|
||||
SyncLog.normal(`${WebdavApi.L}.createFolder() attempting PUT to`, { putPath });
|
||||
PFLog.normal(`${WebdavApi.L}.createFolder() attempting PUT to`, { putPath });
|
||||
|
||||
await this._makeRequest({
|
||||
method: 'PUT',
|
||||
|
|
@ -859,14 +849,14 @@ export class WebdavApi {
|
|||
'Content-Length': '0',
|
||||
},
|
||||
});
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}.createFolder() successfully created folder with PUT`,
|
||||
{
|
||||
folderPath,
|
||||
},
|
||||
);
|
||||
} catch (putError: any) {
|
||||
SyncLog.critical(`${WebdavApi.L}.createFolder() PUT fallback failed`, {
|
||||
PFLog.critical(`${WebdavApi.L}.createFolder() PUT fallback failed`, {
|
||||
folderPath,
|
||||
error: putError,
|
||||
status: putError?.status,
|
||||
|
|
@ -881,7 +871,7 @@ export class WebdavApi {
|
|||
// Check if we need to create parent directories first
|
||||
const pathParts = folderPath.split('/').filter((p) => p);
|
||||
if (pathParts.length > 1) {
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}.createFolder() trying to create parent directories first`,
|
||||
{
|
||||
folderPath,
|
||||
|
|
@ -898,13 +888,13 @@ export class WebdavApi {
|
|||
try {
|
||||
const exists = await this.checkFolderExists(currentPath);
|
||||
if (!exists) {
|
||||
SyncLog.normal(`${WebdavApi.L}.createFolder() creating parent`, {
|
||||
PFLog.normal(`${WebdavApi.L}.createFolder() creating parent`, {
|
||||
currentPath,
|
||||
});
|
||||
await this.createFolder({ folderPath: currentPath });
|
||||
}
|
||||
} catch (parentError) {
|
||||
SyncLog.critical(
|
||||
PFLog.critical(
|
||||
`${WebdavApi.L}.createFolder() failed to create parent`,
|
||||
{
|
||||
currentPath,
|
||||
|
|
@ -918,7 +908,7 @@ export class WebdavApi {
|
|||
// Try one more time with a different approach - create a .gitkeep file
|
||||
try {
|
||||
const gitkeepPath = `${folderPath}/.gitkeep`;
|
||||
SyncLog.normal(`${WebdavApi.L}.createFolder() trying .gitkeep approach`, {
|
||||
PFLog.normal(`${WebdavApi.L}.createFolder() trying .gitkeep approach`, {
|
||||
gitkeepPath,
|
||||
});
|
||||
|
||||
|
|
@ -931,13 +921,13 @@ export class WebdavApi {
|
|||
'Content-Length': '0',
|
||||
},
|
||||
});
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}.createFolder() successfully created folder with .gitkeep`,
|
||||
{ folderPath },
|
||||
);
|
||||
return;
|
||||
} catch (gitkeepError) {
|
||||
SyncLog.critical(
|
||||
PFLog.critical(
|
||||
`${WebdavApi.L}.createFolder() .gitkeep approach also failed`,
|
||||
{
|
||||
folderPath,
|
||||
|
|
@ -1008,7 +998,7 @@ export class WebdavApi {
|
|||
|
||||
// Handle 404 specifically to throw RemoteFileNotFoundAPIError consistently
|
||||
if (response.status === 404) {
|
||||
SyncLog.normal(`${WebdavApi.L}._makeRequest() 404 Not Found`, {
|
||||
PFLog.normal(`${WebdavApi.L}._makeRequest() 404 Not Found`, {
|
||||
method,
|
||||
path,
|
||||
});
|
||||
|
|
@ -1031,7 +1021,7 @@ export class WebdavApi {
|
|||
];
|
||||
|
||||
if (!validWebDavStatuses.includes(response.status)) {
|
||||
SyncLog.critical(`${WebdavApi.L}._makeRequest() HTTP error`, {
|
||||
PFLog.critical(`${WebdavApi.L}._makeRequest() HTTP error`, {
|
||||
method,
|
||||
path,
|
||||
status: response.status,
|
||||
|
|
@ -1040,7 +1030,7 @@ export class WebdavApi {
|
|||
}
|
||||
return response;
|
||||
} catch (e) {
|
||||
SyncLog.critical(`${WebdavApi.L}._makeRequest() CapacitorHttp error`, {
|
||||
PFLog.critical(`${WebdavApi.L}._makeRequest() CapacitorHttp error`, {
|
||||
method,
|
||||
path,
|
||||
error: e,
|
||||
|
|
@ -1065,7 +1055,7 @@ export class WebdavApi {
|
|||
|
||||
// Handle 404 specifically to throw RemoteFileNotFoundAPIError consistently
|
||||
if (response.status === 404) {
|
||||
SyncLog.normal(`${WebdavApi.L}._makeRequest() 404 Not Found`, {
|
||||
PFLog.normal(`${WebdavApi.L}._makeRequest() 404 Not Found`, {
|
||||
method,
|
||||
path,
|
||||
});
|
||||
|
|
@ -1088,7 +1078,7 @@ export class WebdavApi {
|
|||
];
|
||||
|
||||
if (!validWebDavStatuses.includes(response.status)) {
|
||||
SyncLog.critical(`${WebdavApi.L}._makeRequest() HTTP error`, {
|
||||
PFLog.critical(`${WebdavApi.L}._makeRequest() HTTP error`, {
|
||||
method,
|
||||
path,
|
||||
status: response.status,
|
||||
|
|
@ -1098,7 +1088,7 @@ export class WebdavApi {
|
|||
}
|
||||
return response;
|
||||
} catch (e) {
|
||||
SyncLog.critical(`${WebdavApi.L}._makeRequest() network error`, {
|
||||
PFLog.critical(`${WebdavApi.L}._makeRequest() network error`, {
|
||||
method,
|
||||
path,
|
||||
error: e,
|
||||
|
|
@ -1129,7 +1119,7 @@ export class WebdavApi {
|
|||
.replace(/"/g, '')
|
||||
.trim();
|
||||
|
||||
SyncLog.verbose(`${WebdavApi.L}.cleanRev() "${rev}" -> "${result}"`);
|
||||
PFLog.verbose(`${WebdavApi.L}.cleanRev() "${rev}" -> "${result}"`);
|
||||
return result;
|
||||
}
|
||||
|
||||
|
|
@ -1144,7 +1134,7 @@ export class WebdavApi {
|
|||
const url = new URL(normalizedPath, baseUrl).toString();
|
||||
|
||||
// Log for debugging - increased log level for better visibility
|
||||
SyncLog.error(`${WebdavApi.L}._getUrl() constructed URL`, {
|
||||
PFLog.error(`${WebdavApi.L}._getUrl() constructed URL`, {
|
||||
baseUrl,
|
||||
path,
|
||||
normalizedPath,
|
||||
|
|
@ -1210,13 +1200,13 @@ export class WebdavApi {
|
|||
try {
|
||||
// Check if xmlText is empty or not valid XML
|
||||
if (!xmlText || xmlText.trim() === '') {
|
||||
SyncLog.critical(`${WebdavApi.L}._parsePropsFromXml() Empty XML response`);
|
||||
PFLog.critical(`${WebdavApi.L}._parsePropsFromXml() Empty XML response`);
|
||||
return null;
|
||||
}
|
||||
|
||||
// Check if response is HTML instead of XML
|
||||
if (this._isHtmlResponse(xmlText)) {
|
||||
SyncLog.critical(
|
||||
PFLog.critical(
|
||||
`${WebdavApi.L}._parsePropsFromXml() Received HTML instead of XML`,
|
||||
{
|
||||
requestPath,
|
||||
|
|
@ -1232,7 +1222,7 @@ export class WebdavApi {
|
|||
// Check for parsing errors
|
||||
const parserError = xmlDoc.querySelector('parsererror');
|
||||
if (parserError) {
|
||||
SyncLog.critical(
|
||||
PFLog.critical(
|
||||
`${WebdavApi.L}._parsePropsFromXml() XML parsing error`,
|
||||
parserError.textContent,
|
||||
);
|
||||
|
|
@ -1257,12 +1247,12 @@ export class WebdavApi {
|
|||
}
|
||||
}
|
||||
|
||||
SyncLog.critical(
|
||||
PFLog.critical(
|
||||
`${WebdavApi.L}._parsePropsFromXml() No matching response found for path: ${requestPath}`,
|
||||
);
|
||||
return null;
|
||||
} catch (error) {
|
||||
SyncLog.critical(`${WebdavApi.L}._parsePropsFromXml() parsing error`, error);
|
||||
PFLog.critical(`${WebdavApi.L}._parsePropsFromXml() parsing error`, error);
|
||||
return null;
|
||||
}
|
||||
}
|
||||
|
|
@ -1297,7 +1287,7 @@ export class WebdavApi {
|
|||
|
||||
const parserError = xmlDoc.querySelector('parsererror');
|
||||
if (parserError) {
|
||||
SyncLog.critical(
|
||||
PFLog.critical(
|
||||
`${WebdavApi.L}._parseMultiplePropsFromXml() XML parsing error`,
|
||||
parserError.textContent,
|
||||
);
|
||||
|
|
@ -1326,16 +1316,13 @@ export class WebdavApi {
|
|||
|
||||
return results;
|
||||
} catch (error) {
|
||||
SyncLog.critical(
|
||||
`${WebdavApi.L}._parseMultiplePropsFromXml() parsing error`,
|
||||
error,
|
||||
);
|
||||
PFLog.critical(`${WebdavApi.L}._parseMultiplePropsFromXml() parsing error`, error);
|
||||
return [];
|
||||
}
|
||||
}
|
||||
|
||||
private async _ensureParentDirectoryExists(filePath: string): Promise<void> {
|
||||
SyncLog.normal(`${WebdavApi.L}._ensureParentDirectoryExists() called for`, {
|
||||
PFLog.normal(`${WebdavApi.L}._ensureParentDirectoryExists() called for`, {
|
||||
filePath,
|
||||
});
|
||||
|
||||
|
|
@ -1343,7 +1330,7 @@ export class WebdavApi {
|
|||
|
||||
// Don't process if it's a root-level file
|
||||
if (pathParts.length <= 1) {
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}._ensureParentDirectoryExists() no parent directory needed for root-level file`,
|
||||
);
|
||||
return;
|
||||
|
|
@ -1351,7 +1338,7 @@ export class WebdavApi {
|
|||
|
||||
// Remove the filename to get directory path parts
|
||||
const dirParts = pathParts.slice(0, -1);
|
||||
SyncLog.normal(`${WebdavApi.L}._ensureParentDirectoryExists() directory parts`, {
|
||||
PFLog.normal(`${WebdavApi.L}._ensureParentDirectoryExists() directory parts`, {
|
||||
dirParts,
|
||||
});
|
||||
|
||||
|
|
@ -1361,15 +1348,15 @@ export class WebdavApi {
|
|||
const currentPath = dirParts.slice(0, i).join('/');
|
||||
|
||||
try {
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}._ensureParentDirectoryExists() attempting to create directory: ${currentPath}`,
|
||||
);
|
||||
await this.createFolder({ folderPath: currentPath });
|
||||
SyncLog.normal(
|
||||
PFLog.normal(
|
||||
`${WebdavApi.L}._ensureParentDirectoryExists() successfully created directory: ${currentPath}`,
|
||||
);
|
||||
} catch (error: any) {
|
||||
SyncLog.error(
|
||||
PFLog.error(
|
||||
`${WebdavApi.L}._ensureParentDirectoryExists() error creating directory: ${currentPath}`,
|
||||
{
|
||||
error,
|
||||
|
|
@ -1380,14 +1367,14 @@ export class WebdavApi {
|
|||
|
||||
// Check if it's a 404 error, which might indicate the parent doesn't exist
|
||||
if (error?.status === 404 || error instanceof RemoteFileNotFoundAPIError) {
|
||||
SyncLog.critical(
|
||||
PFLog.critical(
|
||||
`${WebdavApi.L}._ensureParentDirectoryExists() got 404 for directory creation, this might indicate a path issue`,
|
||||
{ currentPath },
|
||||
);
|
||||
}
|
||||
|
||||
// Log the error but continue - let the actual upload operation fail with a clearer error
|
||||
SyncLog.error(
|
||||
PFLog.error(
|
||||
`${WebdavApi.L}._ensureParentDirectoryExists() ignoring error for ${currentPath}`,
|
||||
error,
|
||||
);
|
||||
|
|
@ -1396,7 +1383,7 @@ export class WebdavApi {
|
|||
}
|
||||
|
||||
private _checkCommonErrors(e: any, targetPath: string): void {
|
||||
SyncLog.critical(`${WebdavApi.L} API error for ${targetPath}`, e);
|
||||
PFLog.critical(`${WebdavApi.L} API error for ${targetPath}`, e);
|
||||
|
||||
const status = e?.status || e?.response?.status;
|
||||
// Handle common HTTP error codes
|
||||
|
|
|
|||
|
|
@@ -1,6 +1,6 @@
import { DBNames, SyncProviderId } from '../pfapi.const';
import { Database } from '../db/database';
-import { SyncLog } from '../../../core/log';
+import { PFLog } from '../../../core/log';
import { PFEventEmitter } from '../util/events';
import { PrivateCfgByProviderId } from '../pfapi.model';

@@ -29,7 +29,7 @@ export class SyncProviderPrivateCfgStore<PID extends SyncProviderId> {
* @throws Error if database load operation fails
*/
async load(): Promise<PrivateCfgByProviderId<PID> | null> {
-SyncLog.verbose(
+PFLog.verbose(
`${SyncProviderPrivateCfgStore.L}.${this.load.name}`,
this._privateCfgInMemory,
);

@@ -48,7 +48,7 @@ export class SyncProviderPrivateCfgStore<PID extends SyncProviderId> {
}
return loadedConfig;
} catch (error) {
-SyncLog.critical(`Failed to load private config: ${error}`);
+PFLog.critical(`Failed to load private config: ${error}`);
throw new Error(`Failed to load private config: ${error}`);
}
}

@@ -61,11 +61,7 @@ export class SyncProviderPrivateCfgStore<PID extends SyncProviderId> {
*/
async save(privateCfg: PrivateCfgByProviderId<PID>): Promise<unknown> {
const key = this._providerId;
-SyncLog.normal(
-`${SyncProviderPrivateCfgStore.L}.${this.save.name}()`,
-key,
-privateCfg,
-);
+PFLog.normal(`${SyncProviderPrivateCfgStore.L}.${this.save.name}()`, key, privateCfg);

this._privateCfgInMemory = privateCfg;

@@ -79,7 +75,7 @@ export class SyncProviderPrivateCfgStore<PID extends SyncProviderId> {
// NOTE we always want to ignore DB lock during sync as it is unrelated to sync data in every single case
return await this._db.save(this._dbKey, privateCfg, true);
} catch (error) {
-SyncLog.critical(`Failed to save private config: ${error}`);
+PFLog.critical(`Failed to save private config: ${error}`);
throw new Error(`Failed to save private config: ${error}`);
}
}

@@ -18,7 +18,7 @@ import {
NoRemoteMetaFile,
UnknownSyncStateError,
} from '../errors/errors';
-import { SyncLog } from '../../../core/log';
+import { PFLog } from '../../../core/log';
import { MetaModelCtrl } from '../model-ctrl/meta-model-ctrl';
import { EncryptAndCompressHandlerService } from './encrypt-and-compress-handler.service';
import { cleanRev } from '../util/clean-rev';

@@ -95,7 +95,7 @@ export class SyncService<const MD extends ModelCfgs> {
}
const localMeta0 = await this._metaModelCtrl.load();

-SyncLog.normal(`${SyncService.L}.${this.sync.name}(): Initial meta check`, {
+PFLog.normal(`${SyncService.L}.${this.sync.name}(): Initial meta check`, {
lastUpdate: localMeta0.lastUpdate,
lastSyncedUpdate: localMeta0.lastSyncedUpdate,
metaRev: localMeta0.metaRev,

@@ -106,7 +106,7 @@ export class SyncService<const MD extends ModelCfgs> {
if (localMeta0.lastSyncedUpdate === localMeta0.lastUpdate) {
const metaRev = await this._metaFileSyncService.getRev(localMeta0.metaRev);
if (metaRev === localMeta0.metaRev) {
-SyncLog.normal(
+PFLog.normal(
`${SyncService.L}.${this.sync.name}(): Early return - already in sync`,
);
return { status: SyncStatus.InSync };

@@ -122,7 +122,7 @@ export class SyncService<const MD extends ModelCfgs> {
const { status, conflictData } = getSyncStatusFromMetaFiles(remoteMeta, localMeta);

-SyncLog.normal(
+PFLog.normal(
`${SyncService.L}.${this.sync.name}(): __SYNC_START__ metaFileCheck`,
status,
{

@@ -152,7 +152,7 @@ export class SyncService<const MD extends ModelCfgs> {
modelsToUpdate: toUpdate,
});
} catch (error) {
-SyncLog.critical('Failed to emit onBeforeUpdateLocal event', error);
+PFLog.critical('Failed to emit onBeforeUpdateLocal event', error);
// Continue with sync even if backup event fails
}

@@ -166,7 +166,7 @@ export class SyncService<const MD extends ModelCfgs> {
switch (mvcR) {
case ModelVersionCheckResult.MinorUpdate:
case ModelVersionCheckResult.MajorUpdate:
-SyncLog.normal('Downloading all since model version changed');
+PFLog.normal('Downloading all since model version changed');
await this.downloadAll();
return { status: SyncStatus.UpdateLocalAll };
case ModelVersionCheckResult.RemoteMajorAhead:

@@ -187,7 +187,7 @@ export class SyncService<const MD extends ModelCfgs> {
case SyncStatus.InSync:
// Ensure lastSyncedUpdate is set even when already in sync
if (localMeta.lastSyncedUpdate !== localMeta.lastUpdate) {
-SyncLog.normal('InSync but lastSyncedUpdate needs update', {
+PFLog.normal('InSync but lastSyncedUpdate needs update', {
lastSyncedUpdate: localMeta.lastSyncedUpdate,
lastUpdate: localMeta.lastUpdate,
});

@@ -216,10 +216,10 @@ export class SyncService<const MD extends ModelCfgs> {
throw new UnknownSyncStateError();
}
} catch (e) {
-SyncLog.critical(`${SyncService.L}.${this.sync.name}(): Sync error`, e);
+PFLog.critical(`${SyncService.L}.${this.sync.name}(): Sync error`, e);

if (e instanceof NoRemoteMetaFile) {
-SyncLog.critical('No remote meta file found, uploading all data');
+PFLog.critical('No remote meta file found, uploading all data');
// if there is no remote meta file, we need to upload all data
await this.uploadAll(true);
return { status: SyncStatus.UpdateRemoteAll };

@@ -227,7 +227,7 @@ export class SyncService<const MD extends ModelCfgs> {
// This indicates an incomplete sync, retry upload
if (e instanceof LockFromLocalClientPresentError) {
-SyncLog.critical('Lock from local client present, forcing upload of all data');
+PFLog.critical('Lock from local client present, forcing upload of all data');
await this.uploadAll(true);
return { status: SyncStatus.UpdateRemoteAll };
}

@@ -240,7 +240,7 @@ export class SyncService<const MD extends ModelCfgs> {
* @param isForceUpload Whether to force upload even if lock exists
*/
async uploadAll(isForceUpload: boolean = false): Promise<void> {
-SyncLog.normal(
+PFLog.normal(
`${SyncService.L}.${this.uploadAll.name}(): Uploading all data to remote, force=${isForceUpload}`,
);

@@ -262,7 +262,7 @@ export class SyncService<const MD extends ModelCfgs> {
const result = await this._metaFileSyncService.download();
remoteMeta = result.remoteMeta;
} catch (e) {
-SyncLog.error('Warning: Cannot fetch remote metadata during force upload', e);
+PFLog.error('Warning: Cannot fetch remote metadata during force upload', e);
}

// Merge vector clocks if remote metadata was successfully fetched

@@ -271,13 +271,13 @@ export class SyncService<const MD extends ModelCfgs> {
// Sanitize remote vector clock before merging
remoteVector = sanitizeVectorClock(remoteVector);
localVector = mergeVectorClocks(localVector, remoteVector);
-SyncLog.normal('Merged remote vector clock for force upload', {
+PFLog.normal('Merged remote vector clock for force upload', {
localOriginal: local.vectorClock,
remote: remoteVector,
merged: localVector,
});
} else {
-SyncLog.error('Proceeding with force upload without remote vector clock merge');
+PFLog.error('Proceeding with force upload without remote vector clock merge');
}

let newVector = incrementVectorClock(localVector, clientId);

@@ -322,7 +322,7 @@ export class SyncService<const MD extends ModelCfgs> {
);
} catch (e) {
if (e instanceof LockFromLocalClientPresentError) {
-SyncLog.critical(
+PFLog.critical(
'Lock from local client detected during uploadAll, forcing upload',
);
return await this.uploadAll(true);

@@ -336,7 +336,7 @@ export class SyncService<const MD extends ModelCfgs> {
* @param isSkipModelRevMapCheck Whether to skip revision map checks
*/
async downloadAll(isSkipModelRevMapCheck: boolean = false): Promise<void> {
-SyncLog.normal(
+PFLog.normal(
`${SyncService.L}.${this.downloadAll.name}(): Downloading all data from remote`,
);

@@ -384,7 +384,7 @@ export class SyncService<const MD extends ModelCfgs> {
errorContext: 'DOWNLOAD',
});

-SyncLog.normal(`${SyncService.L}.${this.downloadToLocal.name}()`, {
+PFLog.normal(`${SyncService.L}.${this.downloadToLocal.name}()`, {
remoteMeta: remote,
localMeta: local,
remoteRev,

@@ -399,7 +399,7 @@ export class SyncService<const MD extends ModelCfgs> {
this._currentSyncProvider$.getOrError().isLimitedToSingleFileSync
) {
await this._modelSyncService.updateLocalMainModelsFromRemoteMetaFile(remote);
-SyncLog.verbose('RevMap comparison', {
+PFLog.verbose('RevMap comparison', {
isEqual: JSON.stringify(remote.revMap) === JSON.stringify(local.revMap),
remoteRevMap: remote.revMap,
localRevMap: local.revMap,

@@ -456,7 +456,7 @@ export class SyncService<const MD extends ModelCfgs> {
isSkipModelRevMapCheck: boolean = false,
isDownloadAll: boolean = false,
): Promise<void> {
-SyncLog.normal(`${SyncService.L}.${this._downloadToLocalMULTI.name}()`, {
+PFLog.normal(`${SyncService.L}.${this._downloadToLocalMULTI.name}()`, {
remote,
local,
remoteRev,

@@ -493,7 +493,7 @@ export class SyncService<const MD extends ModelCfgs> {
if (isDownloadAll) {
const fullData = { ...dataMap, ...remote.mainModelData } as any;
-SyncLog.normal(`${SyncService.L}.${this._downloadToLocalMULTI.name}()`, {
+PFLog.normal(`${SyncService.L}.${this._downloadToLocalMULTI.name}()`, {
fullData,
dataMap,
realRemoteRevMap,

@@ -554,7 +554,7 @@ export class SyncService<const MD extends ModelCfgs> {
local: LocalMeta,
lastRemoteRev: string | null,
): Promise<void> {
-SyncLog.normal(`${SyncService.L}.${this.uploadToRemote.name}()`, {
+PFLog.normal(`${SyncService.L}.${this.uploadToRemote.name}()`, {
remoteMeta: remote,
localMeta: local,
});

@@ -591,7 +591,7 @@ export class SyncService<const MD extends ModelCfgs> {
);

// Update local after successful upload
-SyncLog.normal(
+PFLog.normal(
`${SyncService.L}.${this.uploadToRemote.name}(): Updating local metadata after upload`,
{
localLastUpdate: local.lastUpdate,

@@ -611,7 +611,7 @@ export class SyncService<const MD extends ModelCfgs> {
await this._metaFileSyncService.saveLocal(updatedMeta);

-SyncLog.normal(
+PFLog.normal(
`${SyncService.L}.${this.uploadToRemote.name}(): Local metadata updated successfully`,
);
return;

@@ -638,7 +638,7 @@ export class SyncService<const MD extends ModelCfgs> {
errorContext: 'UPLOAD',
});

-SyncLog.normal(`${SyncService.L}.${this._uploadToRemoteMULTI.name}()`, {
+PFLog.normal(`${SyncService.L}.${this._uploadToRemoteMULTI.name}()`, {
toUpdate,
toDelete,
remote,

@@ -670,7 +670,7 @@ export class SyncService<const MD extends ModelCfgs> {
// Execute operations with load balancing
await loadBalancer([...uploadModelFns, ...toDeleteFns], maxConcurrentRequests);

-SyncLog.verbose('Final revMap after uploads', realRevMap);
+PFLog.verbose('Final revMap after uploads', realRevMap);

// Validate and upload the final revMap
const validatedRevMap = validateRevMap(realRevMap);

@@ -1,5 +1,5 @@
import { PfapiEvents, PfapiEventPayloadMap } from '../pfapi.model';
-import { SyncLog } from '../../../core/log';
+import { PFLog } from '../../../core/log';

type EventHandler<T> = (data: T) => void;

@@ -34,7 +34,7 @@ export class PFEventEmitter {
}

emit<K extends PfapiEvents>(event: K, data: PfapiEventPayloadMap[K]): void {
-SyncLog.normal(`EV:${event}`, data, this.events);
+PFLog.normal(`EV:${event}`, data, this.events);
this.events[event].forEach((handler) => handler(data));
}

@@ -1,7 +1,7 @@
import { ConflictData, LocalMeta, RemoteMeta, VectorClock } from '../pfapi.model';
import { ConflictReason, SyncStatus } from '../pfapi.const';
import { ImpossibleError, InvalidMetaError, NoRemoteMetaFile } from '../errors/errors';
-import { SyncLog } from '../../../core/log';
+import { PFLog } from '../../../core/log';
import { hasVectorClocks } from './backwards-compat';
import {
compareVectorClocks,

@@ -37,7 +37,7 @@ export const getSyncStatusFromMetaFiles = (
// Handle the case where remote is empty (lastUpdate = 0) - should upload local data
if (remote.lastUpdate === 0 && local.lastUpdate > 0) {
-SyncLog.normal('Remote is empty, uploading local data');
+PFLog.normal('Remote is empty, uploading local data');
return {
status: SyncStatus.UpdateRemote,
};

@@ -45,7 +45,7 @@ export const getSyncStatusFromMetaFiles = (
// Handle the case where local is empty (lastUpdate = 0) - should download remote data
if (local.lastUpdate === 0 && remote.lastUpdate > 0) {
-SyncLog.normal('Local is empty, downloading remote data');
+PFLog.normal('Local is empty, downloading remote data');
return {
status: SyncStatus.UpdateLocal,
};

@@ -60,7 +60,7 @@ export const getSyncStatusFromMetaFiles = (
localTotalUpdates <= MINIMAL_UPDATE_THRESHOLD &&
remoteTotalUpdates > localTotalUpdates * MINIMAL_FACTOR
) {
-SyncLog.normal('First-time sync detected with minimal local data', {
+PFLog.normal('First-time sync detected with minimal local data', {
localTotalUpdates,
remoteTotalUpdates,
threshold: MINIMAL_UPDATE_THRESHOLD,

@@ -88,7 +88,7 @@ export const getSyncStatusFromMetaFiles = (
const remoteHasVectorClock =
remote.vectorClock && Object.keys(remote.vectorClock).length > 0;

-SyncLog.normal('Vector clock availability check', {
+PFLog.normal('Vector clock availability check', {
localHasVectorClock,
remoteHasVectorClock,
localVectorClock: local.vectorClock,

@@ -124,7 +124,7 @@ export const getSyncStatusFromMetaFiles = (
const remoteVector = remote.vectorClock!;
const lastSyncedVector = local.lastSyncedVectorClock;

-SyncLog.normal('Using vector clocks for sync status', {
+PFLog.normal('Using vector clocks for sync status', {
localVector: vectorClockToString(localVector),
remoteVector: vectorClockToString(remoteVector),
lastSyncedVector: vectorClockToString(lastSyncedVector),

@@ -169,7 +169,7 @@ export const getSyncStatusFromMetaFiles = (
};
}
} catch (e) {
-SyncLog.critical('Vector clock comparison failed', {
+PFLog.critical('Vector clock comparison failed', {
error: e,
localVector: vectorClockToString(localVector),
remoteVector: vectorClockToString(remoteVector),

@@ -223,7 +223,7 @@ const _checkForUpdateVectorClock = (params: {
throw new Error('Invalid remoteVector in vector clock comparison');
}

-SyncLog.normal('Vector clock check', {
+PFLog.normal('Vector clock check', {
localVector: vectorClockToString(localVector),
remoteVector: vectorClockToString(remoteVector),
lastSyncedVector: vectorClockToString(lastSyncedVector),

@@ -243,7 +243,7 @@ const _checkForUpdateVectorClock = (params: {
// Both have changes - need to check if they're truly concurrent
const comparison = compareVectorClocks(localVector, remoteVector);

-SyncLog.normal('Both sides have changes, vector comparison result:', comparison);
+PFLog.normal('Both sides have changes, vector comparison result:', comparison);

// If one vector clock dominates the other, we can still sync
if (comparison === VectorClockComparison.LESS_THAN) {

@@ -1,5 +1,5 @@
import { RevMap } from '../pfapi.model';
-import { SyncLog } from '../../../core/log';
+import { PFLog } from '../../../core/log';

export const isSameRevMap = (revMap1: RevMap, revMap2: RevMap): boolean => {
if (Object.keys(revMap1).length !== Object.keys(revMap2).length) {

@@ -8,7 +8,7 @@ export const isSameRevMap = (revMap1: RevMap, revMap2: RevMap): boolean => {
for (const key in revMap1) {
if (revMap1[key] !== revMap2[key]) {
-SyncLog.critical(`${isSameRevMap.name}(): ${key} is different`, {
+PFLog.critical(`${isSameRevMap.name}(): ${key} is different`, {
revMap1,
revMap2,
});

@@ -1,4 +1,4 @@
-import { SyncLog } from '../../../core/log';
+import { PFLog } from '../../../core/log';

export const loadBalancer = <T>(
asyncTasks: (() => Promise<T>)[],

@@ -13,7 +13,7 @@ export const loadBalancer = <T>(
const batch = asyncTasks.slice(index, index + batchSize);

// Execute all promises in the current batch concurrently
-SyncLog.normal(
+PFLog.normal(
// eslint-disable-next-line no-mixed-operators
`loadBalancer ${index / batchSize + 1} / ${Math.ceil(asyncTasks.length / batchSize)}`,
);

@@ -1,4 +1,4 @@
-import { SyncLog } from '../../../core/log';
+import { PFLog } from '../../../core/log';

/**

@@ -105,7 +105,7 @@ export const sanitizeVectorClock = (clock: any): VectorClock => {
}
}
} catch (e) {
-SyncLog.error('Error sanitizing vector clock', e);
+PFLog.error('Error sanitizing vector clock', e);
return {};
}

@@ -192,7 +192,7 @@ export const incrementVectorClock = (
// Handle overflow - reset to 1 if approaching max safe integer
if (currentValue >= Number.MAX_SAFE_INTEGER - 1000) {
-SyncLog.error('Vector clock component overflow protection triggered', {
+PFLog.error('Vector clock component overflow protection triggered', {
clientId,
currentValue,
});

@@ -204,7 +204,7 @@ export const incrementVectorClock = (
// Warn if vector clock is getting large
const size = Object.keys(newClock).length;
if (size > 30) {
-SyncLog.error('Warning: Vector clock growing large', {
+PFLog.error('Warning: Vector clock growing large', {
size,
clientId,
threshold: 30,

@@ -293,7 +293,7 @@ export const hasVectorClockChanges = (
// This detects when a client's entry has been removed/corrupted
for (const [clientId, refVal] of Object.entries(reference!)) {
if (refVal > 0 && !(clientId in current!)) {
-SyncLog.error('Vector clock change detected: client missing from current', {
+PFLog.error('Vector clock change detected: client missing from current', {
clientId,
refValue: refVal,
currentClock: vectorClockToString(current),

@@ -334,7 +334,7 @@ export const limitVectorClockSize = (
return clock;
}

-SyncLog.error('Vector clock pruning triggered', {
+PFLog.error('Vector clock pruning triggered', {
originalSize: entries.length,
maxSize: MAX_VECTOR_CLOCK_SIZE,
currentClientId,
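
The sketch below is illustrative only and is not part of the commit. It shows the call-site shape that the hunks above apply throughout WebdavApi, SyncService and the vector clock utilities: a PFLog method (verbose, normal, error, critical) called with a class-prefixed message plus structured context arguments. The MinimalLogger interface, the console-backed PFLog stand-in and the ExampleApi class are assumptions made for the sketch; the real PFLog implementation lives in src/app/core/log and is not reproduced here.

// Illustrative stand-in for the logger shape used at the call sites above (assumption, not the real API).
interface MinimalLogger {
  verbose(msg: string, ...args: unknown[]): void;
  normal(msg: string, ...args: unknown[]): void;
  error(msg: string, ...args: unknown[]): void;
  critical(msg: string, ...args: unknown[]): void;
}

// Hypothetical console-backed implementation, for illustration only.
const PFLog: MinimalLogger = {
  verbose: (msg, ...args) => console.debug(msg, ...args),
  normal: (msg, ...args) => console.log(msg, ...args),
  error: (msg, ...args) => console.warn(msg, ...args),
  critical: (msg, ...args) => console.error(msg, ...args),
};

// Call-site pattern seen in the diff: a static class label plus method name, then context data.
class ExampleApi {
  private static readonly L = 'ExampleApi';

  async download(path: string): Promise<void> {
    PFLog.normal(`${ExampleApi.L}.download() received partial content for ${path}`);
  }
}

void new ExampleApi().download('some/file.json');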