refactor: use PFLog for all logging in pfapi directory

- Replaced all Log.* calls with PFLog.* in pfapi directory
- Modified 24 files with 106 total changes
- All pfapi-related logs now have [pf] context prefix

This ensures consistent context-aware logging for all persistence framework
operations, making it easier to filter and debug data-related issues.
This commit is contained in:
Johannes Millan 2025-07-10 14:35:56 +02:00
parent 5007038fe8
commit de3d1106bd
26 changed files with 282 additions and 129 deletions

146
scripts/migrate-to-pf-log.ts Executable file
View file

@ -0,0 +1,146 @@
#!/usr/bin/env ts-node
import * as fs from 'fs';
import * as path from 'path';
import * as glob from 'glob';
// One substitution rule: a global pattern matching a legacy `Log.<method>(`
// call site and the `PFLog.<method>(` text that replaces it.
interface Replacement {
  pattern: RegExp;
  replacement: string;
}

// Log methods whose call sites get migrated. Order is significant: consumers
// indexing into `replacements` see entries in exactly this sequence.
const LOG_METHODS = ['log', 'err', 'info', 'debug', 'verbose', 'critical'] as const;

const replacements: Replacement[] = LOG_METHODS.map((method) => ({
  // `\b` keeps prefixed identifiers such as `SyncLog.log(` or `PFLog.log(`
  // from matching; the escaped dot and paren pin the exact call syntax.
  pattern: new RegExp(`\\bLog\\.${method}\\(`, 'g'),
  replacement: `PFLog.${method}(`,
}));
/**
 * Ensures `PFLog` is importable in a file whose `Log.*` calls are being
 * migrated to `PFLog.*`.
 *
 * - If the file already imports `PFLog` from a `…/log` module, the content
 *   is returned unchanged (any now-unused `Log` import is left alone).
 * - Otherwise the existing `Log` import is extended with `PFLog`, and `Log`
 *   itself is dropped from the import list when no `Log` reference would
 *   survive the planned replacements.
 *
 * @param content full text of the source file being migrated
 * @returns the content with its log import statement updated
 */
function updateImports(content: string): string {
  // Check if file already imports PFLog from a log module.
  const hasPFLogImport =
    /import\s*{[^}]*\bPFLog\b[^}]*}\s*from\s*['"][^'"]*\/log['"]/.test(content);
  if (hasPFLogImport) {
    // PFLog is already available; leave the import list as-is.
    return content;
  }
  // Find the existing Log import so PFLog can be added to it.
  const logImportRegex = /import\s*{([^}]*\bLog\b[^}]*)}\s*from\s*(['"][^'"]*\/log['"])/;
  const match = content.match(logImportRegex);
  if (match) {
    const [fullMatch, imports, importPath] = match;
    const importList = imports
      .split(',')
      .map((s) => s.trim())
      // Drop empty tokens from trailing commas (e.g. `import { Log, }`),
      // which would otherwise rebuild a malformed import list.
      .filter((s) => s.length > 0);
    // Add PFLog if not already there.
    if (!importList.includes('PFLog')) {
      importList.push('PFLog');
    }
    // Simulate the replacements to see whether any `Log` usage remains.
    let tempContent = content;
    for (const { pattern, replacement } of replacements) {
      tempContent = tempContent.replace(pattern, replacement);
    }
    // Exclude the import statement itself from the usage check.
    tempContent = tempContent.replace(logImportRegex, '');
    // If Log is no longer used anywhere else, remove it from the imports.
    // NOTE: `\bLog\b` also matches `Log` inside comments, which conservatively
    // keeps the import around in that case.
    const logStillUsed = /\bLog\b/.test(tempContent);
    if (!logStillUsed) {
      const logIndex = importList.indexOf('Log');
      if (logIndex > -1) {
        importList.splice(logIndex, 1);
      }
    }
    const newImports = importList.join(', ');
    const newImportStatement = `import { ${newImports} } from ${importPath}`;
    // Use a replacer function so `$`-sequences in the new statement are
    // inserted literally instead of being treated as replacement patterns.
    content = content.replace(fullMatch, () => newImportStatement);
  }
  return content;
}
/**
 * Applies every Log→PFLog replacement to one file and rewrites it on disk
 * when anything actually changed.
 *
 * @param filePath path of the TypeScript file to migrate
 * @returns whether the file was rewritten and how many call sites changed
 */
function processFile(filePath: string): { modified: boolean; changes: number } {
  try {
    const originalContent = fs.readFileSync(filePath, 'utf8');
    let content = originalContent;
    let changeCount = 0;
    // Rewrite every matching call site, counting occurrences as we go.
    for (const { pattern, replacement } of replacements) {
      content = content.replace(pattern, () => {
        changeCount++;
        return replacement;
      });
    }
    // Only touch the import statement when call sites were rewritten.
    if (changeCount > 0) {
      content = updateImports(content);
    }
    const modified = content !== originalContent;
    if (modified) {
      fs.writeFileSync(filePath, content, 'utf8');
    }
    return { modified, changes: changeCount };
  } catch (error) {
    // Report and keep going; one unreadable file shouldn't abort the run.
    console.error(`Error processing ${filePath}:`, error);
    return { modified: false, changes: 0 };
  }
}
/**
 * Entry point: migrates every non-spec TypeScript file under src/app/pfapi
 * from `Log.*` to `PFLog.*` and prints a per-file summary.
 */
function main(): void {
  console.log('Migrating Log to PFLog in pfapi directory...\n');
  // Collect all candidate files, skipping specs and node_modules.
  const files = glob.sync('src/app/pfapi/**/*.ts', {
    ignore: ['**/*.spec.ts', '**/node_modules/**'],
    absolute: true,
  });
  console.log(`Found ${files.length} TypeScript files in pfapi directory\n`);
  const modifiedFiles: { path: string; changes: number }[] = [];
  let totalChanges = 0;
  files.forEach((file) => {
    const { modified, changes } = processFile(file);
    if (modified) {
      modifiedFiles.push({ path: file, changes });
      totalChanges += changes;
    }
  });
  console.log('\nMigration complete!\n');
  console.log(`Total changes: ${totalChanges}`);
  console.log(`Modified ${modifiedFiles.length} files:\n`);
  // Report the files with the most changes first.
  const byChangesDesc = [...modifiedFiles].sort((a, b) => b.changes - a.changes);
  for (const { path: filePath, changes } of byChangesDesc) {
    console.log(`  - ${path.relative(process.cwd(), filePath)} (${changes} changes)`);
  }
  if (modifiedFiles.length === 0) {
    console.log('  No files needed modification.');
  }
}
main();

View file

@ -243,3 +243,4 @@ export class Log {
export const SyncLog = Log.withContext('sync');
export const PFLog = Log.withContext('pf');
export const PluginLog = Log.withContext('plugin');
export const IssueLog = Log.withContext('issue');

View file

@ -1,5 +1,5 @@
import { CompressError, DecompressError } from '../errors/errors';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
// eslint-disable-next-line prefer-arrow/prefer-arrow-functions
export async function compressWithGzipToString(input: string): Promise<string> {
@ -25,7 +25,7 @@ export async function compressWithGzipToString(input: string): Promise<string> {
return base64;
} catch (error) {
Log.err(error);
PFLog.err(error);
throw new CompressError(error);
}
}
@ -52,7 +52,7 @@ export async function decompressGzipFromString(
// SyncLog.normal( 'Decompression stats', { decompressedLength: decoded.length });
return decoded;
} catch (error) {
Log.err(error);
PFLog.err(error);
throw new DecompressError(error);
}
}

View file

@ -1,7 +1,7 @@
import { DatabaseAdapter } from './database-adapter.model';
import { SyncLog } from '../../../core/log';
import { devError } from '../../../util/dev-error';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
export class Database {
private static readonly L = 'Database';
@ -77,27 +77,27 @@ export class Database {
async remove(key: string, isIgnoreDBLock = false): Promise<unknown> {
this._lastParams = { a: 'remove', key };
if (this._isLocked && !isIgnoreDBLock) {
Log.err('Blocking write during lock');
PFLog.err('Blocking write during lock');
return;
}
try {
return await this._adapter.remove(key);
} catch (e) {
Log.err('DB Remove Error: Last Params,', this._lastParams);
PFLog.err('DB Remove Error: Last Params,', this._lastParams);
return this._errorHandler(e as Error, this.remove, [key]);
}
}
async clearDatabase(isIgnoreDBLock = false): Promise<unknown> {
if (this._isLocked && !isIgnoreDBLock) {
Log.err('Blocking write during lock');
PFLog.err('Blocking write during lock');
return;
}
this._lastParams = { a: 'clearDatabase' };
try {
return await this._adapter.clearDatabase();
} catch (e) {
Log.err('DB Clear Error: Last Params,', this._lastParams);
PFLog.err('DB Clear Error: Last Params,', this._lastParams);
return this._errorHandler(e as Error, this.clearDatabase, []);
}
}
@ -106,7 +106,7 @@ export class Database {
try {
await this._adapter.init();
} catch (e) {
Log.err(e);
PFLog.err(e);
SyncLog.critical('Database initialization failed', {
lastParams: this._lastParams,
error: e,

View file

@ -2,7 +2,7 @@ import { IDBPDatabase } from 'idb/build';
import { DBSchema, openDB } from 'idb';
import { DatabaseAdapter } from './database-adapter.model';
import { MiniObservable } from '../util/mini-observable';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
// otherwise the typing of idb dependency won't work
const FAKE = 'FAAAAAKE' as const;
@ -36,7 +36,7 @@ export class IndexedDbAdapter implements DatabaseAdapter {
// upgrade(db: IDBPDatabase<MyDb>, oldVersion: number, newVersion: number | null, transaction: IDBPTransaction<MyDb>) {
// eslint-disable-next-line prefer-arrow/prefer-arrow-functions
upgrade(db: IDBPDatabase<MyDb>, oldVersion: number, newVersion: number | null) {
Log.log('IDB UPGRADE', oldVersion, newVersion);
PFLog.log('IDB UPGRADE', oldVersion, newVersion);
// TODO
db.createObjectStore(that._dbMainName as typeof FAKE);
// db.createObjectStore(FAKE_DB_MAIN_NAME);

View file

@ -86,17 +86,17 @@ export async function decrypt(data: string, password: string): Promise<string> {
// TESTING CODE
// export const testCrypto = async (): Promise<void> => {
// const enc = await encrypt('HAHAHHA', '1234');
// Log.log('enc', enc);
// PFLog.log('enc', enc);
// decrypt(enc, '1234')
// .then((r) => {
// Log.log('YEAH', r);
// PFLog.log('YEAH', r);
// })
// .catch((r) => {
// Log.log('NOOO', r);
// PFLog.log('NOOO', r);
// });
//
// const decrypted = await decrypt(enc, '1234');
// Log.log('decrypted', decrypted);
// PFLog.log('decrypted', decrypted);
// };
//
// testCrypto();

View file

@ -1,6 +1,6 @@
import { IValidation } from 'typia';
import { AllModelData } from '../pfapi.model';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
class AdditionalLogErrorBase<T = unknown[]> extends Error {
additionalLog: T;
@ -11,11 +11,11 @@ class AdditionalLogErrorBase<T = unknown[]> extends Error {
if (additional.length > 0) {
// SyncLog.critical( this.name, ...additional);
Log.log(this.name, ...additional);
PFLog.log(this.name, ...additional);
try {
Log.log('additional error log: ' + JSON.stringify(additional));
PFLog.log('additional error log: ' + JSON.stringify(additional));
} catch (e) {
Log.log('additional error log not stringified: ', additional, e);
PFLog.log('additional error log not stringified: ', additional, e);
}
}
this.additionalLog = additional as T;
@ -202,24 +202,24 @@ export class ModelValidationError extends Error {
e?: unknown;
}) {
super('ModelValidationError');
Log.log(`ModelValidationError for model ${params.id}:`, params);
PFLog.log(`ModelValidationError for model ${params.id}:`, params);
if (params.validationResult) {
Log.log('validation result: ', params.validationResult);
PFLog.log('validation result: ', params.validationResult);
try {
if ('errors' in params.validationResult) {
const str = JSON.stringify(params.validationResult.errors);
Log.log('validation errors: ' + str);
PFLog.log('validation errors: ' + str);
this.additionalLog = `Model: ${params.id}, Errors: ${str.substring(0, 400)}`;
}
} catch (e) {
Log.err('Error stringifying validation errors:', e);
PFLog.err('Error stringifying validation errors:', e);
}
}
if (params.e) {
Log.log('Additional error:', params.e);
PFLog.log('Additional error:', params.e);
}
}
}
@ -230,17 +230,17 @@ export class DataValidationFailedError extends Error {
constructor(validationResult: IValidation<AllModelData<any>>) {
super('DataValidationFailedError');
Log.log('validation result: ', validationResult);
PFLog.log('validation result: ', validationResult);
try {
if ('errors' in validationResult) {
const str = JSON.stringify(validationResult.errors);
Log.log('validation errors_: ' + str);
PFLog.log('validation errors_: ' + str);
this.additionalLog = str.substring(0, 400);
}
Log.log('validation result_: ' + JSON.stringify(validationResult));
PFLog.log('validation result_: ' + JSON.stringify(validationResult));
} catch (e) {
Log.err('Failed to stringify validation errors:', e);
PFLog.err('Failed to stringify validation errors:', e);
}
}
}

View file

@ -3,7 +3,7 @@ import { Database } from '../db/database';
import { MetaModelCtrl } from './meta-model-ctrl';
import { SyncLog } from '../../../core/log';
import { ModelValidationError } from '../errors/errors';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
// type ExtractModelType<T extends ModelCfg<unknown>> = T extends ModelCfg<infer U> ? U : never;
@ -54,7 +54,7 @@ export class ModelCtrl<MT extends ModelBase> {
try {
data = this.modelCfg.repair(data);
} catch (e) {
Log.err(e);
PFLog.err(e);
throw new ModelValidationError({
id: this.modelId,
data,

View file

@ -36,7 +36,7 @@ import { promiseTimeout } from '../../util/promise-timeout';
import { PFEventEmitter } from './util/events';
import { MigrationService } from './migration/migration.service';
import { IValidation } from 'typia';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
export class Pfapi<const MD extends ModelCfgs> {
private static _wasInstanceCreated = false;
@ -177,7 +177,7 @@ export class Pfapi<const MD extends ModelCfgs> {
if (activeProviderId) {
const provider = this.syncProviders.find((sp) => sp.id === activeProviderId);
if (!provider) {
Log.log(provider, activeProviderId);
PFLog.log(provider, activeProviderId);
throw new InvalidSyncProviderError();
}
this._activeSyncProvider$.next(provider);
@ -372,10 +372,10 @@ export class Pfapi<const MD extends ModelCfgs> {
await this.tmpBackupService.save(await this.getAllSyncModelData());
} catch (error) {
SyncLog.critical(this.importAllSycModelData.name, error);
Log.err(
PFLog.err(
'Could not create valid backup. Onwards on the highway throug the Danger Zone!',
);
Log.err(error);
PFLog.err(error);
}
}
@ -387,7 +387,7 @@ export class Pfapi<const MD extends ModelCfgs> {
const modelData = data[modelId];
const modelCtrl = this.m[modelId];
if (!modelCtrl) {
Log.err('ModelId without Ctrl', modelId, modelData);
PFLog.err('ModelId without Ctrl', modelId, modelData);
if (
SKIPPED_MODEL_IDS.includes(modelId) ||
isSkipLegacyWarnings ||

View file

@ -10,7 +10,7 @@ import {
decompressGzipFromString,
} from '../compression/compression-handler';
import { EncryptAndCompressCfg } from '../pfapi.model';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
export class EncryptAndCompressHandlerService {
private static readonly L = 'EncryptAndCompressHandlerService';
@ -77,7 +77,7 @@ export class EncryptAndCompressHandlerService {
}
if (isEncrypt) {
if (!encryptKey) {
Log.log(encryptKey);
PFLog.log(encryptKey);
throw new Error('No encryption password provided');
}

View file

@ -25,7 +25,7 @@ import { cleanRev } from '../util/clean-rev';
import { getModelIdsToUpdateFromRevMaps } from '../util/get-model-ids-to-update-from-rev-maps';
import { Pfapi } from '../pfapi';
import { SyncProviderId } from '../pfapi.const';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
export class ModelSyncService<MD extends ModelCfgs> {
private static readonly L = 'ModelSyncService';
@ -329,7 +329,7 @@ export class ModelSyncService<MD extends ModelCfgs> {
*/
private _isSameRev(a: string | null, b: string | null): boolean {
if (!a || !b) {
Log.err(`Invalid revs a:${a} and b:${b} given`);
PFLog.err(`Invalid revs a:${a} and b:${b} given`);
return false;
}
if (a === b) {

View file

@ -1,6 +1,6 @@
import { FileAdapter } from '../file-adapter.interface';
import { SafService } from './saf.service';
import { Log } from '../../../../../../core/log';
import { PFLog } from '../../../../../../core/log';
export class SafFileAdapter implements FileAdapter {
constructor(private getUri: () => Promise<string | undefined>) {}
@ -38,7 +38,7 @@ export class SafFileAdapter implements FileAdapter {
} catch (error) {
// Ignore file not found errors
if (error?.toString?.().includes('File not found')) {
Log.err(`File not found for deletion: ${filePath}`);
PFLog.err(`File not found for deletion: ${filePath}`);
return;
}
throw error;

View file

@ -1,6 +1,6 @@
/* eslint-disable */
import { Capacitor, registerPlugin } from '@capacitor/core';
import { Log } from '../../../../../../core/log';
import { PFLog } from '../../../../../../core/log';
// Define the plugin interface for SAF operations
export interface SafPlugin {
@ -71,7 +71,7 @@ export class SafService {
const result = await SafBridge.checkUriPermission({ uri });
return result.hasPermission;
} catch (error) {
Log.err('Error checking SAF permission:', error);
PFLog.err('Error checking SAF permission:', error);
return false;
}
}
@ -110,7 +110,7 @@ export class SafService {
const result = await SafBridge.checkFileExists({ uri, fileName });
return result.exists;
} catch (error) {
Log.err('Error checking file existence:', error);
PFLog.err('Error checking file existence:', error);
return false;
}
}

View file

@ -3,7 +3,7 @@ import { LocalFileSyncBase } from './local-file-sync-base';
import { LocalFileSyncPrivateCfg } from '../../../pfapi.model';
import { SafService } from './droid-saf/saf.service';
import { SafFileAdapter } from './droid-saf/saf-file-adapter';
import { Log } from '../../../../../core/log';
import { PFLog } from '../../../../../core/log';
export class LocalFileSyncAndroid extends LocalFileSyncBase {
constructor(public directory = Directory.Documents) {
@ -40,7 +40,7 @@ export class LocalFileSyncAndroid extends LocalFileSyncBase {
});
return uri;
} catch (error) {
Log.err('Failed to setup SAF:', error);
PFLog.err('Failed to setup SAF:', error);
return undefined;
}
}

View file

@ -1,4 +1,4 @@
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
export class MiniObservable<T, E extends typeof Error = typeof Error> {
private _value: T;
@ -35,7 +35,7 @@ export class MiniObservable<T, E extends typeof Error = typeof Error> {
subscribe(listener: (value: T) => void): () => void {
if (this._closed) {
Log.err('Cannot subscribe to a closed observable');
PFLog.err('Cannot subscribe to a closed observable');
return () => {};
}

View file

@ -1,5 +1,5 @@
import { SyncLog } from '../../../core/log';
import { Log } from '../../../core/log';
import { PFLog } from '../../../core/log';
/**
* Vector Clock implementation for distributed synchronization
@ -125,15 +125,15 @@ export const compareVectorClocks = (
): VectorClockComparison => {
// Handle null/undefined cases
if (isVectorClockEmpty(a) && isVectorClockEmpty(b)) {
Log.err('BOTH VECTOR CLOCKS EMPTY!!!');
PFLog.err('BOTH VECTOR CLOCKS EMPTY!!!');
return VectorClockComparison.CONCURRENT;
}
if (isVectorClockEmpty(a)) {
Log.err('EMPTY VECTOR CLOCK a !!!');
PFLog.err('EMPTY VECTOR CLOCK a !!!');
return VectorClockComparison.CONCURRENT;
}
if (isVectorClockEmpty(b)) {
Log.err('EMPTY VECTOR CLOCK b !!!');
PFLog.err('EMPTY VECTOR CLOCK b !!!');
return VectorClockComparison.CONCURRENT;
}

View file

@ -17,12 +17,12 @@ import {
initialBoardsState,
} from '../../features/boards/store/boards.reducer';
import { DEFAULT_BOARD_CFG, DEFAULT_PANEL_CFG } from '../../features/boards/boards.const';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
export const crossModelMigration2: CrossModelMigrateFn = ((
fullData: AppDataCompleteLegacy,
): AppDataCompleteNew => {
Log.log('____________________Migrate2__________________');
PFLog.log('____________________Migrate2__________________');
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const { lastLocalSyncModelChange, lastArchiveUpdate, taskArchive, ...copy } = fullData;
@ -34,10 +34,10 @@ export const crossModelMigration2: CrossModelMigrateFn = ((
Object.keys((fullData as any as AppDataCompleteNew).timeTracking.project).length
) {
// If time tracking is already migrated, return the original data
Log.err('already migrated despite old model version!!!');
PFLog.err('already migrated despite old model version!!!');
return fullData as any as AppDataCompleteNew;
}
Log.log(':::::::::::crossModelMigration2::::::::::::::');
PFLog.log(':::::::::::crossModelMigration2::::::::::::::');
// Migrate project time tracking data
const projectTimeTracking: TTWorkContextSessionMap = Object.keys(
@ -127,7 +127,7 @@ export const crossModelMigration2: CrossModelMigrateFn = ((
},
{} as TTWorkContextSessionMap,
);
Log.log('________________________________________________________', {
PFLog.log('________________________________________________________', {
copy,
projectTimeTracking,
tagTimeTracking,

View file

@ -13,14 +13,14 @@ import {
import { ProjectState } from '../../features/project/project.model';
import { DEFAULT_GLOBAL_CONFIG } from '../../features/config/default-global-config.const';
import { issueProviderInitialState } from '../../features/issue/store/issue-provider.reducer';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
const LEGACY_INBOX_PROJECT_ID = 'INBOX' as const;
export const crossModelMigration3: CrossModelMigrateFn = ((
fullData: AppDataCompleteNew,
): AppDataCompleteNew => {
Log.log('____________________Migrate3__________________');
PFLog.log('____________________Migrate3__________________');
const copy = fullData;
if (copy.planner) {
@ -154,7 +154,7 @@ export const crossModelMigration3: CrossModelMigrateFn = ((
}
});
Log.log(copy);
PFLog.log(copy);
return copy;
}) as CrossModelMigrateFn;

View file

@ -3,13 +3,13 @@ import { CrossModelMigrateFn } from '../api';
import { TaskCopy } from '../../features/tasks/task.model';
import { EntityState } from '@ngrx/entity';
import { TODAY_TAG } from '../../features/tag/tag.const';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
export const crossModelMigration4: CrossModelMigrateFn = ((
fullData: AppDataCompleteNew,
): AppDataCompleteNew => {
// throw new Error('Migration 4 is not implemented yet');
Log.log('____________________Migrate4__________________');
PFLog.log('____________________Migrate4__________________');
const copy = fullData;
if (!Array.isArray(copy.improvement.hiddenImprovementBannerItems)) {
@ -23,7 +23,7 @@ export const crossModelMigration4: CrossModelMigrateFn = ((
// @ts-ignore
// copy.tag.entities[TODAY_TAG.id].taskIds = [];
Log.log(copy);
PFLog.log(copy);
return copy;
}) as CrossModelMigrateFn;

View file

@ -1,14 +1,14 @@
import { AppDataCompleteNew } from '../pfapi-config';
import { CrossModelMigrateFn } from '../api';
import { TODAY_TAG } from '../../features/tag/tag.const';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
// eslint-disable-next-line @typescript-eslint/naming-convention
export const crossModelMigration4_1: CrossModelMigrateFn = ((
fullData: AppDataCompleteNew,
): AppDataCompleteNew => {
// throw new Error('Migration 4 is not implemented yet');
Log.log('____________________Migrate4.1__________________');
PFLog.log('____________________Migrate4.1__________________');
const copy = fullData;
Object.keys(copy.taskRepeatCfg.entities).forEach((id) => {
@ -22,6 +22,6 @@ export const crossModelMigration4_1: CrossModelMigrateFn = ((
// @ts-ignore
// copy.tag.entities[TODAY_TAG.id].taskIds = [];
Log.log(copy);
PFLog.log(copy);
return copy;
}) as CrossModelMigrateFn;

View file

@ -55,7 +55,7 @@ import { CROSS_MODEL_MIGRATIONS } from './migrate/cross-model-migrations';
import { appDataValidators, validateAllData } from './validate/validation-fn';
import { fixEntityStateConsistency } from '../util/check-fix-entity-state-consistency';
import { IValidation } from 'typia';
import { Log } from '../core/log';
import { PFLog } from '../core/log';
import {
initialPluginMetaDataState,
initialPluginUserDataState,
@ -246,7 +246,7 @@ export const PFAPI_CFG: PfapiBaseCfg<PfapiAllModelCfg> = {
const r = validateAllData(data);
if (!environment.production && !r.success) {
Log.log(r);
PFLog.log(r);
alert('VALIDATION ERROR ');
}
@ -269,7 +269,7 @@ export const PFAPI_CFG: PfapiBaseCfg<PfapiAllModelCfg> = {
return r;
},
onDbError: (err) => {
Log.err(err);
PFLog.err(err);
alert('DB ERROR: ' + err);
},
repair: (data: any, errors: IValidation.IError[]) => {

View file

@ -33,7 +33,7 @@ import {
import { fromPfapiEvent, pfapiEventAndInitialAfter } from './pfapi-helper';
import { DataInitStateService } from '../core/data-init/data-init-state.service';
import { GlobalProgressBarService } from '../core-ui/global-progress-bar/global-progress-bar.service';
import { Log } from '../core/log';
import { PFLog } from '../core/log';
@Injectable({
providedIn: 'root',
@ -60,7 +60,7 @@ export class PfapiService {
).pipe(
shareReplay(1),
distinctUntilChanged(),
// tap((v) => Log.log(`isSyncProviderEnabledAndReady$`, v)),
// tap((v) => PFLog.log(`isSyncProviderEnabledAndReady$`, v)),
);
public readonly currentProviderPrivateCfg$ = pfapiEventAndInitialAfter(
@ -106,9 +106,9 @@ export class PfapiService {
constructor() {
// TODO check why it gets triggered twice always
// this.syncState$.subscribe((v) => Log.log(`syncState$`, v));
// this.syncState$.subscribe((v) => PFLog.log(`syncState$`, v));
this.isSyncInProgress$.subscribe((v) => {
// Log.log('isSyncInProgress$', v);
// PFLog.log('isSyncInProgress$', v);
if (v) {
this._globalProgressBarService.countUp('SYNC');
} else {
@ -128,7 +128,7 @@ export class PfapiService {
});
}
} catch (e) {
Log.err(e);
PFLog.err(e);
alert('Unable to set sync provider. Please check your settings.');
}
});

View file

@ -1,7 +1,7 @@
import { AppDataCompleteNew } from '../pfapi-config';
import { IValidation } from 'typia';
import { DEFAULT_GLOBAL_CONFIG } from '../../features/config/default-global-config.const';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
export const autoFixTypiaErrors = (
data: AppDataCompleteNew,
@ -16,7 +16,7 @@ export const autoFixTypiaErrors = (
const path = error.path.replace('$input.', '');
const keys = parsePath(path);
const value = getValueByPath(data, keys);
Log.err('Auto-fixing error:', error, keys, value);
PFLog.err('Auto-fixing error:', error, keys, value);
if (
error.expected.includes('number') &&
@ -25,7 +25,7 @@ export const autoFixTypiaErrors = (
) {
const parsedValue = parseFloat(value);
setValueByPath(data, keys, parsedValue);
Log.err(`Fixed: ${path} from string "${value}" to number ${parsedValue}`);
PFLog.err(`Fixed: ${path} from string "${value}" to number ${parsedValue}`);
} else if (keys[0] === 'globalConfig') {
const defaultValue = getValueByPath(DEFAULT_GLOBAL_CONFIG, keys.slice(1));
setValueByPath(data, keys, defaultValue);
@ -34,27 +34,29 @@ export const autoFixTypiaErrors = (
);
} else if (error.expected.includes('undefined') && value === null) {
setValueByPath(data, keys, undefined);
Log.err(`Fixed: ${path} from null to undefined`);
PFLog.err(`Fixed: ${path} from null to undefined`);
} else if (error.expected.includes('null') && value === 'null') {
setValueByPath(data, keys, null);
Log.err(`Fixed: ${path} from string null to null`);
PFLog.err(`Fixed: ${path} from string null to null`);
} else if (error.expected.includes('undefined') && value === 'null') {
setValueByPath(data, keys, undefined);
Log.err(`Fixed: ${path} from string null to null`);
PFLog.err(`Fixed: ${path} from string null to null`);
} else if (error.expected.includes('null') && value === undefined) {
setValueByPath(data, keys, null);
Log.err(`Fixed: ${path} from undefined to null`);
PFLog.err(`Fixed: ${path} from undefined to null`);
} else if (error.expected.includes('boolean') && !value) {
setValueByPath(data, keys, false);
Log.err(`Fixed: ${path} to false (was ${value})`);
PFLog.err(`Fixed: ${path} to false (was ${value})`);
} else if (keys[0] === 'task' && error.expected.includes('number')) {
// If the value is a string that can be parsed to a number, parse it
if (typeof value === 'string' && !isNaN(parseFloat(value))) {
setValueByPath(data, keys, parseFloat(value));
Log.err(`Fixed: ${path} from string "${value}" to number ${parseFloat(value)}`);
PFLog.err(
`Fixed: ${path} from string "${value}" to number ${parseFloat(value)}`,
);
} else {
setValueByPath(data, keys, 0);
Log.err(`Fixed: ${path} to 0 (was ${value})`);
PFLog.err(`Fixed: ${path} to 0 (was ${value})`);
}
} else if (
keys[0] === 'simpleCounter' &&
@ -66,7 +68,7 @@ export const autoFixTypiaErrors = (
) {
// Fix for issue #4593: simpleCounter countOnDay null value
setValueByPath(data, keys, 0);
Log.err(`Fixed: ${path} from null to 0 for simpleCounter`);
PFLog.err(`Fixed: ${path} from null to 0 for simpleCounter`);
}
}
});
@ -108,7 +110,7 @@ const setValueByPath = <T extends object>(
value: any,
): void => {
if (!Array.isArray(path) || path.length === 0) return;
Log.err('Auto-fixing error =>', path, value);
PFLog.err('Auto-fixing error =>', path, value);
let current: any = obj;
for (let i = 0; i < path.length - 1; i++) {

View file

@ -15,7 +15,7 @@ import { AppDataCompleteNew } from '../pfapi-config';
import { INBOX_PROJECT } from '../../features/project/project.const';
import { autoFixTypiaErrors } from './auto-fix-typia-errors';
import { IValidation } from 'typia';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
// TODO improve later
const ENTITY_STATE_KEYS: (keyof AppDataCompleteLegacy)[] = ALL_ENTITY_MODEL_KEYS;
@ -124,7 +124,7 @@ const _removeDuplicatesFromArchive = (data: AppDataCompleteNew): AppDataComplete
}
});
if (duplicateIds.length > 0) {
Log.log(duplicateIds.length + ' duplicates removed from archive.');
PFLog.log(duplicateIds.length + ' duplicates removed from archive.');
}
}
return data;
@ -154,7 +154,7 @@ const _moveArchivedSubTasksToUnarchivedParents = (
.map((id: string) => taskArchiveState.entities[id] as TaskCopy)
.filter((t: TaskCopy) => t.parentId && !taskArchiveState.ids.includes(t.parentId));
Log.log('orphanArchivedSubTasks', orphanArchivedSubTasks);
PFLog.log('orphanArchivedSubTasks', orphanArchivedSubTasks);
orphanArchivedSubTasks.forEach((t: TaskCopy) => {
// delete archived if duplicate
if (taskState.ids.includes(t.id as string)) {
@ -198,7 +198,7 @@ const _moveUnArchivedSubTasksToArchivedParents = (
.map((id: string) => taskState.entities[id] as TaskCopy)
.filter((t: TaskCopy) => t.parentId && !taskState.ids.includes(t.parentId));
Log.log('orphanUnArchivedSubTasks', orphanUnArchivedSubTasks);
PFLog.log('orphanUnArchivedSubTasks', orphanUnArchivedSubTasks);
orphanUnArchivedSubTasks.forEach((t: TaskCopy) => {
// delete un-archived if duplicate
if (taskArchiveState.ids.includes(t.id as string)) {
@ -276,7 +276,9 @@ const _removeMissingTasksFromListsOrRestoreFromArchive = (
);
if (taskIdsToRestoreFromArchive.length > 0) {
Log.log(taskIdsToRestoreFromArchive.length + ' missing tasks restored from archive.');
PFLog.log(
taskIdsToRestoreFromArchive.length + ' missing tasks restored from archive.',
);
}
return data;
};
@ -325,7 +327,7 @@ const _addOrphanedTasksToProjectLists = (
});
if (orphanedTaskIds.length > 0) {
Log.log(orphanedTaskIds.length + ' orphaned tasks found & restored.');
PFLog.log(orphanedTaskIds.length + ' orphaned tasks found & restored.');
}
return data;
@ -348,7 +350,7 @@ const _addInboxProjectIdIfNecessary = (data: AppDataCompleteNew): AppDataComplet
taskIds.forEach((id) => {
const t = task.entities[id] as TaskCopy;
if (!t.projectId) {
Log.log('Set inbox project id for task ' + t.id);
PFLog.log('Set inbox project id for task ' + t.id);
// @ts-ignore
data.project.entities[INBOX_PROJECT.id].taskIds = [
...(data.project.entities[INBOX_PROJECT.id]!.taskIds as string[]),
@ -363,13 +365,13 @@ const _addInboxProjectIdIfNecessary = (data: AppDataCompleteNew): AppDataComplet
}
});
Log.log(taskArchiveIds);
Log.log(Object.keys(archiveYoung.task.entities));
PFLog.log(taskArchiveIds);
PFLog.log(Object.keys(archiveYoung.task.entities));
taskArchiveIds.forEach((id) => {
const t = archiveYoung.task.entities[id] as TaskCopy;
if (!t.projectId) {
Log.log('Set inbox project for missing project id from archive task ' + t.id);
PFLog.log('Set inbox project for missing project id from archive task ' + t.id);
t.projectId = INBOX_PROJECT.id;
}
// while we are at it, we also cleanup the today tag
@ -405,19 +407,19 @@ const _removeNonExistentProjectIdsFromTasks = (
taskIds.forEach((id) => {
const t = task.entities[id] as TaskCopy;
if (t.projectId && !projectIds.includes(t.projectId)) {
Log.log('Delete missing project id from task ' + t.projectId);
PFLog.log('Delete missing project id from task ' + t.projectId);
// @ts-ignore
delete t.projectId;
}
});
Log.log(taskArchiveIds);
Log.log(Object.keys(archiveYoung.task.entities));
PFLog.log(taskArchiveIds);
PFLog.log(Object.keys(archiveYoung.task.entities));
taskArchiveIds.forEach((id) => {
const t = archiveYoung.task.entities[id] as TaskCopy;
if (t.projectId && !projectIds.includes(t.projectId)) {
Log.log('Delete missing project id from archive task ' + t.projectId);
PFLog.log('Delete missing project id from archive task ' + t.projectId);
// @ts-ignore
delete t.projectId;
}
@ -457,7 +459,7 @@ const _removeNonExistentTagsFromTasks = (
(tagId) => !tagIds.includes(tagId) && tagId !== TODAY_TAG.id,
);
if (removedTags.length > 0) {
Log.log(
PFLog.log(
`Removing non-existent tags from task ${t.id}: ${removedTags.join(', ')}`,
);
removedCount += removedTags.length;
@ -477,7 +479,7 @@ const _removeNonExistentTagsFromTasks = (
(tagId) => !tagIds.includes(tagId) && tagId !== TODAY_TAG.id,
);
if (removedTags.length > 0) {
Log.log(
PFLog.log(
`Removing non-existent tags from archive task ${t.id}: ${removedTags.join(', ')}`,
);
removedCount += removedTags.length;
@ -488,7 +490,7 @@ const _removeNonExistentTagsFromTasks = (
});
if (removedCount > 0) {
Log.log(`Total non-existent tags removed from tasks: ${removedCount}`);
PFLog.log(`Total non-existent tags removed from tasks: ${removedCount}`);
}
return data;
@ -503,7 +505,7 @@ const _removeNonExistentProjectIdsFromIssueProviders = (
issueProviderIds.forEach((id) => {
const t = issueProvider.entities[id] as IssueProvider;
if (t.defaultProjectId && !projectIds.includes(t.defaultProjectId)) {
Log.log('Delete missing project id from issueProvider ' + t.defaultProjectId);
PFLog.log('Delete missing project id from issueProvider ' + t.defaultProjectId);
t.defaultProjectId = null;
}
});
@ -521,14 +523,16 @@ const _removeNonExistentProjectIdsFromTaskRepeatCfg = (
const repeatCfg = taskRepeatCfg.entities[id] as TaskRepeatCfgCopy;
if (repeatCfg.projectId && !projectIds.includes(repeatCfg.projectId)) {
if (repeatCfg.tagIds.length) {
Log.log('Delete missing project id from task repeat cfg ' + repeatCfg.projectId);
PFLog.log(
'Delete missing project id from task repeat cfg ' + repeatCfg.projectId,
);
repeatCfg.projectId = null;
} else {
taskRepeatCfg.ids = (taskRepeatCfg.ids as string[]).filter(
(rid: string) => rid !== repeatCfg.id,
);
delete taskRepeatCfg.entities[repeatCfg.id];
Log.log('Delete task repeat cfg with missing project id' + repeatCfg.projectId);
PFLog.log('Delete task repeat cfg with missing project id' + repeatCfg.projectId);
}
}
});
@ -542,7 +546,7 @@ const _cleanupNonExistingTasksFromLists = (
projectIds.forEach((pid) => {
const projectItem = data.project.entities[pid];
if (!projectItem) {
Log.log(data.project);
PFLog.log(data.project);
throw new Error('No project');
}
(projectItem as ProjectCopy).taskIds = projectItem.taskIds.filter(
@ -557,7 +561,7 @@ const _cleanupNonExistingTasksFromLists = (
.map((id) => data.tag.entities[id])
.forEach((tagItem) => {
if (!tagItem) {
Log.log(data.tag);
PFLog.log(data.tag);
throw new Error('No tag');
}
(tagItem as TagCopy).taskIds = tagItem.taskIds.filter(
@ -574,7 +578,7 @@ const _cleanupNonExistingNotesFromLists = (
projectIds.forEach((pid) => {
const projectItem = data.project.entities[pid];
if (!projectItem) {
Log.log(data.project);
PFLog.log(data.project);
throw new Error('No project');
}
(projectItem as ProjectCopy).noteIds = (projectItem as ProjectCopy).noteIds
@ -595,7 +599,7 @@ const _fixOrphanedNotes = (data: AppDataCompleteNew): AppDataCompleteNew => {
noteIds.forEach((nId) => {
const note = data.note.entities[nId];
if (!note) {
Log.log(data.note);
PFLog.log(data.note);
throw new Error('No note');
}
// missing project case
@ -603,7 +607,7 @@ const _fixOrphanedNotes = (data: AppDataCompleteNew): AppDataCompleteNew => {
if (data.project.entities[note.projectId]) {
// @ts-ignore
if (!data.project.entities[note.projectId]!.noteIds.includes(note.id)) {
Log.log(
PFLog.log(
'Add orphaned note back to project list ' + note.projectId + ' ' + note.id,
);
// @ts-ignore
@ -613,7 +617,7 @@ const _fixOrphanedNotes = (data: AppDataCompleteNew): AppDataCompleteNew => {
];
}
} else {
Log.log('Delete missing project id from note ' + note.id);
PFLog.log('Delete missing project id from note ' + note.id);
note.projectId = null;
// @ts-ignore
if (!data.note.todayOrder.includes(note.id)) {
@ -622,7 +626,7 @@ const _fixOrphanedNotes = (data: AppDataCompleteNew): AppDataCompleteNew => {
}
} // orphaned note case
else if (!data.note.todayOrder.includes(note.id)) {
Log.log('Add orphaned note to today list ' + note.id);
PFLog.log('Add orphaned note to today list ' + note.id);
// @ts-ignore
if (!data.note.todayOrder.includes(note.id)) {
data.note.todayOrder = [...data.note.todayOrder, note.id];
@ -639,7 +643,7 @@ const _fixInconsistentProjectId = (data: AppDataCompleteNew): AppDataCompleteNew
.map((id) => data.project.entities[id])
.forEach((projectItem) => {
if (!projectItem) {
Log.log(data.project);
PFLog.log(data.project);
throw new Error('No project');
}
projectItem.taskIds.forEach((tid) => {
@ -684,7 +688,7 @@ const _fixInconsistentTagId = (data: AppDataCompleteNew): AppDataCompleteNew =>
.map((id) => data.tag.entities[id])
.forEach((tagItem) => {
if (!tagItem) {
Log.log(data.tag);
PFLog.log(data.tag);
throw new Error('No tag');
}
tagItem.taskIds.forEach((tid) => {
@ -708,7 +712,7 @@ const _setTaskProjectIdAccordingToParent = (
.map((id) => data.task.entities[id])
.forEach((taskItem) => {
if (!taskItem) {
Log.log(data.task);
PFLog.log(data.task);
throw new Error('No task');
}
if (taskItem.subTaskIds) {
@ -730,7 +734,7 @@ const _setTaskProjectIdAccordingToParent = (
.map((id) => data.archiveYoung.task.entities[id])
.forEach((taskItem) => {
if (!taskItem) {
Log.log(data.archiveYoung.task);
PFLog.log(data.archiveYoung.task);
throw new Error('No archive task');
}
if (taskItem.subTaskIds) {
@ -757,7 +761,7 @@ const _cleanupOrphanedSubTasks = (data: AppDataCompleteNew): AppDataCompleteNew
.map((id) => data.task.entities[id])
.forEach((taskItem) => {
if (!taskItem) {
Log.log(data.task);
PFLog.log(data.task);
throw new Error('No task');
}
@ -766,7 +770,7 @@ const _cleanupOrphanedSubTasks = (data: AppDataCompleteNew): AppDataCompleteNew
while (i >= 0) {
const sid = taskItem.subTaskIds[i];
if (!data.task.entities[sid]) {
Log.log('Delete orphaned sub task for ', taskItem);
PFLog.log('Delete orphaned sub task for ', taskItem);
taskItem.subTaskIds.splice(i, 1);
}
i -= 1;
@ -779,7 +783,7 @@ const _cleanupOrphanedSubTasks = (data: AppDataCompleteNew): AppDataCompleteNew
.map((id) => data.archiveYoung.task.entities[id])
.forEach((taskItem) => {
if (!taskItem) {
Log.log(data.archiveYoung.task);
PFLog.log(data.archiveYoung.task);
throw new Error('No archive task');
}
@ -788,7 +792,7 @@ const _cleanupOrphanedSubTasks = (data: AppDataCompleteNew): AppDataCompleteNew
while (i >= 0) {
const sid = taskItem.subTaskIds[i];
if (!data.archiveYoung.task.entities[sid]) {
Log.log('Delete orphaned archive sub task for ', taskItem);
PFLog.log('Delete orphaned archive sub task for ', taskItem);
taskItem.subTaskIds.splice(i, 1);
}
i -= 1;

View file

@ -1,7 +1,7 @@
import { devError } from '../../util/dev-error';
import { environment } from '../../../environments/environment';
import { AppDataCompleteNew } from '../pfapi-config';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
let errorCount = 0;
let lastValidityError: string;
@ -48,10 +48,10 @@ export const getLastValidityError = (): string | undefined => lastValidityError;
const _validityError = (errTxt: string, additionalInfo?: any): void => {
if (additionalInfo) {
Log.log('Validity Error Info: ', additionalInfo);
PFLog.log('Validity Error Info: ', additionalInfo);
if (environment.production) {
try {
Log.log('Validity Error Info string: ', JSON.stringify(additionalInfo));
PFLog.log('Validity Error Info string: ', JSON.stringify(additionalInfo));
} catch (e) {}
}
}
@ -59,9 +59,9 @@ const _validityError = (errTxt: string, additionalInfo?: any): void => {
devError(errTxt);
} else {
if (errorCount === 4) {
Log.err('too many validity errors, only logging from now on');
PFLog.err('too many validity errors, only logging from now on');
}
Log.err(errTxt);
PFLog.err(errTxt);
}
lastValidityError = errTxt;
errorCount++;

View file

@ -20,7 +20,7 @@ import { ObstructionState } from '../../features/metric/obstruction/obstruction.
import { GlobalConfigState } from '../../features/config/global-config.model';
import { AppDataCompleteNew } from '../pfapi-config';
import { ValidationResult } from '../api/pfapi.model';
import { Log } from '../../core/log';
import { PFLog } from '../../core/log';
import {
PluginUserDataState,
PluginMetaDataState,
@ -111,7 +111,7 @@ export const appDataValidators: {
const validateArchiveModel = <R>(d: ArchiveModel | R): ValidationResult<ArchiveModel> => {
const r = _validateArchive(d);
if (!r.success) {
Log.log('Validation failed', (r as any)?.errors, r.data);
PFLog.log('Validation failed', (r as any)?.errors, r.data);
}
if (!isEntityStateConsistent((d as ArchiveModel).task)) {
return {
@ -136,7 +136,7 @@ const _wrapValidate = <R>(
isEntityCheck = false,
): ValidationResult<R> => {
if (!result.success) {
Log.log('Validation failed', (result as any)?.errors, result, d);
PFLog.log('Validation failed', (result as any)?.errors, result, d);
}
if (isEntityCheck && !isEntityStateConsistent(d as any)) {
return {