From 6708cd9b0fc7964de13542ee6a4d5cad1afd72ef Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Fri, 1 Nov 2024 23:00:17 +0200
Subject: [PATCH 01/10] Support automatic scraping periodically

---
 packages/main/src/backend/commonTypes.ts    |  1 +
 packages/main/src/backend/index.ts          | 22 +++++++++++++++++++
 packages/main/src/handlers/index.ts         |  4 +++-
 packages/preload/src/eventsBridge.ts        |  4 ++--
 packages/preload/src/index.ts               |  6 +++++
 .../src/components/GeneralSettings.tsx      | 12 ++++++++++
 packages/renderer/src/store/ConfigStore.tsx |  4 ++++
 packages/renderer/src/types.tsx             |  1 +
 8 files changed, 51 insertions(+), 3 deletions(-)

diff --git a/packages/main/src/backend/commonTypes.ts b/packages/main/src/backend/commonTypes.ts
index 920a5472..78def43e 100644
--- a/packages/main/src/backend/commonTypes.ts
+++ b/packages/main/src/backend/commonTypes.ts
@@ -24,6 +24,7 @@ export interface Config {
     chromiumPath?: string;
     maxConcurrency?: number;
     timeout: number;
+    periodicScrapingIntervalHours?: number;
   };
   useReactUI?: boolean;
 }
diff --git a/packages/main/src/backend/index.ts b/packages/main/src/backend/index.ts
index 282bb887..d54e542a 100644
--- a/packages/main/src/backend/index.ts
+++ b/packages/main/src/backend/index.ts
@@ -13,6 +13,28 @@ export { Events, configManager, outputVendors };
 
 export const { inputVendors } = bankScraper;
 
+let intervalId: NodeJS.Timeout | null = null;
+
+export async function scrapePeriodicallyIfNeeded(config: Config, optionalEventPublisher?: Events.EventPublisher) {
+  const hoursInterval = config.scraping.periodicScrapingIntervalHours;
+  optionalEventPublisher = optionalEventPublisher ?? new Events.BudgetTrackingEventEmitter();
+
+  stopPeriodicScraping();
+
+  if(hoursInterval && hoursInterval > 0) {
+    await optionalEventPublisher.emit(Events.EventNames.LOG, { message: `Setting up periodic scraping every ${hoursInterval} minutes` });
+    intervalId = setInterval(async () => {
+      await scrapeAndUpdateOutputVendors(config, optionalEventPublisher);
+    }, hoursInterval * 1000 * 60 * 60);
+  }
+}
+
+export function stopPeriodicScraping() {
+  if (intervalId) {
+    clearInterval(intervalId);
+  }
+}
+
 export async function scrapeAndUpdateOutputVendors(config: Config, optionalEventPublisher?: Events.EventPublisher) {
   const eventPublisher = optionalEventPublisher ?? new Events.BudgetTrackingEventEmitter();
diff --git a/packages/main/src/handlers/index.ts b/packages/main/src/handlers/index.ts
index b57b3eb5..01f157eb 100644
--- a/packages/main/src/handlers/index.ts
+++ b/packages/main/src/handlers/index.ts
@@ -1,5 +1,5 @@
 import { App } from '@/app-globals';
-import { scrapeAndUpdateOutputVendors } from '@/backend';
+import { scrapeAndUpdateOutputVendors, scrapePeriodicallyIfNeeded, stopPeriodicScraping } from '@/backend';
 import { type Credentials } from '@/backend/commonTypes';
 import { getConfig } from '@/backend/configManager/configManager';
 import { BudgetTrackingEventEmitter } from '@/backend/eventEmitters/EventEmitter';
@@ -33,6 +33,7 @@ const functions: Record = {
   updateConfig: updateConfigHandler as Listener,
   getYnabAccountData,
   getLogsInfo: getLogsInfoHandler,
+  stopPeriodicScraping: stopPeriodicScraping,
   getAppInfo: async () => {
     return {
       sourceCommitShort: import.meta.env.VITE_SOURCE_COMMIT_SHORT,
@@ -68,6 +69,7 @@ export const registerHandlers = () => {
     const config = await getConfig();
     const eventSubscriber = new BudgetTrackingEventEmitter();
     scrapeAndUpdateOutputVendors(config, eventSubscriber);
+    scrapePeriodicallyIfNeeded(config, eventSubscriber);
     eventSubscriber.onAny((eventName, eventData) => {
       event.reply('scrapingProgress', JSON.stringify({ eventName, eventData }));
     });
diff --git a/packages/preload/src/eventsBridge.ts b/packages/preload/src/eventsBridge.ts
index d11af647..9f75e714 100644
--- a/packages/preload/src/eventsBridge.ts
+++ b/packages/preload/src/eventsBridge.ts
@@ -37,8 +37,8 @@ export async function scrape(handleScrapingEvent: HandleScrapingEvent) {
   }
 }
 
-export async function toggleUIVersion() {
-  await electron.ipcRenderer.send('toggleUiVersion');
+export async function stopPeriodicScraping() {
+  return electron.ipcRenderer.invoke('stopPeriodicScraping');
 }
 
 export async function openExternal(url: string) {
diff --git a/packages/preload/src/index.ts b/packages/preload/src/index.ts
index 33d7040c..4f125f64 100644
--- a/packages/preload/src/index.ts
+++ b/packages/preload/src/index.ts
@@ -2,4 +2,10 @@
  * @module preload
  */
 
+import { stopPeriodicScraping } from './eventsBridge';
+
 export * from './eventsBridge';
+
+// Clear the interval that scrapes periodically
+stopPeriodicScraping();
+
diff --git a/packages/renderer/src/components/GeneralSettings.tsx b/packages/renderer/src/components/GeneralSettings.tsx
index 5764d715..2db507b5 100644
--- a/packages/renderer/src/components/GeneralSettings.tsx
+++ b/packages/renderer/src/components/GeneralSettings.tsx
@@ -17,6 +17,10 @@ function GeneralSettings() {
     }
   };
 
+  const handlePeriodicScrapingIntervalHoursChanged = (interval: string) => {
+    configStore.setPeriodicScrapingIntervalHours(Number(interval));
+  }
+
   return (
@@ -61,6 +65,14 @@ function GeneralSettings() {
             onBlur={(event) => handleTimeoutChanged(event.target.value)}
           />
+
+          לרוץ אוטומטית כל X שעות
+          handlePeriodicScrapingIntervalHoursChanged(event.target.value)}
+          />
+
+
diff --git a/packages/renderer/src/store/ConfigStore.tsx b/packages/renderer/src/store/ConfigStore.tsx
index 49e5dcb0..376cf215 100644
--- a/packages/renderer/src/store/ConfigStore.tsx
+++ b/packages/renderer/src/store/ConfigStore.tsx
@@ -224,6 +224,10 @@ export class ConfigStore {
   async setChromiumPath(chromiumPath?: string) {
     this.config.scraping.chromiumPath = chromiumPath;
   }
+
+  setPeriodicScrapingIntervalHours(interval?: number) {
+    this.config.scraping.periodicScrapingIntervalHours = interval;
+  }
 }
 
 export const configStore = new ConfigStore();
diff --git a/packages/renderer/src/types.tsx b/packages/renderer/src/types.tsx
index 43b3da7c..f24fb543 100644
--- a/packages/renderer/src/types.tsx
+++ b/packages/renderer/src/types.tsx
@@ -30,6 +30,7 @@ export interface Config {
     accountsToScrape: AccountToScrapeConfig[];
     chromiumPath?: string;
     maxConcurrency?: number;
+    periodicScrapingIntervalHours?: number;
   };
 }

From 866c2d6120e451cb1631532bf50ff36ef2fc45ed Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Fri, 1 Nov 2024 23:04:28 +0200
Subject: [PATCH 02/10] Fix lint

---
 packages/renderer/src/components/GeneralSettings.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/renderer/src/components/GeneralSettings.tsx b/packages/renderer/src/components/GeneralSettings.tsx
index 2db507b5..e6ea9856 100644
--- a/packages/renderer/src/components/GeneralSettings.tsx
+++ b/packages/renderer/src/components/GeneralSettings.tsx
@@ -19,7 +19,7 @@ function GeneralSettings() {
 
   const handlePeriodicScrapingIntervalHoursChanged = (interval: string) => {
     configStore.setPeriodicScrapingIntervalHours(Number(interval));
-  }
+  };
 
   return (

From aa4fff9205f1e630e17f7529ec296dd51fc1a1df Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Fri, 1 Nov 2024 23:10:46 +0200
Subject: [PATCH 03/10] Fix lint

---
 packages/main/src/backend/index.ts | 7 +------
 packages/preload/src/index.ts      | 2 +-
 2 files changed, 2 insertions(+), 7 deletions(-)

diff --git a/packages/main/src/backend/index.ts b/packages/main/src/backend/index.ts
index d54e542a..a9a51316 100644
--- a/packages/main/src/backend/index.ts
+++ b/packages/main/src/backend/index.ts
@@ -5,14 +5,11 @@ import moment from 'moment';
 import * as configManager from './configManager/configManager';
 import * as Events from './eventEmitters/EventEmitter';
 import outputVendors from './export/outputVendors';
-import * as bankScraper from './import/bankScraper';
 import logger from '../logging/logger';
 
 export { CompanyTypes } from 'israeli-bank-scrapers-core';
 export { Events, configManager, outputVendors };
 
-export const { inputVendors } = bankScraper;
-
 let intervalId: NodeJS.Timeout | null = null;
 
 export async function scrapePeriodicallyIfNeeded(config: Config, optionalEventPublisher?: Events.EventPublisher) {
@@ -50,14 +47,12 @@ export async function scrapeAndUpdateOutputVendors(config: Config, optionalEvent
     eventPublisher,
   );
   try {
-    const executionResult = await createTransactionsInExternalVendors(
+    return await createTransactionsInExternalVendors(
       config.outputVendors,
       companyIdToTransactions,
       startDate,
       eventPublisher,
     );
-
-    return executionResult;
   } catch (e) {
     logger.error('Failed to create transactions in external vendors', e);
     await eventPublisher.emit(
diff --git a/packages/preload/src/index.ts b/packages/preload/src/index.ts
index 4f125f64..224639f0 100644
--- a/packages/preload/src/index.ts
+++ b/packages/preload/src/index.ts
@@ -7,5 +7,5 @@
 export * from './eventsBridge';
 
 // Clear the interval that scrapes periodically
-stopPeriodicScraping();
+await stopPeriodicScraping();

From 4ec83da79b889ce34f3efed15bb6027b8d37f2bf Mon Sep 17 00:00:00 2001
From: Baruch Odem
Date: Sat, 2 Nov 2024 19:49:48 +0200
Subject: [PATCH 04/10] sugar

---
 packages/main/src/backend/index.ts  | 15 ++++++++++-----
 packages/main/src/handlers/index.ts |  2 +-
 packages/preload/src/index.ts       |  1 -
 3 files changed, 11 insertions(+), 7 deletions(-)

diff --git a/packages/main/src/backend/index.ts b/packages/main/src/backend/index.ts
index a9a51316..85775e03 100644
--- a/packages/main/src/backend/index.ts
+++ b/packages/main/src/backend/index.ts
@@ -18,11 +18,16 @@ export async function scrapePeriodicallyIfNeeded(config: Config, optionalEventPu
 
   stopPeriodicScraping();
 
-  if(hoursInterval && hoursInterval > 0) {
-    await optionalEventPublisher.emit(Events.EventNames.LOG, { message: `Setting up periodic scraping every ${hoursInterval} minutes` });
-    intervalId = setInterval(async () => {
-      await scrapeAndUpdateOutputVendors(config, optionalEventPublisher);
-    }, hoursInterval * 1000 * 60 * 60);
+  if (hoursInterval) {
+    await optionalEventPublisher.emit(Events.EventNames.LOG, {
+      message: `Setting up periodic scraping every ${hoursInterval} hours`,
+    });
+    intervalId = setInterval(
+      async () => {
+        await scrapeAndUpdateOutputVendors(config, optionalEventPublisher);
+      },
+      hoursInterval * 1000 * 60 * 60,
+    );
   }
 }
diff --git a/packages/main/src/handlers/index.ts b/packages/main/src/handlers/index.ts
index 01f157eb..86b8d30c 100644
--- a/packages/main/src/handlers/index.ts
+++ b/packages/main/src/handlers/index.ts
@@ -33,6 +33,7 @@ const functions: Record = {
   updateConfig: updateConfigHandler as Listener,
   getYnabAccountData,
   getLogsInfo: getLogsInfoHandler,
-  stopPeriodicScraping: stopPeriodicScraping,
+  stopPeriodicScraping,
   getAppInfo: async () => {
     return {
       sourceCommitShort: import.meta.env.VITE_SOURCE_COMMIT_SHORT,
diff --git a/packages/preload/src/index.ts b/packages/preload/src/index.ts
index 224639f0..a7d150db 100644
--- a/packages/preload/src/index.ts
+++ b/packages/preload/src/index.ts
@@ -8,4 +8,3 @@ export * from './eventsBridge';
 
 // Clear the interval that scrapes periodically
 await stopPeriodicScraping();
-

From 666a211e6782059786c963006ce631b7e4d25354 Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Sat, 9 Nov 2024 14:25:23 +0200
Subject: [PATCH 05/10] Show time of next automatic run

---
 .../src/backend/eventEmitters/EventEmitter.ts | 11 ++++++++++-
 packages/main/src/backend/index.ts            | 17 ++++++++++++-----
 packages/preload/src/index.ts                 |  4 ----
 packages/renderer/src/components/Body.tsx     | 10 +++++++++-
 packages/renderer/src/store/ConfigStore.tsx   | 15 ++++++++++++---
 5 files changed, 43 insertions(+), 14 deletions(-)

diff --git a/packages/main/src/backend/eventEmitters/EventEmitter.ts b/packages/main/src/backend/eventEmitters/EventEmitter.ts
index 04152c19..17792a8d 100644
--- a/packages/main/src/backend/eventEmitters/EventEmitter.ts
+++ b/packages/main/src/backend/eventEmitters/EventEmitter.ts
@@ -115,6 +115,15 @@ export class ExporterEndEvent extends ExporterEvent {
   }
 }
 
+export class ImportStartEvent extends BudgetTrackingEvent {
+  nextAutomaticScrapeDate?: Date | null;
+
+  constructor(message: string, nextAutomaticScrapeDate?: Date | null) {
+    super({ message });
+    this.nextAutomaticScrapeDate = nextAutomaticScrapeDate;
+  }
+}
+
 export class DownalodChromeEvent extends BudgetTrackingEvent {
   percent: number;
 
@@ -125,7 +134,7 @@ export class DownalodChromeEvent extends BudgetTrackingEvent {
 }
 
 export interface EventDataMap {
-  [EventNames.IMPORT_PROCESS_START]: BudgetTrackingEvent;
+  [EventNames.IMPORT_PROCESS_START]: ImportStartEvent;
   [EventNames.DOWNLOAD_CHROME]: DownalodChromeEvent;
   [EventNames.IMPORTER_START]: ImporterEvent;
   [EventNames.IMPORTER_PROGRESS]: ImporterEvent;
diff --git a/packages/main/src/backend/index.ts b/packages/main/src/backend/index.ts
index 85775e03..237f0b90 100644
--- a/packages/main/src/backend/index.ts
+++ b/packages/main/src/backend/index.ts
@@ -4,6 +4,7 @@ import { scrapeFinancialAccountsAndFetchTransactions } from '@/backend/import/im
 import moment from 'moment';
 import * as configManager from './configManager/configManager';
 import * as Events from './eventEmitters/EventEmitter';
+import { EventNames } from './eventEmitters/EventEmitter';
 import outputVendors from './export/outputVendors';
 import logger from '../logging/logger';
 
@@ -19,7 +20,7 @@ export async function scrapePeriodicallyIfNeeded(config: Config, optionalEventPu
   stopPeriodicScraping();
 
   if (hoursInterval) {
-    await optionalEventPublisher.emit(Events.EventNames.LOG, {
+    await optionalEventPublisher.emit(EventNames.LOG, {
       message: `Setting up periodic scraping every ${hoursInterval} hours`,
     });
     intervalId = setInterval(
@@ -39,12 +40,18 @@ export function stopPeriodicScraping() {
 
 export async function scrapeAndUpdateOutputVendors(config: Config, optionalEventPublisher?: Events.EventPublisher) {
   const eventPublisher = optionalEventPublisher ?? new Events.BudgetTrackingEventEmitter();
+  await eventPublisher.emit(EventNames.LOG, {
+    message: 'Starting to scrape',
+  });
 
   const startDate = moment().subtract(config.scraping.numDaysBack, 'days').startOf('day').toDate();
 
-  await eventPublisher.emit(Events.EventNames.IMPORT_PROCESS_START, {
-    message: `Starting to scrape from ${startDate} to today`,
-  });
+  const nextAutomaticScrapeDate : Date | null = config.scraping.periodicScrapingIntervalHours ? moment().add(config.scraping.periodicScrapingIntervalHours, 'hours').toDate() : null;
+
+  await eventPublisher.emit(EventNames.IMPORT_PROCESS_START, new Events.ImportStartEvent(
+    `Starting to scrape from ${startDate} to today`,
+    nextAutomaticScrapeDate,
+  ));
 
   const companyIdToTransactions = await scrapeFinancialAccountsAndFetchTransactions(
     config.scraping,
@@ -61,7 +68,7 @@ export async function scrapeAndUpdateOutputVendors(config: Config, optionalEvent
   } catch (e) {
     logger.error('Failed to create transactions in external vendors', e);
     await eventPublisher.emit(
-      Events.EventNames.GENERAL_ERROR,
+      EventNames.GENERAL_ERROR,
       new Events.BudgetTrackingEvent({
         message: (e as Error).message,
         error: e as Error,
diff --git a/packages/preload/src/index.ts b/packages/preload/src/index.ts
index a7d150db..f72ccf78 100644
--- a/packages/preload/src/index.ts
+++ b/packages/preload/src/index.ts
@@ -2,9 +2,5 @@
  * @module preload
  */
 
-import { stopPeriodicScraping } from './eventsBridge';
-
 export * from './eventsBridge';
 
-// Clear the interval that scrapes periodically
-await stopPeriodicScraping();
diff --git a/packages/renderer/src/components/Body.tsx b/packages/renderer/src/components/Body.tsx
index dd9e8e49..d3358bcc 100644
--- a/packages/renderer/src/components/Body.tsx
+++ b/packages/renderer/src/components/Body.tsx
@@ -68,6 +68,9 @@ const Body = () => {
     closeModal();
   };
 
+  const shouldShowNextRunTime = !!(configStore.nextAutomaticScrapeDate && Number(configStore.config.scraping.periodicScrapingIntervalHours));
+  const nextRunTimeString = configStore.nextAutomaticScrapeDate ? new Date(configStore.nextAutomaticScrapeDate).toLocaleTimeString() : null;
+
   return (
@@ -124,7 +127,12 @@ const Body = () => {
-
+            ריצה הבאה: {nextRunTimeString}
+
+          }
+          showModal({} as Account, ModalStatus.GENERAL_SETTINGS)}
           className={styles.pointer}
diff --git a/packages/renderer/src/store/ConfigStore.tsx b/packages/renderer/src/store/ConfigStore.tsx
index 376cf215..c4afb4a3 100644
--- a/packages/renderer/src/store/ConfigStore.tsx
+++ b/packages/renderer/src/store/ConfigStore.tsx
@@ -17,6 +17,7 @@ import {
   type Log,
   type OutputVendorName,
 } from '../types';
+import { ImportStartEvent } from '../../../main/src/backend/eventEmitters/EventEmitter';
 
 interface AccountScrapingData {
   logs: Log[];
@@ -71,6 +72,7 @@ export class ConfigStore {
   config: Config;
 
   chromeDownloadPercent = 0;
+  nextAutomaticScrapeDate?: Date | null;
 
   // TODO: move this to a separate store
   accountScrapingData: Map;
@@ -130,6 +132,7 @@ export class ConfigStore {
   clearScrapingStatus() {
     this.accountScrapingData = new Map();
     this.updateChromeDownloadPercent(0);
+    this.nextAutomaticScrapeDate = null;
   }
 
   updateChromeDownloadPercent(percent: number) {
@@ -152,10 +155,13 @@ export class ConfigStore {
   }
 
   handleScrapingEvent(eventName: string, budgetTrackingEvent?: BudgetTrackingEvent) {
-    if (eventName === 'DOWNLOAD_CHROME') {
-      this.updateChromeDownloadPercent((budgetTrackingEvent as DownloadChromeEvent)?.percent);
-    }
     if (budgetTrackingEvent) {
+      if (eventName === 'DOWNLOAD_CHROME') {
+        this.updateChromeDownloadPercent((budgetTrackingEvent as DownloadChromeEvent)?.percent);
+      }
+      if (eventName === 'IMPORT_PROCESS_START') {
+        this.nextAutomaticScrapeDate = (budgetTrackingEvent as ImportStartEvent).nextAutomaticScrapeDate;
+      }
       const accountId = budgetTrackingEvent.vendorId;
       if (accountId) {
         if (!this.accountScrapingData.has(accountId)) {
@@ -227,6 +233,9 @@ export class ConfigStore {
 
   setPeriodicScrapingIntervalHours(interval?: number) {
     this.config.scraping.periodicScrapingIntervalHours = interval;
+    if (!interval || interval <= 0) {
+      this.nextAutomaticScrapeDate = null;
+    }
   }
 }

From 176271a05f6b4079a4af33be7370db831486539d Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Sat, 9 Nov 2024 14:32:43 +0200
Subject: [PATCH 06/10] Fix lint

---
 packages/renderer/src/store/ConfigStore.tsx | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/renderer/src/store/ConfigStore.tsx b/packages/renderer/src/store/ConfigStore.tsx
index c4afb4a3..da1ab1da 100644
--- a/packages/renderer/src/store/ConfigStore.tsx
+++ b/packages/renderer/src/store/ConfigStore.tsx
@@ -17,7 +17,7 @@ import {
   type Log,
   type OutputVendorName,
 } from '../types';
-import { ImportStartEvent } from '../../../main/src/backend/eventEmitters/EventEmitter';
+import { type ImportStartEvent } from '../../../main/src/backend/eventEmitters/EventEmitter';
 
 interface AccountScrapingData {
   logs: Log[];

From 712c4eea195692094a2ed06934785619c12a7fb6 Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Sat, 9 Nov 2024 14:41:28 +0200
Subject: [PATCH 07/10] Better naming for setPeriodicScraping function

---
 packages/main/src/backend/index.ts  | 5 +----
 packages/main/src/handlers/index.ts | 4 ++--
 2 files changed, 3 insertions(+), 6 deletions(-)

diff --git a/packages/main/src/backend/index.ts b/packages/main/src/backend/index.ts
index 237f0b90..4f733feb 100644
--- a/packages/main/src/backend/index.ts
+++ b/packages/main/src/backend/index.ts
@@ -13,7 +13,7 @@ export { Events, configManager, outputVendors };
 
 let intervalId: NodeJS.Timeout | null = null;
 
-export async function scrapePeriodicallyIfNeeded(config: Config, optionalEventPublisher?: Events.EventPublisher) {
+export async function setPeriodicScrapingIfNeeded(config: Config, optionalEventPublisher?: Events.EventPublisher) {
   const hoursInterval = config.scraping.periodicScrapingIntervalHours;
   optionalEventPublisher = optionalEventPublisher ?? new Events.BudgetTrackingEventEmitter();
 
@@ -40,9 +40,6 @@ export function stopPeriodicScraping() {
 
 export async function scrapeAndUpdateOutputVendors(config: Config, optionalEventPublisher?: Events.EventPublisher) {
   const eventPublisher = optionalEventPublisher ?? new Events.BudgetTrackingEventEmitter();
-  await eventPublisher.emit(EventNames.LOG, {
-    message: 'Starting to scrape',
-  });
 
   const startDate = moment().subtract(config.scraping.numDaysBack, 'days').startOf('day').toDate();
 
diff --git a/packages/main/src/handlers/index.ts b/packages/main/src/handlers/index.ts
index 86b8d30c..61bc2670 100644
--- a/packages/main/src/handlers/index.ts
+++ b/packages/main/src/handlers/index.ts
@@ -1,5 +1,5 @@
 import { App } from '@/app-globals';
-import { scrapeAndUpdateOutputVendors, scrapePeriodicallyIfNeeded, stopPeriodicScraping } from '@/backend';
+import { scrapeAndUpdateOutputVendors, setPeriodicScrapingIfNeeded, stopPeriodicScraping } from '@/backend';
 import { type Credentials } from '@/backend/commonTypes';
 import { getConfig } from '@/backend/configManager/configManager';
 import { BudgetTrackingEventEmitter } from '@/backend/eventEmitters/EventEmitter';
@@ -69,7 +69,7 @@ export const registerHandlers = () => {
     const config = await getConfig();
     const eventSubscriber = new BudgetTrackingEventEmitter();
     scrapeAndUpdateOutputVendors(config, eventSubscriber);
-    scrapePeriodicallyIfNeeded(config, eventSubscriber);
+    setPeriodicScrapingIfNeeded(config, eventSubscriber);
     eventSubscriber.onAny((eventName, eventData) => {
       event.reply('scrapingProgress', JSON.stringify({ eventName, eventData }));
     });

From 130d4f28bce229c0a537cace14c1aa55caecb912 Mon Sep 17 00:00:00 2001
From: Jonathan
Date: Sat, 9 Nov 2024 14:42:40 +0200
Subject: [PATCH 08/10] Fix bug that caused some events to be missed

---
 packages/main/src/handlers/index.ts | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/packages/main/src/handlers/index.ts b/packages/main/src/handlers/index.ts
index 61bc2670..81364cf8 100644
--- a/packages/main/src/handlers/index.ts
+++ b/packages/main/src/handlers/index.ts
@@ -68,11 +68,11 @@ export const registerHandlers = () => {
   ipcMain.on('scrape', async (event: IpcMainEvent) => {
     const config = await getConfig();
     const eventSubscriber = new BudgetTrackingEventEmitter();
-    scrapeAndUpdateOutputVendors(config, eventSubscriber);
-    setPeriodicScrapingIfNeeded(config, eventSubscriber);
     eventSubscriber.onAny((eventName, eventData) => {
       event.reply('scrapingProgress', JSON.stringify({ eventName, eventData }));
     });
+    await setPeriodicScrapingIfNeeded(config, eventSubscriber);
+    await scrapeAndUpdateOutputVendors(config, eventSubscriber);
   });
 
   ipcMain.removeAllListeners('getYnabAccountData');

From 12c69ea64790008158da5d0a14added870a88935 Mon Sep 17 00:00:00 2001
From: Baruch Odem
Date: Sat, 9 Nov 2024 20:02:19 +0200
Subject: [PATCH 09/10] fix typing

---
 packages/main/src/backend/eventEmitters/EventEmitter.ts | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/packages/main/src/backend/eventEmitters/EventEmitter.ts b/packages/main/src/backend/eventEmitters/EventEmitter.ts
index 17792a8d..4e6b1fdf 100644
--- a/packages/main/src/backend/eventEmitters/EventEmitter.ts
+++ b/packages/main/src/backend/eventEmitters/EventEmitter.ts
@@ -1,7 +1,7 @@
 // eslint-disable-next-line max-classes-per-file
-import { type EnrichedTransaction, type OutputVendorName } from '@/backend/commonTypes';
 import Emittery from 'emittery';
 import { type CompanyTypes } from 'israeli-bank-scrapers-core';
+import type { EnrichedTransaction, OutputVendorName } from '../commonTypes';
 
 export enum EventNames {
   IMPORT_PROCESS_START = 'IMPORT_PROCESS_START',

From 55b01485939aec8bcde76faffcd619a350412d20 Mon Sep 17 00:00:00 2001
From: Baruch Odem
Date: Sun, 10 Nov 2024 07:33:31 +0200
Subject: [PATCH 10/10] format

---
 packages/main/src/backend/index.ts        | 12 +++++++-----
 packages/preload/src/index.ts             |  1 -
 packages/renderer/src/components/Body.tsx | 16 ++++++++--------
 3 files changed, 15 insertions(+), 14 deletions(-)

diff --git a/packages/main/src/backend/index.ts b/packages/main/src/backend/index.ts
index 4f733feb..c39aec4e 100644
--- a/packages/main/src/backend/index.ts
+++ b/packages/main/src/backend/index.ts
@@ -43,12 +43,14 @@ export async function scrapeAndUpdateOutputVendors(config: Config, optionalEvent
 
   const startDate = moment().subtract(config.scraping.numDaysBack, 'days').startOf('day').toDate();
 
-  const nextAutomaticScrapeDate : Date | null = config.scraping.periodicScrapingIntervalHours ? moment().add(config.scraping.periodicScrapingIntervalHours, 'hours').toDate() : null;
+  const nextAutomaticScrapeDate: Date | null = config.scraping.periodicScrapingIntervalHours
+    ? moment().add(config.scraping.periodicScrapingIntervalHours, 'hours').toDate()
+    : null;
 
-  await eventPublisher.emit(EventNames.IMPORT_PROCESS_START, new Events.ImportStartEvent(
-    `Starting to scrape from ${startDate} to today`,
-    nextAutomaticScrapeDate,
-  ));
+  await eventPublisher.emit(
+    EventNames.IMPORT_PROCESS_START,
+    new Events.ImportStartEvent(`Starting to scrape from ${startDate} to today`, nextAutomaticScrapeDate),
+  );
 
   const companyIdToTransactions = await scrapeFinancialAccountsAndFetchTransactions(
     config.scraping,
diff --git a/packages/preload/src/index.ts b/packages/preload/src/index.ts
index f72ccf78..33d7040c 100644
--- a/packages/preload/src/index.ts
+++ b/packages/preload/src/index.ts
@@ -3,4 +3,3 @@
  */
 
 export * from './eventsBridge';
-
diff --git a/packages/renderer/src/components/Body.tsx b/packages/renderer/src/components/Body.tsx
index d3358bcc..7bd92454 100644
--- a/packages/renderer/src/components/Body.tsx
+++ b/packages/renderer/src/components/Body.tsx
@@ -68,8 +68,12 @@ const Body = () => {
     closeModal();
   };
 
-  const shouldShowNextRunTime = !!(configStore.nextAutomaticScrapeDate && Number(configStore.config.scraping.periodicScrapingIntervalHours));
-  const nextRunTimeString = configStore.nextAutomaticScrapeDate ? new Date(configStore.nextAutomaticScrapeDate).toLocaleTimeString() : null;
+  const shouldShowNextRunTime = !!(
+    configStore.nextAutomaticScrapeDate && Number(configStore.config.scraping.periodicScrapingIntervalHours)
+  );
+  const nextRunTimeString = configStore.nextAutomaticScrapeDate
+    ? new Date(configStore.nextAutomaticScrapeDate).toLocaleTimeString()
+    : null;
 
   return (
@@ -127,12 +131,8 @@ const Body = () => {
-        {shouldShowNextRunTime &&
-          ריצה הבאה: {nextRunTimeString}
-        }
-        ריצה הבאה: {nextRunTimeString}}
+        showModal({} as Account, ModalStatus.GENERAL_SETTINGS)}
         className={styles.pointer}