From e50762a5fe97eff877656dc1d8c0a9dde3b0d659 Mon Sep 17 00:00:00 2001 From: Marco Franke Date: Thu, 31 Oct 2024 15:58:04 +0100 Subject: [PATCH] fix: fixed a bug where internal pagecount was increased by one So the scraping always starts with the second page. Amazon uses zero-based pagination. --- src/commands/scrape/amazon/index.ts | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/src/commands/scrape/amazon/index.ts b/src/commands/scrape/amazon/index.ts index aed15013..ec27db8a 100644 --- a/src/commands/scrape/amazon/index.ts +++ b/src/commands/scrape/amazon/index.ts @@ -119,13 +119,13 @@ export default class Amazon extends ScrapeCommand { } this.logger.debug(`Got ${orderPageCount} for year ${currentYear}`); - for (const orderPage of [...Array(orderPageCount).keys()].map(pageNo => pageNo + 1)) { + for (const orderPage of [...Array(orderPageCount).keys()].map(pageNo => pageNo)) { if (this.options.pageFilter && orderPage != this.options.pageFilter) { this.logger.info(`Skipping page ${orderPage} due to page filter`); continue; } - this.logger.info(`Processing page ${orderPage}`); + this.logger.info(`Processing page ${orderPage + 1}`); await this.goToYearAndPage(currentYear, orderPage, this.definition); const onlyNewInvoiceHandled = await this.processOrderPage(orderPage, processedOrders); if (onlyNewInvoiceHandled) { @@ -141,7 +141,7 @@ export default class Amazon extends ScrapeCommand { } private async processOrderPage(orderPage: number, processedOrders: Scrape[]): Promise { - this.logger.info(`Checking page ${orderPage} for orders`); + this.logger.info(`Checking page ${orderPage + 1} for orders`); const orderCards = await this.currentPage.$$(this.selectors.orderCards); this.logger.info(`Got ${orderCards.length} orders. Processing...`);