diff --git a/package.json b/package.json
index bf59ddd..dbc8b5f 100644
--- a/package.json
+++ b/package.json
@@ -5,7 +5,7 @@
   "main": "index.js",
   "type": "module",
   "scripts": {
-    "start": "npx tsx src/Scheduler/index.ts",
+    "start": "npx tsx src/index.ts",
     "start:scheduler": "npx tsx src/Scheduler/index.ts",
     "test": "jest --detectOpenHandles",
     "build": "tsc",
diff --git a/src/Scheduler/index.ts b/src/Scheduler/index.ts
index 87e671e..7e25629 100644
--- a/src/Scheduler/index.ts
+++ b/src/Scheduler/index.ts
@@ -1,5 +1,5 @@
 import cron from 'node-cron';
-import Main from '..';
+import Main from '../main';
 
 class ScraperScheduler {
   private cronExpression: string;
diff --git a/src/index.ts b/src/index.ts
index 85f86ad..b14f277 100644
--- a/src/index.ts
+++ b/src/index.ts
@@ -1,44 +1,4 @@
-import { v4 as uuid } from 'uuid';
-import ScrapersFactory from './ScrapersFactory';
-import ScrapableFactory from './ScrapableFactory';
-import PathBuilderFactory from './PathBuilderFactory';
-import { ListAmCategory, ListAmGeolocation, PathBuilderVariant, ScrapeValidatorVariant } from "./configs/types";
-import { ScrapeableVariant, ScraperType, ScrapeType } from "./configs/types";
-import { ListAmBaseURL } from './configs/constants';
-import { sleep } from "./utils/sleep";
-import ScrapeValidatorFactory from './ScrapeValidatorFactory';
-import getRandomInterval from './utils/getRandomInterval';
+import Main from "./main";
 
-class Main {
-  run = async () => {
-    const scraperFactory = ScrapersFactory;
-    const scrapableFactory = ScrapableFactory;
-    const pathBuilderFactory = PathBuilderFactory;
-    const scrapeValidatorFactory = ScrapeValidatorFactory;
-
-    const scraper = scraperFactory.createScraper(ScraperType.PUPPETTER);
-    const validator = scrapeValidatorFactory.createScrapeValidator(ScrapeValidatorVariant.LISTAM);
-    const scrapable = scrapableFactory.createScrapable(ScrapeableVariant.LISTAM, scraper, validator);
-    const pathBuilder = pathBuilderFactory.createPathBuilder(PathBuilderVariant.LISTAM);
-
-    pathBuilder.init('', ListAmCategory.ROOM_FOR_A_RENT);
-
-    const scrapeId = uuid();
-
-    for (let i = 1; true; i++) {
-      pathBuilder.reset();
-      pathBuilder.addPageNumber(i);
-      pathBuilder.addGeolocation(ListAmGeolocation.YEREVAN);
-
-      const finalPath = pathBuilder.build();
-      console.log(`${ListAmBaseURL}${finalPath}`);
-
-      await scrapable.scrape(scrapeId, finalPath, ScrapeType.LIST);
-
-      const sleepInterval = getRandomInterval(4000, 10000);
-      await sleep(sleepInterval);
-    }
-  }
-};
-
-export default Main;
+const main = new Main();
+main.run();
\ No newline at end of file
diff --git a/src/main.ts b/src/main.ts
new file mode 100644
index 0000000..85f86ad
--- /dev/null
+++ b/src/main.ts
@@ -0,0 +1,44 @@
+import { v4 as uuid } from 'uuid';
+import ScrapersFactory from './ScrapersFactory';
+import ScrapableFactory from './ScrapableFactory';
+import PathBuilderFactory from './PathBuilderFactory';
+import { ListAmCategory, ListAmGeolocation, PathBuilderVariant, ScrapeValidatorVariant } from "./configs/types";
+import { ScrapeableVariant, ScraperType, ScrapeType } from "./configs/types";
+import { ListAmBaseURL } from './configs/constants';
+import { sleep } from "./utils/sleep";
+import ScrapeValidatorFactory from './ScrapeValidatorFactory';
+import getRandomInterval from './utils/getRandomInterval';
+
+class Main {
+  run = async () => {
+    const scraperFactory = ScrapersFactory;
+    const scrapableFactory = ScrapableFactory;
+    const pathBuilderFactory = PathBuilderFactory;
+    const scrapeValidatorFactory = ScrapeValidatorFactory;
+
+    const scraper = scraperFactory.createScraper(ScraperType.PUPPETTER);
+    const validator = scrapeValidatorFactory.createScrapeValidator(ScrapeValidatorVariant.LISTAM);
+    const scrapable = scrapableFactory.createScrapable(ScrapeableVariant.LISTAM, scraper, validator);
+    const pathBuilder = pathBuilderFactory.createPathBuilder(PathBuilderVariant.LISTAM);
+
+    pathBuilder.init('', ListAmCategory.ROOM_FOR_A_RENT);
+
+    const scrapeId = uuid();
+
+    for (let i = 1; true; i++) {
+      pathBuilder.reset();
+      pathBuilder.addPageNumber(i);
+      pathBuilder.addGeolocation(ListAmGeolocation.YEREVAN);
+
+      const finalPath = pathBuilder.build();
+      console.log(`${ListAmBaseURL}${finalPath}`);
+
+      await scrapable.scrape(scrapeId, finalPath, ScrapeType.LIST);
+
+      const sleepInterval = getRandomInterval(4000, 10000);
+      await sleep(sleepInterval);
+    }
+  }
+};
+
+export default Main;