import { PlaywrightCrawler } from 'crawlee';
/**
 * Scrape the text content of every element matching a list of CSS selectors
 * on a single page.
 *
 * @param {string} url - Page to visit.
 * @param {string[]} selectors - CSS selectors whose matched elements' text is collected.
 * @param {number} [waitForSeconds=2] - Extra settle time after navigation, in seconds,
 *   to let late-rendering content appear.
 * @returns {Promise<string[]>} Trimmed text content of every matched element,
 *   in selector order.
 */
async function scrapeData(url, selectors, waitForSeconds = 2) {
  // Hoisted out of the handler so the collected results can be returned
  // to the caller after the crawl completes.
  const scrapedData = [];

  const crawler = new PlaywrightCrawler({
    requestHandler: async ({ page }) => {
      // NOTE: PlaywrightCrawler navigates to the request URL *before*
      // invoking requestHandler, so the original explicit `page.goto(url)`
      // here caused a redundant second navigation — removed.

      // Optional settle time for content rendered after load.
      await page.waitForTimeout(waitForSeconds * 1000);

      console.log('Received selectors:', selectors);

      for (const selector of selectors) {
        // `textContent` can be null for some node types; coalesce before
        // trimming to avoid a TypeError inside the page context.
        const elementData = await page.$$eval(selector, (elements) =>
          elements.map((el) => (el.textContent ?? '').trim()),
        );
        scrapedData.push(...elementData);
      }

      console.log('Scraped data:', scrapedData);
    },
  });

  await crawler.run([{ url }]);

  // Bug fix: the original gathered results but never exposed them. Returning
  // the array is backward-compatible — callers that ignored the resolved
  // value are unaffected.
  return scrapedData;
}