chore(api): remove old code

This commit is contained in:
karishmas6
2024-05-27 20:56:07 +05:30
parent 4c6b44dfd2
commit e34ab021b0
4 changed files with 14 additions and 101 deletions

View File

@@ -1,19 +0,0 @@
import { PlaywrightCrawler, Configuration } from "crawlee";

/**
 * Loads a URL in a headless browser and returns the fully rendered HTML.
 *
 * @param {string} url - The URL to load.
 * @returns {Promise<string>} The rendered HTML content, or '' if the
 *   request handler never ran (e.g. navigation failed).
 */
async function loadWebsite(url) {
  let htmlContent = '';
  const crawler = new PlaywrightCrawler(
    {
      requestHandler: async ({ page }) => {
        // PlaywrightCrawler has already navigated to the request URL before
        // invoking this handler, so an explicit page.goto(url) here would
        // trigger a redundant second navigation — capture the content directly.
        htmlContent = await page.content();
      },
    },
    // Disable on-disk request/result storage; this is a one-shot fetch.
    new Configuration({
      persistStorage: false,
    }),
  );
  await crawler.run([url]);
  return htmlContent;
}

export default loadWebsite;

View File

@@ -1,43 +0,0 @@
import Fastify from 'fastify'
import cors from '@fastify/cors'
import scrapeData from './scraper';
import loadWebsite from './load';

const fastify = Fastify();

// TODO: make the allowed origin configurable instead of hard-coding the
// local Vite dev server.
const corsOptions = {
  origin: 'http://localhost:5173',
};
await fastify.register(cors, corsOptions);

// Health-check / smoke-test route.
fastify.get('/', async (request, reply) => {
  reply.send('Vroom Vroom Vroom');
});

// Loads a URL in a headless browser and returns its rendered HTML.
fastify.post('/load-website', async (request, reply) => {
  const { url } = request.body;
  try {
    const response = await loadWebsite(url);
    console.log('Response is::', response);
    reply.send(response);
  } catch (error) {
    // Error instances JSON-serialize to "{}", so send the message
    // explicitly — consistent with the /scrape handler below.
    reply.status(500).send({ error: error.message });
  }
});

// Scrapes text content for the given CSS selectors from a URL.
fastify.post('/scrape', async (request, reply) => {
  const { url, selectors } = request.body;
  try {
    const response = await scrapeData(url, selectors);
    reply.send(response);
  } catch (error) {
    reply.status(500).send({ error: error.message });
  }
});

// Fastify v4 requires an options object for listen(), and mixing `await`
// with a callback is unsupported — log once the awaited listen resolves.
await fastify.listen({ port: 3000 });
console.log(`Server listening on ${fastify.server.address().port}`);

View File

@@ -1,25 +0,0 @@
import { PlaywrightCrawler, Configuration } from 'crawlee';

/**
 * Scrapes trimmed text content for each CSS selector from a URL.
 *
 * @param {string} url - The URL to scrape.
 * @param {string[]} selectors - CSS selectors whose matching elements'
 *   text content is collected.
 * @returns {Promise<string[]>} Flat list of trimmed text values, in
 *   selector order.
 */
async function scrapeData(url, selectors) {
  const scrapedData = [];
  const crawler = new PlaywrightCrawler(
    {
      requestHandler: async ({ page }) => {
        // PlaywrightCrawler has already navigated to the request URL before
        // invoking this handler, so an explicit page.goto(url) here would
        // trigger a redundant second navigation.
        console.log('Received selectors:', selectors);
        for (const selector of selectors) {
          const elementData = await page.$$eval(selector, elements => elements.map(el => el.textContent.trim()));
          scrapedData.push(...elementData);
        }
        console.log('Scraped data:', scrapedData);
      },
    },
    // Disable on-disk request/result storage; this is a one-shot scrape.
    new Configuration({
      persistStorage: false,
    }),
  );
  await crawler.run([url]);
  return scrapedData;
}

export default scrapeData;

View File

@@ -1,14 +1,14 @@
{
"extends": "@apify/tsconfig",
"compilerOptions": {
"module": "NodeNext",
"moduleResolution": "NodeNext",
"target": "ES2022",
"outDir": "dist",
"noUnusedLocals": false,
"lib": ["DOM"]
},
"include": [
"./src/**/*"
]
}
// NOTE(review): the block below is the entire config duplicated as a
// commented-out copy — presumably left over from this commit's cleanup;
// consider deleting it rather than keeping dead configuration around.
// {
// "extends": "@apify/tsconfig",
// "compilerOptions": {
// "module": "NodeNext",
// "moduleResolution": "NodeNext",
// "target": "ES2022",
// "outDir": "dist",
// "noUnusedLocals": false,
// "lib": ["DOM"]
// },
// "include": [
// "./src/**/*"
// ]
// }