2024-07-31 20:40:14 +05:30
|
|
|
/* eslint-disable no-await-in-loop, no-restricted-syntax */
|
2025-01-27 15:00:19 +05:30
|
|
|
import { ElementHandle, Page, PageScreenshotOptions } from 'playwright';
|
2024-08-21 05:25:54 +05:30
|
|
|
import { PlaywrightBlocker } from '@cliqz/adblocker-playwright';
|
2024-08-21 05:06:34 +05:30
|
|
|
import fetch from 'cross-fetch';
|
2024-07-31 20:40:14 +05:30
|
|
|
import path from 'path';
|
|
|
|
|
|
|
|
|
|
import { EventEmitter } from 'events';
|
|
|
|
|
import {
|
|
|
|
|
Where, What, PageState, Workflow, WorkflowFile,
|
|
|
|
|
ParamType, SelectorArray, CustomFunctions,
|
|
|
|
|
} from './types/workflow';
|
|
|
|
|
|
|
|
|
|
import { operators, meta } from './types/logic';
|
|
|
|
|
import { arrayToObject } from './utils/utils';
|
|
|
|
|
import Concurrency from './utils/concurrency';
|
|
|
|
|
import Preprocessor from './preprocessor';
|
|
|
|
|
import log, { Level } from './utils/logger';
|
|
|
|
|
|
2024-12-03 17:51:06 +05:30
|
|
|
/**
|
|
|
|
|
* Extending the Window interface for custom scraping functions.
|
|
|
|
|
*/
|
|
|
|
|
declare global {
|
|
|
|
|
interface Window {
|
|
|
|
|
scrape: (selector: string | null) => Record<string, string>[];
|
|
|
|
|
scrapeSchema: (
|
|
|
|
|
schema: Record<string, { selector: string; tag: string; attribute: string }>
|
|
|
|
|
) => Record<string, any>;
|
|
|
|
|
scrapeList: (config: { listSelector: string; fields: any; limit?: number; pagination: any }) => Record<string, any>[];
|
|
|
|
|
scrapeListAuto: (listSelector: string) => { selector: string; innerText: string }[];
|
|
|
|
|
scrollDown: (pages?: number) => void;
|
|
|
|
|
scrollUp: (pages?: number) => void;
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
/**
|
|
|
|
|
 * Defines optional interpreter options (passed in constructor)
|
|
|
|
|
*/
|
|
|
|
|
interface InterpreterOptions {
|
2025-05-07 09:30:25 +05:30
|
|
|
mode?: string;
|
2024-07-31 20:40:14 +05:30
|
|
|
maxRepeats: number;
|
|
|
|
|
maxConcurrency: number;
|
|
|
|
|
serializableCallback: (output: any) => (void | Promise<void>);
|
|
|
|
|
binaryCallback: (output: any, mimeType: string) => (void | Promise<void>);
|
|
|
|
|
debug: boolean;
|
|
|
|
|
debugChannel: Partial<{
|
2025-04-29 20:27:22 +05:30
|
|
|
activeId: (id: number) => void,
|
|
|
|
|
debugMessage: (msg: string) => void,
|
|
|
|
|
setActionType: (type: string) => void,
|
2025-06-07 14:23:32 +05:30
|
|
|
incrementScrapeListIndex: () => void,
|
2024-07-31 20:40:14 +05:30
|
|
|
}>
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Class for running the Smart Workflows.
|
|
|
|
|
*/
|
|
|
|
|
export default class Interpreter extends EventEmitter {
|
|
|
|
|
private workflow: Workflow;
|
|
|
|
|
|
|
|
|
|
private initializedWorkflow: Workflow | null;
|
|
|
|
|
|
|
|
|
|
private options: InterpreterOptions;
|
|
|
|
|
|
|
|
|
|
private concurrency: Concurrency;
|
|
|
|
|
|
|
|
|
|
private stopper: Function | null = null;
|
|
|
|
|
|
|
|
|
|
private log: typeof log;
|
|
|
|
|
|
2024-08-21 05:06:34 +05:30
|
|
|
private blocker: PlaywrightBlocker | null = null;
|
|
|
|
|
|
2024-12-03 17:51:06 +05:30
|
|
|
private cumulativeResults: Record<string, any>[] = [];
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
constructor(workflow: WorkflowFile, options?: Partial<InterpreterOptions>) {
|
|
|
|
|
super();
|
|
|
|
|
this.workflow = workflow.workflow;
|
|
|
|
|
this.initializedWorkflow = null;
|
|
|
|
|
this.options = {
|
|
|
|
|
maxRepeats: 5,
|
|
|
|
|
maxConcurrency: 5,
|
2024-12-03 17:51:06 +05:30
|
|
|
serializableCallback: (data) => {
|
|
|
|
|
log(JSON.stringify(data), Level.WARN);
|
|
|
|
|
},
|
2024-07-31 20:40:14 +05:30
|
|
|
binaryCallback: () => { log('Received binary data, thrashing them.', Level.WARN); },
|
|
|
|
|
debug: false,
|
|
|
|
|
debugChannel: {},
|
|
|
|
|
...options,
|
|
|
|
|
};
|
|
|
|
|
this.concurrency = new Concurrency(this.options.maxConcurrency);
|
|
|
|
|
this.log = (...args) => log(...args);
|
|
|
|
|
|
|
|
|
|
const error = Preprocessor.validateWorkflow(workflow);
|
|
|
|
|
if (error) {
|
|
|
|
|
throw (error);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (this.options.debugChannel?.debugMessage) {
|
|
|
|
|
const oldLog = this.log;
|
|
|
|
|
// @ts-ignore
|
|
|
|
|
this.log = (...args: Parameters<typeof oldLog>) => {
|
|
|
|
|
if (args[1] !== Level.LOG) {
|
|
|
|
|
this.options.debugChannel.debugMessage!(typeof args[0] === 'string' ? args[0] : args[0].message);
|
|
|
|
|
}
|
|
|
|
|
oldLog(...args);
|
|
|
|
|
};
|
|
|
|
|
}
|
2024-08-21 05:10:56 +05:30
|
|
|
|
2024-12-11 14:14:23 +05:30
|
|
|
PlaywrightBlocker.fromLists(fetch, ['https://easylist.to/easylist/easylist.txt']).then(blocker => {
|
2024-08-21 05:10:56 +05:30
|
|
|
this.blocker = blocker;
|
2024-08-21 05:11:28 +05:30
|
|
|
}).catch(err => {
|
2024-08-21 05:10:56 +05:30
|
|
|
this.log(`Failed to initialize ad-blocker:`, Level.ERROR);
|
|
|
|
|
})
|
2024-07-31 20:40:14 +05:30
|
|
|
}
|
|
|
|
|
|
2024-08-21 05:14:58 +05:30
|
|
|
private async applyAdBlocker(page: Page): Promise<void> {
|
|
|
|
|
if (this.blocker) {
|
2024-12-22 22:08:56 +05:30
|
|
|
try {
|
|
|
|
|
await this.blocker.enableBlockingInPage(page);
|
|
|
|
|
} catch (err) {
|
|
|
|
|
this.log(`Ad-blocker operation failed:`, Level.ERROR);
|
|
|
|
|
}
|
2024-08-21 05:14:58 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2024-08-21 05:24:41 +05:30
|
|
|
private async disableAdBlocker(page: Page): Promise<void> {
|
|
|
|
|
if (this.blocker) {
|
2024-12-22 22:08:56 +05:30
|
|
|
try {
|
|
|
|
|
await this.blocker.disableBlockingInPage(page);
|
|
|
|
|
} catch (err) {
|
|
|
|
|
this.log(`Ad-blocker operation failed:`, Level.ERROR);
|
|
|
|
|
}
|
2024-08-21 05:24:41 +05:30
|
|
|
}
|
2024-08-21 05:24:52 +05:30
|
|
|
}
|
2024-08-21 05:24:41 +05:30
|
|
|
|
2024-12-08 18:04:49 +05:30
|
|
|
// private getSelectors(workflow: Workflow, actionId: number): string[] {
|
2024-12-06 22:10:28 +05:30
|
|
|
// const selectors: string[] = [];
|
|
|
|
|
|
2024-12-08 18:04:49 +05:30
|
|
|
// // Validate actionId
|
|
|
|
|
// if (actionId <= 0) {
|
|
|
|
|
// console.log("No previous selectors to collect.");
|
|
|
|
|
// return selectors; // Empty array as there are no previous steps
|
|
|
|
|
// }
|
|
|
|
|
|
|
|
|
|
// // Iterate from the start up to (but not including) actionId
|
|
|
|
|
// for (let index = 0; index < actionId; index++) {
|
|
|
|
|
// const currentSelectors = workflow[index]?.where?.selectors;
|
|
|
|
|
// console.log(`Selectors at step ${index}:`, currentSelectors);
|
|
|
|
|
|
|
|
|
|
// if (currentSelectors && currentSelectors.length > 0) {
|
|
|
|
|
// currentSelectors.forEach((selector) => {
|
2024-12-06 22:10:28 +05:30
|
|
|
// if (!selectors.includes(selector)) {
|
|
|
|
|
// selectors.push(selector); // Avoid duplicates
|
|
|
|
|
// }
|
|
|
|
|
// });
|
|
|
|
|
// }
|
|
|
|
|
// }
|
|
|
|
|
|
2024-12-08 18:04:49 +05:30
|
|
|
// console.log("Collected Selectors:", selectors);
|
2024-12-06 22:10:28 +05:30
|
|
|
// return selectors;
|
|
|
|
|
// }
|
|
|
|
|
|
2024-12-07 22:34:02 +05:30
|
|
|
private getSelectors(workflow: Workflow): string[] {
|
|
|
|
|
const selectorsSet = new Set<string>();
|
2024-12-06 03:39:15 +05:30
|
|
|
|
2024-12-07 22:34:02 +05:30
|
|
|
if (workflow.length === 0) {
|
|
|
|
|
return [];
|
2024-12-06 22:10:28 +05:30
|
|
|
}
|
|
|
|
|
|
2024-12-07 22:34:02 +05:30
|
|
|
for (let index = workflow.length - 1; index >= 0; index--) {
|
2024-12-06 22:10:28 +05:30
|
|
|
const currentSelectors = workflow[index]?.where?.selectors;
|
|
|
|
|
|
|
|
|
|
if (currentSelectors && currentSelectors.length > 0) {
|
2024-12-07 22:34:02 +05:30
|
|
|
currentSelectors.forEach((selector) => selectorsSet.add(selector));
|
|
|
|
|
return Array.from(selectorsSet);
|
2024-12-06 03:39:15 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2024-12-07 22:34:02 +05:30
|
|
|
return [];
|
2024-12-06 03:39:15 +05:30
|
|
|
}
|
|
|
|
|
|
2024-12-06 22:10:28 +05:30
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
/**
|
|
|
|
|
* Returns the context object from given Page and the current workflow.\
|
|
|
|
|
* \
|
|
|
|
|
* `workflow` is used for selector extraction - function searches for used selectors to
|
|
|
|
|
* look for later in the page's context.
|
|
|
|
|
* @param page Playwright Page object
|
|
|
|
|
* @param workflow Current **initialized** workflow (array of where-what pairs).
|
|
|
|
|
* @returns {PageState} State of the current page.
|
|
|
|
|
*/
|
2024-12-07 21:16:58 +05:30
|
|
|
private async getState(page: Page, workflowCopy: Workflow, selectors: string[]): Promise<PageState> {
|
2024-07-31 20:40:14 +05:30
|
|
|
/**
|
|
|
|
|
* All the selectors present in the current Workflow
|
|
|
|
|
*/
|
2024-12-06 03:39:15 +05:30
|
|
|
// const selectors = Preprocessor.extractSelectors(workflow);
|
2024-12-07 21:16:58 +05:30
|
|
|
// console.log("Current selectors:", selectors);
|
2024-07-31 20:40:14 +05:30
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Determines whether the element targetted by the selector is [actionable](https://playwright.dev/docs/actionability).
|
|
|
|
|
* @param selector Selector to be queried
|
|
|
|
|
* @returns True if the targetted element is actionable, false otherwise.
|
|
|
|
|
*/
|
2024-12-07 21:16:58 +05:30
|
|
|
// const actionable = async (selector: string): Promise<boolean> => {
|
|
|
|
|
// try {
|
|
|
|
|
// const proms = [
|
2024-12-14 20:32:07 +05:30
|
|
|
// page.isEnabled(selector, { timeout: 10000 }),
|
|
|
|
|
// page.isVisible(selector, { timeout: 10000 }),
|
2024-12-07 21:16:58 +05:30
|
|
|
// ];
|
|
|
|
|
|
|
|
|
|
// return await Promise.all(proms).then((bools) => bools.every((x) => x));
|
|
|
|
|
// } catch (e) {
|
|
|
|
|
// // log(<Error>e, Level.ERROR);
|
|
|
|
|
// return false;
|
|
|
|
|
// }
|
|
|
|
|
// };
|
2024-07-31 20:40:14 +05:30
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Object of selectors present in the current page.
|
|
|
|
|
*/
|
2024-12-07 21:16:58 +05:30
|
|
|
// const presentSelectors: SelectorArray = await Promise.all(
|
|
|
|
|
// selectors.map(async (selector) => {
|
|
|
|
|
// if (await actionable(selector)) {
|
|
|
|
|
// return [selector];
|
|
|
|
|
// }
|
|
|
|
|
// return [];
|
|
|
|
|
// }),
|
|
|
|
|
// ).then((x) => x.flat());
|
2024-12-14 20:32:07 +05:30
|
|
|
|
|
|
|
|
const presentSelectors: SelectorArray = await Promise.all(
|
|
|
|
|
selectors.map(async (selector) => {
|
|
|
|
|
try {
|
|
|
|
|
await page.waitForSelector(selector, { state: 'attached' });
|
|
|
|
|
return [selector];
|
|
|
|
|
} catch (e) {
|
|
|
|
|
return [];
|
|
|
|
|
}
|
|
|
|
|
}),
|
|
|
|
|
).then((x) => x.flat());
|
2024-12-08 18:04:49 +05:30
|
|
|
|
2024-12-07 22:34:02 +05:30
|
|
|
const action = workflowCopy[workflowCopy.length - 1];
|
2024-12-07 21:16:58 +05:30
|
|
|
|
2024-12-08 18:04:49 +05:30
|
|
|
// console.log("Next action:", action)
|
|
|
|
|
|
2024-12-07 21:16:58 +05:30
|
|
|
let url: any = page.url();
|
|
|
|
|
|
2024-12-08 18:04:49 +05:30
|
|
|
if (action && action.where.url !== url && action.where.url !== "about:blank") {
|
2024-12-07 21:16:58 +05:30
|
|
|
url = action.where.url;
|
|
|
|
|
}
|
2024-07-31 20:40:14 +05:30
|
|
|
|
|
|
|
|
return {
|
2024-12-07 21:16:58 +05:30
|
|
|
url,
|
2024-07-31 20:40:14 +05:30
|
|
|
cookies: (await page.context().cookies([page.url()]))
|
|
|
|
|
.reduce((p, cookie) => (
|
|
|
|
|
{
|
|
|
|
|
...p,
|
|
|
|
|
[cookie.name]: cookie.value,
|
|
|
|
|
}), {}),
|
2024-12-14 20:32:07 +05:30
|
|
|
selectors: presentSelectors,
|
2024-07-31 20:40:14 +05:30
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Tests if the given action is applicable with the given context.
|
|
|
|
|
* @param where Tested *where* condition
|
|
|
|
|
* @param context Current browser context.
|
|
|
|
|
* @returns True if `where` is applicable in the given context, false otherwise
|
|
|
|
|
*/
|
|
|
|
|
private applicable(where: Where, context: PageState, usedActions: string[] = []): boolean {
|
|
|
|
|
/**
|
|
|
|
|
* Given two arbitrary objects, determines whether `subset` is a subset of `superset`.\
|
|
|
|
|
* \
|
|
|
|
|
* For every key in `subset`, there must be a corresponding key with equal scalar
|
|
|
|
|
* value in `superset`, or `inclusive(subset[key], superset[key])` must hold.
|
|
|
|
|
* @param subset Arbitrary non-cyclic JS object (where clause)
|
|
|
|
|
* @param superset Arbitrary non-cyclic JS object (browser context)
|
|
|
|
|
* @returns `true` if `subset <= superset`, `false` otherwise.
|
|
|
|
|
*/
|
|
|
|
|
const inclusive = (subset: Record<string, unknown>, superset: Record<string, unknown>)
|
|
|
|
|
: boolean => (
|
|
|
|
|
Object.entries(subset).every(
|
|
|
|
|
([key, value]) => {
|
|
|
|
|
/**
|
|
|
|
|
* Arrays are compared without order (are transformed into objects before comparison).
|
|
|
|
|
*/
|
|
|
|
|
const parsedValue = Array.isArray(value) ? arrayToObject(value) : value;
|
|
|
|
|
|
|
|
|
|
const parsedSuperset: Record<string, unknown> = {};
|
|
|
|
|
parsedSuperset[key] = Array.isArray(superset[key])
|
|
|
|
|
? arrayToObject(<any>superset[key])
|
|
|
|
|
: superset[key];
|
|
|
|
|
|
2025-03-03 18:09:27 +05:30
|
|
|
if ((key === 'url' || key === 'selectors') &&
|
|
|
|
|
Array.isArray(value) && Array.isArray(superset[key]) &&
|
|
|
|
|
value.length === 0 && (superset[key] as any[]).length === 0) {
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
|
2025-02-20 19:04:43 +05:30
|
|
|
if (key === 'selectors' && Array.isArray(value) && Array.isArray(superset[key])) {
|
|
|
|
|
return value.some(selector =>
|
|
|
|
|
(superset[key] as any[]).includes(selector)
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
// Every `subset` key must exist in the `superset` and
|
|
|
|
|
// have the same value (strict equality), or subset[key] <= superset[key]
|
|
|
|
|
return parsedSuperset[key]
|
|
|
|
|
&& (
|
|
|
|
|
(parsedSuperset[key] === parsedValue)
|
|
|
|
|
|| ((parsedValue).constructor.name === 'RegExp' && (<RegExp>parsedValue).test(<string>parsedSuperset[key]))
|
|
|
|
|
|| (
|
|
|
|
|
(parsedValue).constructor.name !== 'RegExp'
|
|
|
|
|
&& typeof parsedValue === 'object' && inclusive(<typeof subset>parsedValue, <typeof superset>parsedSuperset[key])
|
|
|
|
|
)
|
|
|
|
|
);
|
|
|
|
|
},
|
|
|
|
|
)
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
// Every value in the "where" object should be compliant to the current state.
|
|
|
|
|
return Object.entries(where).every(
|
|
|
|
|
([key, value]) => {
|
|
|
|
|
if (operators.includes(<any>key)) {
|
|
|
|
|
const array = Array.isArray(value)
|
|
|
|
|
? value as Where[]
|
|
|
|
|
: Object.entries(value).map((a) => Object.fromEntries([a]));
|
|
|
|
|
// every condition is treated as a single context
|
|
|
|
|
|
2024-09-21 22:20:04 +05:30
|
|
|
switch (key as keyof typeof operators) {
|
2024-12-03 17:51:06 +05:30
|
|
|
case '$and' as keyof typeof operators:
|
2024-07-31 20:40:14 +05:30
|
|
|
return array?.every((x) => this.applicable(x, context));
|
2024-12-03 17:51:06 +05:30
|
|
|
case '$or' as keyof typeof operators:
|
2024-07-31 20:40:14 +05:30
|
|
|
return array?.some((x) => this.applicable(x, context));
|
2024-12-03 17:51:06 +05:30
|
|
|
case '$not' as keyof typeof operators:
|
2024-07-31 20:40:14 +05:30
|
|
|
return !this.applicable(<Where>value, context); // $not should be a unary operator
|
|
|
|
|
default:
|
|
|
|
|
throw new Error('Undefined logic operator.');
|
|
|
|
|
}
|
|
|
|
|
} else if (meta.includes(<any>key)) {
|
|
|
|
|
const testRegexString = (x: string) => {
|
|
|
|
|
if (typeof value === 'string') {
|
|
|
|
|
return x === value;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
return (<RegExp><unknown>value).test(x);
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
switch (key as keyof typeof meta) {
|
2024-12-03 17:51:06 +05:30
|
|
|
case '$before' as keyof typeof meta:
|
2024-07-31 20:40:14 +05:30
|
|
|
return !usedActions.find(testRegexString);
|
2024-12-03 17:51:06 +05:30
|
|
|
case '$after' as keyof typeof meta:
|
2024-07-31 20:40:14 +05:30
|
|
|
return !!usedActions.find(testRegexString);
|
|
|
|
|
default:
|
|
|
|
|
throw new Error('Undefined meta operator.');
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
// Current key is a base condition (url, cookies, selectors)
|
|
|
|
|
return inclusive({ [key]: value }, context);
|
|
|
|
|
}
|
|
|
|
|
},
|
|
|
|
|
);
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
/**
|
|
|
|
|
* Given a Playwright's page object and a "declarative" list of actions, this function
|
|
|
|
|
* calls all mentioned functions on the Page object.\
|
|
|
|
|
* \
|
|
|
|
|
* Manipulates the iterator indexes (experimental feature, likely to be removed in
|
2024-09-21 22:20:04 +05:30
|
|
|
* the following versions of maxun-core)
|
2024-07-31 20:40:14 +05:30
|
|
|
* @param page Playwright Page object
|
|
|
|
|
* @param steps Array of actions.
|
|
|
|
|
*/
|
|
|
|
|
private async carryOutSteps(page: Page, steps: What[]): Promise<void> {
|
|
|
|
|
/**
|
|
|
|
|
* Defines overloaded (or added) methods/actions usable in the workflow.
|
|
|
|
|
* If a method overloads any existing method of the Page class, it accepts the same set
|
|
|
|
|
* of parameters *(but can override some!)*\
|
|
|
|
|
* \
|
|
|
|
|
* Also, following piece of code defines functions to be run in the browser's context.
|
|
|
|
|
* Beware of false linter errors - here, we know better!
|
|
|
|
|
*/
|
|
|
|
|
const wawActions: Record<CustomFunctions, (...args: any[]) => void> = {
|
|
|
|
|
screenshot: async (params: PageScreenshotOptions) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('screenshot');
|
|
|
|
|
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
const screenshotBuffer = await page.screenshot({
|
|
|
|
|
...params, path: undefined,
|
|
|
|
|
});
|
|
|
|
|
await this.options.binaryCallback(screenshotBuffer, 'image/png');
|
|
|
|
|
},
|
|
|
|
|
enqueueLinks: async (selector: string) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('enqueueLinks');
|
|
|
|
|
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
const links: string[] = await page.locator(selector)
|
|
|
|
|
.evaluateAll(
|
|
|
|
|
// @ts-ignore
|
|
|
|
|
(elements) => elements.map((a) => a.href).filter((x) => x),
|
|
|
|
|
);
|
2024-10-27 18:16:48 +05:30
|
|
|
const context = page.context();
|
2024-07-31 20:40:14 +05:30
|
|
|
|
|
|
|
|
for (const link of links) {
|
|
|
|
|
// eslint-disable-next-line
|
|
|
|
|
this.concurrency.addJob(async () => {
|
|
|
|
|
try {
|
2024-10-27 18:16:48 +05:30
|
|
|
const newPage = await context.newPage();
|
2024-07-31 20:40:14 +05:30
|
|
|
await newPage.goto(link);
|
2024-12-14 20:32:07 +05:30
|
|
|
await newPage.waitForLoadState('networkidle');
|
2024-07-31 20:40:14 +05:30
|
|
|
await this.runLoop(newPage, this.initializedWorkflow!);
|
|
|
|
|
} catch (e) {
|
|
|
|
|
// `runLoop` uses soft mode, so it recovers from it's own exceptions
|
|
|
|
|
// but newPage(), goto() and waitForLoadState() don't (and will kill
|
|
|
|
|
// the interpreter by throwing).
|
|
|
|
|
this.log(<Error>e, Level.ERROR);
|
|
|
|
|
}
|
|
|
|
|
});
|
|
|
|
|
}
|
|
|
|
|
await page.close();
|
|
|
|
|
},
|
|
|
|
|
scrape: async (selector?: string) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('scrape');
|
|
|
|
|
}
|
|
|
|
|
|
2024-08-02 19:05:41 +05:30
|
|
|
await this.ensureScriptsLoaded(page);
|
2024-08-03 02:20:59 +05:30
|
|
|
|
2024-08-01 19:16:13 +05:30
|
|
|
const scrapeResults: Record<string, string>[] = await page.evaluate((s) => window.scrape(s ?? null), selector);
|
2024-07-31 20:40:14 +05:30
|
|
|
await this.options.serializableCallback(scrapeResults);
|
2024-08-03 02:20:59 +05:30
|
|
|
},
|
|
|
|
|
|
2024-12-30 01:28:22 +05:30
|
|
|
scrapeSchema: async (schema: Record<string, { selector: string; tag: string, attribute: string; shadow: string}>) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('scrapeSchema');
|
|
|
|
|
}
|
2025-05-07 09:30:25 +05:30
|
|
|
|
|
|
|
|
if (this.options.mode && this.options.mode === 'editor') {
|
|
|
|
|
await this.options.serializableCallback({});
|
|
|
|
|
return;
|
|
|
|
|
}
|
2025-04-29 00:29:52 +05:30
|
|
|
|
2024-08-03 02:20:59 +05:30
|
|
|
await this.ensureScriptsLoaded(page);
|
2024-12-03 17:51:06 +05:30
|
|
|
|
2024-08-03 20:32:12 +05:30
|
|
|
const scrapeResult = await page.evaluate((schemaObj) => window.scrapeSchema(schemaObj), schema);
|
2024-12-03 17:51:06 +05:30
|
|
|
|
2025-04-29 00:29:52 +05:30
|
|
|
if (!this.cumulativeResults || !Array.isArray(this.cumulativeResults)) {
|
|
|
|
|
this.cumulativeResults = [];
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (this.cumulativeResults.length === 0) {
|
|
|
|
|
this.cumulativeResults.push({});
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
const mergedResult = this.cumulativeResults[0];
|
|
|
|
|
const resultToProcess = Array.isArray(scrapeResult) ? scrapeResult[0] : scrapeResult;
|
|
|
|
|
|
|
|
|
|
Object.entries(resultToProcess).forEach(([key, value]) => {
|
|
|
|
|
if (value !== undefined) {
|
|
|
|
|
mergedResult[key] = value;
|
|
|
|
|
}
|
2024-12-03 22:21:26 +05:30
|
|
|
});
|
2025-04-29 00:29:52 +05:30
|
|
|
|
|
|
|
|
console.log("Updated merged result:", mergedResult);
|
|
|
|
|
await this.options.serializableCallback([mergedResult]);
|
2024-08-03 02:20:59 +05:30
|
|
|
},
|
|
|
|
|
|
2024-08-16 18:21:34 +05:30
|
|
|
scrapeList: async (config: { listSelector: string, fields: any, limit?: number, pagination: any }) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('scrapeList');
|
|
|
|
|
}
|
|
|
|
|
|
2025-05-07 09:30:25 +05:30
|
|
|
if (this.options.mode && this.options.mode === 'editor') {
|
|
|
|
|
await this.options.serializableCallback({});
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2024-08-06 23:20:10 +05:30
|
|
|
await this.ensureScriptsLoaded(page);
|
2025-06-07 14:23:32 +05:30
|
|
|
|
|
|
|
|
if (this.options.debugChannel?.incrementScrapeListIndex) {
|
|
|
|
|
this.options.debugChannel.incrementScrapeListIndex();
|
|
|
|
|
}
|
|
|
|
|
|
2024-08-18 22:22:43 +05:30
|
|
|
if (!config.pagination) {
|
|
|
|
|
const scrapeResults: Record<string, any>[] = await page.evaluate((cfg) => window.scrapeList(cfg), config);
|
|
|
|
|
await this.options.serializableCallback(scrapeResults);
|
|
|
|
|
} else {
|
|
|
|
|
const scrapeResults: Record<string, any>[] = await this.handlePagination(page, config);
|
|
|
|
|
await this.options.serializableCallback(scrapeResults);
|
|
|
|
|
}
|
2024-08-06 23:20:10 +05:30
|
|
|
},
|
|
|
|
|
|
2024-08-12 06:28:08 +05:30
|
|
|
scrapeListAuto: async (config: { listSelector: string }) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('scrapeListAuto');
|
|
|
|
|
}
|
|
|
|
|
|
2024-08-12 06:28:08 +05:30
|
|
|
await this.ensureScriptsLoaded(page);
|
2024-08-12 06:35:00 +05:30
|
|
|
|
2024-08-12 06:28:08 +05:30
|
|
|
const scrapeResults: { selector: string, innerText: string }[] = await page.evaluate((listSelector) => {
|
|
|
|
|
return window.scrapeListAuto(listSelector);
|
|
|
|
|
}, config.listSelector);
|
2024-08-12 06:35:00 +05:30
|
|
|
|
2024-08-12 06:28:08 +05:30
|
|
|
await this.options.serializableCallback(scrapeResults);
|
|
|
|
|
},
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
scroll: async (pages?: number) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('scroll');
|
|
|
|
|
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
await page.evaluate(async (pagesInternal) => {
|
|
|
|
|
for (let i = 1; i <= (pagesInternal ?? 1); i += 1) {
|
|
|
|
|
// @ts-ignore
|
|
|
|
|
window.scrollTo(0, window.scrollY + window.innerHeight);
|
|
|
|
|
}
|
|
|
|
|
}, pages ?? 1);
|
|
|
|
|
},
|
2024-08-12 06:35:00 +05:30
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
script: async (code: string) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('script');
|
|
|
|
|
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
const AsyncFunction: FunctionConstructor = Object.getPrototypeOf(
|
|
|
|
|
async () => { },
|
|
|
|
|
).constructor;
|
|
|
|
|
const x = new AsyncFunction('page', 'log', code);
|
|
|
|
|
await x(page, this.log);
|
|
|
|
|
},
|
2024-08-18 22:19:24 +05:30
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
flag: async () => new Promise((res) => {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType('flag');
|
|
|
|
|
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
this.emit('flag', page, res);
|
|
|
|
|
}),
|
|
|
|
|
};
|
|
|
|
|
|
2024-12-10 20:33:35 +05:30
|
|
|
const executeAction = async (invokee: any, methodName: string, args: any) => {
|
|
|
|
|
console.log("Executing action:", methodName, args);
|
2025-01-22 23:51:17 +05:30
|
|
|
|
|
|
|
|
if (methodName === 'press' || methodName === 'type') {
|
|
|
|
|
// Extract only the first two arguments for these methods
|
|
|
|
|
const limitedArgs = Array.isArray(args) ? args.slice(0, 2) : [args];
|
|
|
|
|
await (<any>invokee[methodName])(...limitedArgs);
|
|
|
|
|
return;
|
|
|
|
|
}
|
|
|
|
|
|
2024-12-10 20:33:35 +05:30
|
|
|
if (!args || Array.isArray(args)) {
|
|
|
|
|
await (<any>invokee[methodName])(...(args ?? []));
|
|
|
|
|
} else {
|
|
|
|
|
await (<any>invokee[methodName])(args);
|
|
|
|
|
}
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
for (const step of steps) {
|
2024-12-03 17:51:06 +05:30
|
|
|
this.log(`Launching ${String(step.action)}`, Level.LOG);
|
2024-07-31 20:40:14 +05:30
|
|
|
|
|
|
|
|
if (step.action in wawActions) {
|
|
|
|
|
// "Arrayifying" here should not be needed (TS + syntax checker - only arrays; but why not)
|
|
|
|
|
const params = !step.args || Array.isArray(step.args) ? step.args : [step.args];
|
|
|
|
|
await wawActions[step.action as CustomFunctions](...(params ?? []));
|
|
|
|
|
} else {
|
2025-04-27 15:22:06 +05:30
|
|
|
if (this.options.debugChannel?.setActionType) {
|
|
|
|
|
this.options.debugChannel.setActionType(String(step.action));
|
|
|
|
|
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
// Implements the dot notation for the "method name" in the workflow
|
2024-12-03 17:51:06 +05:30
|
|
|
const levels = String(step.action).split('.');
|
2024-07-31 20:40:14 +05:30
|
|
|
const methodName = levels[levels.length - 1];
|
|
|
|
|
|
|
|
|
|
let invokee: any = page;
|
|
|
|
|
for (const level of levels.splice(0, levels.length - 1)) {
|
|
|
|
|
invokee = invokee[level];
|
|
|
|
|
}
|
|
|
|
|
|
2024-12-10 20:33:35 +05:30
|
|
|
if (methodName === 'waitForLoadState') {
|
|
|
|
|
try {
|
|
|
|
|
await executeAction(invokee, methodName, step.args);
|
|
|
|
|
} catch (error) {
|
|
|
|
|
await executeAction(invokee, methodName, 'domcontentloaded');
|
|
|
|
|
}
|
|
|
|
|
} else if (methodName === 'click') {
|
|
|
|
|
try {
|
|
|
|
|
await executeAction(invokee, methodName, step.args);
|
|
|
|
|
} catch (error) {
|
2024-12-17 21:55:24 +05:30
|
|
|
try{
|
|
|
|
|
await executeAction(invokee, methodName, [step.args[0], { force: true }]);
|
|
|
|
|
} catch (error) {
|
|
|
|
|
continue
|
|
|
|
|
}
|
2024-12-10 20:33:35 +05:30
|
|
|
}
|
2024-07-31 20:40:14 +05:30
|
|
|
} else {
|
2024-12-10 20:33:35 +05:30
|
|
|
await executeAction(invokee, methodName, step.args);
|
2024-07-31 20:40:14 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
await new Promise((res) => { setTimeout(res, 500); });
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
private async handlePagination(page: Page, config: {
|
|
|
|
|
listSelector: string,
|
|
|
|
|
fields: any,
|
|
|
|
|
limit?: number,
|
|
|
|
|
pagination: any
|
|
|
|
|
}) {
|
2024-08-17 23:54:00 +05:30
|
|
|
let allResults: Record<string, any>[] = [];
|
2024-08-19 01:38:41 +05:30
|
|
|
let previousHeight = 0;
|
2024-08-20 23:15:33 +05:30
|
|
|
let scrapedItems: Set<string> = new Set<string>();
|
2025-01-27 15:00:19 +05:30
|
|
|
let visitedUrls: Set<string> = new Set<string>();
|
|
|
|
|
const MAX_RETRIES = 3;
|
|
|
|
|
const RETRY_DELAY = 1000; // 1 second delay between retries
|
2025-04-08 15:09:38 +05:30
|
|
|
const MAX_UNCHANGED_RESULTS = 5;
|
2024-08-18 22:19:24 +05:30
|
|
|
|
2025-01-21 14:24:50 +05:30
|
|
|
const debugLog = (message: string, ...args: any[]) => {
|
2025-01-27 15:00:19 +05:30
|
|
|
console.log(`[Page ${visitedUrls.size}] [URL: ${page.url()}] ${message}`, ...args);
|
2025-01-21 14:24:50 +05:30
|
|
|
};
|
|
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
const scrapeCurrentPage = async () => {
|
|
|
|
|
const results = await page.evaluate((cfg) => window.scrapeList(cfg), config);
|
|
|
|
|
const newResults = results.filter(item => {
|
|
|
|
|
const uniqueKey = JSON.stringify(item);
|
|
|
|
|
if (scrapedItems.has(uniqueKey)) return false;
|
|
|
|
|
scrapedItems.add(uniqueKey);
|
|
|
|
|
return true;
|
|
|
|
|
});
|
|
|
|
|
allResults = allResults.concat(newResults);
|
|
|
|
|
debugLog("Results collected:", allResults.length);
|
2025-06-07 14:23:32 +05:30
|
|
|
|
|
|
|
|
await this.options.serializableCallback(allResults);
|
2025-01-27 15:00:19 +05:30
|
|
|
};
|
2025-01-09 17:01:08 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
const checkLimit = () => {
|
|
|
|
|
if (config.limit && allResults.length >= config.limit) {
|
|
|
|
|
allResults = allResults.slice(0, config.limit);
|
|
|
|
|
return true;
|
|
|
|
|
}
|
|
|
|
|
return false;
|
|
|
|
|
};
|
2024-08-18 22:47:14 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
// Enhanced button finder with retry mechanism
|
2025-03-04 10:53:57 +05:30
|
|
|
const findWorkingButton = async (selectors: string[]): Promise<{
|
|
|
|
|
button: ElementHandle | null,
|
|
|
|
|
workingSelector: string | null,
|
|
|
|
|
updatedSelectors: string[]
|
2025-01-27 15:00:19 +05:30
|
|
|
}> => {
|
2025-03-04 10:53:57 +05:30
|
|
|
let updatedSelectors = [...selectors];
|
|
|
|
|
|
|
|
|
|
for (let i = 0; i < selectors.length; i++) {
|
|
|
|
|
const selector = selectors[i];
|
|
|
|
|
let retryCount = 0;
|
|
|
|
|
let selectorSuccess = false;
|
|
|
|
|
|
|
|
|
|
while (retryCount < MAX_RETRIES && !selectorSuccess) {
|
|
|
|
|
try {
|
|
|
|
|
const button = await page.waitForSelector(selector, {
|
|
|
|
|
state: 'attached',
|
|
|
|
|
timeout: 10000
|
|
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (button) {
|
|
|
|
|
debugLog('Found working selector:', selector);
|
|
|
|
|
return {
|
|
|
|
|
button,
|
|
|
|
|
workingSelector: selector,
|
|
|
|
|
updatedSelectors
|
|
|
|
|
};
|
|
|
|
|
}
|
|
|
|
|
} catch (error) {
|
|
|
|
|
retryCount++;
|
|
|
|
|
debugLog(`Selector "${selector}" failed: attempt ${retryCount}/${MAX_RETRIES}`);
|
|
|
|
|
|
|
|
|
|
if (retryCount < MAX_RETRIES) {
|
|
|
|
|
await page.waitForTimeout(RETRY_DELAY);
|
|
|
|
|
} else {
|
|
|
|
|
debugLog(`Removing failed selector "${selector}" after ${MAX_RETRIES} attempts`);
|
|
|
|
|
updatedSelectors = updatedSelectors.filter(s => s !== selector);
|
|
|
|
|
}
|
2025-01-27 15:00:19 +05:30
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|
2025-03-04 10:53:57 +05:30
|
|
|
|
|
|
|
|
return {
|
|
|
|
|
button: null,
|
|
|
|
|
workingSelector: null,
|
|
|
|
|
updatedSelectors
|
|
|
|
|
};
|
2025-01-27 15:00:19 +05:30
|
|
|
};
|
2025-01-09 17:01:08 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
const retryOperation = async (operation: () => Promise<boolean>, retryCount = 0): Promise<boolean> => {
|
|
|
|
|
try {
|
|
|
|
|
return await operation();
|
|
|
|
|
} catch (error) {
|
|
|
|
|
if (retryCount < MAX_RETRIES) {
|
|
|
|
|
debugLog(`Retrying operation. Attempt ${retryCount + 1} of ${MAX_RETRIES}`);
|
|
|
|
|
await page.waitForTimeout(RETRY_DELAY);
|
|
|
|
|
return retryOperation(operation, retryCount + 1);
|
|
|
|
|
}
|
|
|
|
|
debugLog(`Operation failed after ${MAX_RETRIES} retries`);
|
|
|
|
|
return false;
|
|
|
|
|
}
|
|
|
|
|
};
|
2025-01-09 17:01:08 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
let availableSelectors = config.pagination.selector.split(',');
|
2025-04-08 15:09:38 +05:30
|
|
|
let unchangedResultCounter = 0;
|
2024-08-20 23:15:33 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
try {
|
2025-04-03 20:08:27 +05:30
|
|
|
while (true) {
|
2025-01-27 15:00:19 +05:30
|
|
|
switch (config.pagination.type) {
|
2025-02-03 23:36:57 +05:30
|
|
|
case 'scrollDown': {
|
2025-04-08 15:09:38 +05:30
|
|
|
let previousResultCount = allResults.length;
|
|
|
|
|
|
|
|
|
|
await scrapeCurrentPage();
|
|
|
|
|
|
|
|
|
|
if (checkLimit()) {
|
|
|
|
|
return allResults;
|
|
|
|
|
}
|
|
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
await page.evaluate(() => window.scrollTo(0, document.body.scrollHeight));
|
|
|
|
|
await page.waitForTimeout(2000);
|
|
|
|
|
|
|
|
|
|
const currentHeight = await page.evaluate(() => document.body.scrollHeight);
|
2025-04-08 15:09:38 +05:30
|
|
|
const currentResultCount = allResults.length;
|
|
|
|
|
|
|
|
|
|
if (currentResultCount === previousResultCount) {
|
|
|
|
|
unchangedResultCounter++;
|
|
|
|
|
if (unchangedResultCounter >= MAX_UNCHANGED_RESULTS) {
|
|
|
|
|
return allResults;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
unchangedResultCounter = 0;
|
|
|
|
|
}
|
|
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
if (currentHeight === previousHeight) {
|
|
|
|
|
return allResults;
|
|
|
|
|
}
|
2025-01-21 14:24:50 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
previousHeight = currentHeight;
|
|
|
|
|
break;
|
2025-02-03 23:36:57 +05:30
|
|
|
}
|
2025-01-21 14:24:50 +05:30
|
|
|
|
2025-02-03 23:36:57 +05:30
|
|
|
case 'scrollUp': {
|
2025-04-08 15:09:38 +05:30
|
|
|
let previousResultCount = allResults.length;
|
|
|
|
|
|
|
|
|
|
await scrapeCurrentPage();
|
|
|
|
|
|
|
|
|
|
if (checkLimit()) {
|
|
|
|
|
return allResults;
|
|
|
|
|
}
|
|
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
await page.evaluate(() => window.scrollTo(0, 0));
|
|
|
|
|
await page.waitForTimeout(2000);
|
|
|
|
|
|
|
|
|
|
const currentTopHeight = await page.evaluate(() => document.documentElement.scrollTop);
|
2025-04-08 15:09:38 +05:30
|
|
|
const currentResultCount = allResults.length;
|
|
|
|
|
|
|
|
|
|
if (currentResultCount === previousResultCount) {
|
|
|
|
|
unchangedResultCounter++;
|
|
|
|
|
if (unchangedResultCounter >= MAX_UNCHANGED_RESULTS) {
|
|
|
|
|
return allResults;
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
unchangedResultCounter = 0;
|
|
|
|
|
}
|
|
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
if (currentTopHeight === 0) {
|
|
|
|
|
return allResults;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
previousHeight = currentTopHeight;
|
|
|
|
|
break;
|
2025-02-03 23:36:57 +05:30
|
|
|
}
|
2025-01-27 15:00:19 +05:30
|
|
|
|
|
|
|
|
case 'clickNext': {
|
|
|
|
|
const currentUrl = page.url();
|
|
|
|
|
visitedUrls.add(currentUrl);
|
|
|
|
|
|
|
|
|
|
await scrapeCurrentPage();
|
|
|
|
|
if (checkLimit()) return allResults;
|
2025-03-17 16:03:14 +05:30
|
|
|
|
2025-03-04 10:53:57 +05:30
|
|
|
const { button, workingSelector, updatedSelectors } = await findWorkingButton(availableSelectors);
|
|
|
|
|
|
|
|
|
|
availableSelectors = updatedSelectors;
|
2025-03-17 16:03:14 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
if (!button || !workingSelector) {
|
2025-03-17 16:03:14 +05:30
|
|
|
// Final retry for navigation when no selectors work
|
2025-01-27 15:00:19 +05:30
|
|
|
const success = await retryOperation(async () => {
|
2025-01-21 14:24:50 +05:30
|
|
|
try {
|
2025-01-27 15:00:19 +05:30
|
|
|
await page.evaluate(() => window.history.forward());
|
|
|
|
|
const newUrl = page.url();
|
|
|
|
|
return !visitedUrls.has(newUrl);
|
|
|
|
|
} catch {
|
|
|
|
|
return false;
|
2025-01-21 14:24:50 +05:30
|
|
|
}
|
2025-01-27 15:00:19 +05:30
|
|
|
});
|
|
|
|
|
|
|
|
|
|
if (!success) return allResults;
|
|
|
|
|
break;
|
|
|
|
|
}
|
2025-03-17 16:03:14 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
let retryCount = 0;
|
2025-03-17 16:03:14 +05:30
|
|
|
let paginationSuccess = false;
|
|
|
|
|
|
2025-07-07 01:45:18 +05:30
|
|
|
// Capture basic content signature before click - with XPath support
|
2025-03-17 16:03:14 +05:30
|
|
|
const captureContentSignature = async () => {
|
2025-07-07 01:45:18 +05:30
|
|
|
return await page.evaluate((listSelector) => {
|
|
|
|
|
const isXPath = (selector: string) => {
|
|
|
|
|
return selector.startsWith('//') || selector.startsWith('./') || selector.includes('::');
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
let items: NodeListOf<Element> | Element[] = [];
|
|
|
|
|
|
|
|
|
|
if (isXPath(listSelector)) {
|
|
|
|
|
try {
|
|
|
|
|
// Use XPath to find elements
|
|
|
|
|
const xpathResult = document.evaluate(
|
|
|
|
|
listSelector,
|
|
|
|
|
document,
|
|
|
|
|
null,
|
|
|
|
|
XPathResult.ORDERED_NODE_SNAPSHOT_TYPE,
|
|
|
|
|
null
|
|
|
|
|
);
|
|
|
|
|
|
|
|
|
|
items = [];
|
|
|
|
|
for (let i = 0; i < xpathResult.snapshotLength; i++) {
|
|
|
|
|
const node = xpathResult.snapshotItem(i);
|
|
|
|
|
if (node && node.nodeType === Node.ELEMENT_NODE) {
|
|
|
|
|
items.push(node as Element);
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} catch (xpathError) {
|
|
|
|
|
console.warn('XPath evaluation failed, trying CSS selector as fallback:', xpathError);
|
|
|
|
|
// Fallback to CSS selector
|
|
|
|
|
try {
|
|
|
|
|
items = document.querySelectorAll(listSelector);
|
|
|
|
|
} catch (cssError) {
|
|
|
|
|
console.warn('CSS selector fallback also failed:', cssError);
|
|
|
|
|
items = [];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
} else {
|
|
|
|
|
try {
|
|
|
|
|
// Use CSS selector
|
|
|
|
|
items = document.querySelectorAll(listSelector);
|
|
|
|
|
} catch (cssError) {
|
|
|
|
|
console.warn('CSS selector failed:', cssError);
|
|
|
|
|
items = [];
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
2025-03-17 16:03:14 +05:30
|
|
|
return {
|
|
|
|
|
url: window.location.href,
|
|
|
|
|
itemCount: items.length,
|
|
|
|
|
firstItems: Array.from(items).slice(0, 3).map(el => el.textContent || '').join('|')
|
|
|
|
|
};
|
|
|
|
|
}, config.listSelector);
|
|
|
|
|
};
|
|
|
|
|
|
|
|
|
|
const beforeSignature = await captureContentSignature();
|
|
|
|
|
debugLog(`Before click: ${beforeSignature.itemCount} items`);
|
|
|
|
|
|
|
|
|
|
while (retryCount < MAX_RETRIES && !paginationSuccess) {
|
2025-01-10 12:49:14 +05:30
|
|
|
try {
|
2025-01-21 14:24:50 +05:30
|
|
|
try {
|
|
|
|
|
await Promise.all([
|
|
|
|
|
page.waitForNavigation({
|
2025-01-27 15:00:19 +05:30
|
|
|
waitUntil: 'networkidle',
|
|
|
|
|
timeout: 15000
|
2025-03-17 16:03:14 +05:30
|
|
|
}).catch(e => {
|
|
|
|
|
throw e;
|
2025-01-21 14:24:50 +05:30
|
|
|
}),
|
2025-01-27 15:00:19 +05:30
|
|
|
button.click()
|
2025-01-21 14:24:50 +05:30
|
|
|
]);
|
2025-03-17 16:03:14 +05:30
|
|
|
debugLog("Navigation successful after regular click");
|
2025-05-09 23:37:20 +05:30
|
|
|
await page.waitForTimeout(2000);
|
2025-03-17 16:03:14 +05:30
|
|
|
paginationSuccess = true;
|
|
|
|
|
} catch (navError) {
|
|
|
|
|
debugLog("Regular click with navigation failed, trying dispatch event with navigation");
|
|
|
|
|
try {
|
|
|
|
|
await Promise.all([
|
|
|
|
|
page.waitForNavigation({
|
|
|
|
|
waitUntil: 'networkidle',
|
|
|
|
|
timeout: 15000
|
|
|
|
|
}).catch(e => {
|
|
|
|
|
throw e;
|
|
|
|
|
}),
|
|
|
|
|
button.dispatchEvent('click')
|
|
|
|
|
]);
|
|
|
|
|
debugLog("Navigation successful after dispatch event");
|
2025-05-09 23:37:20 +05:30
|
|
|
await page.waitForTimeout(2000);
|
2025-03-17 16:03:14 +05:30
|
|
|
paginationSuccess = true;
|
|
|
|
|
} catch (dispatchNavError) {
|
2025-01-27 15:00:19 +05:30
|
|
|
try {
|
2025-03-17 16:03:14 +05:30
|
|
|
await button.click();
|
|
|
|
|
await page.waitForTimeout(2000);
|
|
|
|
|
} catch (clickError) {
|
|
|
|
|
await button.dispatchEvent('click');
|
|
|
|
|
await page.waitForTimeout(2000);
|
2025-01-27 15:00:19 +05:30
|
|
|
}
|
2025-01-21 14:24:50 +05:30
|
|
|
}
|
2025-01-10 12:49:14 +05:30
|
|
|
}
|
2025-03-17 16:03:14 +05:30
|
|
|
|
|
|
|
|
await page.waitForLoadState('networkidle', { timeout: 5000 }).catch(() => {});
|
|
|
|
|
|
|
|
|
|
if (!paginationSuccess) {
|
|
|
|
|
const newUrl = page.url();
|
|
|
|
|
const afterSignature = await captureContentSignature();
|
|
|
|
|
|
|
|
|
|
if (newUrl !== currentUrl) {
|
|
|
|
|
debugLog(`URL changed to ${newUrl}`);
|
|
|
|
|
visitedUrls.add(newUrl);
|
|
|
|
|
paginationSuccess = true;
|
|
|
|
|
}
|
|
|
|
|
else if (afterSignature.firstItems !== beforeSignature.firstItems) {
|
|
|
|
|
debugLog("Content changed without URL change");
|
|
|
|
|
paginationSuccess = true;
|
|
|
|
|
}
|
|
|
|
|
else if (afterSignature.itemCount !== beforeSignature.itemCount) {
|
|
|
|
|
debugLog(`Item count changed from ${beforeSignature.itemCount} to ${afterSignature.itemCount}`);
|
|
|
|
|
paginationSuccess = true;
|
|
|
|
|
}
|
2025-01-27 15:00:19 +05:30
|
|
|
}
|
2025-01-10 12:49:14 +05:30
|
|
|
} catch (error) {
|
2025-03-17 16:03:14 +05:30
|
|
|
debugLog(`Pagination attempt ${retryCount + 1} failed: ${error.message}`);
|
2025-01-10 12:49:14 +05:30
|
|
|
}
|
2025-03-17 16:03:14 +05:30
|
|
|
|
|
|
|
|
if (!paginationSuccess) {
|
2025-01-27 15:00:19 +05:30
|
|
|
retryCount++;
|
|
|
|
|
if (retryCount < MAX_RETRIES) {
|
2025-03-17 16:03:14 +05:30
|
|
|
debugLog(`Retrying pagination - attempt ${retryCount + 1} of ${MAX_RETRIES}`);
|
2025-01-27 15:00:19 +05:30
|
|
|
await page.waitForTimeout(RETRY_DELAY);
|
2025-01-21 14:24:50 +05:30
|
|
|
}
|
2025-01-27 15:00:19 +05:30
|
|
|
}
|
|
|
|
|
}
|
2025-03-17 16:03:14 +05:30
|
|
|
|
|
|
|
|
if (!paginationSuccess) {
|
|
|
|
|
debugLog(`Pagination failed after ${MAX_RETRIES} attempts`);
|
2025-01-27 15:00:19 +05:30
|
|
|
return allResults;
|
|
|
|
|
}
|
2025-03-17 16:03:14 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
break;
|
|
|
|
|
}
|
2025-01-21 14:24:50 +05:30
|
|
|
|
2025-02-03 23:36:57 +05:30
|
|
|
case 'clickLoadMore': {
|
2025-03-03 01:49:31 +05:30
|
|
|
await scrapeCurrentPage();
|
|
|
|
|
if (checkLimit()) return allResults;
|
|
|
|
|
|
|
|
|
|
let loadMoreCounter = 0;
|
2025-07-12 01:19:44 +05:30
|
|
|
// let previousResultCount = allResults.length;
|
|
|
|
|
// let noNewItemsCounter = 0;
|
|
|
|
|
// const MAX_NO_NEW_ITEMS = 2;
|
2025-03-03 01:49:31 +05:30
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
while (true) {
|
2025-03-03 01:49:31 +05:30
|
|
|
// Find working button with retry mechanism
|
2025-03-04 10:53:57 +05:30
|
|
|
const { button: loadMoreButton, workingSelector, updatedSelectors } = await findWorkingButton(availableSelectors);
|
|
|
|
|
|
|
|
|
|
availableSelectors = updatedSelectors;
|
2025-02-03 23:50:29 +05:30
|
|
|
|
|
|
|
|
if (!workingSelector || !loadMoreButton) {
|
|
|
|
|
debugLog('No working Load More selector found after retries');
|
2025-02-03 23:34:28 +05:30
|
|
|
return allResults;
|
|
|
|
|
}
|
2025-02-03 23:50:29 +05:30
|
|
|
|
|
|
|
|
// Implement retry mechanism for clicking the button
|
|
|
|
|
let retryCount = 0;
|
|
|
|
|
let clickSuccess = false;
|
|
|
|
|
|
|
|
|
|
while (retryCount < MAX_RETRIES && !clickSuccess) {
|
2025-02-03 23:34:28 +05:30
|
|
|
try {
|
2025-02-03 23:50:29 +05:30
|
|
|
try {
|
|
|
|
|
await loadMoreButton.click();
|
|
|
|
|
clickSuccess = true;
|
|
|
|
|
} catch (error) {
|
|
|
|
|
debugLog(`Regular click failed on attempt ${retryCount + 1}. Trying DispatchEvent`);
|
|
|
|
|
|
|
|
|
|
// If regular click fails, try dispatchEvent
|
|
|
|
|
try {
|
|
|
|
|
await loadMoreButton.dispatchEvent('click');
|
|
|
|
|
clickSuccess = true;
|
|
|
|
|
} catch (dispatchError) {
|
|
|
|
|
debugLog(`DispatchEvent failed on attempt ${retryCount + 1}.`);
|
|
|
|
|
throw dispatchError; // Propagate error to trigger retry
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (clickSuccess) {
|
|
|
|
|
await page.waitForTimeout(1000);
|
2025-03-03 01:49:31 +05:30
|
|
|
loadMoreCounter++;
|
|
|
|
|
debugLog(`Successfully clicked Load More button (${loadMoreCounter} times)`);
|
2025-02-03 23:50:29 +05:30
|
|
|
}
|
2025-01-21 14:24:50 +05:30
|
|
|
} catch (error) {
|
2025-02-03 23:50:29 +05:30
|
|
|
debugLog(`Click attempt ${retryCount + 1} failed completely.`);
|
|
|
|
|
retryCount++;
|
|
|
|
|
|
|
|
|
|
if (retryCount < MAX_RETRIES) {
|
|
|
|
|
debugLog(`Retrying click - attempt ${retryCount + 1} of ${MAX_RETRIES}`);
|
|
|
|
|
await page.waitForTimeout(RETRY_DELAY);
|
|
|
|
|
}
|
2025-01-21 14:24:50 +05:30
|
|
|
}
|
2025-02-03 23:50:29 +05:30
|
|
|
}
|
|
|
|
|
|
|
|
|
|
if (!clickSuccess) {
|
|
|
|
|
debugLog(`Load More clicking failed after ${MAX_RETRIES} attempts`);
|
2025-02-03 23:34:28 +05:30
|
|
|
return allResults;
|
|
|
|
|
}
|
2025-02-03 23:50:29 +05:30
|
|
|
|
|
|
|
|
// Wait for content to load and check scroll height
|
2025-02-03 23:34:28 +05:30
|
|
|
await page.waitForTimeout(2000);
|
|
|
|
|
await page.evaluate(() => window.scrollTo(0, document.body.scrollHeight));
|
|
|
|
|
await page.waitForTimeout(2000);
|
2025-02-03 23:50:29 +05:30
|
|
|
|
2025-02-03 23:34:28 +05:30
|
|
|
const currentHeight = await page.evaluate(() => document.body.scrollHeight);
|
2025-03-03 01:49:31 +05:30
|
|
|
const heightChanged = currentHeight !== previousHeight;
|
2025-02-03 23:34:28 +05:30
|
|
|
previousHeight = currentHeight;
|
|
|
|
|
|
2025-03-03 01:49:31 +05:30
|
|
|
await scrapeCurrentPage();
|
|
|
|
|
|
2025-07-12 01:19:44 +05:30
|
|
|
// const currentResultCount = allResults.length;
|
|
|
|
|
// const newItemsAdded = currentResultCount > previousResultCount;
|
2025-03-03 01:49:31 +05:30
|
|
|
|
2025-07-12 01:19:44 +05:30
|
|
|
// if (!newItemsAdded) {
|
|
|
|
|
// noNewItemsCounter++;
|
|
|
|
|
// debugLog(`No new items added after click (${noNewItemsCounter}/${MAX_NO_NEW_ITEMS})`);
|
2025-03-03 01:49:31 +05:30
|
|
|
|
2025-07-12 01:19:44 +05:30
|
|
|
// if (noNewItemsCounter >= MAX_NO_NEW_ITEMS) {
|
|
|
|
|
// debugLog(`Stopping after ${MAX_NO_NEW_ITEMS} clicks with no new items`);
|
|
|
|
|
// return allResults;
|
|
|
|
|
// }
|
|
|
|
|
// } else {
|
|
|
|
|
// noNewItemsCounter = 0;
|
|
|
|
|
// previousResultCount = currentResultCount;
|
|
|
|
|
// }
|
2025-03-03 01:49:31 +05:30
|
|
|
|
|
|
|
|
if (checkLimit()) return allResults;
|
|
|
|
|
|
|
|
|
|
if (!heightChanged) {
|
|
|
|
|
debugLog('No more items loaded after Load More');
|
|
|
|
|
return allResults;
|
2025-02-03 23:34:28 +05:30
|
|
|
}
|
2025-01-27 15:00:19 +05:30
|
|
|
}
|
2025-02-03 23:36:57 +05:30
|
|
|
}
|
2024-08-18 22:19:24 +05:30
|
|
|
|
2025-02-03 23:36:57 +05:30
|
|
|
default: {
|
2025-01-27 15:00:19 +05:30
|
|
|
await scrapeCurrentPage();
|
|
|
|
|
return allResults;
|
2025-02-03 23:36:57 +05:30
|
|
|
}
|
2025-01-21 14:24:50 +05:30
|
|
|
}
|
|
|
|
|
|
2025-01-27 15:00:19 +05:30
|
|
|
if (checkLimit()) break;
|
|
|
|
|
}
|
|
|
|
|
} catch (error) {
|
|
|
|
|
debugLog(`Fatal error: ${error.message}`);
|
|
|
|
|
return allResults;
|
2024-08-17 23:54:00 +05:30
|
|
|
}
|
2024-08-18 22:19:24 +05:30
|
|
|
|
2024-08-17 23:54:00 +05:30
|
|
|
return allResults;
|
|
|
|
|
}
|
|
|
|
|
|
2024-12-06 22:10:28 +05:30
|
|
|
private getMatchingActionId(workflow: Workflow, pageState: PageState, usedActions: string[]) {
|
|
|
|
|
for (let actionId = workflow.length - 1; actionId >= 0; actionId--) {
|
|
|
|
|
const step = workflow[actionId];
|
|
|
|
|
const isApplicable = this.applicable(step.where, pageState, usedActions);
|
|
|
|
|
console.log("-------------------------------------------------------------");
|
|
|
|
|
console.log(`Where:`, step.where);
|
|
|
|
|
console.log(`Page state:`, pageState);
|
|
|
|
|
console.log(`Match result: ${isApplicable}`);
|
|
|
|
|
console.log("-------------------------------------------------------------");
|
|
|
|
|
|
|
|
|
|
if (isApplicable) {
|
|
|
|
|
return actionId;
|
|
|
|
|
}
|
2024-12-30 23:38:38 +05:30
|
|
|
}
|
2024-12-06 22:10:28 +05:30
|
|
|
}
|
|
|
|
|
|
2024-12-30 23:38:38 +05:30
|
|
|
/**
 * Strips shadow-DOM piercing selectors (those containing ">>") from every
 * step's `where.selectors` list. The workflow is mutated in place and also
 * returned for call-chaining convenience.
 *
 * @param workflow Workflow whose selector lists should be cleaned.
 * @returns The same workflow instance, with piercing selectors removed.
 */
private removeShadowSelectors(workflow: Workflow) {
  for (const step of workflow) {
    const whereClause = step.where;
    // Only steps that actually carry a selector array need cleaning.
    if (!whereClause || !Array.isArray(whereClause.selectors)) {
      continue;
    }
    whereClause.selectors = whereClause.selectors.filter(
      (candidate) => !candidate.includes('>>'),
    );
  }

  return workflow;
}
|
|
|
|
|
|
2025-01-05 00:05:47 +05:30
|
|
|
/**
 * Removes special piercing selectors (":>>" and ">>") from every step's
 * `where.selectors` list. The workflow is mutated in place and returned.
 *
 * @param workflow Workflow whose selector lists should be cleaned.
 * @returns The same workflow instance, with special selectors removed.
 */
private removeSpecialSelectors(workflow: Workflow) {
  for (let actionId = workflow.length - 1; actionId >= 0; actionId--) {
    const step = workflow[actionId];

    if (step.where && Array.isArray(step.where.selectors)) {
      // A ":>>" selector always contains ">>" as a substring, so one check
      // covers both forms — the previous `includes(':>>') || includes('>>')`
      // test was redundant.
      step.where.selectors = step.where.selectors.filter(
        selector => !selector.includes('>>')
      );
    }
  }

  return workflow;
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
/**
 * Runs the workflow on the given page until no applicable action remains,
 * the page is closed, or the interpreter is stopped.
 *
 * Each iteration reads the page state, picks the last matching workflow
 * step, executes its `what` actions, and removes the step from the local
 * workflow copy so it cannot fire twice. Popup windows spawned by the page
 * are interpreted with the same (remaining) workflow via the concurrency
 * manager.
 *
 * @param p Page to interpret the workflow on.
 * @param workflow Workflow to execute; deep-copied so the caller's array
 *   is never mutated by the splicing below.
 */
private async runLoop(p: Page, workflow: Workflow) {
  // Work on a deep copy — steps are spliced out of it as they are used.
  let workflowCopy: Workflow = JSON.parse(JSON.stringify(workflow));

  workflowCopy = this.removeSpecialSelectors(workflowCopy);

  // apply ad-blocker to the current page
  try {
    await this.applyAdBlocker(p);
  } catch (error) {
    this.log(`Failed to apply ad-blocker: ${error.message}`, Level.ERROR);
  }

  const usedActions: string[] = [];
  let selectors: string[] = [];
  let lastAction = null;
  let actionId = -1;
  let repeatCount = 0;

  /**
   * Enables the interpreter functionality for popup windows.
   * User-requested concurrency should be entirely managed by the concurrency manager,
   * e.g. via `enqueueLinks`.
   */
  p.on('popup', (popup) => {
    this.concurrency.addJob(() => this.runLoop(popup, workflowCopy));
  });

  /* eslint no-constant-condition: ["warn", { "checkLoops": false }] */
  while (true) {
    // Checks whether the page was closed from outside,
    // or the workflow execution has been stopped via `interpreter.stop()`
    if (p.isClosed() || !this.stopper) {
      return;
    }

    try {
      await p.waitForLoadState();
    } catch (e) {
      await p.close();
      return;
    }

    let pageState = {};
    try {
      pageState = await this.getState(p, workflowCopy, selectors);
      // Selectors were consumed by getState; start the next iteration clean.
      selectors = [];
      console.log("Empty selectors:", selectors)
    } catch (e: any) {
      this.log('The browser has been closed.');
      return;
    }

    if (this.options.debug) {
      this.log(`Current state is: \n${JSON.stringify(pageState, null, 2)}`, Level.WARN);
    }

    actionId = this.getMatchingActionId(workflowCopy, pageState, usedActions);

    const action = workflowCopy[actionId];

    console.log("MATCHED ACTION:", action);
    console.log("MATCHED ACTION ID:", actionId);
    this.log(`Matched ${JSON.stringify(action?.where)}`, Level.LOG);

    if (action) { // action is matched
      if (this.options.debugChannel?.activeId) {
        this.options.debugChannel.activeId(actionId);
      }

      // Guard against infinite loops: bail out when the very same action
      // object keeps matching more than `maxRepeats` times in a row.
      repeatCount = action === lastAction ? repeatCount + 1 : 0;

      console.log("REPEAT COUNT", repeatCount);
      if (this.options.maxRepeats && repeatCount > this.options.maxRepeats) {
        return;
      }
      lastAction = action;

      try {
        console.log("Carrying out:", action.what);
        await this.carryOutSteps(p, action.what);
        usedActions.push(action.id ?? 'undefined');

        // Each step runs at most once — drop it from the working copy.
        workflowCopy.splice(actionId, 1);
        console.log(`Action with ID ${action.id} removed from the workflow copy.`);

        // Collect the selectors of the remaining steps (deduplicated) so the
        // next getState call can wait on them.
        const newSelectors = this.getSelectors(workflowCopy);
        newSelectors.forEach(selector => {
          if (!selectors.includes(selector)) {
            selectors.push(selector);
          }
        });
      } catch (e) {
        this.log(<Error>e, Level.ERROR);
      }
    } else {
      // No applicable action left — the workflow is finished on this page.
      //await this.disableAdBlocker(p);
      return;
    }
  }
}
|
|
|
|
|
|
2024-08-02 19:05:41 +05:30
|
|
|
/**
 * Makes sure the browser-side scraping helpers are available on the page.
 * Probes `window` for every helper function; when any is missing, the
 * scraper bundle is registered as an init script.
 *
 * NOTE(review): `addInitScript` only takes effect on subsequently created
 * documents (future navigations), not the currently loaded one — confirm
 * callers navigate before relying on the helpers.
 *
 * @param page Page whose helper functions should be verified/registered.
 */
private async ensureScriptsLoaded(page: Page) {
  const helpersPresent = await page.evaluate(
    () => typeof window.scrape === 'function'
      && typeof window.scrapeSchema === 'function'
      && typeof window.scrapeList === 'function'
      && typeof window.scrapeListAuto === 'function'
      && typeof window.scrollDown === 'function'
      && typeof window.scrollUp === 'function',
  );

  if (helpersPresent) {
    return;
  }

  await page.addInitScript({ path: path.join(__dirname, 'browserSide', 'scraper.js') });
}
|
|
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
/**
 * Spawns a browser context and runs the given workflow.
 *
 * Resolves after the playback is finished.
 * @param page Page to run the workflow on.
 * @param params Workflow-specific set of parameters
 * for the `{$param: nameofparam}` fields.
 */
|
|
|
|
|
public async run(page: Page, params?: ParamType): Promise<void> {
|
2024-10-27 18:16:48 +05:30
|
|
|
this.log('Starting the workflow.', Level.LOG);
|
|
|
|
|
const context = page.context();
|
2024-12-14 18:35:38 +05:30
|
|
|
|
|
|
|
|
page.setDefaultNavigationTimeout(100000);
|
2024-10-27 18:16:48 +05:30
|
|
|
|
|
|
|
|
// Check proxy settings from context options
|
|
|
|
|
const contextOptions = (context as any)._options;
|
|
|
|
|
const hasProxy = !!contextOptions?.proxy;
|
|
|
|
|
|
|
|
|
|
this.log(`Proxy settings: ${hasProxy ? `Proxy is configured...` : 'No proxy configured...'}`);
|
|
|
|
|
|
|
|
|
|
if (hasProxy) {
|
|
|
|
|
if (contextOptions.proxy.username) {
|
|
|
|
|
this.log(`Proxy authenticated...`);
|
|
|
|
|
}
|
|
|
|
|
}
|
2024-07-31 20:40:14 +05:30
|
|
|
if (this.stopper) {
|
|
|
|
|
throw new Error('This Interpreter is already running a workflow. To run another workflow, please, spawn another Interpreter.');
|
|
|
|
|
}
|
|
|
|
|
/**
|
|
|
|
|
* `this.workflow` with the parameters initialized.
|
|
|
|
|
*/
|
|
|
|
|
this.initializedWorkflow = Preprocessor.initWorkflow(this.workflow, params);
|
|
|
|
|
|
2024-10-27 18:16:48 +05:30
|
|
|
await this.ensureScriptsLoaded(page);
|
2024-08-02 19:05:41 +05:30
|
|
|
|
2024-07-31 20:40:14 +05:30
|
|
|
this.stopper = () => {
|
|
|
|
|
this.stopper = null;
|
|
|
|
|
};
|
|
|
|
|
|
2024-10-27 18:16:48 +05:30
|
|
|
this.concurrency.addJob(() => this.runLoop(page, this.initializedWorkflow!));
|
2024-07-31 20:40:14 +05:30
|
|
|
|
|
|
|
|
await this.concurrency.waitForCompletion();
|
|
|
|
|
|
|
|
|
|
this.stopper = null;
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
public async stop(): Promise<void> {
|
|
|
|
|
if (this.stopper) {
|
|
|
|
|
await this.stopper();
|
|
|
|
|
this.stopper = null;
|
|
|
|
|
} else {
|
|
|
|
|
throw new Error('Cannot stop, there is no running workflow!');
|
|
|
|
|
}
|
|
|
|
|
}
|
|
|
|
|
}
|