feat: recorder revamp server changes

Rohit Rajan
2025-10-21 00:43:08 +05:30
parent eafe11aef4
commit 5be2b3175b
9 changed files with 879 additions and 369 deletions

View File

@@ -726,34 +726,108 @@ export class WorkflowGenerator {
/**
* Generates a pair for the custom action event.
*
* @param action The type of the custom action.
* @param actionId The unique identifier for this action, used to update an existing entry in place.
* @param settings The settings of the custom action (may include name and actionId).
* @param page The page to use for obtaining the needed data.
*/
public customAction = async (action: CustomActions, actionId: string, settings: any, page: Page) => {
try {
let actionSettings = settings;
let actionName: string | undefined;
if (settings && !Array.isArray(settings)) {
actionName = settings.name;
actionSettings = JSON.parse(JSON.stringify(settings));
delete actionSettings.name;
}
const pair: WhereWhatPair = {
where: { url: this.getBestUrl(page.url()) },
what: [{
action,
args: actionSettings
? Array.isArray(actionSettings)
? actionSettings
: [actionSettings]
: [],
...(actionName ? { name: actionName } : {}),
...(actionId ? { actionId } : {}),
}],
};
if (actionId) {
const existingIndex = this.workflowRecord.workflow.findIndex(
(workflowPair) =>
Array.isArray(workflowPair.what) &&
workflowPair.what.some((whatItem: any) => whatItem.actionId === actionId)
);
if (existingIndex !== -1) {
const existingPair = this.workflowRecord.workflow[existingIndex];
const existingAction = existingPair.what.find((whatItem: any) => whatItem.actionId === actionId);
const updatedAction = {
...existingAction,
action,
args: Array.isArray(actionSettings)
? actionSettings
: [actionSettings],
name: actionName || existingAction?.name || '',
actionId,
};
this.workflowRecord.workflow[existingIndex] = {
where: JSON.parse(JSON.stringify(existingPair.where)),
what: existingPair.what.map((whatItem: any) =>
whatItem.actionId === actionId ? updatedAction : whatItem
),
};
if (action === 'scrapeSchema' && actionName) {
this.workflowRecord.workflow.forEach((pair, index) => {
pair.what.forEach((whatItem: any, whatIndex: number) => {
if (whatItem.action === 'scrapeSchema' && whatItem.actionId !== actionId) {
this.workflowRecord.workflow[index].what[whatIndex] = {
...whatItem,
name: actionName
};
}
});
});
}
} else {
await this.addPairToWorkflowAndNotifyClient(pair, page);
logger.log("debug", `Added new workflow action: ${action} with actionId: ${actionId}`);
}
} else {
await this.addPairToWorkflowAndNotifyClient(pair, page);
logger.log("debug", `Added new workflow action: ${action} without actionId`);
}
if (this.generatedData.lastUsedSelector) {
const elementInfo = await this.getLastUsedSelectorInfo(
page,
this.generatedData.lastUsedSelector
);
this.socket.emit('decision', {
pair,
actionType: 'customAction',
lastData: {
selector: this.generatedData.lastUsedSelector,
action: this.generatedData.lastAction,
tagName: elementInfo.tagName,
innerText: elementInfo.innerText,
},
});
}
} catch (e) {
const { message } = e as Error;
logger.log("warn", `Error handling customAction: ${message}`);
}
};
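// Net effect, sketched (the generator instance, action IDs, and settings below are
// illustrative, not part of this diff): an actionId already present in the workflow
// updates that entry in place; otherwise the pair is appended as before.
//
//   await generator.customAction('scrapeSchema', 'a1', { name: 'Product Details' }, page); // appends
//   await generator.customAction('scrapeSchema', 'a1', { name: 'Product Info' }, page);    // updates in place, renames sibling scrapeSchema actions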
/**
@@ -810,6 +884,48 @@ export class WorkflowGenerator {
}
};
/**
* Removes an action with the given actionId from the workflow.
* Only removes the specific action from the what array, not the entire pair.
* If the what array becomes empty after removal, then the entire pair is removed.
* @param actionId The actionId of the action to remove
* @returns boolean indicating whether an action was removed
*/
public removeAction = (actionId: string): boolean => {
let actionWasRemoved = false;
this.workflowRecord.workflow = this.workflowRecord.workflow
.map((pair) => {
const filteredWhat = pair.what.filter(
(whatItem: any) => whatItem.actionId !== actionId
);
if (filteredWhat.length < pair.what.length) {
actionWasRemoved = true;
if (filteredWhat.length > 0) {
return {
...pair,
what: filteredWhat
};
}
return null;
}
return pair;
})
.filter((pair) => pair !== null) as WhereWhatPair[]; // Remove null entries
if (actionWasRemoved) {
logger.log("info", `Action with actionId ${actionId} removed from workflow`);
} else {
logger.log("debug", `No action found with actionId ${actionId}`);
}
return actionWasRemoved;
};
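// Pruning behavior, illustrated on hypothetical workflow contents:
//   [{ where, what: [{ actionId: 'a1' }, { actionId: 'a2' }] }]  --removeAction('a2')-->  [{ where, what: [{ actionId: 'a1' }] }]
//   [{ where, what: [{ actionId: 'a2' }] }]                      --removeAction('a2')-->  []   (the emptied pair is dropped)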
/**
* Updates the socket used for communication with the client.
* @param socket The socket to be used for communication.

View File

@@ -91,13 +91,16 @@ export class WorkflowInterpreter {
* Storage for different types of serializable data
*/
public serializableDataByType: {
scrapeSchema: Record<string, any>;
scrapeList: Record<string, any>;
[key: string]: any;
} = {
scrapeSchema: {},
scrapeList: {},
};
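// Illustrative shape after this change: outputs are keyed by action name rather than array position, e.g.
//   { scrapeSchema: { "Product Details": [{ title: "Mug" }] }, scrapeList: { "Reviews": [{ author: "Ana" }] } }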
private currentActionName: string | null = null;
/**
* Track the current action type being processed
*/
@@ -106,7 +109,7 @@ export class WorkflowInterpreter {
/**
* An array of all the binary data extracted from the run.
*/
public binaryData: { name: string; mimeType: string; data: string }[] = [];
/**
* Track current scrapeList index
@@ -259,14 +262,19 @@ export class WorkflowInterpreter {
}
},
binaryCallback: async (data: string, mimetype: string) => {
// For editor mode, we don't have the name yet, so use a timestamp-based name
const binaryItem = {
name: `Screenshot ${Date.now()}`,
mimeType: mimetype,
data: JSON.stringify(data)
};
this.binaryData.push(binaryItem);
// Persist binary data to database
await this.persistBinaryDataToDatabase(binaryItem);
this.socket.emit('binaryCallback', {
data,
mimetype,
type: 'captureScreenshot'
});
@@ -364,9 +372,10 @@ export class WorkflowInterpreter {
this.breakpoints = [];
this.interpretationResume = null;
this.currentActionType = null;
this.currentActionName = null;
this.serializableDataByType = {
scrapeSchema: {},
scrapeList: {},
};
this.binaryData = [];
this.currentScrapeListIndex = 0;
@@ -409,7 +418,7 @@ export class WorkflowInterpreter {
* Persists binary data to database in real-time
* @private
*/
private persistBinaryDataToDatabase = async (binaryItem: { name: string; mimeType: string; data: string }): Promise<void> => {
if (!this.currentRunId) {
logger.log('debug', 'No run ID available for binary data persistence');
return;
@@ -422,22 +431,29 @@ export class WorkflowInterpreter {
return;
}
const currentBinaryOutput =
run.binaryOutput && typeof run.binaryOutput === 'object'
? JSON.parse(JSON.stringify(run.binaryOutput))
: {};
const baseName = binaryItem.name?.trim() || `Screenshot ${Object.keys(currentBinaryOutput).length + 1}`;
let uniqueName = baseName;
let counter = 1;
while (currentBinaryOutput[uniqueName]) {
uniqueName = `${baseName} (${counter++})`;
}
const updatedBinaryOutput = {
...currentBinaryOutput,
[uniqueName]: binaryItem,
};
await run.update({
binaryOutput: updatedBinaryOutput
});
logger.log('debug', `Persisted binary data for run ${this.currentRunId}: ${binaryItem.name} (${binaryItem.mimeType})`);
} catch (error: any) {
logger.log('error', `Failed to persist binary data in real-time for run ${this.currentRunId}: ${error.message}`);
}
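The naming logic above reads as a small pure function; a minimal sketch distilled from this hunk (the helper name is an assumption, not code in the commit):

function uniqueScreenshotName(existing: Record<string, unknown>, requested?: string): string {
// Fall back to a positional default, then suffix with a counter until the key is free.
const base = requested?.trim() || `Screenshot ${Object.keys(existing).length + 1}`;
let name = base;
let counter = 1;
while (name in existing) {
name = `${base} (${counter++})`;
}
return name;
}

// uniqueScreenshotName({ "Screenshot 1": {} }, "Screenshot 1") === "Screenshot 1 (1)"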
@@ -478,41 +494,101 @@ export class WorkflowInterpreter {
},
incrementScrapeListIndex: () => {
this.currentScrapeListIndex++;
},
setActionName: (name: string) => {
this.currentActionName = name;
},
},
serializableCallback: async (data: any) => {
try {
if (!data || typeof data !== "object") return;
if (!this.currentActionType && Array.isArray(data) && data.length > 0) {
const first = data[0];
if (first && Object.keys(first).some(k => k.toLowerCase().includes("label") || k.toLowerCase().includes("text"))) {
this.currentActionType = "scrapeSchema";
}
}
let typeKey = this.currentActionType || "unknown";
if (this.currentActionType === "scrapeList") {
typeKey = "scrapeList";
} else if (this.currentActionType === "scrapeSchema") {
typeKey = "scrapeSchema";
}
if (this.currentActionType === "scrapeList" && data.scrapeList) {
data = data.scrapeList;
} else if (this.currentActionType === "scrapeSchema" && data.scrapeSchema) {
data = data.scrapeSchema;
}
let actionName = this.currentActionName || "";
if (!actionName) {
if (!Array.isArray(data) && Object.keys(data).length === 1) {
const soleKey = Object.keys(data)[0];
const soleValue = data[soleKey];
if (Array.isArray(soleValue) || typeof soleValue === "object") {
actionName = soleKey;
data = soleValue;
}
}
}
if (!actionName) {
actionName = "Unnamed Action";
}
const flattened = Array.isArray(data)
? data
: (data?.List ?? (data && typeof data === 'object' ? Object.values(data).flat?.() ?? data : []));
if (!this.serializableDataByType[typeKey]) {
this.serializableDataByType[typeKey] = {};
}
this.serializableDataByType[typeKey][actionName] = flattened;
await this.persistDataToDatabase(typeKey, { [actionName]: flattened });
this.socket.emit("serializableCallback", {
type: typeKey,
name: actionName,
data: flattened,
});
this.currentActionType = null;
this.currentActionName = null;
} catch (err: any) {
logger.log('error', `serializableCallback handler failed: ${err.message}`);
}
},
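// Clients now receive a discriminated payload rather than a bare data array, e.g.
//   { type: "scrapeList", name: "Reviews", data: [{ author: "Ana" }] }
// (type, name, and values here are illustrative).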
binaryCallback: async (payload: { name: string; data: Buffer; mimeType: string }) => {
try {
const { name, data, mimeType } = payload;
const base64Data = data.toString("base64");
const binaryItem = {
name,
mimeType,
data: base64Data
};
this.binaryData.push(binaryItem);
await this.persistBinaryDataToDatabase(binaryItem);
this.socket.emit("binaryCallback", {
name,
data: base64Data,
mimeType
});
} catch (err: any) {
logger.log("error", `binaryCallback handler failed: ${err.message}`);
}
}
}
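On the receiving end a listener would decode the base64 payload back into bytes; a minimal sketch, assuming a socket.io-client consumer (the handler below is not part of this diff):

import { Socket } from "socket.io-client";

function onBinary(socket: Socket) {
socket.on("binaryCallback", (payload: { name: string; data: string; mimeType: string }) => {
// Node consumer; in a browser, use atob(payload.data) instead of Buffer.
const bytes = Buffer.from(payload.data, "base64");
console.log(`received ${payload.name} (${payload.mimeType}), ${bytes.length} bytes`);
});
}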
@@ -542,20 +618,13 @@ export class WorkflowInterpreter {
const result = {
log: this.debugMessages,
result: status,
scrapeSchemaOutput: this.serializableDataByType.scrapeSchema,
scrapeListOutput: this.serializableDataByType.scrapeList,
binaryOutput: this.binaryData.reduce<Record<string, { data: string; mimeType: string }>>((acc, item) => {
const key = item.name || `Screenshot ${Object.keys(acc).length + 1}`;
acc[key] = { data: item.data, mimeType: item.mimeType };
return acc;
}, {})
}
logger.log('debug', `Interpretation finished`);
@@ -642,19 +711,37 @@ export class WorkflowInterpreter {
const currentSerializableOutput = run.serializableOutput ?
JSON.parse(JSON.stringify(run.serializableOutput)) :
{ scrapeSchema: [], scrapeList: [] };
if (Array.isArray(currentSerializableOutput.scrapeList)) {
currentSerializableOutput.scrapeList = {};
}
if (Array.isArray(currentSerializableOutput.scrapeSchema)) {
currentSerializableOutput.scrapeSchema = {};
}
let hasUpdates = false;
const mergeLists = (target: Record<string, any>, updates: Record<string, any>) => {
for (const [key, val] of Object.entries(updates)) {
const flattened = Array.isArray(val)
? val
: (val?.List ?? (val && typeof val === 'object' ? Object.values(val).flat?.() ?? val : []));
target[key] = flattened;
}
};
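// mergeLists accepts either a plain array or a wrapped object per named key (values illustrative):
//   { Reviews: [{ a: 1 }] }            -> target.Reviews = [{ a: 1 }]
//   { Reviews: { List: [{ a: 1 }] } }  -> target.Reviews = [{ a: 1 }]   (unwraps .List)
//   { Reviews: { x: [1], y: [2] } }    -> target.Reviews = [1, 2]       (flattens object values)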
for (const item of batchToProcess) {
if (item.actionType === 'scrapeSchema') {
if (!currentSerializableOutput.scrapeSchema || typeof currentSerializableOutput.scrapeSchema !== 'object') {
currentSerializableOutput.scrapeSchema = {};
}
mergeLists(currentSerializableOutput.scrapeSchema, item.data);
hasUpdates = true;
} else if (item.actionType === 'scrapeList') {
if (!currentSerializableOutput.scrapeList || typeof currentSerializableOutput.scrapeList !== 'object') {
currentSerializableOutput.scrapeList = {};
}
mergeLists(currentSerializableOutput.scrapeList, item.data);
hasUpdates = true;
}
}

View File

@@ -12,8 +12,8 @@ interface AirtableUpdateTask {
}
interface SerializableOutput {
scrapeSchema?: Record<string, any[]>;
scrapeList?: Record<string, any[]>;
}
const MAX_RETRIES = 3;
@@ -48,47 +48,94 @@ async function refreshAirtableToken(refreshToken: string) {
function mergeRelatedData(serializableOutput: SerializableOutput, binaryOutput: Record<string, string>) {
const allRecords: Record<string, any>[] = [];
const schemaData: Array<{ Group: string; Field: string; Value: any }> = [];
const listData: any[] = [];
const screenshotData: Array<{key: string, url: string}> = [];
// Collect schema data
if (serializableOutput.scrapeSchema) {
if (Array.isArray(serializableOutput.scrapeSchema)) {
for (const schemaArray of serializableOutput.scrapeSchema) {
if (!Array.isArray(schemaArray)) continue;
for (const schemaItem of schemaArray) {
Object.entries(schemaItem || {}).forEach(([key, value]) => {
if (key && key.trim() !== "" && value !== null && value !== undefined && value !== "") {
schemaData.push({ Group: "Default", Field: key, Value: value });
}
});
}
}
} else if (typeof serializableOutput.scrapeSchema === "object") {
for (const [groupName, schemaArray] of Object.entries(serializableOutput.scrapeSchema)) {
if (!Array.isArray(schemaArray)) continue;
for (const schemaItem of schemaArray) {
Object.entries(schemaItem || {}).forEach(([fieldName, value]) => {
if (fieldName && fieldName.trim() !== "" && value !== null && value !== undefined && value !== "") {
schemaData.push({
Group: groupName,
Field: fieldName,
Value: value,
});
}
});
}
}
}
}
// Collect list data
if (serializableOutput.scrapeList) {
if (Array.isArray(serializableOutput.scrapeList)) {
for (const listArray of serializableOutput.scrapeList) {
if (!Array.isArray(listArray)) continue;
listArray.forEach((listItem) => {
const hasContent = Object.values(listItem || {}).some(
(value) => value !== null && value !== undefined && value !== ""
);
if (hasContent) listData.push(listItem);
});
}
} else if (typeof serializableOutput.scrapeList === "object") {
for (const [listName, listArray] of Object.entries(serializableOutput.scrapeList)) {
if (!Array.isArray(listArray)) continue;
listArray.forEach((listItem) => {
const hasContent = Object.values(listItem || {}).some(
(value) => value !== null && value !== undefined && value !== ""
);
if (hasContent) listData.push({ List: listName, ...listItem });
});
}
}
}
// Collect screenshot data
// if (binaryOutput && Object.keys(binaryOutput).length > 0) {
// Object.entries(binaryOutput).forEach(([key, rawValue]: [string, any]) => {
// if (!key || key.trim() === "") return;
// let urlString = "";
// // Case 1: old format (string URL)
// if (typeof rawValue === "string") {
// urlString = rawValue;
// }
// // Case 2: new format (object with { url?, data?, mimeType? })
// else if (rawValue && typeof rawValue === "object") {
// const valueObj = rawValue as { url?: string; data?: string; mimeType?: string };
// if (typeof valueObj.url === "string") {
// urlString = valueObj.url;
// } else if (typeof valueObj.data === "string") {
// const mime = valueObj.mimeType || "image/png";
// urlString = `data:${mime};base64,${valueObj.data}`;
// }
// }
// if (typeof urlString === "string" && urlString.trim() !== "") {
// screenshotData.push({ key, url: urlString });
// }
// });
// }
// Mix all data types together to create consecutive records
const maxLength = Math.max(schemaData.length, listData.length, screenshotData.length);
@@ -97,8 +144,9 @@ function mergeRelatedData(serializableOutput: SerializableOutput, binaryOutput:
const record: Record<string, any> = {};
if (i < schemaData.length) {
record.Group = schemaData[i].Group;
record.Label = schemaData[i].Field;
record.Value = schemaData[i].Value;
}
if (i < listData.length) {
@@ -120,20 +168,15 @@ function mergeRelatedData(serializableOutput: SerializableOutput, binaryOutput:
}
for (let i = maxLength; i < schemaData.length; i++) {
allRecords.push({ Label: schemaData[i].Field, Value: schemaData[i].Value });
}
for (let i = maxLength; i < listData.length; i++) {
allRecords.push(listData[i]);
}
for (let i = maxLength; i < screenshotData.length; i++) {
allRecords.push({
Key: screenshotData[i].key,
Screenshot: screenshotData[i].url,
});
}
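With grouped schema fields and named lists, the interleaved records come out roughly like this (values illustrative):

const exampleRecords = [
{ Group: "Product Details", Label: "title", Value: "Mug", List: "Reviews", author: "Ana" },
{ Group: "Product Details", Label: "price", Value: "$9" },
];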

View File

@@ -49,25 +49,34 @@ export async function updateGoogleSheet(robotId: string, runId: string) {
const serializableOutput = plainRun.serializableOutput as SerializableOutput;
if (serializableOutput) {
if (serializableOutput.scrapeSchema && typeof serializableOutput.scrapeSchema === "object") {
for (const [groupName, schemaArray] of Object.entries(serializableOutput.scrapeSchema)) {
if (!Array.isArray(schemaArray) || schemaArray.length === 0) continue;
await processOutputType(
robotId,
spreadsheetId,
`Schema - ${groupName}`,
schemaArray,
plainRobot
);
}
}
if (serializableOutput.scrapeList && typeof serializableOutput.scrapeList === "object") {
for (const [listName, listArray] of Object.entries(serializableOutput.scrapeList)) {
if (!Array.isArray(listArray) || listArray.length === 0) continue;
await processOutputType(
robotId,
spreadsheetId,
`List - ${listName}`,
listArray,
plainRobot
);
}
}
}
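// With named outputs, each schema group and each list now lands on its own sheet,
// e.g. "Schema - Product Details" and "List - Reviews" (names illustrative).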
if (plainRun.binaryOutput && Object.keys(plainRun.binaryOutput).length > 0) {
@@ -102,30 +111,27 @@ async function processOutputType(
outputData: any[],
robotConfig: any
) {
const data = outputData;
const sheetName = outputType;
if (!Array.isArray(data) || data.length === 0) {
console.log(`No data to write for ${sheetName}. Skipping.`);
return;
}
await ensureSheetExists(spreadsheetId, sheetName, robotConfig);
const formattedData = data.map(item => {
const flatRow: Record<string, any> = {};
for (const [key, value] of Object.entries(item || {})) {
flatRow[key] =
typeof value === "object" && value !== null ? JSON.stringify(value) : value;
}
return flatRow;
});
await writeDataToSheet(robotId, spreadsheetId, formattedData, sheetName, robotConfig);
console.log(`Data written to ${sheetName} sheet for ${outputType} data`);
}
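// Flattening example (illustrative): nested values are JSON-serialized so every cell stays scalar.
//   { title: "Mug", meta: { color: "red" } }  ->  { title: "Mug", meta: "{\"color\":\"red\"}" }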
async function ensureSheetExists(spreadsheetId: string, sheetName: string, robotConfig: any) {

View File

@@ -4,7 +4,7 @@ import stealthPlugin from 'puppeteer-extra-plugin-stealth';
import { io, Socket } from "socket.io-client";
import { createRemoteBrowserForRun, destroyRemoteBrowser } from '../../browser-management/controller';
import logger from '../../logger';
import { browserPool, io as serverIo } from "../../server";
import { googleSheetUpdateTasks, processGoogleSheetUpdates } from "../integrations/gsheet";
import Robot from "../../models/Robot";
import Run from "../../models/Run";
@@ -46,7 +46,7 @@ async function createWorkflowAndStoreMetadata(id: string, userId: string) {
};
}
const browserId = createRemoteBrowserForRun(userId);
const runId = uuid();
const run = await Run.create({
@@ -63,10 +63,30 @@ async function createWorkflowAndStoreMetadata(id: string, userId: string) {
runByScheduleId: uuid(),
serializableOutput: {},
binaryOutput: {},
retryCount: 0
});
const plainRun = run.toJSON();
try {
const runScheduledData = {
runId: plainRun.runId,
robotMetaId: plainRun.robotMetaId,
robotName: plainRun.name,
status: 'scheduled',
startedAt: plainRun.startedAt,
runByUserId: plainRun.runByUserId,
runByScheduleId: plainRun.runByScheduleId,
runByAPI: plainRun.runByAPI || false,
browserId: plainRun.browserId
};
serverIo.of('/queued-run').to(`user-${userId}`).emit('run-scheduled', runScheduledData);
logger.log('info', `Scheduled run notification sent for run: ${plainRun.runId} to user-${userId}`);
} catch (socketError: any) {
logger.log('warn', `Failed to send run-scheduled notification for run ${plainRun.runId}: ${socketError.message}`);
}
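// A client subscribed to the /queued-run namespace observes this event; sketch (listener assumed, not part of this diff):
//   io(`${serverUrl}/queued-run`).on("run-scheduled", (d) => console.log(d.runId, d.status)); // "<runId> scheduled"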
return {
browserId,
runId: plainRun.runId,
@@ -83,6 +103,29 @@ async function createWorkflowAndStoreMetadata(id: string, userId: string) {
}
}
async function triggerIntegrationUpdates(runId: string, robotMetaId: string): Promise<void> {
try {
googleSheetUpdateTasks[runId] = {
robotId: robotMetaId,
runId: runId,
status: 'pending',
retries: 5,
};
airtableUpdateTasks[runId] = {
robotId: robotMetaId,
runId: runId,
status: 'pending',
retries: 5,
};
processAirtableUpdates().catch(err => logger.log('error', `Airtable update error: ${err.message}`));
processGoogleSheetUpdates().catch(err => logger.log('error', `Google Sheets update error: ${err.message}`));
} catch (err: any) {
logger.log('error', `Failed to update integrations for run: ${runId}: ${err.message}`);
}
}
function AddGeneratedFlags(workflow: WorkflowFile) {
const copy = JSON.parse(JSON.stringify(workflow));
for (let i = 0; i < workflow.workflow.length; i++) {
@@ -95,6 +138,8 @@ function AddGeneratedFlags(workflow: WorkflowFile) {
};
async function executeRun(id: string, userId: string) {
let browser: any = null;
try {
const run = await Run.findOne({ where: { runId: id } });
if (!run) {
@@ -133,6 +178,21 @@ async function executeRun(id: string, userId: string) {
log: plainRun.log ? `${plainRun.log}\nMax retries exceeded (3/3) - Run failed after multiple attempts.` : `Max retries exceeded (3/3) - Run failed after multiple attempts.`
});
try {
const failureSocketData = {
runId: plainRun.runId,
robotMetaId: plainRun.robotMetaId,
robotName: recording ? recording.recording_meta.name : 'Unknown Robot',
status: 'failed',
finishedAt: new Date().toLocaleString()
};
serverIo.of(run.browserId).emit('run-completed', failureSocketData);
serverIo.of('/queued-run').to(`user-${userId}`).emit('run-completed', failureSocketData);
} catch (socketError: any) {
logger.log('warn', `Failed to emit failure event in main catch: ${socketError.message}`);
}
return {
success: false,
error: 'Max retries exceeded'
@@ -149,7 +209,22 @@ async function executeRun(id: string, userId: string) {
plainRun.status = 'running';
try {
const runStartedData = {
runId: plainRun.runId,
robotMetaId: plainRun.robotMetaId,
robotName: recording ? recording.recording_meta.name : 'Unknown Robot',
status: 'running',
startedAt: plainRun.startedAt
};
serverIo.of('/queued-run').to(`user-${userId}`).emit('run-started', runStartedData);
logger.log('info', `Run started notification sent for run: ${plainRun.runId} to user-${userId}`);
} catch (socketError: any) {
logger.log('warn', `Failed to send run-started notification for run ${plainRun.runId}: ${socketError.message}`);
}
browser = browserPool.getRemoteBrowser(plainRun.browserId);
if (!browser) {
throw new Error('Could not access browser');
}
@@ -168,56 +243,52 @@ async function executeRun(id: string, userId: string) {
workflow, currentPage, (newPage: Page) => currentPage = newPage, plainRun.interpreterSettings
);
const finalRun = await Run.findByPk(run.id);
const categorizedOutput = {
scrapeSchema: finalRun?.serializableOutput?.scrapeSchema || {},
scrapeList: finalRun?.serializableOutput?.scrapeList || {},
};
await destroyRemoteBrowser(plainRun.browserId, userId);
await run.update({
status: 'success',
finishedAt: new Date().toLocaleString(),
log: interpretationInfo.log.join('\n'),
});
// Upload binary output to MinIO and update run with MinIO URLs
const updatedRun = await Run.findOne({ where: { runId: id } });
if (updatedRun && updatedRun.binaryOutput && Object.keys(updatedRun.binaryOutput).length > 0) {
try {
const binaryService = new BinaryOutputService('maxun-run-screenshots');
await binaryService.uploadAndStoreBinaryOutput(updatedRun, updatedRun.binaryOutput);
logger.log('info', `Uploaded binary output to MinIO for scheduled run ${id}`);
} catch (minioError: any) {
logger.log('error', `Failed to upload binary output to MinIO for scheduled run ${id}: ${minioError.message}`);
}
}
// Get metrics from persisted data for analytics and webhooks
let totalSchemaItemsExtracted = 0;
let totalListItemsExtracted = 0;
let extractedScreenshotsCount = 0;
if (categorizedOutput) {
if (categorizedOutput.scrapeSchema) {
Object.values(categorizedOutput.scrapeSchema).forEach((schemaResult: any) => {
if (Array.isArray(schemaResult)) {
totalSchemaItemsExtracted += schemaResult.length;
} else if (schemaResult && typeof schemaResult === 'object') {
totalSchemaItemsExtracted += 1;
}
});
}
if (categorizedOutput.scrapeList) {
Object.values(categorizedOutput.scrapeList).forEach((listResult: any) => {
if (Array.isArray(listResult)) {
totalListItemsExtracted += listResult.length;
}
});
}
}
if (run.binaryOutput) {
extractedScreenshotsCount = Object.keys(run.binaryOutput).length;
}
const totalRowsExtracted = totalSchemaItemsExtracted + totalListItemsExtracted;
@@ -234,6 +305,21 @@ async function executeRun(id: string, userId: string) {
}
);
try {
const completionData = {
runId: plainRun.runId,
robotMetaId: plainRun.robotMetaId,
robotName: recording.recording_meta.name,
status: 'success',
finishedAt: new Date().toLocaleString()
};
serverIo.of(plainRun.browserId).emit('run-completed', completionData);
serverIo.of('/queued-run').to(`user-${userId}`).emit('run-completed', completionData);
} catch (emitError: any) {
logger.log('warn', `Failed to emit success event: ${emitError.message}`);
}
const webhookPayload = {
robot_id: plainRun.robotMetaId,
run_id: plainRun.runId,
@@ -242,16 +328,20 @@ async function executeRun(id: string, userId: string) {
started_at: plainRun.startedAt,
finished_at: new Date().toLocaleString(),
extracted_data: {
captured_texts: Object.keys(categorizedOutput.scrapeSchema || {}).length > 0
? Object.entries(categorizedOutput.scrapeSchema).reduce((acc, [name, value]) => {
acc[name] = Array.isArray(value) ? value : [value];
return acc;
}, {} as Record<string, any[]>)
: {},
captured_lists: categorizedOutput.scrapeList,
captured_texts_count: totalSchemaItemsExtracted,
captured_lists_count: totalListItemsExtracted,
screenshots_count: extractedScreenshotsCount
},
metadata: {
browser_id: plainRun.browserId,
user_id: userId,
}
};
@@ -262,26 +352,7 @@ async function executeRun(id: string, userId: string) {
logger.log('error', `Failed to send webhooks for run ${plainRun.runId}: ${webhookError.message}`);
}
await triggerIntegrationUpdates(plainRun.runId, plainRun.robotMetaId);
return true;
} catch (error: any) {
logger.log('info', `Error while running a robot with id: ${id} - ${error.message}`);
@@ -320,6 +391,21 @@ async function executeRun(id: string, userId: string) {
} catch (webhookError: any) {
logger.log('error', `Failed to send failure webhooks for run ${run.runId}: ${webhookError.message}`);
}
try {
const failureSocketData = {
runId: run.runId,
robotMetaId: run.robotMetaId,
robotName: recording ? recording.recording_meta.name : 'Unknown Robot',
status: 'failed',
finishedAt: new Date().toLocaleString()
};
serverIo.of(run.browserId).emit('run-completed', failureSocketData);
serverIo.of('/queued-run').to(`user-${userId}`).emit('run-completed', failureSocketData);
} catch (socketError: any) {
logger.log('warn', `Failed to emit failure event in main catch: ${socketError.message}`);
}
}
capture(
'maxun-oss-run-created-scheduled',