feat: add server-side crawl and search logic

This commit is contained in:
Rohit Rajan
2026-01-02 15:46:10 +05:30
parent 3689eb96bd
commit 9898dc410d
15 changed files with 1577 additions and 124 deletions

View File

@@ -154,7 +154,7 @@ export const RobotConfigPage: React.FC<RobotConfigPageProps> = ({
)}
<Box sx={{ display: 'flex', gap: 2 }}>
/* {showCancelButton && (
{/* {showCancelButton && (
<Button
variant="outlined"
onClick={handleBack}
@@ -164,7 +164,7 @@ export const RobotConfigPage: React.FC<RobotConfigPageProps> = ({
}} >
{cancelButtonText || t("buttons.cancel")}
</Button>
)} */
)} */}
{showSaveButton && onSave && (
<Button
variant="contained"

View File

@@ -1051,7 +1051,7 @@ export const RobotEditPage = ({ handleStart }: RobotSettingsProps) => {
})),
credentials: credentialsForPayload,
targetUrl: targetUrl,
workflow: robot.recording.workflow,
workflow: updatedWorkflow,
};
const success = await updateRecording(robot.recording_meta.id, payload);
@@ -1101,13 +1101,15 @@ export const RobotEditPage = ({ handleStart }: RobotSettingsProps) => {
style={{ marginBottom: "20px" }}
/>
<TextField
label={t("robot_duplication.fields.target_url")}
key={t("robot_duplication.fields.target_url")}
value={getTargetUrl() || ""}
onChange={(e) => handleTargetUrlChange(e.target.value)}
style={{ marginBottom: "20px" }}
/>
{robot.recording_meta.type !== 'search' && (
<TextField
label={t("robot_duplication.fields.target_url")}
key={t("robot_duplication.fields.target_url")}
value={getTargetUrl() || ""}
onChange={(e) => handleTargetUrlChange(e.target.value)}
style={{ marginBottom: "20px" }}
/>
)}
{renderCrawlConfigFields()}
{renderSearchConfigFields()}