diff --git a/Dockerfile.backend b/Dockerfile.backend index d0143a3d..fae0d146 100644 --- a/Dockerfile.backend +++ b/Dockerfile.backend @@ -1,26 +1,28 @@ -FROM --platform=$BUILDPLATFORM node:20-slim +FROM node:20-slim # Set working directory WORKDIR /app COPY .sequelizerc .sequelizerc -COPY .env .env # Install node dependencies COPY package*.json ./ COPY src ./src -COPY public ./public +COPY public ./public COPY server ./server COPY tsconfig.json ./ COPY server/tsconfig.json ./server/ -# COPY server/start.sh ./ +# COPY server/start.sh ./ # Install dependencies RUN npm install --legacy-peer-deps +# Build TypeScript server +RUN npm run build:server + # Expose backend port EXPOSE ${BACKEND_PORT:-8080} -# Run migrations & start backend using start script +# Run migrations & start backend using plain node CMD ["npm", "run", "server"] # CMD ["sh", "-c", "npm run migrate && npm run server"] \ No newline at end of file diff --git a/Dockerfile.frontend b/Dockerfile.frontend index 9cb25d6f..b75bb1ab 100644 --- a/Dockerfile.frontend +++ b/Dockerfile.frontend @@ -1,4 +1,4 @@ -FROM --platform=$BUILDPLATFORM node:18-alpine AS builder +FROM node:18-alpine AS builder WORKDIR /app diff --git a/package.json b/package.json index 41b1255c..1446418b 100644 --- a/package.json +++ b/package.json @@ -83,12 +83,11 @@ "winston": "^3.5.1" }, "scripts": { - "start": "concurrently -k \"npm run server\" \"npm run client\"", - "server": "cross-env NODE_OPTIONS='--max-old-space-size=8000' nodemon server/src/server.ts", + "start": "npm run build:server && concurrently -k \"npm run server\" \"npm run client\"", + "server": "cross-env NODE_OPTIONS='--max-old-space-size=512' node server/dist/server/src/server.js", "client": "vite", "build": "vite build", "build:server": "tsc -p server/tsconfig.json", - "start:server": "cross-env NODE_OPTIONS='--max-old-space-size=8000' server/dist/server/src/server.js", "preview": "vite preview", "lint": "./node_modules/.bin/eslint .", "migrate": "sequelize-cli 
db:migrate", diff --git a/server/docker-entrypoint.sh b/server/docker-entrypoint.sh index ad670faf..9101cb0f 100644 --- a/server/docker-entrypoint.sh +++ b/server/docker-entrypoint.sh @@ -27,7 +27,7 @@ wait_for_postgres() { wait_for_postgres # Run the application with migrations before startup -NODE_OPTIONS="--max-old-space-size=4096" node -e "require('./server/src/db/migrate')().then(() => { console.log('Migration process completed.'); })" +NODE_OPTIONS="--max-old-space-size=4096" node -e "require('./server/dist/server/src/db/migrate')().then(() => { console.log('Migration process completed.'); })" -# Run the server normally +# Run the server normally exec "$@" \ No newline at end of file diff --git a/server/src/server.ts b/server/src/server.ts index 61f57756..316bf3de 100644 --- a/server/src/server.ts +++ b/server/src/server.ts @@ -280,27 +280,39 @@ if (require.main === module) { const run = await Run.findOne({ where: { browserId, status: 'running' } }); if (run) { const limitedData = { - scrapeSchemaOutput: browser.interpreter.serializableDataByType?.scrapeSchema - ? 
{ "schema-tabular": browser.interpreter.serializableDataByType.scrapeSchema } - : {}, + scrapeSchemaOutput: browser.interpreter.serializableDataByType?.scrapeSchema || {}, scrapeListOutput: browser.interpreter.serializableDataByType?.scrapeList || {}, binaryOutput: browser.interpreter.binaryData || [] }; const binaryOutputRecord = limitedData.binaryOutput.reduce((acc: Record<string, any>, item: any, index: number) => { - acc[`item-${index}`] = item; + const key = item.name || `Screenshot ${index + 1}`; + acc[key] = { data: item.data, mimeType: item.mimeType }; return acc; }, {}); + let uploadedBinaryOutput = {}; + if (Object.keys(binaryOutputRecord).length > 0) { + try { + const { BinaryOutputService } = require('./storage/mino'); + const binaryOutputService = new BinaryOutputService('maxun-run-screenshots'); + uploadedBinaryOutput = await binaryOutputService.uploadAndStoreBinaryOutput(run, binaryOutputRecord); + logger.log('info', `Successfully uploaded ${Object.keys(uploadedBinaryOutput).length} screenshots to MinIO for interrupted run`); + } catch (minioError: any) { + logger.log('error', `Failed to upload binary data to MinIO during shutdown: ${minioError.message}`); + uploadedBinaryOutput = binaryOutputRecord; + } + } + await run.update({ status: 'failed', finishedAt: new Date().toLocaleString(), log: 'Process interrupted during execution - partial data preserved', serializableOutput: { - scrapeSchema: Object.values(limitedData.scrapeSchemaOutput), - scrapeList: Object.values(limitedData.scrapeListOutput), + scrapeSchema: limitedData.scrapeSchemaOutput, + scrapeList: limitedData.scrapeListOutput, }, - binaryOutput: binaryOutputRecord + binaryOutput: uploadedBinaryOutput }); } } diff --git a/src/components/robot/pages/RobotEditPage.tsx b/src/components/robot/pages/RobotEditPage.tsx index 178c5ad3..70fd9f36 100644 --- a/src/components/robot/pages/RobotEditPage.tsx +++ b/src/components/robot/pages/RobotEditPage.tsx @@ -383,15 +383,18 @@ export const RobotEditPage = ({
handleStart }: RobotSettingsProps) => { if (!prev) return prev; const updatedWorkflow = [...prev.recording.workflow]; + const pair = updatedWorkflow[pairIndex]; + const action = pair?.what?.[actionIndex]; if ( updatedWorkflow.length > pairIndex && - updatedWorkflow[pairIndex]?.what && - updatedWorkflow[pairIndex].what.length > actionIndex && - updatedWorkflow[pairIndex].what[actionIndex].args && - updatedWorkflow[pairIndex].what[actionIndex].args.length > argIndex + pair?.what && + pair.what.length > actionIndex && + action?.args && + action.args.length > argIndex ) { - updatedWorkflow[pairIndex].what[actionIndex].args[argIndex].limit = - newLimit; + if (action.args[argIndex]) { + action.args[argIndex].limit = newLimit; + } setScrapeListLimits((prev) => { return prev.map((item) => {