Merge pull request #913 from getmaxun/platform-fix

fix: multi-platform docker build
Karishma Shukla
2025-12-05 23:19:15 +05:30
committed by GitHub
6 changed files with 40 additions and 24 deletions

View File

@@ -1,26 +1,28 @@
-FROM --platform=$BUILDPLATFORM node:20-slim
+FROM node:20-slim
# Set working directory
WORKDIR /app
COPY .sequelizerc .sequelizerc
COPY .env .env
# Install node dependencies
COPY package*.json ./
COPY src ./src
COPY public ./public
COPY server ./server
COPY tsconfig.json ./
COPY server/tsconfig.json ./server/
# COPY server/start.sh ./
# Install dependencies
RUN npm install --legacy-peer-deps
# Build TypeScript server
RUN npm run build:server
# Expose backend port
EXPOSE ${BACKEND_PORT:-8080}
-# Run migrations & start backend using start script
+# Run migrations & start backend using plain node
CMD ["npm", "run", "server"]
# CMD ["sh", "-c", "npm run migrate && npm run server"]

View File

@@ -1,4 +1,4 @@
-FROM --platform=$BUILDPLATFORM node:18-alpine AS builder
+FROM node:18-alpine AS builder
WORKDIR /app

View File

@@ -83,12 +83,11 @@
"winston": "^3.5.1"
},
"scripts": {
"start": "concurrently -k \"npm run server\" \"npm run client\"",
"server": "cross-env NODE_OPTIONS='--max-old-space-size=8000' nodemon server/src/server.ts",
"start": "npm run build:server && concurrently -k \"npm run server\" \"npm run client\"",
"server": "cross-env NODE_OPTIONS='--max-old-space-size=512' node server/dist/server/src/server.js",
"client": "vite",
"build": "vite build",
"build:server": "tsc -p server/tsconfig.json",
"start:server": "cross-env NODE_OPTIONS='--max-old-space-size=8000' server/dist/server/src/server.js",
"preview": "vite preview",
"lint": "./node_modules/.bin/eslint .",
"migrate": "sequelize-cli db:migrate",

View File

@@ -27,7 +27,7 @@ wait_for_postgres() {
wait_for_postgres
# Run the application with migrations before startup
-NODE_OPTIONS="--max-old-space-size=4096" node -e "require('./server/src/db/migrate')().then(() => { console.log('Migration process completed.'); })"
+NODE_OPTIONS="--max-old-space-size=4096" node -e "require('./server/dist/server/src/db/migrate')().then(() => { console.log('Migration process completed.'); })"
# Run the server normally
exec "$@"

View File

@@ -280,27 +280,39 @@ if (require.main === module) {
const run = await Run.findOne({ where: { browserId, status: 'running' } });
if (run) {
const limitedData = {
-scrapeSchemaOutput: browser.interpreter.serializableDataByType?.scrapeSchema
-? { "schema-tabular": browser.interpreter.serializableDataByType.scrapeSchema }
-: {},
+scrapeSchemaOutput: browser.interpreter.serializableDataByType?.scrapeSchema || {},
scrapeListOutput: browser.interpreter.serializableDataByType?.scrapeList || {},
binaryOutput: browser.interpreter.binaryData || []
};
const binaryOutputRecord = limitedData.binaryOutput.reduce((acc: Record<string, any>, item: any, index: number) => {
-acc[`item-${index}`] = item;
+const key = item.name || `Screenshot ${index + 1}`;
+acc[key] = { data: item.data, mimeType: item.mimeType };
return acc;
}, {});
+let uploadedBinaryOutput = {};
+if (Object.keys(binaryOutputRecord).length > 0) {
+try {
+const { BinaryOutputService } = require('./storage/mino');
+const binaryOutputService = new BinaryOutputService('maxun-run-screenshots');
+uploadedBinaryOutput = await binaryOutputService.uploadAndStoreBinaryOutput(run, binaryOutputRecord);
+logger.log('info', `Successfully uploaded ${Object.keys(uploadedBinaryOutput).length} screenshots to MinIO for interrupted run`);
+} catch (minioError: any) {
+logger.log('error', `Failed to upload binary data to MinIO during shutdown: ${minioError.message}`);
+uploadedBinaryOutput = binaryOutputRecord;
+}
+}
await run.update({
status: 'failed',
finishedAt: new Date().toLocaleString(),
log: 'Process interrupted during execution - partial data preserved',
serializableOutput: {
-scrapeSchema: Object.values(limitedData.scrapeSchemaOutput),
-scrapeList: Object.values(limitedData.scrapeListOutput),
+scrapeSchema: limitedData.scrapeSchemaOutput,
+scrapeList: limitedData.scrapeListOutput,
},
-binaryOutput: binaryOutputRecord
+binaryOutput: uploadedBinaryOutput
});
}
}
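
The shutdown handler above now does three things for an interrupted run: it keys screenshots by their name (falling back to Screenshot N) instead of item-${index}, tries to upload them to MinIO via BinaryOutputService, and keeps the raw in-memory record if that upload fails, so partial data survives either way. Below is a minimal TypeScript sketch of that upload-with-fallback pattern in isolation; persistBinaryOutput and the upload parameter are illustrative names, not the repo's API:

async function persistBinaryOutput(
  record: Record<string, { data: Buffer; mimeType: string }>,
  upload: (r: Record<string, { data: Buffer; mimeType: string }>) => Promise<Record<string, unknown>>
): Promise<Record<string, unknown>> {
  // Nothing captured during the run: persist an empty object, skip the upload.
  if (Object.keys(record).length === 0) return {};
  try {
    // Prefer durable object storage (MinIO in the diff above).
    return await upload(record);
  } catch (err) {
    // Fall back to the raw record so partial data is not lost with the run.
    console.error(`Upload failed, keeping raw binary record: ${(err as Error).message}`);
    return record;
  }
}

In the diff, the equivalent of upload is a closure over binaryOutputService.uploadAndStoreBinaryOutput(run, binaryOutputRecord), and whichever value comes back is what gets written to the run's binaryOutput column.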

View File

@@ -383,15 +383,18 @@ export const RobotEditPage = ({ handleStart }: RobotSettingsProps) => {
if (!prev) return prev;
const updatedWorkflow = [...prev.recording.workflow];
+const pair = updatedWorkflow[pairIndex];
+const action = pair?.what?.[actionIndex];
if (
-updatedWorkflow.length > pairIndex &&
-updatedWorkflow[pairIndex]?.what &&
-updatedWorkflow[pairIndex].what.length > actionIndex &&
-updatedWorkflow[pairIndex].what[actionIndex].args &&
-updatedWorkflow[pairIndex].what[actionIndex].args.length > argIndex
+pair?.what &&
+pair.what.length > actionIndex &&
+action?.args &&
+action.args.length > argIndex
) {
-updatedWorkflow[pairIndex].what[actionIndex].args[argIndex].limit =
-newLimit;
+if (action.args[argIndex]) {
+action.args[argIndex].limit = newLimit;
+}
setScrapeListLimits((prev) => {
return prev.map((item) => {