batch input field processing for input actions when caching and running workflows with code (#4250)

This commit is contained in:
Shuchang Zheng
2025-12-16 10:08:19 +08:00
committed by GitHub
parent 1e5b8b36c1
commit 65b12f7ade
7 changed files with 320 additions and 57 deletions

View File

@@ -39,6 +39,7 @@ class ScriptBlockSource:
run_signature: str | None
workflow_run_id: str | None
workflow_run_block_id: str | None
input_fields: list[str] | None
# --------------------------------------------------------------------- #
@@ -122,6 +123,26 @@ INDENT = " " * 4
DOUBLE_INDENT = " " * 8
def _requires_mini_agent(act: dict[str, Any]) -> bool:
"""
Determine whether an input/select action should be forced into proactive mode.
Mirrors runtime logic that treats some inputs as mini-agent flows or TOTP-sensitive.
"""
if act.get("has_mini_agent", False):
return True
# context = act.get("input_or_select_context") or {}
# if isinstance(context, dict) and any(
# context.get(flag) for flag in ("is_location_input", "is_date_related", "date_format")
# ):
# return True
if act.get("totp_timing_info") and act.get("totp_timing_info", {}).get("is_totp_sequence"):
return True
return False
def _safe_name(label: str) -> str:
s = "".join(c if c.isalnum() else "_" for c in label).lower()
if not s or s[0].isdigit() or keyword.iskeyword(s):
@@ -304,6 +325,10 @@ def _action_to_stmt(act: dict[str, Any], task: dict[str, Any], assign_to_output:
else:
text_value = _value(act["text"])
ai_mode = GENERATE_CODE_AI_MODE_FALLBACK
if _requires_mini_agent(act):
ai_mode = GENERATE_CODE_AI_MODE_PROACTIVE
args.append(
cst.Arg(
keyword=cst.Name("value"),
@@ -317,7 +342,7 @@ def _action_to_stmt(act: dict[str, Any], task: dict[str, Any], assign_to_output:
args.append(
cst.Arg(
keyword=cst.Name("ai"),
value=_value(GENERATE_CODE_AI_MODE_PROACTIVE),
value=_value(ai_mode),
whitespace_after_arg=cst.ParenthesizedWhitespace(
indent=True,
last_line=cst.SimpleWhitespace(INDENT),
@@ -353,6 +378,10 @@ def _action_to_stmt(act: dict[str, Any], task: dict[str, Any], assign_to_output:
label = option.get("label")
value = value or label
if value:
# TODO: consider supporting fallback mode for select_option actions
# ai_mode = GENERATE_CODE_AI_MODE_FALLBACK
# if _requires_mini_agent(act):
ai_mode = GENERATE_CODE_AI_MODE_PROACTIVE
if act.get("field_name"):
option_value = cst.Subscript(
value=cst.Attribute(
@@ -376,7 +405,7 @@ def _action_to_stmt(act: dict[str, Any], task: dict[str, Any], assign_to_output:
args.append(
cst.Arg(
keyword=cst.Name("ai"),
value=_value(GENERATE_CODE_AI_MODE_PROACTIVE),
value=_value(ai_mode),
whitespace_after_arg=cst.ParenthesizedWhitespace(
indent=True,
last_line=cst.SimpleWhitespace(INDENT),
@@ -509,6 +538,33 @@ def _action_to_stmt(act: dict[str, Any], task: dict[str, Any], assign_to_output:
return cst.SimpleStatementLine([cst.Expr(await_expr)])
def _collect_block_input_fields(
block: dict[str, Any],
actions_by_task: dict[str, list[dict[str, Any]]],
) -> list[str]:
"""
Gather the sequence of workflow parameter field names referenced by input_text actions within a block.
"""
task_id = block.get("task_id")
if not task_id:
return []
all_fields: list[str] = []
for action in actions_by_task.get(task_id, []):
action_type = action.get("action_type")
# Only support input_text actions for now
if action_type not in {ActionType.INPUT_TEXT}:
continue
field_name = action.get("field_name")
if not field_name or not isinstance(field_name, str):
continue
all_fields.append(field_name)
return all_fields
def _build_block_fn(block: dict[str, Any], actions: list[dict[str, Any]]) -> FunctionDef:
name = _safe_name(block.get("label") or block.get("title") or f"block_{block.get('workflow_run_block_id')}")
cache_key = block.get("label") or block.get("title") or f"block_{block.get('workflow_run_block_id')}"
@@ -1908,6 +1964,9 @@ async def generate_workflow_script_python_code(
block_name = task.get("label") or task.get("title") or task.get("task_id") or f"task_{idx}"
cached_source = cached_blocks.get(block_name)
use_cached = cached_source is not None and block_name not in updated_block_labels
input_fields = _collect_block_input_fields(task, actions_by_task)
if not input_fields and cached_source and cached_source.input_fields:
input_fields = cached_source.input_fields
if use_cached:
assert cached_source is not None
@@ -1939,6 +1998,7 @@ async def generate_workflow_script_python_code(
run_signature=run_signature,
workflow_run_id=block_workflow_run_id,
workflow_run_block_id=block_workflow_run_block_id,
input_fields=input_fields,
)
except Exception as e:
LOG.error("Failed to create script block", error=str(e), exc_info=True)
@@ -1952,6 +2012,9 @@ async def generate_workflow_script_python_code(
cached_source = cached_blocks.get(task_v2_label)
use_cached = cached_source is not None and task_v2_label not in updated_block_labels
input_fields = _collect_block_input_fields(task_v2, actions_by_task)
if not input_fields and cached_source and cached_source.input_fields:
input_fields = cached_source.input_fields
block_code = ""
run_signature = None
@@ -1993,6 +2056,7 @@ async def generate_workflow_script_python_code(
run_signature=run_signature,
workflow_run_id=block_workflow_run_id,
workflow_run_block_id=block_workflow_run_block_id,
input_fields=input_fields,
)
except Exception as e:
LOG.error("Failed to create task_v2 script block", error=str(e), exc_info=True)
@@ -2071,6 +2135,7 @@ async def create_or_update_script_block(
run_signature: str | None = None,
workflow_run_id: str | None = None,
workflow_run_block_id: str | None = None,
input_fields: list[str] | None = None,
) -> None:
"""
Create a script block in the database and save the block code to a script file.
@@ -2086,6 +2151,7 @@ async def create_or_update_script_block(
run_signature: The function call code to execute this block (e.g., "await skyvern.action(...)")
workflow_run_id: The workflow run that generated this cached block
workflow_run_block_id: The workflow run block that generated this cached block
input_fields: Workflow parameter field names referenced by this block's cached actions
"""
block_code_bytes = block_code if isinstance(block_code, bytes) else block_code.encode("utf-8")
try:
@@ -2104,15 +2170,17 @@ async def create_or_update_script_block(
run_signature=run_signature,
workflow_run_id=workflow_run_id,
workflow_run_block_id=workflow_run_block_id,
input_fields=input_fields,
)
elif run_signature:
# Update the run_signature if provided
elif any(value is not None for value in [run_signature, workflow_run_id, workflow_run_block_id, input_fields]):
# Update metadata when new values are provided
script_block = await app.DATABASE.update_script_block(
script_block_id=script_block.script_block_id,
organization_id=organization_id,
run_signature=run_signature,
workflow_run_id=workflow_run_id,
workflow_run_block_id=workflow_run_block_id,
input_fields=input_fields,
)
# Step 4: Create script file for the block

View File

@@ -74,7 +74,22 @@ class SkyvernPage(Page):
*args: Any,
**kwargs: Any,
) -> Any:
return await fn(self, *args, **kwargs)
context = skyvern_context.current()
# label = self.current_label
# action_override = None
# if context and label:
# current_count = context.action_counters.get(label, 0) + 1
# context.action_counters[label] = current_count
# action_override = context.action_ai_overrides.get(label, {}).get(current_count)
# context.ai_mode_override = action_override
try:
return await fn(self, *args, **kwargs)
finally:
if context:
# Reset override after each action so defaults apply when no mapping is provided.
# context.ai_mode_override = None
pass
@staticmethod
def action_wrap(

View File

@@ -102,6 +102,7 @@ async def transform_workflow_run_to_code_gen_input(workflow_run_id: str, organiz
for action in actions:
action_dump = action.model_dump()
action_dump["xpath"] = action.get_xpath()
action_dump["has_mini_agent"] = action.has_mini_agent
if (
"data_extraction_goal" in final_dump
and final_dump["data_extraction_goal"]

View File

@@ -72,6 +72,8 @@ class SkyvernContext:
{"loop_value": "str", "output_parameter": "the key of the parameter", "output_value": Any}
"""
generate_script: bool = True
action_ai_overrides: dict[str, dict[int, str]] = field(default_factory=dict)
action_counters: dict[str, int] = field(default_factory=dict)
def __repr__(self) -> str:
return f"SkyvernContext(request_id={self.request_id}, organization_id={self.organization_id}, task_id={self.task_id}, step_id={self.step_id}, workflow_id={self.workflow_id}, workflow_run_id={self.workflow_run_id}, task_v2_id={self.task_v2_id}, max_steps_override={self.max_steps_override}, run_id={self.run_id})"

View File

@@ -709,7 +709,7 @@ class WorkflowService:
browser_profile_id=browser_profile_id,
block_labels=block_labels,
block_outputs=block_outputs,
workflow_script=workflow_script,
script=workflow_script,
)
if refreshed_workflow_run := await app.DATABASE.get_workflow_run(
@@ -758,93 +758,89 @@ class WorkflowService:
browser_profile_id: str | None = None,
block_labels: list[str] | None = None,
block_outputs: dict[str, Any] | None = None,
workflow_script: WorkflowScript | None = None,
script: Script | None = None,
) -> tuple[WorkflowRun, set[str]]:
organization_id = organization.organization_id
workflow_run_id = workflow_run.workflow_run_id
top_level_blocks = workflow.workflow_definition.blocks
all_blocks = get_all_blocks(top_level_blocks)
# Load script blocks if workflow_script is provided
# Load script blocks if script is provided
script_blocks_by_label: dict[str, Any] = {}
loaded_script_module = None
blocks_to_update: set[str] = set()
is_script_run = self.should_run_script(workflow, workflow_run)
if workflow_script:
if script:
LOG.info(
"Loading script blocks for workflow execution",
workflow_run_id=workflow_run_id,
script_id=workflow_script.script_id,
script_id=script.script_id,
script_revision_id=script.script_revision_id,
)
context = skyvern_context.ensure_context()
context.script_id = script.script_id
context.script_revision_id = script.script_revision_id
try:
# Load script blocks from database
script = await app.DATABASE.get_script(
script_id=workflow_script.script_id,
script_blocks = await app.DATABASE.get_script_blocks_by_script_revision_id(
script_revision_id=script.script_revision_id,
organization_id=organization_id,
)
if script:
script_blocks = await app.DATABASE.get_script_blocks_by_script_revision_id(
# Create mapping from block label to script block
for script_block in script_blocks:
if script_block.run_signature:
script_blocks_by_label[script_block.script_block_label] = script_block
if is_script_run:
# load the script files
script_files = await app.DATABASE.get_script_files(
script_revision_id=script.script_revision_id,
organization_id=organization_id,
)
await script_service.load_scripts(script, script_files)
# Create mapping from block label to script block
for script_block in script_blocks:
if script_block.run_signature:
script_blocks_by_label[script_block.script_block_label] = script_block
if is_script_run:
# load the script files
script_files = await app.DATABASE.get_script_files(
script_revision_id=script.script_revision_id,
organization_id=organization_id,
script_path = os.path.join(settings.TEMP_PATH, script.script_id, "main.py")
if os.path.exists(script_path):
# setup script run
parameter_tuples = await app.DATABASE.get_workflow_run_parameters(
workflow_run_id=workflow_run.workflow_run_id
)
await script_service.load_scripts(script, script_files)
script_parameters = {wf_param.key: run_param.value for wf_param, run_param in parameter_tuples}
script_path = os.path.join(settings.TEMP_PATH, workflow_script.script_id, "main.py")
if os.path.exists(script_path):
# setup script run
parameter_tuples = await app.DATABASE.get_workflow_run_parameters(
workflow_run_id=workflow_run.workflow_run_id
spec = importlib.util.spec_from_file_location("user_script", script_path)
if spec and spec.loader:
loaded_script_module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(loaded_script_module)
await skyvern.setup(
script_parameters,
generated_parameter_cls=loaded_script_module.GeneratedWorkflowParameters,
)
script_parameters = {
wf_param.key: run_param.value for wf_param, run_param in parameter_tuples
}
spec = importlib.util.spec_from_file_location("user_script", script_path)
if spec and spec.loader:
loaded_script_module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(loaded_script_module)
await skyvern.setup(
script_parameters,
generated_parameter_cls=loaded_script_module.GeneratedWorkflowParameters,
)
LOG.info(
"Successfully loaded script module",
script_id=workflow_script.script_id,
block_count=len(script_blocks_by_label),
)
else:
LOG.warning(
"Script file not found at path",
script_path=script_path,
script_id=workflow_script.script_id,
LOG.info(
"Successfully loaded script module",
script_id=script.script_id,
block_count=len(script_blocks_by_label),
)
else:
LOG.warning(
"Script file not found at path",
script_path=script_path,
script_id=script.script_id,
)
except Exception as e:
LOG.warning(
"Failed to load script blocks, will fallback to normal execution",
error=str(e),
exc_info=True,
workflow_run_id=workflow_run_id,
script_id=workflow_script.script_id,
script_id=script.script_id,
)
script_blocks_by_label = {}
loaded_script_module = None
# Mark workflow as running with appropriate engine
run_with = "code" if workflow_script and is_script_run and script_blocks_by_label else "agent"
run_with = "code" if script and is_script_run and script_blocks_by_label else "agent"
await self.mark_workflow_run_as_running(workflow_run_id=workflow_run_id, run_with=run_with)
if block_labels and len(block_labels):

View File

@@ -61,6 +61,8 @@ from skyvern.schemas.scripts import (
ScriptStatus,
)
from skyvern.schemas.workflows import BlockResult, BlockStatus, BlockType, FileStorageType, FileType
from skyvern.webeye.actions.action_types import ActionType
from skyvern.webeye.actions.actions import Action
from skyvern.webeye.scraper.scraped_page import ElementTreeFormat
LOG = structlog.get_logger()
@@ -352,6 +354,7 @@ async def execute_script(
workflow_run_id=workflow_run_id,
browser_session_id=browser_session_id,
script_id=script_id,
script_revision_id=script.script_revision_id,
)
else:
# Execute synchronously
@@ -362,6 +365,8 @@ async def execute_script(
organization_id=organization_id,
workflow_run_id=workflow_run_id,
browser_session_id=browser_session_id,
script_id=script_id,
script_revision_id=script.script_revision_id,
)
else:
LOG.error("Script main.py not found", script_path=script_path, script_id=script_id)
@@ -686,6 +691,162 @@ async def _run_cached_function(cached_fn: Callable) -> Any:
return await cached_fn(page=run_context.page, context=run_context)
def _determine_action_ai_mode(
    action: Action,
    merged_value: str | None,
) -> str:
    """
    Decide whether to run an input/select action in proactive or fallback mode.

    Args:
        action: The cached action being replayed.
        merged_value: Value merged from workflow parameters for this action's
            field, if any.

    Returns:
        "fallback" when a non-blank merged value lets cached code fill the field
        directly; "proactive" otherwise (mini-agent flows, TOTP sequences, or no
        usable value).
    """
    if action.has_mini_agent:
        return "proactive"
    # NOTE(review): checks based on input_or_select_context flags and
    # totp_code_required were considered here but are intentionally disabled.
    # Look up totp_timing_info once; TOTP sequences must stay proactive.
    totp_timing_info = action.totp_timing_info
    if totp_timing_info and totp_timing_info.get("is_totp_sequence"):
        return "proactive"
    # str() guards against a non-string merged value sneaking through.
    if merged_value and str(merged_value).strip():
        return "fallback"
    return "proactive"
def _clear_cached_block_overrides(cache_key: str) -> None:
    """Drop the per-action AI-mode overrides and counters seeded for a cached block."""
    if ctx := skyvern_context.current():
        # pop() with a default so missing keys are a no-op.
        ctx.action_ai_overrides.pop(cache_key, None)
        ctx.action_counters.pop(cache_key, None)
async def _prepare_cached_block_inputs(cache_key: str, prompt: str | None, step_id: str | None = None) -> None:
    """
    Fetch merged LLM inputs for a cached block and seed action-level AI overrides/parameters.

    Looks up the cached script block for ``cache_key``, walks the input_text
    actions recorded for the source task, asks the LLM to merge workflow
    parameters into per-field values, and seeds the run context parameters for
    fields that can run in fallback mode. Best-effort: any lookup or LLM
    failure degrades to doing nothing.

    Args:
        cache_key: Script block label identifying the cached block.
        prompt: Block-level prompt/context, included in the merge prompt.
        step_id: Optional step id used to attribute the LLM call to a step.
    """
    context = skyvern_context.current()
    if not context or not context.organization_id or not context.script_revision_id:
        return
    try:
        script_block = await app.DATABASE.get_script_block_by_label(
            organization_id=context.organization_id,
            script_revision_id=context.script_revision_id,
            script_block_label=cache_key,
        )
    except Exception:
        # Best-effort: a missing/unreadable script block just skips input seeding.
        return
    input_fields: list[str] = []
    workflow_run_block_id = None
    if script_block:
        input_fields = script_block.input_fields or []
        workflow_run_block_id = script_block.workflow_run_block_id
    if not input_fields or not workflow_run_block_id:
        return
    try:
        source_block = await app.DATABASE.get_workflow_run_block(
            workflow_run_block_id=workflow_run_block_id,
            organization_id=context.organization_id,
        )
    except Exception:
        return
    task_id = source_block.task_id
    if not task_id:
        return
    try:
        # actions are ordered by created_at
        actions = await app.DATABASE.get_task_actions_hydrated(task_id=task_id, organization_id=context.organization_id)
    except Exception:
        return
    input_actions = [action for action in actions if action.action_type in {ActionType.INPUT_TEXT}]
    # TODO: how to support select_option actions?
    # input_actions = [
    #     action for action in actions if action.action_type in {ActionType.INPUT_TEXT, ActionType.SELECT_OPTION}
    # ]
    if not input_actions:
        return
    # Pair actions with field names consumed in order from input_fields; actions beyond
    # the available fields get None. next() with a default never raises StopIteration,
    # so no try/except is needed here.
    field_iter = iter(input_fields)
    action_entries: list[tuple[Action, str | None]] = [(action, next(field_iter, None)) for action in input_actions]
    merged_values: dict[str, Any] = {}
    run_context = script_run_context_manager.get_run_context()
    if not run_context:
        return
    try:
        parameters = {key: str(value) for key, value in run_context.parameters.items() if value}
        serialized_params = json.dumps(parameters)
        field_prompts = []
        for action, field_name in action_entries:
            if not field_name:
                continue
            # Prefer the richer input/select context intention when present.
            prompt_text = action.intention or action.reasoning or ""
            if action.input_or_select_context and action.input_or_select_context.intention:
                prompt_text = action.input_or_select_context.intention
            field_prompts.append({"name": field_name, "prompt": prompt_text})
        if field_prompts:
            merged_prompt = (
                "You are helping fill web form fields for a workflow block.\n"
                f"Block prompt/context:\n{prompt or ''}\n\n"
                f"Workflow parameters (as JSON):\n{serialized_params}\n\n"
                "Return a JSON object mapping field_name -> value for the following fields.\n"
                "Leave value empty string if it cannot be determined.\n"
                f"Fields:\n{json.dumps(field_prompts)}"
            )
            step = None
            if step_id:
                step = await app.DATABASE.get_step(step_id=step_id, organization_id=context.organization_id)
            llm_response = await app.SCRIPT_GENERATION_LLM_API_HANDLER(
                prompt=merged_prompt,
                prompt_name="merged-block-inputs",
                step=step,
            )
            # Accept either a parsed dict or a JSON string; anything else is ignored.
            if isinstance(llm_response, dict):
                merged_values = llm_response
            elif isinstance(llm_response, str):
                try:
                    merged_values = json.loads(llm_response)
                except Exception:
                    merged_values = {}
            else:
                merged_values = {}
    except Exception:
        # LLM merge is best-effort; fall back to proactive mode everywhere.
        merged_values = {}
    overrides: dict[int, str] = {}
    for idx, (action, field_name) in enumerate(action_entries, start=1):
        merged_value = merged_values.get(field_name, "") if field_name else ""
        ai_mode = _determine_action_ai_mode(action, merged_value)
        overrides[idx] = ai_mode
        if ai_mode == "fallback" and field_name and isinstance(merged_value, str):
            # Seed the run context parameters with merged values for cached execution.
            run_context.parameters[field_name] = merged_value
    # NOTE(review): publishing the overrides to the context is intentionally disabled,
    # matching the disabled consumption in SkyvernPage — confirm before enabling.
    # if overrides:
    #     context.action_ai_overrides[cache_key] = overrides
    #     context.action_counters[cache_key] = 0
async def _detect_user_defined_errors(
task: Task,
step: Step,
@@ -1199,6 +1360,7 @@ async def _regenerate_script_block_after_ai_fallback(
block_label=existing_block.script_block_label,
workflow_run_id=existing_block.workflow_run_id,
workflow_run_block_id=existing_block.workflow_run_block_id,
input_fields=existing_block.input_fields,
)
block_file_content_bytes = (
block_file_content if isinstance(block_file_content, bytes) else block_file_content.encode("utf-8")
@@ -1357,6 +1519,7 @@ async def run_task(
context = skyvern_context.ensure_context()
context.prompt = prompt
try:
await _prepare_cached_block_inputs(cache_key, prompt)
output = await _run_cached_function(cached_fn)
# Update block status to completed if workflow block was created
@@ -1389,6 +1552,7 @@ async def run_task(
finally:
# clear the prompt in the RunContext
context.prompt = None
_clear_cached_block_overrides(cache_key)
else:
block_validation_output = await _validate_and_get_output_parameter(label)
task_block = NavigationBlock(
@@ -1444,6 +1608,7 @@ async def download(
context.prompt = prompt
try:
await _prepare_cached_block_inputs(cache_key, prompt)
await _run_cached_function(cached_fn)
# Update block status to completed if workflow block was created
@@ -1471,6 +1636,7 @@ async def download(
)
finally:
context.prompt = None
_clear_cached_block_overrides(cache_key)
else:
block_validation_output = await _validate_and_get_output_parameter(label)
file_download_block = FileDownloadBlock(
@@ -1525,6 +1691,7 @@ async def action(
context.prompt = prompt
try:
await _prepare_cached_block_inputs(cache_key, prompt)
await _run_cached_function(cached_fn)
# Update block status to completed if workflow block was created
@@ -1553,6 +1720,7 @@ async def action(
)
finally:
context.prompt = None
_clear_cached_block_overrides(cache_key)
else:
block_validation_output = await _validate_and_get_output_parameter(label)
action_block = ActionBlock(
@@ -1609,6 +1777,7 @@ async def login(
context = skyvern_context.ensure_context()
context.prompt = prompt
try:
await _prepare_cached_block_inputs(cache_key, prompt)
await _run_cached_function(cached_fn)
# Update block status to completed if workflow block was created
@@ -1637,6 +1806,7 @@ async def login(
)
finally:
context.prompt = None
_clear_cached_block_overrides(cache_key)
else:
block_validation_output = await _validate_and_get_output_parameter(label)
login_block = LoginBlock(
@@ -1804,13 +1974,23 @@ async def run_script(
organization_id: str | None = None,
workflow_run_id: str | None = None,
browser_session_id: str | None = None,
script_id: str | None = None,
script_revision_id: str | None = None,
) -> None:
# register the script run
context = skyvern_context.current()
if not context:
context = skyvern_context.ensure_context()
skyvern_context.set(skyvern_context.SkyvernContext())
context = skyvern_context.SkyvernContext()
skyvern_context.set(context)
context.browser_session_id = browser_session_id
if organization_id:
context.organization_id = organization_id
if script_id:
context.script_id = script_id
if script_revision_id:
context.script_revision_id = script_revision_id
if workflow_run_id and organization_id:
workflow_run = await app.DATABASE.get_workflow_run(
workflow_run_id=workflow_run_id, organization_id=organization_id

View File

@@ -150,6 +150,7 @@ async def _load_cached_script_block_sources(
run_signature=script_block.run_signature,
workflow_run_id=script_block.workflow_run_id,
workflow_run_block_id=script_block.workflow_run_block_id,
input_fields=script_block.input_fields,
)
return cached_blocks