2024-09-19 11:09:19 -07:00
import json
2025-10-12 12:57:38 +03:00
from datetime import datetime , timedelta
2025-09-23 10:16:48 -06:00
from typing import Any , List , Literal , Sequence , overload
2024-03-01 10:09:30 -08:00
import structlog
2026-02-11 00:42:11 -05:00
from sqlalchemy import (
Text ,
and_ ,
asc ,
case ,
cast ,
delete ,
distinct ,
exists ,
func ,
literal ,
or_ ,
pool ,
select ,
tuple_ ,
update ,
)
from sqlalchemy . dialects . postgresql import JSONB
2025-12-18 11:32:40 -07:00
from sqlalchemy . exc import (
SQLAlchemyError ,
)
from sqlalchemy . ext . asyncio import AsyncEngine , create_async_engine
2024-03-01 10:09:30 -08:00
2024-09-03 07:00:15 +03:00
from skyvern . config import settings
2025-10-06 17:09:37 -04:00
from skyvern . constants import DEFAULT_SCRIPT_RUN_ID
2025-11-04 17:36:41 -08:00
from skyvern . exceptions import BrowserProfileNotFound , WorkflowParameterNotFound , WorkflowRunNotFound
2024-03-01 10:09:30 -08:00
from skyvern . forge . sdk . artifact . models import Artifact , ArtifactType
2025-12-18 11:32:40 -07:00
from skyvern . forge . sdk . db . base_alchemy_db import BaseAlchemyDB , read_retry
2024-11-26 11:29:33 +08:00
from skyvern . forge . sdk . db . enums import OrganizationAuthTokenType , TaskType
2024-03-01 10:09:30 -08:00
from skyvern . forge . sdk . db . exceptions import NotFoundError
from skyvern . forge . sdk . db . models import (
2024-10-15 12:06:50 -07:00
ActionModel ,
2025-01-08 21:45:38 -08:00
AISuggestionModel ,
2024-03-01 10:09:30 -08:00
ArtifactModel ,
AWSSecretParameterModel ,
2025-09-12 11:01:57 -06:00
AzureVaultCredentialParameterModel ,
2026-01-13 15:31:33 -07:00
Base ,
2024-10-03 16:18:21 -07:00
BitwardenCreditCardDataParameterModel ,
2024-04-03 16:01:03 -07:00
BitwardenLoginCredentialParameterModel ,
2024-07-11 09:48:14 -07:00
BitwardenSensitiveInformationParameterModel ,
2025-08-28 20:05:24 -04:00
BlockRunModel ,
2025-11-04 17:36:41 -08:00
BrowserProfileModel ,
2025-02-14 00:00:19 +08:00
CredentialModel ,
CredentialParameterModel ,
2025-07-28 10:23:02 -04:00
DebugSessionModel ,
2025-11-05 18:37:18 +03:00
FolderModel ,
2025-06-12 04:20:27 -04:00
OnePasswordCredentialParameterModel ,
2024-03-01 10:09:30 -08:00
OrganizationAuthTokenModel ,
2025-02-20 13:50:41 -08:00
OrganizationBitwardenCollectionModel ,
2024-03-01 10:09:30 -08:00
OrganizationModel ,
2024-03-21 17:16:56 -07:00
OutputParameterModel ,
2025-01-08 18:14:38 +01:00
PersistentBrowserSessionModel ,
2025-08-08 20:24:44 -07:00
ScriptBlockModel ,
2025-08-06 22:23:38 -07:00
ScriptFileModel ,
ScriptModel ,
2024-03-01 10:09:30 -08:00
StepModel ,
2024-06-07 15:59:53 -07:00
TaskGenerationModel ,
2024-03-01 10:09:30 -08:00
TaskModel ,
2025-02-09 20:30:19 +08:00
TaskRunModel ,
2025-02-27 20:19:02 -08:00
TaskV2Model ,
ThoughtModel ,
2024-09-08 15:07:03 -07:00
TOTPCodeModel ,
2026-01-06 14:58:44 -07:00
WorkflowCopilotChatMessageModel ,
WorkflowCopilotChatModel ,
2024-03-01 10:09:30 -08:00
WorkflowModel ,
WorkflowParameterModel ,
2024-12-20 07:40:32 -08:00
WorkflowRunBlockModel ,
2024-03-01 10:09:30 -08:00
WorkflowRunModel ,
2024-03-21 17:16:56 -07:00
WorkflowRunOutputParameterModel ,
2024-03-01 10:09:30 -08:00
WorkflowRunParameterModel ,
2025-08-09 13:11:16 -07:00
WorkflowScriptModel ,
2025-12-11 18:39:21 -08:00
WorkflowTemplateModel ,
2024-03-01 10:09:30 -08:00
)
from skyvern . forge . sdk . db . utils import (
_custom_json_serializer ,
convert_to_artifact ,
convert_to_aws_secret_parameter ,
convert_to_organization ,
convert_to_organization_auth_token ,
2024-03-21 17:16:56 -07:00
convert_to_output_parameter ,
2025-08-06 22:23:38 -07:00
convert_to_script ,
2025-08-08 20:24:44 -07:00
convert_to_script_block ,
2025-08-06 22:23:38 -07:00
convert_to_script_file ,
2024-03-01 10:09:30 -08:00
convert_to_step ,
convert_to_task ,
2025-12-01 16:08:36 -08:00
convert_to_task_v2 ,
2024-03-01 10:09:30 -08:00
convert_to_workflow ,
2026-01-06 14:58:44 -07:00
convert_to_workflow_copilot_chat_message ,
2024-03-01 10:09:30 -08:00
convert_to_workflow_parameter ,
convert_to_workflow_run ,
2024-12-20 07:40:32 -08:00
convert_to_workflow_run_block ,
2024-03-21 17:16:56 -07:00
convert_to_workflow_run_output_parameter ,
2024-03-01 10:09:30 -08:00
convert_to_workflow_run_parameter ,
2025-06-10 16:07:02 -07:00
hydrate_action ,
2024-03-01 10:09:30 -08:00
)
2025-08-05 12:36:24 +08:00
from skyvern . forge . sdk . encrypt import encryptor
from skyvern . forge . sdk . encrypt . base import EncryptMethod
2024-12-18 00:32:38 +01:00
from skyvern . forge . sdk . log_artifacts import save_workflow_run_logs
2024-12-06 17:15:11 -08:00
from skyvern . forge . sdk . models import Step , StepStatus
2025-01-08 21:45:38 -08:00
from skyvern . forge . sdk . schemas . ai_suggestions import AISuggestion
2025-11-04 17:36:41 -08:00
from skyvern . forge . sdk . schemas . browser_profiles import BrowserProfile
2025-10-10 10:10:18 -06:00
from skyvern . forge . sdk . schemas . credentials import Credential , CredentialType , CredentialVaultType
2025-10-16 08:24:05 -04:00
from skyvern . forge . sdk . schemas . debug_sessions import BlockRun , DebugSession , DebugSessionRun
2025-02-20 13:50:41 -08:00
from skyvern . forge . sdk . schemas . organization_bitwarden_collections import OrganizationBitwardenCollection
2025-09-23 10:16:48 -06:00
from skyvern . forge . sdk . schemas . organizations import (
AzureClientSecretCredential ,
AzureOrganizationAuthToken ,
Organization ,
OrganizationAuthToken ,
)
2026-01-07 15:39:53 +08:00
from skyvern . forge . sdk . schemas . persistent_browser_sessions import (
Extensions ,
PersistentBrowserSession ,
PersistentBrowserType ,
)
2025-03-30 18:41:24 -07:00
from skyvern . forge . sdk . schemas . runs import Run
2024-06-07 15:59:53 -07:00
from skyvern . forge . sdk . schemas . task_generations import TaskGeneration
2025-02-27 20:19:02 -08:00
from skyvern . forge . sdk . schemas . task_v2 import TaskV2 , TaskV2Status , Thought , ThoughtType
2025-03-24 15:15:21 -07:00
from skyvern . forge . sdk . schemas . tasks import OrderBy , SortDirection , Task , TaskStatus
2025-10-14 16:24:14 +08:00
from skyvern . forge . sdk . schemas . totp_codes import OTPType , TOTPCode
2026-01-06 14:58:44 -07:00
from skyvern . forge . sdk . schemas . workflow_copilot import (
WorkflowCopilotChat ,
WorkflowCopilotChatMessage ,
WorkflowCopilotChatSender ,
)
2024-12-20 07:40:32 -08:00
from skyvern . forge . sdk . schemas . workflow_runs import WorkflowRunBlock
2026-02-09 19:23:42 -08:00
from skyvern . forge . sdk . utils . sanitization import sanitize_postgres_text
2024-03-21 17:16:56 -07:00
from skyvern . forge . sdk . workflow . models . parameter import (
2026-01-13 15:31:33 -07:00
PARAMETER_TYPE ,
2024-03-21 17:16:56 -07:00
AWSSecretParameter ,
2025-09-12 11:01:57 -06:00
AzureVaultCredentialParameter ,
2024-10-03 16:18:21 -07:00
BitwardenCreditCardDataParameter ,
2024-04-03 16:01:03 -07:00
BitwardenLoginCredentialParameter ,
2024-07-11 09:48:14 -07:00
BitwardenSensitiveInformationParameter ,
2026-01-13 15:31:33 -07:00
ContextParameter ,
2025-02-14 00:00:19 +08:00
CredentialParameter ,
2025-06-12 04:20:27 -04:00
OnePasswordCredentialParameter ,
2024-03-21 17:16:56 -07:00
OutputParameter ,
WorkflowParameter ,
WorkflowParameterType ,
)
from skyvern . forge . sdk . workflow . models . workflow import (
Workflow ,
WorkflowRun ,
WorkflowRunOutputParameter ,
WorkflowRunParameter ,
WorkflowRunStatus ,
)
2025-11-28 14:24:44 -08:00
from skyvern . schemas . runs import GeoTarget , ProxyLocation , ProxyLocationInput , RunEngine , RunType
2025-09-19 08:50:21 -07:00
from skyvern . schemas . scripts import Script , ScriptBlock , ScriptFile , ScriptStatus , WorkflowScript
2025-08-24 13:45:00 -07:00
from skyvern . schemas . steps import AgentStepOutput
2025-08-18 16:18:50 -07:00
from skyvern . schemas . workflows import BlockStatus , BlockType , WorkflowStatus
2024-10-15 12:06:50 -07:00
from skyvern . webeye . actions . actions import Action
2024-03-01 10:09:30 -08:00
# Module-level structured logger for this DB access layer.
LOG = structlog.get_logger()

# Sentinel object used to distinguish "argument not provided" from an explicit
# None in partial-update methods — presumably consumed further down this file;
# confirm against the update helpers that reference it.
_UNSET = object()
2024-03-01 10:09:30 -08:00
2025-11-28 14:24:44 -08:00
def _serialize_proxy_location(proxy_location: ProxyLocationInput) -> str | None:
    """
    Serialize proxy_location for database storage.

    GeoTarget instances and plain dicts become JSON strings, ProxyLocation
    enum values are stored as their string form, and None stays None.
    """
    serialized: str | None
    if proxy_location is None:
        serialized = None
    elif isinstance(proxy_location, GeoTarget):
        serialized = json.dumps(proxy_location.model_dump())
    elif isinstance(proxy_location, dict):
        serialized = json.dumps(proxy_location)
    else:
        # Remaining case: a ProxyLocation enum value — persist its string form.
        serialized = str(proxy_location)

    LOG.debug(
        "Serializing proxy_location for DB",
        input_type=type(proxy_location).__name__,
        input_value=str(proxy_location),
        serialized_value=serialized,
    )
    return serialized
2025-12-19 22:49:40 +08:00
# Driver-specific connection arguments passed to create_async_engine below.
DB_CONNECT_ARGS: dict[str, Any] = {}


# Apply a server-side statement timeout; the two Postgres async drivers take
# the setting in different shapes (libpq "options" string vs. server_settings dict).
if "postgresql+psycopg" in settings.DATABASE_STRING:
    DB_CONNECT_ARGS = {"options": f"-c statement_timeout={settings.DATABASE_STATEMENT_TIMEOUT_MS}"}
elif "postgresql+asyncpg" in settings.DATABASE_STRING:
    DB_CONNECT_ARGS = {"server_settings": {"statement_timeout": str(settings.DATABASE_STATEMENT_TIMEOUT_MS)}}
2024-11-19 17:18:25 -08:00
2024-03-01 10:09:30 -08:00
2025-12-18 11:32:40 -07:00
class AgentDB ( BaseAlchemyDB ) :
2025-06-09 16:12:23 -04:00
def __init__ ( self , database_string : str , debug_enabled : bool = False , db_engine : AsyncEngine | None = None ) - > None :
2025-12-19 22:49:40 +08:00
super ( ) . __init__ (
db_engine
or create_async_engine (
database_string ,
json_serializer = _custom_json_serializer ,
connect_args = DB_CONNECT_ARGS ,
poolclass = pool . NullPool if settings . DISABLE_CONNECTION_POOL else None ,
)
)
2024-03-01 10:09:30 -08:00
self . debug_enabled = debug_enabled
2025-12-18 11:32:40 -07:00
def is_retryable_error ( self , error : SQLAlchemyError ) - > bool :
error_msg = str ( error ) . lower ( )
return " server closed the connection " in error_msg
2024-03-01 10:09:30 -08:00
    async def create_task(
        self,
        url: str,
        title: str | None,
        navigation_goal: str | None,
        data_extraction_goal: str | None,
        navigation_payload: dict[str, Any] | list | str | None,
        status: str = "created",
        complete_criterion: str | None = None,
        terminate_criterion: str | None = None,
        webhook_callback_url: str | None = None,
        totp_verification_url: str | None = None,
        totp_identifier: str | None = None,
        organization_id: str | None = None,
        proxy_location: ProxyLocationInput = None,
        extracted_information_schema: dict[str, Any] | list | str | None = None,
        workflow_run_id: str | None = None,
        order: int | None = None,
        retry: int | None = None,
        max_steps_per_run: int | None = None,
        error_code_mapping: dict[str, str] | None = None,
        task_type: str = TaskType.general,
        application: str | None = None,
        include_action_history_in_verification: bool | None = None,
        model: dict[str, Any] | None = None,
        max_screenshot_scrolling_times: int | None = None,
        extra_http_headers: dict[str, str] | None = None,
        browser_session_id: str | None = None,
        browser_address: str | None = None,
        download_timeout: float | None = None,
    ) -> Task:
        """
        Insert a new TaskModel row and return it as a Task schema object.

        User-supplied text fields are sanitized for PostgreSQL storage, and
        proxy_location is serialized to its DB string form before insert.
        SQLAlchemy and unexpected errors are logged and re-raised.
        """
        try:
            # Sanitize text fields to remove NUL bytes and control characters
            # that PostgreSQL cannot store in text columns
            def _sanitize(v: str | None) -> str | None:
                return sanitize_postgres_text(v) if isinstance(v, str) else v

            navigation_goal = _sanitize(navigation_goal)
            data_extraction_goal = _sanitize(data_extraction_goal)
            title = _sanitize(title)
            # url is required (non-None), so it is sanitized unconditionally
            url = sanitize_postgres_text(url)
            complete_criterion = _sanitize(complete_criterion)
            terminate_criterion = _sanitize(terminate_criterion)

            async with self.Session() as session:
                new_task = TaskModel(
                    status=status,
                    task_type=task_type,
                    url=url,
                    title=title,
                    webhook_callback_url=webhook_callback_url,
                    totp_verification_url=totp_verification_url,
                    totp_identifier=totp_identifier,
                    navigation_goal=navigation_goal,
                    complete_criterion=complete_criterion,
                    terminate_criterion=terminate_criterion,
                    data_extraction_goal=data_extraction_goal,
                    navigation_payload=navigation_payload,
                    organization_id=organization_id,
                    # GeoTarget/dict -> JSON string; enum -> str; None passes through
                    proxy_location=_serialize_proxy_location(proxy_location),
                    extracted_information_schema=extracted_information_schema,
                    workflow_run_id=workflow_run_id,
                    order=order,
                    retry=retry,
                    max_steps_per_run=max_steps_per_run,
                    error_code_mapping=error_code_mapping,
                    application=application,
                    include_action_history_in_verification=include_action_history_in_verification,
                    model=model,
                    max_screenshot_scrolling_times=max_screenshot_scrolling_times,
                    extra_http_headers=extra_http_headers,
                    browser_session_id=browser_session_id,
                    browser_address=browser_address,
                    download_timeout=download_timeout,
                )
                session.add(new_task)
                await session.commit()
                # Refresh to load DB-populated columns before converting
                await session.refresh(new_task)
                return convert_to_task(new_task, self.debug_enabled)
        except SQLAlchemyError:
            LOG.error("SQLAlchemyError", exc_info=True)
            raise
        except Exception:
            LOG.error("UnexpectedError", exc_info=True)
            raise
async def create_step (
self ,
task_id : str ,
order : int ,
retry_index : int ,
organization_id : str | None = None ,
2025-08-24 13:45:00 -07:00
status : StepStatus = StepStatus . created ,
2025-12-04 13:47:46 -08:00
created_by : str | None = None ,
2024-03-01 10:09:30 -08:00
) - > Step :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
new_step = StepModel (
task_id = task_id ,
order = order ,
retry_index = retry_index ,
2025-08-24 13:45:00 -07:00
status = status ,
2024-03-01 10:09:30 -08:00
organization_id = organization_id ,
2025-12-04 13:47:46 -08:00
created_by = created_by ,
2024-03-01 10:09:30 -08:00
)
session . add ( new_step )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( new_step )
2024-03-01 10:09:30 -08:00
return convert_to_step ( new_step , debug_enabled = self . debug_enabled )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
    async def create_artifact(
        self,
        artifact_id: str,
        artifact_type: str,
        uri: str,
        organization_id: str,
        step_id: str | None = None,
        task_id: str | None = None,
        workflow_run_id: str | None = None,
        workflow_run_block_id: str | None = None,
        task_v2_id: str | None = None,
        run_id: str | None = None,
        thought_id: str | None = None,
        ai_suggestion_id: str | None = None,
    ) -> Artifact:
        """
        Insert an artifact row, optionally linked to any of the given parent
        entities (task, step, workflow run/block, task v2, thought, run,
        AI suggestion), and return it as an Artifact schema object.
        """
        try:
            async with self.Session() as session:
                new_artifact = ArtifactModel(
                    artifact_id=artifact_id,
                    artifact_type=artifact_type,
                    uri=uri,
                    task_id=task_id,
                    step_id=step_id,
                    workflow_run_id=workflow_run_id,
                    workflow_run_block_id=workflow_run_block_id,
                    # task_v2 / thought ids are persisted under legacy "observer_*" column names
                    observer_cruise_id=task_v2_id,
                    observer_thought_id=thought_id,
                    run_id=run_id,
                    ai_suggestion_id=ai_suggestion_id,
                    organization_id=organization_id,
                )
                session.add(new_artifact)
                await session.commit()
                # Reload DB-populated columns before converting
                await session.refresh(new_artifact)
                return convert_to_artifact(new_artifact, self.debug_enabled)
        except SQLAlchemyError:
            LOG.exception("SQLAlchemyError")
            raise
        except Exception:
            LOG.exception("UnexpectedError")
            raise
2025-12-16 23:00:51 +08:00
async def bulk_create_artifacts (
self ,
artifact_models : list [ ArtifactModel ] ,
) - > list [ Artifact ] :
"""
Bulk create multiple artifacts in a single database transaction .
Args :
artifact_models : List of ArtifactModel instances to insert
Returns :
List of created Artifact objects
"""
if not artifact_models :
return [ ]
try :
async with self . Session ( ) as session :
session . add_all ( artifact_models )
await session . commit ( )
# Refresh all artifacts to get their created_at and modified_at values
for artifact in artifact_models :
await session . refresh ( artifact )
return [ convert_to_artifact ( artifact , self . debug_enabled ) for artifact in artifact_models ]
except SQLAlchemyError :
LOG . exception ( " SQLAlchemyError during bulk artifact creation " )
raise
except Exception :
LOG . exception ( " UnexpectedError during bulk artifact creation " )
raise
2025-12-18 11:32:40 -07:00
@read_retry ( )
2024-03-01 10:09:30 -08:00
async def get_task ( self , task_id : str , organization_id : str | None = None ) - > Task | None :
""" Get a task by its id """
2025-12-18 11:32:40 -07:00
async with self . Session ( ) as session :
2026-02-04 16:15:39 +08:00
query = select ( TaskModel ) . filter_by ( task_id = task_id )
if organization_id is not None :
query = query . filter_by ( organization_id = organization_id )
if task_obj := ( await session . scalars ( query ) ) . first ( ) :
2025-12-18 11:32:40 -07:00
return convert_to_task ( task_obj , self . debug_enabled )
else :
LOG . info (
" Task not found " ,
task_id = task_id ,
organization_id = organization_id ,
)
return None
2024-03-01 10:09:30 -08:00
2024-12-22 20:54:53 -08:00
async def get_tasks_by_ids (
self ,
task_ids : list [ str ] ,
2026-02-04 16:15:39 +08:00
organization_id : str ,
2024-12-22 20:54:53 -08:00
) - > list [ Task ] :
try :
async with self . Session ( ) as session :
tasks = (
await session . scalars (
select ( TaskModel )
. filter ( TaskModel . task_id . in_ ( task_ids ) )
. filter_by ( organization_id = organization_id )
)
) . all ( )
return [ convert_to_task ( task , debug_enabled = self . debug_enabled ) for task in tasks ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2025-09-15 13:16:34 -07:00
async def get_step ( self , step_id : str , organization_id : str | None = None ) - > Step | None :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if step := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( StepModel ) . filter_by ( step_id = step_id ) . filter_by ( organization_id = organization_id )
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_step ( step , debug_enabled = self . debug_enabled )
else :
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2025-06-05 19:00:20 -04:00
async def get_task_steps ( self , task_id : str , organization_id : str ) - > list [ Step ] :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
if steps := (
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
. order_by ( StepModel . order )
. order_by ( StepModel . retry_index )
)
) . all ( ) :
2024-03-01 10:09:30 -08:00
return [ convert_to_step ( step , debug_enabled = self . debug_enabled ) for step in steps ]
else :
return [ ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-12-31 11:24:09 -08:00
async def get_steps_by_task_ids ( self , task_ids : list [ str ] , organization_id : str | None = None ) - > list [ Step ] :
try :
async with self . Session ( ) as session :
steps = (
await session . scalars (
select ( StepModel )
. filter ( StepModel . task_id . in_ ( task_ids ) )
. filter_by ( organization_id = organization_id )
)
) . all ( )
return [ convert_to_step ( step , debug_enabled = self . debug_enabled ) for step in steps ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2025-03-04 02:04:18 -05:00
async def get_total_unique_step_order_count_by_task_ids (
self ,
2025-06-13 10:44:04 -04:00
* ,
2025-03-04 02:04:18 -05:00
task_ids : list [ str ] ,
2025-06-13 10:44:04 -04:00
organization_id : str ,
2025-03-04 01:07:07 -05:00
) - > int :
2025-03-04 02:04:18 -05:00
"""
Get the total count of unique ( step . task_id , step . order ) pairs of StepModel for the given task ids
Basically translate this sql query into a SQLAlchemy query : select count ( distinct ( s . task_id , s . order ) ) from steps s
where s . task_id in task_ids
"""
2025-03-04 01:07:07 -05:00
try :
async with self . Session ( ) as session :
query = (
2025-03-04 03:22:16 -05:00
select ( func . count ( distinct ( tuple_ ( StepModel . task_id , StepModel . order ) ) ) )
2025-03-04 01:07:07 -05:00
. where ( StepModel . task_id . in_ ( task_ids ) )
2025-03-04 03:22:16 -05:00
. where ( StepModel . organization_id == organization_id )
2025-03-04 01:07:07 -05:00
)
2025-03-04 03:22:16 -05:00
return ( await session . execute ( query ) ) . scalar ( )
2025-03-04 01:07:07 -05:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-05-14 23:21:19 -07:00
async def get_task_step_models ( self , task_id : str , organization_id : str | None = None ) - > Sequence [ StepModel ] :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
return (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
. order_by ( StepModel . order )
. order_by ( StepModel . retry_index )
)
) . all ( )
2024-03-01 10:09:30 -08:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2026-01-07 11:41:57 -08:00
async def get_task_step_count ( self , task_id : str , organization_id : str | None = None ) - > int :
try :
async with self . Session ( ) as session :
result = await session . scalar (
select ( func . count ( StepModel . step_id ) )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
)
return result or 0
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-10-18 12:50:02 -07:00
async def get_task_actions ( self , task_id : str , organization_id : str | None = None ) - > list [ Action ] :
try :
async with self . Session ( ) as session :
query = (
select ( ActionModel )
. filter ( ActionModel . organization_id == organization_id )
. filter ( ActionModel . task_id == task_id )
2024-10-30 08:21:00 -07:00
. order_by ( ActionModel . created_at )
2024-10-18 12:50:02 -07:00
)
actions = ( await session . scalars ( query ) ) . all ( )
return [ Action . model_validate ( action ) for action in actions ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-12-07 21:19:31 -08:00
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2025-06-10 16:07:02 -07:00
async def get_task_actions_hydrated ( self , task_id : str , organization_id : str | None = None ) - > list [ Action ] :
try :
async with self . Session ( ) as session :
query = (
select ( ActionModel )
. filter ( ActionModel . organization_id == organization_id )
. filter ( ActionModel . task_id == task_id )
. order_by ( ActionModel . created_at )
)
actions = ( await session . scalars ( query ) ) . all ( )
return [ hydrate_action ( action ) for action in actions ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-12-17 17:17:18 -08:00
async def get_tasks_actions ( self , task_ids : list [ str ] , organization_id : str | None = None ) - > list [ Action ] :
try :
async with self . Session ( ) as session :
query = (
select ( ActionModel )
. filter ( ActionModel . organization_id == organization_id )
. filter ( ActionModel . task_id . in_ ( task_ids ) )
2026-02-05 12:13:45 -08:00
. order_by ( ActionModel . created_at . desc ( ) )
2024-12-17 17:17:18 -08:00
)
actions = ( await session . scalars ( query ) ) . all ( )
2026-02-05 11:42:29 -08:00
return [ hydrate_action ( action ) for action in actions ]
2024-12-17 17:17:18 -08:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2026-01-05 17:43:05 -08:00
async def get_action_count_for_step ( self , step_id : str , task_id : str , organization_id : str ) - > int :
""" Get count of actions for a step. Uses composite index for efficiency. """
try :
async with self . Session ( ) as session :
query = (
select ( func . count ( ) )
. select_from ( ActionModel )
. where ( ActionModel . organization_id == organization_id )
. where ( ActionModel . task_id == task_id )
. where ( ActionModel . step_id == step_id )
)
result = await session . scalar ( query )
return result or 0
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-12-07 21:19:31 -08:00
async def get_first_step ( self , task_id : str , organization_id : str | None = None ) - > Step | None :
try :
async with self . Session ( ) as session :
if step := (
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
. order_by ( StepModel . order . asc ( ) )
2025-01-08 13:08:36 +08:00
. order_by ( StepModel . retry_index . asc ( ) )
2024-12-07 21:19:31 -08:00
)
) . first ( ) :
return convert_to_step ( step , debug_enabled = self . debug_enabled )
else :
LOG . info (
" Latest step not found " ,
task_id = task_id ,
organization_id = organization_id ,
)
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-10-18 12:50:02 -07:00
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
async def get_latest_step ( self , task_id : str , organization_id : str | None = None ) - > Step | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if step := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
2025-11-16 15:01:40 -08:00
. filter ( StepModel . status != StepStatus . canceled )
2024-03-24 12:47:47 -07:00
. order_by ( StepModel . order . desc ( ) )
2024-12-18 13:35:42 +08:00
. order_by ( StepModel . retry_index . desc ( ) )
2024-03-24 12:47:47 -07:00
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_step ( step , debug_enabled = self . debug_enabled )
else :
2024-05-16 18:20:11 -07:00
LOG . info (
" Latest step not found " ,
task_id = task_id ,
organization_id = organization_id ,
)
2024-03-01 10:09:30 -08:00
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
    async def update_step(
        self,
        task_id: str,
        step_id: str,
        status: StepStatus | None = None,
        output: AgentStepOutput | None = None,
        is_last: bool | None = None,
        retry_index: int | None = None,
        organization_id: str | None = None,
        incremental_cost: float | None = None,
        incremental_input_tokens: int | None = None,
        incremental_output_tokens: int | None = None,
        incremental_reasoning_tokens: int | None = None,
        incremental_cached_tokens: int | None = None,
        created_by: str | None = None,
    ) -> Step:
        """
        Partially update a step: None arguments are skipped, and every
        incremental_* value is ADDED to the currently stored total rather
        than replacing it. Returns the re-fetched, updated Step.

        Raises:
            NotFoundError: when the step doesn't exist (before or after the update).
        """
        try:
            async with self.Session() as session:
                if step := (
                    await session.scalars(
                        select(StepModel)
                        .filter_by(task_id=task_id)
                        .filter_by(step_id=step_id)
                        .filter_by(organization_id=organization_id)
                    )
                ).first():
                    if status is not None:
                        step.status = status
                        # Stamp finished_at only once: on the first transition
                        # into a terminal status.
                        if status.is_terminal() and step.finished_at is None:
                            # NOTE(review): naive UTC timestamp (datetime.utcnow) —
                            # confirm the column convention before switching to aware datetimes.
                            step.finished_at = datetime.utcnow()
                    if output is not None:
                        # Persist the structured output, dropping None fields.
                        step.output = output.model_dump(exclude_none=True)
                    if is_last is not None:
                        step.is_last = is_last
                    if retry_index is not None:
                        step.retry_index = retry_index
                    # Cost/token counters accumulate onto the existing totals
                    # (missing stored values are treated as 0).
                    if incremental_cost is not None:
                        step.step_cost = incremental_cost + float(step.step_cost or 0)
                    if incremental_input_tokens is not None:
                        step.input_token_count = incremental_input_tokens + (step.input_token_count or 0)
                    if incremental_output_tokens is not None:
                        step.output_token_count = incremental_output_tokens + (step.output_token_count or 0)
                    if incremental_reasoning_tokens is not None:
                        step.reasoning_token_count = incremental_reasoning_tokens + (step.reasoning_token_count or 0)
                    if incremental_cached_tokens is not None:
                        step.cached_token_count = incremental_cached_tokens + (step.cached_token_count or 0)
                    if created_by is not None:
                        step.created_by = created_by

                    await session.commit()
                    # Re-fetch through the public getter so the caller gets a schema object.
                    updated_step = await self.get_step(step_id, organization_id)
                    if not updated_step:
                        raise NotFoundError("Step not found")
                    return updated_step
                else:
                    raise NotFoundError("Step not found")
        except SQLAlchemyError:
            LOG.error("SQLAlchemyError", exc_info=True)
            raise
        except NotFoundError:
            LOG.error("NotFoundError", exc_info=True)
            raise
        except Exception:
            LOG.error("UnexpectedError", exc_info=True)
            raise
2024-04-23 23:37:16 -07:00
async def clear_task_failure_reason(self, organization_id: str, task_id: str) -> Task:
    """Reset a task's failure_reason to None and return the refreshed task.

    Raises:
        NotFoundError: if no task matches the (task_id, organization_id) pair.
    """
    try:
        async with self.Session() as session:
            task = (
                await session.scalars(
                    select(TaskModel).filter_by(task_id=task_id).filter_by(organization_id=organization_id)
                )
            ).first()
            if task is None:
                raise NotFoundError("Task not found")
            task.failure_reason = None
            await session.commit()
            await session.refresh(task)
            return convert_to_task(task, debug_enabled=self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def update_task(
    self,
    task_id: str,
    status: TaskStatus | None = None,
    extracted_information: dict[str, Any] | list | str | None = None,
    webhook_failure_reason: str | None = None,
    failure_reason: str | None = None,
    errors: list[dict[str, Any]] | None = None,
    max_steps_per_run: int | None = None,
    organization_id: str | None = None,
) -> Task:
    """Update mutable fields of a task and return the refreshed task.

    Only fields passed as non-None are written. `errors` is appended to the
    task's existing error list rather than replacing it. Status transitions
    also stamp queued_at/started_at/finished_at the first time each state is
    reached.

    Raises:
        ValueError: if no updatable field was provided.
        NotFoundError: if no task matches the (task_id, organization_id) pair.
    """
    if (
        status is None
        and extracted_information is None
        and failure_reason is None
        and errors is None
        and max_steps_per_run is None
        and webhook_failure_reason is None
    ):
        # Bug fix: the message previously named only three of the six fields this
        # guard actually checks, misleading callers who passed e.g. only `errors`.
        raise ValueError(
            "At least one of status, extracted_information, failure_reason, errors, "
            "max_steps_per_run, or webhook_failure_reason must be provided to update the task"
        )
    try:
        async with self.Session() as session:
            if task := (
                await session.scalars(
                    select(TaskModel).filter_by(task_id=task_id).filter_by(organization_id=organization_id)
                )
            ).first():
                if status is not None:
                    task.status = status
                    # Record lifecycle timestamps only on the first transition into each state.
                    if status == TaskStatus.queued and task.queued_at is None:
                        task.queued_at = datetime.utcnow()
                    if status == TaskStatus.running and task.started_at is None:
                        task.started_at = datetime.utcnow()
                    if status.is_final() and task.finished_at is None:
                        task.finished_at = datetime.utcnow()
                if extracted_information is not None:
                    task.extracted_information = extracted_information
                if failure_reason is not None:
                    task.failure_reason = failure_reason
                if errors is not None:
                    # Accumulate errors instead of overwriting previous ones.
                    task.errors = (task.errors or []) + errors
                if max_steps_per_run is not None:
                    task.max_steps_per_run = max_steps_per_run
                if webhook_failure_reason is not None:
                    task.webhook_failure_reason = webhook_failure_reason
                await session.commit()
                updated_task = await self.get_task(task_id, organization_id=organization_id)
                if not updated_task:
                    raise NotFoundError("Task not found")
                return updated_task
            else:
                raise NotFoundError("Task not found")
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-12-20 03:18:50 +08:00
async def bulk_update_tasks(
    self,
    task_ids: list[str],
    status: TaskStatus | None = None,
    failure_reason: str | None = None,
) -> None:
    """Bulk update tasks by their IDs.

    Args:
        task_ids: List of task IDs to update
        status: Optional status to set for all tasks
        failure_reason: Optional failure reason to set for all tasks
    """
    if not task_ids:
        return
    values: dict[str, Any] = {}
    if status:
        values["status"] = status.value
    if failure_reason:
        values["failure_reason"] = failure_reason
    if not values:
        # Nothing to write; skip the round-trip entirely.
        return
    async with self.Session() as session:
        await session.execute(update(TaskModel).where(TaskModel.task_id.in_(task_ids)).values(**values))
        await session.commit()
2024-04-24 20:39:19 +03:00
async def get_tasks(
    self,
    page: int = 1,
    page_size: int = 10,
    task_status: list[TaskStatus] | None = None,
    workflow_run_id: str | None = None,
    organization_id: str | None = None,
    only_standalone_tasks: bool = False,
    application: str | None = None,
    order_by_column: OrderBy = OrderBy.created_at,
    order: SortDirection = SortDirection.desc,
) -> list[Task]:
    """Fetch one page of tasks for an organization.

    Args:
        page: 1-based page number.
        page_size: Number of tasks per page.
        task_status: Optional list of statuses to include.
        workflow_run_id: Restrict to tasks of one workflow run.
        only_standalone_tasks: When True, only tasks with no workflow run.
        application: Optional application filter.
        order_by_column: Column to sort by.
        order: Sort direction.

    Raises:
        ValueError: if page < 1.
    """
    if page < 1:
        raise ValueError(f"Page must be greater than 0, got {page}")
    try:
        async with self.Session() as session:
            offset = (page - 1) * page_size  # API pages are 1-based; SQL offsets are 0-based
            # Left join so standalone tasks (no workflow run) still come back.
            query = (
                select(TaskModel, WorkflowRunModel.workflow_permanent_id)
                .join(WorkflowRunModel, TaskModel.workflow_run_id == WorkflowRunModel.workflow_run_id, isouter=True)
                .filter(TaskModel.organization_id == organization_id)
            )
            if task_status:
                query = query.filter(TaskModel.status.in_(task_status))
            if workflow_run_id:
                query = query.filter(TaskModel.workflow_run_id == workflow_run_id)
            if only_standalone_tasks:
                query = query.filter(TaskModel.workflow_run_id.is_(None))
            if application:
                query = query.filter(TaskModel.application == application)
            sort_column = getattr(TaskModel, order_by_column)
            sort_expr = sort_column.desc() if order == SortDirection.desc else sort_column.asc()
            query = query.order_by(sort_expr).limit(page_size).offset(offset)
            rows = (await session.execute(query)).all()
            return [
                convert_to_task(task, debug_enabled=self.debug_enabled, workflow_permanent_id=wpid)
                for task, wpid in rows
            ]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def get_tasks_count(
    self,
    organization_id: str,
    task_status: list[TaskStatus] | None = None,
    workflow_run_id: str | None = None,
    only_standalone_tasks: bool = False,
    application: str | None = None,
) -> int:
    """Count tasks for an organization using the same filters as get_tasks."""
    try:
        async with self.Session() as session:
            query = select(func.count()).select_from(TaskModel).filter(TaskModel.organization_id == organization_id)
            if task_status:
                query = query.filter(TaskModel.status.in_(task_status))
            if workflow_run_id:
                query = query.filter(TaskModel.workflow_run_id == workflow_run_id)
            if only_standalone_tasks:
                query = query.filter(TaskModel.workflow_run_id.is_(None))
            if application:
                query = query.filter(TaskModel.application == application)
            return (await session.execute(query)).scalar_one()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-04-30 13:21:10 -04:00
async def get_all_organizations(self) -> list[Organization]:
    """Return every organization row as a domain model."""
    try:
        async with self.Session() as session:
            rows = (await session.scalars(select(OrganizationModel))).all()
            return [convert_to_organization(row) for row in rows]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2024-03-01 10:09:30 -08:00
async def get_organization(self, organization_id: str) -> Organization | None:
    """Look up a single organization by ID; return None when it does not exist."""
    try:
        async with self.Session() as session:
            org = (
                await session.scalars(select(OrganizationModel).filter_by(organization_id=organization_id))
            ).first()
            return convert_to_organization(org) if org else None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2024-04-23 15:02:56 -07:00
async def get_organization_by_domain(self, domain: str) -> Organization | None:
    """Return the organization registered under the given domain, or None."""
    async with self.Session() as session:
        org = (await session.scalars(select(OrganizationModel).filter_by(domain=domain))).first()
        return convert_to_organization(org) if org else None
2024-03-01 10:09:30 -08:00
async def create_organization(
    self,
    organization_name: str,
    webhook_callback_url: str | None = None,
    max_steps_per_run: int | None = None,
    max_retries_per_step: int | None = None,
    domain: str | None = None,
) -> Organization:
    """Insert a new organization row and return it as a domain model."""
    new_org = OrganizationModel(
        organization_name=organization_name,
        webhook_callback_url=webhook_callback_url,
        max_steps_per_run=max_steps_per_run,
        max_retries_per_step=max_retries_per_step,
        domain=domain,
    )
    async with self.Session() as session:
        session.add(new_org)
        await session.commit()
        # Refresh to pick up DB-generated fields (IDs, timestamps) before converting.
        await session.refresh(new_org)
        return convert_to_organization(new_org)
2024-06-16 19:42:20 -07:00
async def update_organization(
    self,
    organization_id: str,
    organization_name: str | None = None,
    webhook_callback_url: str | None = None,
    max_steps_per_run: int | None = None,
    max_retries_per_step: int | None = None,
) -> Organization:
    """Update the provided fields of an organization and return the refreshed record.

    Raises:
        NotFoundError: if the organization does not exist.
    """
    async with self.Session() as session:
        org = (
            await session.scalars(select(OrganizationModel).filter_by(organization_id=organization_id))
        ).first()
        if org is None:
            raise NotFoundError
        # NOTE(review): truthiness checks mean falsy values (e.g. 0) are skipped,
        # so these fields cannot be cleared through this method — confirm intended.
        if organization_name:
            org.organization_name = organization_name
        if webhook_callback_url:
            org.webhook_callback_url = webhook_callback_url
        if max_steps_per_run:
            org.max_steps_per_run = max_steps_per_run
        if max_retries_per_step:
            org.max_retries_per_step = max_retries_per_step
        await session.commit()
        await session.refresh(org)
        return Organization.model_validate(org)
2025-09-23 10:16:48 -06:00
@overload
async def get_valid_org_auth_token(
    self,
    organization_id: str,
    token_type: Literal["api", "onepassword_service_account", "custom_credential_service"],
) -> OrganizationAuthToken | None: ...

@overload
async def get_valid_org_auth_token(  # type: ignore
    self,
    organization_id: str,
    token_type: Literal["azure_client_secret_credential"],
) -> AzureOrganizationAuthToken | None: ...

async def get_valid_org_auth_token(
    self,
    organization_id: str,
    token_type: Literal[
        "api", "onepassword_service_account", "azure_client_secret_credential", "custom_credential_service"
    ],
) -> OrganizationAuthToken | AzureOrganizationAuthToken | None:
    """Return the newest valid auth token of the given type for an organization.

    The overloads narrow the return type: azure_client_secret_credential tokens
    come back as AzureOrganizationAuthToken, all other types as
    OrganizationAuthToken. Returns None when no valid token exists.
    """
    try:
        async with self.Session() as session:
            # Newest-first so .first() yields the most recently created valid token.
            if token := (
                await session.scalars(
                    select(OrganizationAuthTokenModel)
                    .filter_by(organization_id=organization_id)
                    .filter_by(token_type=token_type)
                    .filter_by(valid=True)
                    .order_by(OrganizationAuthTokenModel.created_at.desc())
                )
            ).first():
                return await convert_to_organization_auth_token(token, token_type)
            else:
                return None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2024-06-28 18:10:13 -07:00
async def get_valid_org_auth_tokens(
    self,
    organization_id: str,
    token_type: OrganizationAuthTokenType,
) -> list[OrganizationAuthToken]:
    """Return all valid tokens of the given type for an organization, newest first."""
    try:
        async with self.Session() as session:
            query = (
                select(OrganizationAuthTokenModel)
                .filter_by(organization_id=organization_id)
                .filter_by(token_type=token_type)
                .filter_by(valid=True)
                .order_by(OrganizationAuthTokenModel.created_at.desc())
            )
            rows = (await session.scalars(query)).all()
            return [await convert_to_organization_auth_token(row, token_type) for row in rows]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2024-03-01 10:09:30 -08:00
async def validate_org_auth_token(
    self,
    organization_id: str,
    token_type: OrganizationAuthTokenType,
    token: str,
    valid: bool | None = True,
    encrypted_method: EncryptMethod | None = None,
) -> OrganizationAuthToken | None:
    """Find the stored token matching the given plaintext and return it, or None.

    When encrypted_method is supplied, the plaintext is encrypted first and the
    lookup runs against the encrypted_token column; otherwise the plaintext
    token column is compared directly. Pass valid=None to match regardless of
    validity.
    """
    try:
        encrypted_token = ""
        if encrypted_method is not None:
            encrypted_token = await encryptor.encrypt(token, encrypted_method)
        async with self.Session() as session:
            query = (
                select(OrganizationAuthTokenModel)
                .filter_by(organization_id=organization_id)
                .filter_by(token_type=token_type)
            )
            if encrypted_token:
                query = query.filter_by(encrypted_token=encrypted_token)
            else:
                query = query.filter_by(token=token)
            if valid is not None:
                query = query.filter_by(valid=valid)
            row = (await session.scalars(query)).first()
            return await convert_to_organization_auth_token(row, token_type) if row else None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def create_org_auth_token(
    self,
    organization_id: str,
    token_type: OrganizationAuthTokenType,
    token: str | AzureClientSecretCredential,
    encrypted_method: EncryptMethod | None = None,
) -> OrganizationAuthToken:
    """Persist a new organization auth token and return it.

    Azure client-secret credentials are serialized to JSON before storage.
    When an encryption method is given, only the ciphertext is stored and the
    plaintext column is left blank.

    Raises:
        TypeError: if the token value's type does not match the token_type.
    """
    if token_type is OrganizationAuthTokenType.azure_client_secret_credential:
        if not isinstance(token, AzureClientSecretCredential):
            raise TypeError("Expected AzureClientSecretCredential for this token_type")
        plaintext = token.model_dump_json()
    else:
        if not isinstance(token, str):
            raise TypeError("Expected str token for this token_type")
        plaintext = token

    encrypted = ""
    if encrypted_method is not None:
        encrypted = await encryptor.encrypt(plaintext, encrypted_method)
        plaintext = ""  # never keep plaintext alongside the ciphertext

    async with self.Session() as session:
        row = OrganizationAuthTokenModel(
            organization_id=organization_id,
            token_type=token_type,
            token=plaintext,
            encrypted_token=encrypted,
            encrypted_method=encrypted_method.value if encrypted_method is not None else "",
        )
        session.add(row)
        await session.commit()
        await session.refresh(row)
        return await convert_to_organization_auth_token(row, token_type)
2025-08-05 07:34:26 -07:00
async def invalidate_org_auth_tokens(
    self,
    organization_id: str,
    token_type: OrganizationAuthTokenType,
) -> None:
    """Invalidate all existing tokens of a specific type for an organization."""
    try:
        async with self.Session() as session:
            stmt = (
                update(OrganizationAuthTokenModel)
                .filter_by(organization_id=organization_id)
                .filter_by(token_type=token_type)
                .filter_by(valid=True)
                .values(valid=False)
            )
            await session.execute(stmt)
            await session.commit()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-02-23 16:03:49 -08:00
async def get_artifacts_for_task_v2(
    self,
    task_v2_id: str,
    organization_id: str | None = None,
    artifact_types: list[ArtifactType] | None = None,
) -> list[Artifact]:
    """List artifacts for a v2 task (stored under observer_cruise_id), oldest first."""
    try:
        async with self.Session() as session:
            query = (
                select(ArtifactModel)
                .filter_by(observer_cruise_id=task_v2_id)
                .filter_by(organization_id=organization_id)
            )
            if artifact_types:
                query = query.filter(ArtifactModel.artifact_type.in_(artifact_types))
            rows = (await session.scalars(query.order_by(ArtifactModel.created_at))).all()
            return [convert_to_artifact(row, self.debug_enabled) for row in rows]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2024-03-01 10:09:30 -08:00
async def get_artifacts_for_task_step(
    self,
    task_id: str,
    step_id: str,
    organization_id: str | None = None,
) -> list[Artifact]:
    """List all artifacts for one task step, ordered oldest first."""
    try:
        async with self.Session() as session:
            query = (
                select(ArtifactModel)
                .filter_by(task_id=task_id)
                .filter_by(step_id=step_id)
                .filter_by(organization_id=organization_id)
                .order_by(ArtifactModel.created_at)
            )
            rows = (await session.scalars(query)).all()
            return [convert_to_artifact(row, self.debug_enabled) for row in rows]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-06-16 20:00:19 -04:00
async def get_artifacts_for_run(
    self,
    run_id: str,
    organization_id: str,
    artifact_types: list[ArtifactType] | None = None,
    group_by_type: bool = False,
    sort_by: str = "created_at",
) -> dict[ArtifactType, list[Artifact]] | list[Artifact]:
    """Return artifacts associated with a run.

    Args:
        run_id: The ID of the run to get artifacts for.
        organization_id: The ID of the organization that owns the run.
        artifact_types: Optional list of artifact types to filter by.
        group_by_type: When True, return a dict keyed by artifact type instead of a flat list.
        sort_by: Field to sort by; one of 'created_at', 'step_id', 'task_id'.

    Returns:
        A dict mapping artifact types to artifact lists when group_by_type is True,
        otherwise a list of artifacts sorted by the requested field.

    Raises:
        ValueError: If sort_by is not one of the allowed values.
    """
    allowed_sort_fields = {"created_at", "step_id", "task_id"}
    if sort_by not in allowed_sort_fields:
        raise ValueError(f"sort_by must be one of {allowed_sort_fields}")
    run = await self.get_run(run_id, organization_id=organization_id)
    if not run:
        return []
    async with self.Session() as session:
        query = select(ArtifactModel).filter_by(organization_id=organization_id).filter_by(run_id=run.run_id)
        if artifact_types:
            query = query.filter(ArtifactModel.artifact_type.in_(artifact_types))
        # Secondary created_at key keeps ordering deterministic within a step/task.
        ordering = {
            "created_at": (ArtifactModel.created_at,),
            "step_id": (ArtifactModel.step_id, ArtifactModel.created_at),
            "task_id": (ArtifactModel.task_id, ArtifactModel.created_at),
        }[sort_by]
        query = query.order_by(*ordering)
        artifacts = [convert_to_artifact(row, self.debug_enabled) for row in (await session.scalars(query)).all()]
        if not group_by_type:
            return artifacts
        grouped: dict[ArtifactType, list[Artifact]] = {}
        for artifact in artifacts:
            grouped.setdefault(artifact.artifact_type, []).append(artifact)
        return grouped
2024-03-01 10:09:30 -08:00
async def get_artifact_by_id(
    self,
    artifact_id: str,
    organization_id: str,
) -> Artifact | None:
    """Fetch a single artifact by ID within an organization; None when absent."""
    try:
        async with self.Session() as session:
            row = (
                await session.scalars(
                    select(ArtifactModel)
                    .filter_by(artifact_id=artifact_id)
                    .filter_by(organization_id=organization_id)
                )
            ).first()
            return convert_to_artifact(row, self.debug_enabled) if row else None
    except SQLAlchemyError:
        LOG.exception("SQLAlchemyError")
        raise
    except Exception:
        LOG.exception("UnexpectedError")
        raise
2026-01-20 22:49:33 -08:00
async def get_artifacts_by_ids(
    self,
    artifact_ids: list[str],
    organization_id: str,
) -> list[Artifact]:
    """Fetch all artifacts whose IDs appear in artifact_ids for the organization."""
    if not artifact_ids:
        return []
    try:
        async with self.Session() as session:
            query = (
                select(ArtifactModel)
                .filter(ArtifactModel.artifact_id.in_(artifact_ids))
                .filter_by(organization_id=organization_id)
            )
            rows = (await session.scalars(query)).all()
            return [convert_to_artifact(row, self.debug_enabled) for row in rows]
    except SQLAlchemyError:
        LOG.exception("SQLAlchemyError")
        raise
    except Exception:
        LOG.exception("UnexpectedError")
        raise
2024-12-18 00:32:38 +01:00
async def get_artifacts_by_entity_id(
    self,
    *,
    organization_id: str | None,
    artifact_type: ArtifactType | None = None,
    task_id: str | None = None,
    step_id: str | None = None,
    workflow_run_id: str | None = None,
    workflow_run_block_id: str | None = None,
    thought_id: str | None = None,
    task_v2_id: str | None = None,
    limit: int | None = None,
) -> list[Artifact]:
    """Return artifacts filtered by any combination of owning-entity IDs, newest first.

    Only the filters whose argument is not None are applied. thought_id and
    task_v2_id map to the legacy observer_thought_id/observer_cruise_id columns.
    """
    try:
        async with self.Session() as session:
            query = select(ArtifactModel)
            # Apply only the filters that were actually provided.
            column_filters = {
                "artifact_type": artifact_type,
                "task_id": task_id,
                "step_id": step_id,
                "workflow_run_id": workflow_run_id,
                "workflow_run_block_id": workflow_run_block_id,
                "observer_thought_id": thought_id,
                "observer_cruise_id": task_v2_id,
            }
            for column, value in column_filters.items():
                if value is not None:
                    query = query.filter_by(**{column: value})
            # Backward compatibility: old artifact rows were stored with organization_id NULL.
            if organization_id is not None:
                query = query.filter(
                    or_(ArtifactModel.organization_id == organization_id, ArtifactModel.organization_id.is_(None))
                )
            query = query.order_by(ArtifactModel.created_at.desc())
            if limit is not None:
                query = query.limit(limit)
            artifacts = (await session.scalars(query)).all()
            LOG.debug("Artifacts fetched", count=len(artifacts))
            return [convert_to_artifact(a, self.debug_enabled) for a in artifacts]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def get_artifact_by_entity_id(
    self,
    *,
    artifact_type: ArtifactType,
    organization_id: str,
    task_id: str | None = None,
    step_id: str | None = None,
    workflow_run_id: str | None = None,
    workflow_run_block_id: str | None = None,
    thought_id: str | None = None,
    task_v2_id: str | None = None,
) -> Artifact | None:
    """Return the newest artifact matching the given entity filters, or None."""
    matches = await self.get_artifacts_by_entity_id(
        organization_id=organization_id,
        artifact_type=artifact_type,
        task_id=task_id,
        step_id=step_id,
        workflow_run_id=workflow_run_id,
        workflow_run_block_id=workflow_run_block_id,
        thought_id=thought_id,
        task_v2_id=task_v2_id,
        limit=1,  # only the newest row is needed
    )
    return next(iter(matches), None)
2024-03-01 10:09:30 -08:00
async def get_artifact(
    self,
    task_id: str,
    step_id: str,
    artifact_type: ArtifactType,
    organization_id: str | None = None,
) -> Artifact | None:
    """Return the newest artifact of a given type for a task step, or None."""
    try:
        async with self.Session() as session:
            query = (
                select(ArtifactModel)
                .filter_by(task_id=task_id)
                .filter_by(step_id=step_id)
                .filter_by(organization_id=organization_id)
                .filter_by(artifact_type=artifact_type)
                .order_by(ArtifactModel.created_at.desc())
            )
            row = (await session.scalars(query)).first()
            return convert_to_artifact(row, self.debug_enabled) if row else None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-08-27 12:08:37 +08:00
async def get_artifact_for_run(
    self,
    run_id: str,
    artifact_type: ArtifactType,
    organization_id: str | None = None,
) -> Artifact | None:
    """Return the newest artifact of a given type attached to a run, or None."""
    try:
        async with self.Session() as session:
            query = (
                select(ArtifactModel)
                .filter(ArtifactModel.run_id == run_id)
                .filter(ArtifactModel.artifact_type == artifact_type)
                .filter(ArtifactModel.organization_id == organization_id)
                .order_by(ArtifactModel.created_at.desc())
            )
            row = (await session.scalars(query)).first()
            return convert_to_artifact(row, self.debug_enabled) if row else None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def get_latest_artifact(
    self,
    task_id: str,
    step_id: str | None = None,
    artifact_types: list[ArtifactType] | None = None,
    organization_id: str | None = None,
) -> Artifact | None:
    """Return the single most recent matching artifact for a task, or None."""
    try:
        latest = await self.get_latest_n_artifacts(
            task_id=task_id,
            step_id=step_id,
            artifact_types=artifact_types,
            organization_id=organization_id,
            n=1,
        )
        return latest[0] if latest else None
    except SQLAlchemyError:
        LOG.exception("SQLAlchemyError")
        raise
    except Exception:
        LOG.exception("UnexpectedError")
        raise
async def get_latest_n_artifacts(
    self,
    task_id: str,
    step_id: str | None = None,
    artifact_types: list[ArtifactType] | None = None,
    organization_id: str | None = None,
    n: int = 1,
) -> list[Artifact] | None:
    """Return up to n most recent artifacts for a task.

    NOTE(review): returns None (not an empty list) when nothing matches —
    callers appear to rely on this distinction.
    """
    try:
        async with self.Session() as session:
            query = select(ArtifactModel).filter_by(task_id=task_id)
            if organization_id:
                query = query.filter_by(organization_id=organization_id)
            if step_id:
                query = query.filter_by(step_id=step_id)
            if artifact_types:
                query = query.filter(ArtifactModel.artifact_type.in_(artifact_types))
            rows = (await session.scalars(query.order_by(ArtifactModel.created_at.desc()))).fetchmany(n)
            if not rows:
                return None
            return [convert_to_artifact(row, self.debug_enabled) for row in rows]
    except SQLAlchemyError:
        LOG.exception("SQLAlchemyError")
        raise
    except Exception:
        LOG.exception("UnexpectedError")
        raise
async def get_latest_task_by_workflow_id(
    self,
    organization_id: str,
    workflow_id: str,
    before: datetime | None = None,
) -> Task | None:
    """Return the most recently created task for a workflow, or None.

    Args:
        organization_id: Owning organization.
        workflow_id: Workflow whose latest task is wanted.
        before: When provided, only tasks created strictly before this time
            are considered.
    """
    try:
        async with self.Session() as session:
            query = select(TaskModel).filter_by(organization_id=organization_id).filter_by(workflow_id=workflow_id)
            if before:
                query = query.filter(TaskModel.created_at < before)
            task = (await session.scalars(query.order_by(TaskModel.created_at.desc()))).first()
            if task:
                return convert_to_task(task, debug_enabled=self.debug_enabled)
            return None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        # Consistency fix: every sibling query method logs unexpected errors before
        # re-raising; this one previously let non-SQLAlchemy errors escape unlogged.
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def create_workflow(
    self,
    title: str,
    workflow_definition: dict[str, Any],
    organization_id: str | None = None,
    description: str | None = None,
    proxy_location: ProxyLocationInput = None,
    webhook_callback_url: str | None = None,
    max_screenshot_scrolling_times: int | None = None,
    extra_http_headers: dict[str, str] | None = None,
    totp_verification_url: str | None = None,
    totp_identifier: str | None = None,
    persist_browser_session: bool = False,
    model: dict[str, Any] | None = None,
    workflow_permanent_id: str | None = None,
    version: int | None = None,
    is_saved_task: bool = False,
    status: WorkflowStatus = WorkflowStatus.published,
    run_with: str | None = None,
    ai_fallback: bool = False,
    cache_key: str | None = None,
    run_sequentially: bool = False,
    sequential_key: str | None = None,
    folder_id: str | None = None,
) -> Workflow:
    """Create and persist a workflow row, returning it as a `Workflow`.

    When `folder_id` is given, the folder must exist in the same
    organization and not be soft-deleted; its `modified_at` timestamp is
    bumped in the same transaction so folder listings (ordered by recency)
    reflect the new workflow.

    Raises:
        ValueError: if `folder_id` does not resolve to a live folder in the
            given organization (nothing is committed in that case).
    """
    async with self.Session() as session:
        workflow = WorkflowModel(
            organization_id=organization_id,
            title=title,
            description=description,
            workflow_definition=workflow_definition,
            # Proxy location is normalized to its stored representation.
            proxy_location=_serialize_proxy_location(proxy_location),
            webhook_callback_url=webhook_callback_url,
            totp_verification_url=totp_verification_url,
            totp_identifier=totp_identifier,
            max_screenshot_scrolling_times=max_screenshot_scrolling_times,
            extra_http_headers=extra_http_headers,
            persist_browser_session=persist_browser_session,
            model=model,
            is_saved_task=is_saved_task,
            status=status,
            run_with=run_with,
            ai_fallback=ai_fallback,
            # Fall back to the default script run id when no cache key is given.
            cache_key=cache_key or DEFAULT_SCRIPT_RUN_ID,
            run_sequentially=run_sequentially,
            sequential_key=sequential_key,
            folder_id=folder_id,
        )
        # Callers may pin the permanent id / version (e.g. when creating a new
        # version of an existing workflow) instead of using DB defaults.
        if workflow_permanent_id:
            workflow.workflow_permanent_id = workflow_permanent_id
        if version:
            workflow.version = version
        session.add(workflow)
        # Update folder's modified_at if folder_id is provided
        if folder_id:
            # Validate folder exists and belongs to the same organization
            folder_stmt = (
                select(FolderModel)
                .where(FolderModel.folder_id == folder_id)
                .where(FolderModel.organization_id == organization_id)
                .where(FolderModel.deleted_at.is_(None))
            )
            folder_model = await session.scalar(folder_stmt)
            if not folder_model:
                # Session exits without commit, so the pending workflow row is discarded.
                raise ValueError(
                    f"Folder {folder_id} not found or does not belong to organization {organization_id}"
                )
            folder_model.modified_at = datetime.utcnow()
        await session.commit()
        await session.refresh(workflow)
        return convert_to_workflow(workflow, self.debug_enabled)
2024-09-19 11:15:07 -07:00
async def soft_delete_workflow_by_id(self, workflow_id: str, organization_id: str) -> None:
    """Mark one workflow row as deleted by stamping its deleted_at column."""
    try:
        async with self.Session() as session:
            stamp_query = (
                update(WorkflowModel)
                .where(
                    WorkflowModel.workflow_id == workflow_id,
                    WorkflowModel.organization_id == organization_id,
                    # Only stamp rows that are not already soft-deleted.
                    WorkflowModel.deleted_at.is_(None),
                )
                .values(deleted_at=datetime.utcnow())
            )
            await session.execute(stamp_query)
            await session.commit()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in soft_delete_workflow_by_id", exc_info=True)
        raise
2024-05-15 08:43:36 -07:00
async def get_workflow(self, workflow_id: str, organization_id: str | None = None) -> Workflow | None:
    """Fetch a non-deleted workflow by primary id, optionally org-scoped."""
    try:
        async with self.Session() as session:
            query = select(WorkflowModel).filter(
                WorkflowModel.workflow_id == workflow_id,
                WorkflowModel.deleted_at.is_(None),
            )
            if organization_id:
                query = query.filter(WorkflowModel.organization_id == organization_id)
            workflow = (await session.scalars(query)).first()
            if workflow is None:
                return None
            # Template lookup is only meaningful within an organization scope.
            is_template = False
            if organization_id:
                is_template = await self.is_workflow_template(
                    workflow_permanent_id=workflow.workflow_permanent_id,
                    organization_id=workflow.organization_id,
                )
            return convert_to_workflow(workflow, self.debug_enabled, is_template=is_template)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2024-05-16 10:51:22 -07:00
async def get_workflow_by_permanent_id(
    self,
    workflow_permanent_id: str,
    organization_id: str | None = None,
    version: int | None = None,
    ignore_version: int | None = None,
    exclude_deleted: bool = True,
) -> Workflow | None:
    """Fetch the highest-version workflow for a permanent id, with filters."""
    try:
        query = select(WorkflowModel).filter_by(workflow_permanent_id=workflow_permanent_id)
        if exclude_deleted:
            query = query.filter(WorkflowModel.deleted_at.is_(None))
        if organization_id:
            query = query.filter_by(organization_id=organization_id)
        if version:
            query = query.filter_by(version=version)
        if ignore_version:
            # Callers can exclude one specific version from consideration.
            query = query.filter(WorkflowModel.version != ignore_version)
        query = query.order_by(WorkflowModel.version.desc())
        async with self.Session() as session:
            workflow = (await session.scalars(query)).first()
            if workflow is None:
                return None
            is_template = False
            if organization_id:
                is_template = await self.is_workflow_template(
                    workflow_permanent_id=workflow.workflow_permanent_id,
                    organization_id=workflow.organization_id,
                )
            return convert_to_workflow(workflow, self.debug_enabled, is_template=is_template)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-11-04 18:30:17 -05:00
async def get_workflow_for_workflow_run(
    self,
    workflow_run_id: str,
    organization_id: str | None = None,
    exclude_deleted: bool = True,
) -> Workflow | None:
    """Resolve the workflow that a given workflow run was created from."""
    try:
        query = select(WorkflowModel)
        if exclude_deleted:
            query = query.filter(WorkflowModel.deleted_at.is_(None))
        # Join through workflow runs so we can look up by run id.
        query = query.join(
            WorkflowRunModel,
            WorkflowRunModel.workflow_id == WorkflowModel.workflow_id,
        )
        if organization_id:
            query = query.filter(WorkflowRunModel.organization_id == organization_id)
        query = query.filter(WorkflowRunModel.workflow_run_id == workflow_run_id)
        async with self.Session() as session:
            workflow = (await session.scalars(query)).first()
            if workflow is None:
                return None
            is_template = False
            if organization_id:
                is_template = await self.is_workflow_template(
                    workflow_permanent_id=workflow.workflow_permanent_id,
                    organization_id=workflow.organization_id,
                )
            return convert_to_workflow(workflow, self.debug_enabled, is_template=is_template)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-09-21 02:48:27 -04:00
async def get_workflow_versions_by_permanent_id(
    self,
    workflow_permanent_id: str,
    organization_id: str | None = None,
    exclude_deleted: bool = True,
) -> list[Workflow]:
    """
    Get all versions of a workflow by its permanent ID, ordered by version descending (newest first).
    """
    try:
        query = select(WorkflowModel).filter_by(workflow_permanent_id=workflow_permanent_id)
        if exclude_deleted:
            query = query.filter(WorkflowModel.deleted_at.is_(None))
        if organization_id:
            query = query.filter_by(organization_id=organization_id)
        query = query.order_by(WorkflowModel.version.desc())
        async with self.Session() as session:
            versions = (await session.scalars(query)).all()
            # Template lookups are org-scoped; without an org every row is non-template.
            template_ids: set[str] = set()
            if versions and organization_id:
                template_ids = await self.get_org_template_permanent_ids(organization_id)
            results: list[Workflow] = []
            for row in versions:
                results.append(
                    convert_to_workflow(
                        row,
                        self.debug_enabled,
                        is_template=row.workflow_permanent_id in template_ids,
                    )
                )
            return results
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-01-28 15:04:18 +08:00
async def get_workflows_by_permanent_ids(
    self,
    workflow_permanent_ids: list[str],
    organization_id: str | None = None,
    page: int = 1,
    page_size: int = 10,
    title: str = "",
    statuses: list[WorkflowStatus] | None = None,
) -> list[Workflow]:
    """
    Get all workflows with the latest version for the organization.
    """
    if page < 1:
        raise ValueError(f"Page must be greater than 0, got {page}")
    db_page = page - 1
    try:
        async with self.Session() as session:
            # Highest version per requested permanent id, ignoring soft-deleted rows.
            latest_versions = (
                select(
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.workflow_permanent_id.in_(workflow_permanent_ids))
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(WorkflowModel.workflow_permanent_id)
                .subquery()
            )
            query = select(WorkflowModel).join(
                latest_versions,
                (WorkflowModel.workflow_permanent_id == latest_versions.c.workflow_permanent_id)
                & (WorkflowModel.version == latest_versions.c.max_version),
            )
            if organization_id:
                query = query.where(WorkflowModel.organization_id == organization_id)
            if title:
                query = query.where(WorkflowModel.title.ilike(f"%{title}%"))
            if statuses:
                query = query.where(WorkflowModel.status.in_(statuses))
            query = query.order_by(WorkflowModel.created_at.desc()).limit(page_size).offset(db_page * page_size)
            workflows = (await session.scalars(query)).all()
            # Resolve which permanent ids are templates so responses surface is_template.
            template_permanent_ids: set[str] = set()
            if workflows and organization_id:
                template_permanent_ids = await self.get_org_template_permanent_ids(organization_id)
            return [
                convert_to_workflow(
                    wf,
                    self.debug_enabled,
                    is_template=wf.workflow_permanent_id in template_permanent_ids,
                )
                for wf in workflows
            ]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2024-05-16 10:51:22 -07:00
async def get_workflows_by_organization_id(
    self,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
    only_saved_tasks: bool = False,
    only_workflows: bool = False,
    only_templates: bool = False,
    search_key: str | None = None,
    folder_id: str | None = None,
    statuses: list[WorkflowStatus] | None = None,
) -> list[Workflow]:
    """
    Get all workflows with the latest version for the organization.

    Search semantics:
    - If `search_key` is provided, its value is used as a unified search term for
      `workflows.title`, `folders.title`, and workflow parameter metadata (key, description, and default_value).
    - If `search_key` is not provided, no search filtering is applied.
    - Parameter metadata search excludes soft-deleted parameter rows across parameter tables.

    Raises:
        ValueError: if `page` is less than 1.
    """
    if page < 1:
        raise ValueError(f"Page must be greater than 0, got {page}")
    db_page = page - 1
    try:
        async with self.Session() as session:
            # Latest (max) version per permanent id for this organization.
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )
            # Join back to the full rows of only the latest versions; outer-join
            # folders so folder titles can participate in search.
            main_query = (
                select(WorkflowModel)
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .outerjoin(
                    FolderModel,
                    (WorkflowModel.folder_id == FolderModel.folder_id)
                    & (FolderModel.organization_id == WorkflowModel.organization_id),
                )
            )
            if only_saved_tasks:
                main_query = main_query.where(WorkflowModel.is_saved_task.is_(True))
            elif only_workflows:
                main_query = main_query.where(WorkflowModel.is_saved_task.is_(False))
            if only_templates:
                # Filter by workflow_templates table (templates at permanent_id level)
                template_subquery = select(WorkflowTemplateModel.workflow_permanent_id).where(
                    WorkflowTemplateModel.organization_id == organization_id,
                    WorkflowTemplateModel.deleted_at.is_(None),
                )
                main_query = main_query.where(WorkflowModel.workflow_permanent_id.in_(template_subquery))
            if statuses:
                main_query = main_query.where(WorkflowModel.status.in_(statuses))
            if folder_id:
                main_query = main_query.where(WorkflowModel.folder_id == folder_id)
            if search_key:
                search_like = f"%{search_key}%"
                title_like = WorkflowModel.title.ilike(search_like)
                folder_title_like = FolderModel.title.ilike(search_like)
                parameter_filters = self._build_parameter_search_filters(search_like)
                main_query = main_query.where(or_(title_like, folder_title_like, or_(*parameter_filters)))
            main_query = (
                main_query.order_by(WorkflowModel.created_at.desc()).limit(page_size).offset(db_page * page_size)
            )
            workflows = (await session.scalars(main_query)).all()
            # Map template status by permanent_id so API responses surface is_template
            template_permanent_ids: set[str] = set()
            if workflows and organization_id:
                template_permanent_ids = await self.get_org_template_permanent_ids(organization_id)
            return [
                convert_to_workflow(
                    workflow,
                    self.debug_enabled,
                    is_template=workflow.workflow_permanent_id in template_permanent_ids,
                )
                for workflow in workflows
            ]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise

@staticmethod
def _build_parameter_search_filters(search_like: str) -> list[Any]:
    """Build one EXISTS predicate per workflow-parameter table matching `search_like`.

    Each predicate matches workflows that own a non-soft-deleted parameter row
    whose key or description (and, for `WorkflowParameterModel` only, default
    value) matches the LIKE pattern. Generating the predicates from a single
    model list replaces nine hand-copied EXISTS blocks that differed only in
    the model name.
    """
    searchable_parameter_models = [
        WorkflowParameterModel,
        OutputParameterModel,
        AWSSecretParameterModel,
        BitwardenLoginCredentialParameterModel,
        BitwardenSensitiveInformationParameterModel,
        BitwardenCreditCardDataParameterModel,
        OnePasswordCredentialParameterModel,
        AzureVaultCredentialParameterModel,
        CredentialParameterModel,
    ]
    parameter_filters = []
    for parameter_model in searchable_parameter_models:
        matchers = [
            parameter_model.key.ilike(search_like),
            parameter_model.description.ilike(search_like),
        ]
        if parameter_model is WorkflowParameterModel:
            # Plain workflow parameters also expose a searchable default_value.
            matchers.append(parameter_model.default_value.ilike(search_like))
        parameter_filters.append(
            exists(
                select(1)
                .select_from(parameter_model)
                .where(parameter_model.workflow_id == WorkflowModel.workflow_id)
                .where(parameter_model.deleted_at.is_(None))
                .where(or_(*matchers))
            )
        )
    return parameter_filters
2024-03-01 10:09:30 -08:00
async def update_workflow(
    self,
    workflow_id: str,
    organization_id: str | None = None,
    title: str | None = None,
    description: str | None = None,
    workflow_definition: dict[str, Any] | None = None,
    version: int | None = None,
    run_with: str | None = None,
    cache_key: str | None = None,
    status: str | None = None,
    import_error: str | None = None,
) -> Workflow:
    """Apply the provided non-None fields to a workflow and return it.

    Raises:
        NotFoundError: if no live workflow matches `workflow_id` (and org).
    """
    try:
        async with self.Session() as session:
            query = (
                select(WorkflowModel).filter_by(workflow_id=workflow_id).filter(WorkflowModel.deleted_at.is_(None))
            )
            if organization_id:
                query = query.filter_by(organization_id=organization_id)
            workflow = (await session.scalars(query)).first()
            if not workflow:
                raise NotFoundError("Workflow not found")
            # Only fields explicitly passed (non-None) are written.
            field_updates = {
                "title": title,
                "description": description,
                "workflow_definition": workflow_definition,
                "version": version,
                "run_with": run_with,
                "cache_key": cache_key,
                "status": status,
                "import_error": import_error,
            }
            for field_name, new_value in field_updates.items():
                if new_value is not None:
                    setattr(workflow, field_name, new_value)
            await session.commit()
            await session.refresh(workflow)
            is_template = False
            if organization_id:
                is_template = await self.is_workflow_template(
                    workflow_permanent_id=workflow.workflow_permanent_id,
                    organization_id=workflow.organization_id,
                )
            return convert_to_workflow(workflow, self.debug_enabled, is_template=is_template)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except NotFoundError:
        LOG.error("No workflow found to update", workflow_id=workflow_id)
        LOG.error("NotFoundError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2024-03-01 10:09:30 -08:00
2024-05-16 10:51:22 -07:00
async def soft_delete_workflow_by_permanent_id(
    self,
    workflow_permanent_id: str,
    organization_id: str | None = None,
) -> None:
    """Soft delete every live version of a workflow by its permanent id.

    Sets `deleted_at` on all matching, not-yet-deleted rows; optionally
    scoped to an organization.
    """
    try:
        async with self.Session() as session:
            # soft delete the workflow by setting the deleted_at field
            update_deleted_at_query = (
                update(WorkflowModel)
                .where(WorkflowModel.workflow_permanent_id == workflow_permanent_id)
                .where(WorkflowModel.deleted_at.is_(None))
            )
            if organization_id:
                update_deleted_at_query = update_deleted_at_query.where(
                    WorkflowModel.organization_id == organization_id
                )
            update_deleted_at_query = update_deleted_at_query.values(deleted_at=datetime.utcnow())
            await session.execute(update_deleted_at_query)
            await session.commit()
    except SQLAlchemyError:
        # Log-and-reraise, consistent with soft_delete_workflow_by_id; the
        # original silently propagated without the diagnostic log line.
        LOG.error("SQLAlchemyError in soft_delete_workflow_by_permanent_id", exc_info=True)
        raise
2025-12-11 18:39:21 -08:00
async def add_workflow_template(
    self,
    workflow_permanent_id: str,
    organization_id: str,
) -> None:
    """Mark a workflow as a template, reviving a soft-deleted marker if present."""
    try:
        async with self.Session() as session:
            lookup = (
                select(WorkflowTemplateModel)
                .where(WorkflowTemplateModel.workflow_permanent_id == workflow_permanent_id)
                .where(WorkflowTemplateModel.organization_id == organization_id)
            )
            current = (await session.scalars(lookup)).first()
            if current is not None:
                # Row already exists: clear a prior soft delete if needed.
                if current.deleted_at is not None:
                    current.deleted_at = None
                await session.commit()
                return
            session.add(
                WorkflowTemplateModel(
                    workflow_permanent_id=workflow_permanent_id,
                    organization_id=organization_id,
                )
            )
            await session.commit()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in add_workflow_template", exc_info=True)
        raise
async def remove_workflow_template(
    self,
    workflow_permanent_id: str,
    organization_id: str,
) -> None:
    """Soft delete a workflow's template marker, if one is currently active."""
    try:
        async with self.Session() as session:
            stamp_query = (
                update(WorkflowTemplateModel)
                .where(
                    WorkflowTemplateModel.workflow_permanent_id == workflow_permanent_id,
                    WorkflowTemplateModel.organization_id == organization_id,
                    WorkflowTemplateModel.deleted_at.is_(None),
                )
                .values(deleted_at=datetime.utcnow())
            )
            await session.execute(stamp_query)
            await session.commit()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in remove_workflow_template", exc_info=True)
        raise
async def get_org_template_permanent_ids(
    self,
    organization_id: str,
) -> set[str]:
    """Return the permanent ids of all active workflow templates for an org."""
    try:
        async with self.Session() as session:
            stmt = (
                select(WorkflowTemplateModel.workflow_permanent_id)
                .where(WorkflowTemplateModel.organization_id == organization_id)
                .where(WorkflowTemplateModel.deleted_at.is_(None))
            )
            rows = await session.scalars(stmt)
            return {permanent_id for permanent_id in rows}
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_org_template_permanent_ids", exc_info=True)
        raise
async def is_workflow_template(
    self,
    workflow_permanent_id: str,
    organization_id: str,
) -> bool:
    """Return True when the workflow has an active (non-deleted) template marker."""
    try:
        async with self.Session() as session:
            stmt = (
                select(WorkflowTemplateModel)
                .where(WorkflowTemplateModel.workflow_permanent_id == workflow_permanent_id)
                .where(WorkflowTemplateModel.organization_id == organization_id)
                .where(WorkflowTemplateModel.deleted_at.is_(None))
            )
            match = (await session.scalars(stmt)).first()
            return match is not None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in is_workflow_template", exc_info=True)
        raise
2025-11-05 18:37:18 +03:00
async def create_folder(
    self,
    organization_id: str,
    title: str,
    description: str | None = None,
) -> FolderModel:
    """Persist and return a new folder for the organization."""
    try:
        async with self.Session() as session:
            new_folder = FolderModel(
                organization_id=organization_id,
                title=title,
                description=description,
            )
            session.add(new_folder)
            await session.commit()
            # Refresh to pick up DB-generated fields (id, timestamps).
            await session.refresh(new_folder)
            return new_folder
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in create_folder", exc_info=True)
        raise
async def get_folders(
    self,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
    search_query: str | None = None,
) -> list[FolderModel]:
    """List an org's live folders, most recently modified first, with paging and search."""
    try:
        async with self.Session() as session:
            stmt = select(FolderModel).filter(
                FolderModel.organization_id == organization_id,
                FolderModel.deleted_at.is_(None),
            )
            if search_query:
                pattern = f"%{search_query}%"
                stmt = stmt.filter(
                    or_(
                        FolderModel.title.ilike(pattern),
                        FolderModel.description.ilike(pattern),
                    )
                )
            stmt = (
                stmt.order_by(FolderModel.modified_at.desc())
                .offset((page - 1) * page_size)
                .limit(page_size)
            )
            folders = (await session.execute(stmt)).scalars().all()
            return list(folders)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folders", exc_info=True)
        raise
async def get_folder(
    self,
    folder_id: str,
    organization_id: str,
) -> FolderModel | None:
    """Fetch a single live folder by id, scoped to the organization."""
    try:
        async with self.Session() as session:
            stmt = select(FolderModel).filter(
                FolderModel.folder_id == folder_id,
                FolderModel.organization_id == organization_id,
                FolderModel.deleted_at.is_(None),
            )
            return (await session.execute(stmt)).scalar_one_or_none()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder", exc_info=True)
        raise
async def update_folder(
    self,
    folder_id: str,
    organization_id: str,
    title: str | None = None,
    description: str | None = None,
) -> FolderModel | None:
    """Apply title/description changes to a folder; returns None if not found."""
    try:
        async with self.Session() as session:
            stmt = select(FolderModel).filter(
                FolderModel.folder_id == folder_id,
                FolderModel.organization_id == organization_id,
                FolderModel.deleted_at.is_(None),
            )
            folder = (await session.execute(stmt)).scalar_one_or_none()
            if folder is None:
                return None
            if title is not None:
                folder.title = title
            if description is not None:
                folder.description = description
            # Any update call bumps the modification timestamp, even a no-op one.
            folder.modified_at = datetime.utcnow()
            await session.commit()
            await session.refresh(folder)
            return folder
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in update_folder", exc_info=True)
        raise
async def get_workflow_permanent_ids_in_folder(
    self,
    folder_id: str,
    organization_id: str,
) -> list[str]:
    """Return permanent ids of workflows whose latest version lives in the folder."""
    try:
        async with self.Session() as session:
            # Latest (max) version per permanent id within the organization.
            latest_versions = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )
            # Keep only rows that are both the latest version and in this folder.
            stmt = (
                select(WorkflowModel.workflow_permanent_id)
                .join(
                    latest_versions,
                    (WorkflowModel.organization_id == latest_versions.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == latest_versions.c.workflow_permanent_id)
                    & (WorkflowModel.version == latest_versions.c.max_version),
                )
                .where(WorkflowModel.folder_id == folder_id)
            )
            permanent_ids = (await session.execute(stmt)).scalars().all()
            return list(permanent_ids)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_workflow_permanent_ids_in_folder", exc_info=True)
        raise
async def soft_delete_folder(
    self,
    folder_id: str,
    organization_id: str,
    delete_workflows: bool = False,
) -> bool:
    """Soft delete a folder; returns True on success, False if not found.

    When `delete_workflows` is True, every workflow whose latest version
    lives in the folder is soft-deleted too (all versions of those permanent
    ids). Otherwise the folder's workflows are detached (folder_id cleared)
    and left alive. All changes share one transaction/commit.
    """
    try:
        async with self.Session() as session:
            # Check if folder exists (and is not already soft-deleted)
            folder_stmt = (
                select(FolderModel)
                .filter_by(folder_id=folder_id, organization_id=organization_id)
                .filter(FolderModel.deleted_at.is_(None))
            )
            folder_result = await session.execute(folder_stmt)
            folder = folder_result.scalar_one_or_none()
            if not folder:
                return False
            # If delete_workflows is True, delete all workflows in the folder
            if delete_workflows:
                # Get workflow permanent IDs in the folder (inline copy of the
                # get_workflow_permanent_ids_in_folder logic, kept inside this
                # session so the read and the bulk update share a transaction)
                subquery = (
                    select(
                        WorkflowModel.organization_id,
                        WorkflowModel.workflow_permanent_id,
                        func.max(WorkflowModel.version).label("max_version"),
                    )
                    .where(WorkflowModel.organization_id == organization_id)
                    .where(WorkflowModel.deleted_at.is_(None))
                    .group_by(
                        WorkflowModel.organization_id,
                        WorkflowModel.workflow_permanent_id,
                    )
                    .subquery()
                )
                workflow_permanent_ids_stmt = (
                    select(WorkflowModel.workflow_permanent_id)
                    .join(
                        subquery,
                        (WorkflowModel.organization_id == subquery.c.organization_id)
                        & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                        & (WorkflowModel.version == subquery.c.max_version),
                    )
                    .where(WorkflowModel.folder_id == folder_id)
                )
                result = await session.execute(workflow_permanent_ids_stmt)
                workflow_permanent_ids = list(result.scalars().all())
                # Soft delete all workflows with these permanent IDs in a single bulk update
                if workflow_permanent_ids:
                    update_workflows_query = (
                        update(WorkflowModel)
                        .where(WorkflowModel.workflow_permanent_id.in_(workflow_permanent_ids))
                        .where(WorkflowModel.organization_id == organization_id)
                        .where(WorkflowModel.deleted_at.is_(None))
                        .values(deleted_at=datetime.utcnow())
                    )
                    await session.execute(update_workflows_query)
            else:
                # Just remove folder_id from all workflows in this folder
                update_workflows_query = (
                    update(WorkflowModel)
                    .where(WorkflowModel.folder_id == folder_id)
                    .where(WorkflowModel.organization_id == organization_id)
                    .values(folder_id=None, modified_at=datetime.utcnow())
                )
                await session.execute(update_workflows_query)
            # Soft delete the folder itself; the single commit covers everything above
            folder.deleted_at = datetime.utcnow()
            await session.commit()
            return True
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in soft_delete_folder", exc_info=True)
        raise
async def get_folder_workflow_count(
    self,
    folder_id: str,
    organization_id: str,
) -> int:
    """Return how many workflows (latest versions only) are assigned to a folder."""
    try:
        async with self.Session() as session:
            # Latest non-deleted version per workflow_permanent_id for this org
            # (same pattern as get_workflows_by_organization_id).
            latest_versions = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )
            join_condition = (
                (WorkflowModel.organization_id == latest_versions.c.organization_id)
                & (WorkflowModel.workflow_permanent_id == latest_versions.c.workflow_permanent_id)
                & (WorkflowModel.version == latest_versions.c.max_version)
            )
            # Count only workflows whose latest version sits in this folder.
            count_stmt = (
                select(func.count(WorkflowModel.workflow_permanent_id))
                .join(latest_versions, join_condition)
                .where(WorkflowModel.folder_id == folder_id)
            )
            return (await session.execute(count_stmt)).scalar_one()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder_workflow_count", exc_info=True)
        raise
async def get_folder_workflow_counts_batch(
    self,
    folder_ids: list[str],
    organization_id: str,
) -> dict[str, int]:
    """Get workflow counts for multiple folders in a single query.

    Args:
        folder_ids: Folder IDs to count workflows for.
        organization_id: Organization that owns the folders.

    Returns:
        Mapping of folder_id -> count of workflows whose latest version lives in
        that folder. Every requested folder_id is present; folders with no
        workflows map to 0.
    """
    # Fix: the previous version only returned folders that had at least one
    # workflow, despite the stated intent of "defaulting to 0". Pre-seed all
    # requested IDs with 0 so callers can index without .get().
    counts: dict[str, int] = {folder_id: 0 for folder_id in folder_ids}
    if not folder_ids:
        return counts
    try:
        async with self.Session() as session:
            # Subquery: latest non-deleted version per workflow_permanent_id.
            subquery = (
                select(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                    func.max(WorkflowModel.version).label("max_version"),
                )
                .where(WorkflowModel.organization_id == organization_id)
                .where(WorkflowModel.deleted_at.is_(None))
                .group_by(
                    WorkflowModel.organization_id,
                    WorkflowModel.workflow_permanent_id,
                )
                .subquery()
            )
            # Count latest-version workflows grouped by folder_id.
            stmt = (
                select(
                    WorkflowModel.folder_id,
                    func.count(WorkflowModel.workflow_permanent_id).label("count"),
                )
                .join(
                    subquery,
                    (WorkflowModel.organization_id == subquery.c.organization_id)
                    & (WorkflowModel.workflow_permanent_id == subquery.c.workflow_permanent_id)
                    & (WorkflowModel.version == subquery.c.max_version),
                )
                .where(WorkflowModel.folder_id.in_(folder_ids))
                .group_by(WorkflowModel.folder_id)
            )
            result = await session.execute(stmt)
            for row in result.all():
                counts[row.folder_id] = row.count
            return counts
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in get_folder_workflow_counts_batch", exc_info=True)
        raise
async def update_workflow_folder(
    self,
    workflow_permanent_id: str,
    organization_id: str,
    folder_id: str | None,
) -> Workflow | None:
    """Update folder assignment for the latest version of a workflow.

    Args:
        workflow_permanent_id: Permanent ID of the workflow to move.
        organization_id: Organization that owns the workflow.
        folder_id: Target folder ID, or None to remove the workflow from any folder.

    Returns:
        The updated latest Workflow, or None when the workflow (or its latest
        version's row) cannot be found.

    Raises:
        ValueError: If folder_id is provided but no live folder with that ID exists
            in the organization. NOTE: ValueError is not a SQLAlchemyError, so it
            propagates past the except clause below straight to the caller.
    """
    try:
        # Get the latest version of the workflow
        latest_workflow = await self.get_workflow_by_permanent_id(
            workflow_permanent_id=workflow_permanent_id,
            organization_id=organization_id,
        )
        if not latest_workflow:
            return None
        async with self.Session() as session:
            # Validate folder exists in-org if folder_id is provided
            if folder_id:
                stmt = (
                    select(FolderModel.folder_id)
                    .where(FolderModel.folder_id == folder_id)
                    .where(FolderModel.organization_id == organization_id)
                    .where(FolderModel.deleted_at.is_(None))
                )
                if (await session.scalar(stmt)) is None:
                    raise ValueError(f"Folder {folder_id} not found")
            # Primary-key lookup of the latest version's row.
            workflow_model = await session.get(WorkflowModel, latest_workflow.workflow_id)
            if workflow_model:
                workflow_model.folder_id = folder_id
                workflow_model.modified_at = datetime.utcnow()
                # Update folder's modified_at in the same transaction
                if folder_id:
                    folder_model = await session.get(FolderModel, folder_id)
                    if folder_model:
                        folder_model.modified_at = datetime.utcnow()
                await session.commit()
                await session.refresh(workflow_model)
                return convert_to_workflow(workflow_model, self.debug_enabled)
            return None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError in update_workflow_folder", exc_info=True)
        raise
2024-03-01 10:09:30 -08:00
async def create_workflow_run(
    self,
    workflow_permanent_id: str,
    workflow_id: str,
    organization_id: str,
    browser_session_id: str | None = None,
    browser_profile_id: str | None = None,
    proxy_location: ProxyLocationInput = None,
    webhook_callback_url: str | None = None,
    totp_verification_url: str | None = None,
    totp_identifier: str | None = None,
    parent_workflow_run_id: str | None = None,
    max_screenshot_scrolling_times: int | None = None,
    extra_http_headers: dict[str, str] | None = None,
    browser_address: str | None = None,
    sequential_key: str | None = None,
    run_with: str | None = None,
    debug_session_id: str | None = None,
    ai_fallback: bool | None = None,
    code_gen: bool | None = None,
) -> WorkflowRun:
    """Insert a new workflow run row in "created" status and return the persisted record."""
    row_values: dict[str, Any] = {
        "workflow_permanent_id": workflow_permanent_id,
        "workflow_id": workflow_id,
        "organization_id": organization_id,
        "browser_session_id": browser_session_id,
        "browser_profile_id": browser_profile_id,
        # Proxy location is stored in its serialized form.
        "proxy_location": _serialize_proxy_location(proxy_location),
        "status": "created",
        "webhook_callback_url": webhook_callback_url,
        "totp_verification_url": totp_verification_url,
        "totp_identifier": totp_identifier,
        "parent_workflow_run_id": parent_workflow_run_id,
        "max_screenshot_scrolling_times": max_screenshot_scrolling_times,
        "extra_http_headers": extra_http_headers,
        "browser_address": browser_address,
        "sequential_key": sequential_key,
        "run_with": run_with,
        "debug_session_id": debug_session_id,
        "ai_fallback": ai_fallback,
        "code_gen": code_gen,
    }
    try:
        async with self.Session() as session:
            new_run = WorkflowRunModel(**row_values)
            session.add(new_run)
            await session.commit()
            # Refresh to pick up DB-generated fields (IDs, timestamps).
            await session.refresh(new_run)
            return convert_to_workflow_run(new_run)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2024-11-15 11:07:44 +08:00
async def update_workflow_run(
    self,
    workflow_run_id: str,
    status: WorkflowRunStatus | None = None,
    failure_reason: str | None = None,
    webhook_failure_reason: str | None = None,
    ai_fallback_triggered: bool | None = None,
    job_id: str | None = None,
    run_with: str | None = None,
    sequential_key: str | None = None,
    ai_fallback: bool | None = None,
    depends_on_workflow_run_id: str | None = None,
    browser_session_id: str | None = None,
) -> WorkflowRun:
    """Apply the provided fields to a workflow run and return the updated record.

    Only non-empty / non-None arguments are applied. Status transitions also stamp
    queued_at / started_at / finished_at the first time each phase is reached.

    Raises:
        WorkflowRunNotFound: If no run with workflow_run_id exists.
    """
    async with self.Session() as session:
        run = (
            await session.scalars(select(WorkflowRunModel).filter_by(workflow_run_id=workflow_run_id))
        ).first()
        if run is None:
            raise WorkflowRunNotFound(workflow_run_id)
        if status:
            run.status = status
            # Stamp phase timestamps only on the first transition into each phase.
            if status == WorkflowRunStatus.queued and run.queued_at is None:
                run.queued_at = datetime.utcnow()
            elif status == WorkflowRunStatus.running and run.started_at is None:
                run.started_at = datetime.utcnow()
            if status.is_final() and run.finished_at is None:
                run.finished_at = datetime.utcnow()
        if failure_reason:
            run.failure_reason = failure_reason
        # webhook_failure_reason may be intentionally set to "" — check against None.
        if webhook_failure_reason is not None:
            run.webhook_failure_reason = webhook_failure_reason
        if ai_fallback_triggered is not None:
            run.script_run = {"ai_fallback_triggered": ai_fallback_triggered}
        if job_id:
            run.job_id = job_id
        if run_with:
            run.run_with = run_with
        if sequential_key:
            run.sequential_key = sequential_key
        if ai_fallback is not None:
            run.ai_fallback = ai_fallback
        if depends_on_workflow_run_id:
            run.depends_on_workflow_run_id = depends_on_workflow_run_id
        if browser_session_id:
            run.browser_session_id = browser_session_id
        await session.commit()
        await save_workflow_run_logs(workflow_run_id)
        await session.refresh(run)
        return convert_to_workflow_run(run)
2024-03-01 10:09:30 -08:00
2025-12-20 03:18:50 +08:00
async def bulk_update_workflow_runs(
    self,
    workflow_run_ids: list[str],
    status: WorkflowRunStatus | None = None,
    failure_reason: str | None = None,
) -> None:
    """Bulk update workflow runs by their IDs.

    Args:
        workflow_run_ids: List of workflow run IDs to update.
        status: Optional status to set for all workflow runs.
        failure_reason: Optional failure reason to set for all workflow runs.
    """
    if not workflow_run_ids:
        return
    async with self.Session() as session:
        new_values: dict[str, Any] = {}
        if status:
            new_values["status"] = status.value
        if failure_reason:
            new_values["failure_reason"] = failure_reason
        # Only issue the UPDATE when there is actually something to set.
        if new_values:
            await session.execute(
                update(WorkflowRunModel)
                .where(WorkflowRunModel.workflow_run_id.in_(workflow_run_ids))
                .values(**new_values)
            )
        await session.commit()
2025-11-05 15:43:58 +08:00
async def clear_workflow_run_failure_reason(self, workflow_run_id: str, organization_id: str) -> WorkflowRun:
    """Reset failure_reason to None on a workflow run and return the updated record.

    Raises:
        NotFoundError: If the run does not exist in the given organization.
    """
    async with self.Session() as session:
        stmt = (
            select(WorkflowRunModel)
            .filter_by(workflow_run_id=workflow_run_id)
            .filter_by(organization_id=organization_id)
        )
        run = (await session.scalars(stmt)).first()
        if run is None:
            raise NotFoundError("Workflow run not found")
        run.failure_reason = None
        await session.commit()
        await session.refresh(run)
        return convert_to_workflow_run(run)
2025-02-04 03:59:10 +08:00
async def get_all_runs(
    self,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
    status: list[WorkflowRunStatus] | None = None,
    include_debugger_runs: bool = False,
    search_key: str | None = None,
) -> list[WorkflowRun | Task]:
    """Return one page of workflow runs and standalone tasks, merged and sorted by recency.

    Args:
        organization_id: Organization to list runs for.
        page: 1-based page number; capped at 10 (temporary limit).
        status: Optional statuses to filter both runs and tasks by.
        include_debugger_runs: When False, runs tied to a debug session are excluded.
        search_key: Optional substring matched against run ID, parameter
            key/description/value, and extra HTTP headers.

    Returns:
        A page_size slice of the merged, created_at-descending list.
    """
    try:
        async with self.Session() as session:
            # temporary limit to 10 pages
            if page > 10:
                return []
            # Over-fetch up to the requested page's end so the merged sort is correct.
            limit = page * page_size
            workflow_run_query = (
                select(WorkflowRunModel, WorkflowModel.title)
                .join(WorkflowModel, WorkflowModel.workflow_id == WorkflowRunModel.workflow_id)
                .filter(WorkflowRunModel.organization_id == organization_id)
                # Only top-level runs; child runs are excluded.
                .filter(WorkflowRunModel.parent_workflow_run_id.is_(None))
            )
            if not include_debugger_runs:
                workflow_run_query = workflow_run_query.filter(WorkflowRunModel.debug_session_id.is_(None))
            # Consistency fix: reuse the shared search filter instead of duplicating
            # its logic inline (same filter used by get_workflow_runs and
            # get_workflow_runs_for_workflow_permanent_id).
            workflow_run_query = self._apply_search_key_filter(workflow_run_query, search_key)
            if status:
                workflow_run_query = workflow_run_query.filter(WorkflowRunModel.status.in_(status))
            workflow_run_query = workflow_run_query.order_by(WorkflowRunModel.created_at.desc()).limit(limit)
            workflow_run_query_result = (await session.execute(workflow_run_query)).all()
            workflow_runs = [
                convert_to_workflow_run(run, workflow_title=title, debug_enabled=self.debug_enabled)
                for run, title in workflow_run_query_result
            ]
            # Standalone tasks (not attached to any workflow run).
            task_query = (
                select(TaskModel)
                .filter(TaskModel.organization_id == organization_id)
                .filter(TaskModel.workflow_run_id.is_(None))
            )
            if status:
                task_query = task_query.filter(TaskModel.status.in_(status))
            task_query = task_query.order_by(TaskModel.created_at.desc()).limit(limit)
            task_query_result = (await session.scalars(task_query)).all()
            tasks = [convert_to_task(task, debug_enabled=self.debug_enabled) for task in task_query_result]
            # Merge, sort newest-first, then slice out the requested page.
            runs = workflow_runs + tasks
            runs.sort(key=lambda x: x.created_at, reverse=True)
            lower = (page - 1) * page_size
            upper = page * page_size
            return runs[lower:upper]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-12-18 11:32:40 -07:00
@read_retry()
async def get_workflow_run(
    self,
    workflow_run_id: str,
    organization_id: str | None = None,
    job_id: str | None = None,
    status: WorkflowRunStatus | None = None,
) -> WorkflowRun | None:
    """Fetch a single workflow run, optionally narrowed by org, job, and status; None if absent."""
    # Build the filter set up front; optional criteria are added only when provided.
    filters: dict[str, Any] = {"workflow_run_id": workflow_run_id}
    if organization_id:
        filters["organization_id"] = organization_id
    if job_id:
        filters["job_id"] = job_id
    if status:
        filters["status"] = status.value
    async with self.Session() as session:
        row = (await session.scalars(select(WorkflowRunModel).filter_by(**filters))).first()
        return convert_to_workflow_run(row) if row else None
2024-03-01 10:09:30 -08:00
2025-09-18 13:32:55 +08:00
async def get_last_queued_workflow_run(
    self,
    workflow_permanent_id: str,
    organization_id: str | None = None,
    sequential_key: str | None = None,
) -> WorkflowRun | None:
    """Return the most recently modified queued run for a workflow, or None.

    Runs pinned to a browser session are excluded; optional org and
    sequential-key filters narrow the match.
    """
    try:
        async with self.Session() as session:
            stmt = (
                select(WorkflowRunModel)
                .filter_by(workflow_permanent_id=workflow_permanent_id)
                # Browser-session-bound runs are handled by a separate lookup.
                .filter(WorkflowRunModel.browser_session_id.is_(None))
                .filter_by(status=WorkflowRunStatus.queued)
            )
            if organization_id:
                stmt = stmt.filter_by(organization_id=organization_id)
            if sequential_key:
                stmt = stmt.filter_by(sequential_key=sequential_key)
            stmt = stmt.order_by(WorkflowRunModel.modified_at.desc())
            latest = (await session.scalars(stmt)).first()
            return convert_to_workflow_run(latest) if latest else None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-10-27 00:35:52 +08:00
async def get_workflow_runs_by_ids(
    self,
    workflow_run_ids: list[str],
    workflow_permanent_id: str | None = None,
    organization_id: str | None = None,
) -> list[WorkflowRun]:
    """Fetch workflow runs by ID, optionally restricted to one workflow and/or org."""
    try:
        async with self.Session() as session:
            stmt = select(WorkflowRunModel).filter(WorkflowRunModel.workflow_run_id.in_(workflow_run_ids))
            if workflow_permanent_id:
                stmt = stmt.filter_by(workflow_permanent_id=workflow_permanent_id)
            if organization_id:
                stmt = stmt.filter_by(organization_id=organization_id)
            rows = (await session.scalars(stmt)).all()
            return [convert_to_workflow_run(row) for row in rows]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-09-19 10:17:52 +08:00
async def get_last_running_workflow_run(
    self,
    workflow_permanent_id: str,
    organization_id: str | None = None,
    sequential_key: str | None = None,
) -> WorkflowRun | None:
    """Return the most recently started running run for a workflow, or None.

    Runs bound to a browser session or lacking a started_at timestamp are
    excluded; optional org and sequential-key filters narrow the match.
    """
    try:
        async with self.Session() as session:
            stmt = (
                select(WorkflowRunModel)
                .filter_by(workflow_permanent_id=workflow_permanent_id)
                # Browser-session-bound runs are handled by a separate lookup.
                .filter(WorkflowRunModel.browser_session_id.is_(None))
                .filter_by(status=WorkflowRunStatus.running)
            )
            if organization_id:
                stmt = stmt.filter_by(organization_id=organization_id)
            if sequential_key:
                stmt = stmt.filter_by(sequential_key=sequential_key)
            # Runs without a started_at timestamp cannot be ordered — skip them.
            stmt = stmt.filter(WorkflowRunModel.started_at.isnot(None))
            stmt = stmt.order_by(WorkflowRunModel.started_at.desc())
            latest = (await session.scalars(stmt)).first()
            return convert_to_workflow_run(latest) if latest else None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-12-04 00:25:35 +08:00
async def get_last_workflow_run_for_browser_session(
    self,
    browser_session_id: str,
    organization_id: str | None = None,
) -> WorkflowRun | None:
    """Return the latest queued run for a browser session, else the latest running one, else None."""
    try:
        async with self.Session() as session:
            base = select(WorkflowRunModel).filter_by(browser_session_id=browser_session_id)
            if organization_id:
                base = base.filter_by(organization_id=organization_id)
            # Queued runs take priority, most recently modified first.
            queued = (
                await session.scalars(
                    base.filter_by(status=WorkflowRunStatus.queued).order_by(
                        WorkflowRunModel.modified_at.desc()
                    )
                )
            ).first()
            if queued:
                return convert_to_workflow_run(queued)
            # Otherwise fall back to the most recently started running run.
            running = (
                await session.scalars(
                    base.filter_by(status=WorkflowRunStatus.running)
                    .filter(WorkflowRunModel.started_at.isnot(None))
                    .order_by(WorkflowRunModel.started_at.desc())
                )
            ).first()
            return convert_to_workflow_run(running) if running else None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-11-17 13:38:38 -08:00
async def get_workflows_depending_on(
    self,
    workflow_run_id: str,
) -> list[WorkflowRun]:
    """Return every workflow run whose depends_on_workflow_run_id equals *workflow_run_id*.

    Used for sequential workflow dependency handling: when a run completes, its
    dependents are looked up here so they can be signaled.

    Args:
        workflow_run_id: The workflow_run_id to find dependents for.

    Returns:
        List of WorkflowRun objects depending on the given run.
    """
    try:
        async with self.Session() as session:
            dependents = (
                await session.scalars(
                    select(WorkflowRunModel).filter_by(depends_on_workflow_run_id=workflow_run_id)
                )
            ).all()
            return [convert_to_workflow_run(dependent) for dependent in dependents]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2026-02-11 00:42:11 -05:00
@staticmethod
def _apply_search_key_filter(query, search_key: str | None):  # type: ignore[no-untyped-def]
    """Narrow *query* to workflow runs matching *search_key*; no-op when search_key is falsy.

    A run matches when the key substring (case-insensitive) appears in its run ID,
    a non-deleted parameter definition's key or description, any run parameter
    value, or its extra HTTP headers.
    """
    if not search_key:
        return query
    pattern = f"%{search_key}%"
    conditions = []
    # Direct match on the run ID.
    conditions.append(WorkflowRunModel.workflow_run_id.ilike(pattern))
    # EXISTS (rather than a join) avoids duplicate rows and keeps pagination correct.
    # Parameter key/description, restricted to non-deleted parameter definitions.
    conditions.append(
        exists(
            select(1)
            .select_from(WorkflowRunParameterModel)
            .join(
                WorkflowParameterModel,
                WorkflowParameterModel.workflow_parameter_id
                == WorkflowRunParameterModel.workflow_parameter_id,
            )
            .where(WorkflowRunParameterModel.workflow_run_id == WorkflowRunModel.workflow_run_id)
            .where(WorkflowParameterModel.deleted_at.is_(None))
            .where(
                or_(
                    WorkflowParameterModel.key.ilike(pattern),
                    WorkflowParameterModel.description.ilike(pattern),
                )
            )
        )
    )
    # Raw run parameter values, regardless of the parameter definition's status.
    conditions.append(
        exists(
            select(1)
            .select_from(WorkflowRunParameterModel)
            .where(WorkflowRunParameterModel.workflow_run_id == WorkflowRunModel.workflow_run_id)
            .where(WorkflowRunParameterModel.value.ilike(pattern))
        )
    )
    # Extra HTTP headers: cast JSON to text for the substring search; NULLs skipped.
    conditions.append(
        and_(
            WorkflowRunModel.extra_http_headers.isnot(None),
            func.cast(WorkflowRunModel.extra_http_headers, Text()).ilike(pattern),
        )
    )
    return query.where(or_(*conditions))
@staticmethod
def _apply_error_code_filter(query, error_code: str | None):  # type: ignore[no-untyped-def]
    """Restrict *query* to runs that have a task whose errors include *error_code*; no-op when falsy."""
    if not error_code:
        return query
    # JSONB containment: task.errors must contain an element {"error_code": error_code}.
    has_matching_task = exists(
        select(1)
        .select_from(TaskModel)
        .where(TaskModel.workflow_run_id == WorkflowRunModel.workflow_run_id)
        .where(cast(TaskModel.errors, JSONB).contains(literal([{"error_code": error_code}], type_=JSONB)))
    )
    return query.where(has_matching_task)
2025-01-24 23:31:26 +08:00
async def get_workflow_runs(
    self,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
    status: list[WorkflowRunStatus] | None = None,
    ordering: tuple[str, str] | None = None,
    search_key: str | None = None,
    error_code: str | None = None,
) -> list[WorkflowRun]:
    """Return one page of top-level workflow runs for an organization.

    Supports optional status filtering, search-key matching, error-code
    filtering, and ordering by created_at or status (default: created_at
    descending). Page numbers are 1-based.
    """
    try:
        async with self.Session() as session:
            offset = (page - 1) * page_size  # offset logic is 0 based
            stmt = (
                select(WorkflowRunModel, WorkflowModel.title)
                .join(WorkflowModel, WorkflowModel.workflow_id == WorkflowRunModel.workflow_id)
                .filter(WorkflowRunModel.organization_id == organization_id)
                # Child runs are excluded; only top-level runs are listed.
                .filter(WorkflowRunModel.parent_workflow_run_id.is_(None))
            )
            stmt = self._apply_search_key_filter(stmt, search_key)
            stmt = self._apply_error_code_filter(stmt, error_code)
            if status:
                stmt = stmt.filter(WorkflowRunModel.status.in_(status))
            # Ordering is whitelisted; anything unrecognized falls back to created_at desc.
            orderable_columns = {
                "created_at": WorkflowRunModel.created_at,
                "status": WorkflowRunModel.status,
            }
            field, direction = "created_at", "desc"
            if ordering and isinstance(ordering, tuple) and len(ordering) == 2:
                requested_field, requested_direction = ordering
                if requested_field in orderable_columns and requested_direction in ("asc", "desc"):
                    field, direction = requested_field, requested_direction
            column = orderable_columns[field]
            stmt = stmt.order_by(column.asc() if direction == "asc" else column.desc())
            stmt = stmt.limit(page_size).offset(offset)
            rows = (await session.execute(stmt)).all()
            return [
                convert_to_workflow_run(run, workflow_title=title, debug_enabled=self.debug_enabled)
                for run, title in rows
            ]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-05-12 08:30:37 -07:00
async def get_workflow_runs_count(
    self,
    organization_id: str,
    status: list[WorkflowRunStatus] | None = None,
) -> int:
    """Count workflow runs for an organization, optionally filtered by status.

    NOTE(review): unlike get_workflow_runs, this does not exclude child runs
    (no parent_workflow_run_id filter), so the count can exceed what the
    paginated listing shows — confirm whether that is intended.
    """
    try:
        async with self.Session() as session:
            stmt = (
                select(func.count())
                .select_from(WorkflowRunModel)
                .filter(WorkflowRunModel.organization_id == organization_id)
            )
            if status:
                stmt = stmt.filter(WorkflowRunModel.status.in_(status))
            return (await session.execute(stmt)).scalar_one()
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2024-07-05 16:39:42 -07:00
async def get_workflow_runs_for_workflow_permanent_id(
    self,
    workflow_permanent_id: str,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
    status: list[WorkflowRunStatus] | None = None,
    search_key: str | None = None,
    error_code: str | None = None,
) -> list[WorkflowRun]:
    """Return one page of runs for a specific workflow, newest first.

    `search_key` matches run ID, parameter key/description/value, or extra HTTP
    headers; `error_code` matches task error codes. Page numbers are 1-based.
    """
    try:
        async with self.Session() as session:
            offset = (page - 1) * page_size  # offset logic is 0 based
            stmt = (
                select(WorkflowRunModel, WorkflowModel.title)
                .join(WorkflowModel, WorkflowModel.workflow_id == WorkflowRunModel.workflow_id)
                .filter(WorkflowRunModel.workflow_permanent_id == workflow_permanent_id)
                .filter(WorkflowRunModel.organization_id == organization_id)
            )
            stmt = self._apply_search_key_filter(stmt, search_key)
            stmt = self._apply_error_code_filter(stmt, error_code)
            if status:
                stmt = stmt.filter(WorkflowRunModel.status.in_(status))
            stmt = stmt.order_by(WorkflowRunModel.created_at.desc()).limit(page_size).offset(offset)
            rows = (await session.execute(stmt)).all()
            return [
                convert_to_workflow_run(run, workflow_title=title, debug_enabled=self.debug_enabled)
                for run, title in rows
            ]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-02-18 23:21:17 +08:00
async def get_workflow_runs_by_parent_workflow_run_id(
    self,
    parent_workflow_run_id: str,
    organization_id: str | None = None,
) -> list[WorkflowRun]:
    """Return all child workflow runs of the given parent run.

    Args:
        parent_workflow_run_id: Parent run whose children to fetch.
        organization_id: Optional org scope; when None, no org filter is applied.

    Returns:
        List of child WorkflowRun objects (empty if none).
    """
    try:
        async with self.Session() as session:
            query = select(WorkflowRunModel).filter(
                WorkflowRunModel.parent_workflow_run_id == parent_workflow_run_id
            )
            # Bug fix: organization_id is optional, but the filter was applied
            # unconditionally. With None, SQLAlchemy renders `== None` as
            # `organization_id IS NULL`, silently matching nothing. Apply the
            # filter only when an org is provided, like the sibling queries do.
            if organization_id:
                query = query.filter(WorkflowRunModel.organization_id == organization_id)
            workflow_runs = (await session.scalars(query)).all()
            return [convert_to_workflow_run(run) for run in workflow_runs]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2024-03-01 10:09:30 -08:00
async def create_workflow_parameter(
    self,
    workflow_id: str,
    workflow_parameter_type: WorkflowParameterType,
    key: str,
    default_value: Any,
    description: str | None = None,
) -> WorkflowParameter:
    """Create and persist a workflow parameter; JSON-typed defaults are serialized before storage."""
    try:
        async with self.Session() as session:
            # JSON defaults are stored as serialized strings; others as-is.
            stored_default = default_value
            if workflow_parameter_type == WorkflowParameterType.JSON:
                stored_default = json.dumps(default_value)
            parameter = WorkflowParameterModel(
                workflow_id=workflow_id,
                workflow_parameter_type=workflow_parameter_type,
                key=key,
                default_value=stored_default,
                description=description,
            )
            session.add(parameter)
            await session.commit()
            await session.refresh(parameter)
            return convert_to_workflow_parameter(parameter, self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def create_aws_secret_parameter (
self ,
workflow_id : str ,
key : str ,
aws_key : str ,
description : str | None = None ,
) - > AWSSecretParameter :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
aws_secret_parameter = AWSSecretParameterModel (
workflow_id = workflow_id ,
key = key ,
aws_key = aws_key ,
description = description ,
)
session . add ( aws_secret_parameter )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( aws_secret_parameter )
2024-03-01 10:09:30 -08:00
return convert_to_aws_secret_parameter ( aws_secret_parameter )
2024-03-21 17:16:56 -07:00
async def create_output_parameter (
self ,
workflow_id : str ,
key : str ,
description : str | None = None ,
) - > OutputParameter :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-21 17:16:56 -07:00
output_parameter = OutputParameterModel (
key = key ,
description = description ,
workflow_id = workflow_id ,
)
session . add ( output_parameter )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( output_parameter )
2024-03-21 17:16:56 -07:00
return convert_to_output_parameter ( output_parameter )
2026-01-13 15:31:33 -07:00
    @staticmethod
    def _convert_parameter_to_model(parameter: PARAMETER_TYPE) -> Base:
        """Convert a parameter object to its corresponding SQLAlchemy model.

        Maps each supported Pydantic parameter type onto its model class, copying
        the shared columns (key, description, workflow_id, deleted_at) plus the
        type-specific fields. Raises ValueError for any unsupported type;
        ContextParameter in particular is never persisted and is filtered out by
        callers before this helper runs.
        """
        if isinstance(parameter, WorkflowParameter):
            # JSON-typed defaults are stored serialized, mirroring create_workflow_parameter.
            default_value = (
                json.dumps(parameter.default_value)
                if parameter.workflow_parameter_type == WorkflowParameterType.JSON
                else parameter.default_value
            )
            return WorkflowParameterModel(
                workflow_parameter_id=parameter.workflow_parameter_id,
                workflow_parameter_type=parameter.workflow_parameter_type.value,
                key=parameter.key,
                description=parameter.description,
                workflow_id=parameter.workflow_id,
                default_value=default_value,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, OutputParameter):
            return OutputParameterModel(
                output_parameter_id=parameter.output_parameter_id,
                key=parameter.key,
                description=parameter.description,
                workflow_id=parameter.workflow_id,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, AWSSecretParameter):
            return AWSSecretParameterModel(
                aws_secret_parameter_id=parameter.aws_secret_parameter_id,
                workflow_id=parameter.workflow_id,
                key=parameter.key,
                description=parameter.description,
                aws_key=parameter.aws_key,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, BitwardenLoginCredentialParameter):
            return BitwardenLoginCredentialParameterModel(
                bitwarden_login_credential_parameter_id=parameter.bitwarden_login_credential_parameter_id,
                workflow_id=parameter.workflow_id,
                key=parameter.key,
                description=parameter.description,
                bitwarden_client_id_aws_secret_key=parameter.bitwarden_client_id_aws_secret_key,
                bitwarden_client_secret_aws_secret_key=parameter.bitwarden_client_secret_aws_secret_key,
                bitwarden_master_password_aws_secret_key=parameter.bitwarden_master_password_aws_secret_key,
                bitwarden_collection_id=parameter.bitwarden_collection_id,
                bitwarden_item_id=parameter.bitwarden_item_id,
                url_parameter_key=parameter.url_parameter_key,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, BitwardenSensitiveInformationParameter):
            return BitwardenSensitiveInformationParameterModel(
                bitwarden_sensitive_information_parameter_id=parameter.bitwarden_sensitive_information_parameter_id,
                workflow_id=parameter.workflow_id,
                key=parameter.key,
                description=parameter.description,
                bitwarden_client_id_aws_secret_key=parameter.bitwarden_client_id_aws_secret_key,
                bitwarden_client_secret_aws_secret_key=parameter.bitwarden_client_secret_aws_secret_key,
                bitwarden_master_password_aws_secret_key=parameter.bitwarden_master_password_aws_secret_key,
                bitwarden_collection_id=parameter.bitwarden_collection_id,
                bitwarden_identity_key=parameter.bitwarden_identity_key,
                bitwarden_identity_fields=parameter.bitwarden_identity_fields,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, BitwardenCreditCardDataParameter):
            return BitwardenCreditCardDataParameterModel(
                bitwarden_credit_card_data_parameter_id=parameter.bitwarden_credit_card_data_parameter_id,
                workflow_id=parameter.workflow_id,
                key=parameter.key,
                description=parameter.description,
                bitwarden_client_id_aws_secret_key=parameter.bitwarden_client_id_aws_secret_key,
                bitwarden_client_secret_aws_secret_key=parameter.bitwarden_client_secret_aws_secret_key,
                bitwarden_master_password_aws_secret_key=parameter.bitwarden_master_password_aws_secret_key,
                bitwarden_collection_id=parameter.bitwarden_collection_id,
                bitwarden_item_id=parameter.bitwarden_item_id,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, CredentialParameter):
            return CredentialParameterModel(
                credential_parameter_id=parameter.credential_parameter_id,
                workflow_id=parameter.workflow_id,
                key=parameter.key,
                description=parameter.description,
                credential_id=parameter.credential_id,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, OnePasswordCredentialParameter):
            return OnePasswordCredentialParameterModel(
                onepassword_credential_parameter_id=parameter.onepassword_credential_parameter_id,
                workflow_id=parameter.workflow_id,
                key=parameter.key,
                description=parameter.description,
                vault_id=parameter.vault_id,
                item_id=parameter.item_id,
                deleted_at=parameter.deleted_at,
            )
        elif isinstance(parameter, AzureVaultCredentialParameter):
            return AzureVaultCredentialParameterModel(
                azure_vault_credential_parameter_id=parameter.azure_vault_credential_parameter_id,
                workflow_id=parameter.workflow_id,
                key=parameter.key,
                description=parameter.description,
                vault_name=parameter.vault_name,
                username_key=parameter.username_key,
                password_key=parameter.password_key,
                totp_secret_key=parameter.totp_secret_key,
                deleted_at=parameter.deleted_at,
            )
        else:
            raise ValueError(f"Unsupported workflow definition parameter type: {type(parameter).__name__}")
async def save_workflow_definition_parameters ( self , parameters : list [ PARAMETER_TYPE ] ) - > None :
""" Save multiple workflow definition parameters in a single transaction. """
# ContextParameter is not persisted
parameters_to_save = [ p for p in parameters if not isinstance ( p , ContextParameter ) ]
if not parameters_to_save :
return
async with self . Session ( ) as session :
try :
for parameter in parameters_to_save :
model = self . _convert_parameter_to_model ( parameter )
session . add ( model )
await session . commit ( )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-03-21 17:16:56 -07:00
async def get_workflow_output_parameters ( self , workflow_id : str ) - > list [ OutputParameter ] :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
output_parameters = (
await session . scalars ( select ( OutputParameterModel ) . filter_by ( workflow_id = workflow_id ) )
) . all ( )
2024-03-21 17:16:56 -07:00
return [ convert_to_output_parameter ( parameter ) for parameter in output_parameters ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2025-03-17 16:22:44 -07:00
async def get_workflow_output_parameters_by_ids ( self , output_parameter_ids : list [ str ] ) - > list [ OutputParameter ] :
try :
async with self . Session ( ) as session :
output_parameters = (
await session . scalars (
select ( OutputParameterModel ) . filter (
OutputParameterModel . output_parameter_id . in_ ( output_parameter_ids )
)
)
) . all ( )
return [ convert_to_output_parameter ( parameter ) for parameter in output_parameters ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-03-21 17:16:56 -07:00
async def get_workflow_run_output_parameters ( self , workflow_run_id : str ) - > list [ WorkflowRunOutputParameter ] :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-21 17:16:56 -07:00
workflow_run_output_parameters = (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( WorkflowRunOutputParameterModel )
. filter_by ( workflow_run_id = workflow_run_id )
. order_by ( WorkflowRunOutputParameterModel . created_at )
)
) . all ( )
2024-03-21 17:16:56 -07:00
return [
convert_to_workflow_run_output_parameter ( parameter , self . debug_enabled )
for parameter in workflow_run_output_parameters
]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2025-08-28 20:05:24 -04:00
async def get_workflow_run_output_parameter_by_id (
self , workflow_run_id : str , output_parameter_id : str
) - > WorkflowRunOutputParameter | None :
try :
async with self . Session ( ) as session :
parameter = (
await session . scalars (
select ( WorkflowRunOutputParameterModel )
. filter_by ( workflow_run_id = workflow_run_id )
. filter_by ( output_parameter_id = output_parameter_id )
. order_by ( WorkflowRunOutputParameterModel . created_at )
)
) . first ( )
if parameter :
return convert_to_workflow_run_output_parameter ( parameter , self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-09-24 08:51:51 -07:00
async def create_or_update_workflow_run_output_parameter (
2024-05-16 18:20:11 -07:00
self ,
workflow_run_id : str ,
output_parameter_id : str ,
value : dict [ str , Any ] | list | str | None ,
2024-03-21 17:16:56 -07:00
) - > WorkflowRunOutputParameter :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-09-24 08:51:51 -07:00
# check if the workflow run output parameter already exists
# if it does, update the value
if workflow_run_output_parameter := (
await session . scalars (
select ( WorkflowRunOutputParameterModel )
. filter_by ( workflow_run_id = workflow_run_id )
. filter_by ( output_parameter_id = output_parameter_id )
)
) . first ( ) :
LOG . info (
f " Updating existing workflow run output parameter with { workflow_run_output_parameter . workflow_run_id } - { workflow_run_output_parameter . output_parameter_id } "
)
workflow_run_output_parameter . value = value
await session . commit ( )
await session . refresh ( workflow_run_output_parameter )
return convert_to_workflow_run_output_parameter ( workflow_run_output_parameter , self . debug_enabled )
# if it does not exist, create a new one
2024-03-21 17:16:56 -07:00
workflow_run_output_parameter = WorkflowRunOutputParameterModel (
workflow_run_id = workflow_run_id ,
output_parameter_id = output_parameter_id ,
value = value ,
)
session . add ( workflow_run_output_parameter )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( workflow_run_output_parameter )
2024-03-21 17:16:56 -07:00
return convert_to_workflow_run_output_parameter ( workflow_run_output_parameter , self . debug_enabled )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
2024-09-24 08:51:51 -07:00
raise
async def update_workflow_run_output_parameter (
self ,
workflow_run_id : str ,
output_parameter_id : str ,
value : dict [ str , Any ] | list | str | None ,
) - > WorkflowRunOutputParameter :
try :
async with self . Session ( ) as session :
workflow_run_output_parameter = (
await session . scalars (
select ( WorkflowRunOutputParameterModel )
. filter_by ( workflow_run_id = workflow_run_id )
. filter_by ( output_parameter_id = output_parameter_id )
)
) . first ( )
if not workflow_run_output_parameter :
raise NotFoundError (
f " WorkflowRunOutputParameter not found for { workflow_run_id } and { output_parameter_id } "
)
workflow_run_output_parameter . value = value
await session . commit ( )
await session . refresh ( workflow_run_output_parameter )
return convert_to_workflow_run_output_parameter ( workflow_run_output_parameter , self . debug_enabled )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
2024-03-21 17:16:56 -07:00
raise
2024-03-01 10:09:30 -08:00
async def get_workflow_parameters ( self , workflow_id : str ) - > list [ WorkflowParameter ] :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
workflow_parameters = (
await session . scalars ( select ( WorkflowParameterModel ) . filter_by ( workflow_id = workflow_id ) )
) . all ( )
2024-03-01 10:09:30 -08:00
return [ convert_to_workflow_parameter ( parameter ) for parameter in workflow_parameters ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def get_workflow_parameter ( self , workflow_parameter_id : str ) - > WorkflowParameter | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if workflow_parameter := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( WorkflowParameterModel ) . filter_by ( workflow_parameter_id = workflow_parameter_id )
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_workflow_parameter ( workflow_parameter , self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def create_workflow_run_parameter (
2024-10-22 17:36:25 -07:00
self , workflow_run_id : str , workflow_parameter : WorkflowParameter , value : Any
2024-03-01 10:09:30 -08:00
) - > WorkflowRunParameter :
2024-10-22 17:36:25 -07:00
workflow_parameter_id = workflow_parameter . workflow_parameter_id
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
workflow_run_parameter = WorkflowRunParameterModel (
workflow_run_id = workflow_run_id ,
workflow_parameter_id = workflow_parameter_id ,
value = value ,
)
session . add ( workflow_run_parameter )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( workflow_run_parameter )
2024-03-01 10:09:30 -08:00
return convert_to_workflow_run_parameter ( workflow_run_parameter , workflow_parameter , self . debug_enabled )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def get_workflow_run_parameters (
self , workflow_run_id : str
) - > list [ tuple [ WorkflowParameter , WorkflowRunParameter ] ] :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
workflow_run_parameters = (
2024-03-24 12:47:47 -07:00
await session . scalars ( select ( WorkflowRunParameterModel ) . filter_by ( workflow_run_id = workflow_run_id ) )
) . all ( )
2024-03-01 10:09:30 -08:00
results = [ ]
for workflow_run_parameter in workflow_run_parameters :
workflow_parameter = await self . get_workflow_parameter ( workflow_run_parameter . workflow_parameter_id )
if not workflow_parameter :
raise WorkflowParameterNotFound (
workflow_parameter_id = workflow_run_parameter . workflow_parameter_id
)
results . append (
(
workflow_parameter ,
convert_to_workflow_run_parameter (
2024-05-16 18:20:11 -07:00
workflow_run_parameter ,
workflow_parameter ,
self . debug_enabled ,
2024-03-01 10:09:30 -08:00
) ,
)
)
return results
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def get_last_task_for_workflow_run ( self , workflow_run_id : str ) - > Task | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if task := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( TaskModel )
. filter_by ( workflow_run_id = workflow_run_id )
. order_by ( TaskModel . created_at . desc ( ) )
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_task ( task , debug_enabled = self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def get_tasks_by_workflow_run_id ( self , workflow_run_id : str ) - > list [ Task ] :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
tasks = (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( TaskModel ) . filter_by ( workflow_run_id = workflow_run_id ) . order_by ( TaskModel . created_at )
)
) . all ( )
2024-03-01 10:09:30 -08:00
return [ convert_to_task ( task , debug_enabled = self . debug_enabled ) for task in tasks ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def delete_task_artifacts ( self , organization_id : str , task_id : str ) - > None :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
# delete artifacts by filtering organization_id and task_id
stmt = delete ( ArtifactModel ) . where (
and_ (
ArtifactModel . organization_id == organization_id ,
ArtifactModel . task_id == task_id ,
)
)
2024-03-24 12:47:47 -07:00
await session . execute ( stmt )
await session . commit ( )
2024-03-01 10:09:30 -08:00
2025-02-23 16:03:49 -08:00
async def delete_task_v2_artifacts ( self , task_v2_id : str , organization_id : str | None = None ) - > None :
2025-01-11 19:49:51 -08:00
async with self . Session ( ) as session :
stmt = delete ( ArtifactModel ) . where (
and_ (
2025-02-23 16:03:49 -08:00
ArtifactModel . observer_cruise_id == task_v2_id ,
2025-01-11 19:49:51 -08:00
ArtifactModel . organization_id == organization_id ,
)
)
await session . execute ( stmt )
await session . commit ( )
2024-03-01 10:09:30 -08:00
async def delete_task_steps ( self , organization_id : str , task_id : str ) - > None :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
# delete artifacts by filtering organization_id and task_id
stmt = delete ( StepModel ) . where (
and_ (
StepModel . organization_id == organization_id ,
StepModel . task_id == task_id ,
)
)
2024-03-24 12:47:47 -07:00
await session . execute ( stmt )
await session . commit ( )
2024-06-07 15:59:53 -07:00
async def create_task_generation (
self ,
organization_id : str ,
user_prompt : str ,
2024-09-03 07:00:15 +03:00
user_prompt_hash : str ,
2024-06-07 15:59:53 -07:00
url : str | None = None ,
navigation_goal : str | None = None ,
navigation_payload : dict [ str , Any ] | None = None ,
data_extraction_goal : str | None = None ,
extracted_information_schema : dict [ str , Any ] | None = None ,
2024-08-23 23:16:41 +03:00
suggested_title : str | None = None ,
2024-06-07 15:59:53 -07:00
llm : str | None = None ,
llm_prompt : str | None = None ,
llm_response : str | None = None ,
2024-09-03 07:00:15 +03:00
source_task_generation_id : str | None = None ,
2024-06-07 15:59:53 -07:00
) - > TaskGeneration :
async with self . Session ( ) as session :
new_task_generation = TaskGenerationModel (
organization_id = organization_id ,
user_prompt = user_prompt ,
2024-09-03 07:00:15 +03:00
user_prompt_hash = user_prompt_hash ,
2024-06-07 15:59:53 -07:00
url = url ,
navigation_goal = navigation_goal ,
navigation_payload = navigation_payload ,
data_extraction_goal = data_extraction_goal ,
extracted_information_schema = extracted_information_schema ,
llm = llm ,
llm_prompt = llm_prompt ,
llm_response = llm_response ,
2024-08-23 23:16:41 +03:00
suggested_title = suggested_title ,
2024-09-03 07:00:15 +03:00
source_task_generation_id = source_task_generation_id ,
2024-06-07 15:59:53 -07:00
)
session . add ( new_task_generation )
await session . commit ( )
await session . refresh ( new_task_generation )
return TaskGeneration . model_validate ( new_task_generation )
2024-09-03 07:00:15 +03:00
2025-01-08 21:45:38 -08:00
async def create_ai_suggestion (
self ,
organization_id : str ,
ai_suggestion_type : str ,
) - > AISuggestion :
async with self . Session ( ) as session :
new_ai_suggestion = AISuggestionModel (
organization_id = organization_id ,
ai_suggestion_type = ai_suggestion_type ,
)
session . add ( new_ai_suggestion )
await session . commit ( )
await session . refresh ( new_ai_suggestion )
return AISuggestion . model_validate ( new_ai_suggestion )
2026-01-06 14:58:44 -07:00
async def create_workflow_copilot_chat (
self ,
organization_id : str ,
workflow_permanent_id : str ,
) - > WorkflowCopilotChat :
async with self . Session ( ) as session :
new_chat = WorkflowCopilotChatModel (
organization_id = organization_id ,
workflow_permanent_id = workflow_permanent_id ,
)
session . add ( new_chat )
await session . commit ( )
await session . refresh ( new_chat )
return WorkflowCopilotChat . model_validate ( new_chat )
2026-01-27 13:24:44 -07:00
async def update_workflow_copilot_chat (
self ,
organization_id : str ,
workflow_copilot_chat_id : str ,
proposed_workflow : dict | None | object = _UNSET ,
auto_accept : bool | None = None ,
) - > WorkflowCopilotChat | None :
async with self . Session ( ) as session :
chat = (
await session . scalars (
select ( WorkflowCopilotChatModel )
. where ( WorkflowCopilotChatModel . organization_id == organization_id )
. where ( WorkflowCopilotChatModel . workflow_copilot_chat_id == workflow_copilot_chat_id )
)
) . first ( )
if not chat :
return None
if proposed_workflow is not _UNSET :
chat . proposed_workflow = proposed_workflow
if auto_accept is not None :
chat . auto_accept = auto_accept
await session . commit ( )
await session . refresh ( chat )
return WorkflowCopilotChat . model_validate ( chat )
2026-01-06 14:58:44 -07:00
async def create_workflow_copilot_chat_message (
self ,
organization_id : str ,
workflow_copilot_chat_id : str ,
sender : WorkflowCopilotChatSender ,
content : str ,
global_llm_context : str | None = None ,
) - > WorkflowCopilotChatMessage :
async with self . Session ( ) as session :
new_message = WorkflowCopilotChatMessageModel (
workflow_copilot_chat_id = workflow_copilot_chat_id ,
organization_id = organization_id ,
sender = sender ,
content = content ,
global_llm_context = global_llm_context ,
)
session . add ( new_message )
await session . commit ( )
await session . refresh ( new_message )
return convert_to_workflow_copilot_chat_message ( new_message , self . debug_enabled )
async def get_workflow_copilot_chat_messages (
self ,
workflow_copilot_chat_id : str ,
) - > list [ WorkflowCopilotChatMessage ] :
async with self . Session ( ) as session :
query = (
select ( WorkflowCopilotChatMessageModel )
. filter ( WorkflowCopilotChatMessageModel . workflow_copilot_chat_id == workflow_copilot_chat_id )
. order_by ( WorkflowCopilotChatMessageModel . workflow_copilot_chat_message_id . asc ( ) )
)
messages = ( await session . scalars ( query ) ) . all ( )
return [ convert_to_workflow_copilot_chat_message ( message , self . debug_enabled ) for message in messages ]
async def get_workflow_copilot_chat_by_id (
self ,
organization_id : str ,
workflow_copilot_chat_id : str ,
) - > WorkflowCopilotChat | None :
async with self . Session ( ) as session :
query = (
select ( WorkflowCopilotChatModel )
. filter ( WorkflowCopilotChatModel . organization_id == organization_id )
. filter ( WorkflowCopilotChatModel . workflow_copilot_chat_id == workflow_copilot_chat_id )
. order_by ( WorkflowCopilotChatModel . created_at . desc ( ) )
. limit ( 1 )
)
chat = ( await session . scalars ( query ) ) . first ( )
if not chat :
return None
return WorkflowCopilotChat . model_validate ( chat )
async def get_latest_workflow_copilot_chat (
self ,
organization_id : str ,
workflow_permanent_id : str ,
) - > WorkflowCopilotChat | None :
async with self . Session ( ) as session :
query = (
select ( WorkflowCopilotChatModel )
. filter ( WorkflowCopilotChatModel . organization_id == organization_id )
. filter ( WorkflowCopilotChatModel . workflow_permanent_id == workflow_permanent_id )
. order_by ( WorkflowCopilotChatModel . created_at . desc ( ) )
. limit ( 1 )
)
chat = ( await session . scalars ( query ) ) . first ( )
if not chat :
return None
return WorkflowCopilotChat . model_validate ( chat )
2024-09-03 07:00:15 +03:00
async def get_task_generation_by_prompt_hash (
self ,
user_prompt_hash : str ,
2024-09-02 21:32:57 -07:00
query_window_hours : int = settings . PROMPT_CACHE_WINDOW_HOURS ,
2024-09-03 07:00:15 +03:00
) - > TaskGeneration | None :
before_time = datetime . utcnow ( ) - timedelta ( hours = query_window_hours )
async with self . Session ( ) as session :
query = (
select ( TaskGenerationModel )
. filter_by ( user_prompt_hash = user_prompt_hash )
. filter ( TaskGenerationModel . llm . is_not ( None ) )
. filter ( TaskGenerationModel . created_at > before_time )
)
task_generation = ( await session . scalars ( query ) ) . first ( )
if not task_generation :
return None
return TaskGeneration . model_validate ( task_generation )
2024-09-08 15:07:03 -07:00
2025-10-14 16:24:14 +08:00
async def get_otp_codes (
2024-09-08 15:07:03 -07:00
self ,
organization_id : str ,
totp_identifier : str ,
valid_lifespan_minutes : int = settings . TOTP_LIFESPAN_MINUTES ,
2025-10-14 16:24:14 +08:00
otp_type : OTPType | None = None ,
2025-10-29 20:49:25 -07:00
workflow_run_id : str | None = None ,
limit : int | None = None ,
2024-09-08 15:07:03 -07:00
) - > list [ TOTPCode ] :
"""
1. filter by :
- organization_id
- totp_identifier
2025-10-29 20:49:25 -07:00
- workflow_run_id ( optional )
2024-09-08 15:07:03 -07:00
2. make sure created_at is within the valid lifespan
2025-11-20 16:39:11 +08:00
3. sort by task_id / workflow_id / workflow_run_id nullslast and created_at desc
2025-10-29 20:49:25 -07:00
4. apply an optional limit at the DB layer
2024-09-08 15:07:03 -07:00
"""
2025-11-20 16:39:11 +08:00
all_null = and_ (
TOTPCodeModel . task_id . is_ ( None ) ,
TOTPCodeModel . workflow_id . is_ ( None ) ,
TOTPCodeModel . workflow_run_id . is_ ( None ) ,
)
2024-09-08 15:07:03 -07:00
async with self . Session ( ) as session :
query = (
select ( TOTPCodeModel )
. filter_by ( organization_id = organization_id )
. filter_by ( totp_identifier = totp_identifier )
. filter ( TOTPCodeModel . created_at > datetime . utcnow ( ) - timedelta ( minutes = valid_lifespan_minutes ) )
)
2025-10-14 16:24:14 +08:00
if otp_type :
query = query . filter ( TOTPCodeModel . otp_type == otp_type )
2025-10-29 20:49:25 -07:00
if workflow_run_id is not None :
query = query . filter ( TOTPCodeModel . workflow_run_id == workflow_run_id )
2025-11-20 16:39:11 +08:00
query = query . order_by ( asc ( all_null ) , TOTPCodeModel . created_at . desc ( ) )
2025-10-29 20:49:25 -07:00
if limit is not None :
query = query . limit ( limit )
2024-09-08 15:07:03 -07:00
totp_code = ( await session . scalars ( query ) ) . all ( )
return [ TOTPCode . model_validate ( totp_code ) for totp_code in totp_code ]
2024-10-15 12:06:50 -07:00
2025-10-29 20:49:25 -07:00
async def get_recent_otp_codes (
self ,
organization_id : str ,
limit : int = 50 ,
2025-11-07 09:38:52 -08:00
valid_lifespan_minutes : int | None = None ,
2025-10-29 20:49:25 -07:00
otp_type : OTPType | None = None ,
workflow_run_id : str | None = None ,
2025-11-24 10:04:14 -08:00
totp_identifier : str | None = None ,
2025-10-29 20:49:25 -07:00
) - > list [ TOTPCode ] :
"""
Return recent otp codes for an organization ordered by newest first with optional
workflow_run_id filtering .
"""
async with self . Session ( ) as session :
2025-11-07 09:38:52 -08:00
query = select ( TOTPCodeModel ) . filter_by ( organization_id = organization_id )
if valid_lifespan_minutes is not None :
query = query . filter (
TOTPCodeModel . created_at > datetime . utcnow ( ) - timedelta ( minutes = valid_lifespan_minutes )
)
2025-10-29 20:49:25 -07:00
if otp_type :
query = query . filter ( TOTPCodeModel . otp_type == otp_type )
if workflow_run_id is not None :
query = query . filter ( TOTPCodeModel . workflow_run_id == workflow_run_id )
2025-11-24 10:04:14 -08:00
if totp_identifier :
query = query . filter ( TOTPCodeModel . totp_identifier == totp_identifier )
query = query . order_by ( TOTPCodeModel . created_at . desc ( ) ) . limit ( limit )
2025-10-29 20:49:25 -07:00
totp_codes = ( await session . scalars ( query ) ) . all ( )
return [ TOTPCode . model_validate ( totp_code ) for totp_code in totp_codes ]
2025-10-14 16:24:14 +08:00
async def create_otp_code (
2025-01-30 14:06:22 +08:00
self ,
organization_id : str ,
totp_identifier : str ,
content : str ,
code : str ,
2025-10-14 16:24:14 +08:00
otp_type : OTPType ,
2025-01-30 14:06:22 +08:00
task_id : str | None = None ,
workflow_id : str | None = None ,
2025-04-21 08:24:29 +08:00
workflow_run_id : str | None = None ,
2025-01-30 14:06:22 +08:00
source : str | None = None ,
expired_at : datetime | None = None ,
) - > TOTPCode :
async with self . Session ( ) as session :
new_totp_code = TOTPCodeModel (
organization_id = organization_id ,
totp_identifier = totp_identifier ,
content = content ,
code = code ,
task_id = task_id ,
workflow_id = workflow_id ,
2025-04-21 08:24:29 +08:00
workflow_run_id = workflow_run_id ,
2025-01-30 14:06:22 +08:00
source = source ,
expired_at = expired_at ,
2025-10-14 16:24:14 +08:00
otp_type = otp_type ,
2025-01-30 14:06:22 +08:00
)
session . add ( new_totp_code )
await session . commit ( )
await session . refresh ( new_totp_code )
return TOTPCode . model_validate ( new_totp_code )
2024-10-15 12:06:50 -07:00
async def create_action ( self , action : Action ) - > Action :
async with self . Session ( ) as session :
new_action = ActionModel (
action_type = action . action_type ,
source_action_id = action . source_action_id ,
organization_id = action . organization_id ,
workflow_run_id = action . workflow_run_id ,
task_id = action . task_id ,
step_id = action . step_id ,
step_order = action . step_order ,
action_order = action . action_order ,
status = action . status ,
reasoning = action . reasoning ,
intention = action . intention ,
response = action . response ,
element_id = action . element_id ,
skyvern_element_hash = action . skyvern_element_hash ,
skyvern_element_data = action . skyvern_element_data ,
2026-01-07 02:12:22 +03:00
screenshot_artifact_id = action . screenshot_artifact_id ,
2024-10-15 12:06:50 -07:00
action_json = action . model_dump ( ) ,
2024-10-20 12:10:58 -07:00
confidence_float = action . confidence_float ,
2025-08-26 13:17:38 -07:00
created_by = action . created_by ,
2024-10-15 12:06:50 -07:00
)
session . add ( new_action )
await session . commit ( )
await session . refresh ( new_action )
2026-01-18 11:17:02 -08:00
return hydrate_action ( new_action )
2024-10-15 12:06:50 -07:00
2026-01-07 02:12:22 +03:00
async def update_action_screenshot_artifact_id (
self , * , organization_id : str , action_id : str , screenshot_artifact_id : str
) - > None :
async with self . Session ( ) as session :
await session . execute (
update ( ActionModel )
. where ( ActionModel . action_id == action_id , ActionModel . organization_id == organization_id )
. values ( screenshot_artifact_id = screenshot_artifact_id )
)
await session . commit ( )
2025-10-02 18:53:08 -07:00
async def update_action_reasoning (
self ,
organization_id : str ,
action_id : str ,
reasoning : str ,
) - > Action :
async with self . Session ( ) as session :
action = (
await session . scalars (
select ( ActionModel ) . filter_by ( action_id = action_id ) . filter_by ( organization_id = organization_id )
)
) . first ( )
if action :
action . reasoning = reasoning
await session . commit ( )
await session . refresh ( action )
return Action . model_validate ( action )
raise NotFoundError ( f " Action { action_id } " )
2024-10-15 12:06:50 -07:00
    async def retrieve_action_plan(self, task: Task) -> list[Action]:
        """Return the action sequence of the most recent completed task with the
        same URL and navigation goal as *task*, ordered by step/action order.

        Used to replay a previously successful plan for an equivalent task.
        NOTE(review): the subquery matches only on url/navigation_goal/status and is
        NOT filtered by organization_id — presumably intentional (global plan cache),
        but worth confirming it cannot leak another org's action plan.
        """
        async with self.Session() as session:
            # Latest completed task that looks identical to the incoming one.
            subquery = (
                select(TaskModel.task_id)
                .filter(TaskModel.url == task.url)
                .filter(TaskModel.navigation_goal == task.navigation_goal)
                .filter(TaskModel.status == TaskStatus.completed)
                .order_by(TaskModel.created_at.desc())
                .limit(1)
                .subquery()
            )
            query = (
                select(ActionModel)
                .filter(ActionModel.task_id == subquery.c.task_id)
                .order_by(ActionModel.step_order, ActionModel.action_order, ActionModel.created_at)
            )
            actions = (await session.scalars(query)).all()
            return [Action.model_validate(action) for action in actions]
async def get_previous_actions_for_task ( self , task_id : str ) - > list [ Action ] :
async with self . Session ( ) as session :
query = (
select ( ActionModel )
. filter_by ( task_id = task_id )
. order_by ( ActionModel . step_order , ActionModel . action_order , ActionModel . created_at )
)
actions = ( await session . scalars ( query ) ) . all ( )
return [ Action . model_validate ( action ) for action in actions ]
2024-10-17 23:24:12 -07:00
async def delete_task_actions ( self , organization_id : str , task_id : str ) - > None :
async with self . Session ( ) as session :
# delete actions by filtering organization_id and task_id
stmt = delete ( ActionModel ) . where (
and_ (
ActionModel . organization_id == organization_id ,
ActionModel . task_id == task_id ,
)
)
await session . execute ( stmt )
await session . commit ( )
2024-12-08 12:43:59 -08:00
2025-12-18 11:32:40 -07:00
@read_retry ( )
2025-02-27 20:19:02 -08:00
async def get_task_v2 ( self , task_v2_id : str , organization_id : str | None = None ) - > TaskV2 | None :
2024-12-08 12:43:59 -08:00
async with self . Session ( ) as session :
2025-02-23 16:03:49 -08:00
if task_v2 := (
2024-12-08 12:43:59 -08:00
await session . scalars (
2025-02-27 20:19:02 -08:00
select ( TaskV2Model )
2025-02-23 16:03:49 -08:00
. filter_by ( observer_cruise_id = task_v2_id )
2024-12-08 12:43:59 -08:00
. filter_by ( organization_id = organization_id )
)
) . first ( ) :
2025-12-01 16:08:36 -08:00
return convert_to_task_v2 ( task_v2 , debug_enabled = self . debug_enabled )
2024-12-08 12:43:59 -08:00
return None
2025-02-27 20:19:02 -08:00
async def delete_thoughts ( self , task_v2_id : str , organization_id : str | None = None ) - > None :
2025-01-11 19:49:51 -08:00
async with self . Session ( ) as session :
2025-02-27 20:19:02 -08:00
stmt = delete ( ThoughtModel ) . where (
2025-01-11 19:49:51 -08:00
and_ (
2025-02-27 20:19:02 -08:00
ThoughtModel . observer_cruise_id == task_v2_id ,
ThoughtModel . organization_id == organization_id ,
2025-01-11 19:49:51 -08:00
)
)
await session . execute ( stmt )
await session . commit ( )
2025-02-23 16:03:49 -08:00
async def get_task_v2_by_workflow_run_id (
2024-12-23 11:48:27 -08:00
self ,
workflow_run_id : str ,
organization_id : str | None = None ,
2025-02-27 20:19:02 -08:00
) - > TaskV2 | None :
2024-12-23 11:48:27 -08:00
async with self . Session ( ) as session :
2025-02-23 16:03:49 -08:00
if task_v2 := (
2024-12-23 11:48:27 -08:00
await session . scalars (
2025-02-27 20:19:02 -08:00
select ( TaskV2Model )
2024-12-23 11:48:27 -08:00
. filter_by ( organization_id = organization_id )
. filter_by ( workflow_run_id = workflow_run_id )
)
) . first ( ) :
2025-12-01 16:08:36 -08:00
return convert_to_task_v2 ( task_v2 , debug_enabled = self . debug_enabled )
2024-12-23 11:48:27 -08:00
return None
2025-02-27 20:19:02 -08:00
async def get_thought ( self , thought_id : str , organization_id : str | None = None ) - > Thought | None :
2024-12-08 12:43:59 -08:00
async with self . Session ( ) as session :
2025-02-27 20:19:02 -08:00
if thought := (
2024-12-08 12:43:59 -08:00
await session . scalars (
2025-02-27 20:19:02 -08:00
select ( ThoughtModel )
. filter_by ( observer_thought_id = thought_id )
2024-12-08 12:43:59 -08:00
. filter_by ( organization_id = organization_id )
)
) . first ( ) :
2025-02-27 20:19:02 -08:00
return Thought . model_validate ( thought )
2024-12-08 12:43:59 -08:00
return None
2025-02-27 20:19:02 -08:00
async def get_thoughts (
2024-12-17 17:17:18 -08:00
self ,
2025-06-13 10:44:04 -04:00
* ,
2025-02-23 16:03:49 -08:00
task_v2_id : str ,
2025-06-13 10:44:04 -04:00
thought_types : list [ ThoughtType ] ,
organization_id : str ,
2025-02-27 20:19:02 -08:00
) - > list [ Thought ] :
2024-12-17 17:17:18 -08:00
async with self . Session ( ) as session :
2024-12-27 09:04:09 -08:00
query = (
2025-02-27 20:19:02 -08:00
select ( ThoughtModel )
2025-02-23 16:03:49 -08:00
. filter_by ( observer_cruise_id = task_v2_id )
2024-12-27 09:04:09 -08:00
. filter_by ( organization_id = organization_id )
2025-02-27 20:19:02 -08:00
. order_by ( ThoughtModel . created_at )
2024-12-27 09:04:09 -08:00
)
2025-02-27 20:19:02 -08:00
if thought_types :
query = query . filter ( ThoughtModel . observer_thought_type . in_ ( thought_types ) )
thoughts = ( await session . scalars ( query ) ) . all ( )
return [ Thought . model_validate ( thought ) for thought in thoughts ]
2024-12-17 17:17:18 -08:00
2025-02-23 16:03:49 -08:00
    async def create_task_v2(
        self,
        workflow_run_id: str | None = None,
        workflow_id: str | None = None,
        workflow_permanent_id: str | None = None,
        prompt: str | None = None,
        url: str | None = None,
        organization_id: str | None = None,
        proxy_location: ProxyLocationInput = None,
        totp_identifier: str | None = None,
        totp_verification_url: str | None = None,
        webhook_callback_url: str | None = None,
        extracted_information_schema: dict | list | str | None = None,
        error_code_mapping: dict | None = None,
        model: dict[str, Any] | None = None,
        max_screenshot_scrolling_times: int | None = None,
        extra_http_headers: dict[str, str] | None = None,
        browser_address: str | None = None,
        run_with: str | None = None,
    ) -> TaskV2:
        """Insert a new v2 task row and return it as a TaskV2.

        All arguments map one-to-one onto TaskV2Model columns; the proxy location
        is normalized through _serialize_proxy_location before storage.
        """
        async with self.Session() as session:
            new_task_v2 = TaskV2Model(
                workflow_run_id=workflow_run_id,
                workflow_id=workflow_id,
                workflow_permanent_id=workflow_permanent_id,
                prompt=prompt,
                url=url,
                proxy_location=_serialize_proxy_location(proxy_location),
                totp_identifier=totp_identifier,
                totp_verification_url=totp_verification_url,
                webhook_callback_url=webhook_callback_url,
                extracted_information_schema=extracted_information_schema,
                error_code_mapping=error_code_mapping,
                organization_id=organization_id,
                model=model,
                max_screenshot_scrolling_times=max_screenshot_scrolling_times,
                extra_http_headers=extra_http_headers,
                browser_address=browser_address,
                run_with=run_with,
            )
            session.add(new_task_v2)
            await session.commit()
            # Refresh to pick up DB-generated fields (id, timestamps) before converting.
            await session.refresh(new_task_v2)
            return convert_to_task_v2(new_task_v2, debug_enabled=self.debug_enabled)
2024-12-08 12:43:59 -08:00
2025-02-27 20:19:02 -08:00
    async def create_thought(
        self,
        task_v2_id: str,
        workflow_run_id: str | None = None,
        workflow_id: str | None = None,
        workflow_permanent_id: str | None = None,
        workflow_run_block_id: str | None = None,
        user_input: str | None = None,
        observation: str | None = None,
        thought: str | None = None,
        answer: str | None = None,
        thought_scenario: str | None = None,
        thought_type: str = ThoughtType.plan,
        output: dict[str, Any] | None = None,
        input_token_count: int | None = None,
        output_token_count: int | None = None,
        reasoning_token_count: int | None = None,
        cached_token_count: int | None = None,
        thought_cost: float | None = None,
        organization_id: str | None = None,
    ) -> Thought:
        """Insert a new thought row for a v2 task and return it as a Thought.

        The v2 task id and the scenario/type arguments are stored under the
        legacy ``observer_*`` column names.
        """
        async with self.Session() as session:
            new_thought = ThoughtModel(
                # Legacy column name for the v2 task id.
                observer_cruise_id=task_v2_id,
                workflow_run_id=workflow_run_id,
                workflow_id=workflow_id,
                workflow_permanent_id=workflow_permanent_id,
                workflow_run_block_id=workflow_run_block_id,
                user_input=user_input,
                observation=observation,
                thought=thought,
                answer=answer,
                observer_thought_scenario=thought_scenario,
                observer_thought_type=thought_type,
                output=output,
                input_token_count=input_token_count,
                output_token_count=output_token_count,
                reasoning_token_count=reasoning_token_count,
                cached_token_count=cached_token_count,
                thought_cost=thought_cost,
                organization_id=organization_id,
            )
            session.add(new_thought)
            await session.commit()
            await session.refresh(new_thought)
            return Thought.model_validate(new_thought)
2024-12-08 12:43:59 -08:00
2025-02-27 20:19:02 -08:00
    async def update_thought(
        self,
        thought_id: str,
        workflow_run_block_id: str | None = None,
        workflow_run_id: str | None = None,
        workflow_id: str | None = None,
        workflow_permanent_id: str | None = None,
        observation: str | None = None,
        thought: str | None = None,
        answer: str | None = None,
        output: dict[str, Any] | None = None,
        input_token_count: int | None = None,
        output_token_count: int | None = None,
        reasoning_token_count: int | None = None,
        cached_token_count: int | None = None,
        thought_cost: float | None = None,
        organization_id: str | None = None,
    ) -> Thought:
        """Partially update a thought: only truthy arguments are written.

        Because the guards are truthiness checks, falsy values (empty strings,
        0 token counts, 0.0 cost) are silently skipped — they cannot be used to
        reset a column back to zero/empty.

        Raises:
            NotFoundError: if no thought matches the id within the organization.
        """
        async with self.Session() as session:
            thought_obj = (
                await session.scalars(
                    select(ThoughtModel)
                    .filter_by(observer_thought_id=thought_id)
                    .filter_by(organization_id=organization_id)
                )
            ).first()
            if thought_obj:
                if workflow_run_block_id:
                    thought_obj.workflow_run_block_id = workflow_run_block_id
                if workflow_run_id:
                    thought_obj.workflow_run_id = workflow_run_id
                if workflow_id:
                    thought_obj.workflow_id = workflow_id
                if workflow_permanent_id:
                    thought_obj.workflow_permanent_id = workflow_permanent_id
                if observation:
                    thought_obj.observation = observation
                if thought:
                    thought_obj.thought = thought
                if answer:
                    thought_obj.answer = answer
                if output:
                    thought_obj.output = output
                if input_token_count:
                    thought_obj.input_token_count = input_token_count
                if output_token_count:
                    thought_obj.output_token_count = output_token_count
                if reasoning_token_count:
                    thought_obj.reasoning_token_count = reasoning_token_count
                if cached_token_count:
                    thought_obj.cached_token_count = cached_token_count
                if thought_cost:
                    thought_obj.thought_cost = thought_cost
                await session.commit()
                await session.refresh(thought_obj)
                return Thought.model_validate(thought_obj)
            raise NotFoundError(f"Thought {thought_id}")
2024-12-22 23:01:02 -08:00
2025-02-23 16:03:49 -08:00
    async def update_task_v2(
        self,
        task_v2_id: str,
        status: TaskV2Status | None = None,
        workflow_run_id: str | None = None,
        workflow_id: str | None = None,
        workflow_permanent_id: str | None = None,
        url: str | None = None,
        prompt: str | None = None,
        summary: str | None = None,
        output: dict[str, Any] | None = None,
        organization_id: str | None = None,
        webhook_failure_reason: str | None = None,
    ) -> TaskV2:
        """Partially update a v2 task: only provided (truthy) fields are written.

        Status transitions also stamp lifecycle timestamps exactly once:
        queued_at on first queued, started_at on first running, finished_at on
        the first final status. ``webhook_failure_reason`` uses an explicit
        ``is not None`` check so it can be cleared with an empty string.

        Raises:
            NotFoundError: if no v2 task matches the id within the organization.
        """
        async with self.Session() as session:
            task_v2 = (
                await session.scalars(
                    select(TaskV2Model)
                    .filter_by(observer_cruise_id=task_v2_id)
                    .filter_by(organization_id=organization_id)
                )
            ).first()
            if task_v2:
                if status:
                    task_v2.status = status
                    # Stamp each lifecycle timestamp only on its first transition.
                    # NOTE(review): naive UTC timestamps (datetime.utcnow) — consistent
                    # with the rest of this module.
                    if status == TaskV2Status.queued and task_v2.queued_at is None:
                        task_v2.queued_at = datetime.utcnow()
                    if status == TaskV2Status.running and task_v2.started_at is None:
                        task_v2.started_at = datetime.utcnow()
                    if status.is_final() and task_v2.finished_at is None:
                        task_v2.finished_at = datetime.utcnow()
                if workflow_run_id:
                    task_v2.workflow_run_id = workflow_run_id
                if workflow_id:
                    task_v2.workflow_id = workflow_id
                if workflow_permanent_id:
                    task_v2.workflow_permanent_id = workflow_permanent_id
                if url:
                    task_v2.url = url
                if prompt:
                    task_v2.prompt = prompt
                if summary:
                    task_v2.summary = summary
                if output:
                    task_v2.output = output
                if webhook_failure_reason is not None:
                    task_v2.webhook_failure_reason = webhook_failure_reason
                await session.commit()
                await session.refresh(task_v2)
                return convert_to_task_v2(task_v2, debug_enabled=self.debug_enabled)
            raise NotFoundError(f"TaskV2 {task_v2_id} not found")
2024-12-20 07:40:32 -08:00
async def create_workflow_run_block (
self ,
workflow_run_id : str ,
parent_workflow_run_block_id : str | None = None ,
organization_id : str | None = None ,
task_id : str | None = None ,
label : str | None = None ,
block_type : BlockType | None = None ,
status : BlockStatus = BlockStatus . running ,
output : dict | list | str | None = None ,
continue_on_failure : bool = False ,
2025-06-17 00:25:58 -07:00
engine : RunEngine | None = None ,
2024-12-20 07:40:32 -08:00
) - > WorkflowRunBlock :
async with self . Session ( ) as session :
new_workflow_run_block = WorkflowRunBlockModel (
workflow_run_id = workflow_run_id ,
parent_workflow_run_block_id = parent_workflow_run_block_id ,
organization_id = organization_id ,
task_id = task_id ,
label = label ,
block_type = block_type ,
status = status ,
output = output ,
continue_on_failure = continue_on_failure ,
2025-06-17 00:25:58 -07:00
engine = engine ,
2024-12-20 07:40:32 -08:00
)
session . add ( new_workflow_run_block )
await session . commit ( )
await session . refresh ( new_workflow_run_block )
task = None
if task_id :
task = await self . get_task ( task_id , organization_id = organization_id )
return convert_to_workflow_run_block ( new_workflow_run_block , task = task )
2025-01-03 14:22:42 -08:00
async def delete_workflow_run_blocks ( self , workflow_run_id : str , organization_id : str | None = None ) - > None :
async with self . Session ( ) as session :
stmt = delete ( WorkflowRunBlockModel ) . where (
and_ (
WorkflowRunBlockModel . workflow_run_id == workflow_run_id ,
WorkflowRunBlockModel . organization_id == organization_id ,
)
)
await session . execute ( stmt )
await session . commit ( )
2024-12-20 07:40:32 -08:00
async def update_workflow_run_block (
self ,
workflow_run_block_id : str ,
2025-01-03 15:55:57 -08:00
organization_id : str | None = None ,
2024-12-20 07:40:32 -08:00
status : BlockStatus | None = None ,
output : dict | list | str | None = None ,
2024-12-22 11:16:23 -08:00
failure_reason : str | None = None ,
task_id : str | None = None ,
2024-12-23 01:13:25 -08:00
loop_values : list | None = None ,
current_value : str | None = None ,
current_index : int | None = None ,
2024-12-23 11:48:27 -08:00
recipients : list [ str ] | None = None ,
attachments : list [ str ] | None = None ,
subject : str | None = None ,
body : str | None = None ,
prompt : str | None = None ,
wait_sec : int | None = None ,
2025-01-03 15:55:57 -08:00
description : str | None = None ,
2025-01-28 16:59:54 +08:00
block_workflow_run_id : str | None = None ,
2025-06-17 00:25:58 -07:00
engine : str | None = None ,
2025-07-01 14:10:58 -04:00
# HTTP request block parameters
http_request_method : str | None = None ,
http_request_url : str | None = None ,
http_request_headers : dict [ str , str ] | None = None ,
http_request_body : dict [ str , Any ] | None = None ,
http_request_parameters : dict [ str , Any ] | None = None ,
http_request_timeout : int | None = None ,
http_request_follow_redirects : bool | None = None ,
2025-09-14 22:53:52 -07:00
ai_fallback_triggered : bool | None = None ,
2025-10-27 14:50:17 -04:00
# human interaction block
instructions : str | None = None ,
positive_descriptor : str | None = None ,
negative_descriptor : str | None = None ,
2025-12-07 12:37:00 -08:00
# conditional block
executed_branch_id : str | None = None ,
executed_branch_expression : str | None = None ,
executed_branch_result : bool | None = None ,
executed_branch_next_block : str | None = None ,
2024-12-20 07:40:32 -08:00
) - > WorkflowRunBlock :
async with self . Session ( ) as session :
workflow_run_block = (
await session . scalars (
2024-12-22 11:16:23 -08:00
select ( WorkflowRunBlockModel )
. filter_by ( workflow_run_block_id = workflow_run_block_id )
. filter_by ( organization_id = organization_id )
2024-12-20 07:40:32 -08:00
)
) . first ( )
if workflow_run_block :
if status :
workflow_run_block . status = status
if output :
workflow_run_block . output = output
2024-12-22 11:16:23 -08:00
if task_id :
workflow_run_block . task_id = task_id
if failure_reason :
workflow_run_block . failure_reason = failure_reason
2024-12-23 01:13:25 -08:00
if loop_values :
workflow_run_block . loop_values = loop_values
if current_value :
workflow_run_block . current_value = current_value
if current_index :
workflow_run_block . current_index = current_index
2024-12-23 11:48:27 -08:00
if recipients :
workflow_run_block . recipients = recipients
if attachments :
workflow_run_block . attachments = attachments
if subject :
workflow_run_block . subject = subject
if body :
workflow_run_block . body = body
if prompt :
workflow_run_block . prompt = prompt
if wait_sec :
workflow_run_block . wait_sec = wait_sec
2025-01-03 15:55:57 -08:00
if description :
workflow_run_block . description = description
2025-01-28 16:59:54 +08:00
if block_workflow_run_id :
workflow_run_block . block_workflow_run_id = block_workflow_run_id
2025-06-17 00:25:58 -07:00
if engine :
workflow_run_block . engine = engine
2025-07-01 14:10:58 -04:00
# HTTP request block fields
if http_request_method :
workflow_run_block . http_request_method = http_request_method
if http_request_url :
workflow_run_block . http_request_url = http_request_url
if http_request_headers :
workflow_run_block . http_request_headers = http_request_headers
if http_request_body :
workflow_run_block . http_request_body = http_request_body
if http_request_parameters :
workflow_run_block . http_request_parameters = http_request_parameters
if http_request_timeout :
workflow_run_block . http_request_timeout = http_request_timeout
if http_request_follow_redirects is not None :
workflow_run_block . http_request_follow_redirects = http_request_follow_redirects
2025-09-14 22:53:52 -07:00
if ai_fallback_triggered is not None :
workflow_run_block . script_run = { " ai_fallback_triggered " : ai_fallback_triggered }
2025-10-27 14:50:17 -04:00
# human interaction block fields
if instructions :
workflow_run_block . instructions = instructions
if positive_descriptor :
workflow_run_block . positive_descriptor = positive_descriptor
if negative_descriptor :
workflow_run_block . negative_descriptor = negative_descriptor
2025-12-07 12:37:00 -08:00
# conditional block fields
if executed_branch_id :
workflow_run_block . executed_branch_id = executed_branch_id
if executed_branch_expression is not None :
workflow_run_block . executed_branch_expression = executed_branch_expression
if executed_branch_result is not None :
workflow_run_block . executed_branch_result = executed_branch_result
if executed_branch_next_block is not None :
workflow_run_block . executed_branch_next_block = executed_branch_next_block
2024-12-20 07:40:32 -08:00
await session . commit ( )
await session . refresh ( workflow_run_block )
else :
raise NotFoundError ( f " WorkflowRunBlock { workflow_run_block_id } not found " )
task = None
task_id = workflow_run_block . task_id
if task_id :
task = await self . get_task ( task_id , organization_id = workflow_run_block . organization_id )
return convert_to_workflow_run_block ( workflow_run_block , task = task )
2024-12-22 11:16:23 -08:00
async def get_workflow_run_block (
self ,
workflow_run_block_id : str ,
organization_id : str | None = None ,
) - > WorkflowRunBlock :
async with self . Session ( ) as session :
workflow_run_block = (
await session . scalars (
select ( WorkflowRunBlockModel )
. filter_by ( workflow_run_block_id = workflow_run_block_id )
. filter_by ( organization_id = organization_id )
)
) . first ( )
if workflow_run_block :
task = None
task_id = workflow_run_block . task_id
if task_id :
task = await self . get_task ( task_id , organization_id = organization_id )
return convert_to_workflow_run_block ( workflow_run_block , task = task )
raise NotFoundError ( f " WorkflowRunBlock { workflow_run_block_id } not found " )
2024-12-22 20:54:53 -08:00
2025-06-17 00:25:58 -07:00
async def get_workflow_run_block_by_task_id (
self ,
task_id : str ,
organization_id : str | None = None ,
) - > WorkflowRunBlock :
async with self . Session ( ) as session :
workflow_run_block = (
await session . scalars (
select ( WorkflowRunBlockModel ) . filter_by ( task_id = task_id ) . filter_by ( organization_id = organization_id )
)
) . first ( )
if workflow_run_block :
task = None
task_id = workflow_run_block . task_id
if task_id :
task = await self . get_task ( task_id , organization_id = organization_id )
return convert_to_workflow_run_block ( workflow_run_block , task = task )
raise NotFoundError ( f " WorkflowRunBlock not found by { task_id } " )
2024-12-22 20:54:53 -08:00
async def get_workflow_run_blocks (
self ,
workflow_run_id : str ,
organization_id : str | None = None ,
) - > list [ WorkflowRunBlock ] :
async with self . Session ( ) as session :
workflow_run_blocks = (
await session . scalars (
select ( WorkflowRunBlockModel )
. filter_by ( workflow_run_id = workflow_run_id )
. filter_by ( organization_id = organization_id )
2025-01-06 06:04:24 -08:00
. order_by ( WorkflowRunBlockModel . created_at . desc ( ) )
2024-12-22 20:54:53 -08:00
)
) . all ( )
tasks = await self . get_tasks_by_workflow_run_id ( workflow_run_id )
tasks_dict = { task . task_id : task for task in tasks }
return [
convert_to_workflow_run_block ( workflow_run_block , task = tasks_dict . get ( workflow_run_block . task_id ) )
for workflow_run_block in workflow_run_blocks
]
2025-01-08 18:14:38 +01:00
2025-11-04 17:36:41 -08:00
async def create_browser_profile (
self ,
organization_id : str ,
name : str ,
description : str | None = None ,
) - > BrowserProfile :
try :
async with self . Session ( ) as session :
browser_profile = BrowserProfileModel (
organization_id = organization_id ,
name = name ,
description = description ,
)
session . add ( browser_profile )
await session . commit ( )
await session . refresh ( browser_profile )
return BrowserProfile . model_validate ( browser_profile )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError in create_browser_profile " , exc_info = True )
raise
async def get_browser_profile (
self ,
profile_id : str ,
organization_id : str ,
include_deleted : bool = False ,
) - > BrowserProfile | None :
try :
async with self . Session ( ) as session :
query = (
select ( BrowserProfileModel )
. filter_by ( browser_profile_id = profile_id )
. filter_by ( organization_id = organization_id )
)
if not include_deleted :
query = query . filter ( BrowserProfileModel . deleted_at . is_ ( None ) )
browser_profile = ( await session . scalars ( query ) ) . first ( )
if not browser_profile :
return None
return BrowserProfile . model_validate ( browser_profile )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError in get_browser_profile " , exc_info = True )
raise
async def list_browser_profiles (
self ,
organization_id : str ,
include_deleted : bool = False ,
) - > list [ BrowserProfile ] :
try :
async with self . Session ( ) as session :
query = select ( BrowserProfileModel ) . filter_by ( organization_id = organization_id )
if not include_deleted :
query = query . filter ( BrowserProfileModel . deleted_at . is_ ( None ) )
browser_profiles = await session . scalars ( query . order_by ( asc ( BrowserProfileModel . created_at ) ) )
return [ BrowserProfile . model_validate ( profile ) for profile in browser_profiles . all ( ) ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError in list_browser_profiles " , exc_info = True )
raise
async def delete_browser_profile (
self ,
profile_id : str ,
organization_id : str ,
) - > None :
try :
async with self . Session ( ) as session :
query = (
select ( BrowserProfileModel )
. filter_by ( browser_profile_id = profile_id )
. filter_by ( organization_id = organization_id )
. filter ( BrowserProfileModel . deleted_at . is_ ( None ) )
)
browser_profile = ( await session . scalars ( query ) ) . first ( )
if not browser_profile :
raise BrowserProfileNotFound ( profile_id = profile_id , organization_id = organization_id )
browser_profile . deleted_at = datetime . utcnow ( )
await session . commit ( )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError in delete_browser_profile " , exc_info = True )
raise
2025-08-08 15:46:49 -07:00
async def get_active_persistent_browser_sessions (
self ,
organization_id : str ,
active_hours : int = 24 ,
) - > list [ PersistentBrowserSession ] :
2025-01-08 18:14:38 +01:00
""" Get all active persistent browser sessions for an organization. """
try :
async with self . Session ( ) as session :
result = await session . execute (
select ( PersistentBrowserSessionModel )
. filter_by ( organization_id = organization_id )
. filter_by ( deleted_at = None )
2025-08-08 15:46:49 -07:00
. filter_by ( completed_at = None )
. filter (
PersistentBrowserSessionModel . created_at > datetime . utcnow ( ) - timedelta ( hours = active_hours )
)
2025-01-08 18:14:38 +01:00
)
sessions = result . scalars ( ) . all ( )
return [ PersistentBrowserSession . model_validate ( session ) for session in sessions ]
2025-09-29 09:29:47 -04:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_persistent_browser_sessions_history (
self ,
organization_id : str ,
page : int = 1 ,
page_size : int = 10 ,
lookback_hours : int = 24 * 7 ,
) - > list [ PersistentBrowserSession ] :
""" Get persistent browser sessions history for an organization. """
try :
async with self . Session ( ) as session :
open_first = case (
(
2025-12-04 15:07:55 +08:00
PersistentBrowserSessionModel . status == " running " ,
2025-09-29 09:29:47 -04:00
0 , # open
) ,
else_ = 1 , # not open
)
result = await session . execute (
select ( PersistentBrowserSessionModel )
. filter_by ( organization_id = organization_id )
. filter_by ( deleted_at = None )
. filter (
2025-12-04 15:07:55 +08:00
PersistentBrowserSessionModel . created_at > ( datetime . utcnow ( ) - timedelta ( hours = lookback_hours ) )
2025-09-29 09:29:47 -04:00
)
. order_by (
open_first . asc ( ) , # open sessions first
PersistentBrowserSessionModel . created_at . desc ( ) , # then newest within each group
)
. offset ( ( page - 1 ) * page_size )
. limit ( page_size )
)
sessions = result . scalars ( ) . all ( )
return [ PersistentBrowserSession . model_validate ( session ) for session in sessions ]
2025-01-08 18:14:38 +01:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
2025-05-15 11:35:33 -07:00
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2025-12-18 11:32:40 -07:00
@read_retry ( )
2025-05-15 11:35:33 -07:00
async def get_persistent_browser_session_by_runnable_id (
self , runnable_id : str , organization_id : str | None = None
) - > PersistentBrowserSession | None :
""" Get a specific persistent browser session. """
try :
async with self . Session ( ) as session :
query = (
select ( PersistentBrowserSessionModel )
. filter_by ( runnable_id = runnable_id )
. filter_by ( deleted_at = None )
. filter_by ( completed_at = None )
)
if organization_id :
query = query . filter_by ( organization_id = organization_id )
persistent_browser_session = ( await session . scalars ( query ) ) . first ( )
if persistent_browser_session :
return PersistentBrowserSession . model_validate ( persistent_browser_session )
2025-05-15 12:50:34 -07:00
return None
2025-05-15 11:35:33 -07:00
except NotFoundError :
LOG . error ( " NotFoundError " , exc_info = True )
raise
2025-02-16 13:11:03 +01:00
2025-01-08 18:14:38 +01:00
async def get_persistent_browser_session(
    self,
    session_id: str,
    organization_id: str | None = None,
) -> PersistentBrowserSession | None:
    """Get a specific persistent browser session.

    Returns None when no matching, non-deleted session exists.
    """
    try:
        async with self.Session() as session:
            persistent_browser_session = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if persistent_browser_session:
                return PersistentBrowserSession.model_validate(persistent_browser_session)
            # Previously this raised NotFoundError only to catch it below and
            # return None — exceptions as control flow. Return None directly.
            return None
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def create_persistent_browser_session(
    self,
    organization_id: str,
    runnable_type: str | None = None,
    runnable_id: str | None = None,
    timeout_minutes: int | None = None,
    proxy_location: ProxyLocationInput = ProxyLocation.RESIDENTIAL,
    extensions: list[Extensions] | None = None,
    browser_type: PersistentBrowserType | None = None,
) -> PersistentBrowserSession:
    """Create a new persistent browser session."""
    # Enum extensions are persisted by their string values; None stays None.
    extensions_str: list[str] | None = None
    if extensions is not None:
        extensions_str = [extension.value for extension in extensions]
    try:
        async with self.Session() as session:
            new_session = PersistentBrowserSessionModel(
                organization_id=organization_id,
                runnable_type=runnable_type,
                runnable_id=runnable_id,
                timeout_minutes=timeout_minutes,
                proxy_location=_serialize_proxy_location(proxy_location),
                extensions=extensions_str,
                browser_type=browser_type.value if browser_type else None,
            )
            session.add(new_session)
            await session.commit()
            await session.refresh(new_session)
            return PersistentBrowserSession.model_validate(new_session)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-07-29 09:32:52 -04:00
async def update_persistent_browser_session(
    self,
    browser_session_id: str,
    *,
    status: str | None = None,
    timeout_minutes: int | None = None,
    organization_id: str | None = None,
    completed_at: datetime | None = None,
) -> PersistentBrowserSession:
    """Update mutable fields of a persistent browser session.

    Only truthy keyword arguments are applied. Raises NotFoundError when no
    matching, non-deleted session exists.
    """
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=browser_session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if record is None:
                raise NotFoundError(f"PersistentBrowserSession {browser_session_id} not found")

            if status:
                record.status = status
            if timeout_minutes:
                record.timeout_minutes = timeout_minutes
            if completed_at:
                record.completed_at = completed_at

            await session.commit()
            await session.refresh(record)
            return PersistentBrowserSession.model_validate(record)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-05-14 16:24:44 -07:00
async def set_persistent_browser_session_browser_address(
    self,
    browser_session_id: str,
    browser_address: str | None,
    ip_address: str | None,
    ecs_task_arn: str | None,
    organization_id: str | None = None,
) -> None:
    """Set the browser address for a persistent browser session."""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=browser_session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if record is None:
                raise NotFoundError(f"PersistentBrowserSession {browser_session_id} not found")

            if browser_address:
                record.browser_address = browser_address
                # once the address is set, the session is started
                record.started_at = datetime.utcnow()
            if ip_address:
                record.ip_address = ip_address
            if ecs_task_arn:
                record.ecs_task_arn = ecs_task_arn

            await session.commit()
            await session.refresh(record)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2026-01-21 22:27:16 -05:00
async def update_persistent_browser_session_compute_cost(
    self,
    session_id: str,
    organization_id: str,
    instance_type: str,
    vcpu_millicores: int,
    memory_mb: int,
    duration_ms: int,
    compute_cost: float,
) -> None:
    """Update the compute cost fields for a persistent browser session"""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if record is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")

            # All cost metrics are written unconditionally as one batch.
            record.instance_type = instance_type
            record.vcpu_millicores = vcpu_millicores
            record.memory_mb = memory_mb
            record.duration_ms = duration_ms
            record.compute_cost = compute_cost
            await session.commit()
            await session.refresh(record)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-01-08 18:14:38 +01:00
async def mark_persistent_browser_session_deleted(self, session_id: str, organization_id: str) -> None:
    """Mark a persistent browser session as deleted."""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=session_id)
                    .filter_by(organization_id=organization_id)
                )
            ).first()
            if record is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
            # Soft delete: stamp deleted_at rather than removing the row.
            record.deleted_at = datetime.utcnow()
            await session.commit()
            await session.refresh(record)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def occupy_persistent_browser_session(
    self, session_id: str, runnable_type: str, runnable_id: str, organization_id: str
) -> None:
    """Occupy a specific persistent browser session."""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if record is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
            # Binding a runnable marks the session as occupied.
            record.runnable_type = runnable_type
            record.runnable_id = runnable_id
            await session.commit()
            await session.refresh(record)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-05-14 16:24:44 -07:00
async def release_persistent_browser_session(
    self,
    session_id: str,
    organization_id: str,
) -> PersistentBrowserSession:
    """Release a specific persistent browser session."""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if record is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
            # Clearing the runnable binding makes the session available again.
            record.runnable_type = None
            record.runnable_id = None
            await session.commit()
            await session.refresh(record)
            return PersistentBrowserSession.model_validate(record)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-05-14 16:24:44 -07:00
async def close_persistent_browser_session(self, session_id: str, organization_id: str) -> PersistentBrowserSession:
    """Close a specific persistent browser session."""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if record is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
            # Idempotent: an already-completed session is returned unchanged.
            if record.completed_at:
                return PersistentBrowserSession.model_validate(record)
            record.completed_at = datetime.utcnow()
            await session.commit()
            await session.refresh(record)
            return PersistentBrowserSession.model_validate(record)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-01-08 18:14:38 +01:00
async def get_all_active_persistent_browser_sessions(self) -> List[PersistentBrowserSessionModel]:
    """Get all active persistent browser sessions across all organizations.

    Returns raw ORM model instances (not validated schema objects) for every
    session whose deleted_at is unset.
    """
    try:
        async with self.Session() as session:
            result = await session.execute(select(PersistentBrowserSessionModel).filter_by(deleted_at=None))
            # .scalars().all() returns a Sequence; materialize a list to match
            # the declared List[...] return type.
            return list(result.scalars().all())
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-02-09 20:30:19 +08:00
async def create_task_run(
    self,
    task_run_type: RunType,
    organization_id: str,
    run_id: str,
    title: str | None = None,
    url: str | None = None,
    url_hash: str | None = None,
) -> Run:
    """Insert a task-run record and return it as a validated Run."""
    async with self.Session() as session:
        new_run = TaskRunModel(
            task_run_type=task_run_type,
            organization_id=organization_id,
            run_id=run_id,
            title=title,
            url=url,
            url_hash=url_hash,
        )
        session.add(new_run)
        await session.commit()
        await session.refresh(new_run)
        return Run.model_validate(new_run)
2025-10-22 13:58:40 -06:00
async def update_task_run(
    self,
    organization_id: str,
    run_id: str,
    title: str | None = None,
    url: str | None = None,
    url_hash: str | None = None,
) -> None:
    """Apply the provided (truthy) fields to an existing task run.

    Raises NotFoundError when the run does not exist for the organization.
    """
    async with self.Session() as session:
        query = select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
        record = (await session.scalars(query)).first()
        if record is None:
            raise NotFoundError(f"TaskRun {run_id} not found")
        if title:
            record.title = title
        if url:
            record.url = url
        if url_hash:
            record.url_hash = url_hash
        await session.commit()
2025-12-09 15:23:47 -08:00
async def update_job_run_compute_cost(
    self,
    organization_id: str,
    run_id: str,
    instance_type: str | None = None,
    vcpu_millicores: int | None = None,
    memory_mb: int | None = None,
    duration_ms: int | None = None,
    compute_cost: float | None = None,
) -> None:
    """Update compute cost metrics for a job run."""
    async with self.Session() as session:
        query = select(TaskRunModel).filter_by(run_id=run_id).filter_by(organization_id=organization_id)
        record = (await session.scalars(query)).first()
        if record is None:
            # Best-effort: a missing run is logged, not raised.
            LOG.warning(
                "TaskRun not found for compute cost update",
                run_id=run_id,
                organization_id=organization_id,
            )
            return
        # Explicit None checks so a legitimate 0 value is still written.
        if instance_type is not None:
            record.instance_type = instance_type
        if vcpu_millicores is not None:
            record.vcpu_millicores = vcpu_millicores
        if memory_mb is not None:
            record.memory_mb = memory_mb
        if duration_ms is not None:
            record.duration_ms = duration_ms
        if compute_cost is not None:
            record.compute_cost = compute_cost
        await session.commit()
2025-02-14 00:00:19 +08:00
async def create_credential(
    self,
    organization_id: str,
    name: str,
    vault_type: CredentialVaultType,
    item_id: str,
    credential_type: CredentialType,
    username: str | None,
    totp_type: str,
    card_last4: str | None,
    card_brand: str | None,
    totp_identifier: str | None = None,
    secret_label: str | None = None,
) -> Credential:
    """Insert a credential record pointing at a vault item and return it."""
    async with self.Session() as session:
        new_credential = CredentialModel(
            organization_id=organization_id,
            name=name,
            vault_type=vault_type,
            item_id=item_id,
            credential_type=credential_type,
            username=username,
            totp_type=totp_type,
            totp_identifier=totp_identifier,
            card_last4=card_last4,
            card_brand=card_brand,
            secret_label=secret_label,
        )
        session.add(new_credential)
        await session.commit()
        await session.refresh(new_credential)
        return Credential.model_validate(new_credential)
2025-09-10 14:20:23 +08:00
async def get_credential(self, credential_id: str, organization_id: str) -> Credential | None:
    """Fetch a non-deleted credential by id, or None when absent."""
    async with self.Session() as session:
        query = (
            select(CredentialModel)
            .filter_by(credential_id=credential_id)
            .filter_by(organization_id=organization_id)
            .filter(CredentialModel.deleted_at.is_(None))
        )
        row = (await session.scalars(query)).first()
        return Credential.model_validate(row) if row else None
2025-02-14 00:00:19 +08:00
2025-02-20 13:50:41 -08:00
async def get_credentials(self, organization_id: str, page: int = 1, page_size: int = 10) -> list[Credential]:
    """List non-deleted credentials for an organization, newest first, paginated."""
    async with self.Session() as session:
        offset = (page - 1) * page_size
        query = (
            select(CredentialModel)
            .filter_by(organization_id=organization_id)
            .filter(CredentialModel.deleted_at.is_(None))
            .order_by(CredentialModel.created_at.desc())
            .offset(offset)
            .limit(page_size)
        )
        rows = (await session.scalars(query)).all()
        return [Credential.model_validate(row) for row in rows]
async def update_credential(
    self, credential_id: str, organization_id: str, name: str | None = None, website_url: str | None = None
) -> Credential:
    """Update the display fields (name, website_url) of a credential.

    Raises NotFoundError when the credential does not exist or was soft-deleted.
    """
    async with self.Session() as session:
        credential = (
            await session.scalars(
                select(CredentialModel)
                .filter_by(credential_id=credential_id)
                .filter_by(organization_id=organization_id)
                # Consistency fix: exclude soft-deleted rows, matching
                # get_credential and update_credential_vault_data; previously
                # a deleted credential could still be renamed here.
                .filter(CredentialModel.deleted_at.is_(None))
            )
        ).first()
        if not credential:
            raise NotFoundError(f"Credential {credential_id} not found")
        if name:
            credential.name = name
        if website_url:
            credential.website_url = website_url
        await session.commit()
        await session.refresh(credential)
        return Credential.model_validate(credential)
2026-02-11 00:04:51 -05:00
async def update_credential_vault_data(
    self,
    credential_id: str,
    organization_id: str,
    item_id: str,
    name: str,
    credential_type: CredentialType,
    username: str | None = None,
    totp_type: str = "none",
    totp_identifier: str | None = None,
    card_last4: str | None = None,
    card_brand: str | None = None,
    secret_label: str | None = None,
) -> Credential:
    """Overwrite the vault-backed fields of a credential.

    Locks the row (SELECT ... FOR UPDATE) for the duration of the update.
    Raises NotFoundError when no live credential matches.
    """
    async with self.Session() as session:
        query = (
            select(CredentialModel)
            .filter_by(credential_id=credential_id)
            .filter_by(organization_id=organization_id)
            .filter(CredentialModel.deleted_at.is_(None))
            .with_for_update()
        )
        credential = (await session.scalars(query)).first()
        if credential is None:
            raise NotFoundError(f"Credential {credential_id} not found")
        # All vault-derived fields are replaced wholesale, including Nones.
        credential.item_id = item_id
        credential.name = name
        credential.credential_type = credential_type
        credential.username = username
        credential.totp_type = totp_type
        credential.totp_identifier = totp_identifier
        credential.card_last4 = card_last4
        credential.card_brand = card_brand
        credential.secret_label = secret_label
        await session.commit()
        await session.refresh(credential)
        return Credential.model_validate(credential)
2025-02-14 00:00:19 +08:00
async def delete_credential(self, credential_id: str, organization_id: str) -> None:
    """Soft-delete a credential by stamping deleted_at.

    Raises NotFoundError when the credential does not exist.
    """
    async with self.Session() as session:
        query = (
            select(CredentialModel)
            .filter_by(credential_id=credential_id)
            .filter_by(organization_id=organization_id)
        )
        credential = (await session.scalars(query)).first()
        if credential is None:
            raise NotFoundError(f"Credential {credential_id} not found")
        credential.deleted_at = datetime.utcnow()
        await session.commit()
        await session.refresh(credential)
        return None
2025-02-20 13:50:41 -08:00
async def create_organization_bitwarden_collection(
    self,
    organization_id: str,
    collection_id: str,
) -> OrganizationBitwardenCollection:
    """Link an organization to a Bitwarden collection and return the record."""
    async with self.Session() as session:
        record = OrganizationBitwardenCollectionModel(
            organization_id=organization_id, collection_id=collection_id
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return OrganizationBitwardenCollection.model_validate(record)
async def get_organization_bitwarden_collection(
    self,
    organization_id: str,
) -> OrganizationBitwardenCollection | None:
    """Fetch the non-deleted Bitwarden collection link for an organization."""
    async with self.Session() as session:
        query = (
            select(OrganizationBitwardenCollectionModel)
            .filter_by(organization_id=organization_id)
            .filter_by(deleted_at=None)
        )
        record = (await session.scalars(query)).first()
        return OrganizationBitwardenCollection.model_validate(record) if record else None
2025-03-30 18:41:24 -07:00
async def cache_task_run(self, run_id: str, organization_id: str | None = None) -> Run:
    """Flag a task run as cached and return the updated Run.

    Raises NotFoundError when the run does not exist.
    """
    async with self.Session() as session:
        query = select(TaskRunModel).filter_by(organization_id=organization_id).filter_by(run_id=run_id)
        task_run = (await session.scalars(query)).first()
        if task_run is None:
            raise NotFoundError(f"Run {run_id} not found")
        task_run.cached = True
        await session.commit()
        await session.refresh(task_run)
        return Run.model_validate(task_run)
2025-02-11 14:47:41 +08:00
async def get_cached_task_run(
    self, task_run_type: RunType, url_hash: str | None = None, organization_id: str | None = None
) -> Run | None:
    """Return the most recent cached task run matching the given filters."""
    async with self.Session() as session:
        query = select(TaskRunModel)
        # Optional filters are applied only when provided.
        if task_run_type:
            query = query.filter_by(task_run_type=task_run_type)
        if url_hash:
            query = query.filter_by(url_hash=url_hash)
        if organization_id:
            query = query.filter_by(organization_id=organization_id)
        query = query.filter_by(cached=True).order_by(TaskRunModel.created_at.desc())
        task_run = (await session.scalars(query)).first()
        if task_run is None:
            return None
        return Run.model_validate(task_run)
2025-03-16 13:21:40 -07:00
2025-03-30 21:14:52 -07:00
async def get_run(
    self,
    run_id: str,
    organization_id: str | None = None,
) -> Run | None:
    """Fetch a run by id, optionally scoped to an organization."""
    async with self.Session() as session:
        query = select(TaskRunModel).filter_by(run_id=run_id)
        if organization_id:
            query = query.filter_by(organization_id=organization_id)
        task_run = (await session.scalars(query)).first()
        if task_run is None:
            return None
        return Run.model_validate(task_run)
2025-07-28 10:23:02 -04:00
async def get_debug_session(
    self,
    *,
    organization_id: str,
    user_id: str,
    workflow_permanent_id: str,
) -> DebugSession | None:
    """Return the newest open ("created") debug session for a user/workflow pair."""
    async with self.Session() as session:
        query = (
            select(DebugSessionModel)
            .filter_by(organization_id=organization_id)
            .filter_by(workflow_permanent_id=workflow_permanent_id)
            .filter_by(user_id=user_id)
            .filter_by(deleted_at=None)
            .filter_by(status="created")
            .order_by(DebugSessionModel.created_at.desc())
        )
        record = (await session.scalars(query)).first()
        return DebugSession.model_validate(record) if record else None
2025-08-28 20:05:24 -04:00
async def get_latest_block_run(
    self,
    *,
    organization_id: str,
    user_id: str,
    block_label: str,
) -> BlockRun | None:
    """Return the most recent block run for a user and block label."""
    async with self.Session() as session:
        record = (
            await session.scalars(
                select(BlockRunModel)
                .filter_by(organization_id=organization_id)
                .filter_by(user_id=user_id)
                .filter_by(block_label=block_label)
                .order_by(BlockRunModel.created_at.desc())
            )
        ).first()
        if record is None:
            return None
        return BlockRun.model_validate(record)
async def get_latest_completed_block_run(
    self,
    *,
    organization_id: str,
    user_id: str,
    block_label: str,
    workflow_permanent_id: str,
) -> BlockRun | None:
    """Return the newest block run whose workflow run completed successfully."""
    async with self.Session() as session:
        query = (
            select(BlockRunModel)
            # Join to the workflow run so we can require completed status.
            .join(WorkflowRunModel, BlockRunModel.workflow_run_id == WorkflowRunModel.workflow_run_id)
            .filter(BlockRunModel.organization_id == organization_id)
            .filter(BlockRunModel.user_id == user_id)
            .filter(BlockRunModel.block_label == block_label)
            .filter(WorkflowRunModel.status == WorkflowRunStatus.completed)
            .filter(WorkflowRunModel.workflow_permanent_id == workflow_permanent_id)
            .order_by(BlockRunModel.created_at.desc())
        )
        record = (await session.scalars(query)).first()
        if record is None:
            return None
        return BlockRun.model_validate(record)
async def create_block_run(
    self,
    *,
    organization_id: str,
    user_id: str,
    block_label: str,
    output_parameter_id: str,
    workflow_run_id: str,
) -> None:
    """Insert a block-run record; nothing is returned."""
    async with self.Session() as session:
        record = BlockRunModel(
            organization_id=organization_id,
            user_id=user_id,
            block_label=block_label,
            output_parameter_id=output_parameter_id,
            workflow_run_id=workflow_run_id,
        )
        session.add(record)
        await session.commit()
2025-08-07 17:08:50 -04:00
async def get_latest_debug_session_for_user(
    self,
    *,
    organization_id: str,
    user_id: str,
    workflow_permanent_id: str,
) -> DebugSession | None:
    """Return the newest open ("created") debug session for the user/workflow."""
    async with self.Session() as session:
        record = (
            await session.scalars(
                select(DebugSessionModel)
                .filter_by(organization_id=organization_id)
                .filter_by(deleted_at=None)
                .filter_by(status="created")
                .filter_by(user_id=user_id)
                .filter_by(workflow_permanent_id=workflow_permanent_id)
                .order_by(DebugSessionModel.created_at.desc())
            )
        ).first()
        if record is None:
            return None
        return DebugSession.model_validate(record)
2025-10-16 08:24:05 -04:00
async def get_debug_session_by_id(
    self,
    debug_session_id: str,
    organization_id: str,
) -> DebugSession | None:
    """Fetch a non-deleted debug session by its id, or None when absent."""
    async with self.Session() as session:
        record = (
            await session.scalars(
                select(DebugSessionModel)
                .filter_by(organization_id=organization_id)
                .filter_by(deleted_at=None)
                .filter_by(debug_session_id=debug_session_id)
            )
        ).first()
        return DebugSession.model_validate(record) if record else None
2026-01-14 23:19:46 -08:00
async def get_debug_session_by_browser_session_id(
    self,
    browser_session_id: str,
    organization_id: str,
) -> DebugSession | None:
    """Fetch the non-deleted debug session tied to a browser session, if any."""
    async with self.Session() as session:
        record = (
            await session.scalars(
                select(DebugSessionModel)
                .filter_by(browser_session_id=browser_session_id)
                .filter_by(organization_id=organization_id)
                .filter_by(deleted_at=None)
            )
        ).first()
        return DebugSession.model_validate(record) if record else None
2025-10-16 08:24:05 -04:00
async def get_workflow_runs_by_debug_session_id(
    self,
    debug_session_id: str,
    organization_id: str,
) -> list[DebugSessionRun]:
    """Return (workflow run, block run) pairs for a debug session, newest first,
    flattened into DebugSessionRun records."""
    async with self.Session() as session:
        query = (
            select(WorkflowRunModel, BlockRunModel)
            .join(BlockRunModel, BlockRunModel.workflow_run_id == WorkflowRunModel.workflow_run_id)
            .filter(WorkflowRunModel.organization_id == organization_id)
            .filter(WorkflowRunModel.debug_session_id == debug_session_id)
            .order_by(WorkflowRunModel.created_at.desc())
        )
        rows = (await session.execute(query)).all()
        return [
            DebugSessionRun(
                ai_fallback=wf_run.ai_fallback,
                block_label=blk_run.block_label,
                browser_session_id=wf_run.browser_session_id,
                code_gen=wf_run.code_gen,
                debug_session_id=wf_run.debug_session_id,
                failure_reason=wf_run.failure_reason,
                output_parameter_id=blk_run.output_parameter_id,
                run_with=wf_run.run_with,
                # script_run is an optional mapping; pull the id when present.
                script_run_id=wf_run.script_run.get("script_run_id") if wf_run.script_run else None,
                status=wf_run.status,
                workflow_id=wf_run.workflow_id,
                workflow_permanent_id=wf_run.workflow_permanent_id,
                workflow_run_id=wf_run.workflow_run_id,
                created_at=wf_run.created_at,
                queued_at=wf_run.queued_at,
                started_at=wf_run.started_at,
                finished_at=wf_run.finished_at,
            )
            for wf_run, blk_run in rows
        ]
2025-07-31 11:43:05 -04:00
async def complete_debug_sessions(
    self,
    *,
    organization_id: str,
    user_id: str | None = None,
    workflow_permanent_id: str | None = None,
) -> list[DebugSession]:
    """Mark all matching open ("created") debug sessions as completed.

    Returns the updated sessions (validated after the in-memory status change,
    before commit, as in the original implementation).
    """
    async with self.Session() as session:
        query = (
            select(DebugSessionModel)
            .filter_by(organization_id=organization_id)
            .filter_by(deleted_at=None)
            .filter_by(status="created")
        )
        if user_id:
            query = query.filter_by(user_id=user_id)
        if workflow_permanent_id:
            query = query.filter_by(workflow_permanent_id=workflow_permanent_id)
        records = (await session.scalars(query)).all()
        for record in records:
            record.status = "completed"
        completed = [DebugSession.model_validate(record) for record in records]
        await session.commit()
        return completed
2025-07-28 10:23:02 -04:00
async def create_debug_session(
    self,
    *,
    browser_session_id: str,
    organization_id: str,
    user_id: str,
    workflow_permanent_id: str,
    vnc_streaming_supported: bool,
) -> DebugSession:
    """Insert a debug session in the "created" state and return it."""
    async with self.Session() as session:
        record = DebugSessionModel(
            organization_id=organization_id,
            workflow_permanent_id=workflow_permanent_id,
            user_id=user_id,
            browser_session_id=browser_session_id,
            vnc_streaming_supported=vnc_streaming_supported,
            status="created",
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return DebugSession.model_validate(record)
2025-07-31 21:25:17 -07:00
2025-08-06 22:23:38 -07:00
async def create_script(
    self,
    organization_id: str,
    run_id: str | None = None,
    script_id: str | None = None,
    version: int | None = None,
) -> Script:
    """Insert a script row; explicit script_id/version override model defaults."""
    try:
        async with self.Session() as session:
            new_script = ScriptModel(
                organization_id=organization_id,
                run_id=run_id,
            )
            if script_id:
                new_script.script_id = script_id
            if version:
                new_script.version = version
            session.add(new_script)
            await session.commit()
            await session.refresh(new_script)
            return convert_to_script(new_script)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-08-06 22:23:38 -07:00
async def get_scripts(
    self,
    organization_id: str,
    page: int = 1,
    page_size: int = 10,
) -> list[Script]:
    """List the latest version of each non-deleted script, newest first, paginated."""
    try:
        async with self.Session() as session:
            offset = (page - 1) * page_size

            # For each script_id, find its highest (latest) version.
            latest_version_sq = (
                select(ScriptModel.script_id, func.max(ScriptModel.version).label("latest_version"))
                .filter_by(organization_id=organization_id)
                .filter(ScriptModel.deleted_at.is_(None))
                .group_by(ScriptModel.script_id)
                .subquery()
            )

            # Join back to pick only the row carrying that latest version.
            query = (
                select(ScriptModel)
                .join(
                    latest_version_sq,
                    and_(
                        ScriptModel.script_id == latest_version_sq.c.script_id,
                        ScriptModel.version == latest_version_sq.c.latest_version,
                    ),
                )
                .filter_by(organization_id=organization_id)
                .filter(ScriptModel.deleted_at.is_(None))
                .order_by(ScriptModel.created_at.desc())
                .limit(page_size)
                .offset(offset)
            )

            rows = (await session.scalars(query)).all()
            return [convert_to_script(row) for row in rows]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-08-06 22:23:38 -07:00
async def get_script (
2025-07-31 21:25:17 -07:00
self ,
2025-08-06 22:23:38 -07:00
script_id : str ,
2025-07-31 21:25:17 -07:00
organization_id : str ,
version : int | None = None ,
2025-08-06 22:23:38 -07:00
) - > Script | None :
""" Get a specific script by ID and optionally by version. """
2025-07-31 21:25:17 -07:00
try :
async with self . Session ( ) as session :
2025-08-06 22:23:38 -07:00
get_script_query = (
select ( ScriptModel )
. filter_by ( script_id = script_id )
2025-07-31 21:25:17 -07:00
. filter_by ( organization_id = organization_id )
2025-08-06 22:23:38 -07:00
. filter ( ScriptModel . deleted_at . is_ ( None ) )
2025-07-31 21:25:17 -07:00
)
if version is not None :
2025-08-06 22:23:38 -07:00
get_script_query = get_script_query . filter_by ( version = version )
2025-07-31 21:25:17 -07:00
else :
# Get the latest version
2025-08-06 22:23:38 -07:00
get_script_query = get_script_query . order_by ( ScriptModel . version . desc ( ) ) . limit ( 1 )
2025-07-31 21:25:17 -07:00
2025-08-06 22:23:38 -07:00
if script := ( await session . scalars ( get_script_query ) ) . first ( ) :
return convert_to_script ( script )
2025-07-31 21:25:17 -07:00
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2025-08-06 22:23:38 -07:00
async def get_script_revision ( self , script_revision_id : str , organization_id : str ) - > Script | None :
2025-08-04 00:33:34 -07:00
async with self . Session ( ) as session :
2025-08-06 22:23:38 -07:00
script = (
2025-08-04 00:33:34 -07:00
await session . scalars (
2025-08-06 22:23:38 -07:00
select ( ScriptModel )
. filter_by ( script_revision_id = script_revision_id )
2025-08-04 00:33:34 -07:00
. filter_by ( organization_id = organization_id )
)
) . first ( )
2025-08-06 22:23:38 -07:00
return convert_to_script ( script ) if script else None
2025-08-04 00:33:34 -07:00
2025-08-06 22:23:38 -07:00
async def create_script_file (
2025-07-31 21:25:17 -07:00
self ,
2025-08-06 22:23:38 -07:00
script_revision_id : str ,
script_id : str ,
2025-07-31 21:25:17 -07:00
organization_id : str ,
file_path : str ,
file_name : str ,
file_type : str ,
content_hash : str | None = None ,
file_size : int | None = None ,
mime_type : str | None = None ,
encoding : str = " utf-8 " ,
artifact_id : str | None = None ,
2025-08-10 13:16:46 -07:00
) - > ScriptFile :
""" Create a script file. """
async with self . Session ( ) as session :
script_file = ScriptFileModel (
script_revision_id = script_revision_id ,
script_id = script_id ,
organization_id = organization_id ,
file_path = file_path ,
file_name = file_name ,
file_type = file_type ,
content_hash = content_hash ,
file_size = file_size ,
mime_type = mime_type ,
encoding = encoding ,
artifact_id = artifact_id ,
)
session . add ( script_file )
await session . commit ( )
await session . refresh ( script_file )
return convert_to_script_file ( script_file )
async def create_script_block (
self ,
script_revision_id : str ,
script_id : str ,
organization_id : str ,
script_block_label : str ,
script_file_id : str | None = None ,
2025-10-14 16:17:03 -07:00
run_signature : str | None = None ,
2025-11-05 08:46:03 +08:00
workflow_run_id : str | None = None ,
workflow_run_block_id : str | None = None ,
2025-12-10 20:58:39 -08:00
input_fields : list [ str ] | None = None ,
2025-08-10 13:16:46 -07:00
) - > ScriptBlock :
""" Create a script block. """
async with self . Session ( ) as session :
script_block = ScriptBlockModel (
script_revision_id = script_revision_id ,
script_id = script_id ,
organization_id = organization_id ,
script_block_label = script_block_label ,
script_file_id = script_file_id ,
2025-10-14 16:17:03 -07:00
run_signature = run_signature ,
2025-11-05 08:46:03 +08:00
workflow_run_id = workflow_run_id ,
workflow_run_block_id = workflow_run_block_id ,
2025-12-10 20:58:39 -08:00
input_fields = input_fields ,
2025-08-10 13:16:46 -07:00
)
session . add ( script_block )
await session . commit ( )
await session . refresh ( script_block )
return convert_to_script_block ( script_block )
    async def update_script_block(
        self,
        script_block_id: str,
        organization_id: str,
        script_file_id: str | None = None,
        run_signature: str | None = None,
        workflow_run_id: str | None = None,
        workflow_run_block_id: str | None = None,
        clear_run_signature: bool = False,
        input_fields: list[str] | None = None,
    ) -> ScriptBlock:
        """Partially update a script block; only fields passed as non-None are applied.

        ``clear_run_signature`` takes precedence over ``run_signature``: when True,
        the stored signature is set to None and any supplied ``run_signature`` is
        ignored.

        Raises:
            NotFoundError: if no block matches ``script_block_id`` within the org.
        """
        async with self.Session() as session:
            script_block = (
                await session.scalars(
                    select(ScriptBlockModel)
                    .filter_by(script_block_id=script_block_id)
                    .filter_by(organization_id=organization_id)
                )
            ).first()
            if script_block:
                if script_file_id is not None:
                    script_block.script_file_id = script_file_id
                # Clearing wins over setting (see docstring).
                if clear_run_signature:
                    script_block.run_signature = None
                elif run_signature is not None:
                    script_block.run_signature = run_signature
                if workflow_run_id is not None:
                    script_block.workflow_run_id = workflow_run_id
                if workflow_run_block_id is not None:
                    script_block.workflow_run_block_id = workflow_run_block_id
                if input_fields is not None:
                    script_block.input_fields = input_fields
                await session.commit()
                await session.refresh(script_block)
                return convert_to_script_block(script_block)
            else:
                raise NotFoundError("Script block not found")
2025-08-04 00:33:34 -07:00
2025-08-06 22:23:38 -07:00
async def get_script_files ( self , script_revision_id : str , organization_id : str ) - > list [ ScriptFile ] :
2025-08-04 00:33:34 -07:00
async with self . Session ( ) as session :
2025-08-06 22:23:38 -07:00
script_files = (
2025-08-04 00:33:34 -07:00
await session . scalars (
2025-08-06 22:23:38 -07:00
select ( ScriptFileModel )
. filter_by ( script_revision_id = script_revision_id )
2025-08-04 00:33:34 -07:00
. filter_by ( organization_id = organization_id )
)
) . all ( )
2025-08-06 22:23:38 -07:00
return [ convert_to_script_file ( script_file ) for script_file in script_files ]
2025-08-08 20:24:44 -07:00
2025-08-11 19:21:44 -04:00
async def get_script_file_by_id (
self ,
script_revision_id : str ,
file_id : str ,
organization_id : str ,
) - > ScriptFile | None :
async with self . Session ( ) as session :
script_file = (
await session . scalars (
select ( ScriptFileModel )
. filter_by ( script_revision_id = script_revision_id )
. filter_by ( file_id = file_id )
. filter_by ( organization_id = organization_id )
)
) . first ( )
return convert_to_script_file ( script_file ) if script_file else None
2025-09-19 08:50:21 -07:00
async def get_script_file_by_path (
self ,
script_revision_id : str ,
file_path : str ,
organization_id : str ,
) - > ScriptFile | None :
async with self . Session ( ) as session :
script_file = (
await session . scalars (
select ( ScriptFileModel )
. filter_by ( script_revision_id = script_revision_id )
. filter_by ( file_path = file_path )
. filter_by ( organization_id = organization_id )
)
) . first ( )
return convert_to_script_file ( script_file ) if script_file else None
async def update_script_file (
self ,
script_file_id : str ,
organization_id : str ,
artifact_id : str | None = None ,
) - > ScriptFile :
async with self . Session ( ) as session :
script_file = (
await session . scalars (
select ( ScriptFileModel ) . filter_by ( file_id = script_file_id ) . filter_by ( organization_id = organization_id )
)
) . first ( )
if script_file :
if artifact_id :
script_file . artifact_id = artifact_id
await session . commit ( )
await session . refresh ( script_file )
return convert_to_script_file ( script_file )
else :
raise NotFoundError ( " Script file not found " )
2025-08-08 20:24:44 -07:00
async def get_script_block (
self ,
script_block_id : str ,
organization_id : str ,
) - > ScriptBlock | None :
async with self . Session ( ) as session :
record = (
await session . scalars (
select ( ScriptBlockModel )
. filter_by ( script_block_id = script_block_id )
. filter_by ( organization_id = organization_id )
)
) . first ( )
return convert_to_script_block ( record ) if record else None
2025-09-19 08:50:21 -07:00
async def get_script_block_by_label (
self ,
organization_id : str ,
script_revision_id : str ,
script_block_label : str ,
) - > ScriptBlock | None :
async with self . Session ( ) as session :
record = (
await session . scalars (
select ( ScriptBlockModel )
. filter_by ( script_revision_id = script_revision_id )
. filter_by ( script_block_label = script_block_label )
. filter_by ( organization_id = organization_id )
)
) . first ( )
return convert_to_script_block ( record ) if record else None
2025-08-08 20:24:44 -07:00
async def get_script_blocks_by_script_revision_id (
self ,
script_revision_id : str ,
organization_id : str ,
) - > list [ ScriptBlock ] :
async with self . Session ( ) as session :
records = (
await session . scalars (
select ( ScriptBlockModel )
. filter_by ( script_revision_id = script_revision_id )
. filter_by ( organization_id = organization_id )
2025-08-31 11:46:31 +08:00
. order_by ( ScriptBlockModel . created_at . asc ( ) )
2025-08-08 20:24:44 -07:00
)
) . all ( )
return [ convert_to_script_block ( record ) for record in records ]
2025-08-09 13:11:16 -07:00
async def create_workflow_script (
self ,
* ,
organization_id : str ,
script_id : str ,
workflow_permanent_id : str ,
cache_key : str ,
cache_key_value : str ,
workflow_id : str | None = None ,
workflow_run_id : str | None = None ,
2025-09-19 08:50:21 -07:00
status : ScriptStatus = ScriptStatus . published ,
2025-08-09 13:11:16 -07:00
) - > None :
""" Create a workflow->script cache mapping entry. """
try :
async with self . Session ( ) as session :
record = WorkflowScriptModel (
organization_id = organization_id ,
script_id = script_id ,
workflow_permanent_id = workflow_permanent_id ,
workflow_id = workflow_id ,
workflow_run_id = workflow_run_id ,
cache_key = cache_key ,
cache_key_value = cache_key_value ,
2025-09-19 08:50:21 -07:00
status = status ,
2025-08-09 13:11:16 -07:00
)
session . add ( record )
await session . commit ( )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2025-09-19 08:50:21 -07:00
async def get_workflow_script (
self ,
organization_id : str ,
workflow_permanent_id : str ,
workflow_run_id : str ,
statuses : list [ ScriptStatus ] | None = None ,
) - > WorkflowScript | None :
async with self . Session ( ) as session :
query = (
select ( WorkflowScriptModel )
. filter_by ( organization_id = organization_id )
. filter_by ( workflow_permanent_id = workflow_permanent_id )
. filter_by ( workflow_run_id = workflow_run_id )
)
if statuses :
query = query . filter ( WorkflowScriptModel . status . in_ ( statuses ) )
workflow_script_model = ( await session . scalars ( query ) ) . first ( )
return WorkflowScript . model_validate ( workflow_script_model ) if workflow_script_model else None
2025-11-25 12:15:06 -07:00
    async def get_workflow_script_by_cache_key_value(
        self,
        *,
        organization_id: str,
        workflow_permanent_id: str,
        cache_key_value: str,
        workflow_run_id: str | None = None,
        cache_key: str | None = None,
        statuses: list[ScriptStatus] | None = None,
    ) -> Script | None:
        """Get latest script version linked to a workflow by a specific cache_key_value.

        Optional filters narrow the workflow->script mapping rows:
        ``workflow_run_id`` (applied only when truthy), ``cache_key`` (checked
        against None, so an empty string still filters), and ``statuses``
        (applied only when a non-empty list). Returns None when no matching,
        non-deleted mapping exists.
        """
        try:
            async with self.Session() as session:
                # Build the query: join workflow_scripts with scripts.
                # Join on both script_id and organization_id to leverage uc_org_script_version index.
                query = (
                    select(ScriptModel)
                    .join(
                        WorkflowScriptModel,
                        and_(
                            ScriptModel.organization_id == WorkflowScriptModel.organization_id,
                            ScriptModel.script_id == WorkflowScriptModel.script_id,
                        ),
                    )
                    .where(
                        WorkflowScriptModel.organization_id == organization_id,
                        WorkflowScriptModel.workflow_permanent_id == workflow_permanent_id,
                        WorkflowScriptModel.cache_key_value == cache_key_value,
                        WorkflowScriptModel.deleted_at.is_(None),
                    )
                )

                if workflow_run_id:
                    query = query.where(WorkflowScriptModel.workflow_run_id == workflow_run_id)

                if cache_key is not None:
                    query = query.where(WorkflowScriptModel.cache_key == cache_key)

                if statuses is not None and len(statuses) > 0:
                    query = query.where(WorkflowScriptModel.status.in_(statuses))

                # Newest script wins: order by creation time, then version, take one row.
                query = query.order_by(ScriptModel.created_at.desc(), ScriptModel.version.desc()).limit(1)

                script = (await session.scalars(query)).first()
                return convert_to_script(script) if script else None
        except SQLAlchemyError:
            LOG.error("SQLAlchemyError", exc_info=True)
            raise
        except Exception:
            LOG.error("UnexpectedError", exc_info=True)
            raise
2025-08-21 19:01:59 -04:00
async def get_workflow_cache_key_count (
self ,
organization_id : str ,
workflow_permanent_id : str ,
cache_key : str ,
filter : str | None = None ,
) - > int :
try :
async with self . Session ( ) as session :
query = (
select ( func . count ( ) )
. select_from ( WorkflowScriptModel )
. filter_by ( organization_id = organization_id )
. filter_by ( workflow_permanent_id = workflow_permanent_id )
. filter_by ( cache_key = cache_key )
. filter_by ( deleted_at = None )
2025-09-19 08:50:21 -07:00
. filter_by ( status = " published " )
2025-08-21 19:01:59 -04:00
)
if filter :
query = query . filter ( WorkflowScriptModel . cache_key_value . contains ( filter ) )
return ( await session . execute ( query ) ) . scalar_one ( )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_workflow_cache_key_values (
self ,
organization_id : str ,
workflow_permanent_id : str ,
cache_key : str ,
page : int = 1 ,
page_size : int = 100 ,
filter : str | None = None ,
) - > list [ str ] :
try :
async with self . Session ( ) as session :
query = (
select ( WorkflowScriptModel . cache_key_value )
. order_by ( WorkflowScriptModel . cache_key_value . asc ( ) )
. filter_by ( organization_id = organization_id )
. filter_by ( workflow_permanent_id = workflow_permanent_id )
. filter_by ( cache_key = cache_key )
. filter_by ( deleted_at = None )
2025-09-19 08:50:21 -07:00
. filter_by ( status = " published " )
2025-08-21 19:01:59 -04:00
. offset ( ( page - 1 ) * page_size )
. limit ( page_size )
)
if filter :
query = query . filter ( WorkflowScriptModel . cache_key_value . contains ( filter ) )
return ( await session . scalars ( query ) ) . all ( )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
    async def delete_workflow_cache_key_value(
        self,
        organization_id: str,
        workflow_permanent_id: str,
        cache_key_value: str,
    ) -> bool:
        """
        Soft delete workflow cache key values by setting deleted_at timestamp.
        Returns True if any records were deleted, False otherwise.
        """
        try:
            async with self.Session() as session:
                stmt = (
                    update(WorkflowScriptModel)
                    .where(
                        and_(
                            WorkflowScriptModel.organization_id == organization_id,
                            WorkflowScriptModel.workflow_permanent_id == workflow_permanent_id,
                            WorkflowScriptModel.cache_key_value == cache_key_value,
                            # Only rows not already soft-deleted are touched.
                            WorkflowScriptModel.deleted_at.is_(None),
                        )
                    )
                    # NOTE(review): naive UTC timestamp (datetime.utcnow is deprecated);
                    # presumably the column is timezone-naive — confirm before switching
                    # to timezone-aware datetimes.
                    .values(deleted_at=datetime.utcnow())
                )
                result = await session.execute(stmt)
                await session.commit()
                # rowcount is the number of rows matched/updated by the UPDATE.
                return result.rowcount > 0
        except SQLAlchemyError:
            LOG.error("SQLAlchemyError", exc_info=True)
            raise
        except Exception:
            LOG.error("UnexpectedError", exc_info=True)
            raise
2025-10-01 13:49:42 -07:00
    async def delete_workflow_scripts_by_permanent_id(
        self,
        organization_id: str,
        workflow_permanent_id: str,
        statuses: list[ScriptStatus] | None = None,
        script_ids: list[str] | None = None,
    ) -> int:
        """
        Soft delete workflow scripts for a workflow permanent id by setting deleted_at timestamp.
        Optionally narrowed to the given statuses and/or script_ids.
        Returns the number of rows updated (not a bool).
        """
        try:
            async with self.Session() as session:
                stmt = (
                    update(WorkflowScriptModel)
                    .where(
                        and_(
                            WorkflowScriptModel.organization_id == organization_id,
                            WorkflowScriptModel.workflow_permanent_id == workflow_permanent_id,
                            # Skip rows that are already soft-deleted.
                            WorkflowScriptModel.deleted_at.is_(None),
                        )
                    )
                    # NOTE(review): naive UTC timestamp (datetime.utcnow is deprecated);
                    # kept consistent with delete_workflow_cache_key_value.
                    .values(deleted_at=datetime.utcnow())
                )
                # Statuses are compared by enum value, matching the stored column.
                if statuses:
                    stmt = stmt.where(WorkflowScriptModel.status.in_([s.value for s in statuses]))
                if script_ids:
                    stmt = stmt.where(WorkflowScriptModel.script_id.in_(script_ids))
                result = await session.execute(stmt)
                await session.commit()
                return result.rowcount
        except SQLAlchemyError:
            LOG.error("SQLAlchemyError", exc_info=True)
            raise
        except Exception:
            LOG.error("UnexpectedError", exc_info=True)
            raise
2025-10-07 16:56:53 -07:00
async def get_workflow_scripts_by_permanent_id (
self ,
organization_id : str ,
workflow_permanent_id : str ,
2025-10-10 11:52:08 -04:00
statuses : list [ ScriptStatus ] | None = None ,
2025-10-07 16:56:53 -07:00
) - > list [ WorkflowScriptModel ] :
try :
async with self . Session ( ) as session :
query = (
select ( WorkflowScriptModel )
. filter_by ( organization_id = organization_id )
. filter_by ( workflow_permanent_id = workflow_permanent_id )
. filter_by ( deleted_at = None )
)
2025-10-10 11:52:08 -04:00
if statuses :
query = query . filter ( WorkflowScriptModel . status . in_ ( [ s . value for s in statuses ] ) )
2025-10-07 16:56:53 -07:00
return ( await session . scalars ( query ) ) . all ( )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise