2024-09-19 11:09:19 -07:00
import json
2024-09-03 07:00:15 +03:00
from datetime import datetime , timedelta
2025-01-08 18:14:38 +01:00
from typing import Any , List , Optional , Sequence
2024-03-01 10:09:30 -08:00
import structlog
2025-03-04 03:22:16 -05:00
from sqlalchemy import and_ , delete , distinct , func , select , tuple_ , update
2024-03-01 10:09:30 -08:00
from sqlalchemy . exc import SQLAlchemyError
2024-03-24 12:47:47 -07:00
from sqlalchemy . ext . asyncio import async_sessionmaker , create_async_engine
2024-03-01 10:09:30 -08:00
2024-09-03 07:00:15 +03:00
from skyvern . config import settings
2024-03-01 10:09:30 -08:00
from skyvern . exceptions import WorkflowParameterNotFound
from skyvern . forge . sdk . artifact . models import Artifact , ArtifactType
2024-11-26 11:29:33 +08:00
from skyvern . forge . sdk . db . enums import OrganizationAuthTokenType , TaskType
2024-03-01 10:09:30 -08:00
from skyvern . forge . sdk . db . exceptions import NotFoundError
from skyvern . forge . sdk . db . models import (
2024-10-15 12:06:50 -07:00
ActionModel ,
2025-01-08 21:45:38 -08:00
AISuggestionModel ,
2024-03-01 10:09:30 -08:00
ArtifactModel ,
AWSSecretParameterModel ,
2024-10-03 16:18:21 -07:00
BitwardenCreditCardDataParameterModel ,
2024-04-03 16:01:03 -07:00
BitwardenLoginCredentialParameterModel ,
2024-07-11 09:48:14 -07:00
BitwardenSensitiveInformationParameterModel ,
2025-02-14 00:00:19 +08:00
CredentialModel ,
CredentialParameterModel ,
2024-03-01 10:09:30 -08:00
OrganizationAuthTokenModel ,
2025-02-20 13:50:41 -08:00
OrganizationBitwardenCollectionModel ,
2024-03-01 10:09:30 -08:00
OrganizationModel ,
2024-03-21 17:16:56 -07:00
OutputParameterModel ,
2025-01-08 18:14:38 +01:00
PersistentBrowserSessionModel ,
2024-03-01 10:09:30 -08:00
StepModel ,
2024-06-07 15:59:53 -07:00
TaskGenerationModel ,
2024-03-01 10:09:30 -08:00
TaskModel ,
2025-02-09 20:30:19 +08:00
TaskRunModel ,
2025-02-27 20:19:02 -08:00
TaskV2Model ,
ThoughtModel ,
2024-09-08 15:07:03 -07:00
TOTPCodeModel ,
2024-03-01 10:09:30 -08:00
WorkflowModel ,
WorkflowParameterModel ,
2024-12-20 07:40:32 -08:00
WorkflowRunBlockModel ,
2024-03-01 10:09:30 -08:00
WorkflowRunModel ,
2024-03-21 17:16:56 -07:00
WorkflowRunOutputParameterModel ,
2024-03-01 10:09:30 -08:00
WorkflowRunParameterModel ,
)
from skyvern . forge . sdk . db . utils import (
_custom_json_serializer ,
convert_to_artifact ,
convert_to_aws_secret_parameter ,
2024-04-03 16:01:03 -07:00
convert_to_bitwarden_login_credential_parameter ,
2024-07-11 09:48:14 -07:00
convert_to_bitwarden_sensitive_information_parameter ,
2024-03-01 10:09:30 -08:00
convert_to_organization ,
convert_to_organization_auth_token ,
2024-03-21 17:16:56 -07:00
convert_to_output_parameter ,
2024-03-01 10:09:30 -08:00
convert_to_step ,
convert_to_task ,
convert_to_workflow ,
convert_to_workflow_parameter ,
convert_to_workflow_run ,
2024-12-20 07:40:32 -08:00
convert_to_workflow_run_block ,
2024-03-21 17:16:56 -07:00
convert_to_workflow_run_output_parameter ,
2024-03-01 10:09:30 -08:00
convert_to_workflow_run_parameter ,
)
2024-12-18 00:32:38 +01:00
from skyvern . forge . sdk . log_artifacts import save_workflow_run_logs
2024-12-06 17:15:11 -08:00
from skyvern . forge . sdk . models import Step , StepStatus
2025-01-08 21:45:38 -08:00
from skyvern . forge . sdk . schemas . ai_suggestions import AISuggestion
2025-02-14 00:00:19 +08:00
from skyvern . forge . sdk . schemas . credentials import Credential , CredentialType
2025-02-20 13:50:41 -08:00
from skyvern . forge . sdk . schemas . organization_bitwarden_collections import OrganizationBitwardenCollection
2024-12-06 17:15:11 -08:00
from skyvern . forge . sdk . schemas . organizations import Organization , OrganizationAuthToken
2025-01-08 18:14:38 +01:00
from skyvern . forge . sdk . schemas . persistent_browser_sessions import PersistentBrowserSession
2025-03-30 18:41:24 -07:00
from skyvern . forge . sdk . schemas . runs import Run
2024-06-07 15:59:53 -07:00
from skyvern . forge . sdk . schemas . task_generations import TaskGeneration
2025-02-27 20:19:02 -08:00
from skyvern . forge . sdk . schemas . task_v2 import TaskV2 , TaskV2Status , Thought , ThoughtType
2025-03-24 15:15:21 -07:00
from skyvern . forge . sdk . schemas . tasks import OrderBy , SortDirection , Task , TaskStatus
2024-09-08 15:07:03 -07:00
from skyvern . forge . sdk . schemas . totp_codes import TOTPCode
2024-12-20 07:40:32 -08:00
from skyvern . forge . sdk . schemas . workflow_runs import WorkflowRunBlock
from skyvern . forge . sdk . workflow . models . block import BlockStatus , BlockType
2024-03-21 17:16:56 -07:00
from skyvern . forge . sdk . workflow . models . parameter import (
AWSSecretParameter ,
2024-10-03 16:18:21 -07:00
BitwardenCreditCardDataParameter ,
2024-04-03 16:01:03 -07:00
BitwardenLoginCredentialParameter ,
2024-07-11 09:48:14 -07:00
BitwardenSensitiveInformationParameter ,
2025-02-14 00:00:19 +08:00
CredentialParameter ,
2024-03-21 17:16:56 -07:00
OutputParameter ,
WorkflowParameter ,
WorkflowParameterType ,
)
from skyvern . forge . sdk . workflow . models . workflow import (
Workflow ,
WorkflowRun ,
WorkflowRunOutputParameter ,
WorkflowRunParameter ,
WorkflowRunStatus ,
2025-01-25 04:08:51 +08:00
WorkflowStatus ,
2024-03-21 17:16:56 -07:00
)
2025-03-30 18:34:48 -07:00
from skyvern . schemas . runs import ProxyLocation , RunType
2024-10-15 12:06:50 -07:00
from skyvern . webeye . actions . actions import Action
2024-03-01 10:09:30 -08:00
from skyvern . webeye . actions . models import AgentStepOutput
LOG = structlog.get_logger()

# Per-driver connect arguments used to enforce a server-side statement timeout.
# psycopg receives it through the "options" startup parameter; asyncpg through
# "server_settings". Any other driver gets no extra connect arguments.
DB_CONNECT_ARGS: dict[str, Any] = {}
if "postgresql+psycopg" in settings.DATABASE_STRING:
    DB_CONNECT_ARGS = {"options": f"-c statement_timeout={settings.DATABASE_STATEMENT_TIMEOUT_MS}"}
elif "postgresql+asyncpg" in settings.DATABASE_STRING:
    DB_CONNECT_ARGS = {"server_settings": {"statement_timeout": str(settings.DATABASE_STATEMENT_TIMEOUT_MS)}}
2024-03-01 10:09:30 -08:00
class AgentDB:
    def __init__(self, database_string: str, debug_enabled: bool = False) -> None:
        """Async database-access layer.

        :param database_string: SQLAlchemy database URL for the async engine.
        :param debug_enabled: forwarded to the row-to-schema converters for
            extra debug output.
        """
        super().__init__()
        self.debug_enabled = debug_enabled
        # Engine uses the project's custom JSON serializer and the per-driver
        # statement-timeout connect args computed at module load.
        self.engine = create_async_engine(
            database_string,
            json_serializer=_custom_json_serializer,
            connect_args=DB_CONNECT_ARGS,
        )
        self.Session = async_sessionmaker(bind=self.engine)
2024-03-01 10:09:30 -08:00
async def create_task (
self ,
url : str ,
2024-03-12 22:28:16 -07:00
title : str | None ,
2024-11-21 15:12:26 +08:00
complete_criterion : str | None ,
terminate_criterion : str | None ,
2024-03-01 10:09:30 -08:00
navigation_goal : str | None ,
data_extraction_goal : str | None ,
navigation_payload : dict [ str , Any ] | list | str | None ,
webhook_callback_url : str | None = None ,
2024-07-11 21:34:00 -07:00
totp_verification_url : str | None = None ,
2024-09-08 15:07:03 -07:00
totp_identifier : str | None = None ,
2024-03-01 10:09:30 -08:00
organization_id : str | None = None ,
proxy_location : ProxyLocation | None = None ,
extracted_information_schema : dict [ str , Any ] | list | str | None = None ,
workflow_run_id : str | None = None ,
order : int | None = None ,
retry : int | None = None ,
2024-05-11 14:13:21 -07:00
max_steps_per_run : int | None = None ,
2024-03-12 22:28:16 -07:00
error_code_mapping : dict [ str , str ] | None = None ,
2024-11-26 11:29:33 +08:00
task_type : str = TaskType . general ,
2024-11-29 05:43:02 -08:00
application : str | None = None ,
2024-03-01 10:09:30 -08:00
) - > Task :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
new_task = TaskModel (
status = " created " ,
2024-11-26 11:29:33 +08:00
task_type = task_type ,
2024-03-01 10:09:30 -08:00
url = url ,
2024-03-12 22:28:16 -07:00
title = title ,
2024-03-01 10:09:30 -08:00
webhook_callback_url = webhook_callback_url ,
2024-07-11 21:34:00 -07:00
totp_verification_url = totp_verification_url ,
2024-09-08 15:07:03 -07:00
totp_identifier = totp_identifier ,
2024-03-01 10:09:30 -08:00
navigation_goal = navigation_goal ,
2024-11-21 15:12:26 +08:00
complete_criterion = complete_criterion ,
terminate_criterion = terminate_criterion ,
2024-03-01 10:09:30 -08:00
data_extraction_goal = data_extraction_goal ,
navigation_payload = navigation_payload ,
organization_id = organization_id ,
proxy_location = proxy_location ,
extracted_information_schema = extracted_information_schema ,
workflow_run_id = workflow_run_id ,
order = order ,
retry = retry ,
2024-05-11 14:13:21 -07:00
max_steps_per_run = max_steps_per_run ,
2024-03-12 22:28:16 -07:00
error_code_mapping = error_code_mapping ,
2024-11-29 05:43:02 -08:00
application = application ,
2024-03-01 10:09:30 -08:00
)
session . add ( new_task )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( new_task )
2024-03-01 10:09:30 -08:00
return convert_to_task ( new_task , self . debug_enabled )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def create_step (
self ,
task_id : str ,
order : int ,
retry_index : int ,
organization_id : str | None = None ,
) - > Step :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
new_step = StepModel (
task_id = task_id ,
order = order ,
retry_index = retry_index ,
status = " created " ,
organization_id = organization_id ,
)
session . add ( new_step )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( new_step )
2024-03-01 10:09:30 -08:00
return convert_to_step ( new_step , debug_enabled = self . debug_enabled )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def create_artifact (
self ,
artifact_id : str ,
artifact_type : str ,
uri : str ,
2024-12-07 12:22:11 -08:00
step_id : str | None = None ,
task_id : str | None = None ,
workflow_run_id : str | None = None ,
workflow_run_block_id : str | None = None ,
2025-02-23 16:03:49 -08:00
task_v2_id : str | None = None ,
2025-02-27 20:19:02 -08:00
thought_id : str | None = None ,
2025-01-08 21:45:38 -08:00
ai_suggestion_id : str | None = None ,
2024-03-01 10:09:30 -08:00
organization_id : str | None = None ,
) - > Artifact :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
new_artifact = ArtifactModel (
artifact_id = artifact_id ,
artifact_type = artifact_type ,
uri = uri ,
2024-12-07 12:22:11 -08:00
task_id = task_id ,
step_id = step_id ,
workflow_run_id = workflow_run_id ,
workflow_run_block_id = workflow_run_block_id ,
2025-02-23 16:03:49 -08:00
observer_cruise_id = task_v2_id ,
2025-02-27 20:19:02 -08:00
observer_thought_id = thought_id ,
2025-01-08 21:45:38 -08:00
ai_suggestion_id = ai_suggestion_id ,
2024-03-01 10:09:30 -08:00
organization_id = organization_id ,
)
session . add ( new_artifact )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( new_artifact )
2024-03-01 10:09:30 -08:00
return convert_to_artifact ( new_artifact , self . debug_enabled )
except SQLAlchemyError :
2024-04-30 00:27:32 -07:00
LOG . exception ( " SQLAlchemyError " )
2024-03-01 10:09:30 -08:00
raise
except Exception :
2024-04-30 00:27:32 -07:00
LOG . exception ( " UnexpectedError " )
2024-03-01 10:09:30 -08:00
raise
async def get_task ( self , task_id : str , organization_id : str | None = None ) - > Task | None :
""" Get a task by its id """
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if task_obj := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( TaskModel ) . filter_by ( task_id = task_id ) . filter_by ( organization_id = organization_id )
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_task ( task_obj , self . debug_enabled )
else :
2024-05-16 18:20:11 -07:00
LOG . info (
" Task not found " ,
task_id = task_id ,
organization_id = organization_id ,
)
2024-03-01 10:09:30 -08:00
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-12-22 20:54:53 -08:00
async def get_tasks_by_ids (
self ,
task_ids : list [ str ] ,
organization_id : str | None = None ,
) - > list [ Task ] :
try :
async with self . Session ( ) as session :
tasks = (
await session . scalars (
select ( TaskModel )
. filter ( TaskModel . task_id . in_ ( task_ids ) )
. filter_by ( organization_id = organization_id )
)
) . all ( )
return [ convert_to_task ( task , debug_enabled = self . debug_enabled ) for task in tasks ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
async def get_step ( self , task_id : str , step_id : str , organization_id : str | None = None ) - > Step | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if step := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( StepModel ) . filter_by ( step_id = step_id ) . filter_by ( organization_id = organization_id )
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_step ( step , debug_enabled = self . debug_enabled )
else :
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_task_steps ( self , task_id : str , organization_id : str | None = None ) - > list [ Step ] :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
if steps := (
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
. order_by ( StepModel . order )
. order_by ( StepModel . retry_index )
)
) . all ( ) :
2024-03-01 10:09:30 -08:00
return [ convert_to_step ( step , debug_enabled = self . debug_enabled ) for step in steps ]
else :
return [ ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-12-31 11:24:09 -08:00
async def get_steps_by_task_ids ( self , task_ids : list [ str ] , organization_id : str | None = None ) - > list [ Step ] :
try :
async with self . Session ( ) as session :
steps = (
await session . scalars (
select ( StepModel )
. filter ( StepModel . task_id . in_ ( task_ids ) )
. filter_by ( organization_id = organization_id )
)
) . all ( )
return [ convert_to_step ( step , debug_enabled = self . debug_enabled ) for step in steps ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2025-03-04 02:04:18 -05:00
async def get_total_unique_step_order_count_by_task_ids (
self ,
task_ids : list [ str ] ,
organization_id : str | None = None ,
2025-03-04 01:07:07 -05:00
) - > int :
2025-03-04 02:04:18 -05:00
"""
Get the total count of unique ( step . task_id , step . order ) pairs of StepModel for the given task ids
Basically translate this sql query into a SQLAlchemy query : select count ( distinct ( s . task_id , s . order ) ) from steps s
where s . task_id in task_ids
"""
2025-03-04 01:07:07 -05:00
try :
async with self . Session ( ) as session :
query = (
2025-03-04 03:22:16 -05:00
select ( func . count ( distinct ( tuple_ ( StepModel . task_id , StepModel . order ) ) ) )
2025-03-04 01:07:07 -05:00
. where ( StepModel . task_id . in_ ( task_ids ) )
2025-03-04 03:22:16 -05:00
. where ( StepModel . organization_id == organization_id )
2025-03-04 01:07:07 -05:00
)
2025-03-04 03:22:16 -05:00
return ( await session . execute ( query ) ) . scalar ( )
2025-03-04 01:07:07 -05:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-05-14 23:21:19 -07:00
async def get_task_step_models ( self , task_id : str , organization_id : str | None = None ) - > Sequence [ StepModel ] :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
return (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
. order_by ( StepModel . order )
. order_by ( StepModel . retry_index )
)
) . all ( )
2024-03-01 10:09:30 -08:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-10-18 12:50:02 -07:00
async def get_task_actions ( self , task_id : str , organization_id : str | None = None ) - > list [ Action ] :
try :
async with self . Session ( ) as session :
query = (
select ( ActionModel )
. filter ( ActionModel . organization_id == organization_id )
. filter ( ActionModel . task_id == task_id )
2024-10-30 08:21:00 -07:00
. order_by ( ActionModel . created_at )
2024-10-18 12:50:02 -07:00
)
actions = ( await session . scalars ( query ) ) . all ( )
return [ Action . model_validate ( action ) for action in actions ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-12-07 21:19:31 -08:00
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-12-17 17:17:18 -08:00
async def get_tasks_actions ( self , task_ids : list [ str ] , organization_id : str | None = None ) - > list [ Action ] :
try :
async with self . Session ( ) as session :
query = (
select ( ActionModel )
. filter ( ActionModel . organization_id == organization_id )
. filter ( ActionModel . task_id . in_ ( task_ids ) )
2025-01-06 06:04:24 -08:00
. order_by ( ActionModel . created_at . desc ( ) )
2024-12-17 17:17:18 -08:00
)
actions = ( await session . scalars ( query ) ) . all ( )
return [ Action . model_validate ( action ) for action in actions ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-12-07 21:19:31 -08:00
async def get_first_step ( self , task_id : str , organization_id : str | None = None ) - > Step | None :
try :
async with self . Session ( ) as session :
if step := (
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
. order_by ( StepModel . order . asc ( ) )
2025-01-08 13:08:36 +08:00
. order_by ( StepModel . retry_index . asc ( ) )
2024-12-07 21:19:31 -08:00
)
) . first ( ) :
return convert_to_step ( step , debug_enabled = self . debug_enabled )
else :
LOG . info (
" Latest step not found " ,
task_id = task_id ,
organization_id = organization_id ,
)
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-10-18 12:50:02 -07:00
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
async def get_latest_step ( self , task_id : str , organization_id : str | None = None ) - > Step | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if step := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( organization_id = organization_id )
. order_by ( StepModel . order . desc ( ) )
2024-12-18 13:35:42 +08:00
. order_by ( StepModel . retry_index . desc ( ) )
2024-03-24 12:47:47 -07:00
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_step ( step , debug_enabled = self . debug_enabled )
else :
2024-05-16 18:20:11 -07:00
LOG . info (
" Latest step not found " ,
task_id = task_id ,
organization_id = organization_id ,
)
2024-03-01 10:09:30 -08:00
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def update_step (
self ,
task_id : str ,
step_id : str ,
status : StepStatus | None = None ,
output : AgentStepOutput | None = None ,
is_last : bool | None = None ,
retry_index : int | None = None ,
organization_id : str | None = None ,
2024-03-16 23:13:18 -07:00
incremental_cost : float | None = None ,
2024-06-03 15:55:34 -07:00
incremental_input_tokens : int | None = None ,
incremental_output_tokens : int | None = None ,
2025-03-20 16:42:57 -07:00
incremental_reasoning_tokens : int | None = None ,
incremental_cached_tokens : int | None = None ,
2024-03-01 10:09:30 -08:00
) - > Step :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
if step := (
await session . scalars (
select ( StepModel )
. filter_by ( task_id = task_id )
. filter_by ( step_id = step_id )
. filter_by ( organization_id = organization_id )
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
if status is not None :
step . status = status
if output is not None :
2024-06-05 13:18:35 -07:00
step . output = output . model_dump ( exclude_none = True )
2024-03-01 10:09:30 -08:00
if is_last is not None :
step . is_last = is_last
if retry_index is not None :
step . retry_index = retry_index
2024-03-16 23:13:18 -07:00
if incremental_cost is not None :
step . step_cost = incremental_cost + float ( step . step_cost or 0 )
2024-06-03 15:55:34 -07:00
if incremental_input_tokens is not None :
step . input_token_count = incremental_input_tokens + ( step . input_token_count or 0 )
if incremental_output_tokens is not None :
step . output_token_count = incremental_output_tokens + ( step . output_token_count or 0 )
2025-03-20 16:42:57 -07:00
if incremental_reasoning_tokens is not None :
step . reasoning_token_count = incremental_reasoning_tokens + ( step . reasoning_token_count or 0 )
if incremental_cached_tokens is not None :
step . cached_token_count = incremental_cached_tokens + ( step . cached_token_count or 0 )
2024-03-01 10:09:30 -08:00
2024-03-24 12:47:47 -07:00
await session . commit ( )
2024-03-01 10:09:30 -08:00
updated_step = await self . get_step ( task_id , step_id , organization_id )
if not updated_step :
raise NotFoundError ( " Step not found " )
return updated_step
else :
raise NotFoundError ( " Step not found " )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
2024-04-19 00:32:00 -07:00
raise
except NotFoundError :
LOG . error ( " NotFoundError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-04-23 23:37:16 -07:00
async def clear_task_failure_reason ( self , organization_id : str , task_id : str ) - > Task :
2024-04-19 00:32:00 -07:00
try :
async with self . Session ( ) as session :
if task := (
await session . scalars (
select ( TaskModel ) . filter_by ( task_id = task_id ) . filter_by ( organization_id = organization_id )
)
) . first ( ) :
task . failure_reason = None
await session . commit ( )
2024-05-04 14:21:00 -04:00
await session . refresh ( task )
2024-04-19 00:32:00 -07:00
return convert_to_task ( task , debug_enabled = self . debug_enabled )
else :
raise NotFoundError ( " Task not found " )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
2024-03-01 10:09:30 -08:00
raise
except NotFoundError :
LOG . error ( " NotFoundError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def update_task (
self ,
task_id : str ,
2024-03-12 22:28:16 -07:00
status : TaskStatus | None = None ,
2024-03-01 10:09:30 -08:00
extracted_information : dict [ str , Any ] | list | str | None = None ,
failure_reason : str | None = None ,
2024-03-12 22:28:16 -07:00
errors : list [ dict [ str , Any ] ] | None = None ,
2024-07-18 18:19:14 -07:00
max_steps_per_run : int | None = None ,
2024-03-01 10:09:30 -08:00
organization_id : str | None = None ,
) - > Task :
2024-07-18 22:46:58 -07:00
if (
status is None
and extracted_information is None
and failure_reason is None
and errors is None
and max_steps_per_run is None
) :
2024-03-12 22:28:16 -07:00
raise ValueError (
" At least one of status, extracted_information, or failure_reason must be provided to update the task "
)
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
if task := (
await session . scalars (
select ( TaskModel ) . filter_by ( task_id = task_id ) . filter_by ( organization_id = organization_id )
)
) . first ( ) :
2024-03-12 22:28:16 -07:00
if status is not None :
task . status = status
2024-03-01 10:09:30 -08:00
if extracted_information is not None :
task . extracted_information = extracted_information
if failure_reason is not None :
task . failure_reason = failure_reason
2024-03-12 22:28:16 -07:00
if errors is not None :
task . errors = errors
2024-07-18 18:19:14 -07:00
if max_steps_per_run is not None :
task . max_steps_per_run = max_steps_per_run
2024-03-24 12:47:47 -07:00
await session . commit ( )
2024-03-01 10:09:30 -08:00
updated_task = await self . get_task ( task_id , organization_id = organization_id )
if not updated_task :
raise NotFoundError ( " Task not found " )
return updated_task
else :
raise NotFoundError ( " Task not found " )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except NotFoundError :
LOG . error ( " NotFoundError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-04-24 20:39:19 +03:00
async def get_tasks (
self ,
page : int = 1 ,
page_size : int = 10 ,
2024-04-24 22:27:15 +03:00
task_status : list [ TaskStatus ] | None = None ,
2024-07-09 11:37:03 -07:00
workflow_run_id : str | None = None ,
2024-04-24 20:39:19 +03:00
organization_id : str | None = None ,
2024-10-07 14:09:46 -07:00
only_standalone_tasks : bool = False ,
2024-11-29 05:43:02 -08:00
application : str | None = None ,
2024-10-21 10:34:42 -07:00
order_by_column : OrderBy = OrderBy . created_at ,
order : SortDirection = SortDirection . desc ,
2024-04-24 20:39:19 +03:00
) - > list [ Task ] :
2024-03-01 10:09:30 -08:00
"""
Get all tasks .
: param page : Starts at 1
: param page_size :
2024-07-09 11:37:03 -07:00
: param task_status :
: param workflow_run_id :
2024-10-07 14:09:46 -07:00
: param only_standalone_tasks :
2024-10-21 10:34:42 -07:00
: param order_by_column :
: param order :
2024-03-01 10:09:30 -08:00
: return :
"""
if page < 1 :
raise ValueError ( f " Page must be greater than 0, got { page } " )
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
db_page = page - 1 # offset logic is 0 based
2024-07-09 11:37:03 -07:00
query = select ( TaskModel ) . filter ( TaskModel . organization_id == organization_id )
2024-04-24 20:39:19 +03:00
if task_status :
2024-04-24 22:27:15 +03:00
query = query . filter ( TaskModel . status . in_ ( task_status ) )
2024-07-09 11:37:03 -07:00
if workflow_run_id :
query = query . filter ( TaskModel . workflow_run_id == workflow_run_id )
2024-10-07 14:09:46 -07:00
if only_standalone_tasks :
query = query . filter ( TaskModel . workflow_run_id . is_ ( None ) )
2024-11-29 05:43:02 -08:00
if application :
query = query . filter ( TaskModel . application == application )
2024-10-21 10:34:42 -07:00
order_by_col = getattr ( TaskModel , order_by_column )
query = (
query . order_by ( order_by_col . desc ( ) if order == SortDirection . desc else order_by_col . asc ( ) )
. limit ( page_size )
. offset ( db_page * page_size )
)
2024-04-24 20:39:19 +03:00
tasks = ( await session . scalars ( query ) ) . all ( )
2024-03-01 10:09:30 -08:00
return [ convert_to_task ( task , debug_enabled = self . debug_enabled ) for task in tasks ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_organization ( self , organization_id : str ) - > Organization | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if organization := (
2024-03-24 12:47:47 -07:00
await session . scalars ( select ( OrganizationModel ) . filter_by ( organization_id = organization_id ) )
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_organization ( organization )
else :
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-04-23 15:02:56 -07:00
async def get_organization_by_domain ( self , domain : str ) - > Organization | None :
async with self . Session ( ) as session :
if organization := ( await session . scalars ( select ( OrganizationModel ) . filter_by ( domain = domain ) ) ) . first ( ) :
return convert_to_organization ( organization )
return None
2024-03-01 10:09:30 -08:00
async def create_organization (
self ,
organization_name : str ,
webhook_callback_url : str | None = None ,
max_steps_per_run : int | None = None ,
2024-04-08 16:58:45 -07:00
max_retries_per_step : int | None = None ,
2024-04-23 15:02:56 -07:00
domain : str | None = None ,
2024-03-01 10:09:30 -08:00
) - > Organization :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
org = OrganizationModel (
organization_name = organization_name ,
webhook_callback_url = webhook_callback_url ,
max_steps_per_run = max_steps_per_run ,
2024-04-08 16:58:45 -07:00
max_retries_per_step = max_retries_per_step ,
2024-04-23 15:02:56 -07:00
domain = domain ,
2024-03-01 10:09:30 -08:00
)
session . add ( org )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( org )
2024-03-01 10:09:30 -08:00
return convert_to_organization ( org )
2024-06-16 19:42:20 -07:00
    async def update_organization(
        self,
        organization_id: str,
        organization_name: str | None = None,
        webhook_callback_url: str | None = None,
        max_steps_per_run: int | None = None,
        max_retries_per_step: int | None = None,
    ) -> Organization:
        """Partially update an organization and return the refreshed record.

        Raises NotFoundError when no organization matches ``organization_id``.
        """
        async with self.Session() as session:
            organization = (
                await session.scalars(select(OrganizationModel).filter_by(organization_id=organization_id))
            ).first()
            if not organization:
                raise NotFoundError
            # NOTE(review): these are truthiness checks, so 0 / "" can never be
            # set explicitly — presumably intentional (sibling update_step uses
            # `is not None`); confirm callers never need to clear a value.
            if organization_name:
                organization.organization_name = organization_name
            if webhook_callback_url:
                organization.webhook_callback_url = webhook_callback_url
            if max_steps_per_run:
                organization.max_steps_per_run = max_steps_per_run
            if max_retries_per_step:
                organization.max_retries_per_step = max_retries_per_step
            await session.commit()
            await session.refresh(organization)
            return Organization.model_validate(organization)
2024-03-01 10:09:30 -08:00
async def get_valid_org_auth_token (
self ,
organization_id : str ,
token_type : OrganizationAuthTokenType ,
) - > OrganizationAuthToken | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if token := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( OrganizationAuthTokenModel )
. filter_by ( organization_id = organization_id )
. filter_by ( token_type = token_type )
. filter_by ( valid = True )
2024-06-28 18:10:13 -07:00
. order_by ( OrganizationAuthTokenModel . created_at . desc ( ) )
2024-03-24 12:47:47 -07:00
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_organization_auth_token ( token )
else :
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-06-28 18:10:13 -07:00
async def get_valid_org_auth_tokens (
self ,
organization_id : str ,
token_type : OrganizationAuthTokenType ,
) - > list [ OrganizationAuthToken ] :
try :
async with self . Session ( ) as session :
tokens = (
await session . scalars (
select ( OrganizationAuthTokenModel )
. filter_by ( organization_id = organization_id )
. filter_by ( token_type = token_type )
. filter_by ( valid = True )
. order_by ( OrganizationAuthTokenModel . created_at . desc ( ) )
)
) . all ( )
return [ convert_to_organization_auth_token ( token ) for token in tokens ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
async def validate_org_auth_token (
self ,
organization_id : str ,
token_type : OrganizationAuthTokenType ,
token : str ,
2024-06-29 23:55:05 -07:00
valid : bool | None = True ,
2024-03-01 10:09:30 -08:00
) - > OrganizationAuthToken | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-06-29 23:55:05 -07:00
query = (
select ( OrganizationAuthTokenModel )
. filter_by ( organization_id = organization_id )
. filter_by ( token_type = token_type )
. filter_by ( token = token )
)
if valid is not None :
query = query . filter_by ( valid = valid )
if token_obj := ( await session . scalars ( query ) ) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_organization_auth_token ( token_obj )
else :
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def create_org_auth_token (
self ,
organization_id : str ,
token_type : OrganizationAuthTokenType ,
token : str ,
) - > OrganizationAuthToken :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-05-06 13:46:17 -07:00
auth_token = OrganizationAuthTokenModel (
2024-03-01 10:09:30 -08:00
organization_id = organization_id ,
token_type = token_type ,
token = token ,
)
2024-05-06 13:46:17 -07:00
session . add ( auth_token )
2024-03-24 12:47:47 -07:00
await session . commit ( )
2024-05-06 13:46:17 -07:00
await session . refresh ( auth_token )
2024-03-01 10:09:30 -08:00
2024-05-06 13:46:17 -07:00
return convert_to_organization_auth_token ( auth_token )
2024-03-01 10:09:30 -08:00
2025-02-23 16:03:49 -08:00
async def get_artifacts_for_task_v2 (
2024-12-10 20:37:15 -08:00
self ,
2025-02-23 16:03:49 -08:00
task_v2_id : str ,
2024-12-10 20:37:15 -08:00
organization_id : str | None = None ,
artifact_types : list [ ArtifactType ] | None = None ,
) - > list [ Artifact ] :
try :
async with self . Session ( ) as session :
query = (
select ( ArtifactModel )
2025-02-23 16:03:49 -08:00
. filter_by ( observer_cruise_id = task_v2_id )
2024-12-10 20:37:15 -08:00
. filter_by ( organization_id = organization_id )
)
if artifact_types :
query = query . filter ( ArtifactModel . artifact_type . in_ ( artifact_types ) )
query = query . order_by ( ArtifactModel . created_at )
if artifacts := ( await session . scalars ( query ) ) . all ( ) :
return [ convert_to_artifact ( artifact , self . debug_enabled ) for artifact in artifacts ]
else :
return [ ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
async def get_artifacts_for_task_step (
self ,
task_id : str ,
step_id : str ,
organization_id : str | None = None ,
) - > list [ Artifact ] :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if artifacts := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( ArtifactModel )
. filter_by ( task_id = task_id )
. filter_by ( step_id = step_id )
. filter_by ( organization_id = organization_id )
2024-05-23 11:53:05 -07:00
. order_by ( ArtifactModel . created_at )
2024-03-24 12:47:47 -07:00
)
) . all ( ) :
2024-03-01 10:09:30 -08:00
return [ convert_to_artifact ( artifact , self . debug_enabled ) for artifact in artifacts ]
else :
return [ ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_artifact_by_id (
self ,
artifact_id : str ,
organization_id : str ,
) - > Artifact | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
if artifact := (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( ArtifactModel )
. filter_by ( artifact_id = artifact_id )
. filter_by ( organization_id = organization_id )
)
) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_artifact ( artifact , self . debug_enabled )
else :
return None
except SQLAlchemyError :
2024-04-30 00:27:32 -07:00
LOG . exception ( " SQLAlchemyError " )
2024-03-01 10:09:30 -08:00
raise
except Exception :
2024-04-30 00:27:32 -07:00
LOG . exception ( " UnexpectedError " )
2024-03-01 10:09:30 -08:00
raise
2024-12-18 00:32:38 +01:00
async def get_artifacts_by_entity_id (
self ,
artifact_type : ArtifactType | None = None ,
task_id : str | None = None ,
step_id : str | None = None ,
workflow_run_id : str | None = None ,
workflow_run_block_id : str | None = None ,
2025-02-27 20:19:02 -08:00
thought_id : str | None = None ,
2025-02-23 16:03:49 -08:00
task_v2_id : str | None = None ,
2024-12-18 00:32:38 +01:00
organization_id : str | None = None ,
) - > list [ Artifact ] :
try :
async with self . Session ( ) as session :
query = select ( ArtifactModel )
if artifact_type is not None :
query = query . filter_by ( artifact_type = artifact_type )
if task_id is not None :
query = query . filter_by ( task_id = task_id )
if step_id is not None :
query = query . filter_by ( step_id = step_id )
if workflow_run_id is not None :
query = query . filter_by ( workflow_run_id = workflow_run_id )
if workflow_run_block_id is not None :
query = query . filter_by ( workflow_run_block_id = workflow_run_block_id )
2025-02-27 20:19:02 -08:00
if thought_id is not None :
query = query . filter_by ( observer_thought_id = thought_id )
2025-02-23 16:03:49 -08:00
if task_v2_id is not None :
query = query . filter_by ( observer_cruise_id = task_v2_id )
2024-12-18 00:32:38 +01:00
if organization_id is not None :
query = query . filter_by ( organization_id = organization_id )
query = query . order_by ( ArtifactModel . created_at . desc ( ) )
if artifacts := ( await session . scalars ( query ) ) . all ( ) :
return [ convert_to_artifact ( artifact , self . debug_enabled ) for artifact in artifacts ]
else :
return [ ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_artifact_by_entity_id (
self ,
artifact_type : ArtifactType ,
task_id : str | None = None ,
step_id : str | None = None ,
workflow_run_id : str | None = None ,
workflow_run_block_id : str | None = None ,
2025-02-27 20:19:02 -08:00
thought_id : str | None = None ,
2025-02-23 16:03:49 -08:00
task_v2_id : str | None = None ,
2024-12-18 00:32:38 +01:00
organization_id : str | None = None ,
) - > Artifact | None :
artifacts = await self . get_artifacts_by_entity_id (
artifact_type = artifact_type ,
task_id = task_id ,
step_id = step_id ,
workflow_run_id = workflow_run_id ,
workflow_run_block_id = workflow_run_block_id ,
2025-02-27 20:19:02 -08:00
thought_id = thought_id ,
2025-02-23 16:03:49 -08:00
task_v2_id = task_v2_id ,
2024-12-18 00:32:38 +01:00
organization_id = organization_id ,
)
return artifacts [ 0 ] if artifacts else None
2024-03-01 10:09:30 -08:00
async def get_artifact (
self ,
task_id : str ,
step_id : str ,
artifact_type : ArtifactType ,
organization_id : str | None = None ,
) - > Artifact | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
artifact = (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( ArtifactModel )
. filter_by ( task_id = task_id )
. filter_by ( step_id = step_id )
. filter_by ( organization_id = organization_id )
. filter_by ( artifact_type = artifact_type )
. order_by ( ArtifactModel . created_at . desc ( ) )
)
) . first ( )
2024-03-01 10:09:30 -08:00
if artifact :
return convert_to_artifact ( artifact , self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_artifact_for_workflow_run (
self ,
workflow_run_id : str ,
artifact_type : ArtifactType ,
organization_id : str | None = None ,
) - > Artifact | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
artifact = (
2024-03-24 12:47:47 -07:00
await session . scalars (
select ( ArtifactModel )
. join ( TaskModel , TaskModel . task_id == ArtifactModel . task_id )
. filter ( TaskModel . workflow_run_id == workflow_run_id )
. filter ( ArtifactModel . artifact_type == artifact_type )
. filter ( ArtifactModel . organization_id == organization_id )
. order_by ( ArtifactModel . created_at . desc ( ) )
)
) . first ( )
2024-03-01 10:09:30 -08:00
if artifact :
return convert_to_artifact ( artifact , self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
async def get_latest_artifact (
self ,
task_id : str ,
step_id : str | None = None ,
artifact_types : list [ ArtifactType ] | None = None ,
organization_id : str | None = None ,
) - > Artifact | None :
2024-04-02 14:43:29 -07:00
try :
artifacts = await self . get_latest_n_artifacts (
task_id = task_id ,
step_id = step_id ,
artifact_types = artifact_types ,
organization_id = organization_id ,
n = 1 ,
)
if artifacts :
return artifacts [ 0 ]
return None
except SQLAlchemyError :
2024-04-30 00:27:32 -07:00
LOG . exception ( " SQLAlchemyError " )
2024-04-02 14:43:29 -07:00
raise
except Exception :
2024-04-30 00:27:32 -07:00
LOG . exception ( " UnexpectedError " )
2024-04-02 14:43:29 -07:00
raise
async def get_latest_n_artifacts (
self ,
task_id : str ,
step_id : str | None = None ,
artifact_types : list [ ArtifactType ] | None = None ,
organization_id : str | None = None ,
n : int = 1 ,
) - > list [ Artifact ] | None :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
artifact_query = select ( ArtifactModel ) . filter_by ( task_id = task_id )
2024-03-01 10:09:30 -08:00
if organization_id :
artifact_query = artifact_query . filter_by ( organization_id = organization_id )
2024-04-28 16:23:17 -07:00
if step_id :
artifact_query = artifact_query . filter_by ( step_id = step_id )
2024-03-01 10:09:30 -08:00
if artifact_types :
artifact_query = artifact_query . filter ( ArtifactModel . artifact_type . in_ ( artifact_types ) )
2024-04-02 14:43:29 -07:00
artifacts = ( await session . scalars ( artifact_query . order_by ( ArtifactModel . created_at . desc ( ) ) ) ) . fetchmany (
n
)
if artifacts :
return [ convert_to_artifact ( artifact , self . debug_enabled ) for artifact in artifacts ]
2024-03-01 10:09:30 -08:00
return None
except SQLAlchemyError :
2024-04-30 00:27:32 -07:00
LOG . exception ( " SQLAlchemyError " )
2024-03-01 10:09:30 -08:00
raise
except Exception :
2024-04-30 00:27:32 -07:00
LOG . exception ( " UnexpectedError " )
2024-03-01 10:09:30 -08:00
raise
async def get_latest_task_by_workflow_id (
self ,
organization_id : str ,
workflow_id : str ,
before : datetime | None = None ,
) - > Task | None :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
query = select ( TaskModel ) . filter_by ( organization_id = organization_id ) . filter_by ( workflow_id = workflow_id )
2024-03-01 10:09:30 -08:00
if before :
query = query . filter ( TaskModel . created_at < before )
2024-03-24 12:47:47 -07:00
task = ( await session . scalars ( query . order_by ( TaskModel . created_at . desc ( ) ) ) ) . first ( )
2024-03-01 10:09:30 -08:00
if task :
return convert_to_task ( task , debug_enabled = self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def create_workflow (
self ,
title : str ,
workflow_definition : dict [ str , Any ] ,
2024-05-16 10:51:22 -07:00
organization_id : str | None = None ,
2024-03-01 10:09:30 -08:00
description : str | None = None ,
2024-05-16 10:51:22 -07:00
proxy_location : ProxyLocation | None = None ,
webhook_callback_url : str | None = None ,
2024-07-11 21:34:00 -07:00
totp_verification_url : str | None = None ,
2024-09-08 15:07:03 -07:00
totp_identifier : str | None = None ,
2024-09-06 12:01:56 -07:00
persist_browser_session : bool = False ,
2024-05-16 10:51:22 -07:00
workflow_permanent_id : str | None = None ,
version : int | None = None ,
2024-06-27 12:53:08 -07:00
is_saved_task : bool = False ,
2025-01-25 04:08:51 +08:00
status : WorkflowStatus = WorkflowStatus . published ,
2024-03-01 10:09:30 -08:00
) - > Workflow :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
workflow = WorkflowModel (
organization_id = organization_id ,
title = title ,
description = description ,
workflow_definition = workflow_definition ,
2024-05-16 10:51:22 -07:00
proxy_location = proxy_location ,
webhook_callback_url = webhook_callback_url ,
2024-07-11 21:34:00 -07:00
totp_verification_url = totp_verification_url ,
2024-09-08 15:07:03 -07:00
totp_identifier = totp_identifier ,
2024-09-06 12:01:56 -07:00
persist_browser_session = persist_browser_session ,
2024-06-27 12:53:08 -07:00
is_saved_task = is_saved_task ,
2025-01-25 04:08:51 +08:00
status = status ,
2024-03-01 10:09:30 -08:00
)
2024-05-16 10:51:22 -07:00
if workflow_permanent_id :
workflow . workflow_permanent_id = workflow_permanent_id
if version :
workflow . version = version
2024-03-01 10:09:30 -08:00
session . add ( workflow )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( workflow )
2024-03-01 10:09:30 -08:00
return convert_to_workflow ( workflow , self . debug_enabled )
2024-09-19 11:15:07 -07:00
async def soft_delete_workflow_by_id ( self , workflow_id : str , organization_id : str ) - > None :
try :
async with self . Session ( ) as session :
# soft delete the workflow by setting the deleted_at field to the current time
update_deleted_at_query = (
update ( WorkflowModel )
. where ( WorkflowModel . workflow_id == workflow_id )
. where ( WorkflowModel . organization_id == organization_id )
. where ( WorkflowModel . deleted_at . is_ ( None ) )
. values ( deleted_at = datetime . utcnow ( ) )
)
await session . execute ( update_deleted_at_query )
await session . commit ( )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError in soft_delete_workflow_by_id " , exc_info = True )
raise
2024-05-15 08:43:36 -07:00
async def get_workflow ( self , workflow_id : str , organization_id : str | None = None ) - > Workflow | None :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-05-16 10:51:22 -07:00
get_workflow_query = (
select ( WorkflowModel ) . filter_by ( workflow_id = workflow_id ) . filter ( WorkflowModel . deleted_at . is_ ( None ) )
)
2024-05-15 08:43:36 -07:00
if organization_id :
get_workflow_query = get_workflow_query . filter_by ( organization_id = organization_id )
if workflow := ( await session . scalars ( get_workflow_query ) ) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_workflow ( workflow , self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-05-16 10:51:22 -07:00
async def get_workflow_by_permanent_id (
self ,
workflow_permanent_id : str ,
organization_id : str | None = None ,
version : int | None = None ,
2024-09-19 11:15:07 -07:00
exclude_deleted : bool = True ,
2024-05-16 10:51:22 -07:00
) - > Workflow | None :
try :
2024-09-19 11:15:07 -07:00
get_workflow_query = select ( WorkflowModel ) . filter_by ( workflow_permanent_id = workflow_permanent_id )
if exclude_deleted :
get_workflow_query = get_workflow_query . filter ( WorkflowModel . deleted_at . is_ ( None ) )
2024-05-16 10:51:22 -07:00
if organization_id :
get_workflow_query = get_workflow_query . filter_by ( organization_id = organization_id )
if version :
get_workflow_query = get_workflow_query . filter_by ( version = version )
get_workflow_query = get_workflow_query . order_by ( WorkflowModel . version . desc ( ) )
async with self . Session ( ) as session :
if workflow := ( await session . scalars ( get_workflow_query ) ) . first ( ) :
return convert_to_workflow ( workflow , self . debug_enabled )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2025-01-28 15:04:18 +08:00
async def get_workflows_by_permanent_ids (
self ,
workflow_permanent_ids : list [ str ] ,
organization_id : str | None = None ,
page : int = 1 ,
page_size : int = 10 ,
title : str = " " ,
statuses : list [ WorkflowStatus ] | None = None ,
) - > list [ Workflow ] :
"""
Get all workflows with the latest version for the organization .
"""
if page < 1 :
raise ValueError ( f " Page must be greater than 0, got { page } " )
db_page = page - 1
try :
async with self . Session ( ) as session :
subquery = (
select (
WorkflowModel . workflow_permanent_id ,
func . max ( WorkflowModel . version ) . label ( " max_version " ) ,
)
. where ( WorkflowModel . workflow_permanent_id . in_ ( workflow_permanent_ids ) )
. where ( WorkflowModel . deleted_at . is_ ( None ) )
. group_by (
WorkflowModel . workflow_permanent_id ,
)
. subquery ( )
)
main_query = select ( WorkflowModel ) . join (
subquery ,
( WorkflowModel . workflow_permanent_id == subquery . c . workflow_permanent_id )
& ( WorkflowModel . version == subquery . c . max_version ) ,
)
if organization_id :
main_query = main_query . where ( WorkflowModel . organization_id == organization_id )
if title :
main_query = main_query . where ( WorkflowModel . title . ilike ( f " % { title } % " ) )
if statuses :
main_query = main_query . where ( WorkflowModel . status . in_ ( statuses ) )
main_query = (
main_query . order_by ( WorkflowModel . created_at . desc ( ) ) . limit ( page_size ) . offset ( db_page * page_size )
)
workflows = ( await session . scalars ( main_query ) ) . all ( )
return [ convert_to_workflow ( workflow , self . debug_enabled ) for workflow in workflows ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-05-16 10:51:22 -07:00
async def get_workflows_by_organization_id (
self ,
organization_id : str ,
page : int = 1 ,
page_size : int = 10 ,
2024-06-27 12:53:08 -07:00
only_saved_tasks : bool = False ,
only_workflows : bool = False ,
2025-01-25 02:59:02 +08:00
title : str = " " ,
2025-01-25 04:08:51 +08:00
statuses : list [ WorkflowStatus ] | None = None ,
2024-05-16 10:51:22 -07:00
) - > list [ Workflow ] :
"""
Get all workflows with the latest version for the organization .
"""
if page < 1 :
raise ValueError ( f " Page must be greater than 0, got { page } " )
db_page = page - 1
try :
async with self . Session ( ) as session :
subquery = (
select (
WorkflowModel . organization_id ,
WorkflowModel . workflow_permanent_id ,
func . max ( WorkflowModel . version ) . label ( " max_version " ) ,
)
. where ( WorkflowModel . organization_id == organization_id )
. where ( WorkflowModel . deleted_at . is_ ( None ) )
2024-05-16 18:20:11 -07:00
. group_by (
WorkflowModel . organization_id ,
WorkflowModel . workflow_permanent_id ,
)
2024-05-16 10:51:22 -07:00
. subquery ( )
)
2024-06-27 12:53:08 -07:00
main_query = select ( WorkflowModel ) . join (
subquery ,
( WorkflowModel . organization_id == subquery . c . organization_id )
& ( WorkflowModel . workflow_permanent_id == subquery . c . workflow_permanent_id )
& ( WorkflowModel . version == subquery . c . max_version ) ,
)
if only_saved_tasks :
main_query = main_query . where ( WorkflowModel . is_saved_task . is_ ( True ) )
elif only_workflows :
main_query = main_query . where ( WorkflowModel . is_saved_task . is_ ( False ) )
2025-01-25 02:59:02 +08:00
if title :
main_query = main_query . where ( WorkflowModel . title . ilike ( f " % { title } % " ) )
2025-01-25 04:08:51 +08:00
if statuses :
main_query = main_query . where ( WorkflowModel . status . in_ ( statuses ) )
2024-05-16 10:51:22 -07:00
main_query = (
2024-06-27 12:53:08 -07:00
main_query . order_by ( WorkflowModel . created_at . desc ( ) ) . limit ( page_size ) . offset ( db_page * page_size )
2024-05-16 10:51:22 -07:00
)
workflows = ( await session . scalars ( main_query ) ) . all ( )
return [ convert_to_workflow ( workflow , self . debug_enabled ) for workflow in workflows ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
async def update_workflow (
self ,
workflow_id : str ,
2024-05-15 08:43:36 -07:00
organization_id : str | None = None ,
2024-03-01 10:09:30 -08:00
title : str | None = None ,
description : str | None = None ,
workflow_definition : dict [ str , Any ] | None = None ,
2024-05-16 10:51:22 -07:00
version : int | None = None ,
2024-03-24 22:55:38 -07:00
) - > Workflow :
try :
async with self . Session ( ) as session :
2024-05-16 10:51:22 -07:00
get_workflow_query = (
select ( WorkflowModel ) . filter_by ( workflow_id = workflow_id ) . filter ( WorkflowModel . deleted_at . is_ ( None ) )
)
2024-05-15 08:43:36 -07:00
if organization_id :
get_workflow_query = get_workflow_query . filter_by ( organization_id = organization_id )
if workflow := ( await session . scalars ( get_workflow_query ) ) . first ( ) :
2024-03-24 22:55:38 -07:00
if title :
workflow . title = title
if description :
workflow . description = description
if workflow_definition :
workflow . workflow_definition = workflow_definition
2024-05-16 10:51:22 -07:00
if version :
workflow . version = version
2024-03-24 22:55:38 -07:00
await session . commit ( )
await session . refresh ( workflow )
return convert_to_workflow ( workflow , self . debug_enabled )
else :
raise NotFoundError ( " Workflow not found " )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
except NotFoundError :
LOG . error ( " No workflow found to update " , workflow_id = workflow_id )
LOG . error ( " NotFoundError " , exc_info = True )
raise
except Exception :
LOG . error ( " UnexpectedError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
2024-05-16 10:51:22 -07:00
async def soft_delete_workflow_by_permanent_id (
self ,
workflow_permanent_id : str ,
organization_id : str | None = None ,
) - > None :
async with self . Session ( ) as session :
# soft delete the workflow by setting the deleted_at field
update_deleted_at_query = (
update ( WorkflowModel )
. where ( WorkflowModel . workflow_permanent_id == workflow_permanent_id )
. where ( WorkflowModel . deleted_at . is_ ( None ) )
)
if organization_id :
update_deleted_at_query = update_deleted_at_query . filter_by ( organization_id = organization_id )
update_deleted_at_query = update_deleted_at_query . values ( deleted_at = datetime . utcnow ( ) )
await session . execute ( update_deleted_at_query )
await session . commit ( )
2024-03-01 10:09:30 -08:00
async def create_workflow_run (
2024-05-16 10:51:22 -07:00
self ,
2024-07-09 11:26:44 -07:00
workflow_permanent_id : str ,
2024-05-16 10:51:22 -07:00
workflow_id : str ,
2024-07-09 11:26:44 -07:00
organization_id : str ,
2024-05-16 10:51:22 -07:00
proxy_location : ProxyLocation | None = None ,
webhook_callback_url : str | None = None ,
2024-07-11 21:34:00 -07:00
totp_verification_url : str | None = None ,
2024-09-08 15:07:03 -07:00
totp_identifier : str | None = None ,
2025-01-28 16:59:54 +08:00
parent_workflow_run_id : str | None = None ,
2024-03-01 10:09:30 -08:00
) - > WorkflowRun :
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-03-01 10:09:30 -08:00
workflow_run = WorkflowRunModel (
2024-07-09 11:26:44 -07:00
workflow_permanent_id = workflow_permanent_id ,
2024-03-01 10:09:30 -08:00
workflow_id = workflow_id ,
2024-07-09 11:26:44 -07:00
organization_id = organization_id ,
2024-03-01 10:09:30 -08:00
proxy_location = proxy_location ,
status = " created " ,
webhook_callback_url = webhook_callback_url ,
2024-07-11 21:34:00 -07:00
totp_verification_url = totp_verification_url ,
2024-09-08 15:07:03 -07:00
totp_identifier = totp_identifier ,
2025-01-28 16:59:54 +08:00
parent_workflow_run_id = parent_workflow_run_id ,
2024-03-01 10:09:30 -08:00
)
session . add ( workflow_run )
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( workflow_run )
2024-03-01 10:09:30 -08:00
return convert_to_workflow_run ( workflow_run )
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-11-15 11:07:44 +08:00
async def update_workflow_run (
self , workflow_run_id : str , status : WorkflowRunStatus , failure_reason : str | None = None
) - > WorkflowRun | None :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
workflow_run = (
await session . scalars ( select ( WorkflowRunModel ) . filter_by ( workflow_run_id = workflow_run_id ) )
) . first ( )
2024-03-01 10:09:30 -08:00
if workflow_run :
workflow_run . status = status
2024-11-15 11:07:44 +08:00
workflow_run . failure_reason = failure_reason
2024-03-24 12:47:47 -07:00
await session . commit ( )
await session . refresh ( workflow_run )
2024-12-18 00:32:38 +01:00
await save_workflow_run_logs ( workflow_run_id )
2024-03-01 10:09:30 -08:00
return convert_to_workflow_run ( workflow_run )
2024-05-16 18:20:11 -07:00
LOG . error (
" WorkflowRun not found, nothing to update " ,
workflow_run_id = workflow_run_id ,
)
2024-03-01 10:09:30 -08:00
return None
2025-02-04 03:59:10 +08:00
async def get_all_runs (
self , organization_id : str , page : int = 1 , page_size : int = 10 , status : list [ WorkflowRunStatus ] | None = None
) - > list [ WorkflowRun | Task ] :
try :
async with self . Session ( ) as session :
# temporary limit to 10 pages
if page > 10 :
return [ ]
limit = page * page_size
2025-02-06 03:10:17 +08:00
workflow_run_query = (
select ( WorkflowRunModel , WorkflowModel . title )
. join ( WorkflowModel , WorkflowModel . workflow_id == WorkflowRunModel . workflow_id )
. filter ( WorkflowRunModel . organization_id == organization_id )
2025-02-17 16:52:22 +08:00
. filter ( WorkflowRunModel . parent_workflow_run_id . is_ ( None ) )
2025-02-04 03:59:10 +08:00
)
if status :
workflow_run_query = workflow_run_query . filter ( WorkflowRunModel . status . in_ ( status ) )
workflow_run_query = workflow_run_query . order_by ( WorkflowRunModel . created_at . desc ( ) ) . limit ( limit )
2025-02-06 03:10:17 +08:00
workflow_run_query_result = ( await session . execute ( workflow_run_query ) ) . all ( )
2025-02-04 03:59:10 +08:00
workflow_runs = [
2025-02-06 03:10:17 +08:00
convert_to_workflow_run ( run , workflow_title = title , debug_enabled = self . debug_enabled )
for run , title in workflow_run_query_result
2025-02-04 03:59:10 +08:00
]
2025-02-04 22:13:36 +08:00
task_query = (
select ( TaskModel )
. filter ( TaskModel . organization_id == organization_id )
. filter ( TaskModel . workflow_run_id . is_ ( None ) )
)
2025-02-04 03:59:10 +08:00
if status :
task_query = task_query . filter ( TaskModel . status . in_ ( status ) )
task_query = task_query . order_by ( TaskModel . created_at . desc ( ) ) . limit ( limit )
task_query_result = ( await session . scalars ( task_query ) ) . all ( )
tasks = [ convert_to_task ( task , debug_enabled = self . debug_enabled ) for task in task_query_result ]
runs = workflow_runs + tasks
runs . sort ( key = lambda x : x . created_at , reverse = True )
lower = ( page - 1 ) * page_size
upper = page * page_size
return runs [ lower : upper ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-12-22 17:49:33 -08:00
async def get_workflow_run ( self , workflow_run_id : str , organization_id : str | None = None ) - > WorkflowRun | None :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-12-22 17:49:33 -08:00
get_workflow_run_query = select ( WorkflowRunModel ) . filter_by ( workflow_run_id = workflow_run_id )
if organization_id :
get_workflow_run_query = get_workflow_run_query . filter_by ( organization_id = organization_id )
if workflow_run := ( await session . scalars ( get_workflow_run_query ) ) . first ( ) :
2024-03-01 10:09:30 -08:00
return convert_to_workflow_run ( workflow_run )
return None
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2025-01-24 23:31:26 +08:00
async def get_workflow_runs (
self , organization_id : str , page : int = 1 , page_size : int = 10 , status : list [ WorkflowRunStatus ] | None = None
) - > list [ WorkflowRun ] :
2024-03-01 10:09:30 -08:00
try :
2024-03-24 12:47:47 -07:00
async with self . Session ( ) as session :
2024-07-05 16:39:42 -07:00
db_page = page - 1 # offset logic is 0 based
2025-01-28 16:59:54 +08:00
query = (
2025-02-06 03:10:17 +08:00
select ( WorkflowRunModel , WorkflowModel . title )
. join ( WorkflowModel , WorkflowModel . workflow_id == WorkflowRunModel . workflow_id )
2025-01-28 16:59:54 +08:00
. filter ( WorkflowRunModel . organization_id == organization_id )
. filter ( WorkflowRunModel . parent_workflow_run_id . is_ ( None ) )
)
2025-01-24 23:31:26 +08:00
if status :
query = query . filter ( WorkflowRunModel . status . in_ ( status ) )
query = query . order_by ( WorkflowRunModel . created_at . desc ( ) ) . limit ( page_size ) . offset ( db_page * page_size )
2025-02-06 03:10:17 +08:00
workflow_runs = ( await session . execute ( query ) ) . all ( )
return [
convert_to_workflow_run ( run , workflow_title = title , debug_enabled = self . debug_enabled )
for run , title in workflow_runs
]
2024-07-05 16:39:42 -07:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
async def get_workflow_runs_for_workflow_permanent_id (
2025-01-24 23:31:26 +08:00
self ,
workflow_permanent_id : str ,
organization_id : str ,
page : int = 1 ,
page_size : int = 10 ,
status : list [ WorkflowRunStatus ] | None = None ,
2024-07-05 16:39:42 -07:00
) - > list [ WorkflowRun ] :
try :
async with self . Session ( ) as session :
db_page = page - 1 # offset logic is 0 based
2025-01-24 23:31:26 +08:00
query = (
2025-02-06 03:10:17 +08:00
select ( WorkflowRunModel , WorkflowModel . title )
. join ( WorkflowModel , WorkflowModel . workflow_id == WorkflowRunModel . workflow_id )
2025-01-24 23:31:26 +08:00
. filter ( WorkflowRunModel . workflow_permanent_id == workflow_permanent_id )
. filter ( WorkflowRunModel . organization_id == organization_id )
)
if status :
query = query . filter ( WorkflowRunModel . status . in_ ( status ) )
query = query . order_by ( WorkflowRunModel . created_at . desc ( ) ) . limit ( page_size ) . offset ( db_page * page_size )
2025-02-06 03:10:17 +08:00
workflow_runs_and_titles_tuples = ( await session . execute ( query ) ) . all ( )
workflow_runs = [
convert_to_workflow_run ( run , workflow_title = title , debug_enabled = self . debug_enabled )
for run , title in workflow_runs_and_titles_tuples
]
return workflow_runs
2024-03-01 10:09:30 -08:00
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2025-02-18 23:21:17 +08:00
async def get_workflow_runs_by_parent_workflow_run_id (
self ,
organization_id : str ,
parent_workflow_run_id : str ,
) - > list [ WorkflowRun ] :
try :
async with self . Session ( ) as session :
query = (
select ( WorkflowRunModel )
. filter ( WorkflowRunModel . organization_id == organization_id )
. filter ( WorkflowRunModel . parent_workflow_run_id == parent_workflow_run_id )
)
workflow_runs = ( await session . scalars ( query ) ) . all ( )
return [ convert_to_workflow_run ( run ) for run in workflow_runs ]
except SQLAlchemyError :
LOG . error ( " SQLAlchemyError " , exc_info = True )
raise
2024-03-01 10:09:30 -08:00
async def create_workflow_parameter(
    self,
    workflow_id: str,
    workflow_parameter_type: WorkflowParameterType,
    key: str,
    default_value: Any,
    description: str | None = None,
) -> WorkflowParameter:
    """Create a workflow parameter row; JSON-typed defaults are serialized before storage."""
    try:
        async with self.Session() as session:
            # JSON parameters are persisted as a serialized string.
            if workflow_parameter_type == WorkflowParameterType.JSON:
                default_value = json.dumps(default_value)
            record = WorkflowParameterModel(
                workflow_id=workflow_id,
                workflow_parameter_type=workflow_parameter_type,
                key=key,
                default_value=default_value,
                description=description,
            )
            session.add(record)
            await session.commit()
            await session.refresh(record)
            return convert_to_workflow_parameter(record, self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def create_aws_secret_parameter(
    self,
    workflow_id: str,
    key: str,
    aws_key: str,
    description: str | None = None,
) -> AWSSecretParameter:
    """Create an AWS secret parameter for a workflow and return the converted record."""
    async with self.Session() as session:
        record = AWSSecretParameterModel(
            workflow_id=workflow_id,
            key=key,
            aws_key=aws_key,
            description=description,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return convert_to_aws_secret_parameter(record)
2024-04-03 16:01:03 -07:00
async def create_bitwarden_login_credential_parameter(
    self,
    workflow_id: str,
    bitwarden_client_id_aws_secret_key: str,
    bitwarden_client_secret_aws_secret_key: str,
    bitwarden_master_password_aws_secret_key: str,
    key: str,
    url_parameter_key: str | None = None,
    description: str | None = None,
    bitwarden_collection_id: str | None = None,
    bitwarden_item_id: str | None = None,
) -> BitwardenLoginCredentialParameter:
    """Persist a Bitwarden login-credential parameter for a workflow.

    The AWS secret keys reference where the Bitwarden API credentials are stored;
    the item can be located either by collection + URL parameter or by item id.
    """
    async with self.Session() as session:
        record = BitwardenLoginCredentialParameterModel(
            workflow_id=workflow_id,
            bitwarden_client_id_aws_secret_key=bitwarden_client_id_aws_secret_key,
            bitwarden_client_secret_aws_secret_key=bitwarden_client_secret_aws_secret_key,
            bitwarden_master_password_aws_secret_key=bitwarden_master_password_aws_secret_key,
            url_parameter_key=url_parameter_key,
            key=key,
            description=description,
            bitwarden_collection_id=bitwarden_collection_id,
            bitwarden_item_id=bitwarden_item_id,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return convert_to_bitwarden_login_credential_parameter(record)
2024-07-11 09:48:14 -07:00
async def create_bitwarden_sensitive_information_parameter(
    self,
    workflow_id: str,
    bitwarden_client_id_aws_secret_key: str,
    bitwarden_client_secret_aws_secret_key: str,
    bitwarden_master_password_aws_secret_key: str,
    bitwarden_collection_id: str,
    bitwarden_identity_key: str,
    bitwarden_identity_fields: list[str],
    key: str,
    description: str | None = None,
) -> BitwardenSensitiveInformationParameter:
    """Persist a Bitwarden sensitive-information parameter (identity fields) for a workflow."""
    async with self.Session() as session:
        record = BitwardenSensitiveInformationParameterModel(
            workflow_id=workflow_id,
            bitwarden_client_id_aws_secret_key=bitwarden_client_id_aws_secret_key,
            bitwarden_client_secret_aws_secret_key=bitwarden_client_secret_aws_secret_key,
            bitwarden_master_password_aws_secret_key=bitwarden_master_password_aws_secret_key,
            bitwarden_collection_id=bitwarden_collection_id,
            bitwarden_identity_key=bitwarden_identity_key,
            bitwarden_identity_fields=bitwarden_identity_fields,
            key=key,
            description=description,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return convert_to_bitwarden_sensitive_information_parameter(record)
2024-10-03 16:18:21 -07:00
async def create_bitwarden_credit_card_data_parameter(
    self,
    workflow_id: str,
    bitwarden_client_id_aws_secret_key: str,
    bitwarden_client_secret_aws_secret_key: str,
    bitwarden_master_password_aws_secret_key: str,
    bitwarden_collection_id: str,
    bitwarden_item_id: str,
    key: str,
    description: str | None = None,
) -> BitwardenCreditCardDataParameter:
    """Persist a Bitwarden credit-card-data parameter for a workflow."""
    async with self.Session() as session:
        record = BitwardenCreditCardDataParameterModel(
            workflow_id=workflow_id,
            bitwarden_client_id_aws_secret_key=bitwarden_client_id_aws_secret_key,
            bitwarden_client_secret_aws_secret_key=bitwarden_client_secret_aws_secret_key,
            bitwarden_master_password_aws_secret_key=bitwarden_master_password_aws_secret_key,
            bitwarden_collection_id=bitwarden_collection_id,
            bitwarden_item_id=bitwarden_item_id,
            key=key,
            description=description,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return BitwardenCreditCardDataParameter.model_validate(record)
2024-03-21 17:16:56 -07:00
async def create_output_parameter(
    self,
    workflow_id: str,
    key: str,
    description: str | None = None,
) -> OutputParameter:
    """Create an output parameter for a workflow and return the converted record."""
    async with self.Session() as session:
        record = OutputParameterModel(
            key=key,
            description=description,
            workflow_id=workflow_id,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return convert_to_output_parameter(record)
async def get_workflow_output_parameters(self, workflow_id: str) -> list[OutputParameter]:
    """Return every output parameter defined for the given workflow."""
    try:
        async with self.Session() as session:
            stmt = select(OutputParameterModel).filter_by(workflow_id=workflow_id)
            records = (await session.scalars(stmt)).all()
            return [convert_to_output_parameter(record) for record in records]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-03-17 16:22:44 -07:00
async def get_workflow_output_parameters_by_ids(self, output_parameter_ids: list[str]) -> list[OutputParameter]:
    """Return the output parameters matching any of the given ids (empty list yields no rows)."""
    try:
        async with self.Session() as session:
            stmt = select(OutputParameterModel).filter(
                OutputParameterModel.output_parameter_id.in_(output_parameter_ids)
            )
            records = (await session.scalars(stmt)).all()
            return [convert_to_output_parameter(record) for record in records]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2025-02-14 00:00:19 +08:00
async def create_credential_parameter(
    self, workflow_id: str, key: str, credential_id: str, description: str | None = None
) -> CredentialParameter:
    """Create a credential parameter linking a workflow to a stored credential."""
    async with self.Session() as session:
        record = CredentialParameterModel(
            workflow_id=workflow_id,
            key=key,
            description=description,
            credential_id=credential_id,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        # Built field-by-field rather than via model_validate, matching the original contract.
        return CredentialParameter(
            credential_parameter_id=record.credential_parameter_id,
            workflow_id=record.workflow_id,
            key=record.key,
            description=record.description,
            credential_id=record.credential_id,
            created_at=record.created_at,
            modified_at=record.modified_at,
            deleted_at=record.deleted_at,
        )
2024-03-21 17:16:56 -07:00
async def get_workflow_run_output_parameters(self, workflow_run_id: str) -> list[WorkflowRunOutputParameter]:
    """Return a workflow run's output parameter values, oldest first."""
    try:
        async with self.Session() as session:
            stmt = (
                select(WorkflowRunOutputParameterModel)
                .filter_by(workflow_run_id=workflow_run_id)
                .order_by(WorkflowRunOutputParameterModel.created_at)
            )
            records = (await session.scalars(stmt)).all()
            return [
                convert_to_workflow_run_output_parameter(record, self.debug_enabled)
                for record in records
            ]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2024-09-24 08:51:51 -07:00
async def create_or_update_workflow_run_output_parameter(
    self,
    workflow_run_id: str,
    output_parameter_id: str,
    value: dict[str, Any] | list | str | None,
) -> WorkflowRunOutputParameter:
    """Upsert the value of an output parameter for a workflow run.

    NOTE(review): the select-then-insert is not atomic; concurrent callers could
    both miss the existing row and insert duplicates — confirm whether a DB-level
    upsert/unique constraint is needed.
    """
    try:
        async with self.Session() as session:
            # check if the workflow run output parameter already exists
            # if it does, update the value
            workflow_run_output_parameter = (
                await session.scalars(
                    select(WorkflowRunOutputParameterModel)
                    .filter_by(workflow_run_id=workflow_run_id)
                    .filter_by(output_parameter_id=output_parameter_id)
                )
            ).first()
            if workflow_run_output_parameter:
                LOG.info(
                    f"Updating existing workflow run output parameter with {workflow_run_output_parameter.workflow_run_id} - {workflow_run_output_parameter.output_parameter_id}"
                )
                workflow_run_output_parameter.value = value
                await session.commit()
                await session.refresh(workflow_run_output_parameter)
                return convert_to_workflow_run_output_parameter(workflow_run_output_parameter, self.debug_enabled)
            # if it does not exist, create a new one
            workflow_run_output_parameter = WorkflowRunOutputParameterModel(
                workflow_run_id=workflow_run_id,
                output_parameter_id=output_parameter_id,
                value=value,
            )
            session.add(workflow_run_output_parameter)
            await session.commit()
            await session.refresh(workflow_run_output_parameter)
            return convert_to_workflow_run_output_parameter(workflow_run_output_parameter, self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def update_workflow_run_output_parameter(
    self,
    workflow_run_id: str,
    output_parameter_id: str,
    value: dict[str, Any] | list | str | None,
) -> WorkflowRunOutputParameter:
    """Update an existing output parameter value; raises NotFoundError if the row is absent."""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(WorkflowRunOutputParameterModel)
                    .filter_by(workflow_run_id=workflow_run_id)
                    .filter_by(output_parameter_id=output_parameter_id)
                )
            ).first()
            if record is None:
                raise NotFoundError(
                    f"WorkflowRunOutputParameter not found for {workflow_run_id} and {output_parameter_id}"
                )
            record.value = value
            await session.commit()
            await session.refresh(record)
            return convert_to_workflow_run_output_parameter(record, self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
2024-03-01 10:09:30 -08:00
async def get_workflow_parameters(self, workflow_id: str) -> list[WorkflowParameter]:
    """Return every workflow parameter defined for the given workflow."""
    try:
        async with self.Session() as session:
            stmt = select(WorkflowParameterModel).filter_by(workflow_id=workflow_id)
            records = (await session.scalars(stmt)).all()
            return [convert_to_workflow_parameter(record) for record in records]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def get_workflow_parameter(self, workflow_parameter_id: str) -> WorkflowParameter | None:
    """Return the workflow parameter with the given id, or None if it does not exist."""
    try:
        async with self.Session() as session:
            record = (
                await session.scalars(
                    select(WorkflowParameterModel).filter_by(workflow_parameter_id=workflow_parameter_id)
                )
            ).first()
            if record is None:
                return None
            return convert_to_workflow_parameter(record, self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def create_workflow_run_parameter(
    self, workflow_run_id: str, workflow_parameter: WorkflowParameter, value: Any
) -> WorkflowRunParameter:
    """Record the concrete value bound to a workflow parameter for one workflow run."""
    try:
        async with self.Session() as session:
            record = WorkflowRunParameterModel(
                workflow_run_id=workflow_run_id,
                workflow_parameter_id=workflow_parameter.workflow_parameter_id,
                value=value,
            )
            session.add(record)
            await session.commit()
            await session.refresh(record)
            return convert_to_workflow_run_parameter(record, workflow_parameter, self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def get_workflow_run_parameters(
    self, workflow_run_id: str
) -> list[tuple[WorkflowParameter, WorkflowRunParameter]]:
    """Return (definition, bound value) pairs for every parameter of a workflow run.

    Raises:
        WorkflowParameterNotFound: if a run parameter references a workflow
            parameter that no longer exists.
    """
    try:
        async with self.Session() as session:
            run_parameters = (
                await session.scalars(select(WorkflowRunParameterModel).filter_by(workflow_run_id=workflow_run_id))
            ).all()
            # Fetch all referenced workflow parameters in one query instead of
            # calling self.get_workflow_parameter per row (avoids N+1 round
            # trips, each of which opened its own session).
            parameter_ids = [rp.workflow_parameter_id for rp in run_parameters]
            parameter_records = (
                await session.scalars(
                    select(WorkflowParameterModel).filter(
                        WorkflowParameterModel.workflow_parameter_id.in_(parameter_ids)
                    )
                )
            ).all()
            parameters_by_id = {
                record.workflow_parameter_id: convert_to_workflow_parameter(record, self.debug_enabled)
                for record in parameter_records
            }
            results = []
            for run_parameter in run_parameters:
                workflow_parameter = parameters_by_id.get(run_parameter.workflow_parameter_id)
                if not workflow_parameter:
                    raise WorkflowParameterNotFound(workflow_parameter_id=run_parameter.workflow_parameter_id)
                results.append(
                    (
                        workflow_parameter,
                        convert_to_workflow_run_parameter(
                            run_parameter,
                            workflow_parameter,
                            self.debug_enabled,
                        ),
                    )
                )
            return results
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def get_last_task_for_workflow_run(self, workflow_run_id: str) -> Task | None:
    """Return the most recently created task of a workflow run, or None if it has no tasks."""
    try:
        async with self.Session() as session:
            stmt = (
                select(TaskModel)
                .filter_by(workflow_run_id=workflow_run_id)
                .order_by(TaskModel.created_at.desc())
            )
            latest = (await session.scalars(stmt)).first()
            if latest is None:
                return None
            return convert_to_task(latest, debug_enabled=self.debug_enabled)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def get_tasks_by_workflow_run_id(self, workflow_run_id: str) -> list[Task]:
    """Return all tasks of a workflow run, oldest first."""
    try:
        async with self.Session() as session:
            stmt = select(TaskModel).filter_by(workflow_run_id=workflow_run_id).order_by(TaskModel.created_at)
            records = (await session.scalars(stmt)).all()
            return [convert_to_task(record, debug_enabled=self.debug_enabled) for record in records]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
async def delete_task_artifacts(self, organization_id: str, task_id: str) -> None:
    """Delete all artifacts belonging to a task within an organization."""
    async with self.Session() as session:
        # delete artifacts by filtering organization_id and task_id
        await session.execute(
            delete(ArtifactModel).where(
                and_(
                    ArtifactModel.organization_id == organization_id,
                    ArtifactModel.task_id == task_id,
                )
            )
        )
        await session.commit()
2025-02-23 16:03:49 -08:00
async def delete_task_v2_artifacts(self, task_v2_id: str, organization_id: str | None = None) -> None:
    """Delete all artifacts belonging to a task v2 (observer cruise).

    NOTE(review): when organization_id is None the filter renders as
    `organization_id IS NULL`, so rows with an organization set are NOT
    deleted — confirm that is the intended behavior for the default call.
    """
    async with self.Session() as session:
        await session.execute(
            delete(ArtifactModel).where(
                and_(
                    ArtifactModel.observer_cruise_id == task_v2_id,
                    ArtifactModel.organization_id == organization_id,
                )
            )
        )
        await session.commit()
2024-03-01 10:09:30 -08:00
async def delete_task_steps(self, organization_id: str, task_id: str) -> None:
    """Delete all steps belonging to a task within an organization."""
    async with self.Session() as session:
        # delete steps by filtering organization_id and task_id
        await session.execute(
            delete(StepModel).where(
                and_(
                    StepModel.organization_id == organization_id,
                    StepModel.task_id == task_id,
                )
            )
        )
        await session.commit()
2024-06-07 15:59:53 -07:00
async def create_task_generation(
    self,
    organization_id: str,
    user_prompt: str,
    user_prompt_hash: str,
    url: str | None = None,
    navigation_goal: str | None = None,
    navigation_payload: dict[str, Any] | None = None,
    data_extraction_goal: str | None = None,
    extracted_information_schema: dict[str, Any] | None = None,
    suggested_title: str | None = None,
    llm: str | None = None,
    llm_prompt: str | None = None,
    llm_response: str | None = None,
    source_task_generation_id: str | None = None,
) -> TaskGeneration:
    """Record a task-generation attempt (the prompt, its hash for caching, and the LLM output)."""
    async with self.Session() as session:
        record = TaskGenerationModel(
            organization_id=organization_id,
            user_prompt=user_prompt,
            user_prompt_hash=user_prompt_hash,
            url=url,
            navigation_goal=navigation_goal,
            navigation_payload=navigation_payload,
            data_extraction_goal=data_extraction_goal,
            extracted_information_schema=extracted_information_schema,
            llm=llm,
            llm_prompt=llm_prompt,
            llm_response=llm_response,
            suggested_title=suggested_title,
            source_task_generation_id=source_task_generation_id,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return TaskGeneration.model_validate(record)
2024-09-03 07:00:15 +03:00
2025-01-08 21:45:38 -08:00
async def create_ai_suggestion(
    self,
    organization_id: str,
    ai_suggestion_type: str,
) -> AISuggestion:
    """Create an AI suggestion record of the given type for an organization."""
    async with self.Session() as session:
        record = AISuggestionModel(
            organization_id=organization_id,
            ai_suggestion_type=ai_suggestion_type,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return AISuggestion.model_validate(record)
2024-09-03 07:00:15 +03:00
async def get_task_generation_by_prompt_hash(
    self,
    user_prompt_hash: str,
    query_window_hours: int = settings.PROMPT_CACHE_WINDOW_HOURS,
) -> TaskGeneration | None:
    """Return a cached task generation for a prompt hash within the cache window.

    Only generations that already have an LLM result (llm IS NOT NULL) qualify.
    Returns None on a cache miss.
    """
    before_time = datetime.utcnow() - timedelta(hours=query_window_hours)
    async with self.Session() as session:
        query = (
            select(TaskGenerationModel)
            .filter_by(user_prompt_hash=user_prompt_hash)
            .filter(TaskGenerationModel.llm.is_not(None))
            .filter(TaskGenerationModel.created_at > before_time)
            # Order so .first() deterministically returns the most recent
            # generation; without ORDER BY the row choice is undefined.
            .order_by(TaskGenerationModel.created_at.desc())
        )
        task_generation = (await session.scalars(query)).first()
        if not task_generation:
            return None
        return TaskGeneration.model_validate(task_generation)
2024-09-08 15:07:03 -07:00
async def get_totp_codes(
    self,
    organization_id: str,
    totp_identifier: str,
    valid_lifespan_minutes: int = settings.TOTP_LIFESPAN_MINUTES,
) -> list[TOTPCode]:
    """
    1. filter by:
       - organization_id
       - totp_identifier
    2. make sure created_at is within the valid lifespan
    3. sort by created_at desc
    """
    async with self.Session() as session:
        query = (
            select(TOTPCodeModel)
            .filter_by(organization_id=organization_id)
            .filter_by(totp_identifier=totp_identifier)
            .filter(TOTPCodeModel.created_at > datetime.utcnow() - timedelta(minutes=valid_lifespan_minutes))
            .order_by(TOTPCodeModel.created_at.desc())
        )
        # Distinct names: the original shadowed the result list with the loop
        # variable (`for totp_code in totp_code`), which obscured intent.
        totp_codes = (await session.scalars(query)).all()
        return [TOTPCode.model_validate(totp_code) for totp_code in totp_codes]
2024-10-15 12:06:50 -07:00
2025-01-30 14:06:22 +08:00
async def create_totp_code(
    self,
    organization_id: str,
    totp_identifier: str,
    content: str,
    code: str,
    task_id: str | None = None,
    workflow_id: str | None = None,
    source: str | None = None,
    expired_at: datetime | None = None,
) -> TOTPCode:
    """Store a TOTP code (with its raw content and optional expiry) for later retrieval."""
    async with self.Session() as session:
        record = TOTPCodeModel(
            organization_id=organization_id,
            totp_identifier=totp_identifier,
            content=content,
            code=code,
            task_id=task_id,
            workflow_id=workflow_id,
            source=source,
            expired_at=expired_at,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return TOTPCode.model_validate(record)
2024-10-15 12:06:50 -07:00
async def create_action(self, action: Action) -> Action:
    """Persist an Action; the full action is also stored as JSON in action_json."""
    async with self.Session() as session:
        record = ActionModel(
            action_type=action.action_type,
            source_action_id=action.source_action_id,
            organization_id=action.organization_id,
            workflow_run_id=action.workflow_run_id,
            task_id=action.task_id,
            step_id=action.step_id,
            step_order=action.step_order,
            action_order=action.action_order,
            status=action.status,
            reasoning=action.reasoning,
            intention=action.intention,
            response=action.response,
            element_id=action.element_id,
            skyvern_element_hash=action.skyvern_element_hash,
            skyvern_element_data=action.skyvern_element_data,
            action_json=action.model_dump(),
            confidence_float=action.confidence_float,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return Action.model_validate(record)
async def retrieve_action_plan(self, task: Task) -> list[Action]:
    """Return the ordered actions of the latest completed task with the same url and
    navigation goal as the given task.

    NOTE(review): the lookup is not scoped to an organization — confirm whether
    cross-org matches on url + goal are intended here.
    """
    async with self.Session() as session:
        # Latest completed task matching this task's url and navigation goal.
        latest_match = (
            select(TaskModel.task_id)
            .filter(TaskModel.url == task.url)
            .filter(TaskModel.navigation_goal == task.navigation_goal)
            .filter(TaskModel.status == TaskStatus.completed)
            .order_by(TaskModel.created_at.desc())
            .limit(1)
            .subquery()
        )
        stmt = (
            select(ActionModel)
            .filter(ActionModel.task_id == latest_match.c.task_id)
            .order_by(ActionModel.step_order, ActionModel.action_order, ActionModel.created_at)
        )
        records = (await session.scalars(stmt)).all()
        return [Action.model_validate(record) for record in records]
async def get_previous_actions_for_task(self, task_id: str) -> list[Action]:
    """Return all actions recorded for a task, in execution order."""
    async with self.Session() as session:
        stmt = (
            select(ActionModel)
            .filter_by(task_id=task_id)
            .order_by(ActionModel.step_order, ActionModel.action_order, ActionModel.created_at)
        )
        records = (await session.scalars(stmt)).all()
        return [Action.model_validate(record) for record in records]
2024-10-17 23:24:12 -07:00
async def delete_task_actions(self, organization_id: str, task_id: str) -> None:
    """Delete all actions belonging to a task within an organization."""
    async with self.Session() as session:
        # delete actions by filtering organization_id and task_id
        await session.execute(
            delete(ActionModel).where(
                and_(
                    ActionModel.organization_id == organization_id,
                    ActionModel.task_id == task_id,
                )
            )
        )
        await session.commit()
2024-12-08 12:43:59 -08:00
2025-02-27 20:19:02 -08:00
async def get_task_v2(self, task_v2_id: str, organization_id: str | None = None) -> TaskV2 | None:
    """Return the task v2 with the given id (stored under observer_cruise_id), or None.

    NOTE(review): organization_id=None filters for `organization_id IS NULL`
    rather than skipping the filter — confirm the intended default behavior.
    """
    async with self.Session() as session:
        record = (
            await session.scalars(
                select(TaskV2Model)
                .filter_by(observer_cruise_id=task_v2_id)
                .filter_by(organization_id=organization_id)
            )
        ).first()
        if record is None:
            return None
        return TaskV2.model_validate(record)
2025-02-27 20:19:02 -08:00
async def delete_thoughts(self, task_v2_id: str, organization_id: str | None = None) -> None:
    """Delete all thoughts belonging to a task v2 (observer cruise)."""
    async with self.Session() as session:
        await session.execute(
            delete(ThoughtModel).where(
                and_(
                    ThoughtModel.observer_cruise_id == task_v2_id,
                    ThoughtModel.organization_id == organization_id,
                )
            )
        )
        await session.commit()
2025-02-23 16:03:49 -08:00
async def get_task_v2_by_workflow_run_id(
    self,
    workflow_run_id: str,
    organization_id: str | None = None,
) -> TaskV2 | None:
    """Return the task v2 associated with a workflow run, or None if there is none."""
    async with self.Session() as session:
        record = (
            await session.scalars(
                select(TaskV2Model)
                .filter_by(organization_id=organization_id)
                .filter_by(workflow_run_id=workflow_run_id)
            )
        ).first()
        if record is None:
            return None
        return TaskV2.model_validate(record)
2025-02-27 20:19:02 -08:00
async def get_thought(self, thought_id: str, organization_id: str | None = None) -> Thought | None:
    """Return the thought with the given id (stored under observer_thought_id), or None."""
    async with self.Session() as session:
        record = (
            await session.scalars(
                select(ThoughtModel)
                .filter_by(observer_thought_id=thought_id)
                .filter_by(organization_id=organization_id)
            )
        ).first()
        if record is None:
            return None
        return Thought.model_validate(record)
2025-02-27 20:19:02 -08:00
async def get_thoughts(
    self,
    task_v2_id: str,
    thought_types: list[ThoughtType] | None = None,
    organization_id: str | None = None,
) -> list[Thought]:
    """Return the thoughts of a task v2 in creation order, optionally filtered by type."""
    async with self.Session() as session:
        stmt = (
            select(ThoughtModel)
            .filter_by(observer_cruise_id=task_v2_id)
            .filter_by(organization_id=organization_id)
            .order_by(ThoughtModel.created_at)
        )
        if thought_types:
            stmt = stmt.filter(ThoughtModel.observer_thought_type.in_(thought_types))
        records = (await session.scalars(stmt)).all()
        return [Thought.model_validate(record) for record in records]
2024-12-17 17:17:18 -08:00
2025-02-23 16:03:49 -08:00
async def create_task_v2(
    self,
    workflow_run_id: str | None = None,
    workflow_id: str | None = None,
    workflow_permanent_id: str | None = None,
    prompt: str | None = None,
    url: str | None = None,
    organization_id: str | None = None,
    proxy_location: ProxyLocation | None = None,
    totp_identifier: str | None = None,
    totp_verification_url: str | None = None,
    webhook_callback_url: str | None = None,
    extracted_information_schema: dict | list | str | None = None,
    error_code_mapping: dict | None = None,
) -> TaskV2:
    """Create a task v2 row and return the validated TaskV2 model."""
    async with self.Session() as session:
        record = TaskV2Model(
            workflow_run_id=workflow_run_id,
            workflow_id=workflow_id,
            workflow_permanent_id=workflow_permanent_id,
            prompt=prompt,
            url=url,
            proxy_location=proxy_location,
            totp_identifier=totp_identifier,
            totp_verification_url=totp_verification_url,
            webhook_callback_url=webhook_callback_url,
            extracted_information_schema=extracted_information_schema,
            error_code_mapping=error_code_mapping,
            organization_id=organization_id,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return TaskV2.model_validate(record)
2024-12-08 12:43:59 -08:00
2025-02-27 20:19:02 -08:00
async def create_thought(
    self,
    task_v2_id: str,
    workflow_run_id: str | None = None,
    workflow_id: str | None = None,
    workflow_permanent_id: str | None = None,
    workflow_run_block_id: str | None = None,
    user_input: str | None = None,
    observation: str | None = None,
    thought: str | None = None,
    answer: str | None = None,
    thought_scenario: str | None = None,
    thought_type: str = ThoughtType.plan,
    output: dict[str, Any] | None = None,
    input_token_count: int | None = None,
    output_token_count: int | None = None,
    reasoning_token_count: int | None = None,
    cached_token_count: int | None = None,
    thought_cost: float | None = None,
    organization_id: str | None = None,
) -> Thought:
    """Create a thought for a task v2, including token accounting and cost.

    The task_v2_id / scenario / type map onto the legacy observer_* columns.
    """
    async with self.Session() as session:
        record = ThoughtModel(
            observer_cruise_id=task_v2_id,
            workflow_run_id=workflow_run_id,
            workflow_id=workflow_id,
            workflow_permanent_id=workflow_permanent_id,
            workflow_run_block_id=workflow_run_block_id,
            user_input=user_input,
            observation=observation,
            thought=thought,
            answer=answer,
            observer_thought_scenario=thought_scenario,
            observer_thought_type=thought_type,
            output=output,
            input_token_count=input_token_count,
            output_token_count=output_token_count,
            reasoning_token_count=reasoning_token_count,
            cached_token_count=cached_token_count,
            thought_cost=thought_cost,
            organization_id=organization_id,
        )
        session.add(record)
        await session.commit()
        await session.refresh(record)
        return Thought.model_validate(record)
2024-12-08 12:43:59 -08:00
2025-02-27 20:19:02 -08:00
async def update_thought(
    self,
    thought_id: str,
    workflow_run_block_id: str | None = None,
    workflow_run_id: str | None = None,
    workflow_id: str | None = None,
    workflow_permanent_id: str | None = None,
    observation: str | None = None,
    thought: str | None = None,
    answer: str | None = None,
    output: dict[str, Any] | None = None,
    input_token_count: int | None = None,
    output_token_count: int | None = None,
    reasoning_token_count: int | None = None,
    cached_token_count: int | None = None,
    thought_cost: float | None = None,
    organization_id: str | None = None,
) -> Thought:
    """Partially update a thought row and return the refreshed record.

    Only the fields that are actually provided are written: string/dict
    fields are updated when truthy (matching the file's convention),
    numeric fields whenever they are not None, so a legitimate zero
    (e.g. cached_token_count=0) is persisted.

    Raises:
        NotFoundError: if no thought matches (thought_id, organization_id).
    """
    async with self.Session() as session:
        # The thought id is stored in the legacy observer_thought_id column.
        thought_obj = (
            await session.scalars(
                select(ThoughtModel)
                .filter_by(observer_thought_id=thought_id)
                .filter_by(organization_id=organization_id)
            )
        ).first()
        if not thought_obj:
            raise NotFoundError(f"Thought {thought_id}")
        if workflow_run_block_id:
            thought_obj.workflow_run_block_id = workflow_run_block_id
        if workflow_run_id:
            thought_obj.workflow_run_id = workflow_run_id
        if workflow_id:
            thought_obj.workflow_id = workflow_id
        if workflow_permanent_id:
            thought_obj.workflow_permanent_id = workflow_permanent_id
        if observation:
            thought_obj.observation = observation
        if thought:
            thought_obj.thought = thought
        if answer:
            thought_obj.answer = answer
        if output:
            thought_obj.output = output
        # BUGFIX: numeric fields previously used truthy checks, which silently
        # dropped legitimate zero values (e.g. a cost or token count of 0).
        if input_token_count is not None:
            thought_obj.input_token_count = input_token_count
        if output_token_count is not None:
            thought_obj.output_token_count = output_token_count
        if reasoning_token_count is not None:
            thought_obj.reasoning_token_count = reasoning_token_count
        if cached_token_count is not None:
            thought_obj.cached_token_count = cached_token_count
        if thought_cost is not None:
            thought_obj.thought_cost = thought_cost
        await session.commit()
        await session.refresh(thought_obj)
        return Thought.model_validate(thought_obj)
2024-12-22 23:01:02 -08:00
2025-02-23 16:03:49 -08:00
async def update_task_v2(
    self,
    task_v2_id: str,
    status: TaskV2Status | None = None,
    workflow_run_id: str | None = None,
    workflow_id: str | None = None,
    workflow_permanent_id: str | None = None,
    url: str | None = None,
    prompt: str | None = None,
    summary: str | None = None,
    output: dict[str, Any] | None = None,
    organization_id: str | None = None,
) -> TaskV2:
    """Apply a partial update to a v2 task and return the refreshed record.

    Only truthy arguments are written. Raises NotFoundError when no row
    matches (task_v2_id, organization_id).
    """
    async with self.Session() as session:
        # The v2 task id is stored in the legacy observer_cruise_id column.
        query = (
            select(TaskV2Model)
            .filter_by(observer_cruise_id=task_v2_id)
            .filter_by(organization_id=organization_id)
        )
        record = (await session.scalars(query)).first()
        if not record:
            raise NotFoundError(f"TaskV2 {task_v2_id} not found")
        candidate_updates = {
            "status": status,
            "workflow_run_id": workflow_run_id,
            "workflow_id": workflow_id,
            "workflow_permanent_id": workflow_permanent_id,
            "url": url,
            "prompt": prompt,
            "summary": summary,
            "output": output,
        }
        for field_name, value in candidate_updates.items():
            if value:
                setattr(record, field_name, value)
        await session.commit()
        await session.refresh(record)
        return TaskV2.model_validate(record)
2024-12-20 07:40:32 -08:00
async def create_workflow_run_block(
    self,
    workflow_run_id: str,
    parent_workflow_run_block_id: str | None = None,
    organization_id: str | None = None,
    task_id: str | None = None,
    label: str | None = None,
    block_type: BlockType | None = None,
    status: BlockStatus = BlockStatus.running,
    output: dict | list | str | None = None,
    continue_on_failure: bool = False,
) -> WorkflowRunBlock:
    """Insert a new workflow-run block row and return it as a WorkflowRunBlock."""
    block_row = WorkflowRunBlockModel(
        workflow_run_id=workflow_run_id,
        parent_workflow_run_block_id=parent_workflow_run_block_id,
        organization_id=organization_id,
        task_id=task_id,
        label=label,
        block_type=block_type,
        status=status,
        output=output,
        continue_on_failure=continue_on_failure,
    )
    async with self.Session() as session:
        session.add(block_row)
        await session.commit()
        await session.refresh(block_row)
        # Fetch the linked task (if any) so the converter can embed it.
        task = await self.get_task(task_id, organization_id=organization_id) if task_id else None
        return convert_to_workflow_run_block(block_row, task=task)
2025-01-03 14:22:42 -08:00
async def delete_workflow_run_blocks(self, workflow_run_id: str, organization_id: str | None = None) -> None:
    """Hard-delete all workflow-run block rows belonging to a workflow run."""
    stmt = delete(WorkflowRunBlockModel).where(
        and_(
            WorkflowRunBlockModel.workflow_run_id == workflow_run_id,
            WorkflowRunBlockModel.organization_id == organization_id,
        )
    )
    async with self.Session() as session:
        await session.execute(stmt)
        await session.commit()
2024-12-20 07:40:32 -08:00
async def update_workflow_run_block(
    self,
    workflow_run_block_id: str,
    organization_id: str | None = None,
    status: BlockStatus | None = None,
    output: dict | list | str | None = None,
    failure_reason: str | None = None,
    task_id: str | None = None,
    loop_values: list | None = None,
    current_value: str | None = None,
    current_index: int | None = None,
    recipients: list[str] | None = None,
    attachments: list[str] | None = None,
    subject: str | None = None,
    body: str | None = None,
    prompt: str | None = None,
    wait_sec: int | None = None,
    description: str | None = None,
    block_workflow_run_id: str | None = None,
) -> WorkflowRunBlock:
    """Partially update a workflow-run block and return the converted result.

    String/collection fields are written when truthy (file convention);
    integer fields (current_index, wait_sec) are written whenever not None
    so that a value of 0 is persisted.

    Raises:
        NotFoundError: if no block matches (workflow_run_block_id, organization_id).
    """
    async with self.Session() as session:
        workflow_run_block = (
            await session.scalars(
                select(WorkflowRunBlockModel)
                .filter_by(workflow_run_block_id=workflow_run_block_id)
                .filter_by(organization_id=organization_id)
            )
        ).first()
        if not workflow_run_block:
            raise NotFoundError(f"WorkflowRunBlock {workflow_run_block_id} not found")
        if status:
            workflow_run_block.status = status
        if output:
            workflow_run_block.output = output
        if task_id:
            workflow_run_block.task_id = task_id
        if failure_reason:
            workflow_run_block.failure_reason = failure_reason
        if loop_values:
            workflow_run_block.loop_values = loop_values
        if current_value:
            workflow_run_block.current_value = current_value
        # BUGFIX: was `if current_index:` which made index 0 impossible to store.
        if current_index is not None:
            workflow_run_block.current_index = current_index
        if recipients:
            workflow_run_block.recipients = recipients
        if attachments:
            workflow_run_block.attachments = attachments
        if subject:
            workflow_run_block.subject = subject
        if body:
            workflow_run_block.body = body
        if prompt:
            workflow_run_block.prompt = prompt
        # BUGFIX: was `if wait_sec:` which dropped a zero-second wait.
        if wait_sec is not None:
            workflow_run_block.wait_sec = wait_sec
        if description:
            workflow_run_block.description = description
        if block_workflow_run_id:
            workflow_run_block.block_workflow_run_id = block_workflow_run_id
        await session.commit()
        await session.refresh(workflow_run_block)
        # Fetch the linked task (if any) so the converter can embed it.
        task = None
        task_id = workflow_run_block.task_id
        if task_id:
            task = await self.get_task(task_id, organization_id=workflow_run_block.organization_id)
        return convert_to_workflow_run_block(workflow_run_block, task=task)
2024-12-22 11:16:23 -08:00
async def get_workflow_run_block(
    self,
    workflow_run_block_id: str,
    organization_id: str | None = None,
) -> WorkflowRunBlock:
    """Fetch a single workflow-run block; raise NotFoundError if absent."""
    async with self.Session() as session:
        query = (
            select(WorkflowRunBlockModel)
            .filter_by(workflow_run_block_id=workflow_run_block_id)
            .filter_by(organization_id=organization_id)
        )
        block_row = (await session.scalars(query)).first()
        if block_row is None:
            raise NotFoundError(f"WorkflowRunBlock {workflow_run_block_id} not found")
        # Fetch the linked task (if any) so the converter can embed it.
        task = await self.get_task(block_row.task_id, organization_id=organization_id) if block_row.task_id else None
        return convert_to_workflow_run_block(block_row, task=task)
2024-12-22 20:54:53 -08:00
async def get_workflow_run_blocks(
    self,
    workflow_run_id: str,
    organization_id: str | None = None,
) -> list[WorkflowRunBlock]:
    """Return all blocks of a workflow run, newest first, with tasks attached."""
    async with self.Session() as session:
        query = (
            select(WorkflowRunBlockModel)
            .filter_by(workflow_run_id=workflow_run_id)
            .filter_by(organization_id=organization_id)
            .order_by(WorkflowRunBlockModel.created_at.desc())
        )
        block_rows = (await session.scalars(query)).all()
        # Resolve tasks in one batch and join in memory by task_id.
        tasks = await self.get_tasks_by_workflow_run_id(workflow_run_id)
        tasks_by_id = {task.task_id: task for task in tasks}
        return [
            convert_to_workflow_run_block(row, task=tasks_by_id.get(row.task_id))
            for row in block_rows
        ]
2025-01-08 18:14:38 +01:00
async def get_active_persistent_browser_sessions(self, organization_id: str) -> List[PersistentBrowserSession]:
    """Return every non-deleted persistent browser session owned by the organization."""
    try:
        async with self.Session() as session:
            rows = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).all()
            return [PersistentBrowserSession.model_validate(row) for row in rows]
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-03-20 14:26:14 -07:00
async def get_persistent_browser_session_by_id(
    self, session_id: str, organization_id: str | None = None
) -> PersistentBrowserSession:
    """Get a specific non-deleted persistent browser session, optionally org-scoped.

    Raises:
        NotFoundError: if no matching session exists.

    NOTE: the return annotation was previously Optional[...], but this method
    never returns None — it raises NotFoundError instead.
    """
    try:
        async with self.Session() as session:
            query = (
                select(PersistentBrowserSessionModel)
                .filter_by(persistent_browser_session_id=session_id)
                .filter_by(deleted_at=None)
            )
            if organization_id:
                query = query.filter_by(organization_id=organization_id)
            persistent_browser_session = (await session.scalars(query)).first()
            if persistent_browser_session:
                return PersistentBrowserSession.model_validate(persistent_browser_session)
            raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def get_persistent_browser_session(self, session_id: str, organization_id: str) -> PersistentBrowserSession:
    """Get a specific non-deleted persistent browser session for an organization.

    Raises:
        NotFoundError: if no matching session exists.

    NOTE: the return annotation previously said Optional[PersistentBrowserSessionModel],
    but the body returns a validated PersistentBrowserSession or raises — never None
    and never the raw SQLAlchemy model.
    """
    try:
        async with self.Session() as session:
            persistent_browser_session = (
                await session.scalars(
                    select(PersistentBrowserSessionModel)
                    .filter_by(persistent_browser_session_id=session_id)
                    .filter_by(organization_id=organization_id)
                    .filter_by(deleted_at=None)
                )
            ).first()
            if persistent_browser_session:
                return PersistentBrowserSession.model_validate(persistent_browser_session)
            raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def create_persistent_browser_session(
    self,
    organization_id: str,
    runnable_type: str | None = None,
    runnable_id: str | None = None,
) -> PersistentBrowserSession:
    """Create a new persistent browser session.

    NOTE: the return annotation previously said PersistentBrowserSessionModel,
    but the body returns PersistentBrowserSession.model_validate(...) — the
    Pydantic schema, not the SQLAlchemy model.
    """
    try:
        async with self.Session() as session:
            browser_session = PersistentBrowserSessionModel(
                organization_id=organization_id,
                runnable_type=runnable_type,
                runnable_id=runnable_id,
            )
            session.add(browser_session)
            await session.commit()
            await session.refresh(browser_session)
            return PersistentBrowserSession.model_validate(browser_session)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def mark_persistent_browser_session_deleted(self, session_id: str, organization_id: str) -> None:
    """Soft-delete a persistent browser session by stamping deleted_at."""
    try:
        async with self.Session() as session:
            query = (
                select(PersistentBrowserSessionModel)
                .filter_by(persistent_browser_session_id=session_id)
                .filter_by(organization_id=organization_id)
            )
            browser_session_row = (await session.scalars(query)).first()
            if browser_session_row is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
            browser_session_row.deleted_at = datetime.utcnow()
            await session.commit()
            await session.refresh(browser_session_row)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def occupy_persistent_browser_session(
    self, session_id: str, runnable_type: str, runnable_id: str, organization_id: str
) -> None:
    """Attach a runnable (type + id) to a non-deleted persistent browser session."""
    try:
        async with self.Session() as session:
            query = (
                select(PersistentBrowserSessionModel)
                .filter_by(persistent_browser_session_id=session_id)
                .filter_by(organization_id=organization_id)
                .filter_by(deleted_at=None)
            )
            browser_session_row = (await session.scalars(query)).first()
            if browser_session_row is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
            browser_session_row.runnable_type = runnable_type
            browser_session_row.runnable_id = runnable_id
            await session.commit()
            await session.refresh(browser_session_row)
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def release_persistent_browser_session(self, session_id: str, organization_id: str) -> None:
    """Detach any runnable from a non-deleted persistent browser session."""
    try:
        async with self.Session() as session:
            query = (
                select(PersistentBrowserSessionModel)
                .filter_by(persistent_browser_session_id=session_id)
                .filter_by(organization_id=organization_id)
                .filter_by(deleted_at=None)
            )
            browser_session_row = (await session.scalars(query)).first()
            if browser_session_row is None:
                raise NotFoundError(f"PersistentBrowserSession {session_id} not found")
            browser_session_row.runnable_type = None
            browser_session_row.runnable_id = None
            await session.commit()
            await session.refresh(browser_session_row)
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except NotFoundError:
        LOG.error("NotFoundError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
async def get_all_active_persistent_browser_sessions(self) -> List[PersistentBrowserSessionModel]:
    """Get all active (non-deleted) persistent browser sessions across all organizations.

    Returns the raw SQLAlchemy model rows, unlike the org-scoped variant which
    returns validated PersistentBrowserSession schemas.
    """
    try:
        async with self.Session() as session:
            result = await session.execute(select(PersistentBrowserSessionModel).filter_by(deleted_at=None))
            # .all() on a ScalarResult yields a Sequence; materialize a list to
            # actually satisfy the List[...] return annotation.
            return list(result.scalars().all())
    except SQLAlchemyError:
        LOG.error("SQLAlchemyError", exc_info=True)
        raise
    except Exception:
        LOG.error("UnexpectedError", exc_info=True)
        raise
2025-02-09 20:30:19 +08:00
async def create_task_run(
    self,
    task_run_type: RunType,
    organization_id: str,
    run_id: str,
    title: str | None = None,
    url: str | None = None,
    url_hash: str | None = None,
) -> Run:
    """Insert a task-run row and return it as a validated Run."""
    run_row = TaskRunModel(
        task_run_type=task_run_type,
        organization_id=organization_id,
        run_id=run_id,
        title=title,
        url=url,
        url_hash=url_hash,
    )
    async with self.Session() as session:
        session.add(run_row)
        await session.commit()
        await session.refresh(run_row)
        return Run.model_validate(run_row)
2025-02-11 14:47:41 +08:00
2025-02-14 00:00:19 +08:00
async def create_credential(
    self,
    name: str,
    credential_type: CredentialType,
    organization_id: str,
    item_id: str,
) -> Credential:
    """Insert a credential row and return it as a validated Credential."""
    credential_row = CredentialModel(
        organization_id=organization_id,
        name=name,
        credential_type=credential_type,
        item_id=item_id,
    )
    async with self.Session() as session:
        session.add(credential_row)
        await session.commit()
        await session.refresh(credential_row)
        return Credential.model_validate(credential_row)
async def get_credential(self, credential_id: str, organization_id: str) -> Credential:
    """Fetch a non-deleted credential by id within an organization.

    Raises NotFoundError when no matching row exists.
    """
    async with self.Session() as session:
        query = (
            select(CredentialModel)
            .filter_by(credential_id=credential_id)
            .filter_by(organization_id=organization_id)
            .filter(CredentialModel.deleted_at.is_(None))
        )
        credential_row = (await session.scalars(query)).first()
        if credential_row is None:
            raise NotFoundError(f"Credential {credential_id} not found")
        return Credential.model_validate(credential_row)
2025-02-20 13:50:41 -08:00
async def get_credentials(self, organization_id: str, page: int = 1, page_size: int = 10) -> list[Credential]:
    """List non-deleted credentials for an organization, newest first, paginated (1-based page)."""
    async with self.Session() as session:
        query = (
            select(CredentialModel)
            .filter_by(organization_id=organization_id)
            .filter(CredentialModel.deleted_at.is_(None))
            .order_by(CredentialModel.created_at.desc())
            .offset((page - 1) * page_size)
            .limit(page_size)
        )
        rows = (await session.scalars(query)).all()
        return [Credential.model_validate(row) for row in rows]
async def update_credential(
    self, credential_id: str, organization_id: str, name: str | None = None, website_url: str | None = None
) -> Credential:
    """Update the name and/or website_url of a credential (truthy values only).

    Raises NotFoundError when no matching row exists.
    """
    async with self.Session() as session:
        query = (
            select(CredentialModel)
            .filter_by(credential_id=credential_id)
            .filter_by(organization_id=organization_id)
        )
        credential_row = (await session.scalars(query)).first()
        if credential_row is None:
            raise NotFoundError(f"Credential {credential_id} not found")
        if name:
            credential_row.name = name
        if website_url:
            credential_row.website_url = website_url
        await session.commit()
        await session.refresh(credential_row)
        return Credential.model_validate(credential_row)
async def delete_credential(self, credential_id: str, organization_id: str) -> None:
    """Soft-delete a credential by stamping deleted_at.

    Raises NotFoundError when no matching row exists.
    """
    async with self.Session() as session:
        query = (
            select(CredentialModel)
            .filter_by(credential_id=credential_id)
            .filter_by(organization_id=organization_id)
        )
        credential_row = (await session.scalars(query)).first()
        if credential_row is None:
            raise NotFoundError(f"Credential {credential_id} not found")
        credential_row.deleted_at = datetime.utcnow()
        await session.commit()
        await session.refresh(credential_row)
        return None
2025-02-20 13:50:41 -08:00
async def create_organization_bitwarden_collection(
    self,
    organization_id: str,
    collection_id: str,
) -> OrganizationBitwardenCollection:
    """Insert an organization→Bitwarden-collection mapping and return it validated."""
    mapping_row = OrganizationBitwardenCollectionModel(
        organization_id=organization_id, collection_id=collection_id
    )
    async with self.Session() as session:
        session.add(mapping_row)
        await session.commit()
        await session.refresh(mapping_row)
        return OrganizationBitwardenCollection.model_validate(mapping_row)
async def get_organization_bitwarden_collection(
    self,
    organization_id: str,
) -> OrganizationBitwardenCollection | None:
    """Return the organization's Bitwarden collection mapping, or None if it has none."""
    async with self.Session() as session:
        query = select(OrganizationBitwardenCollectionModel).filter_by(organization_id=organization_id)
        mapping_row = (await session.scalars(query)).first()
        if mapping_row is None:
            return None
        return OrganizationBitwardenCollection.model_validate(mapping_row)
2025-03-30 18:41:24 -07:00
async def cache_task_run(self, run_id: str, organization_id: str | None = None) -> Run:
    """Flag a task run as cached and return the refreshed Run.

    Raises NotFoundError when no matching row exists.
    """
    async with self.Session() as session:
        query = select(TaskRunModel).filter_by(organization_id=organization_id).filter_by(run_id=run_id)
        run_row = (await session.scalars(query)).first()
        if run_row is None:
            raise NotFoundError(f"Run {run_id} not found")
        run_row.cached = True
        await session.commit()
        await session.refresh(run_row)
        return Run.model_validate(run_row)
2025-02-11 14:47:41 +08:00
async def get_cached_task_run(
    self, task_run_type: RunType, url_hash: str | None = None, organization_id: str | None = None
) -> Run | None:
    """Return the most recently created cached run matching the filters, or None."""
    async with self.Session() as session:
        query = select(TaskRunModel)
        if task_run_type:
            query = query.filter_by(task_run_type=task_run_type)
        if url_hash:
            query = query.filter_by(url_hash=url_hash)
        if organization_id:
            query = query.filter_by(organization_id=organization_id)
        # Only cached runs qualify; newest wins.
        query = query.filter_by(cached=True).order_by(TaskRunModel.created_at.desc())
        run_row = (await session.scalars(query)).first()
        if run_row is None:
            return None
        return Run.model_validate(run_row)
2025-03-16 13:21:40 -07:00
2025-03-30 21:14:52 -07:00
async def get_run(
    self,
    run_id: str,
    organization_id: str | None = None,
) -> Run | None:
    """Look up a run by id (optionally org-scoped); return None when absent."""
    async with self.Session() as session:
        query = select(TaskRunModel).filter_by(run_id=run_id)
        if organization_id:
            query = query.filter_by(organization_id=organization_id)
        run_row = (await session.scalars(query)).first()
        if run_row is None:
            return None
        return Run.model_validate(run_row)