Add run lifecycle timestamps - improvements (#2601)
Co-authored-by: Shuchang Zheng <wintonzheng0325@gmail.com>
This commit is contained in:
committed by
GitHub
parent
90feb96b0f
commit
b5bf9d291f
45
alembic/versions/2025_06_02_0050-add_run_timestamps.py
Normal file
45
alembic/versions/2025_06_02_0050-add_run_timestamps.py
Normal file
@@ -0,0 +1,45 @@
|
||||
"""add queued_at started_at finished_at columns
|
||||
|
||||
Revision ID: add_run_timestamps
|
||||
Revises: babaa7307e8a
|
||||
Create Date: 2025-06-02 00:50:00+00:00
|
||||
"""
|
||||
|
||||
from typing import Sequence, Union
|
||||
|
||||
import sqlalchemy as sa
|
||||
|
||||
from alembic import op
|
||||
|
||||
revision: str = "add_run_timestamps"
|
||||
down_revision: Union[str, None] = "babaa7307e8a"
|
||||
branch_labels: Union[str, Sequence[str], None] = None
|
||||
depends_on: Union[str, Sequence[str], None] = None
|
||||
|
||||
|
||||
def upgrade() -> None:
|
||||
op.add_column("tasks", sa.Column("queued_at", sa.DateTime(), nullable=True))
|
||||
op.add_column("tasks", sa.Column("started_at", sa.DateTime(), nullable=True))
|
||||
op.add_column("tasks", sa.Column("finished_at", sa.DateTime(), nullable=True))
|
||||
|
||||
op.add_column("observer_cruises", sa.Column("queued_at", sa.DateTime(), nullable=True))
|
||||
op.add_column("observer_cruises", sa.Column("started_at", sa.DateTime(), nullable=True))
|
||||
op.add_column("observer_cruises", sa.Column("finished_at", sa.DateTime(), nullable=True))
|
||||
|
||||
op.add_column("workflow_runs", sa.Column("queued_at", sa.DateTime(), nullable=True))
|
||||
op.add_column("workflow_runs", sa.Column("started_at", sa.DateTime(), nullable=True))
|
||||
op.add_column("workflow_runs", sa.Column("finished_at", sa.DateTime(), nullable=True))
|
||||
|
||||
|
||||
def downgrade() -> None:
|
||||
op.drop_column("workflow_runs", "finished_at")
|
||||
op.drop_column("workflow_runs", "started_at")
|
||||
op.drop_column("workflow_runs", "queued_at")
|
||||
|
||||
op.drop_column("observer_cruises", "finished_at")
|
||||
op.drop_column("observer_cruises", "started_at")
|
||||
op.drop_column("observer_cruises", "queued_at")
|
||||
|
||||
op.drop_column("tasks", "finished_at")
|
||||
op.drop_column("tasks", "started_at")
|
||||
op.drop_column("tasks", "queued_at")
|
||||
@@ -2,12 +2,17 @@ import json
|
||||
import os
|
||||
from typing import Any, Optional, cast
|
||||
|
||||
import pytest
|
||||
import requests
|
||||
from dotenv import load_dotenv
|
||||
|
||||
from skyvern.forge import app
|
||||
from skyvern.forge.sdk.schemas.tasks import TaskRequest
|
||||
|
||||
# Skip tests if network access is not available
|
||||
pytest.skip("requires network access", allow_module_level=True)
|
||||
|
||||
# Load environment variables and set up configuration
|
||||
load_dotenv("./skyvern-frontend/.env")
|
||||
API_KEY = os.getenv("VITE_SKYVERN_API_KEY")
|
||||
|
||||
|
||||
@@ -625,6 +625,12 @@ class AgentDB:
|
||||
).first():
|
||||
if status is not None:
|
||||
task.status = status
|
||||
if status == TaskStatus.queued and task.queued_at is None:
|
||||
task.queued_at = datetime.utcnow()
|
||||
if status == TaskStatus.running and task.started_at is None:
|
||||
task.started_at = datetime.utcnow()
|
||||
if status.is_final() and task.finished_at is None:
|
||||
task.finished_at = datetime.utcnow()
|
||||
if extracted_information is not None:
|
||||
task.extracted_information = extracted_information
|
||||
if failure_reason is not None:
|
||||
@@ -1499,6 +1505,12 @@ class AgentDB:
|
||||
if workflow_run:
|
||||
workflow_run.status = status
|
||||
workflow_run.failure_reason = failure_reason
|
||||
if status == WorkflowRunStatus.queued and workflow_run.queued_at is None:
|
||||
workflow_run.queued_at = datetime.utcnow()
|
||||
if status == WorkflowRunStatus.running and workflow_run.started_at is None:
|
||||
workflow_run.started_at = datetime.utcnow()
|
||||
if status.is_final() and workflow_run.finished_at is None:
|
||||
workflow_run.finished_at = datetime.utcnow()
|
||||
await session.commit()
|
||||
await session.refresh(workflow_run)
|
||||
await save_workflow_run_logs(workflow_run_id)
|
||||
@@ -2542,6 +2554,12 @@ class AgentDB:
|
||||
if task_v2:
|
||||
if status:
|
||||
task_v2.status = status
|
||||
if status == TaskV2Status.queued and task_v2.queued_at is None:
|
||||
task_v2.queued_at = datetime.utcnow()
|
||||
if status == TaskV2Status.running and task_v2.started_at is None:
|
||||
task_v2.started_at = datetime.utcnow()
|
||||
if status.is_final() and task_v2.finished_at is None:
|
||||
task_v2.finished_at = datetime.utcnow()
|
||||
if workflow_run_id:
|
||||
task_v2.workflow_run_id = workflow_run_id
|
||||
if workflow_id:
|
||||
|
||||
@@ -84,6 +84,9 @@ class TaskModel(Base):
|
||||
max_steps_per_run = Column(Integer, nullable=True)
|
||||
application = Column(String, nullable=True)
|
||||
include_action_history_in_verification = Column(Boolean, default=False, nullable=True)
|
||||
queued_at = Column(DateTime, nullable=True)
|
||||
started_at = Column(DateTime, nullable=True)
|
||||
finished_at = Column(DateTime, nullable=True)
|
||||
created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False, index=True)
|
||||
modified_at = Column(
|
||||
DateTime,
|
||||
@@ -251,6 +254,10 @@ class WorkflowRunModel(Base):
|
||||
totp_verification_url = Column(String)
|
||||
totp_identifier = Column(String)
|
||||
|
||||
queued_at = Column(DateTime, nullable=True)
|
||||
started_at = Column(DateTime, nullable=True)
|
||||
finished_at = Column(DateTime, nullable=True)
|
||||
|
||||
created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
|
||||
modified_at = Column(
|
||||
DateTime,
|
||||
@@ -592,6 +599,10 @@ class TaskV2Model(Base):
|
||||
error_code_mapping = Column(JSON, nullable=True)
|
||||
max_steps = Column(Integer, nullable=True)
|
||||
|
||||
queued_at = Column(DateTime, nullable=True)
|
||||
started_at = Column(DateTime, nullable=True)
|
||||
finished_at = Column(DateTime, nullable=True)
|
||||
|
||||
created_at = Column(DateTime, default=datetime.datetime.utcnow, nullable=False)
|
||||
modified_at = Column(DateTime, default=datetime.datetime.utcnow, onupdate=datetime.datetime.utcnow, nullable=False)
|
||||
model = Column(JSON, nullable=True)
|
||||
|
||||
@@ -139,6 +139,9 @@ def convert_to_task(task_obj: TaskModel, debug_enabled: bool = False, workflow_p
|
||||
errors=task_obj.errors,
|
||||
application=task_obj.application,
|
||||
model=task_obj.model,
|
||||
queued_at=task_obj.queued_at,
|
||||
started_at=task_obj.started_at,
|
||||
finished_at=task_obj.finished_at,
|
||||
)
|
||||
return task
|
||||
|
||||
@@ -269,6 +272,9 @@ def convert_to_workflow_run(
|
||||
webhook_callback_url=workflow_run_model.webhook_callback_url,
|
||||
totp_verification_url=workflow_run_model.totp_verification_url,
|
||||
totp_identifier=workflow_run_model.totp_identifier,
|
||||
queued_at=workflow_run_model.queued_at,
|
||||
started_at=workflow_run_model.started_at,
|
||||
finished_at=workflow_run_model.finished_at,
|
||||
created_at=workflow_run_model.created_at,
|
||||
modified_at=workflow_run_model.modified_at,
|
||||
workflow_title=workflow_title,
|
||||
|
||||
@@ -45,6 +45,9 @@ class TaskV2(BaseModel):
|
||||
extracted_information_schema: dict | list | str | None = None
|
||||
error_code_mapping: dict | None = None
|
||||
model: dict[str, Any] | None = None
|
||||
queued_at: datetime | None = None
|
||||
started_at: datetime | None = None
|
||||
finished_at: datetime | None = None
|
||||
created_at: datetime
|
||||
modified_at: datetime
|
||||
|
||||
|
||||
@@ -239,6 +239,9 @@ class Task(TaskBase):
|
||||
max_steps_per_run: int | None = None
|
||||
errors: list[dict[str, Any]] = []
|
||||
model: dict[str, Any] | None = None
|
||||
queued_at: datetime | None = None
|
||||
started_at: datetime | None = None
|
||||
finished_at: datetime | None = None
|
||||
|
||||
@property
|
||||
def llm_key(self) -> str | None:
|
||||
@@ -297,6 +300,9 @@ class Task(TaskBase):
|
||||
status=self.status,
|
||||
created_at=self.created_at,
|
||||
modified_at=self.modified_at,
|
||||
queued_at=self.queued_at,
|
||||
started_at=self.started_at,
|
||||
finished_at=self.finished_at,
|
||||
extracted_information=self.extracted_information,
|
||||
failure_reason=failure_reason or self.failure_reason,
|
||||
action_screenshot_urls=action_screenshot_urls,
|
||||
@@ -328,6 +334,9 @@ class TaskResponse(BaseModel):
|
||||
errors: list[dict[str, Any]] = []
|
||||
max_steps_per_run: int | None = None
|
||||
workflow_run_id: str | None = None
|
||||
queued_at: datetime | None = None
|
||||
started_at: datetime | None = None
|
||||
finished_at: datetime | None = None
|
||||
|
||||
|
||||
class TaskOutput(BaseModel):
|
||||
|
||||
@@ -116,6 +116,9 @@ class WorkflowRun(BaseModel):
|
||||
parent_workflow_run_id: str | None = None
|
||||
workflow_title: str | None = None
|
||||
|
||||
queued_at: datetime | None = None
|
||||
started_at: datetime | None = None
|
||||
finished_at: datetime | None = None
|
||||
created_at: datetime
|
||||
modified_at: datetime
|
||||
|
||||
@@ -143,6 +146,9 @@ class WorkflowRunResponseBase(BaseModel):
|
||||
webhook_callback_url: str | None = None
|
||||
totp_verification_url: str | None = None
|
||||
totp_identifier: str | None = None
|
||||
queued_at: datetime | None = None
|
||||
started_at: datetime | None = None
|
||||
finished_at: datetime | None = None
|
||||
created_at: datetime
|
||||
modified_at: datetime
|
||||
parameters: dict[str, Any]
|
||||
|
||||
@@ -1125,6 +1125,9 @@ class WorkflowService:
|
||||
webhook_callback_url=workflow_run.webhook_callback_url,
|
||||
totp_verification_url=workflow_run.totp_verification_url,
|
||||
totp_identifier=workflow_run.totp_identifier,
|
||||
queued_at=workflow_run.queued_at,
|
||||
started_at=workflow_run.started_at,
|
||||
finished_at=workflow_run.finished_at,
|
||||
created_at=workflow_run.created_at,
|
||||
modified_at=workflow_run.modified_at,
|
||||
parameters=parameters_with_value,
|
||||
|
||||
@@ -357,6 +357,9 @@ class BaseRunResponse(BaseModel):
|
||||
modified_at: datetime = Field(
|
||||
description="Timestamp when this run was last modified", examples=["2025-01-01T00:05:00Z"]
|
||||
)
|
||||
queued_at: datetime | None = Field(default=None, description="Timestamp when this run was queued")
|
||||
started_at: datetime | None = Field(default=None, description="Timestamp when this run started execution")
|
||||
finished_at: datetime | None = Field(default=None, description="Timestamp when this run finished")
|
||||
app_url: str | None = Field(
|
||||
default=None,
|
||||
description="URL to the application UI where the run can be viewed",
|
||||
|
||||
@@ -44,6 +44,9 @@ async def get_run_response(run_id: str, organization_id: str | None = None) -> R
|
||||
status=str(task_v1_response.status),
|
||||
output=task_v1_response.extracted_information,
|
||||
failure_reason=task_v1_response.failure_reason,
|
||||
queued_at=task_v1_response.queued_at,
|
||||
started_at=task_v1_response.started_at,
|
||||
finished_at=task_v1_response.finished_at,
|
||||
created_at=task_v1_response.created_at,
|
||||
modified_at=task_v1_response.modified_at,
|
||||
app_url=f"{settings.SKYVERN_APP_URL.rstrip('/')}/tasks/{task_v1_response.task_id}",
|
||||
|
||||
@@ -1629,6 +1629,9 @@ async def build_task_v2_run_response(task_v2: TaskV2) -> TaskRunResponse:
|
||||
status=task_v2.status,
|
||||
output=task_v2.output,
|
||||
failure_reason=workflow_run_resp.failure_reason if workflow_run_resp else None,
|
||||
queued_at=task_v2.queued_at,
|
||||
started_at=task_v2.started_at,
|
||||
finished_at=task_v2.finished_at,
|
||||
created_at=task_v2.created_at,
|
||||
modified_at=task_v2.modified_at,
|
||||
recording_url=workflow_run_resp.recording_url if workflow_run_resp else None,
|
||||
|
||||
@@ -83,6 +83,9 @@ async def get_workflow_run_response(
|
||||
recording_url=workflow_run_resp.recording_url,
|
||||
screenshot_urls=workflow_run_resp.screenshot_urls,
|
||||
failure_reason=workflow_run_resp.failure_reason,
|
||||
queued_at=workflow_run.queued_at,
|
||||
started_at=workflow_run.started_at,
|
||||
finished_at=workflow_run.finished_at,
|
||||
app_url=app_url,
|
||||
created_at=workflow_run.created_at,
|
||||
modified_at=workflow_run.modified_at,
|
||||
|
||||
@@ -0,0 +1,5 @@
|
||||
import sys
from pathlib import Path

# Repository root: one directory above the folder containing this file.
ROOT = Path(__file__).resolve().parents[1]
# Prepend it to sys.path so tests can import the project package
# without an editable install.
sys.path.insert(0, str(ROOT))
|
||||
|
||||
@@ -1,4 +1,18 @@
|
||||
from skyvern.utils.url_validators import encode_url
|
||||
import sys
|
||||
import types
|
||||
from importlib import import_module
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
pytest.skip("Dependencies missing", allow_module_level=True)
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).resolve().parents[2]))
|
||||
ddtrace_stub = types.SimpleNamespace(tracer=None, filters=types.SimpleNamespace(FilterRequestsOnUrl=lambda x: None))
|
||||
sys.modules.setdefault("ddtrace", ddtrace_stub)
|
||||
sys.modules.setdefault("ddtrace.filters", ddtrace_stub.filters)
|
||||
|
||||
encode_url = import_module("skyvern.utils.url_validators").encode_url
|
||||
|
||||
|
||||
def test_encode_url_basic():
|
||||
|
||||
Reference in New Issue
Block a user