Implement LLM router (#95)
This commit is contained in:
@@ -4,21 +4,11 @@ class SkyvernException(Exception):
|
||||
super().__init__(message)
|
||||
|
||||
|
||||
class NoAvailableOpenAIClients(SkyvernException):
    """Signals that no OpenAI API client is currently available to serve a request."""

    def __init__(self) -> None:
        # Fixed message: this error carries no per-instance detail.
        detail = "No available OpenAI API clients found."
        super().__init__(detail)
|
||||
|
||||
|
||||
class InvalidOpenAIResponseFormat(SkyvernException):
    """Raised when an OpenAI response does not match the expected format.

    Args:
        message: Optional detail about the malformed response; interpolated
            verbatim into the exception text (renders as "None" when omitted).
    """

    # -> None added for consistency with NoAvailableOpenAIClients.__init__.
    def __init__(self, message: str | None = None) -> None:
        super().__init__(f"Invalid response format: {message}")
|
||||
|
||||
|
||||
class OpenAIRequestTooBigError(SkyvernException):
    """Raised when an OpenAI request is rejected with an HTTP 429 (too big / rate limited).

    Args:
        message: Optional detail from the failed request; interpolated
            verbatim into the exception text (renders as "None" when omitted).
    """

    # -> None added for consistency with NoAvailableOpenAIClients.__init__.
    def __init__(self, message: str | None = None) -> None:
        super().__init__(f"OpenAI request 429 error: {message}")
|
||||
|
||||
|
||||
class FailedToSendWebhook(SkyvernException):
|
||||
def __init__(self, task_id: str | None = None, workflow_run_id: str | None = None, workflow_id: str | None = None):
|
||||
workflow_run_str = f"workflow_run_id={workflow_run_id}" if workflow_run_id else ""
|
||||
|
||||
Reference in New Issue
Block a user