Return a dummy LLM API handler which raises an exception if called (#1954)

This commit is contained in:
Shuchang Zheng
2025-03-17 12:37:10 -07:00
committed by GitHub
parent 6cc595d04c
commit 205e2b35c0
3 changed files with 19 additions and 3 deletions

View File

@@ -93,3 +93,16 @@ class LLMAPIHandler(Protocol):
screenshots: list[bytes] | None = None,
parameters: dict[str, Any] | None = None,
) -> Awaitable[dict[str, Any]]: ...
async def dummy_llm_api_handler(
    prompt: str,
    prompt_name: str,
    step: Step | None = None,
    task_v2: TaskV2 | None = None,
    thought: Thought | None = None,
    ai_suggestion: AISuggestion | None = None,
    screenshots: list[bytes] | None = None,
    parameters: dict[str, Any] | None = None,
) -> dict[str, Any]:
    """Fallback LLM handler installed when no provider is configured.

    Mirrors the ``LLMAPIHandler`` call signature so it can stand in for a
    real handler, but never produces a response: every invocation fails
    immediately.

    Raises:
        NotImplementedError: always, directing the user to configure an
            LLM provider in the ``.env`` file.
    """
    # Fail loudly at call time rather than at import time, so the app can
    # start without an LLM provider and only error when one is needed.
    message = "Your LLM provider is not configured. Please configure it in the .env file."
    raise NotImplementedError(message)