fix UnboundLocalError: "cannot access local variable 'prompt_tokens' where it is not associated with a value" (#3286)

This commit is contained in:
Shuchang Zheng
2025-08-24 14:25:35 -07:00
committed by GitHub
parent 53d8c69e08
commit b9470ffb44

View File

@@ -198,6 +198,13 @@ class LLMAPIHandlerFactory:
thought=thought,
ai_suggestion=ai_suggestion,
)
prompt_tokens = 0
completion_tokens = 0
reasoning_tokens = 0
cached_tokens = 0
completion_token_detail = None
cached_token_detail = None
llm_cost = 0
if step or thought:
try:
# FIXME: volcengine doesn't support litellm cost calculation.