log duration and prompt_name in llm failure log (#2578)

This commit is contained in:
Shuchang Zheng
2025-06-03 10:24:47 -07:00
committed by GitHub
parent 258b6ac660
commit 2ddcb9eb0b

View File

@@ -149,24 +149,33 @@ class LLMAPIHandlerFactory:
         except litellm.exceptions.APIError as e:
             raise LLMProviderErrorRetryableTask(local_llm_key) from e
         except litellm.exceptions.ContextWindowExceededError as e:
+            duration_seconds = time.time() - start_time
             LOG.exception(
                 "Context window exceeded",
                 llm_key=local_llm_key,
                 model=main_model_group,
+                prompt_name=prompt_name,
+                duration_seconds=duration_seconds,
             )
             raise SkyvernContextWindowExceededError() from e
         except ValueError as e:
+            duration_seconds = time.time() - start_time
             LOG.exception(
                 "LLM token limit exceeded",
                 llm_key=local_llm_key,
                 model=main_model_group,
+                prompt_name=prompt_name,
+                duration_seconds=duration_seconds,
             )
             raise LLMProviderErrorRetryableTask(local_llm_key) from e
         except Exception as e:
+            duration_seconds = time.time() - start_time
             LOG.exception(
                 "LLM request failed unexpectedly",
                 llm_key=local_llm_key,
                 model=main_model_group,
+                prompt_name=prompt_name,
+                duration_seconds=duration_seconds,
             )
             raise LLMProviderError(local_llm_key) from e
@@ -352,10 +361,13 @@ class LLMAPIHandlerFactory:
         except litellm.exceptions.APIError as e:
             raise LLMProviderErrorRetryableTask(local_llm_key) from e
         except litellm.exceptions.ContextWindowExceededError as e:
+            duration_seconds = time.time() - start_time
             LOG.exception(
                 "Context window exceeded",
                 llm_key=local_llm_key,
                 model=model_name,
+                prompt_name=prompt_name,
+                duration_seconds=duration_seconds,
             )
             raise SkyvernContextWindowExceededError() from e
         except CancelledError:
@@ -364,11 +376,19 @@ class LLMAPIHandlerFactory:
                 "LLM request got cancelled",
                 llm_key=local_llm_key,
                 model=model_name,
+                prompt_name=prompt_name,
                 duration=t_llm_cancelled - t_llm_request,
             )
             raise LLMProviderError(local_llm_key)
         except Exception as e:
-            LOG.exception("LLM request failed unexpectedly", llm_key=local_llm_key)
+            duration_seconds = time.time() - start_time
+            LOG.exception(
+                "LLM request failed unexpectedly",
+                llm_key=local_llm_key,
+                model=model_name,
+                prompt_name=prompt_name,
+                duration_seconds=duration_seconds,
+            )
             raise LLMProviderError(local_llm_key) from e
         await app.ARTIFACT_MANAGER.create_llm_artifact(