Fix `drop_params` bug (#3756)

This commit is contained in:
Shuchang Zheng
2025-10-17 12:00:34 -07:00
committed by GitHub
parent 75ce98e841
commit 770ddadc2f

View File

@@ -213,9 +213,6 @@ if settings.ENABLE_OPENAI:
max_completion_tokens=100000,
temperature=None, # Temperature isn't supported in the O-model series
reasoning_effort="high",
litellm_params=LiteLLMParams(
drop_params=True, # type: ignore
),
),
)
LLMConfigRegistry.register_config(
@@ -228,9 +225,6 @@ if settings.ENABLE_OPENAI:
max_completion_tokens=100000,
temperature=None, # Temperature isn't supported in the O-model series
reasoning_effort="high",
litellm_params=LiteLLMParams(
drop_params=True, # type: ignore
),
),
)