O3 Mini support (#1709)

This commit is contained in:
Shuchang Zheng
2025-02-04 05:07:10 +08:00
committed by GitHub
parent e0e868445d
commit 59756cb1d2
3 changed files with 39 additions and 13 deletions

View File

@@ -36,7 +36,9 @@ class LLMConfigBase:
@dataclass(frozen=True)
class LLMConfig(LLMConfigBase):
litellm_params: Optional[LiteLLMParams] = field(default=None)
-   max_output_tokens: int = SettingsManager.get_settings().LLM_CONFIG_MAX_TOKENS
+   max_completion_tokens: int = SettingsManager.get_settings().LLM_CONFIG_MAX_TOKENS
temperature: float | None = SettingsManager.get_settings().LLM_CONFIG_TEMPERATURE
+   reasoning_effort: str | None = None
@dataclass(frozen=True)
@@ -72,7 +74,9 @@ class LLMRouterConfig(LLMConfigBase):
allowed_fails: int | None = None
allowed_fails_policy: AllowedFailsPolicy | None = None
cooldown_time: float | None = None
-   max_output_tokens: int = SettingsManager.get_settings().LLM_CONFIG_MAX_TOKENS
+   max_completion_tokens: int = SettingsManager.get_settings().LLM_CONFIG_MAX_TOKENS
+   reasoning_effort: str | None = None
temperature: float | None = SettingsManager.get_settings().LLM_CONFIG_TEMPERATURE
class LLMAPIHandler(Protocol):