feat: 添加模型级别最大token数配置,并更新相关逻辑以支持优先级处理

This commit is contained in:
墨梓柒
2025-12-03 11:45:15 +08:00
parent 6680afaa4a
commit 12bc661790
3 changed files with 20 additions and 4 deletions

View File

@@ -63,6 +63,9 @@ class ModelInfo(ConfigBase):
temperature: float | None = field(default=None)
"""模型级别温度(可选),会覆盖任务配置中的温度"""
max_tokens: int | None = field(default=None)
"""模型级别最大token数(可选),会覆盖任务配置中的max_tokens"""
force_stream_mode: bool = field(default=False)
"""是否强制使用流式输出模式"""