feat: 添加模型级别最大token数配置,并更新相关逻辑以支持优先级处理
This commit is contained in:
@@ -63,6 +63,9 @@ class ModelInfo(ConfigBase):
temperature: float | None = field(default=None)
"""模型级别温度(可选),会覆盖任务配置中的温度"""

max_tokens: int | None = field(default=None)
"""模型级别最大token数(可选),会覆盖任务配置中的max_tokens"""

force_stream_mode: bool = field(default=False)
"""是否强制使用流式输出模式"""
Reference in New Issue
Block a user