fix:生成默认配置文件

This commit is contained in:
SengokuCola
2026-04-18 22:31:15 +08:00
parent 0871db9e07
commit c67f7e8bd2
2 changed files with 159 additions and 4 deletions

View File

@@ -11,6 +11,7 @@ import tomlkit
from .config_base import AttributeData, ConfigBase, Field
from .config_utils import compare_versions, output_config_changes, recursive_parse_item_to_table
from .default_model_config import create_default_model_config
from .file_watcher import FileChange, FileWatcher
from .legacy_migration import migrate_legacy_bind_env_to_bot_config_dict, try_migrate_legacy_bot_config_dict
from .model_configs import APIProvider, ModelInfo, ModelTaskConfig
@@ -55,7 +56,7 @@ MODEL_CONFIG_PATH: Path = (CONFIG_DIR / "model_config.toml").resolve().absolute(
LEGACY_ENV_PATH: Path = (PROJECT_ROOT / ".env").resolve().absolute()
MMC_VERSION: str = "1.0.0"
CONFIG_VERSION: str = "8.9.3"
MODEL_CONFIG_VERSION: str = "1.14.0"
MODEL_CONFIG_VERSION: str = "1.14.1"
logger = get_logger("config")
@@ -439,15 +440,20 @@ class ConfigManager:
logger.error(t("config.reload_timeout", timeout_seconds=self._hot_reload_timeout_s))
def generate_new_config_file(config_class: type[T], config_path: Path, inner_config_version: str) -> None:
def generate_new_config_file(
config_class: type[T], config_path: Path, inner_config_version: str, override_repr: bool = False
) -> None:
"""生成新的配置文件
:param config_class: 配置类
:param config_path: 配置文件路径
:param inner_config_version: 配置文件版本号
"""
config = config_class()
write_config_to_file(config, config_path, inner_config_version)
if config_class is ModelConfig:
config = create_default_model_config(config_class)
else:
config = config_class()
write_config_to_file(config, config_path, inner_config_version, override_repr)
def remove_legacy_env_file(env_path: Path) -> None:
@@ -468,6 +474,9 @@ def load_config_from_file(
config_class: type[T], config_path: Path, new_ver: str, override_repr: bool = False
) -> tuple[T, bool]:
attribute_data = AttributeData()
if not config_path.exists():
logger.warning(f"配置文件缺失,正在生成默认配置: {config_path}")
generate_new_config_file(config_class, config_path, new_ver, override_repr)
with open(config_path, "r", encoding="utf-8") as f:
config_data = tomlkit.load(f)
inner_table = config_data.get("inner")

View File

@@ -0,0 +1,146 @@
from typing import Any, TypeVar
from .config_base import ConfigBase
from .model_configs import APIProvider, ModelInfo, ModelTaskConfig, OpenAICompatibleAuthType, TaskConfig
T = TypeVar("T", bound=ConfigBase)
# Preset API provider(s) written into a freshly generated model config file.
# NOTE(review): auth_type NONE presumably means the API key is supplied out of
# band (env var / runtime injection) — confirm against APIProvider's auth handling.
DEFAULT_PROVIDER_TEMPLATES: list[dict[str, Any]] = [
    {
        # Alibaba Cloud "BaiLian" (DashScope) OpenAI-compatible endpoint.
        "name": "BaiLian",
        "base_url": "https://dashscope.aliyuncs.com/compatible-mode/v1",
        "auth_type": OpenAICompatibleAuthType.NONE.value,
    }
]
# Per-task default settings keyed by the field name on ModelTaskConfig.
# Each value is expanded as TaskConfig(**template) in create_default_model_config.
# The model names listed here also determine which DEFAULT_MODEL_TEMPLATES entries
# survive the filter in build_default_model_templates.
DEFAULT_TASK_CONFIG_TEMPLATES: dict[str, dict[str, Any]] = {
    # General-purpose utility task: fast non-thinking model, moderate temperature.
    "utils": {
        "model_list": ["qwen3.5-35b-a3b-nonthink"],
        "max_tokens": 4096,
        "temperature": 0.5,
        "slow_threshold": 15.0,
        "selection_strategy": "random",
    },
    # Reply generation: high temperature, generous slow threshold for long outputs.
    "replyer": {
        "model_list": ["ali-glm-5"],
        "max_tokens": 4096,
        "temperature": 1,
        "slow_threshold": 120.0,
        "selection_strategy": "random",
    },
    # Planning: several candidate models, one picked at random per call.
    "planner": {
        "model_list": ["qwen3.5-35b-a3b", "qwen3.5-122b-a10b", "qwen3.5-flash"],
        "max_tokens": 8000,
        "temperature": 0.7,
        "slow_threshold": 12.0,
        "selection_strategy": "random",
    },
    # Vision-language task: small output budget, low temperature.
    "vlm": {
        "model_list": ["qwen3.5-flash"],
        "max_tokens": 512,
        "temperature": 0.3,
        "slow_threshold": 15.0,
        "selection_strategy": "random",
    },
    # NOTE(review): model_list is a single empty string — it matches no entry in
    # DEFAULT_MODEL_TEMPLATES, so no voice model template is emitted. Looks like a
    # deliberate "unconfigured" placeholder; confirm TaskConfig accepts it.
    "voice": {
        "model_list": [""],
        "max_tokens": 1024,
        "temperature": 0.3,
        "slow_threshold": 12.0,
        "selection_strategy": "random",
    },
    # Embedding task; temperature is presumably ignored by embedding endpoints.
    "embedding": {
        "model_list": ["qwen3-embedding"],
        "max_tokens": 1024,
        "temperature": 0.3,
        "slow_threshold": 5.0,
        "selection_strategy": "random",
    },
}
# Candidate model definitions; each is expanded as ModelInfo(**template).
# Only entries whose "name" is referenced by DEFAULT_TASK_CONFIG_TEMPLATES are
# actually written out (see build_default_model_templates).
# NOTE(review): "extra_params" mixes a Python bool (False, first entry) with the
# string "false" (later entries) for enable_thinking. Whichever form the provider
# API expects, these should almost certainly be consistent — confirm which one the
# request serializer requires and normalize.
DEFAULT_MODEL_TEMPLATES: list[dict[str, Any]] = [
    {
        "model_identifier": "glm-5",
        "name": "ali-glm-5",
        "api_provider": "BaiLian",
        # Prices are presumably cost per unit of input/output tokens — confirm units.
        "price_in": 3.0,
        "price_out": 14.0,
        "temperature": 1.0,
        "visual": False,
        "extra_params": {"enable_thinking": False},
    },
    {
        "model_identifier": "qwen3.5-122b-a10b",
        "name": "qwen3.5-122b-a10b",
        "api_provider": "BaiLian",
        "price_in": 0.8,
        "price_out": 6.4,
        "visual": True,
        "extra_params": {"enable_thinking": "false"},
    },
    # Same model_identifier appears twice below under different names: a default
    # (thinking) variant and an explicit non-thinking variant — intentional aliasing.
    {
        "model_identifier": "qwen3.5-35b-a3b",
        "name": "qwen3.5-35b-a3b",
        "api_provider": "BaiLian",
        "price_in": 0.4,
        "price_out": 3.2,
        "visual": True,
        "extra_params": {},
    },
    {
        "model_identifier": "qwen3.5-35b-a3b",
        "name": "qwen3.5-35b-a3b-nonthink",
        "api_provider": "BaiLian",
        "price_in": 0.4,
        "price_out": 3.2,
        "visual": True,
        "extra_params": {"enable_thinking": "false"},
    },
    {
        "model_identifier": "qwen3.5-flash",
        "name": "qwen3.5-flash",
        "api_provider": "BaiLian",
        "price_in": 0.2,
        "price_out": 2.0,
        "visual": True,
        "extra_params": {"enable_thinking": "false"},
    },
    {
        "model_identifier": "text-embedding-v4",
        "name": "qwen3-embedding",
        "api_provider": "BaiLian",
        "price_in": 0.5,
        "price_out": 0.5,
        "visual": False,
        "extra_params": {},
    }
]
def build_default_model_templates() -> list[dict[str, Any]]:
    """Return only the model templates actually referenced by the default task assignments.

    A template survives the filter when its "name" appears in any task's
    "model_list" inside DEFAULT_TASK_CONFIG_TEMPLATES.
    """
    # Collect every model name mentioned by any task template.
    referenced_names: set[str] = set()
    for task_template in DEFAULT_TASK_CONFIG_TEMPLATES.values():
        referenced_names.update(task_template["model_list"])
    # Keep original ordering of DEFAULT_MODEL_TEMPLATES while filtering.
    return [tpl for tpl in DEFAULT_MODEL_TEMPLATES if tpl["name"] in referenced_names]
def create_default_model_config(config_class: type[T]) -> T:
    """Create a default model config instance, from the preset templates, that passes validation.

    :param config_class: the concrete ConfigBase subclass to instantiate
                         (e.g. ModelConfig)
    :return: a populated instance of ``config_class``
    """
    # Build one TaskConfig per TaskConfig-typed field on ModelTaskConfig; fields
    # without a preset template fall back to TaskConfig's own defaults.
    task_config_fields = {
        field_name: TaskConfig(**DEFAULT_TASK_CONFIG_TEMPLATES.get(field_name, {}))
        for field_name, field_info in ModelTaskConfig.model_fields.items()
        if field_info.annotation is TaskConfig
    }
    models = [ModelInfo(**tpl) for tpl in build_default_model_templates()]
    providers = [APIProvider(**tpl) for tpl in DEFAULT_PROVIDER_TEMPLATES]
    return config_class(
        models=models,
        model_task_config=ModelTaskConfig(**task_config_fields),
        api_providers=providers,
    )