diff --git a/src/config/config.py b/src/config/config.py
index 252b4a05..dcd5e7eb 100644
--- a/src/config/config.py
+++ b/src/config/config.py
@@ -56,7 +56,7 @@ BOT_CONFIG_PATH: Path = (CONFIG_DIR / "bot_config.toml").resolve().absolute()
 MODEL_CONFIG_PATH: Path = (CONFIG_DIR / "model_config.toml").resolve().absolute()
 LEGACY_ENV_PATH: Path = (PROJECT_ROOT / ".env").resolve().absolute()
 MMC_VERSION: str = "1.0.0"
-CONFIG_VERSION: str = "8.9.19"
+CONFIG_VERSION: str = "8.9.20"
 MODEL_CONFIG_VERSION: str = "1.14.3"
 
 logger = get_logger("config")
diff --git a/src/config/official_configs.py b/src/config/official_configs.py
index 41f469af..ba11426a 100644
--- a/src/config/official_configs.py
+++ b/src/config/official_configs.py
@@ -1324,6 +1324,15 @@ class DebugConfig(ConfigBase):
     )
     """Whether to log Replyer request bodies; disabled by default"""
 
+    enable_llm_cache_stats: bool = Field(
+        default=False,
+        json_schema_extra={
+            "x-widget": "switch",
+            "x-icon": "chart-no-axes-column",
+        },
+    )
+    """Whether to record LLM prompt cache debug statistics; disabled by default"""
+
 
 class ExtraPromptItem(ConfigBase):
     platform: str = Field(
diff --git a/src/services/llm_cache_stats.py b/src/services/llm_cache_stats.py
index e6b1c268..1d322ba4 100644
--- a/src/services/llm_cache_stats.py
+++ b/src/services/llm_cache_stats.py
@@ -182,6 +182,16 @@ class _LLMCacheStatsStore:
 _store = _LLMCacheStatsStore()
 
 
+def _is_llm_cache_stats_enabled() -> bool:
+    """Read the debug config; LLM prompt cache stats default to off."""
+
+    try:
+        from src.config.config import global_config
+        return bool(global_config.debug.enable_llm_cache_stats)
+    except Exception:
+        return False
+
+
 def _normalize_request_type(request_type: str) -> str:
     normalized = str(request_type or "").strip()
     return normalized or "unknown"
@@ -1313,6 +1323,9 @@ def record_llm_cache_usage(
 ) -> None:
     """Record one LLM prompt cache usage event."""
 
+    if not _is_llm_cache_stats_enabled():
+        return
+
     normalized_task_name = str(task_name or "").strip()
     if normalized_task_name not in FOCUSED_TASK_NAMES:
         return
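
Reviewer note, a minimal usage sketch and not part of the diff: with the new guard in place, record_llm_cache_usage() returns immediately unless the switch is turned on in bot_config.toml. The [debug] table name below is an assumption inferred from the DebugConfig class name; the field name itself comes from this diff.

    [debug]
    # Assumed table name; the field defaults to false per DebugConfig
    enable_llm_cache_stats = true

Because _is_llm_cache_stats_enabled() catches any import or attribute error and returns False, the stats path stays a no-op for configs written before CONFIG_VERSION 8.9.20 that lack the field entirely.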