feat: 缓存调试信息开关

This commit is contained in:
SengokuCola
2026-05-01 13:19:07 +08:00
parent 88b895a925
commit 2238c34eca
3 changed files with 23 additions and 1 deletions

View File

@@ -182,6 +182,16 @@ class _LLMCacheStatsStore:
# Module-level singleton that accumulates LLM prompt-cache usage statistics
# (class is defined earlier in this file, outside this view).
_store = _LLMCacheStatsStore()
def _is_llm_cache_stats_enabled() -> bool:
"""读取调试配置,默认关闭 LLM prompt cache 统计。"""
try:
from src.config.config import global_config
return bool(global_config.debug.enable_llm_cache_stats)
except Exception:
return False
def _normalize_request_type(request_type: str) -> str:
normalized = str(request_type or "").strip()
return normalized or "unknown"
@@ -1313,6 +1323,9 @@ def record_llm_cache_usage(
) -> None:
"""Record one LLM prompt cache usage event."""
if not _is_llm_cache_stats_enabled():
return
normalized_task_name = str(task_name or "").strip()
if normalized_task_name not in FOCUSED_TASK_NAMES:
return