feat: support model caching and related configuration

Author: SengokuCola
Date:   2026-04-25 13:53:30 +08:00
Parent: 4b1bc2aba8
Commit: 9759018a0c
11 changed files with 195 additions and 5 deletions


@@ -68,6 +68,8 @@ class LLMResponseResult(BaseDataModel):
     prompt_tokens: int = 0
     completion_tokens: int = 0
     total_tokens: int = 0
+    prompt_cache_hit_tokens: int = 0
+    prompt_cache_miss_tokens: int = 0
 @dataclass(slots=True)
@@ -125,6 +127,8 @@ class LLMServiceResult(BaseDataModel):
             "prompt_tokens": self.completion.prompt_tokens,
             "completion_tokens": self.completion.completion_tokens,
             "total_tokens": self.completion.total_tokens,
+            "prompt_cache_hit_tokens": self.completion.prompt_cache_hit_tokens,
+            "prompt_cache_miss_tokens": self.completion.prompt_cache_miss_tokens,
         }
         if self.completion.tool_calls is not None:
             payload["tool_calls"] = [


@@ -32,6 +32,7 @@ MODULE_COLORS: Dict[str, Tuple[str, Optional[str], bool]] = {
     "remote": ("#6c6c6c", None, False),  # dark gray, less conspicuous
     "planner": ("#008080", None, False),
     "maisaka_reasoning_engine": ("#008080", None, False),
+    "maisaka_chat_loop": ("#0087ff", None, False),
     "maisaka_runtime": ("#ff5fff", None, False),
     "relation": ("#af87af", None, False),  # soft purple, easy on the eyes
     # chat-related modules
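For context, the annotation `Dict[str, Tuple[str, Optional[str], bool]]` suggests each entry is (foreground color, optional background color, bold flag). A hypothetical sketch of how such an entry might be turned into a style string (the helper below is an assumption, not code from this repository):

```python
# Hypothetical consumer of a MODULE_COLORS entry. The tuple layout
# (foreground, optional background, bold) follows the annotation above;
# the output format resembles Rich-style markup, e.g. "bold #0087ff on #000000".
from typing import Dict, Optional, Tuple

MODULE_COLORS: Dict[str, Tuple[str, Optional[str], bool]] = {
    "maisaka_chat_loop": ("#0087ff", None, False),
}


def style_for(module: str) -> str:
    """Build a style string for a module, falling back to plain white."""
    fg, bg, bold = MODULE_COLORS.get(module, ("#ffffff", None, False))
    parts = ["bold"] if bold else []
    parts.append(fg)
    if bg is not None:
        parts.append(f"on {bg}")
    return " ".join(parts)


if __name__ == "__main__":
    print(style_for("maisaka_chat_loop"))  # -> "#0087ff"
```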