修复 pylance

This commit is contained in:
DrSmoothl
2026-03-30 22:07:07 +08:00
parent e7dbaa1a70
commit 898b693fe0
4 changed files with 41 additions and 29 deletions

View File

@@ -260,7 +260,7 @@ class KnowledgeLearner:
try:
parsed = json.loads(normalized)
except json.JSONDecodeError:
logger.warning("Knowledge learning result is not valid JSON")
logger.warning("知识学习结果不是有效的 JSON")
return []
if not isinstance(parsed, list):
@@ -324,7 +324,7 @@ class KnowledgeLearner:
try:
parsed = json.loads(normalized)
except json.JSONDecodeError:
logger.warning("Knowledge learning result is not valid JSON")
logger.warning("知识学习结果不是有效的 JSON")
return []
if not isinstance(parsed, list):

View File

@@ -9,7 +9,7 @@ import asyncio
import random
from PIL import Image as PILImage
from rich.console import Group
from rich.console import Group, RenderableType
from rich.panel import Panel
from rich.pretty import Pretty
from rich.text import Text
@@ -118,6 +118,20 @@ class MaisakaChatLoopService:
"""设置当前 planner 请求使用的中断标记。"""
self._interrupt_flag = interrupt_flag
def _build_request_messages(self, selected_history: List[LLMContextMessage]) -> List[Message]:
    """Construct the message list to send to the large language model.

    The system prompt is always placed first; each history entry is then
    converted via ``to_llm_message()`` and appended when the conversion
    yields a message (entries converting to ``None`` are dropped).
    """
    system_builder = MessageBuilder().set_role(RoleType.System)
    system_builder.add_text_content(self._chat_system_prompt)
    request_messages: List[Message] = [system_builder.build()]
    # Keep history order; skip entries that do not map to an LLM message.
    converted = (entry.to_llm_message() for entry in selected_history)
    request_messages.extend(msg for msg in converted if msg is not None)
    return request_messages
async def analyze_knowledge_need(
self,
chat_history: List[LLMContextMessage],
@@ -192,7 +206,7 @@ class MaisakaChatLoopService:
preview_width = max(8, int(global_config.maisaka.terminal_image_preview_width))
preview_height = max(1, int(height * (preview_width / width) * 0.5))
resized = grayscale.resize((preview_width, preview_height))
pixels = list(resized.getdata())
pixels = list(resized.tobytes())
except Exception:
return None
@@ -205,12 +219,12 @@ class MaisakaChatLoopService:
return "\n".join(rows)
@classmethod
def _render_message_content(cls, content: Any) -> object:
def _render_message_content(cls, content: Any) -> RenderableType:
if isinstance(content, str):
return Text(content)
if isinstance(content, list):
parts: List[object] = []
parts: List[RenderableType] = []
for item in content:
if isinstance(item, str):
parts.append(Text(item))
@@ -220,7 +234,7 @@ class MaisakaChatLoopService:
if isinstance(image_format, str) and isinstance(image_base64, str):
approx_size = max(0, len(image_base64) * 3 // 4)
size_text = f"{approx_size / 1024:.1f} KB" if approx_size >= 1024 else f"{approx_size} B"
preview_parts: List[object] = [
preview_parts: List[RenderableType] = [
Text(f"图片格式 image/{image_format} {size_text}\nbase64 内容已省略", style="magenta")
]
if global_config.maisaka.terminal_image_preview:
@@ -284,13 +298,13 @@ class MaisakaChatLoopService:
content = getattr(message, "content", None)
tool_call_id = getattr(message, "tool_call_id", None)
role = raw_role.value if hasattr(raw_role, "value") else str(raw_role)
role = raw_role.value if isinstance(raw_role, RoleType) else str(raw_role)
title = Text.assemble(
Text(f" {self._get_role_badge_label(role)} ", style=self._get_role_badge_style(role)),
Text(f" #{index}", style="muted"),
)
parts: List[object] = []
parts: List[RenderableType] = []
if content not in (None, "", []):
parts.append(Text(" 消息 ", style="bold cyan"))
parts.append(self._render_message_content(content))
@@ -319,20 +333,11 @@ class MaisakaChatLoopService:
selected_history, selection_reason = self._select_llm_context_messages(chat_history)
def message_factory(_client: BaseClient) -> List[Message]:
messages: List[Message] = []
system_msg = MessageBuilder().set_role(RoleType.System)
system_msg.add_text_content(self._chat_system_prompt)
messages.append(system_msg.build())
del _client
return self._build_request_messages(selected_history)
for msg in selected_history:
llm_message = msg.to_llm_message()
if llm_message is not None:
messages.append(llm_message)
return messages
all_tools = [*get_builtin_tools(), *self._extra_tools]
built_messages = message_factory(None)
all_tools: List[ToolDefinitionInput] = [*get_builtin_tools(), *self._extra_tools]
built_messages = self._build_request_messages(selected_history)
ordered_panels: List[Panel] = []
for index, msg in enumerate(built_messages, start=1):

View File

@@ -1,7 +1,7 @@
"""Maisaka 推理引擎。"""
from datetime import datetime
from typing import TYPE_CHECKING, Any, Optional
from typing import TYPE_CHECKING, Any, Optional, cast
import asyncio
import difflib
@@ -85,8 +85,8 @@ class MaisakaReasoningEngine:
for round_index in range(self._runtime._max_internal_rounds):
cycle_detail = self._start_cycle()
self._runtime._log_cycle_started(cycle_detail, round_index)
planner_started_at = time.time()
try:
planner_started_at = time.time()
logger.info(
f"{self._runtime.log_prefix} 规划器开始执行: "
f"回合={round_index + 1} "
@@ -680,6 +680,9 @@ class MaisakaReasoningEngine:
)
return False
from src.chat.replyer.maisaka_generator import MaisakaReplyGenerator
replyer = cast(MaisakaReplyGenerator, replyer)
logger.info(f"{self._runtime.log_prefix} 已成功获取 Maisaka 回复生成器")
logger.info(f"{self._runtime.log_prefix} 正在调用回复生成接口: 目标消息编号={target_message_id}")

View File

@@ -1,7 +1,7 @@
"""Maisaka runtime for non-CLI integrations."""
from pathlib import Path
from typing import Literal, Optional
from typing import Literal, Optional, cast
import asyncio
import time
@@ -16,6 +16,7 @@ from src.config.config import global_config
from src.know_u.knowledge import KnowledgeLearner
from src.learners.expression_learner import ExpressionLearner
from src.learners.jargon_miner import JargonMiner
from src.llm_models.payload_content.tool_option import ToolDefinitionInput
from src.mcp_module import MCPManager
from .chat_loop_service import MaisakaChatLoopService
@@ -34,9 +35,10 @@ class MaisakaHeartFlowChatting:
def __init__(self, session_id: str):
self.session_id = session_id
self.chat_stream: Optional[BotChatSession] = chat_manager.get_session_by_session_id(session_id)
if self.chat_stream is None:
chat_stream = chat_manager.get_session_by_session_id(session_id)
if chat_stream is None:
raise ValueError(f"未找到会话 {session_id} 对应的 Maisaka 运行时")
self.chat_stream: BotChatSession = chat_stream
session_name = chat_manager.get_session_name(session_id) or session_id
self.log_prefix = f"[{session_name}]"
@@ -382,7 +384,8 @@ class MaisakaHeartFlowChatting:
logger.info(f"{self.log_prefix} 没有可供 Maisaka 使用的 MCP 工具")
return
self._chat_loop_service.set_extra_tools(mcp_tools)
mcp_tool_definitions = [cast(ToolDefinitionInput, tool) for tool in mcp_tools]
self._chat_loop_service.set_extra_tools(mcp_tool_definitions)
logger.info(
f"{self.log_prefix} 已向 Maisaka 加载 {len(mcp_tools)} 个 MCP 工具:\n"
f"{self._mcp_manager.get_tool_summary()}"
@@ -417,9 +420,10 @@ class MaisakaHeartFlowChatting:
)
def _log_cycle_completed(self, cycle_detail: CycleDetail, timer_strings: list[str]) -> None:
end_time = cycle_detail.end_time if cycle_detail.end_time is not None else cycle_detail.start_time
logger.info(
f"{self.log_prefix} MaiSaka 轮次结束: 循环编号={cycle_detail.cycle_id} "
f"总耗时={cycle_detail.end_time - cycle_detail.start_time:.2f} 秒; "
f"总耗时={end_time - cycle_detail.start_time:.2f} 秒; "
f"阶段耗时={', '.join(timer_strings) if timer_strings else ''}"
)