From 7a460a474d0a3292a7ad1f14583dba9e7ac41118 Mon Sep 17 00:00:00 2001
From: DrSmoothl <1787882683@qq.com>
Date: Sat, 28 Mar 2026 13:39:48 +0800
Subject: [PATCH] feat: update multiple files to use SessionMessage in place
 of MaiMessage and adjust the related logic
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit
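
MaiSaka previously tracked its session history as list[MaiMessage]; every
such annotation and helper now uses SessionMessage from
src.chat.message_receive.message instead. The MaiSaka-specific metadata
(LLM role, message kind, source, tool-call id, serialized tool calls) keeps
riding in message_info.additional_config, so a round trip through the
adapter looks roughly like this (a sketch built on the helpers touched by
this diff; the literal values are illustrative only):

    from src.llm_models.payload_content.tool_option import ToolCall
    from src.maisaka.message_adapter import (
        build_message,
        get_message_kind,
        get_message_role,
        get_tool_calls,
    )

    # build_message stamps role/kind/tool metadata into additional_config
    # and marks the resulting SessionMessage as initialized.
    msg = build_message(
        role="assistant",
        content="hello",
        message_kind="perception",
        tool_calls=[ToolCall(call_id="c1", func_name="reply", args={})],
    )
    assert get_message_role(msg) == "assistant"
    assert get_message_kind(msg) == "perception"
    assert get_tool_calls(msg)[0].call_id == "c1"

build_message and remove_last_perception move from llm_service into
message_adapter, command processing in ChatBot is re-enabled, and plugin
command invocations now also receive group_id and user_id.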
---
 locales/en-US/startup.json              |  2 +-
 locales/ja-JP/startup.json              |  2 +-
 locales/ko/startup.json                 |  2 +-
 locales/zh-CN/startup.json              |  2 +-
 pytests/test_maisaka_message_adapter.py | 55 +++++++++++++++++++++++++
 src/chat/message_receive/bot.py         | 13 +++---
 src/llm_models/utils_model.py           | 20 ++++-----
 src/maisaka/cli.py                      | 12 +++---
 src/maisaka/knowledge.py                |  4 +-
 src/maisaka/llm_service.py              | 20 ++++-----
 src/maisaka/message_adapter.py          | 30 +++++++-------
 src/maisaka/replyer.py                  | 12 +++---
 src/maisaka/runtime.py                  |  8 ++--
 src/maisaka/tool_handlers.py            | 16 +++----
 src/plugin_runtime/component_query.py   | 22 ++++------
 15 files changed, 136 insertions(+), 84 deletions(-)
 create mode 100644 pytests/test_maisaka_message_adapter.py

diff --git a/locales/en-US/startup.json b/locales/en-US/startup.json
index 8ad1a8c4..7482666b 100644
--- a/locales/en-US/startup.json
+++ b/locales/en-US/startup.json
@@ -27,7 +27,7 @@
   "startup.main_error": "Main process encountered an exception: {error}",
   "startup.opensource_free_notice": " This project is completely free and open-source software, released under the GPL-3.0 license",
   "startup.opensource_group": " Official group chat: ",
-  "startup.opensource_group_value": "1006149251",
+  "startup.opensource_group_value": "766798517",
   "startup.opensource_repo": " Official repository: ",
   "startup.opensource_repo_value": "https://github.com/MaiM-with-u/MaiBot",
   "startup.opensource_resale_warning": " Reselling this software as a \"product\" or concealing its open-source nature violates the license!",
diff --git a/locales/ja-JP/startup.json b/locales/ja-JP/startup.json
index 94ec95ec..6a855dc6 100644
--- a/locales/ja-JP/startup.json
+++ b/locales/ja-JP/startup.json
@@ -27,7 +27,7 @@
   "startup.main_error": "メインプロセスで例外が発生しました: {error}",
   "startup.opensource_free_notice": " 本プロジェクトは完全無料のオープンソースソフトウェアであり、GPL-3.0 ライセンスのもとで公開されています",
   "startup.opensource_group": " 公式グループ: ",
-  "startup.opensource_group_value": "1006149251",
+  "startup.opensource_group_value": "766798517",
   "startup.opensource_repo": " 公式リポジトリ: ",
   "startup.opensource_repo_value": "https://github.com/MaiM-with-u/MaiBot",
   "startup.opensource_resale_warning": " 本ソフトウェアを「商品」として転売したり、オープンソースであることを隠すことはライセンス違反です!",
diff --git a/locales/ko/startup.json b/locales/ko/startup.json
index 1a31a17d..2f7ee595 100644
--- a/locales/ko/startup.json
+++ b/locales/ko/startup.json
@@ -27,7 +27,7 @@
   "startup.main_error": "메인 프로세스에서 예외 발생: {error}",
   "startup.opensource_free_notice": " 본 프로젝트는 완전 무료 오픈소스 소프트웨어이며, GPL-3.0 라이선스로 배포됩니다",
   "startup.opensource_group": " 공식 그룹: ",
-  "startup.opensource_group_value": "1006149251",
+  "startup.opensource_group_value": "766798517",
   "startup.opensource_repo": " 공식 저장소: ",
   "startup.opensource_repo_value": "https://github.com/MaiM-with-u/MaiBot",
   "startup.opensource_resale_warning": " 본 소프트웨어를 '상품'으로 재판매하거나 오픈소스임을 숨기는 행위는 라이선스 위반입니다!",
diff --git a/locales/zh-CN/startup.json b/locales/zh-CN/startup.json
index c70441df..2290b652 100644
--- a/locales/zh-CN/startup.json
+++ b/locales/zh-CN/startup.json
@@ -27,7 +27,7 @@
   "startup.main_error": "主程序发生异常: {error}",
   "startup.opensource_free_notice": " 本项目是完全免费的开源软件,基于 GPL-3.0 协议发布",
   "startup.opensource_group": " 官方群聊: ",
-  "startup.opensource_group_value": "1006149251",
+  "startup.opensource_group_value": "766798517",
   "startup.opensource_repo": " 官方仓库: ",
   "startup.opensource_repo_value": "https://github.com/MaiM-with-u/MaiBot",
   "startup.opensource_resale_warning": " 将本软件作为「商品」倒卖、隐瞒开源性质均违反协议!",
diff --git a/pytests/test_maisaka_message_adapter.py b/pytests/test_maisaka_message_adapter.py
new file mode 100644
index 00000000..d872253c
--- /dev/null
+++ b/pytests/test_maisaka_message_adapter.py
@@ -0,0 +1,55 @@
+import sys
+from datetime import datetime
+from pathlib import Path
+
+# 在导入 src 包之前,先把项目根目录加入 sys.path,
+# 这样无论从哪个工作目录运行 pytest 都能找到这些模块。
+PROJECT_ROOT = Path(__file__).resolve().parents[1]
+if str(PROJECT_ROOT) not in sys.path:
+    sys.path.insert(0, str(PROJECT_ROOT))
+
+from src.chat.message_receive.message import SessionMessage
+from src.common.data_models.message_component_data_model import MessageSequence, TextComponent
+from src.llm_models.payload_content.tool_option import ToolCall
+from src.maisaka.message_adapter import build_message, get_message_kind, get_message_role, get_tool_call_id, get_tool_calls
+
+
+def test_build_message_returns_session_message_with_maisaka_metadata() -> None:
+    timestamp = datetime.now()
+    tool_call = ToolCall(
+        call_id="call-1",
+        func_name="reply",
+        args={"message_id": "msg-1"},
+    )
+    raw_message = MessageSequence(components=[TextComponent(text="内部消息内容")])
+
+    message = build_message(
+        role="assistant",
+        content="展示消息内容",
+        message_kind="perception",
+        source="assistant",
+        tool_call_id="call-1",
+        tool_calls=[tool_call],
+        timestamp=timestamp,
+        message_id="maisaka-msg-1",
+        raw_message=raw_message,
+        display_text="展示消息内容",
+    )
+
+    assert isinstance(message, SessionMessage)
+    assert message.initialized is True
+    assert message.message_id == "maisaka-msg-1"
+    assert message.timestamp == timestamp
+    assert message.processed_plain_text == "展示消息内容"
+    assert message.display_message == "展示消息内容"
+    assert message.raw_message is raw_message
+
+    assert get_message_role(message) == "assistant"
+    assert get_message_kind(message) == "perception"
+    assert get_tool_call_id(message) == "call-1"
+
+    restored_tool_calls = get_tool_calls(message)
+    assert len(restored_tool_calls) == 1
+    assert restored_tool_calls[0].call_id == "call-1"
+    assert restored_tool_calls[0].func_name == "reply"
+    assert restored_tool_calls[0].args == {"message_id": "msg-1"}
diff --git a/src/chat/message_receive/bot.py b/src/chat/message_receive/bot.py
index 1d2abb3b..dc704e44 100644
--- a/src/chat/message_receive/bot.py
+++ b/src/chat/message_receive/bot.py
@@ -343,14 +343,13 @@ class ChatBot:
 
         # message.update_chat_stream(chat)
 
-        # 命令处理 - 使用新插件系统检查并处理命令
-        # 注意:命令返回的 response 当前只用于日志记录和流程判断,
-        # 不会在这里自动作为回复消息发送回会话。
-        # is_command, cmd_result, continue_process = await self._process_commands(message)
+        # 命令处理 - 使用新插件系统检查并处理命令。
+        # 命令处理器内部自行决定是否回复消息,这里只负责流程分发与拦截。
+        is_command, cmd_result, continue_process = await self._process_commands(message)
 
-        # # 如果是命令且不需要继续处理,则直接返回
-        # if is_command and await self._handle_command_processing_result(message, cmd_result, continue_process):
-        #     return
+        # 如果是命令且不需要继续处理,则直接返回,避免落入 HeartFlow / MaiSaka。
+        if is_command and await self._handle_command_processing_result(message, cmd_result, continue_process):
+            return
 
         # continue_flag, modified_message = await events_manager.handle_mai_events(EventType.ON_MESSAGE, message)
         # if not continue_flag:
diff --git a/src/llm_models/utils_model.py b/src/llm_models/utils_model.py
index 84af5052..43fb5189 100644
--- a/src/llm_models/utils_model.py
+++ b/src/llm_models/utils_model.py
@@ -328,17 +328,17 @@
         start_time = time.time()
         if self.request_type.startswith("maisaka_"):
             logger.info(
-                f"LLMOrchestrator[{self.request_type}] generate_response_with_message_async started "
+                f"LLMOrchestrator[{self.request_type}] 开始执行 generate_response_with_message_async "
                 f"(temperature={temperature}, max_tokens={max_tokens}, tools={len(tools or [])})"
             )
 
         if self.request_type.startswith("maisaka_"):
             logger.info(
-                f"LLMOrchestrator[{self.request_type}] building internal tool options from {len(tools or [])} tool(s)"
+                f"LLMOrchestrator[{self.request_type}] 正在根据 {len(tools or [])} 个工具构建内部工具选项"
             )
         tool_built = self._build_tool_options(tools)
         if self.request_type.startswith("maisaka_"):
-            logger.info(f"LLMOrchestrator[{self.request_type}] built {len(tool_built or [])} internal tool option(s)")
+            logger.info(f"LLMOrchestrator[{self.request_type}] 已构建 {len(tool_built or [])} 个内部工具选项")
 
         execution_result = await self._execute_request(
             request_type=RequestType.RESPONSE,
@@ -353,7 +353,7 @@
 
         model_info = execution_result.model_info
         if self.request_type.startswith("maisaka_"):
             logger.info(
-                f"LLMOrchestrator[{self.request_type}] generate_response_with_message_async finished "
+                f"LLMOrchestrator[{self.request_type}] generate_response_with_message_async 执行完成 "
                 f"(model={model_info.name}, time_cost={time.time() - start_time:.2f}s)"
             )
@@ -832,18 +832,18 @@
             model_info, api_provider, client = self._select_model(exclude_models=failed_models_this_request)
             if self.request_type.startswith("maisaka_"):
                 logger.info(
-                    f"LLMOrchestrator[{self.request_type}] selected model={model_info.name} "
+                    f"LLMOrchestrator[{self.request_type}] 已选择模型 model={model_info.name} "
                     f"provider={api_provider.name} request_type={request_type.value}"
                 )
 
             message_list = []
             if message_factory:
                 if self.request_type.startswith("maisaka_"):
-                    logger.info(f"LLMOrchestrator[{self.request_type}] building message list via message_factory")
+                    logger.info(f"LLMOrchestrator[{self.request_type}] 正在通过 message_factory 构建消息列表")
                 message_list = message_factory(client)
                 if self.request_type.startswith("maisaka_"):
                     logger.info(
-                        f"LLMOrchestrator[{self.request_type}] message_factory returned {len(message_list)} message(s)"
+                        f"LLMOrchestrator[{self.request_type}] message_factory 返回了 {len(message_list)} 条消息"
                     )
 
             try:
@@ -863,8 +863,8 @@
                 )
             if self.request_type.startswith("maisaka_"):
                 logger.info(
-                    f"LLMOrchestrator[{self.request_type}] sending request to model={model_info.name} "
-                    f"with tool_options={len(tool_options or [])}"
+                    f"LLMOrchestrator[{self.request_type}] 正在向模型 model={model_info.name} 发送请求 "
+                    f"(tool_options={len(tool_options or [])})"
                 )
             response = await self._attempt_request_on_model(
                 api_provider,
@@ -873,7 +873,7 @@
             )
             if self.request_type.startswith("maisaka_"):
                 logger.info(
-                    f"LLMOrchestrator[{self.request_type}] model={model_info.name} returned API response"
+                    f"LLMOrchestrator[{self.request_type}] 模型 model={model_info.name} 已返回 API 响应"
                 )
             total_tokens, penalty, usage_penalty = self.model_usage[model_info.name]
             if response_usage := response.usage:
diff --git a/src/maisaka/cli.py b/src/maisaka/cli.py
index 6f72f932..83c97ca6 100644
--- a/src/maisaka/cli.py
+++ b/src/maisaka/cli.py
@@ -13,7 +13,7 @@
 from rich.markdown import Markdown
 from rich.panel import Panel
 from rich.text import Text
-from src.common.data_models.mai_message_data_model import MaiMessage
+from src.chat.message_receive.message import SessionMessage
 from src.config.config import global_config
 
 from .config import (
@@ -26,8 +26,8 @@
 from .input_reader import InputReader
 from .knowledge import retrieve_relevant_knowledge
 from .knowledge_store import get_knowledge_store
-from .llm_service import MaiSakaLLMService, build_message, remove_last_perception
-from .message_adapter import format_speaker_content
+from .llm_service import MaiSakaLLMService
+from .message_adapter import build_message, format_speaker_content, remove_last_perception
 from .mcp_client import MCPManager
 from .tool_handlers import (
     ToolHandlerContext,
@@ -47,7 +47,7 @@ class BufferCLI:
     def __init__(self):
         self.llm_service: Optional[MaiSakaLLMService] = None
         self._reader = InputReader()
-        self._chat_history: Optional[list[MaiMessage]] = None
+        self._chat_history: Optional[list[SessionMessage]] = None
         self._knowledge_store = get_knowledge_store()
 
         knowledge_stats = self._knowledge_store.get_stats()
@@ -122,7 +122,7 @@
 
         await self._run_llm_loop(self._chat_history)
 
-    async def _run_llm_loop(self, chat_history: list[MaiMessage]):
+    async def _run_llm_loop(self, chat_history: list[SessionMessage]):
         """
         Main inner loop for the Maisaka planner.
 
@@ -318,7 +318,7 @@ class BufferCLI:
             )
         )
 
-    async def _generate_visible_reply(self, chat_history: list[MaiMessage], latest_thought: str) -> str:
+    async def _generate_visible_reply(self, chat_history: list[SessionMessage], latest_thought: str) -> str:
         """Generate and emit a visible reply based on the latest thought."""
         if not self.llm_service or not latest_thought:
             return ""
diff --git a/src/maisaka/knowledge.py b/src/maisaka/knowledge.py
index f56fbdc5..b6fc5773 100644
--- a/src/maisaka/knowledge.py
+++ b/src/maisaka/knowledge.py
@@ -4,7 +4,7 @@ MaiSaka knowledge retrieval helpers.
 """
 
 from typing import List
 
-from src.common.data_models.mai_message_data_model import MaiMessage
+from src.chat.message_receive.message import SessionMessage
 
 from .knowledge_store import KNOWLEDGE_CATEGORIES, get_knowledge_store
@@ -43,7 +43,7 @@ def extract_category_ids_from_result(result: str) -> List[str]:
 
 async def retrieve_relevant_knowledge(
     llm_service,
-    chat_history: List[MaiMessage],
+    chat_history: List[SessionMessage],
 ) -> str:
     """Retrieve formatted knowledge snippets relevant to the current chat history."""
     store = get_knowledge_store()
diff --git a/src/maisaka/llm_service.py b/src/maisaka/llm_service.py
index fabf4a65..b6bc0a3b 100644
--- a/src/maisaka/llm_service.py
+++ b/src/maisaka/llm_service.py
@@ -19,7 +19,8 @@ from rich.panel import Panel
 from rich.pretty import Pretty
 from rich.text import Text
 
-from src.common.data_models.mai_message_data_model import MaiMessage
+from src.chat.message_receive.message import SessionMessage
+from src.common.data_models.llm_service_data_models import LLMGenerationOptions
 from src.common.logger import get_logger
 from src.common.prompt_i18n import load_prompt
 from src.config.config import config_manager, global_config
@@ -31,7 +32,6 @@ from src.llm_models.payload_content.tool_option import (
     ToolOption,
     normalize_tool_options,
 )
-from src.common.data_models.llm_service_data_models import LLMGenerationOptions
 from src.services.llm_service import LLMServiceClient
 
 from . import config
@@ -55,7 +55,7 @@ class ChatResponse:
 
     content: Optional[str]
     tool_calls: List[ToolCall]
-    raw_message: MaiMessage
+    raw_message: SessionMessage
 
 
 class MaiSakaLLMService:
@@ -428,7 +428,7 @@
             padding=(0, 1),
         )
 
-    async def chat_loop_step(self, chat_history: List[MaiMessage]) -> ChatResponse:
+    async def chat_loop_step(self, chat_history: List[SessionMessage]) -> ChatResponse:
         """执行主对话循环的一步。
 
         Args:
@@ -514,7 +514,7 @@
             source="assistant",
             tool_calls=tool_calls or None,
         )
-        logger.info("已将规划模型响应转换为 MaiMessage")
+        logger.info("已将规划模型响应转换为 SessionMessage")
 
         return ChatResponse(
             content=response,
@@ -522,7 +522,7 @@
             raw_message=raw_message,
         )
 
-    def _filter_for_api(self, chat_history: List[MaiMessage]) -> str:
+    def _filter_for_api(self, chat_history: List[SessionMessage]) -> str:
         """将对话历史过滤为简单文本格式。
 
         Args:
@@ -555,14 +555,14 @@
 
         return "\n\n".join(parts)
 
-    def build_chat_context(self, user_text: str) -> List[MaiMessage]:
+    def build_chat_context(self, user_text: str) -> List[SessionMessage]:
         """构建新的对话上下文。
 
         Args:
             user_text: 用户输入文本。
 
         Returns:
-            List[MaiMessage]: 初始对话上下文消息列表。
+            List[SessionMessage]: 初始对话上下文消息列表。
         """
         return [
             build_message(
@@ -572,7 +572,7 @@
             )
         ]
 
-    async def _removed_analyze_timing(self, chat_history: List[MaiMessage], timing_info: str) -> str:
+    async def _removed_analyze_timing(self, chat_history: List[SessionMessage], timing_info: str) -> str:
         """执行时间节奏分析。
 
         Args:
@@ -623,7 +623,7 @@
 
     # ──────── 回复生成(使用 replyer 模型) ────────
 
-    async def generate_reply(self, reason: str, chat_history: List[MaiMessage]) -> str:
+    async def generate_reply(self, reason: str, chat_history: List[SessionMessage]) -> str:
         """生成最终回复文本。
 
         Args:
diff --git a/src/maisaka/message_adapter.py b/src/maisaka/message_adapter.py
index f079ab83..8553f929 100644
--- a/src/maisaka/message_adapter.py
+++ b/src/maisaka/message_adapter.py
@@ -1,5 +1,5 @@
 """
-MaiSaka message adapters built on top of the main project's MaiMessage model.
+MaiSaka 内部消息适配器。
 """
 
 from copy import deepcopy
@@ -12,7 +12,8 @@
 import re
 
 from PIL import Image as PILImage
-from src.common.data_models.mai_message_data_model import GroupInfo, MaiMessage, MessageInfo, UserInfo
+from src.chat.message_receive.message import SessionMessage
+from src.common.data_models.mai_message_data_model import GroupInfo, MessageInfo, UserInfo
 from src.common.data_models.message_component_data_model import EmojiComponent, ImageComponent, MessageSequence, TextComponent
 from src.config.config import global_config
 from src.llm_models.payload_content.message import Message, MessageBuilder, RoleType
@@ -77,11 +78,11 @@
     group_info: Optional[GroupInfo] = None,
     raw_message: Optional[MessageSequence] = None,
    display_text: Optional[str] = None,
-) -> MaiMessage:
-    """Build a MaiMessage for the Maisaka session history."""
+) -> SessionMessage:
+    """为 MaiSaka 会话历史构建内部 ``SessionMessage``。"""
     resolved_timestamp = timestamp or datetime.now()
     resolved_role = role.value if isinstance(role, RoleType) else role
-    message = MaiMessage(
+    message = SessionMessage(
         message_id=message_id or f"maisaka_{uuid4().hex}",
         timestamp=resolved_timestamp,
         platform=platform,
@@ -104,6 +105,7 @@
     visible_text = display_text if display_text is not None else content
     message.processed_plain_text = visible_text
     message.display_message = visible_text
+    message.initialized = True
     return message
 
 
@@ -160,7 +162,7 @@ def _guess_image_format(image_bytes: bytes) -> Optional[str]:
     return None
 
 
-def get_message_text(message: MaiMessage) -> str:
+def get_message_text(message: SessionMessage) -> str:
     if message.processed_plain_text is not None:
         return message.processed_plain_text
     if message.display_message is not None:
@@ -174,42 +176,42 @@ def get_message_text(message: MaiMessage) -> str:
     return "".join(parts)
 
 
-def get_message_role(message: MaiMessage) -> str:
+def get_message_role(message: SessionMessage) -> str:
     return str(message.message_info.additional_config.get(LLM_ROLE_KEY, RoleType.User.value))
 
 
-def get_message_kind(message: MaiMessage) -> str:
+def get_message_kind(message: SessionMessage) -> str:
     return str(message.message_info.additional_config.get(MESSAGE_KIND_KEY, "normal"))
 
 
-def get_message_source(message: MaiMessage) -> str:
+def get_message_source(message: SessionMessage) -> str:
     return str(message.message_info.additional_config.get(SOURCE_KEY, get_message_role(message)))
 
 
-def is_perception_message(message: MaiMessage) -> bool:
+def is_perception_message(message: SessionMessage) -> bool:
     return get_message_kind(message) == "perception"
 
 
-def get_tool_call_id(message: MaiMessage) -> Optional[str]:
+def get_tool_call_id(message: SessionMessage) -> Optional[str]:
     value = message.message_info.additional_config.get(TOOL_CALL_ID_KEY)
     return str(value) if value else None
 
 
-def get_tool_calls(message: MaiMessage) -> list[ToolCall]:
+def get_tool_calls(message: SessionMessage) -> list[ToolCall]:
     raw_tool_calls = message.message_info.additional_config.get(TOOL_CALLS_KEY, [])
     if not isinstance(raw_tool_calls, list):
         return []
     return [_deserialize_tool_call(item) for item in raw_tool_calls if isinstance(item, dict)]
 
 
-def remove_last_perception(messages: list[MaiMessage]) -> None:
+def remove_last_perception(messages: list[SessionMessage]) -> None:
     for index in range(len(messages) - 1, -1, -1):
         if is_perception_message(messages[index]):
             messages.pop(index)
             break
 
 
-def to_llm_message(message: MaiMessage) -> Optional[Message]:
+def to_llm_message(message: SessionMessage) -> Optional[Message]:
     role = get_message_role(message)
     tool_call_id = get_tool_call_id(message)
     tool_calls = get_tool_calls(message)
diff --git a/src/maisaka/replyer.py b/src/maisaka/replyer.py
index 391ce699..5e40236a 100644
--- a/src/maisaka/replyer.py
+++ b/src/maisaka/replyer.py
@@ -4,7 +4,7 @@ MaiSaka reply helper.
 """
 
 from typing import Optional
-from src.common.data_models.mai_message_data_model import MaiMessage
+from src.chat.message_receive.message import SessionMessage
 from src.config.config import global_config
 
 from .config import USER_NAME
@@ -19,17 +19,17 @@ def _normalize_content(content: str, limit: int = 500) -> str:
     return normalized
 
 
-def _format_message_time(message: MaiMessage) -> str:
+def _format_message_time(message: SessionMessage) -> str:
     return message.timestamp.strftime("%H:%M:%S")
 
 
-def _extract_visible_assistant_reply(message: MaiMessage) -> str:
+def _extract_visible_assistant_reply(message: SessionMessage) -> str:
     if is_perception_message(message):
         return ""
     return ""
 
 
-def _extract_guided_bot_reply(message: MaiMessage) -> str:
+def _extract_guided_bot_reply(message: SessionMessage) -> str:
     speaker_name, body = parse_speaker_content(get_message_text(message).strip())
     bot_nickname = global_config.bot.nickname.strip() or "Bot"
     if speaker_name == bot_nickname:
@@ -64,7 +64,7 @@ def _split_user_message_segments(raw_content: str) -> list[tuple[Optional[str],
     return segments
 
 
-def format_chat_history(messages: list[MaiMessage]) -> str:
+def format_chat_history(messages: list[SessionMessage]) -> str:
     """Format visible chat history for reply generation."""
     bot_nickname = global_config.bot.nickname.strip() or "Bot"
     parts: list[str] = []
@@ -109,7 +109,7 @@ class Replyer:
     def set_enabled(self, enabled: bool) -> None:
         self._enabled = enabled
 
-    async def reply(self, reason: str, chat_history: list[MaiMessage]) -> str:
+    async def reply(self, reason: str, chat_history: list[SessionMessage]) -> str:
         if not self._enabled or not reason or self._llm_service is None:
             return "..."
 
diff --git a/src/maisaka/runtime.py b/src/maisaka/runtime.py
index 526927ff..b7c3bdeb 100644
--- a/src/maisaka/runtime.py
+++ b/src/maisaka/runtime.py
@@ -12,7 +12,7 @@ import asyncio
 from src.chat.heart_flow.heartFC_utils import CycleDetail
 from src.chat.message_receive.chat_manager import BotChatSession, chat_manager
 from src.chat.message_receive.message import SessionMessage
-from src.common.data_models.mai_message_data_model import GroupInfo, MaiMessage, UserInfo
+from src.common.data_models.mai_message_data_model import GroupInfo, UserInfo
 from src.common.data_models.message_component_data_model import MessageSequence
 from src.common.logger import get_logger
 from src.config.config import global_config
@@ -56,7 +56,7 @@ class MaisakaHeartFlowChatting:
         session_name = chat_manager.get_session_name(session_id) or session_id
         self.log_prefix = f"[{session_name}]"
         self._llm_service = MaiSakaLLMService(api_key="", base_url=None, model="")
-        self._chat_history: list[MaiMessage] = []
+        self._chat_history: list[SessionMessage] = []
         self.history_loop: list[CycleDetail] = []
         self.message_cache: list[SessionMessage] = []
         self._mcp_manager: Optional[MCPManager] = None
@@ -227,7 +227,7 @@ class MaisakaHeartFlowChatting:
 
         return merged_sequence
 
-    async def _build_user_history_message(self, message: SessionMessage) -> Optional[MaiMessage]:
+    async def _build_user_history_message(self, message: SessionMessage) -> Optional[SessionMessage]:
         user_sequence = await self._build_message_sequence(message)
         visible_text = build_visible_text_from_sequence(user_sequence).strip()
         if not user_sequence.components:
@@ -498,7 +498,7 @@ class MaisakaHeartFlowChatting:
         )
         return True
 
-    def _build_tool_message(self, tool_call: ToolCall, content: str) -> MaiMessage:
+    def _build_tool_message(self, tool_call: ToolCall, content: str) -> SessionMessage:
         return build_message(
             role="tool",
             content=content,
diff --git a/src/maisaka/tool_handlers.py b/src/maisaka/tool_handlers.py
index 68d00f22..c1f3f447 100644
--- a/src/maisaka/tool_handlers.py
+++ b/src/maisaka/tool_handlers.py
@@ -11,7 +11,7 @@ import os
 
 from rich.panel import Panel
 
-from src.common.data_models.mai_message_data_model import MaiMessage
+from src.chat.message_receive.message import SessionMessage
 from src.llm_models.payload_content.tool_option import ToolCall
 
 from .config import console
@@ -41,7 +41,7 @@ class ToolHandlerContext:
         self.last_user_input_time: Optional[datetime] = None
 
 
-async def handle_stop(tc: ToolCall, chat_history: list[MaiMessage]) -> None:
+async def handle_stop(tc: ToolCall, chat_history: list[SessionMessage]) -> None:
     """Handle the stop tool."""
     console.print("[accent]Calling tool: stop()[/accent]")
     chat_history.append(
@@ -49,7 +49,7 @@ async def handle_stop(tc: ToolCall, chat_history: list[MaiMessage]) -> None:
     )
 
 
-async def handle_wait(tc: ToolCall, chat_history: list[MaiMessage], ctx: ToolHandlerContext) -> str:
+async def handle_wait(tc: ToolCall, chat_history: list[SessionMessage], ctx: ToolHandlerContext) -> str:
     """Handle the wait tool."""
     seconds = (tc.args or {}).get("seconds", 30)
     seconds = max(5, min(seconds, 300))
@@ -86,7 +86,7 @@ async def _do_wait(seconds: int, ctx: ToolHandlerContext) -> str:
     return f"User input received: {user_input}"
 
 
-async def handle_mcp_tool(tc: ToolCall, chat_history: list[MaiMessage], mcp_manager: "MCPManager") -> None:
+async def handle_mcp_tool(tc: ToolCall, chat_history: list[SessionMessage], mcp_manager: "MCPManager") -> None:
     """Handle an MCP tool call."""
     args_str = _json.dumps(tc.args or {}, ensure_ascii=False)
     args_preview = args_str if len(args_str) <= 120 else args_str[:120] + "..."
@@ -107,13 +107,13 @@ async def handle_mcp_tool(tc: ToolCall, chat_history: list[MaiMessage], mcp_mana
     chat_history.append(build_message(role="tool", content=result, tool_call_id=tc.call_id))
 
 
-async def handle_unknown_tool(tc: ToolCall, chat_history: list[MaiMessage]) -> None:
+async def handle_unknown_tool(tc: ToolCall, chat_history: list[SessionMessage]) -> None:
     """Handle an unknown tool call."""
     console.print(f"[accent]Calling unknown tool: {tc.func_name}({tc.args})[/accent]")
     chat_history.append(build_message(role="tool", content=f"Unknown tool: {tc.func_name}", tool_call_id=tc.call_id))
 
 
-async def handle_write_file(tc: ToolCall, chat_history: list[MaiMessage]) -> None:
+async def handle_write_file(tc: ToolCall, chat_history: list[SessionMessage]) -> None:
     """Write a file under the local mai_files workspace."""
     filename = (tc.args or {}).get("filename", "")
     content = (tc.args or {}).get("content", "")
@@ -149,7 +149,7 @@ async def handle_write_file(tc: ToolCall, chat_history: list[MaiMessage]) -> Non
     chat_history.append(build_message(role="tool", content=error_msg, tool_call_id=tc.call_id))
 
 
-async def handle_read_file(tc: ToolCall, chat_history: list[MaiMessage]) -> None:
+async def handle_read_file(tc: ToolCall, chat_history: list[SessionMessage]) -> None:
     """Read a file from the local mai_files workspace."""
     filename = (tc.args or {}).get("filename", "")
     console.print(f'[accent]Calling tool: read_file("{filename}")[/accent]')
@@ -190,7 +190,7 @@ async def handle_read_file(tc: ToolCall, chat_history: list[MaiMessage]) -> None
     chat_history.append(build_message(role="tool", content=error_msg, tool_call_id=tc.call_id))
 
 
-async def handle_list_files(tc: ToolCall, chat_history: list[MaiMessage]) -> None:
+async def handle_list_files(tc: ToolCall, chat_history: list[SessionMessage]) -> None:
     """List files under the local mai_files workspace."""
     console.print("[accent]Calling tool: list_files()[/accent]")
 
diff --git a/src/plugin_runtime/component_query.py b/src/plugin_runtime/component_query.py
index 5a6c39f5..37bb9e0d 100644
--- a/src/plugin_runtime/component_query.py
+++ b/src/plugin_runtime/component_query.py
@@ -139,27 +139,18 @@
         metadata = dict(entry.metadata)
         raw_action_parameters = metadata.get("action_parameters")
         action_parameters = (
-            {
-                str(param_name): str(param_description)
-                for param_name, param_description in raw_action_parameters.items()
-            }
+            {str(param_name): str(param_description) for param_name, param_description in raw_action_parameters.items()}
             if isinstance(raw_action_parameters, dict)
             else {}
         )
         action_require = [
-            str(item)
-            for item in (metadata.get("action_require") or [])
-            if item is not None and str(item).strip()
+            str(item) for item in (metadata.get("action_require") or []) if item is not None and str(item).strip()
         ]
         associated_types = [
-            str(item)
-            for item in (metadata.get("associated_types") or [])
-            if item is not None and str(item).strip()
+            str(item) for item in (metadata.get("associated_types") or []) if item is not None and str(item).strip()
         ]
         activation_keywords = [
-            str(item)
-            for item in (metadata.get("activation_keywords") or [])
-            if item is not None and str(item).strip()
+            str(item) for item in (metadata.get("activation_keywords") or []) if item is not None and str(item).strip()
         ]
 
         return ActionInfo(
@@ -442,9 +433,14 @@
         message = kwargs.get("message")
         matched_groups = kwargs.get("matched_groups")
kwargs.get("matched_groups") plugin_config = kwargs.get("plugin_config") + message_info = getattr(message, "message_info", None) + group_info = getattr(message_info, "group_info", None) + user_info = getattr(message_info, "user_info", None) invoke_args: Dict[str, Any] = { "text": str(getattr(message, "processed_plain_text", "") or ""), "stream_id": str(getattr(message, "session_id", "") or ""), + "group_id": str(getattr(group_info, "group_id", "") or ""), + "user_id": str(getattr(user_info, "user_id", "") or ""), "matched_groups": matched_groups if isinstance(matched_groups, dict) else {}, } if isinstance(plugin_config, dict):