feat:可开启原生at功能

This commit is contained in:
SengokuCola
2026-04-23 15:56:27 +08:00
parent 2255592bd2
commit 35ff91d134
20 changed files with 258 additions and 404 deletions

View File

@@ -9,4 +9,5 @@ Try to keep it short. It is best to reply to only one topic at a time, so the re
{reply_style}
You may refer to the information in [Reply Reference], but depending on the situation, you do not have to follow it completely.
{group_chat_attention_block}
Please do not output any extra content (including unnecessary prefixes or suffixes, colons, brackets, stickers, at, or @). Only output the message content itself.
{replyer_at_block}
Please do not output any extra content (including unnecessary prefixes or suffixes, colons, brackets, stickers, plain at, or @). Only output the message content itself.

View File

@@ -9,4 +9,5 @@
{reply_style}
【返信情報参考】の情報は参考にしてかまいませんが、状況に応じて完全に従う必要はありません。
{group_chat_attention_block}
余計な内容不要な前置きや後置き、コロン、括弧、スタンプ、at や @ など)は出力せず、発言内容だけを出力してください。
{replyer_at_block}
余計な内容(不要な前置きや後置き、コロン、括弧、スタンプ、通常の at や @ など)は出力せず、発言内容だけを出力してください。

View File

@@ -6,4 +6,5 @@
{reply_style}
你可以参考【回复信息参考】中的信息,但是视情况而定,不用完全遵守。
{group_chat_attention_block}
请注意不要输出多余内容(包括不必要的前后缀冒号括号表情包at或 @等 ),只输出发言内容就好。
{replyer_at_block}
请注意不要输出多余内容(包括不必要的前后缀,冒号,括号,表情包,普通 at 或 @等 ),只输出发言内容就好。

View File

@@ -1,6 +1,7 @@
from src.common.data_models.message_component_data_model import ImageComponent, MessageSequence, TextComponent
from src.llm_models.payload_content.message import RoleType
from src.maisaka.context_messages import _build_message_from_sequence
from src.maisaka.message_adapter import build_visible_text_from_sequence
def test_image_only_message_keeps_placeholder_in_text_fallback() -> None:
@@ -20,3 +21,35 @@ def test_image_only_message_keeps_placeholder_in_text_fallback() -> None:
assert message is not None
assert "[发言内容]" in message.get_text_content()
assert "[图片]" in message.get_text_content()
def test_whitespace_image_content_uses_placeholder_in_text_fallback() -> None:
    """Whitespace-only image content must fall back to the text placeholder."""
    components = [
        TextComponent("[发言内容]"),
        ImageComponent(binary_hash="hash", content=" ", binary_data=None),
    ]
    built = _build_message_from_sequence(
        RoleType.User,
        MessageSequence(components),
        "[发言内容][图片]",
        enable_visual_message=False,
    )
    assert built is not None
    assert built.get_text_content() == "[发言内容][图片]"
def test_visible_text_uses_image_placeholder_for_whitespace_content() -> None:
    """Visible-text rendering should substitute [图片] for blank image content."""
    sequence = MessageSequence(
        [
            TextComponent("看这个"),
            ImageComponent(binary_hash="hash", content=" ", binary_data=None),
        ]
    )
    rendered = build_visible_text_from_sequence(sequence)
    assert rendered == "看这个[图片]"

View File

@@ -1,176 +0,0 @@
from importlib import util
from pathlib import Path
from types import ModuleType, SimpleNamespace
from typing import Any
import sys
import pytest
from src.common.data_models.message_component_data_model import AtComponent, TextComponent
from src.core.tooling import ToolExecutionResult, ToolInvocation
_MISSING_MODULE = object()
_module_overrides: dict[str, object] = {}
def _override_module(module_name: str, module: ModuleType) -> None:
    """Install *module* under *module_name*, remembering what (if anything) it shadows."""
    previous = sys.modules.get(module_name, _MISSING_MODULE)
    _module_overrides[module_name] = previous
    sys.modules[module_name] = module
def _restore_overridden_modules() -> None:
    """Undo every _override_module call, newest first, then reset the bookkeeping."""
    for module_name, previous_module in reversed(list(_module_overrides.items())):
        if previous_module is not _MISSING_MODULE:
            sys.modules[module_name] = previous_module
        else:
            # Name did not exist before the override; remove it entirely.
            sys.modules.pop(module_name, None)
    _module_overrides.clear()
# Build stub modules so at.py can be imported without pulling in the real
# CLI sender or send-service dependency graph.
fake_cli_sender_module = ModuleType("src.cli.maisaka_cli_sender")
fake_cli_sender_module.CLI_PLATFORM_NAME = "cli"
fake_cli_sender_module.render_cli_message = lambda text: text
fake_cli_module = ModuleType("src.cli")
fake_cli_module.maisaka_cli_sender = fake_cli_sender_module
fake_send_service_module = ModuleType("src.services.send_service")
# Tests monkeypatch this attribute with their own async fake per test case.
fake_send_service_module._send_to_target_with_message = None
fake_services_module = ModuleType("src.services")
fake_services_module.send_service = fake_send_service_module
# Parent packages must be overridden before their submodules so attribute
# lookups through the package objects resolve to the stubs.
_override_module("src.cli", fake_cli_module)
_override_module("src.cli.maisaka_cli_sender", fake_cli_sender_module)
_override_module("src.services", fake_services_module)
_override_module("src.services.send_service", fake_send_service_module)
# Load at.py directly from its file path under a private module name so the
# stubbed dependencies are the ones bound at import time.
AT_TOOL_PATH = Path(__file__).resolve().parents[1] / "src" / "maisaka" / "builtin_tool" / "at.py"
at_tool_spec = util.spec_from_file_location("_test_maisaka_builtin_at_tool", AT_TOOL_PATH)
assert at_tool_spec is not None and at_tool_spec.loader is not None
at_tool = util.module_from_spec(at_tool_spec)
sys.modules["_test_maisaka_builtin_at_tool"] = at_tool
try:
    at_tool_spec.loader.exec_module(at_tool)
finally:
    # Restore sys.modules even if exec_module raises, so other test modules
    # see the real packages again.
    _restore_overridden_modules()
class _ToolCtx:
    """Lightweight stand-in for the real builtin-tool runtime context."""

    def __init__(self, runtime: SimpleNamespace) -> None:
        self.runtime = runtime

    @staticmethod
    def build_success_result(
        tool_name: str,
        content: str = "",
        structured_content: Any = None,
        metadata: dict[str, Any] | None = None,
    ) -> ToolExecutionResult:
        """Build a successful ToolExecutionResult, copying the metadata mapping."""
        return ToolExecutionResult(
            tool_name=tool_name,
            success=True,
            content=content,
            structured_content=structured_content,
            metadata=dict(metadata) if metadata else {},
        )

    @staticmethod
    def build_failure_result(
        tool_name: str,
        error_message: str,
        structured_content: Any = None,
        metadata: dict[str, Any] | None = None,
    ) -> ToolExecutionResult:
        """Build a failed ToolExecutionResult, copying the metadata mapping."""
        return ToolExecutionResult(
            tool_name=tool_name,
            success=False,
            error_message=error_message,
            structured_content=structured_content,
            metadata=dict(metadata) if metadata else {},
        )

    def append_guided_reply_to_chat_history(self, reply_text: str) -> None:
        """Record a guided reply on the fake runtime's chat history list."""
        self.runtime._chat_history.append(reply_text)
def _build_tool_ctx(*, group_id: str = "group-1") -> _ToolCtx:
    """Assemble a fake runtime holding one resolvable source message ("msg-1")."""
    target_user = SimpleNamespace(
        user_id="target-user-1",
        user_nickname="目标昵称",
        user_cardname="群名片",
    )
    target_message = SimpleNamespace(message_info=SimpleNamespace(user_info=target_user))
    fake_runtime = SimpleNamespace(
        _source_messages_by_id={"msg-1": target_message},
        chat_stream=SimpleNamespace(platform="qq", group_id=group_id),
        session_id="session-1",
        log_prefix="[test-at]",
        _record_reply_sent=lambda: None,
        _chat_history=[],
    )
    return _ToolCtx(runtime=fake_runtime)
def test_at_tool_spec_does_not_embed_visibility_metadata() -> None:
    """The spec must not carry deferred/visibility hints in its metadata."""
    spec = at_tool.get_tool_spec()
    assert spec.name == "at"
    assert "deferred" not in spec.metadata
    assert "visibility" not in spec.metadata
@pytest.mark.asyncio
async def test_at_tool_sends_at_component_by_msg_id(monkeypatch: pytest.MonkeyPatch) -> None:
    """Happy path: the tool resolves msg-1 and sends an AtComponent plus text."""
    captured: dict[str, Any] = {}

    # Fake sender: record every keyword argument and pretend the send succeeded.
    async def fake_send_to_target_with_message(**kwargs: Any) -> object:
        captured.update(kwargs)
        return SimpleNamespace(message_id="sent-msg-1")

    monkeypatch.setattr(at_tool.send_service, "_send_to_target_with_message", fake_send_to_target_with_message)
    result = await at_tool.handle_tool(
        _build_tool_ctx(),
        ToolInvocation(tool_name="at", arguments={"msg_id": "msg-1", "text": "看这里"}),
    )
    assert result.success is True
    assert result.structured_content["target_user_id"] == "target-user-1"
    assert result.structured_content["target_user_name"] == "群名片"
    # The send call must target the session stream and sync into Maisaka history.
    assert captured["stream_id"] == "session-1"
    assert captured["display_message"] == "@群名片 看这里"
    assert captured["sync_to_maisaka_history"] is True
    assert captured["maisaka_source_kind"] == "guided_reply"
    # First component is the mention itself, second the space-prefixed text.
    components = captured["message_sequence"].components
    assert isinstance(components[0], AtComponent)
    assert components[0].target_user_id == "target-user-1"
    assert components[0].target_user_nickname == "目标昵称"
    assert components[0].target_user_cardname == "群名片"
    assert isinstance(components[1], TextComponent)
    assert components[1].text == " 看这里"
@pytest.mark.asyncio
async def test_at_tool_rejects_private_chat() -> None:
    """Without a group_id the tool must refuse to run."""
    outcome = await at_tool.handle_tool(
        _build_tool_ctx(group_id=""),
        ToolInvocation(tool_name="at", arguments={"msg_id": "msg-1"}),
    )
    assert outcome.success is False
    assert "群聊" in outcome.error_message
@pytest.mark.asyncio
async def test_at_tool_rejects_unknown_msg_id() -> None:
    """An unresolvable msg_id must fail and echo the id back."""
    outcome = await at_tool.handle_tool(
        _build_tool_ctx(),
        ToolInvocation(tool_name="at", arguments={"msg_id": "missing-msg"}),
    )
    assert outcome.success is False
    assert outcome.structured_content == {"msg_id": "missing-msg"}

View File

@@ -3,7 +3,8 @@ from types import SimpleNamespace
from src.chat.message_receive.message import SessionMessage
from src.common.data_models.mai_message_data_model import MessageInfo, UserInfo
from src.common.data_models.message_component_data_model import MessageSequence, ReplyComponent, TextComponent
from src.common.data_models.message_component_data_model import AtComponent, MessageSequence, ReplyComponent, TextComponent
from src.config.config import global_config
from src.maisaka.builtin_tool.context import BuiltinToolRuntimeContext
@@ -45,3 +46,53 @@ def test_append_sent_message_to_chat_history_keeps_message_id() -> None:
assert history_message.message_id == "real-message-id"
assert "[msg_id]real-message-id\n" in history_message.raw_message.components[0].text
assert "[msg_id:real-message-id]" in history_message.visible_text
def test_post_process_reply_message_sequences_converts_at_marker_before_bracket_cleanup(monkeypatch) -> None:
    """An at[msg_id] marker must become a real AtComponent when enable_at is on."""
    monkeypatch.setattr(global_config.chat, "enable_at", True)
    # Stub the LLM post-processor so segmentation is deterministic.
    monkeypatch.setattr(
        "src.maisaka.builtin_tool.context.process_llm_response",
        lambda text: [text.strip()] if text.strip() else [],
    )
    target_message = SimpleNamespace(
        message_info=SimpleNamespace(
            user_info=SimpleNamespace(
                user_id="target-user",
                user_nickname="目标昵称",
                user_cardname="群名片",
            )
        )
    )
    runtime = SimpleNamespace(_source_messages_by_id={"12160142": target_message})
    engine = SimpleNamespace(_get_runtime_manager=lambda: None)
    tool_ctx = BuiltinToolRuntimeContext(engine=engine, runtime=runtime)
    sequences = tool_ctx.post_process_reply_message_sequences("at[12160142] 就这个群")
    assert len(sequences) == 1
    components = sequences[0].components
    # The marker resolves to an AtComponent carrying the target user's identity...
    assert isinstance(components[0], AtComponent)
    assert components[0].target_user_id == "target-user"
    assert components[0].target_user_nickname == "目标昵称"
    assert components[0].target_user_cardname == "群名片"
    # ...followed by the trailing text with its separating space preserved.
    assert isinstance(components[1], TextComponent)
    assert components[1].text == " 就这个群"
def test_post_process_reply_message_sequences_ignores_at_marker_when_disabled(monkeypatch) -> None:
    """With enable_at off the marker must survive as plain text."""
    monkeypatch.setattr(global_config.chat, "enable_at", False)
    monkeypatch.setattr(
        "src.maisaka.builtin_tool.context.process_llm_response",
        lambda text: [text.strip()] if text.strip() else [],
    )
    tool_ctx = BuiltinToolRuntimeContext(
        engine=SimpleNamespace(_get_runtime_manager=lambda: None),
        runtime=SimpleNamespace(_source_messages_by_id={}),
    )
    sequences = tool_ctx.post_process_reply_message_sequences("at[12160142] 就这个群")
    assert len(sequences) == 1
    components = sequences[0].components
    assert len(components) == 1
    assert isinstance(components[0], TextComponent)
    assert components[0].text == "at[12160142] 就这个群"

View File

@@ -12,7 +12,7 @@ from src.plugin_runtime.host.component_registry import ComponentRegistry
@pytest.mark.asyncio
async def test_builtin_at_is_exposed_only_in_group_chats() -> None:
async def test_builtin_at_tool_is_not_exposed() -> None:
registry = ToolRegistry()
registry.register_provider(MaisakaBuiltinToolProvider())
@@ -20,9 +20,9 @@ async def test_builtin_at_is_exposed_only_in_group_chats() -> None:
private_specs = await registry.list_tools(ToolAvailabilityContext(session_id="private-1", is_group_chat=False))
default_specs = await registry.list_tools()
assert "at" in {tool_spec.name for tool_spec in group_specs}
assert "at" not in {tool_spec.name for tool_spec in group_specs}
assert "at" not in {tool_spec.name for tool_spec in private_specs}
assert "at" in {tool_spec.name for tool_spec in default_specs}
assert "at" not in {tool_spec.name for tool_spec in default_specs}
def test_plugin_tool_chat_scope_uses_component_field(monkeypatch: pytest.MonkeyPatch) -> None:

View File

@@ -230,8 +230,10 @@ class SessionMessage(MaiMessage):
Returns:
str: 图片组件对应的文本表示。
"""
if component.content: # 先检查是否处理过
return component.content
normalized_content = component.content.strip()
if normalized_content: # 先检查是否处理过
component.content = normalized_content
return normalized_content
from src.chat.image_system.image_manager import image_manager
# 获取描述
@@ -263,8 +265,10 @@ class SessionMessage(MaiMessage):
Returns:
str: 表情包组件对应的文本表示。
"""
if component.content: # 先检查是否处理过
return component.content
normalized_content = component.content.strip()
if normalized_content: # 先检查是否处理过
component.content = normalized_content
return normalized_content
from src.emoji_system.emoji_manager import emoji_manager
# 获取表情包描述
@@ -323,8 +327,10 @@ class SessionMessage(MaiMessage):
Returns:
str: 语音组件对应的文本表示。
"""
if component.content: # 先检查是否处理过
return component.content
normalized_content = component.content.strip()
if normalized_content: # 先检查是否处理过
component.content = normalized_content
return normalized_content
if not enable_voice_transcription:
component.content = "[语音消息]"
return component.content

View File

@@ -201,6 +201,18 @@ class BaseMaisakaReplyGenerator:
return "在该聊天中的注意事项:\n" + "\n\n".join(prompt_lines) + "\n"
@staticmethod
def _build_replyer_at_block() -> str:
    """Build the at-marker usage note injected into the replyer prompt.

    Returns an empty string when the feature is disabled in config.
    """
    if global_config.chat.enable_at:
        return (
            "如果需要提及某人、让某人关注你的回复,可以在回复中加入 `at[msg_id]` 标记,"
            "其中 msg_id 应使用聊天记录中该用户发过的消息编号;"
            "消息发送时会检查这种标记并转换为真正的 at 消息。\n"
        )
    return ""
def _build_system_prompt(
self,
reply_message: Optional[SessionMessage],
@@ -218,6 +230,7 @@ class BaseMaisakaReplyGenerator:
"maisaka_replyer",
bot_name=global_config.bot.nickname,
group_chat_attention_block=self._build_group_chat_attention_block(session_id),
replyer_at_block=self._build_replyer_at_block(),
time_block=f"当前时间:{current_time}",
identity=self._personality_prompt,
reply_style=global_config.personality.reply_style,

View File

@@ -385,7 +385,9 @@ class MessageSequence:
@staticmethod
def _ensure_binary_component_content(item: ByteComponent, fallback_text: str) -> str:
"""确保二进制组件在序列化时带有稳定的文本占位。"""
if item.content:
normalized_content = item.content.strip()
if normalized_content:
item.content = normalized_content
return item.content
item.content = fallback_text
return item.content

View File

@@ -55,7 +55,7 @@ BOT_CONFIG_PATH: Path = (CONFIG_DIR / "bot_config.toml").resolve().absolute()
MODEL_CONFIG_PATH: Path = (CONFIG_DIR / "model_config.toml").resolve().absolute()
LEGACY_ENV_PATH: Path = (PROJECT_ROOT / ".env").resolve().absolute()
MMC_VERSION: str = "1.0.0"
CONFIG_VERSION: str = "8.9.8"
CONFIG_VERSION: str = "8.9.11"
MODEL_CONFIG_VERSION: str = "1.14.1"
logger = get_logger("config")

View File

@@ -191,6 +191,15 @@ class ChatConfig(ConfigBase):
inevitable_at_reply: bool = Field(default=True)
"""是否启用at必回复"""
enable_at: bool = Field(
default=True,
json_schema_extra={
"x-widget": "switch",
"x-icon": "at-sign",
},
)
"""是否允许 replyer 使用 at[msg_id] 标记来发送真正的 at 消息"""
enable_reply_quote: bool = Field(
default=True,
json_schema_extra={

View File

@@ -9,8 +9,6 @@ from src.config.config import global_config
from src.core.tooling import ToolAvailabilityContext, ToolExecutionContext, ToolExecutionResult, ToolInvocation, ToolSpec
from src.llm_models.payload_content.tool_option import ToolDefinitionInput
from .at import get_tool_spec as get_at_tool_spec
from .at import handle_tool as handle_at_tool
from .context import BuiltinToolRuntimeContext
from .continue_tool import get_tool_spec as get_continue_tool_spec
from .continue_tool import handle_tool as handle_continue_tool
@@ -93,14 +91,6 @@ BUILTIN_TOOL_ENTRIES: List[BuiltinToolEntry] = [
visibility="hidden",
),
BuiltinToolEntry("send_emoji", get_send_emoji_tool_spec, handle_send_emoji_tool, stage="action"),
BuiltinToolEntry(
"at",
get_at_tool_spec,
handle_at_tool,
stage="action",
visibility="deferred",
chat_scope="group",
),
BuiltinToolEntry("tool_search", get_tool_search_tool_spec, handle_tool_search_tool, stage="action"),
]

View File

@@ -1,186 +0,0 @@
"""Maisaka 内置 at 工具。"""
from typing import Any, Optional, TYPE_CHECKING
from src.cli.maisaka_cli_sender import CLI_PLATFORM_NAME, render_cli_message
from src.common.data_models.message_component_data_model import AtComponent, MessageSequence, TextComponent
from src.common.logger import get_logger
from src.core.tooling import ToolExecutionContext, ToolExecutionResult, ToolInvocation, ToolSpec
from src.services import send_service
if TYPE_CHECKING:
from .context import BuiltinToolRuntimeContext
logger = get_logger("maisaka_builtin_at")
def get_tool_spec() -> ToolSpec:
    """Return the declaration (name, descriptions, JSON schema) of the built-in at tool."""
    return ToolSpec(
        name="at",
        brief_description="当明确提及某位用户时,发送一条 @ 该用户的消息。",
        detailed_description=(
            "参数说明:\n"
            "- msg_idstring必填。要 @ 的目标用户发过的消息编号。\n"
            "- textstring可选。@ 后追加发送的短文本;只想单独 @ 人时留空。\n"
            "请优先从上下文里选择一条明确属于目标用户的 msg_id不要凭昵称或印象猜测用户。"
        ),
        # JSON-schema for the LLM tool call: msg_id is required, text optional.
        parameters_schema={
            "type": "object",
            "properties": {
                "msg_id": {
                    "type": "string",
                    "description": "要 @ 的目标用户发过的消息编号。",
                },
                "text": {
                    "type": "string",
                    "description": "@ 后追加发送的短文本;只想单独 @ 人时留空。",
                    "default": "",
                },
            },
            "required": ["msg_id"],
        },
        provider_name="maisaka_builtin",
        provider_type="builtin",
    )
def _get_target_user_info(target_message: Any) -> tuple[str, str, str]:
"""从目标消息中提取可用于构造 at 组件的用户信息。"""
message_info = getattr(target_message, "message_info", None)
user_info = getattr(message_info, "user_info", None)
target_user_id = str(getattr(user_info, "user_id", "") or "").strip()
target_user_nickname = str(getattr(user_info, "user_nickname", "") or "").strip()
target_user_cardname = str(getattr(user_info, "user_cardname", "") or "").strip()
return target_user_id, target_user_nickname, target_user_cardname
def _build_at_message_sequence(
    *,
    target_user_id: str,
    target_user_nickname: str = "",
    target_user_cardname: str = "",
    text: str = "",
) -> MessageSequence:
    """Compose the component sequence for an @-mention, optionally followed by text."""
    at_component = AtComponent(
        target_user_id=target_user_id,
        target_user_nickname=target_user_nickname or None,
        target_user_cardname=target_user_cardname or None,
    )
    components = [at_component]
    trailing_text = text.strip()
    if trailing_text:
        # A leading space keeps the mention and the text visually separated.
        components.append(TextComponent(f" {trailing_text}"))
    return MessageSequence(components=components)
async def handle_tool(
    tool_ctx: "BuiltinToolRuntimeContext",
    invocation: ToolInvocation,
    context: Optional[ToolExecutionContext] = None,
) -> ToolExecutionResult:
    """Execute the built-in at tool.

    Resolves the target user from a previously seen message id and sends a
    real @-mention (plus optional trailing text) into the current stream.
    Returns a failure result on missing/unknown msg_id, non-group chats,
    or send errors.
    """
    del context  # unused; kept for the shared tool-handler signature
    target_message_id = str(invocation.arguments.get("msg_id") or "").strip()
    text = str(invocation.arguments.get("text") or "").strip()
    if not target_message_id:
        return tool_ctx.build_failure_result(
            invocation.tool_name,
            "at 工具需要提供有效的 `msg_id` 参数。",
        )
    # Mentions only make sense in group chats; private streams have no group_id.
    if not str(getattr(tool_ctx.runtime.chat_stream, "group_id", "") or "").strip():
        return tool_ctx.build_failure_result(
            invocation.tool_name,
            "at 工具只能在群聊中使用。",
            structured_content={"msg_id": target_message_id},
        )
    target_message = tool_ctx.runtime._source_messages_by_id.get(target_message_id)
    if target_message is None:
        return tool_ctx.build_failure_result(
            invocation.tool_name,
            f"未找到要 @ 的目标消息msg_id={target_message_id}",
            structured_content={"msg_id": target_message_id},
        )
    target_user_id, target_user_nickname, target_user_cardname = _get_target_user_info(target_message)
    if not target_user_id:
        return tool_ctx.build_failure_result(
            invocation.tool_name,
            f"目标消息缺少有效用户 IDmsg_id={target_message_id}",
            structured_content={"msg_id": target_message_id},
        )
    # Display-name preference: group card > nickname > raw user id.
    target_user_name = target_user_cardname or target_user_nickname or target_user_id
    message_sequence = _build_at_message_sequence(
        target_user_id=target_user_id,
        target_user_nickname=target_user_nickname,
        target_user_cardname=target_user_cardname,
        text=text,
    )
    display_message = f"@{target_user_name}" + (f" {text}" if text else "")
    try:
        if tool_ctx.runtime.chat_stream.platform == CLI_PLATFORM_NAME:
            # CLI platform: render locally and record in history; nothing is sent over a network.
            render_cli_message(display_message)
            tool_ctx.append_guided_reply_to_chat_history(display_message)
            sent_message = None
            sent = True
        else:
            sent_message = await send_service._send_to_target_with_message(
                message_sequence=message_sequence,
                stream_id=tool_ctx.runtime.session_id,
                display_message=display_message,
                typing=False,
                storage_message=True,
                show_log=True,
                sync_to_maisaka_history=True,
                maisaka_source_kind="guided_reply",
            )
            sent = sent_message is not None
    except Exception as exc:
        # Log with traceback, then surface a structured failure to the caller.
        logger.exception(
            f"{tool_ctx.runtime.log_prefix} 发送 at 消息时发生异常: msg_id={target_message_id} user_id={target_user_id}"
        )
        return tool_ctx.build_failure_result(
            invocation.tool_name,
            f"发送 at 消息时发生异常:{exc}",
            structured_content={
                "msg_id": target_message_id,
                "target_user_id": target_user_id,
                "target_user_name": target_user_name,
            },
        )
    if not sent:
        return tool_ctx.build_failure_result(
            invocation.tool_name,
            "at 消息发送失败。",
            structured_content={
                "msg_id": target_message_id,
                "target_user_id": target_user_id,
                "target_user_name": target_user_name,
            },
        )
    # sent_message is None on the CLI path; guard before reading message_id.
    sent_message_id = str(getattr(sent_message, "message_id", "") or "").strip() if sent_message is not None else ""
    tool_ctx.runtime._record_reply_sent()
    return tool_ctx.build_success_result(
        invocation.tool_name,
        f"已 @ {target_user_name}",
        structured_content={
            "msg_id": target_message_id,
            "target_user_id": target_user_id,
            "target_user_name": target_user_name,
            "text": text,
            "sent_message_id": sent_message_id,
        },
    )

View File

@@ -4,10 +4,12 @@ from __future__ import annotations
from base64 import b64decode
from datetime import datetime
from typing import TYPE_CHECKING, Any, Dict, List, Optional
from typing import Any, Dict, List, Optional, TYPE_CHECKING
import re
from src.chat.utils.utils import process_llm_response
from src.common.data_models.message_component_data_model import EmojiComponent, MessageSequence, TextComponent
from src.common.data_models.message_component_data_model import AtComponent, EmojiComponent, MessageSequence, TextComponent
from src.config.config import global_config
from src.core.tooling import ToolExecutionResult
@@ -19,6 +21,8 @@ if TYPE_CHECKING:
from ..reasoning_engine import MaisakaReasoningEngine
from ..runtime import MaisakaHeartFlowChatting
AT_MARKER_PATTERN = re.compile(r"at\[([^\]\s]+)\]")
class BuiltinToolRuntimeContext:
"""为拆分后的内置工具提供统一运行时能力。"""
@@ -131,6 +135,74 @@ class BuiltinToolRuntimeContext:
return processed_segments
return [reply_text.strip()]
@staticmethod
def _post_process_reply_text_chunk(text: str) -> List[str]:
    """Run a plain text fragment through the LLM post-processor, dropping blank segments."""
    stripped_segments = (segment.strip() for segment in process_llm_response(text))
    return [piece for piece in stripped_segments if piece]
def _build_at_component_for_message_id(self, message_id: str) -> Optional[AtComponent]:
    """Resolve an at[] marker's message id to an AtComponent, or None if unresolvable."""
    source_message = self.runtime._source_messages_by_id.get(message_id)
    if source_message is None:
        return None
    user_info = getattr(getattr(source_message, "message_info", None), "user_info", None)
    user_id = str(getattr(user_info, "user_id", "") or "").strip()
    if not user_id:
        # A mention without a concrete user id cannot be delivered.
        return None
    nickname = str(getattr(user_info, "user_nickname", "") or "").strip()
    cardname = str(getattr(user_info, "user_cardname", "") or "").strip()
    return AtComponent(
        target_user_id=user_id,
        target_user_nickname=nickname or None,
        target_user_cardname=cardname or None,
    )
def post_process_reply_message_sequences(self, reply_text: str) -> List[MessageSequence]:
    """Split reply text into sendable component sequences, resolving replyer at[msg_id] markers."""
    # Fast path: feature disabled or no marker present -> plain text segmentation only.
    if not global_config.chat.enable_at or not AT_MARKER_PATTERN.search(reply_text):
        return [MessageSequence([TextComponent(segment)]) for segment in self.post_process_reply_text(reply_text)]
    message_sequences: List[MessageSequence] = []
    components: List[Any] = []
    cursor = 0

    def flush_text_chunk(text: str) -> None:
        # Post-process the plain text between markers; prefix a space when the
        # chunk follows an earlier component so it doesn't fuse with the mention.
        if not text.strip():
            return
        for segment in self._post_process_reply_text_chunk(text):
            prefix = " " if components else ""
            components.append(TextComponent(f"{prefix}{segment}"))

    for match in AT_MARKER_PATTERN.finditer(reply_text):
        flush_text_chunk(reply_text[cursor : match.start()])
        message_id = match.group(1).strip()
        at_component = self._build_at_component_for_message_id(message_id)
        if at_component is None:
            # Unresolvable marker: keep the literal at[...] text instead of dropping it.
            components.append(TextComponent(match.group(0)))
        else:
            components.append(at_component)
        cursor = match.end()
    flush_text_chunk(reply_text[cursor:])
    if components:
        message_sequences.append(MessageSequence(components))
    if message_sequences:
        return message_sequences
    # Everything was stripped away; fall back to the raw text as a single sequence.
    return [MessageSequence([TextComponent(reply_text.strip())])]
def get_runtime_manager(self) -> Any:
"""获取插件运行时管理器。"""
@@ -149,6 +221,35 @@ class BuiltinToolRuntimeContext:
)
self.runtime._chat_history.append(history_message)
def append_sent_message_to_chat_history(self, message: Any, *, source_kind: str = "guided_reply") -> bool:
    """Write an already-sent message back into the Maisaka chat history.

    Delegates to the runtime's own implementation when available; otherwise
    rebuilds the history entry locally. Returns True on success.
    """
    # Prefer the runtime's implementation if it exposes one.
    runtime_append = getattr(self.runtime, "append_sent_message_to_chat_history", None)
    if callable(runtime_append):
        return bool(runtime_append(message, source_kind=source_kind))
    # Deferred imports: avoids a circular import at module load time.
    from ..context_messages import SessionBackedMessage
    from ..history_utils import build_prefixed_message_sequence, build_session_message_visible_text
    from ..planner_message_utils import build_planner_prefix

    user_info = message.message_info.user_info
    # Display-name preference: group card > nickname > raw user id.
    speaker_name = user_info.user_cardname or user_info.user_nickname or user_info.user_id
    planner_prefix = build_planner_prefix(
        timestamp=message.timestamp,
        user_name=speaker_name,
        group_card=user_info.user_cardname or "",
        message_id=message.message_id,
        # Notify messages and id-less messages carry no [msg_id] tag.
        include_message_id=not message.is_notify and bool(message.message_id),
    )
    history_message = SessionBackedMessage.from_session_message(
        message,
        raw_message=build_prefixed_message_sequence(message.raw_message, planner_prefix),
        visible_text=build_session_message_visible_text(message),
        source_kind=source_kind,
    )
    self.runtime._chat_history.append(history_message)
    return True
def append_sent_emoji_to_chat_history(
self,
*,

View File

@@ -10,6 +10,7 @@ from src.common.data_models.reply_generation_data_models import ReplyGenerationR
from src.common.logger import get_logger
from src.config import config as config_module
from src.core.tooling import ToolExecutionContext, ToolExecutionResult, ToolInvocation, ToolSpec
from src.maisaka.message_adapter import build_visible_text_from_sequence
from src.services import send_service
from .context import BuiltinToolRuntimeContext
@@ -180,7 +181,8 @@ async def handle_tool(
metadata=reply_metadata,
)
reply_segments = tool_ctx.post_process_reply_text(reply_text)
reply_sequences = tool_ctx.post_process_reply_message_sequences(reply_text)
reply_segments = [build_visible_text_from_sequence(sequence) for sequence in reply_sequences]
combined_reply_text = "".join(reply_segments)
sent_message_ids: list[str] = []
send_results: list[dict[str, Any]] = []
@@ -199,11 +201,13 @@ async def handle_tool(
)
sent = True
else:
for index, segment in enumerate(reply_segments):
for index, reply_sequence in enumerate(reply_sequences):
segment = reply_segments[index]
segment_set_quote = effective_set_quote if index == 0 else False
sent_message = await send_service.text_to_stream_with_message(
text=segment,
sent_message = await send_service._send_to_target_with_message(
message_sequence=reply_sequence,
stream_id=tool_ctx.runtime.session_id,
display_message=segment,
set_reply=segment_set_quote,
reply_message=target_message if segment_set_quote else None,
selected_expressions=reply_result.selected_expression_ids or None,

View File

@@ -90,13 +90,15 @@ def _refresh_pending_visual_components(components: list[object]) -> bool:
def _should_refresh_image_component(component: ImageComponent) -> bool:
"""判断图片组件当前是否仍处于待补全文本的占位状态。"""
return not component.content or component.content == "[图片]"
normalized_content = component.content.strip()
return not normalized_content or normalized_content == "[图片]"
def _should_refresh_emoji_component(component: EmojiComponent) -> bool:
"""判断表情组件当前是否仍处于待补全文本的占位状态。"""
return not component.content or component.content == "[表情包]"
normalized_content = component.content.strip()
return not normalized_content or normalized_content == "[表情包]"
def _lookup_cached_image_description(image_hash: str) -> str:

View File

@@ -53,8 +53,9 @@ def _append_emoji_component(
builder.add_image_content(image_format, base64.b64encode(component.binary_data).decode("utf-8"))
return True
if component.content:
builder.add_text_content(component.content)
normalized_content = component.content.strip()
if normalized_content:
builder.add_text_content(normalized_content)
return True
builder.add_text_content("[表情包]")
@@ -74,8 +75,9 @@ def _append_image_component(
builder.add_image_content(image_format, base64.b64encode(component.binary_data).decode("utf-8"))
return True
if component.content:
builder.add_text_content(component.content)
normalized_content = component.content.strip()
if normalized_content:
builder.add_text_content(normalized_content)
return True
builder.add_text_content("[图片]")

View File

@@ -79,11 +79,11 @@ def build_visible_text_from_sequence(message_sequence: MessageSequence) -> str:
continue
if isinstance(component, EmojiComponent):
parts.append(component.content or "[表情包]")
parts.append(component.content.strip() or "[表情包]")
continue
if isinstance(component, ImageComponent):
parts.append(component.content or "[图片]")
parts.append(component.content.strip() or "[图片]")
continue
if isinstance(component, AtComponent):

View File

@@ -453,15 +453,15 @@ def _build_processed_plain_text(message: SessionMessage) -> str:
continue
if isinstance(component, ImageComponent):
processed_parts.append(component.content or "[图片]")
processed_parts.append(component.content.strip() or "[图片]")
continue
if isinstance(component, EmojiComponent):
processed_parts.append(component.content or "[表情]")
processed_parts.append(component.content.strip() or "[表情]")
continue
if isinstance(component, VoiceComponent):
processed_parts.append(component.content or "[语音]")
processed_parts.append(component.content.strip() or "[语音]")
continue
if isinstance(component, AtComponent):