fix: 回复格式问题

This commit is contained in:
SengokuCola
2026-04-25 01:25:52 +08:00
parent 705452793d
commit 4b1bc2aba8
3 changed files with 29 additions and 6 deletions

View File

@@ -15,7 +15,7 @@ from .context_messages import LLMContextMessage, SessionBackedMessage
logger = get_logger("maisaka_chat_history_visual_refresher")
BuildHistoryMessage = Callable[[SessionMessage, str], Awaitable[Optional[LLMContextMessage]]]
BuildVisibleText = Callable[[SessionMessage], str]
BuildVisibleText = Callable[[SessionMessage, str], str]
async def refresh_chat_history_visual_placeholders(
@@ -42,7 +42,7 @@ async def refresh_chat_history_visual_placeholders(
enable_voice_transcription=False,
)
refreshed_visible_text = build_visible_text(original_message)
refreshed_visible_text = build_visible_text(original_message, history_message.source_kind)
if not visual_components_updated and refreshed_visible_text == history_message.visible_text:
continue

View File

@@ -26,6 +26,8 @@ from src.common.data_models.message_component_data_model import (
from src.llm_models.payload_content.message import Message, MessageBuilder, RoleType
from src.llm_models.payload_content.tool_option import ToolCall
from .message_adapter import parse_speaker_content
FORWARD_PREVIEW_LIMIT = 4
@@ -356,6 +358,13 @@ class SessionBackedMessage(LLMContextMessage):
return self.source_kind
def to_llm_message(self, enable_visual_message: bool = True) -> Optional[Message]:
if self.source_kind == "guided_reply":
_, reply_body = parse_speaker_content(self.processed_plain_text)
normalized_reply_body = reply_body.strip()
if not normalized_reply_body:
return None
return MessageBuilder().set_role(RoleType.Assistant).add_text_content(normalized_reply_body).build()
return _build_message_from_sequence(
RoleType.User,
self.raw_message,

View File

@@ -724,7 +724,7 @@ class MaisakaReasoningEngine:
"""根据真实消息构造对应的上下文消息。"""
source_sequence = message.raw_message
visible_text = self._build_legacy_visible_text(message, source_sequence)
visible_text = self._build_legacy_visible_text(message, source_sequence, source_kind=source_kind)
planner_prefix = build_planner_user_prefix_from_session_message(message)
if contains_complex_message(source_sequence):
return ComplexSessionMessage.from_session_message(
@@ -783,11 +783,25 @@ class MaisakaReasoningEngine:
message,
source_kind=source_kind,
),
build_visible_text=lambda message: self._build_legacy_visible_text(message, message.raw_message),
build_visible_text=lambda message, source_kind: self._build_legacy_visible_text(
message,
message.raw_message,
source_kind=source_kind,
),
)
def _build_legacy_visible_text(self, message: SessionMessage, source_sequence: MessageSequence) -> str:
return build_session_message_visible_text(message, source_sequence)
def _build_legacy_visible_text(
    self,
    message: SessionMessage,
    source_sequence: MessageSequence,
    *,
    source_kind: str = "user",
) -> str:
    """Build the legacy visible text for a session message.

    Args:
        message: The session message to render.
        source_sequence: The raw message sequence backing *message*.
        source_kind: Origin of the message; defaults to "user".
            When it is "guided_reply", reply components are excluded
            from the rendered text (include_reply_components=False);
            for every other kind they are included.

    Returns:
        The visible plain-text representation produced by
        build_session_message_visible_text.
    """
    # NOTE(review): this is a thin wrapper; the guided_reply special case
    # presumably prevents the bot's own guided reply from carrying quoted
    # reply components — confirm against build_session_message_visible_text.
    return build_session_message_visible_text(
        message,
        source_sequence,
        include_reply_components=source_kind != "guided_reply",
    )
def _insert_chat_history_message(self, message: LLMContextMessage) -> int:
"""将消息按处理顺序追加到聊天历史末尾。"""