Refactor message sending architecture and implement legacy driver support
- Removed UniversalMessageSender from group_generator.py and private_generator.py.
- Updated PlatformIOManager to manage legacy send drivers and ensure send pipeline readiness.
- Enhanced LegacyPlatformDriver to utilize prepared messages for sending.
- Refactored send_service to unify message-sending logic and integrate with Platform IO.
- Added regression tests for the Platform IO legacy driver and send-service functionality.
This commit is contained in:
@@ -17,7 +17,6 @@ from maim_message import BaseMessageInfo, MessageBase, Seg, UserInfo as MaimUser
|
||||
from src.common.data_models.mai_message_data_model import MaiMessage
|
||||
from src.chat.message_receive.message import SessionMessage
|
||||
from src.chat.message_receive.chat_manager import BotChatSession
|
||||
from src.chat.message_receive.uni_message_sender import UniversalMessageSender
|
||||
from src.chat.utils.timer_calculator import Timer # <--- Import Timer
|
||||
from src.chat.utils.utils import get_bot_account, get_chat_type_and_target_info, is_bot_self
|
||||
from src.prompt.prompt_manager import prompt_manager
|
||||
@@ -51,10 +50,15 @@ class DefaultReplyer:
|
||||
chat_stream: BotChatSession,
|
||||
request_type: str = "replyer",
|
||||
):
|
||||
"""初始化群聊回复器。
|
||||
|
||||
Args:
|
||||
chat_stream: 当前绑定的聊天会话。
|
||||
request_type: LLM 请求类型标识。
|
||||
"""
|
||||
self.express_model = LLMRequest(model_set=model_config.model_task_config.replyer, request_type=request_type)
|
||||
self.chat_stream = chat_stream
|
||||
self.is_group_chat, self.chat_target_info = get_chat_type_and_target_info(self.chat_stream.session_id)
|
||||
self.heart_fc_sender = UniversalMessageSender()
|
||||
|
||||
from src.chat.tool_executor import ToolExecutor
|
||||
|
||||
|
||||
@@ -16,7 +16,6 @@ from maim_message import BaseMessageInfo, MessageBase, Seg, UserInfo as MaimUser
|
||||
from src.common.data_models.mai_message_data_model import MaiMessage
|
||||
from src.chat.message_receive.message import SessionMessage
|
||||
from src.chat.message_receive.chat_manager import BotChatSession
|
||||
from src.chat.message_receive.uni_message_sender import UniversalMessageSender
|
||||
from src.chat.utils.timer_calculator import Timer
|
||||
from src.chat.utils.utils import get_bot_account, get_chat_type_and_target_info, is_bot_self
|
||||
from src.prompt.prompt_manager import prompt_manager
|
||||
@@ -47,10 +46,15 @@ class PrivateReplyer:
|
||||
chat_stream: BotChatSession,
|
||||
request_type: str = "replyer",
|
||||
):
|
||||
"""初始化私聊回复器。
|
||||
|
||||
Args:
|
||||
chat_stream: 当前绑定的聊天会话。
|
||||
request_type: LLM 请求类型标识。
|
||||
"""
|
||||
self.express_model = LLMRequest(model_set=model_config.model_task_config.replyer, request_type=request_type)
|
||||
self.chat_stream = chat_stream
|
||||
self.is_group_chat, self.chat_target_info = get_chat_type_and_target_info(self.chat_stream.session_id)
|
||||
self.heart_fc_sender = UniversalMessageSender()
|
||||
# self.memory_activator = MemoryActivator()
|
||||
|
||||
from src.chat.tool_executor import ToolExecutor
|
||||
|
||||
Reference in New Issue
Block a user