部分模块的新数据结构适配 (adapt some modules to the new data structure)
@@ -8,7 +8,7 @@ from rich.traceback import install
 from src.config.config import global_config
 from src.common.logger import get_logger
 from src.common.data_models.info_data_model import ActionPlannerInfo
-from src.common.data_models.message_data_model import ReplyContentType
+from src.common.data_models.message_component_data_model import MessageSequence, TextComponent
 from src.chat.message_receive.chat_manager import chat_manager as _chat_manager
 from src.chat.utils.prompt_builder import global_prompt_manager
 from src.chat.utils.timer_calculator import Timer
@@ -35,7 +35,6 @@ from src.chat.utils.chat_message_builder import (

 if TYPE_CHECKING:
     from src.common.data_models.database_data_model import DatabaseMessages
-    from src.common.data_models.message_data_model import ReplySetModel


 ERROR_LOOP_INFO = {
@@ -513,7 +512,7 @@ class BrainChatting:

     async def _send_response(
         self,
-        reply_set: "ReplySetModel",
+        reply_set: MessageSequence,
         message_data: "DatabaseMessages",
         selected_expressions: Optional[List[int]] = None,
     ) -> str:
@@ -528,10 +527,10 @@ class BrainChatting:

         reply_text = ""
         first_replied = False
-        for reply_content in reply_set.reply_data:
-            if reply_content.content_type != ReplyContentType.TEXT:
+        for component in reply_set.components:
+            if not isinstance(component, TextComponent):
                 continue
-            data: str = reply_content.content  # type: ignore
+            data = component.text
             if not first_replied:
                 await send_api.text_to_stream(
                     text=data,

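For orientation, a minimal sketch of the shapes the new loop above assumes — the class and field names (MessageSequence.components, TextComponent.text) are taken from the usage in this hunk, not from the actual definitions in src.common.data_models:

    from dataclasses import dataclass, field
    from typing import Any, List

    @dataclass
    class TextComponent:
        # a plain-text fragment of a reply
        text: str

    @dataclass
    class MessageSequence:
        # ordered reply components; _send_response skips anything that is not a TextComponent
        components: List[Any] = field(default_factory=list)

    # usage mirroring the new loop above
    seq = MessageSequence(components=[TextComponent(text="hello"), object()])
    texts = [c.text for c in seq.components if isinstance(c, TextComponent)]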
@@ -7,29 +7,31 @@ import re
 from typing import List, Optional, Dict, Any, Tuple
 from datetime import datetime
 from src.common.logger import get_logger
 from src.common.data_models.database_data_model import DatabaseMessages
 from src.common.data_models.info_data_model import ActionPlannerInfo
 from src.common.data_models.llm_data_model import LLMGenerationDataModel
 from src.config.config import global_config, model_config
 from src.llm_models.utils_model import LLMRequest
-from src.chat.message_receive.message_old import UserInfo, Seg, MessageRecv, MessageSending
-from src.chat.message_receive.chat_stream import ChatStream
+from maim_message import BaseMessageInfo, MessageBase, Seg
+
+from src.common.data_models.mai_message_data_model import MaiMessage, UserInfo
+from src.chat.message_receive.message import SessionMessage
+from src.chat.message_receive.chat_manager import BotChatSession
 from src.chat.message_receive.uni_message_sender import UniversalMessageSender
 from src.chat.utils.timer_calculator import Timer  # <--- Import Timer
 from src.chat.utils.utils import get_chat_type_and_target_info, is_bot_self
 from src.prompt.prompt_manager import prompt_manager
-from src.chat.utils.chat_message_builder import (
+from src.services.message_service import (
     build_readable_messages,
     get_raw_msg_before_timestamp_with_chat,
     replace_user_references,
+    translate_pid_to_description,
 )
 from src.bw_learner.expression_selector import expression_selector
-from src.plugin_system.apis.message_api import translate_pid_to_description

 # from src.memory_system.memory_activator import MemoryActivator
 from src.person_info.person_info import Person
-from src.plugin_system.base.component_types import ActionInfo, EventType
-from src.plugin_system.apis import llm_api
+from src.core.types import ActionInfo, EventType
+from src.services import llm_service as llm_api

 from src.chat.logger.plan_reply_logger import PlanReplyLogger
 from src.memory_system.memory_retrieval import init_memory_retrieval_sys, build_memory_retrieval_prompt
@@ -45,17 +47,17 @@ logger = get_logger("replyer")
 class DefaultReplyer:
     def __init__(
         self,
-        chat_stream: ChatStream,
+        chat_stream: BotChatSession,
         request_type: str = "replyer",
     ):
         self.express_model = LLMRequest(model_set=model_config.model_task_config.replyer, request_type=request_type)
         self.chat_stream = chat_stream
-        self.is_group_chat, self.chat_target_info = get_chat_type_and_target_info(self.chat_stream.stream_id)
+        self.is_group_chat, self.chat_target_info = get_chat_type_and_target_info(self.chat_stream.session_id)
         self.heart_fc_sender = UniversalMessageSender()

-        from src.plugin_system.core.tool_use import ToolExecutor  # 延迟导入ToolExecutor,不然会循环依赖
+        from src.chat.tool_executor import ToolExecutor

-        self.tool_executor = ToolExecutor(chat_id=self.chat_stream.stream_id, enable_cache=True, cache_ttl=3)
+        self.tool_executor = ToolExecutor(chat_id=self.chat_stream.session_id, enable_cache=True, cache_ttl=3)

     async def generate_reply_with_context(
         self,
@@ -66,7 +68,7 @@ class DefaultReplyer:
         enable_tool: bool = True,
         from_plugin: bool = True,
         stream_id: Optional[str] = None,
-        reply_message: Optional[DatabaseMessages] = None,
+        reply_message: Optional[SessionMessage] = None,
         reply_time_point: float = time.time(),
         think_level: int = 1,
         unknown_words: Optional[List[str]] = None,
@@ -132,7 +134,7 @@ class DefaultReplyer:
         if log_reply:
             try:
                 PlanReplyLogger.log_reply(
-                    chat_id=self.chat_stream.stream_id,
+                    chat_id=self.chat_stream.session_id,
                     prompt="",
                     output=None,
                     processed_output=None,
@@ -146,12 +148,12 @@ class DefaultReplyer:
         except Exception:
             logger.exception("记录reply日志失败")
             return False, llm_response
-        from src.plugin_system.core.events_manager import events_manager
+        from src.core.event_bus import event_bus
+        from src.chat.event_helpers import build_event_message

         if not from_plugin:
-            continue_flag, modified_message = await events_manager.handle_mai_events(
-                EventType.POST_LLM, None, prompt, None, stream_id=stream_id
-            )
+            _event_msg = build_event_message(EventType.POST_LLM, llm_prompt=prompt, stream_id=stream_id)
+            continue_flag, modified_message = await event_bus.emit(EventType.POST_LLM, _event_msg)
             if not continue_flag:
                 raise UserWarning("插件于请求前中断了内容生成")
             if modified_message and modified_message._modify_flags.modify_llm_prompt:
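The call sites above assume that event_bus.emit awaits every subscribed handler and resolves to a (continue_flag, modified_message) tuple. A hypothetical stand-in with that contract, for illustration only — the real src.core.event_bus and build_event_message are not shown in this diff:

    from typing import Any, Awaitable, Callable, Dict, List, Optional, Tuple

    Handler = Callable[[Any], Awaitable[Tuple[bool, Optional[Any]]]]

    class EventBus:
        def __init__(self) -> None:
            # event type -> async handlers returning (continue_flag, modified_event_or_None)
            self._handlers: Dict[Any, List[Handler]] = {}

        def subscribe(self, event_type: Any, handler: Handler) -> None:
            self._handlers.setdefault(event_type, []).append(handler)

        async def emit(self, event_type: Any, event: Any) -> Tuple[bool, Optional[Any]]:
            modified: Optional[Any] = None
            for handler in self._handlers.get(event_type, []):
                continue_flag, modified = await handler(modified if modified is not None else event)
                if not continue_flag:
                    # a handler vetoed the pipeline; the caller above raises UserWarning in that case
                    return False, modified
            return True, modified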
@@ -202,7 +204,7 @@ class DefaultReplyer:
         try:
             if log_reply:
                 PlanReplyLogger.log_reply(
-                    chat_id=self.chat_stream.stream_id,
+                    chat_id=self.chat_stream.session_id,
                     prompt=prompt,
                     output=content,
                     processed_output=None,
@@ -214,9 +216,10 @@ class DefaultReplyer:
                 )
         except Exception:
             logger.exception("记录reply日志失败")
-        continue_flag, modified_message = await events_manager.handle_mai_events(
-            EventType.AFTER_LLM, None, prompt, llm_response, stream_id=stream_id
+        _event_msg = build_event_message(
+            EventType.AFTER_LLM, llm_prompt=prompt, llm_response=llm_response, stream_id=stream_id
         )
+        continue_flag, modified_message = await event_bus.emit(EventType.AFTER_LLM, _event_msg)
         if not from_plugin and not continue_flag:
             raise UserWarning("插件于请求后取消了内容生成")
         if modified_message:
@@ -259,7 +262,7 @@ class DefaultReplyer:
         if log_reply:
             try:
                 PlanReplyLogger.log_reply(
-                    chat_id=self.chat_stream.stream_id,
+                    chat_id=self.chat_stream.session_id,
                     prompt=prompt or "",
                     output=None,
                     processed_output=None,
@@ -353,14 +356,14 @@ class DefaultReplyer:
             str: 表达习惯信息字符串
         """
         # 检查是否允许在此聊天流中使用表达
-        use_expression, _, _ = TempMethodsExpression.get_expression_config_for_chat(self.chat_stream.stream_id)
+        use_expression, _, _ = TempMethodsExpression.get_expression_config_for_chat(self.chat_stream.session_id)
         if not use_expression:
             return "", []
         style_habits = []
         # 使用从处理器传来的选中表达方式
         # 使用模型预测选择表达方式
         selected_expressions, selected_ids = await expression_selector.select_suitable_expressions(
-            self.chat_stream.stream_id,
+            self.chat_stream.session_id,
             chat_history,
             max_num=8,
             target_message=target,
@@ -594,7 +597,7 @@ class DefaultReplyer:
     async def _build_jargon_explanation(
         self,
         chat_id: str,
-        messages_short: List[DatabaseMessages],
+        messages_short: List[SessionMessage],
         chat_talking_prompt_short: str,
         unknown_words: Optional[List[str]],
     ) -> str:
@@ -703,9 +706,13 @@ class DefaultReplyer:
             is_group = stream_type == "group"

             # 使用 ChatManager 提供的接口生成 chat_id,避免在此重复实现逻辑
-            from src.chat.message_receive.chat_stream import get_chat_manager
+            from src.common.utils.utils_session import SessionUtils

-            chat_id = get_chat_manager().get_stream_id(platform, str(id_str), is_group=is_group)
+            chat_id = SessionUtils.calculate_session_id(
+                platform,
+                group_id=str(id_str) if is_group else None,
+                user_id=str(id_str) if not is_group else None,
+            )
             return chat_id, prompt_content

         except (ValueError, IndexError):
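The new call passes exactly one of group_id / user_id depending on the chat type. A hypothetical sketch of a calculate_session_id with that signature — the real implementation in src.common.utils.utils_session is not part of this diff:

    import hashlib
    from typing import Optional

    def calculate_session_id(platform: str, group_id: Optional[str] = None, user_id: Optional[str] = None) -> str:
        # exactly one of group_id / user_id is expected to be set
        kind, ident = ("group", group_id) if group_id is not None else ("private", user_id)
        return hashlib.md5(f"{platform}:{kind}:{ident}".encode("utf-8")).hexdigest()

    # usage mirroring the replacement above
    gid = calculate_session_id("qq", group_id="123456")   # group chat
    uid = calculate_session_id("qq", user_id="654321")    # private chat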
@@ -751,7 +758,7 @@ class DefaultReplyer:

     async def build_prompt_reply_context(
         self,
-        reply_message: Optional[DatabaseMessages] = None,
+        reply_message: Optional[SessionMessage] = None,
         extra_info: str = "",
         reply_reason: str = "",
         available_actions: Optional[Dict[str, ActionInfo]] = None,
@@ -778,7 +785,7 @@ class DefaultReplyer:
         if available_actions is None:
             available_actions = {}
         chat_stream = self.chat_stream
-        chat_id = chat_stream.stream_id
+        chat_id = chat_stream.session_id
         _is_group_chat = bool(chat_stream.group_info)
         platform = chat_stream.platform

@@ -1005,7 +1012,7 @@ class DefaultReplyer:
         reply_to: str,
     ) -> str:  # sourcery skip: merge-else-if-into-elif, remove-redundant-if
         chat_stream = self.chat_stream
-        chat_id = chat_stream.stream_id
+        chat_id = chat_stream.session_id
         sender, target = self._parse_reply_target(reply_to)
         target = replace_user_references(target, chat_stream.platform, replace_bot_name=True)

@@ -1105,31 +1112,29 @@ class DefaultReplyer:
         is_emoji: bool,
         thinking_start_time: float,
         display_message: str,
-        anchor_message: Optional[MessageRecv] = None,
-    ) -> MessageSending:
+        anchor_message: Optional[MaiMessage] = None,
+    ) -> SessionMessage:
         """构建单个发送消息"""

-        bot_user_info = UserInfo(
-            user_id=str(global_config.bot.qq_account),
-            user_nickname=global_config.bot.nickname,
-            platform=self.chat_stream.platform,
-        )
-
-        # await anchor_message.process()
-        sender_info = anchor_message.message_info.user_info if anchor_message else None
-
-        return MessageSending(
-            message_id=message_id,  # 使用片段的唯一ID
-            chat_stream=self.chat_stream,
-            bot_user_info=bot_user_info,
-            sender_info=sender_info,
+        maim_message = MessageBase(
+            message_info=BaseMessageInfo(
+                platform=self.chat_stream.platform,
+                message_id=message_id,
+                time=thinking_start_time,
+                user_info=UserInfo(
+                    user_id=str(global_config.bot.qq_account),
+                    user_nickname=global_config.bot.nickname,
+                ),
+                additional_config={},
+            ),
             message_segment=message_segment,
-            reply=anchor_message,  # 回复原始锚点
-            is_head=reply_to,
-            is_emoji=is_emoji,
-            thinking_start_time=thinking_start_time,  # 传递原始思考开始时间
-            display_message=display_message,
         )
+        message = SessionMessage.from_maim_message(maim_message)
+        message.session_id = self.chat_stream.session_id
+        message.display_message = display_message
+        message.reply_to = anchor_message.message_id if reply_to and anchor_message else None
+        message.is_emoji = is_emoji
+        return message

     async def llm_generate_content(self, prompt: str):
         with Timer("LLM生成", {}):  # 内部计时器,可选保留

@@ -7,33 +7,31 @@ import re
 from typing import List, Optional, Dict, Any, Tuple
 from datetime import datetime
 from src.common.logger import get_logger
 from src.common.data_models.database_data_model import DatabaseMessages
 from src.common.data_models.info_data_model import ActionPlannerInfo
 from src.common.data_models.llm_data_model import LLMGenerationDataModel
 from src.config.config import global_config, model_config
 from src.llm_models.utils_model import LLMRequest
-from maim_message import Seg
+from maim_message import BaseMessageInfo, MessageBase, Seg

 from src.common.data_models.mai_message_data_model import MaiMessage, UserInfo
-from src.chat.message_receive.message_old import MessageSending
+from src.chat.message_receive.message import SessionMessage
 from src.chat.message_receive.chat_manager import BotChatSession
 from src.chat.message_receive.uni_message_sender import UniversalMessageSender
 from src.chat.utils.timer_calculator import Timer
 from src.chat.utils.utils import get_chat_type_and_target_info, is_bot_self
 from src.prompt.prompt_manager import prompt_manager
 from src.chat.utils.common_utils import TempMethodsExpression
-from src.chat.utils.chat_message_builder import (
+from src.services.message_service import (
     build_readable_messages,
     get_raw_msg_before_timestamp_with_chat,
     replace_user_references,
     translate_pid_to_description,
 )
 from src.bw_learner.expression_selector import expression_selector
 from src.services.message_service import translate_pid_to_description

 # from src.memory_system.memory_activator import MemoryActivator
 from src.person_info.person_info import Person, is_person_known
 from src.core.types import ActionInfo, EventType
 from src.services import llm_service as llm_api
 from src.memory_system.memory_retrieval import init_memory_retrieval_sys, build_memory_retrieval_prompt
 from src.bw_learner.jargon_explainer_old import explain_jargon_in_context
@@ -69,7 +67,7 @@ class PrivateReplyer:
         from_plugin: bool = True,
         think_level: int = 1,
         stream_id: Optional[str] = None,
-        reply_message: Optional[DatabaseMessages] = None,
+        reply_message: Optional[SessionMessage] = None,
         reply_time_point: Optional[float] = time.time(),
         unknown_words: Optional[List[str]] = None,
         log_reply: bool = True,
@@ -604,7 +602,7 @@ class PrivateReplyer:

     async def build_prompt_reply_context(
         self,
-        reply_message: Optional[DatabaseMessages] = None,
+        reply_message: Optional[SessionMessage] = None,
         extra_info: str = "",
         reply_reason: str = "",
         available_actions: Optional[Dict[str, ActionInfo]] = None,
@@ -954,28 +952,29 @@ class PrivateReplyer:
         thinking_start_time: float,
         display_message: str,
         anchor_message: Optional[MaiMessage] = None,
-    ) -> MessageSending:
+    ) -> SessionMessage:
         """构建单个发送消息"""

-        bot_user_info = UserInfo(
-            user_id=str(global_config.bot.qq_account),
-            user_nickname=global_config.bot.nickname,
-        )
-
-        sender_info = anchor_message.message_info.user_info if anchor_message else None
-
-        return MessageSending(
-            message_id=message_id,
-            session=self.chat_stream,
-            bot_user_info=bot_user_info,
-            sender_info=sender_info,
+        maim_message = MessageBase(
+            message_info=BaseMessageInfo(
+                platform=self.chat_stream.platform,
+                message_id=message_id,
+                time=thinking_start_time,
+                user_info=UserInfo(
+                    user_id=str(global_config.bot.qq_account),
+                    user_nickname=global_config.bot.nickname,
+                ),
+                group_info=None,
+                additional_config={},
+            ),
             message_segment=message_segment,
-            reply=anchor_message,
-            is_head=reply_to,
-            is_emoji=is_emoji,
-            thinking_start_time=thinking_start_time,
-            display_message=display_message,
         )
+        message = SessionMessage.from_maim_message(maim_message)
+        message.session_id = self.chat_stream.session_id
+        message.display_message = display_message
+        message.reply_to = anchor_message.message_id if reply_to and anchor_message else None
+        message.is_emoji = is_emoji
+        return message

     async def llm_generate_content(self, prompt: str):
         with Timer("LLM生成", {}):  # 内部计时器,可选保留
@@ -999,55 +998,9 @@ class PrivateReplyer:
         return content, reasoning_content, model_name, tool_calls

     async def get_prompt_info(self, message: str, sender: str, target: str):
-        related_info = ""
-        start_time = time.time()
-        from src.plugins.built_in.knowledge.lpmm_get_knowledge import SearchKnowledgeFromLPMMTool
-
-        logger.debug(f"获取知识库内容,元消息:{message[:30]}...,消息长度: {len(message)}")
-        # 从LPMM知识库获取知识
-        try:
-            # 检查LPMM知识库是否启用
-            if not global_config.lpmm_knowledge.enable:
-                logger.debug("LPMM知识库未启用,跳过获取知识库内容")
-                return ""
-
-            if global_config.lpmm_knowledge.lpmm_mode == "agent":
-                return ""
-
-            prompt_template = prompt_manager.get_prompt("lpmm_get_knowledge")
-            prompt_template.add_context("bot_name", global_config.bot.nickname)
-            prompt_template.add_context("time_now", time.strftime("%Y-%m-%d %H:%M:%S", time.localtime()))
-            prompt_template.add_context("chat_history", message)
-            prompt_template.add_context("sender", sender)
-            prompt_template.add_context("target_message", target)
-            prompt = await prompt_manager.render_prompt(prompt_template)
-
-            _, _, _, _, tool_calls = await llm_api.generate_with_model_with_tools(
-                prompt,
-                model_config=model_config.model_task_config.tool_use,
-                tool_options=[SearchKnowledgeFromLPMMTool.get_tool_definition()],
-            )
-            if tool_calls:
-                result = await self.tool_executor.execute_tool_call(tool_calls[0], SearchKnowledgeFromLPMMTool())
-                end_time = time.time()
-                if not result or not result.get("content"):
-                    logger.debug("从LPMM知识库获取知识失败,返回空知识...")
-                    return ""
-                found_knowledge_from_lpmm = result.get("content", "")
-                logger.debug(
-                    f"从LPMM知识库获取知识,相关信息:{found_knowledge_from_lpmm[:100]}...,信息长度: {len(found_knowledge_from_lpmm)}"
-                )
-                related_info += found_knowledge_from_lpmm
-                logger.debug(f"获取知识库内容耗时: {(end_time - start_time):.3f}秒")
-                logger.debug(f"获取知识库内容,相关信息:{related_info[:100]}...,信息长度: {len(related_info)}")
-
-                return f"你有以下这些**知识**:\n{related_info}\n请你**记住上面的知识**,之后可能会用到。\n"
-            else:
-                logger.debug("模型认为不需要使用LPMM知识库")
-                return ""
-        except Exception as e:
-            logger.error(f"获取知识库内容时发生异常: {str(e)}")
-            return ""
+        logger.debug(f"已跳过知识库信息获取,元消息:{message[:30]}...,消息长度: {len(message)}")
+        del message, sender, target
+        return ""


 def weighted_sample_no_replacement(items, weights, k) -> list: