feat: 关系提取支持多人,且更精确;支持全局表达方式

This commit is contained in:
SengokuCola
2025-09-13 15:05:29 +08:00
parent f0cf8b42c8
commit e21a7002e6
9 changed files with 103 additions and 100 deletions

View File

@@ -114,6 +114,20 @@ class ExpressionSelector:
def get_related_chat_ids(self, chat_id: str) -> List[str]:
"""根据expression_groups配置获取与当前chat_id相关的所有chat_id包括自身"""
groups = global_config.expression.expression_groups
# 检查是否存在全局共享组(包含"*"的组)
global_group_exists = any("*" in group for group in groups)
if global_group_exists:
# 如果存在全局共享组则返回所有可用的chat_id
all_chat_ids = set()
for group in groups:
for stream_config_str in group:
if chat_id_candidate := self._parse_stream_config_to_chat_id(stream_config_str):
all_chat_ids.add(chat_id_candidate)
return list(all_chat_ids) if all_chat_ids else [chat_id]
# 否则使用现有的组逻辑
for group in groups:
group_chat_ids = []
for stream_config_str in group:

View File

@@ -453,8 +453,8 @@ class ActionPlanner:
# 调用LLM
llm_content, (reasoning_content, _, _) = await self.planner_llm.generate_response_async(prompt=prompt)
logger.info(f"{self.log_prefix}规划器原始提示词: {prompt}")
logger.info(f"{self.log_prefix}规划器原始响应: {llm_content}")
# logger.info(f"{self.log_prefix}规划器原始提示词: {prompt}")
# logger.info(f"{self.log_prefix}规划器原始响应: {llm_content}")
if global_config.debug.show_prompt:
logger.info(f"{self.log_prefix}规划器原始提示词: {prompt}")

View File

@@ -306,7 +306,7 @@ class DefaultReplyer:
traceback.print_exc()
return False, llm_response
async def build_relation_info(self, sender: str, target: str):
async def build_relation_info(self, chat_content: str, sender: str, person_list: List[Person] = None):
if not global_config.relationship.enable_relationship:
return ""
@@ -322,7 +322,13 @@ class DefaultReplyer:
logger.warning(f"未找到用户 {sender} 的ID跳过信息提取")
return f"你完全不认识{sender}不理解ta的相关信息。"
return person.build_relationship()
sender_relation = await person.build_relationship(chat_content)
others_relation = ""
for person in person_list:
person_relation = await person.build_relationship()
others_relation += person_relation
return f"{sender_relation}\n{others_relation}"
async def build_expression_habits(self, chat_history: str, target: str) -> Tuple[str, List[int]]:
# sourcery skip: for-append-to-extend
@@ -748,6 +754,19 @@ class DefaultReplyer:
timestamp=time.time(),
limit=int(global_config.chat.max_context_size * 0.33),
)
person_list_short:List[Person] = []
for msg in message_list_before_short:
if global_config.bot.qq_account == msg.user_info.user_id and global_config.bot.platform == msg.user_info.platform:
continue
if reply_message and reply_message.user_info.user_id == msg.user_info.user_id and reply_message.user_info.platform == msg.user_info.platform:
continue
person = Person(platform=msg.user_info.platform, user_id=msg.user_info.user_id)
if person.is_known:
person_list_short.append(person)
for person in person_list_short:
print(person.person_name)
chat_talking_prompt_short = build_readable_messages(
message_list_before_short,
@@ -762,7 +781,7 @@ class DefaultReplyer:
self._time_and_run_task(
self.build_expression_habits(chat_talking_prompt_short, target), "expression_habits"
),
self._time_and_run_task(self.build_relation_info(sender, target), "relation_info"),
self._time_and_run_task(self.build_relation_info(chat_talking_prompt_short,sender, person_list_short), "relation_info"),
# self._time_and_run_task(self.build_memory_block(message_list_before_short, target), "memory_block"),
self._time_and_run_task(
self.build_tool_info(chat_talking_prompt_short, sender, target, enable_tool=enable_tool), "tool_info"
@@ -916,7 +935,7 @@ class DefaultReplyer:
# 并行执行2个构建任务
(expression_habits_block, _), relation_info, personality_prompt = await asyncio.gather(
self.build_expression_habits(chat_talking_prompt_half, target),
self.build_relation_info(sender, target),
self.build_relation_info(chat_talking_prompt_half, sender),
self.build_personality_prompt(),
)
@@ -1019,7 +1038,8 @@ class DefaultReplyer:
async def llm_generate_content(self, prompt: str):
with Timer("LLM生成", {}): # 内部计时器,可选保留
# 直接使用已初始化的模型实例
logger.info(f"\n{prompt}\n")
if global_config.debug.show_prompt:
logger.info(f"\n{prompt}\n")
else: