ref:移除一些冗余模块,新建maisaka回复器
This commit is contained in:
@@ -1,7 +1,8 @@
|
||||
from typing import Dict
|
||||
|
||||
import asyncio
|
||||
import traceback
|
||||
|
||||
from typing import Dict
|
||||
|
||||
from src.chat.message_receive.chat_manager import chat_manager
|
||||
from src.common.logger import get_logger
|
||||
from src.maisaka.runtime import MaisakaHeartFlowChatting
|
||||
@@ -10,46 +11,38 @@ logger = get_logger("heartflow")
|
||||
|
||||
|
||||
class HeartflowManager:
    """Coordinate session-level Maisaka heartflow instances.

    Group chats uniformly use the Maisaka runtime as the core
    message-loop implementation; this manager caches one runtime
    per chat session and guards its creation with a per-session lock.
    """

    def __init__(self) -> None:
        """Initialise the runtime cache and the per-session creation locks."""
        # session_id -> running Maisaka runtime.
        self.heartflow_chat_list: Dict[str, MaisakaHeartFlowChatting] = {}
        # One asyncio.Lock per session so concurrent callers never start
        # two runtimes for the same session.
        self._chat_create_locks: Dict[str, asyncio.Lock] = {}

    async def get_or_create_heartflow_chat(self, session_id: str) -> MaisakaHeartFlowChatting:
        """Return the Maisaka runtime bound to ``session_id``, creating it on demand.

        Args:
            session_id: Chat session identifier.

        Returns:
            MaisakaHeartFlowChatting: The runtime bound to the session.

        Raises:
            ValueError: If no chat stream exists for ``session_id``.
        """
        try:
            # Fast path: runtime already exists, no lock needed.
            if chat := self.heartflow_chat_list.get(session_id):
                return chat

            create_lock = self._chat_create_locks.setdefault(session_id, asyncio.Lock())
            async with create_lock:
                # Double-check after acquiring the lock: another coroutine may
                # have created the runtime while we were waiting.
                if chat := self.heartflow_chat_list.get(session_id):
                    return chat

                chat_session = chat_manager.get_session_by_session_id(session_id)
                if not chat_session:
                    raise ValueError(f"未找到 session_id={session_id} 对应的聊天流")

                new_chat = MaisakaHeartFlowChatting(session_id=session_id)
                await new_chat.start()
                self.heartflow_chat_list[session_id] = new_chat
                return new_chat
        except Exception as exc:
            logger.error(f"创建心流聊天 {session_id} 失败: {exc}", exc_info=True)
            # NOTE(review): exc_info=True already logs the traceback; this extra
            # stderr dump looks redundant — confirm before removing.
            traceback.print_exc()
            # Bare raise preserves the original traceback (unlike `raise exc`).
            raise

    def adjust_talk_frequency(self, session_id: str, frequency: float) -> None:
        """Adjust the talk frequency of the given chat stream, if it exists.

        Args:
            session_id: Chat session identifier.
            frequency: Target frequency coefficient.
        """
        chat = self.heartflow_chat_list.get(session_id)
        if chat:
            chat.adjust_talk_frequency(frequency)
|
||||
|
||||
256
src/chat/replyer/maisaka_generator.py
Normal file
256
src/chat/replyer/maisaka_generator.py
Normal file
@@ -0,0 +1,256 @@
|
||||
from datetime import datetime
|
||||
from typing import Dict, List, Optional, Tuple
|
||||
|
||||
import random
|
||||
import time
|
||||
|
||||
from src.chat.message_receive.chat_manager import BotChatSession
|
||||
from src.chat.message_receive.message import SessionMessage
|
||||
from src.common.data_models.reply_generation_data_models import (
|
||||
GenerationMetrics,
|
||||
LLMCompletionResult,
|
||||
ReplyGenerationResult,
|
||||
)
|
||||
from src.common.logger import get_logger
|
||||
from src.common.prompt_i18n import load_prompt
|
||||
from src.config.config import global_config
|
||||
from src.core.types import ActionInfo
|
||||
from src.services.llm_service import LLMServiceClient
|
||||
|
||||
from src.maisaka.message_adapter import (
|
||||
get_message_kind,
|
||||
get_message_role,
|
||||
get_message_text,
|
||||
is_perception_message,
|
||||
parse_speaker_content,
|
||||
)
|
||||
|
||||
logger = get_logger("maisaka_replyer")
|
||||
|
||||
|
||||
class MaisakaReplyGenerator:
    """Generate Maisaka's final user-visible reply through the LLM service."""

    def __init__(
        self,
        chat_stream: Optional[BotChatSession] = None,
        request_type: str = "maisaka_replyer",
    ) -> None:
        """Bind the generator to an optional chat session and LLM request type."""
        self.chat_stream = chat_stream
        self.request_type = request_type
        self.express_model = LLMServiceClient(
            task_name="replyer",
            request_type=request_type,
        )
        # Built once per generator instance; may pick a random personality state.
        self._personality_prompt = self._build_personality_prompt()

    def _build_personality_prompt(self) -> str:
        """Build the persona description injected into the replyer prompt.

        Falls back to a hard-coded persona when the config is unavailable.
        """
        try:
            bot_name = global_config.bot.nickname
            alias_names = global_config.bot.alias_names
            bot_aliases = f",也有人叫你{','.join(alias_names)}" if alias_names else ""

            prompt_personality = global_config.personality.personality
            # Occasionally substitute a random personality "state" when
            # states are configured and the probability roll succeeds.
            if (
                hasattr(global_config.personality, "states")
                and global_config.personality.states
                and hasattr(global_config.personality, "state_probability")
                and global_config.personality.state_probability > 0
                and random.random() < global_config.personality.state_probability
            ):
                prompt_personality = random.choice(global_config.personality.states)

            return f"你的名字是{bot_name}{bot_aliases},你{prompt_personality};"
        except Exception as exc:
            logger.warning(f"Failed to build Maisaka personality prompt: {exc}")
            return "你的名字是麦麦,你是一个活泼可爱的 AI 助手。"

    @staticmethod
    def _normalize_content(content: str, limit: int = 500) -> str:
        """Collapse all whitespace runs and truncate to ``limit`` characters."""
        normalized = " ".join((content or "").split())
        if len(normalized) > limit:
            return normalized[:limit] + "..."
        return normalized

    @staticmethod
    def _format_message_time(message: SessionMessage) -> str:
        """Render a message timestamp as ``HH:MM:SS``."""
        return message.timestamp.strftime("%H:%M:%S")

    @staticmethod
    def _extract_visible_assistant_reply(message: SessionMessage) -> str:
        """Return the visible text of an assistant message.

        NOTE(review): both branches return "" — assistant messages therefore
        never contribute to the formatted history; confirm this is intended.
        """
        if is_perception_message(message):
            return ""
        return ""

    def _extract_guided_bot_reply(self, message: SessionMessage) -> str:
        """Extract a bot reply that arrived echoed inside a user message.

        Returns "" when the message's speaker is not the bot itself.
        """
        speaker_name, body = parse_speaker_content(get_message_text(message).strip())
        bot_nickname = global_config.bot.nickname.strip() or "Bot"
        if speaker_name == bot_nickname:
            return self._normalize_content(body.strip())
        return ""

    @staticmethod
    def _split_user_message_segments(raw_content: str) -> list[tuple[Optional[str], str]]:
        """Split a raw user message into (speaker, content) segments.

        Lines that introduce a speaker start a new segment; other lines are
        appended to the current one. Speaker is None for the leading segment
        when no speaker line has been seen yet.
        """
        segments: list[tuple[Optional[str], str]] = []
        current_speaker: Optional[str] = None
        current_lines: list[str] = []

        for raw_line in raw_content.splitlines():
            speaker_name, content_body = parse_speaker_content(raw_line)
            if speaker_name is not None:
                # New speaker: flush whatever was collected so far.
                if current_lines:
                    segments.append((current_speaker, "\n".join(current_lines)))
                current_speaker = speaker_name
                current_lines = [content_body]
                continue

            current_lines.append(raw_line)

        if current_lines:
            segments.append((current_speaker, "\n".join(current_lines)))

        return segments

    def _format_chat_history(self, messages: list[SessionMessage]) -> str:
        """Format the visible chat history that is fed into the prompt."""
        bot_nickname = global_config.bot.nickname.strip() or "Bot"
        parts: list[str] = []

        for message in messages:
            role = get_message_role(message)
            timestamp = self._format_message_time(message)

            if role == "user":
                # A "user" message may actually carry a guided bot reply.
                guided_reply = self._extract_guided_bot_reply(message)
                if guided_reply:
                    parts.append(f"{timestamp} {bot_nickname}(you): {guided_reply}")
                    continue

                raw_content = get_message_text(message)
                for speaker_name, content_body in self._split_user_message_segments(raw_content):
                    content = self._normalize_content(content_body)
                    if not content:
                        continue
                    visible_speaker = speaker_name or global_config.maisaka.user_name.strip() or "User"
                    parts.append(f"{timestamp} {visible_speaker}: {content}")
                continue

            if role == "assistant":
                visible_reply = self._extract_visible_assistant_reply(message)
                if visible_reply:
                    parts.append(f"{timestamp} {bot_nickname}(you): {visible_reply}")

        return "\n".join(parts)

    def _build_prompt(self, chat_history: List[SessionMessage], reply_reason: str) -> str:
        """Assemble the full Maisaka replyer prompt (system + user sections)."""
        current_time = datetime.now().strftime("%Y-%m-%d %H:%M:%S")
        formatted_history = self._format_chat_history(chat_history)

        try:
            system_prompt = load_prompt(
                "maidairy_replyer",
                bot_name=global_config.bot.nickname,
                identity=self._personality_prompt,
                reply_style=global_config.personality.reply_style,
            )
        except Exception:
            # Minimal fallback persona when the prompt template can't be loaded.
            system_prompt = "你是一个友好的 AI 助手,请根据用户的想法生成自然的回复。"

        user_prompt = (
            f"当前时间:{current_time}\n\n"
            f"【聊天记录】\n{formatted_history}\n\n"
            f"【你的想法】\n{reply_reason}\n\n"
            "现在,你说:"
        )
        return f"System: {system_prompt}\n\nUser: {user_prompt}"

    async def generate_reply_with_context(
        self,
        extra_info: str = "",
        reply_reason: str = "",
        available_actions: Optional[Dict[str, ActionInfo]] = None,
        chosen_actions: Optional[List[object]] = None,
        enable_tool: bool = True,
        from_plugin: bool = True,
        stream_id: Optional[str] = None,
        reply_message: Optional[SessionMessage] = None,
        reply_time_point: Optional[float] = None,
        think_level: int = 1,
        unknown_words: Optional[List[str]] = None,
        log_reply: bool = True,
        chat_history: Optional[List[SessionMessage]] = None,
    ) -> Tuple[bool, ReplyGenerationResult]:
        """Generate Maisaka's final visible reply from the given context.

        Args:
            reply_reason: The internal "thought" driving the reply; required.
            chat_history: Messages providing conversational context; required.
            stream_id: Session identifier, used for logging only.
            reply_message: The message being replied to, used for logging only.

        Returns:
            Tuple[bool, ReplyGenerationResult]: success flag and result payload.
        """
        # The following parameters exist only for interface compatibility
        # with the other replyers and are intentionally unused here.
        del available_actions
        del chosen_actions
        del enable_tool
        del extra_info
        del from_plugin
        del log_reply
        del reply_time_point
        del think_level
        del unknown_words

        result = ReplyGenerationResult()
        if not reply_reason or chat_history is None:
            result.error_message = "reply_reason or chat_history is empty"
            return False, result

        logger.info(
            f"Maisaka replyer start: stream_id={stream_id} reply_reason={reply_reason!r} "
            f"history_size={len(chat_history)} target_message_id="
            f"{reply_message.message_id if reply_message else None}"
        )

        # Drop system messages and perception messages before prompting.
        filtered_history = [
            message
            for message in chat_history
            if get_message_role(message) != "system" and get_message_kind(message) != "perception"
        ]
        prompt = self._build_prompt(filtered_history, reply_reason)
        result.completion.request_prompt = prompt

        if global_config.debug.show_replyer_prompt:
            logger.info(f"\nMaisaka replyer prompt:\n{prompt}\n")

        started_at = time.perf_counter()
        try:
            generation_result = await self.express_model.generate_response(prompt)
        except Exception as exc:
            logger.exception("Maisaka replyer call failed")
            result.error_message = str(exc)
            result.metrics = GenerationMetrics(
                overall_ms=round((time.perf_counter() - started_at) * 1000, 2),
            )
            return False, result

        response_text = (generation_result.response or "").strip()
        result.success = bool(response_text)
        result.completion = LLMCompletionResult(
            request_prompt=prompt,
            response_text=response_text,
            reasoning_text=generation_result.reasoning or "",
            model_name=generation_result.model_name or "",
            tool_calls=generation_result.tool_calls or [],
        )
        result.metrics = GenerationMetrics(
            overall_ms=round((time.perf_counter() - started_at) * 1000, 2),
        )

        if global_config.debug.show_replyer_reasoning and result.completion.reasoning_text:
            logger.info(f"Maisaka replyer reasoning:\n{result.completion.reasoning_text}")

        if not result.success:
            result.error_message = "replyer returned empty content"
            logger.warning("Maisaka replyer returned empty content")
            return False, result

        logger.info(
            f"Maisaka replyer success: response_text={response_text!r} "
            f"overall_ms={result.metrics.overall_ms}"
        )
        result.text_fragments = [response_text]
        return True, result
|
||||
@@ -1,65 +1,82 @@
|
||||
from typing import Dict, Optional
|
||||
from typing import TYPE_CHECKING, Any, Dict, Optional
|
||||
|
||||
from src.common.logger import get_logger
|
||||
from src.chat.message_receive.chat_manager import BotChatSession, chat_manager as _chat_manager
|
||||
from src.chat.replyer.group_generator import DefaultReplyer
|
||||
from src.chat.replyer.private_generator import PrivateReplyer
|
||||
from src.common.logger import get_logger
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from src.chat.replyer.group_generator import DefaultReplyer
|
||||
from src.chat.replyer.maisaka_generator import MaisakaReplyGenerator
|
||||
from src.chat.replyer.private_generator import PrivateReplyer
|
||||
|
||||
logger = get_logger("ReplyerManager")
|
||||
|
||||
|
||||
class ReplyerManager:
    """Manage and cache the different kinds of reply generators."""

    def __init__(self) -> None:
        # cache_key ("{replyer_type}:{stream_id}") -> replyer instance.
        self._repliers: Dict[str, Any] = {}

    def get_replyer(
        self,
        chat_stream: Optional[BotChatSession] = None,
        chat_id: Optional[str] = None,
        request_type: str = "replyer",
        replyer_type: str = "default",
    ) -> Optional["DefaultReplyer | MaisakaReplyGenerator | PrivateReplyer"]:
        """Return the replyer for a session and replyer type, creating it once.

        Args:
            chat_stream: Session object; takes precedence over ``chat_id``.
            chat_id: Session identifier used when ``chat_stream`` is absent.
            request_type: Passed through to the created replyer.
            replyer_type: "maisaka" selects MaisakaReplyGenerator; otherwise a
                group session gets DefaultReplyer and a private one PrivateReplyer.

        Returns:
            The cached or newly created replyer, or None when the session
            cannot be resolved.
        """
        stream_id = chat_stream.session_id if chat_stream else chat_id
        if not stream_id:
            logger.warning("[ReplyerManager] 缺少 stream_id,无法获取 replyer")
            return None

        # Cache is keyed per (replyer_type, stream_id) so one session may
        # hold different replyer kinds side by side.
        cache_key = f"{replyer_type}:{stream_id}"
        if cache_key in self._repliers:
            logger.info(f"[ReplyerManager] 命中缓存 replyer: cache_key={cache_key}")
            return self._repliers[cache_key]

        target_stream = chat_stream or _chat_manager.get_session_by_session_id(stream_id)
        if not target_stream:
            logger.warning(f"[ReplyerManager] 未找到会话,stream_id={stream_id}")
            return None

        logger.info(
            f"[ReplyerManager] 开始创建 replyer: cache_key={cache_key}, "
            f"replyer_type={replyer_type}, is_group_session={target_stream.is_group_session}"
        )
        try:
            # Imports are deferred to avoid circular-import issues between
            # the replyer modules and this manager.
            if replyer_type == "maisaka":
                logger.info("[ReplyerManager] importing MaisakaReplyGenerator")
                from src.chat.replyer.maisaka_generator import MaisakaReplyGenerator

                replyer = MaisakaReplyGenerator(
                    chat_stream=target_stream,
                    request_type=request_type,
                )
            elif target_stream.is_group_session:
                logger.info("[ReplyerManager] importing DefaultReplyer")
                from src.chat.replyer.group_generator import DefaultReplyer

                replyer = DefaultReplyer(
                    chat_stream=target_stream,
                    request_type=request_type,
                )
            else:
                logger.info("[ReplyerManager] importing PrivateReplyer")
                from src.chat.replyer.private_generator import PrivateReplyer

                replyer = PrivateReplyer(
                    chat_stream=target_stream,
                    request_type=request_type,
                )
        except Exception:
            logger.exception(f"[ReplyerManager] 创建 replyer 失败: cache_key={cache_key}")
            raise

        self._repliers[cache_key] = replyer
        logger.info(f"[ReplyerManager] replyer 创建完成: cache_key={cache_key}")
        return replyer


# Module-level singleton shared across the application.
replyer_manager = ReplyerManager()
|
||||
|
||||
Reference in New Issue
Block a user