feat:优化maisaka循环逻辑,分离接收和处理,移除冗余config.py

This commit is contained in:
SengokuCola
2026-03-28 15:23:38 +08:00
parent 8b85c483b8
commit c545b24966
12 changed files with 252 additions and 277 deletions

View File

@@ -1,13 +1,12 @@
"""
"""
Maisaka runtime for non-CLI integrations.
"""
import asyncio
import time
from datetime import datetime
from pathlib import Path
import time
from typing import Optional
import asyncio
from typing import Literal, Optional
from src.chat.heart_flow.heartFC_utils import CycleDetail
from src.chat.message_receive.chat_manager import BotChatSession, chat_manager
@@ -19,16 +18,6 @@ from src.config.config import global_config
from src.llm_models.payload_content.tool_option import ToolCall
from src.services import send_service
from .config import (
DIRECT_IMAGE_INPUT,
ENABLE_KNOWLEDGE_MODULE,
ENABLE_LIST_FILES,
ENABLE_MCP,
ENABLE_READ_FILE,
ENABLE_WRITE_FILE,
MERGE_USER_MESSAGES,
)
from .knowledge import retrieve_relevant_knowledge
from .llm_service import MaiSakaLLMService
from .mcp_client import MCPManager
from .message_adapter import (
@@ -37,9 +26,14 @@ from .message_adapter import (
clone_message_sequence,
format_speaker_content,
get_message_role,
remove_last_perception,
)
from .tool_handlers import handle_list_files, handle_mcp_tool, handle_read_file, handle_unknown_tool, handle_write_file
from .tool_handlers import (
handle_list_files,
handle_mcp_tool,
handle_read_file,
handle_unknown_tool,
handle_write_file,
)
logger = get_logger("maisaka_runtime")
@@ -47,6 +41,10 @@ logger = get_logger("maisaka_runtime")
class MaisakaHeartFlowChatting:
"""Session-scoped Maisaka runtime that replaces the HFC planner and reply loop."""
_STATE_RUNNING: Literal["running"] = "running"
_STATE_WAIT: Literal["wait"] = "wait"
_STATE_STOP: Literal["stop"] = "stop"
def __init__(self, session_id: str):
self.session_id = session_id
self.chat_stream: Optional[BotChatSession] = chat_manager.get_session_by_session_id(session_id)
@@ -59,32 +57,33 @@ class MaisakaHeartFlowChatting:
self._chat_history: list[SessionMessage] = []
self.history_loop: list[CycleDetail] = []
self.message_cache: list[SessionMessage] = []
self._internal_turn_queue: asyncio.Queue[list[SessionMessage]] = asyncio.Queue()
self._message_queue: asyncio.Queue[SessionMessage] = asyncio.Queue()
self._mcp_manager: Optional[MCPManager] = None
self._current_cycle_detail: Optional[CycleDetail] = None
self._source_messages_by_id: dict[str, SessionMessage] = {}
self._running = False
self._cycle_counter = 0
self._internal_loop_task: Optional[asyncio.Task] = None
self._loop_task: Optional[asyncio.Task] = None
self._loop_lock = asyncio.Lock()
self._new_message_event = asyncio.Event()
self._max_internal_rounds = 6
self._chat_start_time: Optional[datetime] = None
self._last_user_input_time: Optional[datetime] = None
self._last_assistant_response_time: Optional[datetime] = None
self._user_input_times: list[datetime] = []
self._max_internal_rounds = global_config.maisaka.max_internal_rounds
self._max_context_size = max(1, int(global_config.chat.max_context_size))
self._agent_state: Literal["running", "wait", "stop"] = self._STATE_STOP
self._wait_until: Optional[float] = None
async def start(self) -> None:
"""Start the runtime loop."""
if self._running:
return
if ENABLE_MCP:
if global_config.maisaka.enable_mcp:
await self._init_mcp()
self._running = True
self._internal_loop_task = asyncio.create_task(self._internal_loop())
self._loop_task = asyncio.create_task(self._main_loop())
logger.info(f"{self.log_prefix} MaiSaka runtime started")
logger.info(f"{self.log_prefix} MaiSaka 启动")
async def stop(self) -> None:
"""Stop the runtime loop."""
@@ -93,6 +92,11 @@ class MaisakaHeartFlowChatting:
self._running = False
self._new_message_event.set()
self.message_cache.clear()
while not self._message_queue.empty():
_ = self._message_queue.get_nowait()
while not self._internal_turn_queue.empty():
_ = self._internal_turn_queue.get_nowait()
if self._loop_task is not None:
self._loop_task.cancel()
@@ -103,6 +107,15 @@ class MaisakaHeartFlowChatting:
finally:
self._loop_task = None
if self._internal_loop_task is not None:
self._internal_loop_task.cancel()
try:
await self._internal_loop_task
except asyncio.CancelledError:
pass
finally:
self._internal_loop_task = None
if self._mcp_manager is not None:
await self._mcp_manager.close()
self._mcp_manager = None
@@ -116,29 +129,142 @@ class MaisakaHeartFlowChatting:
async def register_message(self, message: SessionMessage) -> None:
    """Append a newly received message into the HFC-style message cache.

    The message is recorded in three places: the ``message_cache`` list,
    the async ``_message_queue`` consumed by the main loop, and the
    id-indexed ``_source_messages_by_id`` lookup table.  Arrival also
    wakes the loop via ``_new_message_event``.
    """
    self.message_cache.append(message)
    await self._message_queue.put(message)
    self._source_messages_by_id[message.message_id] = message
    # A new message cancels any wait/stop pause and resumes processing.
    if self._agent_state in (self._STATE_WAIT, self._STATE_STOP):
        self._agent_state = self._STATE_RUNNING
    self._new_message_event.set()
async def _main_loop(self) -> None:
try:
while self._running:
await self._new_message_event.wait()
if self._message_queue.empty():
if self._agent_state == self._STATE_WAIT:
message_arrived = await self._wait_for_trigger()
else:
self._new_message_event.clear()
await self._new_message_event.wait()
message_arrived = self._running
else:
message_arrived = True
if not self._running:
return
if not message_arrived:
self._agent_state = self._STATE_STOP
continue
self._new_message_event.clear()
async with self._loop_lock:
# 加锁灌注消息
while not self._message_queue.empty():
cached_messages = self._drain_message_cache()
if not cached_messages:
continue
await self._ingest_messages(cached_messages)
await self._run_internal_loop(anchor_message=cached_messages[-1])
if cached_messages:
await self._internal_turn_queue.put(cached_messages)
except asyncio.CancelledError:
logger.info(f"{self.log_prefix} MaiSaka runtime loop cancelled")
async def _internal_loop(self) -> None:
    """Consume batches of cached messages and run the matching internal thinking rounds.

    Each batch pulled from ``_internal_turn_queue`` is ingested into the
    chat history, then up to ``_max_internal_rounds`` planner/tool cycles
    are executed.  The loop ends only on task cancellation or when
    ``_running`` turns false.
    """
    try:
        while self._running:
            cached_messages = await self._internal_turn_queue.get()
            if not cached_messages:
                # Empty batch: acknowledge it and keep waiting.
                self._internal_turn_queue.task_done()
                continue
            self._agent_state = self._STATE_RUNNING
            await self._ingest_messages(cached_messages)
            # The newest message anchors platform/session/group metadata for replies.
            anchor_message = cached_messages[-1]
            try:
                for round_index in range(self._max_internal_rounds):
                    cycle_detail = self._start_cycle()
                    logger.info(
                        f"{self.log_prefix} MaiSaka cycle={cycle_detail.cycle_id} "
                        f"round={round_index + 1}/{self._max_internal_rounds} "
                        f"context_size={len(self._chat_history)}"
                    )
                    try:
                        planner_started_at = time.time()
                        response = await self._llm_service.chat_loop_step(self._chat_history)
                        cycle_detail.time_records["planner"] = time.time() - planner_started_at
                        # Stamp the model reply with the anchor's routing metadata
                        # before appending it to the history.
                        response.raw_message.platform = anchor_message.platform
                        response.raw_message.session_id = self.session_id
                        response.raw_message.message_info.group_info = self._build_group_info(anchor_message)
                        self._chat_history.append(response.raw_message)
                        if response.tool_calls:
                            tool_started_at = time.time()
                            should_pause = await self._handle_tool_calls(
                                response.tool_calls,
                                response.content or "",
                                anchor_message,
                            )
                            cycle_detail.time_records["tool_calls"] = time.time() - tool_started_at
                            if should_pause:
                                # A wait/stop tool call ends this batch's rounds early.
                                break
                            continue
                        if response.content:
                            # Plain text reply: give the model another round.
                            continue
                        # Neither tool calls nor content: the turn is finished.
                        break
                    finally:
                        self._end_cycle(cycle_detail)
            finally:
                # Unless a tool call switched us into wait, fall back to stop.
                if self._agent_state == self._STATE_RUNNING:
                    self._agent_state = self._STATE_STOP
                self._internal_turn_queue.task_done()
    except asyncio.CancelledError:
        logger.info(f"{self.log_prefix} Maisaka internal loop cancelled")
async def _wait_for_trigger(self) -> bool:
    """Wait for an external trigger.

    Returns:
        True when a new-message event fired; False when the wait window
        expired (the session then drops back to the stop state).
    """
    if self._agent_state != self._STATE_WAIT:
        await self._new_message_event.wait()
        return True
    # Handle the wait window opened by the "wait" tool call: on timeout
    # fall back to idle; on a new message continue processing the cache.
    if self._wait_until is None:
        await self._new_message_event.wait()
        return True
    timeout = self._wait_until - time.time()
    if timeout <= 0:
        # Deadline already passed before we started waiting.
        logger.info(f"{self.log_prefix} Maisaka 等待超时,继续查看新消息")
        self._enter_stop_state()
        # NOTE: _enter_stop_state already clears _wait_until; kept for explicitness.
        self._wait_until = None
        return False
    try:
        await asyncio.wait_for(self._new_message_event.wait(), timeout=timeout)
        return True
    except asyncio.TimeoutError:
        logger.info(f"{self.log_prefix} Maisaka 等待超时,继续查看新消息")
        self._enter_stop_state()
        self._wait_until = None
        return False
def _enter_wait_state(self, seconds: Optional[float] = None) -> None:
    """Enter the wait state; ``seconds=None`` means wait indefinitely for a new message."""
    self._agent_state = self._STATE_WAIT
    self._wait_until = None if seconds is None else time.time() + seconds
def _enter_stop_state(self) -> None:
    """Enter the stop (idle) state: only a new message resumes processing."""
    self._agent_state = self._STATE_STOP
    self._wait_until = None
def _drain_message_cache(self) -> list[SessionMessage]:
    """Drain the current message cache as one processing batch.

    Combines the ``message_cache`` list with anything still pending in
    ``_message_queue``; both stores are left empty afterwards.
    """
    drained_messages = list(self.message_cache)
    self.message_cache.clear()
    # Non-blocking drain; QueueEmpty guards the empty()/get_nowait() race.
    while not self._message_queue.empty():
        try:
            drained_messages.append(self._message_queue.get_nowait())
        except asyncio.QueueEmpty:
            break
    return drained_messages
async def _init_mcp(self) -> None:
@@ -161,91 +287,29 @@ class MaisakaHeartFlowChatting:
)
async def _ingest_messages(self, messages: list[SessionMessage]) -> None:
if self._chat_start_time is None:
self._chat_start_time = messages[0].timestamp
self._last_user_input_time = messages[-1].timestamp
self._user_input_times.extend(message.timestamp for message in messages)
if MERGE_USER_MESSAGES:
merged_sequence = await self._merge_messages(messages)
merged_content = build_visible_text_from_sequence(merged_sequence).strip()
if not merged_sequence.components:
return
self._chat_history.append(
build_message(
role="user",
content=merged_content,
source="user",
timestamp=messages[-1].timestamp,
platform=messages[-1].platform,
session_id=self.session_id,
group_info=self._build_group_info(messages[-1]),
user_info=self._build_runtime_user_info(),
raw_message=merged_sequence,
display_text=merged_content,
)
)
self._trim_chat_history()
return
"""处理传入消息列表,将其转换为历史消息并加入聊天历史缓存。"""
for message in messages:
history_message = await self._build_user_history_message(message)
if history_message is None:
# 构建用户消息序列
user_sequence = await self._build_message_sequence(message)
visible_text = build_visible_text_from_sequence(user_sequence).strip()
if not user_sequence.components:
continue
history_message = build_message(
role="user",
content=visible_text,
source="user",
timestamp=message.timestamp,
platform=message.platform,
session_id=self.session_id,
group_info=self._build_group_info(message),
user_info=self._build_runtime_user_info(),
raw_message=user_sequence,
display_text=visible_text,
)
self._chat_history.append(history_message)
self._trim_chat_history()
async def _merge_messages(self, messages: list[SessionMessage]) -> MessageSequence:
    """Merge a batch of user messages into a single MessageSequence.

    Each message gets a speaker-name prefix; raw components are cloned in
    when available, otherwise the processed plain text is used as a
    fallback.  A newline separates consecutive messages.
    """
    merged_sequence = MessageSequence([])
    for message in messages:
        user_info = message.message_info.user_info
        # Prefer group card name, then nickname, then the bare user id.
        speaker_name = user_info.user_cardname or user_info.user_nickname or user_info.user_id
        prefix = format_speaker_content(speaker_name, "", message.timestamp, message.message_id)
        merged_sequence.text(prefix)
        appended_component = False
        if DIRECT_IMAGE_INPUT:
            # Use the pre-adaptation raw sequence when direct image input is on.
            source_sequence = getattr(message, "maisaka_original_raw_message", message.raw_message)
        else:
            source_sequence = message.raw_message
        for component in clone_message_sequence(source_sequence).components:
            merged_sequence.components.append(component)
            appended_component = True
        if not appended_component:
            # No clonable components: fall back to processed plain text,
            # processing the message lazily if needed.
            if not message.processed_plain_text:
                await message.process()
            content = (message.processed_plain_text or "").strip()
            if content:
                merged_sequence.text(content)
        merged_sequence.text("\n")
    return merged_sequence
async def _build_user_history_message(self, message: SessionMessage) -> Optional[SessionMessage]:
    """Convert an incoming session message into a user-role history entry.

    Returns:
        The built history message, or None when the generated sequence
        carries no components (nothing worth recording).
    """
    user_sequence = await self._build_message_sequence(message)
    visible_text = build_visible_text_from_sequence(user_sequence).strip()
    if not user_sequence.components:
        return None
    return build_message(
        role="user",
        content=visible_text,
        source="user",
        timestamp=message.timestamp,
        platform=message.platform,
        session_id=self.session_id,
        group_info=self._build_group_info(message),
        user_info=self._build_runtime_user_info(),
        raw_message=user_sequence,
        display_text=visible_text,
    )
async def _build_message_sequence(self, message: SessionMessage) -> MessageSequence:
message_sequence = MessageSequence([])
user_info = message.message_info.user_info
@@ -253,7 +317,7 @@ class MaisakaHeartFlowChatting:
message_sequence.text(format_speaker_content(speaker_name, "", message.timestamp, message.message_id))
appended_component = False
if DIRECT_IMAGE_INPUT:
if global_config.maisaka.direct_image_input:
source_sequence = getattr(message, "maisaka_original_raw_message", message.raw_message)
else:
source_sequence = message.raw_message
@@ -271,49 +335,6 @@ class MaisakaHeartFlowChatting:
return message_sequence
async def _run_internal_loop(self, anchor_message: SessionMessage) -> None:
    """Run the Maisaka internal loop, treating each thinking round as one cycle.

    Args:
        anchor_message: The message whose platform/session/group metadata
            is stamped onto every model reply produced in this loop.
    """
    # Start True so the first round refreshes the perception snapshot.
    last_had_tool_calls = True
    for round_index in range(self._max_internal_rounds):
        cycle_detail = self._start_cycle()
        logger.info(
            f"{self.log_prefix} MaiSaka cycle={cycle_detail.cycle_id} "
            f"round={round_index + 1}/{self._max_internal_rounds} "
            f"context_size={len(self._chat_history)}"
        )
        try:
            if last_had_tool_calls:
                # Refresh the perception snapshot only after tool activity.
                perception_started_at = time.time()
                await self._append_perception_snapshot()
                cycle_detail.time_records["perception"] = time.time() - perception_started_at
            planner_started_at = time.time()
            response = await self._llm_service.chat_loop_step(self._chat_history)
            cycle_detail.time_records["planner"] = time.time() - planner_started_at
            # Stamp the reply with the anchor's routing metadata before
            # appending it to the history.
            response.raw_message.platform = anchor_message.platform
            response.raw_message.session_id = self.session_id
            response.raw_message.message_info.group_info = self._build_group_info(anchor_message)
            self._chat_history.append(response.raw_message)
            self._last_assistant_response_time = datetime.now()
            if response.tool_calls:
                tool_started_at = time.time()
                should_pause = await self._handle_tool_calls(response.tool_calls, response.content or "", anchor_message)
                cycle_detail.time_records["tool_calls"] = time.time() - tool_started_at
                if should_pause:
                    # A wait/stop tool call ends the loop immediately.
                    return
                last_had_tool_calls = True
                continue
            if response.content:
                # Plain text reply: another round without a new snapshot.
                last_had_tool_calls = False
                continue
            # Neither tool calls nor content: the turn is finished.
            return
        finally:
            self._end_cycle(cycle_detail)
def _start_cycle(self) -> CycleDetail:
"""Start a Maisaka thinking cycle."""
@@ -360,41 +381,6 @@ class MaisakaHeartFlowChatting:
f"remaining_user_messages={user_message_count}"
)
async def _append_perception_snapshot(self) -> None:
    """Collect perception inputs (e.g. knowledge retrieval) and append one snapshot message.

    The previous snapshot is removed first, so at most one perception
    message lives in the chat history at a time.
    """
    tasks = []
    if ENABLE_KNOWLEDGE_MODULE:
        tasks.append(("knowledge", retrieve_relevant_knowledge(self._llm_service, self._chat_history)))
    if not tasks:
        return
    # Run all perception tasks concurrently; failures are logged, not raised.
    results = await asyncio.gather(*[task for _, task in tasks], return_exceptions=True)
    perception_parts: list[str] = []
    for (task_name, _), result in zip(tasks, results):
        if isinstance(result, Exception):
            logger.warning(f"{self.log_prefix} Maisaka {task_name} analysis failed: {result}")
            continue
        if result:
            perception_parts.append(f"{task_name.title()}\n{result}")
    # Drop the stale snapshot even if this round produced nothing new.
    remove_last_perception(self._chat_history)
    if not perception_parts:
        return
    self._chat_history.append(
        build_message(
            role="assistant",
            content="\n\n".join(perception_parts),
            message_kind="perception",
            source="assistant",
            platform=self.chat_stream.platform,
            session_id=self.session_id,
            group_info=self._build_group_info(),
            user_info=self._build_runtime_bot_user_info(),
        )
    )
async def _handle_tool_calls(
self,
tool_calls: list[ToolCall],
@@ -419,12 +405,18 @@ class MaisakaHeartFlowChatting:
if tool_call.func_name == "wait":
seconds = (tool_call.args or {}).get("seconds", 30)
try:
wait_seconds = int(seconds)
except (TypeError, ValueError):
wait_seconds = 30
wait_seconds = max(0, wait_seconds)
self._chat_history.append(
self._build_tool_message(
tool_call,
f"Waiting for future input for up to {seconds} seconds.",
f"Waiting for future input for up to {wait_seconds} seconds.",
)
)
self._enter_wait_state(seconds=wait_seconds)
return True
if tool_call.func_name == "stop":
@@ -434,17 +426,18 @@ class MaisakaHeartFlowChatting:
"Conversation loop paused until a new message arrives.",
)
)
self._enter_stop_state()
return True
if tool_call.func_name == "write_file" and ENABLE_WRITE_FILE:
if tool_call.func_name == "write_file" and global_config.maisaka.enable_write_file:
await handle_write_file(tool_call, self._chat_history)
continue
if tool_call.func_name == "read_file" and ENABLE_READ_FILE:
if tool_call.func_name == "read_file" and global_config.maisaka.enable_read_file:
await handle_read_file(tool_call, self._chat_history)
continue
if tool_call.func_name == "list_files" and ENABLE_LIST_FILES:
if tool_call.func_name == "list_files" and global_config.maisaka.enable_list_files:
await handle_list_files(tool_call, self._chat_history)
continue