feat:优化maisaka循环逻辑,分离接收和处理,移除冗余config.py

This commit is contained in:
SengokuCola
2026-03-28 15:23:38 +08:00
parent 8b85c483b8
commit c545b24966
12 changed files with 252 additions and 277 deletions

View File

@@ -1618,6 +1618,16 @@ class MaiSakaConfig(ConfigBase):
)
"""Whether Maisaka should merge newly received user utterances into a single user message per round"""
max_internal_rounds: int = Field(
default=6,
ge=1,
json_schema_extra={
"x-widget": "input",
"x-icon": "repeat",
},
)
"""Maximum number of internal planning rounds per inbound message."""
terminal_image_preview: bool = Field(
default=False,
json_schema_extra={

View File

@@ -16,13 +16,7 @@ from rich.text import Text
from src.chat.message_receive.message import SessionMessage
from src.config.config import global_config
from .config import (
ENABLE_KNOWLEDGE_MODULE,
ENABLE_MCP,
SHOW_THINKING,
USER_NAME,
console,
)
from .console import console
from .input_reader import InputReader
from .knowledge import retrieve_relevant_knowledge
from .knowledge_store import get_knowledge_store
@@ -116,7 +110,11 @@ class BufferCLI:
self._chat_history.append(
build_message(
role="user",
content=format_speaker_content(USER_NAME, user_text, now),
content=format_speaker_content(
global_config.maisaka.user_name.strip() or "用户",
user_text,
now,
),
)
)
@@ -145,7 +143,7 @@ class BufferCLI:
tasks = []
status_text_parts = []
if ENABLE_KNOWLEDGE_MODULE:
if global_config.maisaka.enable_knowledge_module:
tasks.append(("knowledge", retrieve_relevant_knowledge(self.llm_service, chat_history)))
status_text_parts.append("knowledge")
@@ -156,13 +154,13 @@ class BufferCLI:
results = await asyncio.gather(*[task for _, task in tasks], return_exceptions=True)
knowledge_analysis = ""
if ENABLE_KNOWLEDGE_MODULE:
if global_config.maisaka.enable_knowledge_module:
knowledge_result = results[0] if results else None
if isinstance(knowledge_result, Exception):
console.print(f"[warning]Knowledge analysis failed: {knowledge_result}[/warning]")
elif knowledge_result:
knowledge_analysis = knowledge_result
if SHOW_THINKING:
if global_config.maisaka.show_thinking:
console.print(
Panel(
Markdown(knowledge_analysis),
@@ -189,7 +187,7 @@ class BufferCLI:
)
)
else:
if SHOW_THINKING:
if global_config.maisaka.show_thinking:
console.print("[muted]Skipping module analysis because the last round used no tools.[/muted]")
with console.status("[info]AI is thinking...[/info]", spinner="dots"):
@@ -207,7 +205,7 @@ class BufferCLI:
chat_history.append(response.raw_message)
self._last_assistant_response_time = datetime.now()
if SHOW_THINKING and response.content:
if global_config.maisaka.show_thinking and response.content:
console.print(
Panel(
Markdown(response.content),
@@ -254,7 +252,7 @@ class BufferCLI:
)
elif tc.func_name == "no_reply":
if SHOW_THINKING:
if global_config.maisaka.show_thinking:
console.print("[muted]No visible reply this round.[/muted]")
chat_history.append(
build_message(
@@ -339,7 +337,7 @@ class BufferCLI:
async def run(self):
"""Main interactive loop."""
if ENABLE_MCP:
if global_config.maisaka.enable_mcp:
await self._init_mcp()
else:
console.print("[muted]MCP is disabled (ENABLE_MCP=false)[/muted]")

View File

@@ -1,47 +0,0 @@
"""
MaiSaka - global configuration.

Reads module toggles from the main project config system and exposes the
shared Rich Console instance and theme definition.
"""

from pathlib import Path
import sys

# The project root must be on sys.path BEFORE any `src.*` import below,
# otherwise importing this module from outside the package root fails at
# import time. (Previously the path was inserted after the import, which
# defeated its purpose.)
_root = Path(__file__).parent.parent.parent.absolute()
if str(_root) not in sys.path:
    sys.path.insert(0, str(_root))

from rich.console import Console
from rich.theme import Theme

from src.config.config import global_config

# ──────────────────── Module toggles ────────────────────
ENABLE_KNOWLEDGE_MODULE = global_config.maisaka.enable_knowledge_module
ENABLE_MCP = global_config.maisaka.enable_mcp
ENABLE_WRITE_FILE = global_config.maisaka.enable_write_file
ENABLE_READ_FILE = global_config.maisaka.enable_read_file
ENABLE_LIST_FILES = global_config.maisaka.enable_list_files
SHOW_THINKING = global_config.maisaka.show_thinking
# Falls back to a default display name when the configured name is blank.
USER_NAME = global_config.maisaka.user_name.strip() or "用户"
DIRECT_IMAGE_INPUT = global_config.maisaka.direct_image_input
MERGE_USER_MESSAGES = global_config.maisaka.merge_user_messages
TERMINAL_IMAGE_PREVIEW = global_config.maisaka.terminal_image_preview
TERMINAL_IMAGE_PREVIEW_WIDTH = global_config.maisaka.terminal_image_preview_width
TAKE_OVER_HFC = global_config.maisaka.take_over_hfc

# ──────────────────── Rich theme & Console ────────────────────
custom_theme = Theme(
    {
        "info": "cyan",
        "success": "green",
        "warning": "yellow",
        "error": "bold red",
        "muted": "dim",
        "accent": "bold magenta",
    }
)
console = Console(theme=custom_theme)

17
src/maisaka/console.py Normal file
View File

@@ -0,0 +1,17 @@
"""MaiSaka terminal output components: shared Rich theme and console."""
from rich.console import Console
from rich.theme import Theme

# Named styles referenced via markup tags (e.g. [info]...[/info]) across
# the MaiSaka terminal UI.
custom_theme = Theme(
    {
        "info": "cyan",
        "success": "green",
        "warning": "yellow",
        "error": "bold red",
        "muted": "dim",
        "accent": "bold magenta",
    }
)

# Single shared Console instance; other modules import this rather than
# constructing their own, keeping theming consistent.
console = Console(theme=custom_theme)

View File

@@ -34,9 +34,8 @@ from src.llm_models.payload_content.tool_option import (
)
from src.services.llm_service import LLMServiceClient
from . import config
from .config import console
from .builtin_tools import get_builtin_tools
from .console import console
from .message_adapter import (
build_message,
format_speaker_content,
@@ -209,11 +208,11 @@ class MaiSakaLLMService:
try:
tools_section = ""
if config.ENABLE_WRITE_FILE:
if global_config.maisaka.enable_write_file:
tools_section += "\n• write_file(filename, content) — 在 mai_files 目录下写入文件。"
if config.ENABLE_READ_FILE:
if global_config.maisaka.enable_read_file:
tools_section += "\n• read_file(filename) — 读取 mai_files 目录下的文件内容。"
if config.ENABLE_LIST_FILES:
if global_config.maisaka.enable_list_files:
tools_section += "\n• list_files() — 获取 mai_files 目录下所有文件的元信息列表。"
self._chat_system_prompt = load_prompt(
"maidairy_chat",
@@ -268,7 +267,7 @@ class MaiSakaLLMService:
if width <= 0 or height <= 0:
return None
preview_width = max(8, int(config.TERMINAL_IMAGE_PREVIEW_WIDTH))
preview_width = max(8, int(global_config.maisaka.terminal_image_preview_width))
preview_height = max(1, int(height * (preview_width / width) * 0.5))
resized = grayscale.resize((preview_width, preview_height))
pixels = list(resized.getdata())
@@ -310,7 +309,7 @@ class MaiSakaLLMService:
preview_parts: List[object] = [
Text(f"image/{image_format} {size_text}\nbase64 omitted", style="magenta")
]
if config.TERMINAL_IMAGE_PREVIEW:
if global_config.maisaka.terminal_image_preview:
preview_text = MaiSakaLLMService._build_terminal_image_preview(image_base64)
if preview_text:
preview_parts.append(Text(preview_text, style="white"))
@@ -478,7 +477,7 @@ class MaiSakaLLMService:
for tool_call_index, tool_call in enumerate(tool_calls, start=1):
ordered_panels.append(self._render_tool_call_panel(tool_call, tool_call_index, index))
if config.SHOW_THINKING and ordered_panels:
if global_config.maisaka.show_thinking and ordered_panels:
console.print(
Panel(
Group(*ordered_panels),
@@ -567,7 +566,11 @@ class MaiSakaLLMService:
return [
build_message(
role=RoleType.User.value,
content=format_speaker_content(config.USER_NAME, user_text, datetime.now()),
content=format_speaker_content(
global_config.maisaka.user_name.strip() or "用户",
user_text,
datetime.now(),
),
source="user",
)
]
@@ -597,7 +600,7 @@ class MaiSakaLLMService:
role = get_message_role(msg)
content = get_message_text(msg)
if role == RoleType.User.value:
prompt_parts.append(f"{config.USER_NAME}: {content}")
prompt_parts.append(f"{global_config.maisaka.user_name.strip() or '用户'}: {content}")
elif role == RoleType.Assistant.value:
prompt_parts.append(f"助手: {content}")
@@ -664,7 +667,7 @@ class MaiSakaLLMService:
messages = f"System: {system_prompt}\n\nUser: {user_prompt}"
if config.SHOW_THINKING:
if global_config.maisaka.show_thinking:
print("\n" + "=" * 60)
print("MaiSaka LLM Request - generate_reply:")
print(f" {messages}")

View File

@@ -26,7 +26,7 @@ import os
from dataclasses import dataclass, field
from typing import Optional
from ..config import console
from ..console import console
@dataclass

View File

@@ -6,7 +6,7 @@ MaiSaka - 单个 MCP 服务器连接管理
from contextlib import AsyncExitStack
from typing import Any, Optional
from ..config import console
from ..console import console
from .config import MCPServerConfig
# ──────────────────── MCP SDK 可选导入 ────────────────────

View File

@@ -5,7 +5,7 @@ MaiSaka - MCP 管理器
from typing import Optional
from ..config import console
from ..console import console
from .config import MCPServerConfig, load_mcp_config
from .connection import MCPConnection, MCP_AVAILABLE

View File

@@ -19,8 +19,6 @@ from src.config.config import global_config
from src.llm_models.payload_content.message import Message, MessageBuilder, RoleType
from src.llm_models.payload_content.tool_option import ToolCall
from .config import USER_NAME
MAISAKA_PLATFORM = "maisaka"
MAISAKA_SESSION_ID = "maisaka_cli"
MESSAGE_KIND_KEY = "maisaka_message_kind"
@@ -36,7 +34,11 @@ SPEAKER_PREFIX_PATTERN = re.compile(
def _build_user_info_for_role(role: str) -> UserInfo:
if role == RoleType.User.value:
return UserInfo(user_id="maisaka_user", user_nickname=USER_NAME, user_cardname=None)
return UserInfo(
user_id="maisaka_user",
user_nickname=global_config.maisaka.user_name.strip() or "用户",
user_cardname=None,
)
if role == RoleType.Tool.value:
return UserInfo(user_id="maisaka_tool", user_nickname="tool", user_cardname=None)
return UserInfo(

View File

@@ -7,7 +7,6 @@ from typing import Optional
from src.chat.message_receive.message import SessionMessage
from src.config.config import global_config
from .config import USER_NAME
from .llm_service import MaiSakaLLMService
from .message_adapter import get_message_role, get_message_text, is_perception_message, parse_speaker_content
@@ -84,7 +83,7 @@ def format_chat_history(messages: list[SessionMessage]) -> str:
content = _normalize_content(content_body)
if not content:
continue
visible_speaker = speaker_name or USER_NAME
visible_speaker = speaker_name or global_config.maisaka.user_name.strip() or "用户"
parts.append(f"{timestamp} {visible_speaker}: {content}")
continue

View File

@@ -1,13 +1,12 @@
"""
"""
Maisaka runtime for non-CLI integrations.
"""
import asyncio
import time
from datetime import datetime
from pathlib import Path
import time
from typing import Optional
import asyncio
from typing import Literal, Optional
from src.chat.heart_flow.heartFC_utils import CycleDetail
from src.chat.message_receive.chat_manager import BotChatSession, chat_manager
@@ -19,16 +18,6 @@ from src.config.config import global_config
from src.llm_models.payload_content.tool_option import ToolCall
from src.services import send_service
from .config import (
DIRECT_IMAGE_INPUT,
ENABLE_KNOWLEDGE_MODULE,
ENABLE_LIST_FILES,
ENABLE_MCP,
ENABLE_READ_FILE,
ENABLE_WRITE_FILE,
MERGE_USER_MESSAGES,
)
from .knowledge import retrieve_relevant_knowledge
from .llm_service import MaiSakaLLMService
from .mcp_client import MCPManager
from .message_adapter import (
@@ -37,9 +26,14 @@ from .message_adapter import (
clone_message_sequence,
format_speaker_content,
get_message_role,
remove_last_perception,
)
from .tool_handlers import handle_list_files, handle_mcp_tool, handle_read_file, handle_unknown_tool, handle_write_file
from .tool_handlers import (
handle_list_files,
handle_mcp_tool,
handle_read_file,
handle_unknown_tool,
handle_write_file,
)
logger = get_logger("maisaka_runtime")
@@ -47,6 +41,10 @@ logger = get_logger("maisaka_runtime")
class MaisakaHeartFlowChatting:
"""Session-scoped Maisaka runtime that replaces the HFC planner and reply loop."""
_STATE_RUNNING: Literal["running"] = "running"
_STATE_WAIT: Literal["wait"] = "wait"
_STATE_STOP: Literal["stop"] = "stop"
def __init__(self, session_id: str):
self.session_id = session_id
self.chat_stream: Optional[BotChatSession] = chat_manager.get_session_by_session_id(session_id)
@@ -59,32 +57,33 @@ class MaisakaHeartFlowChatting:
self._chat_history: list[SessionMessage] = []
self.history_loop: list[CycleDetail] = []
self.message_cache: list[SessionMessage] = []
self._internal_turn_queue: asyncio.Queue[list[SessionMessage]] = asyncio.Queue()
self._message_queue: asyncio.Queue[SessionMessage] = asyncio.Queue()
self._mcp_manager: Optional[MCPManager] = None
self._current_cycle_detail: Optional[CycleDetail] = None
self._source_messages_by_id: dict[str, SessionMessage] = {}
self._running = False
self._cycle_counter = 0
self._internal_loop_task: Optional[asyncio.Task] = None
self._loop_task: Optional[asyncio.Task] = None
self._loop_lock = asyncio.Lock()
self._new_message_event = asyncio.Event()
self._max_internal_rounds = 6
self._chat_start_time: Optional[datetime] = None
self._last_user_input_time: Optional[datetime] = None
self._last_assistant_response_time: Optional[datetime] = None
self._user_input_times: list[datetime] = []
self._max_internal_rounds = global_config.maisaka.max_internal_rounds
self._max_context_size = max(1, int(global_config.chat.max_context_size))
self._agent_state: Literal["running", "wait", "stop"] = self._STATE_STOP
self._wait_until: Optional[float] = None
async def start(self) -> None:
"""Start the runtime loop."""
if self._running:
return
if ENABLE_MCP:
if global_config.maisaka.enable_mcp:
await self._init_mcp()
self._running = True
self._internal_loop_task = asyncio.create_task(self._internal_loop())
self._loop_task = asyncio.create_task(self._main_loop())
logger.info(f"{self.log_prefix} MaiSaka runtime started")
logger.info(f"{self.log_prefix} MaiSaka 启动")
async def stop(self) -> None:
"""Stop the runtime loop."""
@@ -93,6 +92,11 @@ class MaisakaHeartFlowChatting:
self._running = False
self._new_message_event.set()
self.message_cache.clear()
while not self._message_queue.empty():
_ = self._message_queue.get_nowait()
while not self._internal_turn_queue.empty():
_ = self._internal_turn_queue.get_nowait()
if self._loop_task is not None:
self._loop_task.cancel()
@@ -103,6 +107,15 @@ class MaisakaHeartFlowChatting:
finally:
self._loop_task = None
if self._internal_loop_task is not None:
self._internal_loop_task.cancel()
try:
await self._internal_loop_task
except asyncio.CancelledError:
pass
finally:
self._internal_loop_task = None
if self._mcp_manager is not None:
await self._mcp_manager.close()
self._mcp_manager = None
@@ -116,29 +129,142 @@ class MaisakaHeartFlowChatting:
async def register_message(self, message: SessionMessage) -> None:
"""Append a newly received message into the HFC-style message cache."""
self.message_cache.append(message)
await self._message_queue.put(message)
self._source_messages_by_id[message.message_id] = message
if self._agent_state in (self._STATE_WAIT, self._STATE_STOP):
self._agent_state = self._STATE_RUNNING
self._new_message_event.set()
async def _main_loop(self) -> None:
try:
while self._running:
await self._new_message_event.wait()
if self._message_queue.empty():
if self._agent_state == self._STATE_WAIT:
message_arrived = await self._wait_for_trigger()
else:
self._new_message_event.clear()
await self._new_message_event.wait()
message_arrived = self._running
else:
message_arrived = True
if not self._running:
return
if not message_arrived:
self._agent_state = self._STATE_STOP
continue
self._new_message_event.clear()
async with self._loop_lock:
# 加锁灌注消息
while not self._message_queue.empty():
cached_messages = self._drain_message_cache()
if not cached_messages:
continue
await self._ingest_messages(cached_messages)
await self._run_internal_loop(anchor_message=cached_messages[-1])
if cached_messages:
await self._internal_turn_queue.put(cached_messages)
except asyncio.CancelledError:
logger.info(f"{self.log_prefix} MaiSaka runtime loop cancelled")
    async def _internal_loop(self) -> None:
        """Consume batches from the internal turn queue and run the thinking rounds.

        Each batch of cached messages is ingested into the chat history, then up
        to ``self._max_internal_rounds`` planner rounds are executed. A round
        ends the loop early when a tool call requests a pause; a round with
        neither tool calls nor content also terminates the batch.
        """
        try:
            while self._running:
                # Blocks until _main_loop enqueues a batch of drained messages.
                cached_messages = await self._internal_turn_queue.get()
                if not cached_messages:
                    self._internal_turn_queue.task_done()
                    continue
                self._agent_state = self._STATE_RUNNING
                await self._ingest_messages(cached_messages)
                # The newest message anchors platform/session/group metadata
                # for the assistant responses produced below.
                anchor_message = cached_messages[-1]
                try:
                    for round_index in range(self._max_internal_rounds):
                        cycle_detail = self._start_cycle()
                        logger.info(
                            f"{self.log_prefix} MaiSaka cycle={cycle_detail.cycle_id} "
                            f"round={round_index + 1}/{self._max_internal_rounds} "
                            f"context_size={len(self._chat_history)}"
                        )
                        try:
                            planner_started_at = time.time()
                            response = await self._llm_service.chat_loop_step(self._chat_history)
                            cycle_detail.time_records["planner"] = time.time() - planner_started_at
                            # Stamp the response with the anchor's routing info
                            # so it belongs to this session/group.
                            response.raw_message.platform = anchor_message.platform
                            response.raw_message.session_id = self.session_id
                            response.raw_message.message_info.group_info = self._build_group_info(anchor_message)
                            self._chat_history.append(response.raw_message)
                            if response.tool_calls:
                                tool_started_at = time.time()
                                should_pause = await self._handle_tool_calls(
                                    response.tool_calls,
                                    response.content or "",
                                    anchor_message,
                                )
                                cycle_detail.time_records["tool_calls"] = time.time() - tool_started_at
                                if should_pause:
                                    # wait/stop tool calls end this batch.
                                    break
                                continue
                            if response.content:
                                # Plain content: give the model another round.
                                continue
                            # No tool calls and no content: nothing left to do.
                            break
                        finally:
                            self._end_cycle(cycle_detail)
                finally:
                    # If no tool call changed the state, fall back to stop so
                    # the main loop waits for the next inbound message.
                    if self._agent_state == self._STATE_RUNNING:
                        self._agent_state = self._STATE_STOP
                    self._internal_turn_queue.task_done()
        except asyncio.CancelledError:
            logger.info(f"{self.log_prefix} Maisaka internal loop cancelled")
async def _wait_for_trigger(self) -> bool:
"""等待外部触发。返回 True 表示有新消息事件,返回 False 表示等待超时。"""
if self._agent_state != self._STATE_WAIT:
await self._new_message_event.wait()
return True
# 处理 wait 工具调用带来的等待窗口:超时后恢复 idle有新消息则继续处理缓存消息
if self._wait_until is None:
await self._new_message_event.wait()
return True
timeout = self._wait_until - time.time()
if timeout <= 0:
logger.info(f"{self.log_prefix} Maisaka 等待超时,继续查看新消息")
self._enter_stop_state()
self._wait_until = None
return False
try:
await asyncio.wait_for(self._new_message_event.wait(), timeout=timeout)
return True
except asyncio.TimeoutError:
logger.info(f"{self.log_prefix} Maisaka 等待超时,继续查看新消息")
self._enter_stop_state()
self._wait_until = None
return False
def _enter_wait_state(self, seconds: Optional[float] = None) -> None:
"""进入等待状态seconds 为 None 时表示一直等待直到新消息到达。"""
self._agent_state = self._STATE_WAIT
self._wait_until = None if seconds is None else time.time() + seconds
def _enter_stop_state(self) -> None:
"""进入停顿状态:仅等待新消息。"""
self._agent_state = self._STATE_STOP
self._wait_until = None
def _drain_message_cache(self) -> list[SessionMessage]:
"""Drain the current message cache as one processing batch."""
drained_messages = list(self.message_cache)
self.message_cache.clear()
while not self._message_queue.empty():
try:
drained_messages.append(self._message_queue.get_nowait())
except asyncio.QueueEmpty:
break
return drained_messages
async def _init_mcp(self) -> None:
@@ -161,91 +287,29 @@ class MaisakaHeartFlowChatting:
)
async def _ingest_messages(self, messages: list[SessionMessage]) -> None:
if self._chat_start_time is None:
self._chat_start_time = messages[0].timestamp
self._last_user_input_time = messages[-1].timestamp
self._user_input_times.extend(message.timestamp for message in messages)
if MERGE_USER_MESSAGES:
merged_sequence = await self._merge_messages(messages)
merged_content = build_visible_text_from_sequence(merged_sequence).strip()
if not merged_sequence.components:
return
self._chat_history.append(
build_message(
role="user",
content=merged_content,
source="user",
timestamp=messages[-1].timestamp,
platform=messages[-1].platform,
session_id=self.session_id,
group_info=self._build_group_info(messages[-1]),
user_info=self._build_runtime_user_info(),
raw_message=merged_sequence,
display_text=merged_content,
)
)
self._trim_chat_history()
return
"""处理传入消息列表,将其转换为历史消息并加入聊天历史缓存。"""
for message in messages:
history_message = await self._build_user_history_message(message)
if history_message is None:
# 构建用户消息序列
user_sequence = await self._build_message_sequence(message)
visible_text = build_visible_text_from_sequence(user_sequence).strip()
if not user_sequence.components:
continue
history_message = build_message(
role="user",
content=visible_text,
source="user",
timestamp=message.timestamp,
platform=message.platform,
session_id=self.session_id,
group_info=self._build_group_info(message),
user_info=self._build_runtime_user_info(),
raw_message=user_sequence,
display_text=visible_text,
)
self._chat_history.append(history_message)
self._trim_chat_history()
async def _merge_messages(self, messages: list[SessionMessage]) -> MessageSequence:
merged_sequence = MessageSequence([])
for message in messages:
user_info = message.message_info.user_info
speaker_name = user_info.user_cardname or user_info.user_nickname or user_info.user_id
prefix = format_speaker_content(speaker_name, "", message.timestamp, message.message_id)
merged_sequence.text(prefix)
appended_component = False
if DIRECT_IMAGE_INPUT:
source_sequence = getattr(message, "maisaka_original_raw_message", message.raw_message)
else:
source_sequence = message.raw_message
for component in clone_message_sequence(source_sequence).components:
merged_sequence.components.append(component)
appended_component = True
if not appended_component:
if not message.processed_plain_text:
await message.process()
content = (message.processed_plain_text or "").strip()
if content:
merged_sequence.text(content)
merged_sequence.text("\n")
return merged_sequence
async def _build_user_history_message(self, message: SessionMessage) -> Optional[SessionMessage]:
user_sequence = await self._build_message_sequence(message)
visible_text = build_visible_text_from_sequence(user_sequence).strip()
if not user_sequence.components:
return None
return build_message(
role="user",
content=visible_text,
source="user",
timestamp=message.timestamp,
platform=message.platform,
session_id=self.session_id,
group_info=self._build_group_info(message),
user_info=self._build_runtime_user_info(),
raw_message=user_sequence,
display_text=visible_text,
)
async def _build_message_sequence(self, message: SessionMessage) -> MessageSequence:
message_sequence = MessageSequence([])
user_info = message.message_info.user_info
@@ -253,7 +317,7 @@ class MaisakaHeartFlowChatting:
message_sequence.text(format_speaker_content(speaker_name, "", message.timestamp, message.message_id))
appended_component = False
if DIRECT_IMAGE_INPUT:
if global_config.maisaka.direct_image_input:
source_sequence = getattr(message, "maisaka_original_raw_message", message.raw_message)
else:
source_sequence = message.raw_message
@@ -271,49 +335,6 @@ class MaisakaHeartFlowChatting:
return message_sequence
async def _run_internal_loop(self, anchor_message: SessionMessage) -> None:
"""Run the Maisaka internal loop, treating each thinking round as one cycle."""
last_had_tool_calls = True
for round_index in range(self._max_internal_rounds):
cycle_detail = self._start_cycle()
logger.info(
f"{self.log_prefix} MaiSaka cycle={cycle_detail.cycle_id} "
f"round={round_index + 1}/{self._max_internal_rounds} "
f"context_size={len(self._chat_history)}"
)
try:
if last_had_tool_calls:
perception_started_at = time.time()
await self._append_perception_snapshot()
cycle_detail.time_records["perception"] = time.time() - perception_started_at
planner_started_at = time.time()
response = await self._llm_service.chat_loop_step(self._chat_history)
cycle_detail.time_records["planner"] = time.time() - planner_started_at
response.raw_message.platform = anchor_message.platform
response.raw_message.session_id = self.session_id
response.raw_message.message_info.group_info = self._build_group_info(anchor_message)
self._chat_history.append(response.raw_message)
self._last_assistant_response_time = datetime.now()
if response.tool_calls:
tool_started_at = time.time()
should_pause = await self._handle_tool_calls(response.tool_calls, response.content or "", anchor_message)
cycle_detail.time_records["tool_calls"] = time.time() - tool_started_at
if should_pause:
return
last_had_tool_calls = True
continue
if response.content:
last_had_tool_calls = False
continue
return
finally:
self._end_cycle(cycle_detail)
def _start_cycle(self) -> CycleDetail:
"""Start a Maisaka thinking cycle."""
@@ -360,41 +381,6 @@ class MaisakaHeartFlowChatting:
f"remaining_user_messages={user_message_count}"
)
async def _append_perception_snapshot(self) -> None:
tasks = []
if ENABLE_KNOWLEDGE_MODULE:
tasks.append(("knowledge", retrieve_relevant_knowledge(self._llm_service, self._chat_history)))
if not tasks:
return
results = await asyncio.gather(*[task for _, task in tasks], return_exceptions=True)
perception_parts: list[str] = []
for (task_name, _), result in zip(tasks, results):
if isinstance(result, Exception):
logger.warning(f"{self.log_prefix} Maisaka {task_name} analysis failed: {result}")
continue
if result:
perception_parts.append(f"{task_name.title()}\n{result}")
remove_last_perception(self._chat_history)
if not perception_parts:
return
self._chat_history.append(
build_message(
role="assistant",
content="\n\n".join(perception_parts),
message_kind="perception",
source="assistant",
platform=self.chat_stream.platform,
session_id=self.session_id,
group_info=self._build_group_info(),
user_info=self._build_runtime_bot_user_info(),
)
)
async def _handle_tool_calls(
self,
tool_calls: list[ToolCall],
@@ -419,12 +405,18 @@ class MaisakaHeartFlowChatting:
if tool_call.func_name == "wait":
seconds = (tool_call.args or {}).get("seconds", 30)
try:
wait_seconds = int(seconds)
except (TypeError, ValueError):
wait_seconds = 30
wait_seconds = max(0, wait_seconds)
self._chat_history.append(
self._build_tool_message(
tool_call,
f"Waiting for future input for up to {seconds} seconds.",
f"Waiting for future input for up to {wait_seconds} seconds.",
)
)
self._enter_wait_state(seconds=wait_seconds)
return True
if tool_call.func_name == "stop":
@@ -434,17 +426,18 @@ class MaisakaHeartFlowChatting:
"Conversation loop paused until a new message arrives.",
)
)
self._enter_stop_state()
return True
if tool_call.func_name == "write_file" and ENABLE_WRITE_FILE:
if tool_call.func_name == "write_file" and global_config.maisaka.enable_write_file:
await handle_write_file(tool_call, self._chat_history)
continue
if tool_call.func_name == "read_file" and ENABLE_READ_FILE:
if tool_call.func_name == "read_file" and global_config.maisaka.enable_read_file:
await handle_read_file(tool_call, self._chat_history)
continue
if tool_call.func_name == "list_files" and ENABLE_LIST_FILES:
if tool_call.func_name == "list_files" and global_config.maisaka.enable_list_files:
await handle_list_files(tool_call, self._chat_history)
continue

View File

@@ -14,7 +14,7 @@ from rich.panel import Panel
from src.chat.message_receive.message import SessionMessage
from src.llm_models.payload_content.tool_option import ToolCall
from .config import console
from .console import console
from .input_reader import InputReader
from .llm_service import MaiSakaLLMService
from .message_adapter import build_message