feat:新增一个状态面板,使私聊不报错

This commit is contained in:
SengokuCola
2026-04-10 11:04:28 +08:00
parent 19ee2c2f92
commit 65276cf763
10 changed files with 383 additions and 48 deletions

View File

@@ -8,7 +8,7 @@ import re
from typing import List, Optional, Dict, Any, Tuple
from datetime import datetime
from src.common.logger import get_logger
from src.common.data_models.info_data_model import ActionPlannerInfo
from src.common.data_models.planned_action_data_models import PlannedAction
from src.common.data_models.llm_data_model import LLMGenerationDataModel
from src.config.config import global_config
from src.services.llm_service import LLMServiceClient
@@ -65,7 +65,7 @@ class DefaultReplyer:
extra_info: str = "",
reply_reason: str = "",
available_actions: Optional[Dict[str, ActionInfo]] = None,
chosen_actions: Optional[List[ActionPlannerInfo]] = None,
chosen_actions: Optional[List[PlannedAction]] = None,
from_plugin: bool = True,
stream_id: Optional[str] = None,
reply_message: Optional[SessionMessage] = None,
@@ -509,7 +509,7 @@ class DefaultReplyer:
return ""
async def build_actions_prompt(
self, available_actions: Dict[str, ActionInfo], chosen_actions_info: Optional[List[ActionPlannerInfo]] = None
self, available_actions: Dict[str, ActionInfo], chosen_actions_info: Optional[List[PlannedAction]] = None
) -> str:
"""构建动作提示"""
@@ -527,14 +527,14 @@ class DefaultReplyer:
chosen_action_descriptions = ""
if chosen_actions_info:
for action_plan_info in chosen_actions_info:
action_name = action_plan_info.action_type
action_name = action_plan_info.action_name
if action_name in skip_names:
continue
action_description: str = "无描述"
reasoning: str = "无原因"
if action := available_actions.get(action_name):
action_description = action.description or action_description
reasoning = action_plan_info.reasoning or reasoning
reasoning = action_plan_info.decision_reason or reasoning
chosen_action_descriptions += f"- {action_name}: {action_description},原因:{reasoning}\n"
@@ -673,7 +673,7 @@ class DefaultReplyer:
extra_info: str = "",
reply_reason: str = "",
available_actions: Optional[Dict[str, ActionInfo]] = None,
chosen_actions: Optional[List[ActionPlannerInfo]] = None,
chosen_actions: Optional[List[PlannedAction]] = None,
reply_time_point: float = time.time(),
think_level: int = 1,
unknown_words: Optional[List[str]] = None,

View File

@@ -20,7 +20,7 @@ from src.services.embedding_service import EmbeddingServiceClient
from .typo_generator import ChineseTypoGenerator
if TYPE_CHECKING:
from src.common.data_models.info_data_model import TargetPersonInfo
from src.common.data_models.chat_target_info_data_model import ChatTargetInfo
logger = get_logger("chat_utils")
_warned_unconfigured_platforms: set[str] = set()
@@ -699,7 +699,7 @@ def translate_timestamp_to_human_readable(timestamp: float, mode: str = "normal"
return time.strftime("%H:%M:%S", time.localtime(timestamp))
def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional["TargetPersonInfo"]]:
def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional["ChatTargetInfo"]]:
"""
获取聊天类型(是否群聊)和私聊对象信息。
@@ -734,13 +734,13 @@ def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional["TargetP
):
user_nickname = chat_stream.context.message.message_info.user_info.user_nickname
from src.common.data_models.info_data_model import TargetPersonInfo # 解决循环导入问题
from src.common.data_models.chat_target_info_data_model import ChatTargetInfo # 解决循环导入问题
# Initialize target_info with basic info
target_info = TargetPersonInfo(
target_info = ChatTargetInfo(
platform=platform,
user_id=user_id,
user_nickname=user_nickname, # type: ignore
session_nickname=user_nickname or "",
person_id=None,
person_name=None,
)
@@ -752,6 +752,7 @@ def get_chat_type_and_target_info(chat_id: str) -> Tuple[bool, Optional["TargetP
logger.warning(f"用户 {user_nickname} 尚未认识")
# 如果用户尚未认识则返回False和None
return False, None
target_info.is_known = True
if person.person_id:
target_info.person_id = person.person_id
target_info.person_name = person.person_name

View File

@@ -1,28 +0,0 @@
# from dataclasses import dataclass, field
# from typing import Optional, Dict, TYPE_CHECKING
# from . import BaseDataModel
# if TYPE_CHECKING:
# from .database_data_model import DatabaseMessages
# from src.core.types import ActionInfo
# # @dataclass
# # class TargetPersonInfo(BaseDataModel):
# # platform: str = field(default_factory=str)
# # user_id: str = field(default_factory=str)
# # user_nickname: str = field(default_factory=str)
# # person_id: Optional[str] = None
# # person_name: Optional[str] = None
# 已重构见src/common/data_models/chat_target_info_data_model.py
# @dataclass
# class ActionPlannerInfo(BaseDataModel):
# action_type: str = field(default_factory=str)
# reasoning: Optional[str] = None
# action_data: Optional[Dict] = None
# action_message: Optional["DatabaseMessages"] = None
# available_actions: Optional[Dict[str, "ActionInfo"]] = None
# loop_start_time: Optional[float] = None
# action_reasoning: Optional[str] = None
# 已重构见src/common/data_models/planned_action_data_models.py

View File

@@ -18,6 +18,7 @@ from src.common.message_server.server import Server, get_global_server
from src.common.remote import TelemetryHeartBeatTask
from src.config.config import config_manager, global_config
from src.manager.async_task_manager import async_task_manager
from src.maisaka.stage_status_board import disable_stage_status_board, enable_stage_status_board
from src.plugin_runtime.integration import get_plugin_runtime_manager
from src.prompt.prompt_manager import prompt_manager
from src.services.memory_flow_service import memory_automation_service
@@ -65,6 +66,7 @@ class MainSystem:
async def initialize(self) -> None:
"""初始化系统组件"""
enable_stage_status_board()
logger.info(t("startup.waking_up", nickname=global_config.bot.nickname))
# 其他初始化任务
@@ -169,6 +171,7 @@ async def main() -> None:
system.schedule_tasks(),
)
finally:
disable_stage_status_board()
emoji_manager.shutdown()
await memory_automation_service.shutdown()
await a_memorix_host_service.stop()

View File

@@ -30,10 +30,12 @@ from src.plugin_runtime.host.hook_spec_registry import HookSpec, HookSpecRegistr
from src.services.llm_service import LLMServiceClient
from .builtin_tool import get_builtin_tools
from .context_messages import AssistantMessage, LLMContextMessage
from .context_messages import AssistantMessage, LLMContextMessage, ToolResultMessage
from .history_utils import drop_orphan_tool_results
from .prompt_cli_renderer import PromptCLIVisualizer
TIMING_GATE_TOOL_NAMES = {"continue", "no_reply", "wait"}
@dataclass(slots=True)
class ChatResponse:
@@ -466,7 +468,10 @@ class MaisakaChatLoopService:
if not self._prompts_loaded:
await self.ensure_chat_prompt_loaded()
selected_history, selection_reason = self.select_llm_context_messages(chat_history)
selected_history, selection_reason = self.select_llm_context_messages(
chat_history,
request_kind=request_kind,
)
built_messages = self._build_request_messages(
selected_history,
injected_user_messages=injected_user_messages,
@@ -592,16 +597,21 @@ class MaisakaChatLoopService:
def select_llm_context_messages(
chat_history: List[LLMContextMessage],
*,
request_kind: str = "planner",
max_context_size: Optional[int] = None,
) -> tuple[List[LLMContextMessage], str]:
"""选择LLM上下文消息"""
filtered_history = MaisakaChatLoopService._filter_history_for_request_kind(
chat_history,
request_kind=request_kind,
)
effective_context_size = max(1, int(max_context_size or global_config.chat.max_context_size))
selected_indices: List[int] = []
counted_message_count = 0
for index in range(len(chat_history) - 1, -1, -1):
message = chat_history[index]
for index in range(len(filtered_history) - 1, -1, -1):
message = filtered_history[index]
if message.to_llm_message() is None:
continue
@@ -615,7 +625,7 @@ class MaisakaChatLoopService:
return [], f"没有选择到上下文消息,实际发送 {effective_context_size} 条 user/assistant 消息"
selected_indices.reverse()
selected_history = [chat_history[index] for index in selected_indices]
selected_history = [filtered_history[index] for index in selected_indices]
selected_history, hidden_assistant_count = MaisakaChatLoopService._hide_early_assistant_messages(selected_history)
selected_history, _ = drop_orphan_tool_results(selected_history)
selection_reason = (
@@ -629,6 +639,45 @@ class MaisakaChatLoopService:
selection_reason,
)
@staticmethod
def _filter_history_for_request_kind(
    selected_history: List[LLMContextMessage],
    *,
    request_kind: str,
) -> List[LLMContextMessage]:
    """Strip timing-gate tool chains from history before planner requests.

    Non-planner requests see the history untouched; planner requests must not
    be shown the gate's internal continue/no_reply/wait tool traffic.
    """
    if request_kind != "planner":
        return selected_history
    result: List[LLMContextMessage] = []
    for entry in selected_history:
        # Results produced by timing-gate tools never reach the planner.
        if isinstance(entry, ToolResultMessage) and entry.tool_name in TIMING_GATE_TOOL_NAMES:
            continue
        if isinstance(entry, AssistantMessage) and entry.tool_calls:
            surviving = [
                call for call in entry.tool_calls if call.func_name not in TIMING_GATE_TOOL_NAMES
            ]
            if not surviving:
                # Every call was a gate decision — drop the whole message.
                continue
            if len(surviving) < len(entry.tool_calls):
                # Rebuild the message with only the non-gate calls retained.
                result.append(
                    AssistantMessage(
                        content=entry.content,
                        timestamp=entry.timestamp,
                        tool_calls=surviving,
                        source_kind=entry.source_kind,
                    )
                )
                continue
        result.append(entry)
    return result
@staticmethod
def _hide_early_assistant_messages(
selected_history: List[LLMContextMessage],

View File

@@ -254,7 +254,7 @@ class MaisakaReasoningEngine:
logger.warning(f"{self._runtime.log_prefix} Timing Gate 未返回有效控制工具,默认继续执行 Action Loop")
return "continue", response, tool_result_summaries, tool_monitor_results
append_history = selected_tool_call.func_name != "continue"
append_history = False
store_record = selected_tool_call.func_name != "continue"
invocation, result, tool_spec = await self._invoke_tool_call(
selected_tool_call,
@@ -273,6 +273,7 @@ class MaisakaReasoningEngine:
tool_spec=tool_spec,
)
)
self._append_timing_gate_execution_result(response, selected_tool_call, result)
timing_action = str(result.metadata.get("timing_action") or selected_tool_call.func_name).strip()
if timing_action not in TIMING_GATE_TOOL_NAMES:
@@ -350,6 +351,10 @@ class MaisakaReasoningEngine:
continue
self._runtime._agent_state = self._runtime._STATE_RUNNING
self._runtime._update_stage_status(
"消息整理",
f"待处理消息 {len(cached_messages)}" if cached_messages else "准备复用超时锚点",
)
if cached_messages:
asyncio.create_task(self._runtime._trigger_batch_learning(cached_messages))
if timeout_triggered:
@@ -376,7 +381,9 @@ class MaisakaReasoningEngine:
timing_gate_required = True
for round_index in range(self._runtime._max_internal_rounds):
cycle_detail = self._start_cycle()
round_text = f"{round_index + 1}/{self._runtime._max_internal_rounds}"
self._runtime._log_cycle_started(cycle_detail, round_index)
self._runtime._update_stage_status("启动循环", f"循环 {cycle_detail.cycle_id}", round_text=round_text)
await emit_cycle_start(
session_id=self._runtime.session_id,
cycle_id=cycle_detail.cycle_id,
@@ -407,6 +414,7 @@ class MaisakaReasoningEngine:
)
if timing_gate_required:
self._runtime._update_stage_status("Timing Gate", "等待门控决策", round_text=round_text)
current_stage_started_at = time.time()
timing_started_at = time.time()
(
@@ -443,6 +451,7 @@ class MaisakaReasoningEngine:
planner_started_at = time.time()
current_stage_started_at = planner_started_at
self._runtime._update_stage_status("Planner", "组织上下文并请求模型", round_text=round_text)
action_tool_definitions, deferred_tools_reminder = await self._build_action_tool_definitions()
logger.info(
f"{self._runtime.log_prefix} 规划器开始执行: "
@@ -486,6 +495,11 @@ class MaisakaReasoningEngine:
if not response.content:
break
except ReqAbortException as exc:
self._runtime._update_stage_status(
"Planner 已打断",
str(exc) or "收到外部中断信号",
round_text=round_text,
)
interrupted_at = time.time()
interrupted_stage_label = "Planner"
interrupted_text = (
@@ -617,6 +631,8 @@ class MaisakaReasoningEngine:
finally:
if self._runtime._agent_state == self._runtime._STATE_RUNNING:
self._runtime._agent_state = self._runtime._STATE_STOP
if self._runtime._running:
self._runtime._update_stage_status("等待消息", "本轮处理结束")
except asyncio.CancelledError:
self._runtime._log_internal_loop_cancelled()
raise
@@ -1154,6 +1170,24 @@ class MaisakaReasoningEngine:
)
)
def _append_timing_gate_execution_result(
    self,
    response: ChatResponse,
    tool_call: ToolCall,
    result: ToolExecutionResult,
) -> None:
    """Persist the timing-gate decision chain to chat history.

    Later gate rounds read this back, so the assistant message is tagged with
    source_kind="timing_gate" and carries exactly the selected tool call.
    """
    decision_message = AssistantMessage(
        content=response.content or "",
        timestamp=response.raw_message.timestamp,
        tool_calls=[tool_call],
        source_kind="timing_gate",
    )
    self._runtime._chat_history.append(decision_message)
    self._append_tool_execution_result(tool_call, result)
def _build_tool_result_summary(self, tool_call: ToolCall, result: ToolExecutionResult) -> str:
"""构建用于终端展示的工具结果摘要。"""
@@ -1249,8 +1283,13 @@ class MaisakaReasoningEngine:
tool_spec.name: tool_spec
for tool_spec in await self._runtime._tool_registry.list_tools()
}
for tool_call in tool_calls:
total_tool_count = len(tool_calls)
for tool_index, tool_call in enumerate(tool_calls, start=1):
invocation = self._build_tool_invocation(tool_call, latest_thought)
self._runtime._update_stage_status(
f"工具执行 · {invocation.tool_name}",
f"{tool_index}/{total_tool_count} 个工具",
)
tool_started_at = time.time()
if not self._runtime.is_action_tool_currently_available(invocation.tool_name):
result = ToolExecutionResult(

View File

@@ -37,6 +37,7 @@ from .context_messages import LLMContextMessage
from .display_utils import build_tool_call_summary_lines, format_token_count
from .prompt_cli_renderer import PromptCLIVisualizer
from .reasoning_engine import MaisakaReasoningEngine
from .stage_status_board import remove_stage_status, update_stage_status
from .tool_provider import MaisakaBuiltinToolProvider
logger = get_logger("maisaka_runtime")
@@ -121,6 +122,18 @@ class MaisakaHeartFlowChatting:
self._tool_registry = ToolRegistry()
self._register_tool_providers()
def _update_stage_status(self, stage: str, detail: str = "", *, round_text: str = "") -> None:
    """Push this session's current stage to the shared status board."""
    payload = {
        "session_id": self.session_id,
        "session_name": self.session_name,
        "stage": stage,
        "detail": detail,
        "round_text": round_text,
        "agent_state": self._agent_state,
    }
    update_stage_status(**payload)
async def start(self) -> None:
"""启动运行时主循环。"""
if self._running:
@@ -133,6 +146,7 @@ class MaisakaHeartFlowChatting:
self._running = True
self._ensure_background_tasks_running()
self._schedule_message_turn()
self._update_stage_status("空闲", "等待消息触发")
logger.info(f"{self.log_prefix} Maisaka 运行时已启动")
async def stop(self) -> None:
@@ -160,6 +174,7 @@ class MaisakaHeartFlowChatting:
await self._tool_registry.close()
self._mcp_manager = None
self._mcp_host_bridge = None
remove_stage_status(self.session_id)
logger.info(f"{self.log_prefix} Maisaka 运行时已停止")

View File

@@ -0,0 +1,163 @@
"""Maisaka 阶段状态看板。"""
from __future__ import annotations
from pathlib import Path
from typing import Any, Optional
import json
import os
import subprocess
import sys
import threading
import time
class MaisakaStageStatusBoard:
    """Maintains Maisaka per-session stage status and mirrors it to a viewer console.

    Every state change is atomically persisted to a JSON file; a separate
    viewer process (spawned on Windows only) polls that file and renders it.
    """

    def __init__(self) -> None:
        self._lock = threading.Lock()
        self._enabled = False
        self._entries: dict[str, dict[str, Any]] = {}
        self._viewer_process: Optional[subprocess.Popen[Any]] = None
        # Snapshots are persisted here so the viewer process can poll them.
        self._state_file = Path("temp") / "maisaka_stage_status.json"
        self._state_file.parent.mkdir(parents=True, exist_ok=True)

    def enable(self) -> None:
        """Turn the board on and make sure the viewer console is running."""
        with self._lock:
            if self._enabled:
                return
            self._enabled = True
            self._write_state_locked()
            self._ensure_viewer_process_locked()

    def disable(self) -> None:
        """Turn the board off, clear all entries, and stop the viewer console."""
        with self._lock:
            self._enabled = False
            self._entries.clear()
            self._write_state_locked()
            proc = self._viewer_process
            self._viewer_process = None
            if proc is not None and proc.poll() is None:
                try:
                    proc.terminate()
                except Exception:
                    # Best effort: the viewer also exits on its own once it
                    # observes enabled=False in the state file.
                    pass

    def update(
        self,
        *,
        session_id: str,
        session_name: str,
        stage: str,
        detail: str = "",
        round_text: str = "",
        agent_state: str = "",
    ) -> None:
        """Record the current stage of one session and persist the snapshot."""
        with self._lock:
            if not self._enabled:
                return
            timestamp = time.time()
            existing = self._entries.get(session_id, {})
            last_stage = str(existing.get("stage") or "").strip()
            # Keep the original start time while the stage name is unchanged,
            # so the viewer can show how long the stage has been running.
            if last_stage != stage:
                started_at = timestamp
            else:
                started_at = float(existing.get("stage_started_at") or timestamp)
            self._entries[session_id] = {
                "session_id": session_id,
                "session_name": session_name,
                "stage": stage,
                "detail": detail,
                "round_text": round_text,
                "agent_state": agent_state,
                "stage_started_at": started_at,
                "updated_at": timestamp,
            }
            self._write_state_locked()

    def remove(self, session_id: str) -> None:
        """Drop one session from the board and persist the snapshot."""
        with self._lock:
            if not self._enabled:
                return
            self._entries.pop(session_id, None)
            self._write_state_locked()

    def _write_state_locked(self) -> None:
        """Atomically rewrite the state file; caller must hold the lock."""
        snapshot = {
            "enabled": self._enabled,
            "host_pid": os.getpid(),
            "updated_at": time.time(),
            "entries": list(self._entries.values()),
        }
        temp_path = self._state_file.with_suffix(".tmp")
        temp_path.write_text(json.dumps(snapshot, ensure_ascii=False, indent=2), encoding="utf-8")
        # replace() is atomic on the same filesystem, so the viewer never
        # observes a half-written file.
        temp_path.replace(self._state_file)

    def _ensure_viewer_process_locked(self) -> None:
        """Spawn the console viewer (Windows only); caller must hold the lock."""
        if not sys.platform.startswith("win"):
            return
        if self._viewer_process is not None and self._viewer_process.poll() is None:
            return
        viewer_script = Path(__file__).resolve().with_name("stage_status_viewer.py")
        self._viewer_process = subprocess.Popen(
            [
                sys.executable,
                str(viewer_script),
                str(self._state_file.resolve()),
            ],
            creationflags=getattr(subprocess, "CREATE_NEW_CONSOLE", 0),
            cwd=str(Path.cwd()),
        )
# Module-wide singleton backing the convenience functions below.
_BOARD = MaisakaStageStatusBoard()


def enable_stage_status_board() -> None:
    """Enable the console stage status board."""
    _BOARD.enable()


def disable_stage_status_board() -> None:
    """Disable the console stage status board."""
    _BOARD.disable()


def update_stage_status(
    *,
    session_id: str,
    session_name: str,
    stage: str,
    detail: str = "",
    round_text: str = "",
    agent_state: str = "",
) -> None:
    """Forward one session's stage update to the shared board."""
    _BOARD.update(
        session_id=session_id,
        session_name=session_name,
        stage=stage,
        detail=detail,
        round_text=round_text,
        agent_state=agent_state,
    )


def remove_stage_status(session_id: str) -> None:
    """Remove one session's entry from the shared board."""
    _BOARD.remove(session_id)

View File

@@ -0,0 +1,93 @@
"""Maisaka 阶段状态看板查看器。"""
from __future__ import annotations
from pathlib import Path
from typing import Any
import json
import os
import sys
import traceback
import time
def _clear_screen() -> None:
    """Clear the console using the platform's native command."""
    command = "cls" if sys.platform.startswith("win") else "clear"
    os.system(command)
def _load_state(state_file: Path) -> dict[str, Any]:
if not state_file.exists():
return {}
try:
return json.loads(state_file.read_text(encoding="utf-8"))
except Exception:
return {}
def _render(state: dict[str, Any]) -> str:
entries = state.get("entries")
if not isinstance(entries, list):
entries = []
lines = ["Maisaka 阶段看板", "=" * 72, ""]
if not entries:
lines.append("当前没有活跃会话。")
return "\n".join(lines)
entries = sorted(
[entry for entry in entries if isinstance(entry, dict)],
key=lambda item: str(item.get("session_name") or item.get("session_id") or ""),
)
now = time.time()
for entry in entries:
session_name = str(entry.get("session_name") or entry.get("session_id") or "").strip() or "unknown"
session_id = str(entry.get("session_id") or "").strip()
stage = str(entry.get("stage") or "").strip() or "未知"
detail = str(entry.get("detail") or "").strip() or "-"
round_text = str(entry.get("round_text") or "").strip()
agent_state = str(entry.get("agent_state") or "").strip() or "-"
stage_started_at = float(entry.get("stage_started_at") or now)
elapsed = max(0.0, now - stage_started_at)
lines.append(f"Chat: {session_name}")
if session_id and session_id != session_name:
lines.append(f"ID: {session_id}")
lines.append(f"阶段: {stage}")
if round_text:
lines.append(f"轮次: {round_text}")
lines.append(f"详情: {detail}")
lines.append(f"状态: {agent_state}")
lines.append(f"阶段耗时: {elapsed:.1f}s")
lines.append("-" * 72)
return "\n".join(lines)
def main() -> int:
    """Poll the shared state file and redraw the board until it is disabled.

    argv[1] is the path of the JSON state file written by the host process.
    Returns 0 once the host disables the board, 1 on bad invocation.
    """
    if len(sys.argv) < 2:
        return 1
    state_file = Path(sys.argv[1]).resolve()
    # Crash reports go next to the state file; overwritten on each failure.
    log_file = state_file.with_name("maisaka_stage_status_viewer.log")
    last_render = ""
    while True:
        try:
            state = _load_state(state_file)
            # The host sets enabled=False (or removes the file, which loads
            # as {}) to ask this viewer to exit.
            if not state.get("enabled", False):
                return 0
            rendered = _render(state)
            # Only repaint when the content actually changed, to avoid flicker.
            if rendered != last_render:
                _clear_screen()
                print(rendered, flush=True)
                last_render = rendered
            time.sleep(0.5)
        except Exception:
            # Best effort: log the traceback and keep polling after a pause.
            log_file.write_text(traceback.format_exc(), encoding="utf-8")
            time.sleep(3)
    # NOTE(review): unreachable with the retry loop above — presumably a
    # safety net; confirm the intended error-exit semantics.
    return 1


if __name__ == "__main__":
    raise SystemExit(main())

View File

@@ -20,8 +20,8 @@ from src.common.logger import get_logger
from src.core.types import ActionInfo
if TYPE_CHECKING:
from src.common.data_models.info_data_model import ActionPlannerInfo
from src.common.data_models.llm_data_model import LLMGenerationDataModel
from src.common.data_models.planned_action_data_models import PlannedAction
from src.chat.message_receive.message import SessionMessage
install(extra_lines=3)
@@ -100,7 +100,7 @@ async def generate_reply(
extra_info: str = "",
reply_reason: str = "",
available_actions: Optional[Dict[str, ActionInfo]] = None,
chosen_actions: Optional[List["ActionPlannerInfo"]] = None,
chosen_actions: Optional[List["PlannedAction"]] = None,
unknown_words: Optional[List[str]] = None,
enable_splitter: bool = True,
enable_chinese_typo: bool = True,