diff --git a/src/chat/replyer/maisaka_generator_base.py b/src/chat/replyer/maisaka_generator_base.py
index 0ec8f6ad..0c7ba07a 100644
--- a/src/chat/replyer/maisaka_generator_base.py
+++ b/src/chat/replyer/maisaka_generator_base.py
@@ -86,7 +86,6 @@ class BaseMaisakaReplyGenerator:
             request_type=request_type,
             session_id=getattr(chat_stream, "session_id", "") if chat_stream is not None else "",
         )
-        self._personality_prompt = self._build_personality_prompt()

     def _build_personality_prompt(self) -> str:
         """Build the persona prompt used by the replyer."""
@@ -272,7 +271,7 @@ class BaseMaisakaReplyGenerator:
                 bot_name=global_config.bot.nickname,
                 group_chat_attention_block=self._build_group_chat_attention_block(session_id),
                 replyer_at_block=self._build_replyer_at_block(),
-                identity=self._personality_prompt,
+                identity=self._build_personality_prompt(),
                 reply_style=self._select_reply_style(),
             )
         except Exception:
diff --git a/src/config/config.py b/src/config/config.py
index b824aa28..940f1a51 100644
--- a/src/config/config.py
+++ b/src/config/config.py
@@ -234,6 +234,7 @@ class ConfigManager:
         self._hot_reload_min_interval_s: float = 1.0
         self._hot_reload_timeout_s: float = 20.0
         self._last_hot_reload_monotonic: float = 0.0
+        self.reload_revision: int = 0

     def initialize(self):
         logger.info(t("config.current_version", version=MMC_VERSION))
@@ -424,9 +425,7 @@ class ConfigManager:
         self.global_config = global_config_new
         self.model_config = model_config_new

-        global global_config, model_config
-        global_config = global_config_new
-        model_config = model_config_new
+        self.reload_revision += 1

         logger.info(t("config.hot_reload_completed"))
         for callback in list(self._reload_callbacks):
@@ -657,8 +656,30 @@ def write_config_to_file(
         tomlkit.dump(full_config_data, f)


+class _ConfigProxy:
+    """Stable config proxy: lets stale imports keep reading the latest config after a hot reload."""
+
+    def __init__(self, getter: Callable[[], ConfigBase]) -> None:
+        self._getter = getter
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._getter(), name)
+
+    def __getitem__(self, key: str) -> Any:
+        return self._getter()[key]
+
+    def __setattr__(self, name: str, value: Any) -> None:
+        if name == "_getter":
+            object.__setattr__(self, name, value)
+            return
+        setattr(self._getter(), name, value)
+
+    def __repr__(self) -> str:
+        return repr(self._getter())
+
+
 # generate_new_config_file(Config, BOT_CONFIG_PATH, CONFIG_VERSION)
 config_manager = ConfigManager()
 config_manager.initialize()
-global_config = config_manager.get_global_config()
-model_config = config_manager.get_model_config()
+global_config: Config = cast(Config, _ConfigProxy(config_manager.get_global_config))
+model_config: ModelConfig = cast(ModelConfig, _ConfigProxy(config_manager.get_model_config))
diff --git a/src/maisaka/chat_loop_service.py b/src/maisaka/chat_loop_service.py
index 606e1269..0083dd6c 100644
--- a/src/maisaka/chat_loop_service.py
+++ b/src/maisaka/chat_loop_service.py
@@ -10,7 +10,7 @@ from rich.console import RenderableType
 from src.common.data_models.llm_service_data_models import LLMGenerationOptions
 from src.common.i18n import get_locale
 from src.common.logger import get_logger
-from src.common.prompt_i18n import get_prompt_cache_revision, load_prompt
+from src.common.prompt_i18n import load_prompt
 from src.common.utils.utils_config import ChatConfigUtils
 from src.config.config import global_config
 from src.core.tooling import ToolAvailabilityContext, ToolRegistry
@@ -218,21 +218,15 @@ class MaisakaChatLoopService:
         self._extra_tools: List[ToolOption] = []
         self._interrupt_flag: asyncio.Event | None = None
         self._tool_registry: ToolRegistry | None = None
-        self._prompts_loaded = chat_system_prompt is not None
-        self._prompt_cache_revision = get_prompt_cache_revision()
+        self._custom_chat_system_prompt = chat_system_prompt
         self._prompt_load_lock = asyncio.Lock()
-        self._personality_prompt = self._build_personality_prompt()
-        if chat_system_prompt is None:
-            self._chat_system_prompt = f"{self._personality_prompt}\n\nYou are a helpful AI assistant."
-        else:
-            self._chat_system_prompt = chat_system_prompt
         self._llm_chat_clients: dict[str, LLMServiceClient] = {}

     @property
     def personality_prompt(self) -> str:
         """Return the current personality prompt."""

-        return self._personality_prompt
+        return self._build_personality_prompt()

     @staticmethod
     def _resolve_llm_request_type(request_kind: str) -> str:
@@ -349,13 +343,15 @@ class MaisakaChatLoopService:
             tools_section: Extra tool-description snippet injected into the prompt.
         """
         async with self._prompt_load_lock:
-            try:
-                self._chat_system_prompt = load_prompt("maisaka_chat", **self.build_prompt_template_context(tools_section))
-            except Exception:
-                self._chat_system_prompt = f"{self._personality_prompt}\n\nYou are a helpful AI assistant."
+            self._build_chat_system_prompt(tools_section)

-        self._prompts_loaded = True
-        self._prompt_cache_revision = get_prompt_cache_revision()
+    def _build_chat_system_prompt(self, tools_section: str = "") -> str:
+        """Build the main chat system prompt on demand from the current config."""
+
+        try:
+            return load_prompt("maisaka_chat", **self.build_prompt_template_context(tools_section))
+        except Exception:
+            return f"{self.personality_prompt}\n\nYou are a helpful AI assistant."

     def build_prompt_template_context(self, tools_section: str = "") -> dict[str, str]:
         """Build the shared render context for the Maisaka prompt templates."""
@@ -364,7 +360,7 @@ class MaisakaChatLoopService:
             "bot_name": global_config.bot.nickname,
             "file_tools_section": tools_section,
             "group_chat_attention_block": self._build_group_chat_attention_block(),
-            "identity": self._personality_prompt,
+            "identity": self.personality_prompt,
             "timing_gate_wait_rule": self._build_timing_gate_wait_rule(),
             "time_block": self._build_time_block(),
         }
@@ -471,7 +467,13 @@ class MaisakaChatLoopService:
         messages: List[Message] = []

         system_msg = MessageBuilder().set_role(RoleType.System)
-        system_msg.add_text_content(system_prompt if system_prompt is not None else self._chat_system_prompt)
+        if system_prompt is not None:
+            resolved_system_prompt = system_prompt
+        elif self._custom_chat_system_prompt is not None:
+            resolved_system_prompt = self._custom_chat_system_prompt
+        else:
+            resolved_system_prompt = self._build_chat_system_prompt()
+        system_msg.add_text_content(resolved_system_prompt)
         messages.append(system_msg.build())

         for msg in selected_history:
@@ -521,8 +523,6 @@ class MaisakaChatLoopService:
             ChatResponse: The planner result for this round.
         """

-        if not self._prompts_loaded or self._prompt_cache_revision != get_prompt_cache_revision():
-            await self.ensure_chat_prompt_loaded()
         enable_visual_message = self._resolve_enable_visual_message(request_kind)
         selected_history, selection_reason = self.select_llm_context_messages(
             chat_history,
diff --git a/src/maisaka/runtime.py b/src/maisaka/runtime.py
index 55061917..f1a4b7b0 100644
--- a/src/maisaka/runtime.py
+++ b/src/maisaka/runtime.py
@@ -105,12 +105,6 @@ class MaisakaHeartFlowChatting:
         self._recent_reply_latencies: deque[tuple[float, float]] = deque()
         self._wait_timeout_task: Optional[asyncio.Task[None]] = None
         self._max_internal_rounds = MAX_INTERNAL_ROUNDS
-        configured_context_size = (
-            global_config.chat.max_context_size
-            if self.chat_stream.is_group_session
-            else global_config.chat.max_private_context_size
-        )
-        self._max_context_size = max(1, int(configured_context_size))
         self._agent_state: Literal["running", "wait", "stop"] = self._STATE_STOP
         self._pending_wait_tool_call_id: Optional[str] = None
         self._force_next_timing_continue = False
@@ -152,6 +146,17 @@ class MaisakaHeartFlowChatting:
         self._register_tool_providers()
         self._emit_monitor_session_start()

+    @property
+    def _max_context_size(self) -> int:
+        """Return the context window size currently in effect for this session."""
+
+        configured_context_size = (
+            global_config.chat.max_context_size
+            if self.chat_stream.is_group_session
+            else global_config.chat.max_private_context_size
+        )
+        return max(1, int(configured_context_size))
+
     def _emit_monitor_session_start(self) -> None:
         """Sync the current session's display identity to the WebUI monitor panel."""
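
Note on the config.py change (not part of the patch): `from src.config.config import global_config` binds a name once at import time, so the old hot-reload path, which rebound the module globals, never reached modules that had already imported them. The proxy keeps one stable object bound forever and forwards every read through a getter. A standalone sketch of the difference, using illustrative names rather than anything from this repository:

class Cfg:
    def __init__(self, nickname: str) -> None:
        self.nickname = nickname

current = Cfg("old-bot")

class Proxy:
    """Forward attribute reads to a getter, so lookups always hit the live object."""

    def __init__(self, getter):
        self._getter = getter

    def __getattr__(self, name):
        # Only called when normal lookup fails, so self._getter resolves without recursion.
        return getattr(self._getter(), name)

direct = current                  # what `from config import global_config` used to bind
proxied = Proxy(lambda: current)  # what the patch binds instead

current = Cfg("new-bot")          # simulate a hot reload swapping the config object

print(direct.nickname)   # old-bot -> the stale-import bug the proxy removes
print(proxied.nickname)  # new-bot -> reads follow the getter to the live object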
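The three other files apply one recurring fix: values derived from global_config (personality prompt, chat system prompt, context window size) were computed once in __init__ and therefore froze at construction time; each is now recomputed on access, so a hot reload takes effect immediately. A minimal sketch of that pattern, with illustrative names:

class Before:
    def __init__(self, cfg) -> None:
        # Frozen at construction; ignores later hot reloads.
        self.max_context_size = max(1, int(cfg.max_context_size))

class After:
    def __init__(self, cfg) -> None:
        self._cfg = cfg

    @property
    def max_context_size(self) -> int:
        # Re-read from the (possibly reloaded) config on every access.
        return max(1, int(self._cfg.max_context_size))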