feat: attempt to hot-reload persona and context length

@@ -86,7 +86,6 @@ class BaseMaisakaReplyGenerator:
             request_type=request_type,
             session_id=getattr(chat_stream, "session_id", "") if chat_stream is not None else "",
         )
-        self._personality_prompt = self._build_personality_prompt()
 
     def _build_personality_prompt(self) -> str:
         """Build the persona prompt used by the replyer."""

@@ -272,7 +271,7 @@ class BaseMaisakaReplyGenerator:
                 bot_name=global_config.bot.nickname,
                 group_chat_attention_block=self._build_group_chat_attention_block(session_id),
                 replyer_at_block=self._build_replyer_at_block(),
-                identity=self._personality_prompt,
+                identity=self._build_personality_prompt(),
                 reply_style=self._select_reply_style(),
             )
         except Exception:

@@ -234,6 +234,7 @@ class ConfigManager:
         self._hot_reload_min_interval_s: float = 1.0
         self._hot_reload_timeout_s: float = 20.0
         self._last_hot_reload_monotonic: float = 0.0
+        self.reload_revision: int = 0
 
     def initialize(self):
         logger.info(t("config.current_version", version=MMC_VERSION))

@@ -424,9 +425,7 @@ class ConfigManager:
 
         self.global_config = global_config_new
         self.model_config = model_config_new
-        global global_config, model_config
-        global_config = global_config_new
-        model_config = model_config_new
+        self.reload_revision += 1
         logger.info(t("config.hot_reload_completed"))
 
         for callback in list(self._reload_callbacks):

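The reload path now mutates the manager in place and bumps reload_revision rather than rebinding module globals. As a hedged illustration (the cache class below is hypothetical, not part of this commit), downstream code can key derived state on that counter to rebuild it after each hot reload:

from typing import Optional

class RenderedPromptCache:
    # Hypothetical consumer: rebuild a derived value only when ConfigManager
    # has hot-reloaded since the last build (keyed on reload_revision).
    def __init__(self) -> None:
        self._value: Optional[str] = None
        self._revision: int = -1

    def get(self, config_manager) -> str:
        if self._value is None or self._revision != config_manager.reload_revision:
            cfg = config_manager.get_global_config()
            self._value = f"You are {cfg.bot.nickname}."  # stands in for an expensive build
            self._revision = config_manager.reload_revision
        return self._value
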
@@ -657,8 +656,30 @@ def write_config_to_file(
         tomlkit.dump(full_config_data, f)
 
 
+class _ConfigProxy:
+    """Stable config proxy so that stale imports still read the latest config after a hot reload."""
+
+    def __init__(self, getter: Callable[[], ConfigBase]) -> None:
+        self._getter = getter
+
+    def __getattr__(self, name: str) -> Any:
+        return getattr(self._getter(), name)
+
+    def __getitem__(self, key: str) -> Any:
+        return self._getter()[key]
+
+    def __setattr__(self, name: str, value: Any) -> None:
+        if name == "_getter":
+            object.__setattr__(self, name, value)
+            return
+        setattr(self._getter(), name, value)
+
+    def __repr__(self) -> str:
+        return repr(self._getter())
+
+
 # generate_new_config_file(Config, BOT_CONFIG_PATH, CONFIG_VERSION)
 config_manager = ConfigManager()
 config_manager.initialize()
-global_config = config_manager.get_global_config()
-model_config = config_manager.get_model_config()
+global_config: Config = cast(Config, _ConfigProxy(config_manager.get_global_config))
+model_config: ModelConfig = cast(ModelConfig, _ConfigProxy(config_manager.get_model_config))

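The proxy addresses a stale-binding pitfall: from src.config.config import global_config copies one object reference into the importer's namespace, so rebinding the module global on reload (the removed approach above) never reaches those copies. A minimal self-contained sketch of the problem and the delegation fix, using simplified stand-in names:

from typing import Any, Callable

class Proxy:
    # Forward every attribute read to whatever object the getter returns right now.
    def __init__(self, getter: Callable[[], Any]) -> None:
        object.__setattr__(self, "_getter", getter)

    def __getattr__(self, name: str) -> Any:
        return getattr(self._getter(), name)

class Cfg:
    def __init__(self, nickname: str) -> None:
        self.nickname = nickname

_current = Cfg("Maisaka")
config = Proxy(lambda: _current)  # importers bind this one stable object

print(config.nickname)        # Maisaka
_current = Cfg("Maisaka v2")  # a hot reload swaps the underlying object
print(config.nickname)        # Maisaka v2 -- old bindings stay fresh
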
@@ -10,7 +10,7 @@ from rich.console import RenderableType
 from src.common.data_models.llm_service_data_models import LLMGenerationOptions
 from src.common.i18n import get_locale
 from src.common.logger import get_logger
-from src.common.prompt_i18n import get_prompt_cache_revision, load_prompt
+from src.common.prompt_i18n import load_prompt
 from src.common.utils.utils_config import ChatConfigUtils
 from src.config.config import global_config
 from src.core.tooling import ToolAvailabilityContext, ToolRegistry

@@ -218,21 +218,15 @@ class MaisakaChatLoopService:
         self._extra_tools: List[ToolOption] = []
         self._interrupt_flag: asyncio.Event | None = None
         self._tool_registry: ToolRegistry | None = None
-        self._prompts_loaded = chat_system_prompt is not None
-        self._prompt_cache_revision = get_prompt_cache_revision()
+        self._custom_chat_system_prompt = chat_system_prompt
         self._prompt_load_lock = asyncio.Lock()
-        self._personality_prompt = self._build_personality_prompt()
-        if chat_system_prompt is None:
-            self._chat_system_prompt = f"{self._personality_prompt}\n\nYou are a helpful AI assistant."
-        else:
-            self._chat_system_prompt = chat_system_prompt
         self._llm_chat_clients: dict[str, LLMServiceClient] = {}
 
     @property
     def personality_prompt(self) -> str:
         """Return the current persona prompt."""
 
-        return self._personality_prompt
+        return self._build_personality_prompt()
 
     @staticmethod
     def _resolve_llm_request_type(request_kind: str) -> str:

@@ -349,13 +343,15 @@ class MaisakaChatLoopService:
             tools_section: Extra tool-description snippet injected into the prompt.
         """
         async with self._prompt_load_lock:
-            try:
-                self._chat_system_prompt = load_prompt("maisaka_chat", **self.build_prompt_template_context(tools_section))
-            except Exception:
-                self._chat_system_prompt = f"{self._personality_prompt}\n\nYou are a helpful AI assistant."
-
-            self._prompts_loaded = True
-            self._prompt_cache_revision = get_prompt_cache_revision()
+            self._build_chat_system_prompt(tools_section)
+
+    def _build_chat_system_prompt(self, tools_section: str = "") -> str:
+        """Build the main chat system prompt on the fly from the current config."""
+
+        try:
+            return load_prompt("maisaka_chat", **self.build_prompt_template_context(tools_section))
+        except Exception:
+            return f"{self.personality_prompt}\n\nYou are a helpful AI assistant."
 
     def build_prompt_template_context(self, tools_section: str = "") -> dict[str, str]:
         """Build the shared render arguments for the Maisaka prompt templates."""

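With this change the chat system prompt is re-rendered from the live template and config on every call, falling back to a bare persona prompt when rendering fails. A standalone sketch of that render-per-call-with-fallback shape (the loader and context arguments are stand-ins, not this repo's API):

from typing import Callable, Mapping

def render_system_prompt(
    load_template: Callable[..., str],
    context: Mapping[str, str],
    fallback_identity: str,
) -> str:
    try:
        # Re-rendered on every call, so edited templates and config apply at once.
        return load_template("maisaka_chat", **context)
    except Exception:
        # Missing or broken template: degrade to a bare persona prompt.
        return f"{fallback_identity}\n\nYou are a helpful AI assistant."
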
@@ -364,7 +360,7 @@ class MaisakaChatLoopService:
             "bot_name": global_config.bot.nickname,
             "file_tools_section": tools_section,
             "group_chat_attention_block": self._build_group_chat_attention_block(),
-            "identity": self._personality_prompt,
+            "identity": self.personality_prompt,
             "timing_gate_wait_rule": self._build_timing_gate_wait_rule(),
             "time_block": self._build_time_block(),
         }

@@ -471,7 +467,13 @@ class MaisakaChatLoopService:
 
         messages: List[Message] = []
         system_msg = MessageBuilder().set_role(RoleType.System)
-        system_msg.add_text_content(system_prompt if system_prompt is not None else self._chat_system_prompt)
+        if system_prompt is not None:
+            resolved_system_prompt = system_prompt
+        elif self._custom_chat_system_prompt is not None:
+            resolved_system_prompt = self._custom_chat_system_prompt
+        else:
+            resolved_system_prompt = self._build_chat_system_prompt()
+        system_msg.add_text_content(resolved_system_prompt)
         messages.append(system_msg.build())
 
         for msg in selected_history:

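Message building now resolves the system prompt with a three-level precedence: a per-call prompt wins over the prompt fixed at construction, which wins over the freshly built default. A small runnable sketch of just that ordering, with hypothetical names:

from typing import Callable, Optional

def resolve_system_prompt(
    per_call: Optional[str],
    constructed: Optional[str],
    build_default: Callable[[], str],
) -> str:
    if per_call is not None:
        return per_call          # explicit override for this request only
    if constructed is not None:
        return constructed       # fixed prompt passed at construction time
    return build_default()       # rebuilt per call, so hot reload applies

assert resolve_system_prompt("a", "b", lambda: "c") == "a"
assert resolve_system_prompt(None, "b", lambda: "c") == "b"
assert resolve_system_prompt(None, None, lambda: "c") == "c"
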
@@ -521,8 +523,6 @@ class MaisakaChatLoopService:
             ChatResponse: The planner result for this round.
         """
 
-        if not self._prompts_loaded or self._prompt_cache_revision != get_prompt_cache_revision():
-            await self.ensure_chat_prompt_loaded()
         enable_visual_message = self._resolve_enable_visual_message(request_kind)
         selected_history, selection_reason = self.select_llm_context_messages(
             chat_history,

@@ -105,12 +105,6 @@ class MaisakaHeartFlowChatting:
         self._recent_reply_latencies: deque[tuple[float, float]] = deque()
         self._wait_timeout_task: Optional[asyncio.Task[None]] = None
         self._max_internal_rounds = MAX_INTERNAL_ROUNDS
-        configured_context_size = (
-            global_config.chat.max_context_size
-            if self.chat_stream.is_group_session
-            else global_config.chat.max_private_context_size
-        )
-        self._max_context_size = max(1, int(configured_context_size))
         self._agent_state: Literal["running", "wait", "stop"] = self._STATE_STOP
         self._pending_wait_tool_call_id: Optional[str] = None
         self._force_next_timing_continue = False

@@ -152,6 +146,17 @@ class MaisakaHeartFlowChatting:
         self._register_tool_providers()
         self._emit_monitor_session_start()
 
+    @property
+    def _max_context_size(self) -> int:
+        """Return the context window size currently in effect for this session."""
+
+        configured_context_size = (
+            global_config.chat.max_context_size
+            if self.chat_stream.is_group_session
+            else global_config.chat.max_private_context_size
+        )
+        return max(1, int(configured_context_size))
+
     def _emit_monitor_session_start(self) -> None:
         """Sync this session's display identity to the WebUI monitor panel."""
 

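Turning _max_context_size into a property is what makes the context length hot-reloadable: a value computed once in __init__ freezes the configuration at construction time, while a property re-reads global_config on every access. A self-contained sketch with stand-in config objects (not this repo's real Config classes):

class _ChatCfg:
    max_context_size = 40
    max_private_context_size = 20

class _Cfg:
    chat = _ChatCfg()

global_config = _Cfg()

class Session:
    def __init__(self, is_group: bool) -> None:
        self.is_group = is_group

    @property
    def max_context_size(self) -> int:
        # Read the live config on every access instead of caching at __init__.
        configured = (
            global_config.chat.max_context_size
            if self.is_group
            else global_config.chat.max_private_context_size
        )
        return max(1, int(configured))

s = Session(is_group=True)
print(s.max_context_size)                  # 40
global_config.chat.max_context_size = 100  # simulate a hot reload
print(s.max_context_size)                  # 100 -- no restart needed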