remove:移除部分max_token限制

This commit is contained in:
SengokuCola
2026-05-06 00:46:20 +08:00
parent 371eb5c0f7
commit 0b2620b290
5 changed files with 1 addition and 11 deletions

1
.gitignore vendored
View File

@@ -12,6 +12,7 @@ NapCat.Framework.Windows.OneKey/
log/
logs/
out/
SnowLuma-v1.6.10-win-x64/
tool_call_benchmark.py
run_maibot_core.bat
run_voice.bat

View File

@@ -24,7 +24,6 @@ async def _run_expression_selector(tool_ctx: BuiltinToolRuntimeContext, system_p
context_message_limit=10,
system_prompt=system_prompt,
request_kind="expression_selector",
max_tokens=256,
)
return (response.content or "").strip()

View File

@@ -34,7 +34,6 @@ from .context import BuiltinToolRuntimeContext
logger = get_logger("maisaka_builtin_send_emoji")
_EMOJI_SUB_AGENT_CONTEXT_LIMIT = 12
_EMOJI_SUB_AGENT_MAX_TOKENS = 240
_EMOJI_MAX_CANDIDATE_COUNT = 64
_EMOJI_CANDIDATE_TILE_SIZE = 256
_EMOJI_SUCCESS_MESSAGE = "表情包发送成功"
@@ -368,7 +367,6 @@ async def _select_emoji_with_sub_agent(
context_message_limit=_EMOJI_SUB_AGENT_CONTEXT_LIMIT,
system_prompt=system_prompt,
extra_messages=[prompt_message, candidate_message],
max_tokens=_EMOJI_SUB_AGENT_MAX_TOKENS,
model_task_name=model_task_name,
)
selection_duration_ms = round((datetime.now() - selection_started_at).total_seconds() * 1000, 2)

View File

@@ -194,7 +194,6 @@ class MaisakaChatLoopService:
chat_system_prompt: Optional[str] = None,
session_id: Optional[str] = None,
is_group_chat: Optional[bool] = None,
max_tokens: int = 2048,
model_task_name: str = "planner",
) -> None:
"""初始化 Maisaka 对话循环服务。 """初始化 Maisaka 对话循环服务。
@@ -203,10 +202,7 @@ class MaisakaChatLoopService:
chat_system_prompt: 可选的系统提示词。
session_id: 当前会话 ID用于匹配会话级额外提示。
is_group_chat: 当前会话是否为群聊。
max_tokens: 规划器最大输出长度。
"""
self._max_tokens = max_tokens
self._model_task_name = model_task_name.strip() or "planner"
self._is_group_chat = is_group_chat
self._session_id = session_id or ""
@@ -606,7 +602,6 @@ class MaisakaChatLoopService:
message_factory=message_factory,
options=LLMGenerationOptions(
tool_options=all_tools if all_tools else None,
max_tokens=self._max_tokens,
response_format=response_format,
interrupt_flag=self._interrupt_flag,
),

View File

@@ -619,7 +619,6 @@ class MaisakaHeartFlowChatting:
request_kind: str = "sub_agent",
extra_messages: Optional[Sequence[LLMContextMessage]] = None,
interrupt_flag: asyncio.Event | None = None,
max_tokens: int = 512,
model_task_name: str = "planner",
response_format: RespFormat | None = None,
tool_definitions: Optional[Sequence[ToolDefinitionInput]] = None,
@@ -643,7 +642,6 @@ class MaisakaHeartFlowChatting:
chat_system_prompt=system_prompt,
session_id=self.session_id,
is_group_chat=self.chat_stream.is_group_session,
max_tokens=max_tokens,
model_task_name=model_task_name,
)
sub_agent.set_interrupt_flag(interrupt_flag)
@@ -703,7 +701,6 @@ class MaisakaHeartFlowChatting:
system_prompt="你是回复效果评分器。请严格按用户给出的 JSON 格式输出,不要输出 JSON 之外的内容。",
request_kind="reply_effect_judge",
extra_messages=[judge_message],
max_tokens=900,
tool_definitions=[],
)
return (response.content or "").strip()