feat:修复部分模型请求问题(v4l)
This commit is contained in:
@@ -35,7 +35,7 @@ from .context_messages import (
|
||||
ToolResultMessage,
|
||||
build_llm_message_from_context,
|
||||
)
|
||||
from .history_utils import drop_orphan_tool_results
|
||||
from .history_utils import drop_orphan_tool_results, normalize_tool_result_order
|
||||
from .display.prompt_cli_renderer import PromptCLIVisualizer
|
||||
from .visual_mode_utils import resolve_enable_visual_planner
|
||||
|
||||
@@ -652,6 +652,7 @@ class MaisakaChatLoopService:
|
||||
selected_history = [filtered_history[index] for index in selected_indices]
|
||||
selected_history, _ = MaisakaChatLoopService._hide_early_assistant_messages(selected_history)
|
||||
selected_history, _ = drop_orphan_tool_results(selected_history)
|
||||
selected_history, _ = normalize_tool_result_order(selected_history)
|
||||
tool_message_count = sum(1 for message in selected_history if isinstance(message, ToolResultMessage))
|
||||
normal_message_count = len(selected_history) - tool_message_count
|
||||
selection_reason = (
|
||||
|
||||
@@ -3,7 +3,7 @@
|
||||
from dataclasses import dataclass
|
||||
|
||||
from .context_messages import AssistantMessage, LLMContextMessage, ToolResultMessage
|
||||
from .history_utils import drop_leading_orphan_tool_results, drop_orphan_tool_results
|
||||
from .history_utils import drop_leading_orphan_tool_results, drop_orphan_tool_results, normalize_tool_result_order
|
||||
|
||||
TIMING_HISTORY_TOOL_NAMES = {"continue", "finish", "no_reply", "wait"}
|
||||
EARLY_TRIM_RATIO = 0.2
|
||||
@@ -15,6 +15,7 @@ class HistoryPostProcessResult:
|
||||
|
||||
history: list[LLMContextMessage]
|
||||
removed_count: int
|
||||
changed_count: int
|
||||
remaining_context_count: int
|
||||
|
||||
|
||||
@@ -30,6 +31,7 @@ def process_chat_history_after_cycle(
|
||||
removed_assistant_thought_count = _remove_early_assistant_thoughts(processed_history)
|
||||
|
||||
processed_history, orphan_removed_count = drop_orphan_tool_results(processed_history)
|
||||
processed_history, moved_tool_result_count = normalize_tool_result_order(processed_history)
|
||||
remaining_context_count = sum(1 for message in processed_history if message.count_in_context)
|
||||
removed_overflow_count = 0
|
||||
|
||||
@@ -48,9 +50,11 @@ def process_chat_history_after_cycle(
|
||||
+ orphan_removed_count
|
||||
+ removed_overflow_count
|
||||
)
|
||||
changed_count = removed_count + moved_tool_result_count
|
||||
return HistoryPostProcessResult(
|
||||
history=processed_history,
|
||||
removed_count=removed_count,
|
||||
changed_count=changed_count,
|
||||
remaining_context_count=remaining_context_count,
|
||||
)
|
||||
|
||||
|
||||
@@ -105,3 +105,70 @@ def drop_orphan_tool_results(
|
||||
filtered_history.append(message)
|
||||
|
||||
return filtered_history, removed_count
|
||||
|
||||
|
||||
def normalize_tool_result_order(
    chat_history: list[LLMContextMessage],
) -> tuple[list[LLMContextMessage], int]:
    """Pull tool results that drifted away from their assistant `tool_calls`
    message back to the position directly after it.

    Returns the (possibly reordered) history together with the number of tool
    results that actually had to be relocated.  When nothing moved, the
    original list object is returned unchanged with a count of 0.
    """
    if not chat_history:
        return chat_history, 0

    # Indexes of tool results already re-emitted right after their caller.
    pulled_forward: set[int] = set()
    reordered: list[LLMContextMessage] = []
    relocations = 0

    for position, entry in enumerate(chat_history):
        if position in pulled_forward:
            # This tool result was already placed behind its assistant message.
            continue

        reordered.append(entry)
        is_caller = isinstance(entry, AssistantMessage) and bool(entry.tool_calls)
        if not is_caller:
            continue

        emitted = 0  # results appended behind this assistant message so far
        for call in entry.tool_calls:
            call_id = str(call.call_id or "").strip()
            if not call_id:
                continue

            found_at = _find_tool_result_index(
                chat_history,
                tool_call_id=call_id,
                start_index=position + 1,
                consumed_indexes=pulled_forward,
            )
            if found_at is None:
                continue

            pulled_forward.add(found_at)
            reordered.append(chat_history[found_at])
            # A result already sitting exactly where we would emit it next
            # does not count as a relocation.
            if found_at != position + emitted + 1:
                relocations += 1
            emitted += 1

    return (chat_history, 0) if relocations <= 0 else (reordered, relocations)
|
||||
|
||||
|
||||
def _find_tool_result_index(
    chat_history: list[LLMContextMessage],
    *,
    tool_call_id: str,
    start_index: int,
    consumed_indexes: set[int],
) -> int | None:
    """Locate the first unconsumed tool result matching *tool_call_id*.

    Scans *chat_history* from *start_index* onward, skipping positions listed
    in *consumed_indexes*.  Returns the matching index, or ``None`` when no
    such tool result exists.
    """
    candidates = (
        (position, chat_history[position])
        for position in range(start_index, len(chat_history))
        if position not in consumed_indexes
    )
    for position, candidate in candidates:
        if isinstance(candidate, ToolResultMessage) and candidate.tool_call_id == tool_call_id:
            return position
    return None
|
||||
|
||||
@@ -341,6 +341,13 @@ class MaisakaReasoningEngine:
|
||||
queued_trigger = await self._runtime._internal_turn_queue.get()
|
||||
message_triggered, timeout_triggered = self._drain_ready_turn_triggers(queued_trigger)
|
||||
|
||||
if self._runtime._agent_state == self._runtime._STATE_WAIT and not timeout_triggered:
|
||||
self._runtime._message_turn_scheduled = False
|
||||
logger.debug(
|
||||
f"{self._runtime.log_prefix} 当前仍处于 wait 状态,忽略消息触发并继续等待超时"
|
||||
)
|
||||
continue
|
||||
|
||||
if message_triggered:
|
||||
await self._runtime._wait_for_message_quiet_period()
|
||||
self._runtime._message_turn_scheduled = False
|
||||
@@ -809,10 +816,12 @@ class MaisakaReasoningEngine:
|
||||
self._runtime._chat_history,
|
||||
max_context_size=self._runtime._max_context_size,
|
||||
)
|
||||
if process_result.removed_count <= 0:
|
||||
if process_result.changed_count <= 0:
|
||||
return
|
||||
|
||||
self._runtime._chat_history = process_result.history
|
||||
if process_result.removed_count <= 0:
|
||||
return
|
||||
self._runtime._log_history_trimmed(
|
||||
process_result.removed_count,
|
||||
process_result.remaining_context_count,
|
||||
|
||||
Reference in New Issue
Block a user