合并消息的正确存储 (Correct storage of merged messages)

This commit is contained in:
UnCLAS-Prommer
2025-09-15 15:21:37 +08:00
parent d560d02761
commit 48ed58c4a8
8 changed files with 21 additions and 8 deletions

View File

@@ -310,6 +310,7 @@ async def command_to_stream(
display_message=display_message,
typing=False,
storage_message=storage_message,
set_reply=False,
)

View File

@@ -397,6 +397,8 @@ class BaseAction(ABC):
reply_set=reply_set,
stream_id=self.chat_id,
storage_message=storage_message,
set_reply=False,
reply_message=None,
)
async def send_voice(self, audio_base64: str) -> bool:

View File

@@ -320,6 +320,8 @@ class BaseCommand(ABC):
reply_set=reply_set,
stream_id=chat_stream.stream_id,
storage_message=storage_message,
set_reply=False,
reply_message=None,
)
async def send_custom(

View File

@@ -377,4 +377,6 @@ class BaseEventHandler(ABC):
reply_set=reply_set,
stream_id=stream_id,
storage_message=storage_message,
set_reply=False,
reply_message=None,
)

View File

@@ -2,7 +2,7 @@ import asyncio
import contextlib
from typing import List, Dict, Optional, Type, Tuple, TYPE_CHECKING
from src.chat.message_receive.message import MessageRecv
from src.chat.message_receive.message import MessageRecv, MessageSending
from src.chat.message_receive.chat_stream import get_chat_manager
from src.common.logger import get_logger
from src.plugin_system.base.component_types import EventType, EventHandlerInfo, MaiMessages, CustomEventHandlerResult
@@ -66,7 +66,7 @@ class EventsManager:
async def handle_mai_events(
self,
event_type: EventType,
message: Optional[MessageRecv] = None,
message: Optional[MessageRecv | MessageSending] = None,
llm_prompt: Optional[str] = None,
llm_response: Optional["LLMGenerationDataModel"] = None,
stream_id: Optional[str] = None,
@@ -206,7 +206,7 @@ class EventsManager:
def _transform_event_message(
self,
message: MessageRecv,
message: MessageRecv | MessageSending,
llm_prompt: Optional[str] = None,
llm_response: Optional["LLMGenerationDataModel"] = None,
) -> MaiMessages:
@@ -295,7 +295,7 @@ class EventsManager:
def _prepare_message(
self,
event_type: EventType,
message: Optional[MessageRecv] = None,
message: Optional[MessageRecv | MessageSending] = None,
llm_prompt: Optional[str] = None,
llm_response: Optional["LLMGenerationDataModel"] = None,
stream_id: Optional[str] = None,