feat: 可选,由llm执行回复

This commit is contained in:
SengokuCola
2025-12-31 00:43:08 +08:00
parent 67c24f84cd
commit 71a85667e3
5 changed files with 57 additions and 26 deletions

View File

@@ -125,6 +125,9 @@ class ChatConfig(ConfigBase):
plan_reply_log_max_per_chat: int = 1024
"""每个聊天流最大保存的Plan/Reply日志数量超过此数量时会自动删除最老的日志"""
llm_quote: bool = False
"""是否在 reply action 中启用 quote 参数,启用后 LLM 可以控制是否引用消息"""
def _parse_stream_config_to_chat_id(self, stream_config_str: str) -> Optional[str]:
"""与 ChatStream.get_stream_id 一致地从 "platform:id:type" 生成 chat_id。"""
try: