feat:添加了focus的logger记录,修复潜在的bad request 400,为表达器读取配置文件

This commit is contained in:
SengokuCola
2025-05-30 12:18:01 +08:00
parent 54724ae21e
commit 78df7ab553
8 changed files with 95 additions and 53 deletions

View File

@@ -104,7 +104,7 @@ class ChattingInfoProcessor(BaseProcessor):
if obs.compressor_prompt:
summary = ""
try:
summary_result, _, _ = await self.model_summary.generate_response(obs.compressor_prompt)
summary_result, _ = await self.model_summary.generate_response_async(obs.compressor_prompt)
summary = "没有主题的闲聊"
if summary_result:
summary = summary_result

View File

@@ -71,7 +71,7 @@ class MindProcessor(BaseProcessor):
self.llm_model = LLMRequest(
model=global_config.model.focus_chat_mind,
temperature=global_config.model.focus_chat_mind["temp"],
# temperature=global_config.model.focus_chat_mind["temp"],
max_tokens=800,
request_type="focus.processor.chat_mind",
)
@@ -225,7 +225,7 @@ class MindProcessor(BaseProcessor):
# 处理总体异常
logger.error(f"{self.log_prefix} 执行LLM请求或处理响应时出错: {e}")
logger.error(traceback.format_exc())
content = "思考过程中出现错误"
content = "注意:思考过程中出现错误应该是LLM大模型有问题你需要告诉别人检查大模型配置"
# 记录初步思考结果
logger.debug(f"{self.log_prefix} 思考prompt: \n{prompt}\n")