移除残留的KnowU系统,修复gemini请求的思考签名问题

This commit is contained in:
SengokuCola
2026-04-07 15:15:37 +08:00
parent f2b64cc58c
commit 3b5baf901a
19 changed files with 88 additions and 2034 deletions

View File

@@ -9,7 +9,6 @@ from sqlmodel import col, select
from src.common.database.database import get_db_session
from src.common.database.database_model import PersonInfo
from src.core.tooling import ToolExecutionContext, ToolExecutionResult, ToolInvocation, ToolSpec
from src.know_u.knowledge_store import get_knowledge_store
from .context import BuiltinToolRuntimeContext
@@ -79,7 +78,6 @@ async def handle_tool(
result: Dict[str, Any] = {
"query": person_name,
"persons": persons,
"related_knowledge": _query_related_knowledge(person_name, persons, limit),
}
return tool_ctx.build_success_result(
invocation.tool_name,
@@ -129,55 +127,3 @@ def _query_person_records(person_name: str, limit: int) -> List[Dict[str, Any]]:
)
return persons
def _query_related_knowledge(
    person_name: str,
    persons: List[Dict[str, Any]],
    limit: int,
) -> List[Dict[str, Any]]:
    """Supplement a person lookup with related entries from the Maisaka knowledge store.

    Entries are fetched per matched person; if nothing matches, a plain text
    search on ``person_name`` is used as a fallback. Results are deduplicated
    by entry id (entries without an id are always kept).
    """
    store = get_knowledge_store()
    fetch_limit = max(limit, 5)
    collected: List[Dict[str, Any]] = []
    known_ids: set[str] = set()

    def _absorb(items: List[Dict[str, Any]]) -> None:
        # Dedup by id; an empty id never blocks an entry.
        for entry in items:
            entry_id = str(entry.get("id", "")).strip()
            if entry_id:
                if entry_id in known_ids:
                    continue
                known_ids.add(entry_id)
            collected.append(entry)

    for person in persons:
        _absorb(
            store.get_knowledge_by_user(
                platform=str(person.get("platform", "")).strip(),
                user_id=str(person.get("user_id", "")).strip(),
                user_nickname=str(person.get("user_nickname", "")).strip(),
                person_name=str(person.get("person_name", "")).strip(),
                limit=fetch_limit,
            )
        )

    if not collected:
        # No per-user hit at all: fall back to a free-text knowledge search.
        _absorb(store.search_knowledge(person_name, limit=fetch_limit))

    return [
        {
            "id": str(entry.get("id", "")).strip(),
            "category_id": str(entry.get("category_id", "")).strip(),
            "category_name": str(entry.get("category_name", "")).strip(),
            "content": str(entry.get("content", "")).strip(),
            "metadata": entry.get("metadata", {}),
            "created_at": entry.get("created_at"),
        }
        for entry in collected
    ]

View File

@@ -18,7 +18,6 @@ from src.common.prompt_i18n import load_prompt
from src.common.utils.utils_session import SessionUtils
from src.config.config import global_config
from src.core.tooling import ToolRegistry, ToolSpec
from src.know_u.knowledge import extract_category_ids_from_result
from src.llm_models.model_client.base_client import BaseClient
from src.llm_models.payload_content.message import Message, MessageBuilder, RoleType
from src.llm_models.payload_content.resp_format import RespFormat, RespFormatType
@@ -665,41 +664,6 @@ class MaisakaChatLoopService:
)
return filtered_tool_specs
async def analyze_knowledge_need(
    self,
    chat_history: List[LLMContextMessage],
    categories_summary: str,
) -> List[str]:
    """Decide which knowledge-base categories (if any) the current chat needs.

    Looks at the last few messages and asks the chat LLM to pick category ids
    from ``categories_summary``. Returns an empty list when there is nothing
    to analyze or the LLM call fails (best-effort).
    """
    # Only the tail of the history with visible text is shown to the model.
    recent_lines = [
        f"{msg.role}: {msg.processed_plain_text}"
        for msg in chat_history[-8:]
        if msg.processed_plain_text
    ]
    if not recent_lines or not categories_summary.strip():
        return []
    history_block = chr(10).join(recent_lines)
    prompt = (
        "你需要判断当前对话是否需要查询知识库。\n"
        "请只返回最相关的分类编号,多个编号用空格分隔;如果完全不需要,返回 none。\n\n"
        f"【可用分类】\n{categories_summary}\n\n"
        f"【最近对话】\n{history_block}"
    )
    try:
        generation_result = await self._llm_chat.generate_response(
            prompt=prompt,
            options=LLMGenerationOptions(temperature=0.1, max_tokens=64),
        )
    except Exception:
        # Any LLM failure degrades to "no retrieval needed".
        return []
    return extract_category_ids_from_result(generation_result.response or "")
async def chat_loop_step(
self,
chat_history: List[LLMContextMessage],

View File

@@ -263,7 +263,6 @@ class ReferenceMessageType(str, Enum):
CUSTOM = "custom"
JARGON = "jargon"
KNOWLEDGE = "knowledge"
MEMORY = "memory"
TOOL_HINT = "tool_hint"

View File

@@ -19,7 +19,6 @@ from src.common.logger import get_logger
from src.common.utils.utils_config import ExpressionConfigUtils
from src.config.config import global_config
from src.core.tooling import ToolRegistry
from src.know_u.knowledge import KnowledgeLearner
from src.learners.expression_learner import ExpressionLearner
from src.learners.jargon_miner import JargonMiner
from src.llm_models.payload_content.resp_format import RespFormat
@@ -102,10 +101,8 @@ class MaisakaHeartFlowChatting:
self._enable_jargon_learning = jargon_learn
self._min_extraction_interval = 30
self._last_expression_extraction_time = 0.0
self._last_knowledge_extraction_time = 0.0
self._expression_learner = ExpressionLearner(session_id)
self._jargon_miner = JargonMiner(session_id, session_name=session_name)
self._knowledge_learner = KnowledgeLearner(session_id)
self._reasoning_engine = MaisakaReasoningEngine(self)
self._tool_registry = ToolRegistry()
@@ -449,16 +446,11 @@ class MaisakaHeartFlowChatting:
self._wait_timeout_task = None
async def _trigger_batch_learning(self, messages: list[SessionMessage]) -> None:
"""按同一批消息触发表达方式黑话和 knowledge 学习。"""
expression_result, knowledge_result = await asyncio.gather(
self._trigger_expression_learning(messages),
self._trigger_knowledge_learning(messages),
return_exceptions=True,
)
if isinstance(expression_result, Exception):
logger.error(f"{self.log_prefix} 表达学习任务异常退出: {expression_result}")
if isinstance(knowledge_result, Exception):
logger.error(f"{self.log_prefix} 知识学习任务异常退出: {knowledge_result}")
"""按同一批消息触发表达方式黑话学习。"""
try:
await self._trigger_expression_learning(messages)
except Exception as exc:
logger.error(f"{self.log_prefix} 表达学习任务异常退出: {exc}")
def _should_trigger_learning(
self,
@@ -523,34 +515,6 @@ class MaisakaHeartFlowChatting:
except Exception:
logger.exception(f"{self.log_prefix} ??????")
async def _trigger_knowledge_learning(self, messages: list[SessionMessage]) -> None:
    """Trigger knowledge learning for the same batch of messages.

    NOTE(review): the original docstring and log strings were mojibake
    ("??????"); the Chinese text below is reconstructed from the parallel
    expression-learning log messages in this file — verify against VCS history.
    """
    pending_count = self._knowledge_learner.get_pending_count(self.message_cache)
    # Shared gating: feature switch, cooldown since last extraction, batch size.
    if not self._should_trigger_learning(
        enabled=global_config.maisaka.enable_knowledge_module,
        feature_name="知识学习",
        last_extraction_time=self._last_knowledge_extraction_time,
        pending_count=pending_count,
        min_messages_for_extraction=self._knowledge_learner.min_messages_for_extraction,
    ):
        return
    self._last_knowledge_extraction_time = time.time()
    logger.info(
        f"{self.log_prefix} 触发知识学习: "
        f"本批消息数={len(messages)} 待处理消息数={pending_count} "
        f"缓存消息数={len(self.message_cache)}"
    )
    try:
        added_count = await self._knowledge_learner.learn(self.message_cache)
        if added_count > 0:
            logger.info(f"{self.log_prefix} 知识学习完成: 新增条数={added_count}")
        else:
            logger.debug(f"{self.log_prefix} 知识学习完成, 未提取到新知识")
    except Exception:
        # Mirror the expression-learning path: learning failures are logged
        # and never propagate into the chat loop.
        logger.exception(f"{self.log_prefix} 知识学习失败")
async def _init_mcp(self) -> None:
"""初始化 MCP 工具并注册到统一工具层。"""
self._mcp_host_bridge = MCPHostLLMBridge(