fix:收敛A_Memorix最小回归修复

最小修复聊天摘要写回游标恢复、摘要元数据透传、webui反馈参数解析、embedding批次缓存索引、图存储清理与配置默认值回归,并补齐针对性回归测试,确保问题解决且不影响现有逻辑。
This commit is contained in:
A-Dawn
2026-04-16 20:28:54 +08:00
parent 322309bef9
commit 6bfccf90a3
17 changed files with 361 additions and 60 deletions

View File

@@ -8,6 +8,7 @@ from typing import Any, Callable, Dict, List
import asyncio
import inspect
import json
import pickle
from sqlalchemy.orm import sessionmaker
from sqlmodel import Session, create_engine
@@ -394,6 +395,19 @@ async def test_text_to_stream_triggers_real_chat_summary_writeback(
assert "我最近买了一条绿色围巾。" in captured_prompts[-1]
assert "好的,我会记住你最近买了绿色围巾。" in captured_prompts[-1]
assert any("绿色围巾" in str(item.get("content", "") or "") for item in paragraphs)
assert any(
int(
(
pickle.loads(item.get("metadata"))
if isinstance(item.get("metadata"), (bytes, bytearray))
else item.get("metadata")
or {}
).get("trigger_message_count", 0)
or 0
)
== 2
for item in paragraphs
)
assert service.chat_summary_writeback._states["test-session"].last_trigger_message_count == 2
finally:
await service.shutdown()

View File

@@ -164,3 +164,28 @@ async def test_runtime_self_check_reports_requested_dimension_without_explicit_o
assert report["detected_dimension"] == 384
assert report["encoded_dimension"] == 384
assert manager.encode_calls == ["A_Memorix runtime self check"]
@pytest.mark.asyncio
async def test_encode_batch_keeps_batch_local_indexes_when_cache_hits_previous_batch(monkeypatch):
    """A cache hit produced by an earlier batch must map to the correct row of the current batch."""
    adapter = EmbeddingAPIAdapter(default_dimension=4, enable_cache=True)
    # Mark dimension detection as already done so encode() goes straight to embedding.
    adapter._dimension = 4
    adapter._dimension_detected = True

    async def stub_detect() -> int:
        return 4

    async def stub_embed(text: str, dimensions: int | None = None):
        del dimensions
        # Deterministic vector seeded by the first character's code point.
        start = float(ord(str(text)[0]))
        return [start + step for step in (0.0, 1.0, 2.0, 3.0)]

    monkeypatch.setattr(adapter, "_detect_dimension", stub_detect)
    monkeypatch.setattr(adapter, "_get_embedding_direct", stub_embed)

    # "A" recurs across the batch boundary (batch_size=2), forcing a cross-batch cache hit.
    embeddings = await adapter.encode(["A", "B", "A", "C"], batch_size=2)

    assert embeddings.shape == (4, 4)
    # Cached "A" must land on its own batch-local row, identical to the first occurrence.
    assert np.array_equal(embeddings[0], embeddings[2])
    assert embeddings[1][0] == float(ord("B"))
    assert embeddings[3][0] == float(ord("C"))

View File

@@ -62,3 +62,21 @@ def test_graph_store_load_resets_stale_adjacency_when_metadata_is_empty(tmp_path
assert reloaded.num_nodes == 0
assert reloaded.num_edges == 0
assert reloaded.get_nodes() == []
def test_graph_store_load_clears_stale_edge_hash_map_when_metadata_is_empty(tmp_path: Path) -> None:
    """Loading empty metadata must also discard a leftover edge-hash map from a prior save."""
    graph_dir = tmp_path / "graph_data"
    original = GraphStore(data_dir=graph_dir)
    original.add_edges([("Alice", "Bob")], relation_hashes=["rel-1"])
    original.save()

    # Overwrite the persisted metadata with an "empty graph" payload that still
    # carries a stale edge_hash_map entry.
    stale_metadata = _build_empty_graph_metadata()
    stale_metadata["edge_hash_map"] = {(0, 1): {"rel-1"}}
    with (graph_dir / "graph_metadata.pkl").open("wb") as fh:
        pickle.dump(stale_metadata, fh)

    fresh_store = GraphStore(data_dir=graph_dir)
    fresh_store.load()
    assert fresh_store.has_edge_hash_map() is False

View File

@@ -59,7 +59,16 @@ async def test_chat_summary_writeback_service_triggers_when_threshold_reached(mo
events.append(("ingest_summary", kwargs))
return SimpleNamespace(success=True, detail="ok")
async def fake_load_last_trigger_message_count(self, *, session_id: str, total_message_count: int) -> int:
del self, session_id, total_message_count
return 0
monkeypatch.setattr(memory_flow_module.memory_service, "ingest_summary", fake_ingest_summary)
monkeypatch.setattr(
memory_flow_module.ChatSummaryWritebackService,
"_load_last_trigger_message_count",
fake_load_last_trigger_message_count,
)
service = memory_flow_module.ChatSummaryWritebackService()
message = SimpleNamespace(session_id="session-1", session=SimpleNamespace(user_id="user-1", group_id="group-1"))
@@ -100,7 +109,16 @@ async def test_chat_summary_writeback_service_skips_when_threshold_not_reached(m
called = True
return SimpleNamespace(success=True, detail="ok")
async def fake_load_last_trigger_message_count(self, *, session_id: str, total_message_count: int) -> int:
del self, session_id, total_message_count
return 0
monkeypatch.setattr(memory_flow_module.memory_service, "ingest_summary", fake_ingest_summary)
monkeypatch.setattr(
memory_flow_module.ChatSummaryWritebackService,
"_load_last_trigger_message_count",
fake_load_last_trigger_message_count,
)
service = memory_flow_module.ChatSummaryWritebackService()
message = SimpleNamespace(session_id="session-1", session=SimpleNamespace(user_id="user-1", group_id="group-1"))
@@ -110,6 +128,116 @@ async def test_chat_summary_writeback_service_skips_when_threshold_not_reached(m
assert called is False
@pytest.mark.asyncio
async def test_chat_summary_writeback_service_restores_previous_trigger_count(monkeypatch):
    """A persisted trigger count of 5 with 8 total messages (threshold 3) must re-trigger ingestion."""
    events: list[tuple[str, object]] = []
    monkeypatch.setattr(
        memory_flow_module,
        "global_config",
        SimpleNamespace(
            memory=SimpleNamespace(
                chat_summary_writeback_enabled=True,
                chat_summary_writeback_message_threshold=3,
                chat_summary_writeback_context_length=7,
            )
        ),
    )
    monkeypatch.setattr(memory_flow_module, "count_messages", lambda **kwargs: 8)

    async def record_ingest(**kwargs):
        events.append(("ingest_summary", kwargs))
        return SimpleNamespace(success=True, detail="ok")

    async def restore_count(self, *, session_id: str, total_message_count: int) -> int:
        # Simulate a previously persisted trigger cursor.
        del self, session_id, total_message_count
        return 5

    monkeypatch.setattr(memory_flow_module.memory_service, "ingest_summary", record_ingest)
    monkeypatch.setattr(
        memory_flow_module.ChatSummaryWritebackService,
        "_load_last_trigger_message_count",
        restore_count,
    )

    service = memory_flow_module.ChatSummaryWritebackService()
    message = SimpleNamespace(session_id="session-1", session=SimpleNamespace(user_id="user-1", group_id="group-1"))
    await service._handle_message(message)

    assert len(events) == 1
    _, payload = events[0]
    # The external id embeds the current total count, and the cursor advances to it.
    assert payload["external_id"] == "chat_auto_summary:session-1:8"
    assert service._states["session-1"].last_trigger_message_count == 8
@pytest.mark.asyncio
async def test_chat_summary_writeback_service_falls_back_to_current_count_for_legacy_summary(monkeypatch):
    """When the restored cursor equals the current count, no new summary is ingested."""
    called = False
    monkeypatch.setattr(
        memory_flow_module,
        "global_config",
        SimpleNamespace(
            memory=SimpleNamespace(
                chat_summary_writeback_enabled=True,
                chat_summary_writeback_message_threshold=3,
                chat_summary_writeback_context_length=7,
            )
        ),
    )
    monkeypatch.setattr(memory_flow_module, "count_messages", lambda **kwargs: 5)

    async def flag_ingest(**kwargs):
        nonlocal called
        called = True
        return SimpleNamespace(success=True, detail="ok")

    async def restore_count(self, *, session_id: str, total_message_count: int) -> int:
        # Legacy summary: persisted cursor already matches the current message count.
        del self, session_id, total_message_count
        return 5

    monkeypatch.setattr(memory_flow_module.memory_service, "ingest_summary", flag_ingest)
    monkeypatch.setattr(
        memory_flow_module.ChatSummaryWritebackService,
        "_load_last_trigger_message_count",
        restore_count,
    )

    service = memory_flow_module.ChatSummaryWritebackService()
    message = SimpleNamespace(session_id="session-1", session=SimpleNamespace(user_id="user-1", group_id="group-1"))
    await service._handle_message(message)

    assert called is False
    assert service._states["session-1"].last_trigger_message_count == 5
@pytest.mark.asyncio
async def test_chat_summary_writeback_service_loads_trigger_count_from_summary_metadata(monkeypatch):
    """The cursor is restored from the newest stored summary's trigger_message_count metadata."""

    class FakeMetadataStore:
        @staticmethod
        def get_paragraphs_by_source(source: str):
            assert source == "chat_summary:session-1"
            # Two summaries; the newer one (created_at=2.0) carries count 6.
            return [
                {"created_at": 1.0, "metadata": {"trigger_message_count": 3}},
                {"created_at": 2.0, "metadata": {"trigger_message_count": 6}},
            ]

    class FakeRuntimeManager:
        @staticmethod
        async def _ensure_kernel():
            return SimpleNamespace(metadata_store=FakeMetadataStore())

    monkeypatch.setattr(
        memory_flow_module.memory_service_module, "a_memorix_host_service", FakeRuntimeManager()
    )

    service = memory_flow_module.ChatSummaryWritebackService()
    restored = await service._load_last_trigger_message_count(session_id="session-1", total_message_count=8)
    assert restored == 6
@pytest.mark.asyncio
async def test_memory_automation_service_auto_starts_and_delegates():
events: list[tuple[str, str]] = []

View File

@@ -82,6 +82,7 @@ def test_resolve_static_path_prefers_installed_dashboard_package(monkeypatch, tm
def test_resolve_static_path_uses_dashboard_dist(monkeypatch, tmp_path) -> None:
dashboard_dist = tmp_path / "dashboard" / "dist"
dashboard_dist.mkdir(parents=True)
(dashboard_dist / "index.html").write_text("<html></html>", encoding="utf-8")
monkeypatch.setattr(webui_app, "_get_project_root", lambda: tmp_path)
@@ -91,6 +92,26 @@ def test_resolve_static_path_uses_dashboard_dist(monkeypatch, tmp_path) -> None:
assert resolved_path == dashboard_dist
def test_resolve_static_path_falls_back_to_package_when_dashboard_dist_has_no_index(monkeypatch, tmp_path) -> None:
    """An index-less dashboard/dist must be skipped in favor of the installed package dist."""
    # dashboard/dist exists but contains no index.html, so it is not servable.
    empty_dist = tmp_path / "dashboard" / "dist"
    empty_dist.mkdir(parents=True)
    installed_dist = tmp_path / "site-packages" / "maibot_dashboard" / "dist"
    installed_dist.mkdir(parents=True)

    class _DashboardModule:
        @staticmethod
        def get_dist_path() -> Path:
            return installed_dist

    monkeypatch.setattr(webui_app, "_get_project_root", lambda: tmp_path)
    with patch.object(webui_app, "import_module", return_value=_DashboardModule()):
        resolved_path = webui_app._resolve_static_path()

    assert resolved_path == installed_dist
def test_resolve_safe_static_file_path_allows_regular_static_file(tmp_path) -> None:
static_path = tmp_path / "dist"
asset_path = static_path / "assets" / "app.js"

View File

@@ -643,7 +643,12 @@ def test_delete_operation_routes(client: TestClient, monkeypatch):
def test_feedback_correction_routes(client: TestClient, monkeypatch):
async def fake_feedback_admin(*, action: str, **kwargs):
if action == "list":
assert kwargs == {"limit": 7, "status": "applied", "rollback_status": "none", "query": "green"}
assert kwargs == {
"limit": 7,
"statuses": ["applied"],
"rollback_statuses": ["none"],
"query": "green",
}
return {"success": True, "items": [{"task_id": 11, "query_text": "what color"}], "count": 1}
if action == "get":
assert kwargs == {"task_id": 11}