feat: 私聊回滚wait,添加记忆总结模型配置

This commit is contained in:
SengokuCola
2026-05-08 02:21:27 +08:00
parent 197351d469
commit 7bdbdec157
13 changed files with 371 additions and 92 deletions

View File

@@ -12,12 +12,24 @@ from src.services import llm_service as llm_api
def _fake_available_models() -> dict[str, TaskConfig]:
    """Return a minimal fake model-task registry for resolver tests.

    Provides the three task entries the summary-model resolver inspects:
    "memory" (preferred), "utils" (first fallback), and "replyer"
    (which must NOT be used as a summary fallback).
    """
    # NOTE: the flattened diff had interleaved old/new lines here
    # (duplicate model_list and selection_strategy entries for "replyer");
    # this is the post-commit version with the new values.
    return {
        "memory": TaskConfig(
            model_list=["memory-model"],
            max_tokens=512,
            temperature=0.4,
            selection_strategy="random",
        ),
        "utils": TaskConfig(
            model_list=["utils-model"],
            max_tokens=256,
            temperature=0.5,
            selection_strategy="random",
        ),
        "replyer": TaskConfig(
            model_list=["replyer-model"],
            max_tokens=128,
            temperature=0.7,
            selection_strategy="random",
        ),
    }
@@ -35,7 +47,63 @@ def test_resolve_summary_model_config_uses_auto_list_when_summarization_missing(
resolved = importer._resolve_summary_model_config()
assert resolved is not None
assert resolved.model_list == ["test-model"]
assert resolved.model_list == ["memory-model"]
def test_resolve_summary_model_config_auto_falls_back_to_utils_then_planner(monkeypatch):
    """Auto resolution falls back to "utils" first, then "planner".

    With no "memory" task configured, the resolver should pick the
    "utils" model list; if "utils" is also absent it should pick
    "planner". "replyer" must never win while either is present.
    """
    importer = SummaryImporter(
        vector_store=None,
        graph_store=None,
        metadata_store=None,
        embedding_manager=None,
        plugin_config={},
    )

    # First round: "utils" is available and should be preferred.
    models_with_utils = {
        "utils": TaskConfig(model_list=["utils-model"]),
        "planner": TaskConfig(model_list=["planner-model"]),
        "replyer": TaskConfig(model_list=["replyer-model"]),
    }
    monkeypatch.setattr(llm_api, "get_available_models", lambda: models_with_utils)
    resolved = importer._resolve_summary_model_config()
    assert resolved is not None
    assert resolved.model_list == ["utils-model"]

    # Second round: without "utils", the resolver drops to "planner".
    models_without_utils = {
        "planner": TaskConfig(model_list=["planner-model"]),
        "replyer": TaskConfig(model_list=["replyer-model"]),
    }
    monkeypatch.setattr(llm_api, "get_available_models", lambda: models_without_utils)
    resolved = importer._resolve_summary_model_config()
    assert resolved is not None
    assert resolved.model_list == ["planner-model"]
def test_resolve_summary_model_config_auto_does_not_fallback_to_replyer(monkeypatch):
    """"replyer" (and "embedding") must not be used as summary fallbacks.

    When only non-summary tasks are configured, resolution should yield
    None rather than borrowing the reply model.
    """
    # Registry deliberately lacks "memory", "utils", and "planner".
    non_summary_models = {
        "replyer": TaskConfig(model_list=["replyer-model"]),
        "embedding": TaskConfig(model_list=["embedding-model"]),
    }
    monkeypatch.setattr(llm_api, "get_available_models", lambda: non_summary_models)

    importer = SummaryImporter(
        vector_store=None,
        graph_store=None,
        metadata_store=None,
        embedding_manager=None,
        plugin_config={},
    )

    assert importer._resolve_summary_model_config() is None
def test_resolve_summary_model_config_rejects_legacy_string_selector(monkeypatch):