diff --git a/src/A_memorix/core/runtime/lifecycle_orchestrator.py b/src/A_memorix/core/runtime/lifecycle_orchestrator.py
index bc49a25f..a421b05a 100644
--- a/src/A_memorix/core/runtime/lifecycle_orchestrator.py
+++ b/src/A_memorix/core/runtime/lifecycle_orchestrator.py
@@ -4,7 +4,7 @@ from __future__ import annotations
 
 import asyncio
 from pathlib import Path
-from typing import Any
+from typing import Any, Callable, Coroutine, cast
 
 from src.common.logger import get_logger
 
@@ -106,7 +106,8 @@ def start_background_tasks(plugin: Any) -> None:
         and bool(plugin.get_config("episode.generation_enabled", True))
         and (episode_task is None or episode_task.done())
     ):
-        plugin._episode_generation_task = asyncio.create_task(episode_loop())
+        episode_loop_fn = cast(Callable[[], Coroutine[Any, Any, Any]], episode_loop)
+        plugin._episode_generation_task = asyncio.create_task(episode_loop_fn())
 
 
 async def cancel_background_tasks(plugin: Any) -> None:
diff --git a/src/A_memorix/core/runtime/sdk_memory_kernel.py b/src/A_memorix/core/runtime/sdk_memory_kernel.py
index 13e55bfa..0da4e02d 100644
--- a/src/A_memorix/core/runtime/sdk_memory_kernel.py
+++ b/src/A_memorix/core/runtime/sdk_memory_kernel.py
@@ -7,7 +7,7 @@ import time
 import uuid
 from dataclasses import dataclass
 from pathlib import Path
-from typing import Any, Awaitable, Callable, Dict, Iterable, List, Optional, Sequence
+from typing import Any, Callable, Coroutine, Dict, Iterable, List, Optional, Sequence
 
 from src.common.logger import get_logger
 
@@ -83,7 +83,7 @@ class _KernelRuntimeFacade:
     async def execute_request_with_dedup(
         self,
         request_key: str,
-        executor: Callable[[], Awaitable[Dict[str, Any]]],
+        executor: Callable[[], Coroutine[Any, Any, Dict[str, Any]]],
     ) -> tuple[bool, Dict[str, Any]]:
         return await self._kernel.execute_request_with_dedup(request_key, executor)
 
@@ -769,7 +769,7 @@ class SDKMemoryKernel:
     async def execute_request_with_dedup(
         self,
         request_key: str,
-        executor: Callable[[], Awaitable[Dict[str, Any]]],
+        executor: Callable[[], Coroutine[Any, Any, Dict[str, Any]]],
    ) -> tuple[bool, Dict[str, Any]]:
         token = str(request_key or "").strip()
         if not token:
@@ -1761,8 +1761,22 @@ class SDKMemoryKernel:
             profile = manager.get_profile_snapshot()
             return {"success": True, "profile": profile, "toml": manager.export_toml_snippet(profile)}
         if act == "apply_profile":
-            profile = kwargs.get("profile") if isinstance(kwargs.get("profile"), dict) else kwargs
-            return {"success": True, **await manager.apply_profile(profile, reason=str(kwargs.get("reason", "manual") or "manual"))}
+            profile_raw = kwargs.get("profile")
+            if isinstance(profile_raw, dict):
+                profile_payload: Dict[str, Any] = dict(profile_raw)
+            else:
+                profile_payload = {
+                    key: value
+                    for key, value in kwargs.items()
+                    if key not in {"reason", "profile"}
+                }
+            return {
+                "success": True,
+                **await manager.apply_profile(
+                    profile_payload,
+                    reason=str(kwargs.get("reason", "manual") or "manual"),
+                ),
+            }
         if act == "rollback_profile":
             return {"success": True, **await manager.rollback_profile()}
         if act == "export_profile":
@@ -1999,7 +2013,11 @@ class SDKMemoryKernel:
         self._ensure_background_task("memory_maintenance", self._memory_maintenance_loop)
         self._ensure_background_task("person_profile_refresh", self._person_profile_refresh_loop)
 
-    def _ensure_background_task(self, name: str, factory: Callable[[], Awaitable[None]]) -> None:
+    def _ensure_background_task(
+        self,
+        name: str,
+        factory: Callable[[], Coroutine[Any, Any, None]],
+    ) -> None:
         task = self._background_tasks.get(name)
         if task is not None and not task.done():
             return
diff --git a/src/A_memorix/core/storage/graph_store.py b/src/A_memorix/core/storage/graph_store.py
index 0a5fd95d..88574a12 100644
--- a/src/A_memorix/core/storage/graph_store.py
+++ b/src/A_memorix/core/storage/graph_store.py
@@ -73,7 +73,7 @@ class GraphStore:
 
     def __init__(
         self,
-        matrix_format: str = "csr",
+        matrix_format: Union[str, SparseMatrixFormat] = "csr",
         data_dir: Optional[Union[str, Path]] = None,
     ):
         """
diff --git a/src/A_memorix/core/storage/vector_store.py b/src/A_memorix/core/storage/vector_store.py
index 97a9144c..787e625a 100644
--- a/src/A_memorix/core/storage/vector_store.py
+++ b/src/A_memorix/core/storage/vector_store.py
@@ -79,6 +79,7 @@ class VectorStore:
         self.quantization_type = QuantizationType.INT8
         self.index_type = "sq8"
         self.buffer_size = buffer_size
+        self.min_train_threshold = self.DEFAULT_MIN_TRAIN
         self._index: Optional[faiss.IndexIDMap2] = None
 
         self._init_index()
diff --git a/src/A_memorix/core/utils/relation_write_service.py b/src/A_memorix/core/utils/relation_write_service.py
index 6fa2e621..fbaeb820 100644
--- a/src/A_memorix/core/utils/relation_write_service.py
+++ b/src/A_memorix/core/utils/relation_write_service.py
@@ -128,7 +128,7 @@ class RelationWriteService:
         predicate: str,
         obj: str,
         confidence: float = 1.0,
-        source_paragraph: str = "",
+        source_paragraph: Optional[str] = None,
         metadata: Optional[Dict[str, Any]] = None,
         *,
         write_vector: bool = True,
diff --git a/src/webui/routers/memory.py b/src/webui/routers/memory.py
index 303b2cc4..25bb1d9f 100644
--- a/src/webui/routers/memory.py
+++ b/src/webui/routers/memory.py
@@ -125,7 +125,8 @@ class DeletePurgeRequest(BaseModel):
 
 
 def _build_import_guide_markdown(settings: dict[str, Any]) -> str:
-    path_aliases = settings.get("path_aliases") if isinstance(settings.get("path_aliases"), dict) else {}
+    path_aliases_raw = settings.get("path_aliases")
+    path_aliases = path_aliases_raw if isinstance(path_aliases_raw, dict) else {}
     alias_lines = [
         f"- `{name}` -> `{path}`"
         for name, path in sorted(path_aliases.items())
@@ -394,15 +395,7 @@ async def _memory_config_get() -> dict:
 
 
 async def _memory_config_get_raw() -> dict:
-    raw_payload_getter = getattr(a_memorix_host_service, "get_raw_config_with_meta", None)
-    if callable(raw_payload_getter):
-        raw_payload = raw_payload_getter()
-    else:
-        raw_payload = {
-            "config": a_memorix_host_service.get_raw_config(),
-            "exists": bool(a_memorix_host_service.get_config_path().exists()),
-            "using_default": False,
-        }
+    raw_payload = a_memorix_host_service.get_raw_config_with_meta()
     return {
         "success": True,
         "config": str(raw_payload.get("config", "") or ""),
@@ -628,8 +621,10 @@ async def _tuning_apply_best(task_id: str) -> dict:
 
 
 async def _tuning_report(task_id: str, fmt: str) -> dict:
-    payload = await memory_service.tuning_admin(action="get_report", task_id=task_id, format=fmt)
-    report = payload.get("report") if isinstance(payload.get("report"), dict) else {}
+    payload_raw = await memory_service.tuning_admin(action="get_report", task_id=task_id, format=fmt)
+    payload = payload_raw if isinstance(payload_raw, dict) else {}
+    report_raw = payload.get("report")
+    report = report_raw if isinstance(report_raw, dict) else {}
     return {
         "success": bool(payload.get("success", False)),
         "format": report.get("format", fmt),