Files
smartmate/backend/cmd/memory/main.go
Losita 2a96f4c6f9 Version: 0.9.76.dev.260505
后端:
1.阶段 6 agent / memory 服务化收口
- 新增 cmd/agent 独立进程入口,承载 agent zrpc server、agent outbox relay / consumer 和运行时依赖初始化
- 补齐 services/agent/rpc 的 Chat stream 与 conversation meta/list/timeline、schedule-preview、context-stats、schedule-state unary RPC
- 新增 gateway/client/agent 与 shared/contracts/agent,将 /api/v1/agent chat 和非 chat 门面切到 agent zrpc
- 收缩 gateway 本地 AgentService 装配,双 RPC 开关开启时不再初始化本地 agent 编排、LLM、RAG 和 memory reader fallback
- 将 backend/memory 物理迁入 services/memory,私有实现收入 internal,保留 module/model/observe 作为 memory 服务门面
- 调整 memory outbox、memory reader 和 agent 记忆渲染链路的 import 与服务边界,cmd/memory 独占 memory worker / consumer
- 关闭 gateway 侧 agent outbox worker 所有权,agent relay / consumer 由 cmd/agent 独占,gateway 仅保留 HTTP/SSE 门面与迁移期开关回退
- 更新阶段 6 文档,记录 agent / memory 当前切流点、smoke 结果,以及 backend/client 与 gateway/shared 的目录收口口径
2026-05-05 19:31:39 +08:00

139 lines
4.5 KiB
Go
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
package main
import (
"context"
"fmt"
"log"
"os"
"os/signal"
"syscall"
"github.com/LoveLosita/smartflow/backend/bootstrap"
kafkabus "github.com/LoveLosita/smartflow/backend/infra/kafka"
outboxinfra "github.com/LoveLosita/smartflow/backend/infra/outbox"
"github.com/LoveLosita/smartflow/backend/inits"
llmservice "github.com/LoveLosita/smartflow/backend/services/llm"
memorymodule "github.com/LoveLosita/smartflow/backend/services/memory"
memorydao "github.com/LoveLosita/smartflow/backend/services/memory/dao"
memoryobserve "github.com/LoveLosita/smartflow/backend/services/memory/observe"
memoryrpc "github.com/LoveLosita/smartflow/backend/services/memory/rpc"
memorysv "github.com/LoveLosita/smartflow/backend/services/memory/sv"
ragservice "github.com/LoveLosita/smartflow/backend/services/rag"
ragconfig "github.com/LoveLosita/smartflow/backend/services/rag/config"
"github.com/spf13/viper"
)
// main wires up and runs the standalone memory service process: it loads
// configuration, builds the LLM client and RAG runtime, assembles the memory
// module/service, starts the outbox workers, and serves zrpc until a
// termination signal arrives.
func main() {
	// Configuration must be loaded before any dependency is constructed.
	if err := bootstrap.LoadConfig(); err != nil {
		log.Fatalf("failed to load config: %v", err)
	}

	// Graceful shutdown on Ctrl-C / SIGTERM: cancelling this context stops
	// the workers and unblocks the final <-Done() wait.
	rootCtx, cancelOnSignal := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer cancelOnSignal()

	database, err := memorydao.OpenDBFromConfig()
	if err != nil {
		log.Fatalf("failed to connect memory database: %v", err)
	}

	modelClient, err := buildMemoryLLMClient()
	if err != nil {
		log.Fatalf("failed to initialize memory LLM client: %v", err)
	}

	retrievalRuntime, err := buildMemoryRAGRuntime(rootCtx)
	if err != nil {
		log.Fatalf("failed to initialize memory RAG runtime: %v", err)
	}

	// Assemble the memory module with logging and metrics observation attached.
	moduleCfg := memorymodule.LoadConfigFromViper()
	memModule := memorymodule.NewModuleWithObserve(
		database,
		modelClient,
		retrievalRuntime,
		moduleCfg,
		memorymodule.ObserveDeps{
			Observer: memoryobserve.NewLoggerObserver(log.Default()),
			Metrics:  memoryobserve.NewMetricsRegistry(),
		},
	)

	memService, err := memorysv.NewService(memorysv.Options{
		Module:      memModule,
		OutboxRepo:  outboxinfra.NewRepository(database),
		KafkaConfig: kafkabus.LoadConfig(),
	})
	if err != nil {
		log.Fatalf("failed to initialize memory service: %v", err)
	}
	defer memService.Close()

	// This process (cmd/memory) exclusively owns the memory worker/consumer.
	memService.StartWorkers(rootCtx)

	rpcServer, addr, err := memoryrpc.NewServer(memoryrpc.ServerOptions{
		ListenOn: viper.GetString("memory.rpc.listenOn"),
		Timeout:  viper.GetDuration("memory.rpc.timeout"),
		Service:  memService,
	})
	if err != nil {
		log.Fatalf("failed to build memory zrpc server: %v", err)
	}
	defer rpcServer.Stop()

	// Start blocks, so it runs on its own goroutine; shutdown is signal-driven.
	go func() {
		log.Printf("memory zrpc service starting on %s", addr)
		rpcServer.Start()
	}()

	<-rootCtx.Done()
	log.Println("memory service stopping")
}
// buildMemoryLLMClient initializes the model client used by the memory
// extraction pipeline.
//
// Notes:
//  1. CP1 reuses the existing llm-service canonical entry point rather than
//     rebuilding model-call wrappers inside the memory service;
//  2. this entry point, cmd/start.go and cmd/active-scheduler all need Eino
//     initialization — if a third duplicated assembly appears, extract a
//     shared bootstrap;
//  3. ProClient is returned because the current memory.Module only needs an
//     llmservice.Client, not the full Service.
func buildMemoryLLMClient() (*llmservice.Client, error) {
	hub, err := inits.InitEino()
	if err != nil {
		return nil, err
	}
	opts := llmservice.Options{
		AIHub:             hub,
		APIKey:            os.Getenv("ARK_API_KEY"),
		BaseURL:           viper.GetString("agent.baseURL"),
		CourseVisionModel: viper.GetString("courseImport.visionModel"),
	}
	return llmservice.New(opts).ProClient(), nil
}
// buildMemoryRAGRuntime initializes the RAG runtime used by memory retrieval
// and vector synchronization. When RAG is disabled in config it returns
// (nil, nil), and callers must treat a nil runtime as "no RAG".
//
// Why no shared bootstrap layer yet:
//  1. only the memory capability domain is migrated this round, to avoid
//     touching cmd/start.go's existing assembly path at the same time;
//  2. the canonical RAG entry point already lives in services/rag — this
//     function only does startup-level config reading and log wrapping;
//  3. once the agent service is also migrated out, re-evaluate a common
//     llm/rag startup bootstrap.
func buildMemoryRAGRuntime(ctx context.Context) (ragservice.Runtime, error) {
	cfg := ragconfig.LoadFromViper()
	if !cfg.Enabled {
		log.Println("RAG service is disabled for memory")
		return nil, nil
	}
	logger := log.Default()
	deps := ragservice.FactoryDeps{
		Logger:   logger,
		Observer: ragservice.NewLoggerObserver(logger),
	}
	svc, err := ragservice.NewFromConfig(ctx, cfg, deps)
	if err != nil {
		return nil, fmt.Errorf("build memory RAG service failed: %w", err)
	}
	log.Printf("Memory RAG runtime initialized: store=%s embed=%s reranker=%s", cfg.Store, cfg.EmbedProvider, cfg.RerankerProvider)
	return svc.Runtime(), nil
}