Version: 0.9.65.dev.260503

后端:
1. 阶段 1.5/1.6
收口 llm-service / rag-service,统一模型出口与检索基础设施入口,清退 backend/infra/llm 与 backend/infra/rag 旧实现;
2. 同步更新相关调用链与微服务迁移计划文档
This commit is contained in:
Losita
2026-05-03 23:21:03 +08:00
parent a6c1e5d077
commit 9902ca3563
65 changed files with 550 additions and 376 deletions

View File

@@ -11,7 +11,7 @@ import (
"github.com/LoveLosita/smartflow/backend/active_scheduler/ports"
"github.com/LoveLosita/smartflow/backend/active_scheduler/trigger"
infrallm "github.com/LoveLosita/smartflow/backend/infra/llm"
llmservice "github.com/LoveLosita/smartflow/backend/services/llm"
)
const locateMaxTokens = 800
@@ -24,7 +24,7 @@ const locateMaxTokens = 800
// 3. 不创建新工具系统,也不直接产出 preview。
type Service struct {
reader ports.ScheduleReader
client *infrallm.Client
client *llmservice.Client
clock func() time.Time
logger *log.Logger
}
@@ -34,7 +34,7 @@ type Service struct {
// 说明:
// 1. reader / client 允许为空,方便在模型不可用或读模型暂时不可用时直接回退 ask_user。
// 2. 真正的定位能力只在 Resolve 内部按需启用。
func NewService(reader ports.ScheduleReader, client *infrallm.Client) *Service {
func NewService(reader ports.ScheduleReader, client *llmservice.Client) *Service {
return &Service{
reader: reader,
client: client,
@@ -101,15 +101,15 @@ func (s *Service) Resolve(ctx context.Context, req Request) (Result, error) {
return s.buildAskUserResult(req, "定位 prompt 构造失败"), nil
}
messages := infrallm.BuildSystemUserMessages(strings.TrimSpace(locateSystemPrompt), nil, userPrompt)
resp, rawResult, err := infrallm.GenerateJSON[llmResponse](
messages := llmservice.BuildSystemUserMessages(strings.TrimSpace(locateSystemPrompt), nil, userPrompt)
resp, rawResult, err := llmservice.GenerateJSON[llmResponse](
ctx,
s.client,
messages,
infrallm.GenerateOptions{
llmservice.GenerateOptions{
Temperature: 0.1,
MaxTokens: locateMaxTokens,
Thinking: infrallm.ThinkingModeDisabled,
Thinking: llmservice.ThinkingModeDisabled,
Metadata: map[string]any{
"stage": "active_scheduler_feedback_locate",
"candidate_count": len(candidates),
@@ -340,7 +340,7 @@ func cloneAndTrimStrings(values []string) []string {
return result
}
func truncateRaw(raw *infrallm.TextResult) string {
func truncateRaw(raw *llmservice.TextResult) string {
if raw == nil {
return ""
}

View File

@@ -10,7 +10,7 @@ import (
"time"
"github.com/LoveLosita/smartflow/backend/active_scheduler/candidate"
infrallm "github.com/LoveLosita/smartflow/backend/infra/llm"
llmservice "github.com/LoveLosita/smartflow/backend/services/llm"
)
const selectionMaxTokens = 1200
@@ -22,7 +22,7 @@ const selectionMaxTokens = 1200
// 2. LLM 失败、输出非法或选择不存在候选时,回退到后端 fallback candidate
// 3. 不写 preview、不发通知、不修改正式日程。
type Service struct {
client *infrallm.Client
client *llmservice.Client
clock func() time.Time
logger *log.Logger
}
@@ -33,7 +33,7 @@ type Service struct {
// 1. client 允许为空;为空时选择器只走确定性 fallback,便于本地测试和降级
// 2. 真正的模型接入在 cmd/start.go 中完成:aiHub.Pro -> llm.Client -> selection.Service
// 3. 选择器本身不持有模型配置,只表达本业务域的 prompt 和结果校验。
func NewService(client *infrallm.Client) *Service {
func NewService(client *llmservice.Client) *Service {
return &Service{
client: client,
clock: time.Now,
@@ -70,19 +70,19 @@ func (s *Service) Select(ctx context.Context, req SelectRequest) (Result, error)
return buildFallbackResult(req, "选择器 prompt 构造失败: "+err.Error()), nil
}
messages := infrallm.BuildSystemUserMessages(
messages := llmservice.BuildSystemUserMessages(
strings.TrimSpace(selectionSystemPrompt),
nil,
userPrompt,
)
resp, rawResult, err := infrallm.GenerateJSON[llmSelectionResponse](
resp, rawResult, err := llmservice.GenerateJSON[llmSelectionResponse](
ctx,
s.client,
messages,
infrallm.GenerateOptions{
llmservice.GenerateOptions{
Temperature: 0.1,
MaxTokens: selectionMaxTokens,
Thinking: infrallm.ThinkingModeDisabled,
Thinking: llmservice.ThinkingModeDisabled,
Metadata: map[string]any{
"stage": "active_scheduler_select",
"candidate_count": len(req.Candidates),
@@ -275,7 +275,7 @@ func firstNonEmpty(values ...string) string {
return ""
}
func truncateRaw(raw *infrallm.TextResult) string {
func truncateRaw(raw *llmservice.TextResult) string {
if raw == nil {
return ""
}