后端:
1.阶段 6 CP4/CP5 目录收口与共享边界纯化
- 将 backend 根目录收口为 services、client、gateway、cmd、shared 五个一级目录
- 收拢 bootstrap、inits、infra/kafka、infra/outbox、conv、respond、pkg、middleware,移除根目录旧实现与空目录
- 将 utils 下沉到 services/userauth/internal/auth,将 logic 下沉到 services/schedule/core/planning
- 将迁移期 runtime 桥接实现统一收拢到 services/runtime/{conv,dao,eventsvc,model},删除 shared/legacy 与未再被 import 的旧 service 实现
- 将 gateway/shared/respond 收口为 HTTP/Gin 错误写回适配,shared/respond 仅保留共享错误语义与状态映射
- 将 HTTP IdempotencyMiddleware 与 RateLimitMiddleware 收口到 gateway/middleware
- 将 GormCachePlugin 下沉到 shared/infra/gormcache,将共享 RateLimiter 下沉到 shared/infra/ratelimit,将 agent token budget 下沉到 services/agent/shared
- 删除 InitEino 兼容壳,收缩 cmd/internal/coreinit 仅保留旧组合壳残留域初始化语义
- 更新微服务迁移计划与桌面 checklist,补齐 CP4/CP5 当前切流点、目录终态与验证结果
- 完成 go test ./...、git diff --check 与最终真实 smoke;health、register/login、task/create+get、schedule/today、task-class/list、memory/items、agent chat/meta/timeline/context-stats 全部 200,SSE 合并结果为 CP5_OK 且 [DONE] 只有 1 个
71 lines
2.1 KiB
Go
package main
|
|
|
|
import (
|
|
"context"
|
|
"log"
|
|
"os"
|
|
"os/signal"
|
|
"syscall"
|
|
|
|
coursedao "github.com/LoveLosita/smartflow/backend/services/course/dao"
|
|
courserpc "github.com/LoveLosita/smartflow/backend/services/course/rpc"
|
|
coursesv "github.com/LoveLosita/smartflow/backend/services/course/sv"
|
|
llmservice "github.com/LoveLosita/smartflow/backend/services/llm"
|
|
rootdao "github.com/LoveLosita/smartflow/backend/services/runtime/dao"
|
|
"github.com/LoveLosita/smartflow/backend/shared/infra/bootstrap"
|
|
"github.com/spf13/viper"
|
|
)
|
|
|
|
func main() {
|
|
if err := bootstrap.LoadConfig(); err != nil {
|
|
log.Fatalf("failed to load config: %v", err)
|
|
}
|
|
|
|
ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
|
|
defer stop()
|
|
|
|
db, err := coursedao.OpenDBFromConfig()
|
|
if err != nil {
|
|
log.Fatalf("failed to connect course database: %v", err)
|
|
}
|
|
|
|
// 1. course 自有 DAO 只承载课程导入对 schedule 表的迁移期写入。
|
|
// 2. scheduleRepo 用于复用既有冲突检查,后续若切 schedule RPC bridge 再替换这里。
|
|
courseRepo := coursedao.NewCourseDAO(db)
|
|
scheduleRepo := rootdao.NewScheduleDAO(db)
|
|
courseImageClient := llmservice.NewArkResponsesClient(
|
|
os.Getenv("ARK_API_KEY"),
|
|
viper.GetString("agent.baseURL"),
|
|
viper.GetString("courseImport.visionModel"),
|
|
)
|
|
svc := coursesv.NewCourseService(
|
|
courseRepo,
|
|
scheduleRepo,
|
|
courseImageClient,
|
|
coursesv.NewCourseImageParseConfig(
|
|
viper.GetInt64("courseImport.maxImageBytes"),
|
|
viper.GetInt("courseImport.maxTokens"),
|
|
),
|
|
viper.GetString("courseImport.visionModel"),
|
|
)
|
|
|
|
server, listenOn, err := courserpc.NewServer(courserpc.ServerOptions{
|
|
ListenOn: viper.GetString("course.rpc.listenOn"),
|
|
Timeout: viper.GetDuration("course.rpc.timeout"),
|
|
MaxImageBytes: viper.GetInt64("courseImport.maxImageBytes"),
|
|
Service: svc,
|
|
})
|
|
if err != nil {
|
|
log.Fatalf("failed to build course zrpc server: %v", err)
|
|
}
|
|
defer server.Stop()
|
|
|
|
go func() {
|
|
log.Printf("course zrpc service starting on %s", listenOn)
|
|
server.Start()
|
|
}()
|
|
|
|
<-ctx.Done()
|
|
log.Println("course service stopping")
|
|
}
|