Files
smartmate/backend/cmd/active-scheduler/main.go
Losita 6843c7efac Version: 0.9.71.dev.260504
后端:
1.阶段 5 task 服务边界落地
- 新增 cmd/task 与 services/task/{dao,rpc,sv},承载 task zrpc、tasks 表迁移和 task outbox 消费边界
- 新增 gateway/client/task、shared/contracts/task 和 task port,gateway /api/v1/task/* 切到 task zrpc client
- 将 task.urgency.promote.requested handler / relay / retry loop 迁入 cmd/task,单体 worker 不再消费 task outbox
- 保留单体 Agent 残留 task 查询的 publish-only 写入能力,避免迁移期 task 事件丢失
- active-scheduler task facts / due job scanner 切到 task RPC,并移除启动期 tasks 表依赖检查
- 更新阶段 5 文档,记录 task 切流点、旧实现保留、跨域 DB 依赖缩减和下一轮建议
- 补充 task rpc 示例配置
2026-05-05 00:00:09 +08:00

85 lines
2.8 KiB
Go

package main
import (
"context"
"log"
"os"
"os/signal"
"syscall"
"github.com/LoveLosita/smartflow/backend/bootstrap"
kafkabus "github.com/LoveLosita/smartflow/backend/infra/kafka"
"github.com/LoveLosita/smartflow/backend/inits"
activeadapters "github.com/LoveLosita/smartflow/backend/services/active_scheduler/core/adapters"
activeschedulerdao "github.com/LoveLosita/smartflow/backend/services/active_scheduler/dao"
activeschedulerrpc "github.com/LoveLosita/smartflow/backend/services/active_scheduler/rpc"
activeschedulersv "github.com/LoveLosita/smartflow/backend/services/active_scheduler/sv"
llmservice "github.com/LoveLosita/smartflow/backend/services/llm"
"github.com/spf13/viper"
)
// main is the entry point of the active-scheduler process. It loads
// configuration, opens the scheduler database, builds the LLM service and
// the active-scheduler service (outbox consumer + due-job scanner), then
// serves the zrpc endpoint until an interrupt or SIGTERM arrives.
//
// NOTE(review): log.Fatalf exits immediately, so the deferred cleanup is
// skipped on startup failures — acceptable, since nothing long-lived has
// been started at those points.
func main() {
	if err := bootstrap.LoadConfig(); err != nil {
		log.Fatalf("failed to load config: %v", err)
	}

	// Root context is cancelled on Ctrl-C or SIGTERM; workers observe it.
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()

	db, err := activeschedulerdao.OpenDBFromConfig()
	if err != nil {
		log.Fatalf("failed to connect active-scheduler database: %v", err)
	}

	einoHub, err := inits.InitEino()
	if err != nil {
		log.Fatalf("failed to initialize active-scheduler Eino runtime: %v", err)
	}

	llm := llmservice.New(llmservice.Options{
		AIHub:             einoHub,
		APIKey:            os.Getenv("ARK_API_KEY"),
		BaseURL:           viper.GetString("agent.baseURL"),
		CourseVisionModel: viper.GetString("courseImport.visionModel"),
	})

	scheduler, err := activeschedulersv.New(db, llm, activeschedulersv.Options{
		JobScanEvery: viper.GetDuration("activeScheduler.jobScanEvery"),
		JobScanLimit: viper.GetInt("activeScheduler.jobScanLimit"),
		KafkaConfig:  kafkabus.LoadConfig(),
		TaskRPC: activeadapters.TaskRPCConfig{
			Endpoints: viper.GetStringSlice("task.rpc.endpoints"),
			Target:    viper.GetString("task.rpc.target"),
			Timeout:   viper.GetDuration("task.rpc.timeout"),
		},
		ScheduleRPC: activeadapters.ScheduleRPCConfig{
			Endpoints: viper.GetStringSlice("schedule.rpc.endpoints"),
			Target:    viper.GetString("schedule.rpc.target"),
			Timeout:   viper.GetDuration("schedule.rpc.timeout"),
		},
	})
	if err != nil {
		log.Fatalf("failed to initialize active-scheduler service: %v", err)
	}
	defer scheduler.Close()

	// Background workers: the outbox consumer and the due-job scanner.
	scheduler.StartWorkers(ctx)
	log.Println("Active-scheduler outbox consumer and due job scanner started")

	server, listenOn, err := activeschedulerrpc.NewServer(activeschedulerrpc.ServerOptions{
		ListenOn: viper.GetString("activeScheduler.rpc.listenOn"),
		Timeout:  viper.GetDuration("activeScheduler.rpc.timeout"),
		Service:  scheduler,
	})
	if err != nil {
		log.Fatalf("failed to build active-scheduler zrpc server: %v", err)
	}
	defer server.Stop()

	// Serve the zrpc endpoint in the background.
	go func() {
		log.Printf("active-scheduler zrpc service starting on %s", listenOn)
		server.Start()
	}()

	// Block until cancellation, then let the deferred Stop/Close run.
	<-ctx.Done()
	log.Println("active-scheduler service stopping")
}