feat(agent): ✨ 在 Agent 聊天接口中新增 AI 随口记功能 * 无相关意图时保持正常聊天,若识别到相关意图则自动切换为随口记模式 * 支持阶段状态反馈与话题化回复,提升交互体验 - 引入请求级当前时间基准,支持相对时间解析(如“明天”、“下周一”等) - 增加非法日期拦截机制,防止用户输入格式错误的时间并返回修正提示 - 优化随口记图谱,补充阶段打点与详细中文注释,失败/重试分支处理更清晰 - 推送 `reasoning_content` 阶段状态,涵盖 `request.accepted`、`intent`、`deadline`、`priority`、`persisting`、`persisted`、`reply.polishing` 等状态 - 最终文案改为“事实句 + AI 生成的贴题轻松跟进句”,避免硬编码调侃内容 - 完善时间解析相关测试,确保功能正确性,测试通过 `go test ./...` --- improvements: 🛠️ 开发心路历程与优化 * 修复随口记链路中 `assistant` 消息未写入 Redis 的问题,确保消息持久化 * 去除“分段正文伪流式”处理,改为最终正文一次性输出,简化内容流转
107 lines
3.3 KiB
Go
package cmd
|
|
|
|
import (
|
|
"context"
|
|
"fmt"
|
|
"log"
|
|
|
|
"github.com/LoveLosita/smartflow/backend/api"
|
|
"github.com/LoveLosita/smartflow/backend/dao"
|
|
"github.com/LoveLosita/smartflow/backend/inits"
|
|
kafkabus "github.com/LoveLosita/smartflow/backend/kafka"
|
|
"github.com/LoveLosita/smartflow/backend/middleware"
|
|
"github.com/LoveLosita/smartflow/backend/pkg"
|
|
"github.com/LoveLosita/smartflow/backend/routers"
|
|
"github.com/LoveLosita/smartflow/backend/service"
|
|
"github.com/spf13/viper"
|
|
)
|
|
|
|
// loadConfig 加载应用配置。
|
|
func loadConfig() error {
|
|
viper.SetConfigName("config")
|
|
viper.SetConfigType("yaml")
|
|
viper.AddConfigPath(".")
|
|
if err := viper.ReadInConfig(); err != nil {
|
|
return fmt.Errorf("failed to read config file: %w", err)
|
|
}
|
|
log.Println("Config loaded successfully")
|
|
return nil
|
|
}
|
|
|
|
// Start is the application entry point. It loads configuration, wires up
// every layer in dependency order (infrastructure → DAO → async pipeline →
// service → API → router), and finally starts the HTTP engine. Any
// initialization failure is fatal and aborts the process via log.Fatalf.
func Start() {
	// Configuration must be loaded first: everything below reads from viper.
	if err := loadConfig(); err != nil {
		log.Fatalf("Failed to load config: %v", err)
	}

	// Infrastructure: database, Redis, rate limiter, and the Eino AI hub.
	db, err := inits.ConnectDB()
	if err != nil {
		log.Fatalf("Failed to connect to database: %v", err)
	}

	rdb := inits.InitRedis()
	limiter := pkg.NewRateLimiter(rdb)

	aiHub, err := inits.InitEino()
	if err != nil {
		log.Fatalf("Failed to initialize Eino: %v", err)
	}

	// DAO layer initialization.
	cacheRepo := dao.NewCacheDAO(rdb)
	agentCacheRepo := dao.NewAgentCache(rdb)
	// Error deliberately ignored: the GORM cache plugin appears to be
	// best-effort — NOTE(review): confirm a registration failure here is
	// acceptable to swallow silently.
	_ = db.Use(middleware.NewGormCachePlugin(cacheRepo))
	userRepo := dao.NewUserDAO(db)
	taskRepo := dao.NewTaskDAO(db)
	courseRepo := dao.NewCourseDAO(db)
	taskClassRepo := dao.NewTaskClassDAO(db)
	scheduleRepo := dao.NewScheduleDAO(db)
	manager := dao.NewManager(db)
	agentRepo := dao.NewAgentDAO(db)
	outboxRepo := dao.NewOutboxDAO(db)

	// Outbox async pipeline wiring:
	//   - read the Kafka configuration
	//   - initialize producer/consumer
	//   - start the dispatch/consume background loops
	// A nil pipeline with a nil error means Kafka is disabled by config
	// (presumably — verify against NewAgentAsyncPipeline).
	kafkaCfg := kafkabus.LoadConfig()
	asyncPipeline, err := service.NewAgentAsyncPipeline(outboxRepo, kafkaCfg)
	if err != nil {
		log.Fatalf("Failed to initialize Kafka async pipeline: %v", err)
	}
	if asyncPipeline != nil {
		asyncPipeline.Start(context.Background())
		// NOTE(review): this defer only fires when Start returns; if
		// routers.StartEngine blocks for the process lifetime, Close is
		// effectively never called — confirm the intended shutdown path.
		defer asyncPipeline.Close()
		log.Println("Kafka async pipeline started")
	} else {
		log.Println("Kafka async pipeline is disabled")
	}

	// Service layer initialization.
	userService := service.NewUserService(userRepo, cacheRepo)
	taskSv := service.NewTaskService(taskRepo, cacheRepo)
	courseService := service.NewCourseService(courseRepo, scheduleRepo)
	taskClassService := service.NewTaskClassService(taskClassRepo, cacheRepo, scheduleRepo, manager)
	scheduleService := service.NewScheduleService(scheduleRepo, userRepo, taskClassRepo, manager, cacheRepo)
	agentService := service.NewAgentService(aiHub, agentRepo, taskRepo, agentCacheRepo, asyncPipeline)

	// API layer initialization: one handler per service, bundled for routing.
	userApi := api.NewUserHandler(userService)
	taskApi := api.NewTaskHandler(taskSv)
	courseApi := api.NewCourseHandler(courseService)
	taskClassApi := api.NewTaskClassHandler(taskClassService)
	scheduleApi := api.NewScheduleAPI(scheduleService)
	agentApi := api.NewAgentHandler(agentService)
	handlers := &api.ApiHandlers{
		UserHandler:      userApi,
		TaskHandler:      taskApi,
		TaskClassHandler: taskClassApi,
		CourseHandler:    courseApi,
		ScheduleHandler:  scheduleApi,
		AgentHandler:     agentApi,
	}

	// Register routes and start serving.
	r := routers.RegisterRouters(handlers, cacheRepo, limiter)
	routers.StartEngine(r)
}
|