Files
smartmate/backend/cmd/llm/main.go
Losita 61db646805 Version: 0.9.80.dev.260506
后端:
1. LLM 独立服务与统一计费出口落地:新增 `cmd/llm`、`client/llm` 与 `services/llm/rpc`,补齐 BillingContext、CreditBalanceGuard、价格规则解析、stream usage 归集与 `credit.charge.requested` outbox 发布,active-scheduler / agent / course / memory / gateway fallback 全部改走 llm zrpc,不再各自本地初始化模型。
2. TokenStore 收口为 Credit 权威账本:新增 credit account / ledger / product / order / price-rule / reward-rule 能力与 Redis 快照缓存,扩展 tokenstore rpc/client 支撑余额快照、消耗看板、商品、订单、流水、价格规则和奖励规则,并接入 LLM charge 事件消费完成 Credit 扣费落账。
3. 计费旧链路下线与网关切口切换:`/token-store` 语义整体切到 `/credit-store`,agent chat 移除旧 TokenQuotaGuard,userauth 的 CheckTokenQuota / AdjustTokenUsage 改为废弃,聊天历史落库不再同步旧 token 额度账本,course 图片解析请求补 user_id 进入新计费口径。

前端:
4. 计划广场从 mock 数据切到真实接口:新增 forum api/types,首页支持真实列表、标签、搜索、防抖、点赞、导入和发布计划,详情页补齐帖子详情、评论树、回复和删除评论链路,同时补上“至少一个标签”的前后端约束与默认标签兜底。
5. 商店页切到 Credit 体系并重做展示:顶部改为余额 + Credit/Token 消耗看板,支持 24h/7d/30d/all 周期切换;套餐区展示原价与当前价;历史区改为当前用户 Credit 流水并支持查看更多,整体视觉和交互同步收口。

仓库:
6. 配置与本地启动体系补齐 llm / outbox 编排:`config.example.yaml` 增加 llm rpc 和统一 outbox service 配置,`dev-common.ps1` 把 llm 纳入多服务依赖并自动建 Kafka topic,`docker-compose.yml` 同步初始化 agent/task/memory/active-scheduler/notification/taskclass-forum/llm/token-store 全量 outbox topic。
2026-05-06 20:16:53 +08:00

158 lines
4.6 KiB
Go

package main
import (
	"context"
	"errors"
	"log"
	"os"
	"os/signal"
	"sync"
	"syscall"

	tokenstoreclient "github.com/LoveLosita/smartflow/backend/client/tokenstore"
	llmservice "github.com/LoveLosita/smartflow/backend/services/llm"
	llmdao "github.com/LoveLosita/smartflow/backend/services/llm/dao"
	llmrpc "github.com/LoveLosita/smartflow/backend/services/llm/rpc"
	creditcontracts "github.com/LoveLosita/smartflow/backend/shared/contracts/creditstore"
	"github.com/LoveLosita/smartflow/backend/shared/infra/bootstrap"
	einoinfra "github.com/LoveLosita/smartflow/backend/shared/infra/eino"
	kafkabus "github.com/LoveLosita/smartflow/backend/shared/infra/kafka"
	outboxinfra "github.com/LoveLosita/smartflow/backend/shared/infra/outbox"
	redisinfra "github.com/LoveLosita/smartflow/backend/shared/infra/redis"
	"github.com/spf13/viper"
)
// main boots the standalone llm service: load config, open DB/Redis, build
// the Eino AI runtime, assemble the legacy and runtime LLM services, start
// the optional outbox dispatch engine, and serve the llm zrpc endpoint until
// an interrupt or SIGTERM arrives.
func main() {
	// Config must be loaded first; every viper.Get* call below depends on it.
	if err := bootstrap.LoadConfig(); err != nil {
		log.Fatalf("failed to load config: %v", err)
	}
	// ctx is canceled on Ctrl-C / SIGTERM; long-lived components (outbox
	// dispatch) take it so the deferred cleanups can unwind gracefully.
	ctx, stop := signal.NotifyContext(context.Background(), os.Interrupt, syscall.SIGTERM)
	defer stop()
	db, err := llmdao.OpenDBFromConfig()
	if err != nil {
		log.Fatalf("failed to connect llm database: %v", err)
	}
	redisClient, err := redisinfra.OpenRedisFromConfig()
	if err != nil {
		log.Fatalf("failed to connect llm redis: %v", err)
	}
	defer redisClient.Close()
	aiHub, err := einoinfra.InitEino()
	if err != nil {
		log.Fatalf("failed to initialize llm Eino runtime: %v", err)
	}
	// legacyService wraps the pre-split model wiring (API key, base URL,
	// course-vision model) that the runtime service still delegates to.
	legacyService := llmservice.New(llmservice.Options{
		AIHub:             aiHub,
		APIKey:            os.Getenv("ARK_API_KEY"),
		BaseURL:           viper.GetString("agent.baseURL"),
		CourseVisionModel: viper.GetString("courseImport.visionModel"),
	})
	// Credit balance snapshots come from the tokenstore rpc; only the client
	// config is captured here — the client itself is created lazily on first
	// use (see tokenStoreSnapshotProvider.ensureClient).
	balanceSnapshotProvider := &tokenStoreSnapshotProvider{
		cfg: tokenstoreclient.ClientConfig{
			Endpoints: viper.GetStringSlice("tokenstore.rpc.endpoints"),
			Target:    viper.GetString("tokenstore.rpc.target"),
			Timeout:   viper.GetDuration("tokenstore.rpc.timeout"),
		},
	}
	outboxRepo := outboxinfra.NewRepository(db)
	priceRuleDAO := llmdao.NewPriceRuleDAO(db)
	// A nil engine (with nil error) means dispatch is deliberately disabled
	// (Kafka off in config); the service still runs without it.
	dispatchEngine, err := buildLLMOutboxDispatchEngine(outboxRepo)
	if err != nil {
		log.Fatalf("failed to initialize llm outbox dispatch engine: %v", err)
	}
	if dispatchEngine != nil {
		dispatchEngine.StartDispatch(ctx)
		defer dispatchEngine.Close()
		log.Println("llm outbox dispatch started")
	} else {
		log.Println("llm outbox dispatch is disabled")
	}
	runtimeService, err := llmservice.NewRuntimeService(llmservice.RuntimeServiceOptions{
		LegacyService:     legacyService,
		CacheDAO:          llmdao.NewCacheDAO(redisClient),
		PriceRuleDAO:      priceRuleDAO,
		SnapshotProvider:  balanceSnapshotProvider,
		OutboxRepo:        outboxRepo,
		OutboxMaxRetry:    kafkabus.LoadConfig().MaxRetry,
		ProviderName:      viper.GetString("llm.providerName"),
		LiteModelName:     viper.GetString("agent.liteModel"),
		ProModelName:      viper.GetString("agent.proModel"),
		MaxModelName:      viper.GetString("agent.maxModel"),
		CourseVisionModel: viper.GetString("courseImport.visionModel"),
	})
	if err != nil {
		log.Fatalf("failed to initialize llm runtime service: %v", err)
	}
	server, listenOn, err := llmrpc.NewServer(llmrpc.ServerOptions{
		ListenOn: viper.GetString("llm.rpc.listenOn"),
		Timeout:  viper.GetDuration("llm.rpc.timeout"),
		Service:  runtimeService,
	})
	if err != nil {
		log.Fatalf("failed to build llm zrpc server: %v", err)
	}
	defer server.Stop()
	// Start presumably blocks serving requests, hence the goroutine; shutdown
	// is driven by the deferred server.Stop() after ctx is canceled.
	// NOTE(review): Start's error/exit behavior is not visible here — if it
	// can fail, that failure is currently silent; confirm against llmrpc.
	go func() {
		log.Printf("llm zrpc service starting on %s", listenOn)
		server.Start()
	}()
	// Block until an interrupt/SIGTERM, then unwind the defer stack.
	<-ctx.Done()
	log.Println("llm service stopping")
}
// buildLLMOutboxDispatchEngine assembles the outbox dispatch engine for the
// llm service. It returns (nil, nil) — meaning "dispatch disabled" — when no
// repository is supplied or Kafka is turned off in configuration.
func buildLLMOutboxDispatchEngine(outboxRepo *outboxinfra.Repository) (*outboxinfra.Engine, error) {
	cfg := kafkabus.LoadConfig()
	if outboxRepo == nil || !cfg.Enabled {
		// A nil engine is the documented "disabled" state, not an error.
		return nil, nil
	}
	// NOTE(review): the second return of ResolveServiceRoute is deliberately
	// discarded here, matching existing behavior — confirm it is safe to skip.
	serviceRoute, _ := outboxinfra.ResolveServiceRoute(outboxinfra.ServiceLLM)
	cfg.ServiceName = outboxinfra.ServiceLLM
	routedRepo := outboxRepo.WithRoute(serviceRoute)
	return outboxinfra.NewEngine(routedRepo, cfg)
}
// tokenStoreSnapshotProvider adapts the tokenstore rpc client to the credit
// balance SnapshotProvider dependency of the llm runtime service. The rpc
// client is created lazily on first use and cached for subsequent calls.
// Contains a mutex — must not be copied; use it through a pointer.
type tokenStoreSnapshotProvider struct {
	cfg    tokenstoreclient.ClientConfig // rpc connection settings captured at startup
	mu     sync.Mutex                    // guards the lazy one-time creation of client
	client *tokenstoreclient.Client      // nil until ensureClient first succeeds
}
// GetCreditBalanceSnapshot fetches the credit balance snapshot for userID
// via the lazily initialized tokenstore rpc client.
func (p *tokenStoreSnapshotProvider) GetCreditBalanceSnapshot(ctx context.Context, userID uint64) (*creditcontracts.CreditBalanceSnapshot, error) {
	c, err := p.ensureClient()
	if err != nil {
		return nil, err
	}
	return c.GetCreditBalanceSnapshot(ctx, userID)
}
// ensureClient lazily creates, caches, and returns the tokenstore rpc client.
// It is safe for concurrent use: the mutex guarantees the client is built at
// most once, and later calls return the cached instance.
//
// Fix: the previous version returned (nil, nil) on a nil receiver, which made
// GetCreditBalanceSnapshot call a method on a nil *Client. A nil receiver now
// yields an explicit error instead of a silently unusable client.
func (p *tokenStoreSnapshotProvider) ensureClient() (*tokenstoreclient.Client, error) {
	if p == nil {
		return nil, errors.New("token store snapshot provider is not configured")
	}
	p.mu.Lock()
	defer p.mu.Unlock()
	if p.client != nil {
		return p.client, nil
	}
	client, err := tokenstoreclient.NewClient(p.cfg)
	if err != nil {
		return nil, err
	}
	p.client = client
	return p.client, nil
}