Version: 0.9.80.dev.260506

后端:
1. LLM 独立服务与统一计费出口落地:新增 `cmd/llm`、`client/llm` 与 `services/llm/rpc`,补齐 BillingContext、CreditBalanceGuard、价格规则解析、stream usage 归集与 `credit.charge.requested` outbox 发布,active-scheduler / agent / course / memory / gateway fallback 全部改走 llm zrpc,不再各自本地初始化模型。
2. TokenStore 收口为 Credit 权威账本:新增 credit account / ledger / product / order / price-rule / reward-rule 能力与 Redis 快照缓存,扩展 tokenstore rpc/client 支撑余额快照、消耗看板、商品、订单、流水、价格规则和奖励规则,并接入 LLM charge 事件消费完成 Credit 扣费落账。
3. 计费旧链路下线与网关切口切换:`/token-store` 语义整体切到 `/credit-store`,agent chat 移除旧 TokenQuotaGuard,userauth 的 CheckTokenQuota / AdjustTokenUsage 改为废弃,聊天历史落库不再同步旧 token 额度账本,course 图片解析请求补 user_id 进入新计费口径。

前端:
4. 计划广场从 mock 数据切到真实接口:新增 forum api/types,首页支持真实列表、标签、搜索、防抖、点赞、导入和发布计划,详情页补齐帖子详情、评论树、回复和删除评论链路,同时补上“至少一个标签”的前后端约束与默认标签兜底。
5. 商店页切到 Credit 体系并重做展示:顶部改为余额 + Credit/Token 消耗看板,支持 24h/7d/30d/all 周期切换;套餐区展示原价与当前价;历史区改为当前用户 Credit 流水并支持查看更多,整体视觉和交互同步收口。

仓库:
6. 配置与本地启动体系补齐 llm / outbox 编排:`config.example.yaml` 增加 llm rpc 和统一 outbox service 配置,`dev-common.ps1` 把 llm 纳入多服务依赖并自动建 Kafka topic,`docker-compose.yml` 同步初始化 agent/task/memory/active-scheduler/notification/taskclass-forum/llm/token-store 全量 outbox topic。
This commit is contained in:
Losita
2026-05-06 20:16:53 +08:00
parent 7d324b77aa
commit 61db646805
104 changed files with 9527 additions and 3925 deletions

View File

@@ -2,6 +2,8 @@ package feedbacklocate
import (
"context"
"crypto/sha1"
"encoding/hex"
"errors"
"fmt"
"log"
@@ -102,8 +104,9 @@ func (s *Service) Resolve(ctx context.Context, req Request) (Result, error) {
}
messages := llmservice.BuildSystemUserMessages(strings.TrimSpace(locateSystemPrompt), nil, userPrompt)
invokeCtx := llmservice.WithBillingContext(ctx, buildFeedbackLocateBillingContext(req))
resp, rawResult, err := llmservice.GenerateJSON[llmResponse](
ctx,
invokeCtx,
s.client,
messages,
llmservice.GenerateOptions{
@@ -365,3 +368,21 @@ func minInt(left, right int) int {
}
return right
}
// buildFeedbackLocateBillingContext derives the billing context attached to
// the feedback-locate LLM invocation. Without a positive user ID it returns a
// context carrying only the scene and model alias; otherwise it also fills in
// the user and a deterministic event/request ID fingerprinted from the request
// payload, so retries of the same message map to the same billing event.
func buildFeedbackLocateBillingContext(req Request) llmservice.BillingContext {
	const scene = "active_scheduler_feedback_locate"
	billing := llmservice.BillingContext{
		Scene:      scene,
		ModelAlias: scene,
	}
	if req.UserID <= 0 {
		return billing
	}
	// SHA-1 here is a cheap stable fingerprint of the normalized message pair,
	// used purely as an identifier — not for any security purpose.
	payload := strings.TrimSpace(req.UserMessage) + "|" + strings.TrimSpace(req.PendingQuestion)
	digest := sha1.Sum([]byte(payload))
	id := fmt.Sprintf("active_scheduler_feedback_locate:%d:%s", req.UserID, hex.EncodeToString(digest[:]))
	billing.UserID = uint64(req.UserID)
	billing.EventID = id
	billing.RequestID = id
	return billing
}

View File

@@ -75,8 +75,9 @@ func (s *Service) Select(ctx context.Context, req SelectRequest) (Result, error)
nil,
userPrompt,
)
invokeCtx := llmservice.WithBillingContext(ctx, buildSelectionBillingContext(req))
resp, rawResult, err := llmservice.GenerateJSON[llmSelectionResponse](
ctx,
invokeCtx,
s.client,
messages,
llmservice.GenerateOptions{
@@ -294,6 +295,26 @@ func (s *Service) now() time.Time {
return s.clock()
}
// buildSelectionBillingContext derives the billing context for the selection
// LLM invocation. When no active context is present it returns a context with
// only the scene and model alias; otherwise it reuses the trace ID (falling
// back to a deterministic user/trigger composite) as both the event ID and the
// request ID so the charge can be correlated with the triggering run.
func buildSelectionBillingContext(req SelectRequest) llmservice.BillingContext {
	const scene = "active_scheduler_select"
	active := req.ActiveContext
	if active == nil {
		return llmservice.BillingContext{
			Scene:      scene,
			ModelAlias: scene,
		}
	}
	id := strings.TrimSpace(active.Trace.TraceID)
	if id == "" {
		// No trace ID available — build a stable identifier from the user and
		// trigger so repeated firings of the same trigger stay correlated.
		id = fmt.Sprintf("active_scheduler_select:%d:%s",
			active.User.UserID,
			strings.TrimSpace(active.Trigger.TriggerID))
	}
	return llmservice.BillingContext{
		UserID:     uint64(active.User.UserID),
		EventID:    id,
		Scene:      scene,
		RequestID:  id,
		ModelAlias: scene,
	}
}
func (r Result) String() string {
return fmt.Sprintf("active_scheduler_selection(action=%s, selected=%s, fallback=%t)",
r.Action,