Files
smartmate/backend/services/llm/rpc/transport.go
Losita 61db646805 Version: 0.9.80.dev.260506
Backend:
1. LLM standalone service and unified billing egress landed: added `cmd/llm`, `client/llm`, and `services/llm/rpc`; filled in BillingContext, CreditBalanceGuard, price-rule parsing, stream usage aggregation, and `credit.charge.requested` outbox publishing; active-scheduler / agent / course / memory / gateway fallback now all go through the llm zrpc instead of each initializing models locally (see the sketch after this list).
2. TokenStore consolidated into the authoritative Credit ledger: added credit account / ledger / product / order / price-rule / reward-rule capabilities plus a Redis snapshot cache; extended the tokenstore rpc/client to support balance snapshots, consumption dashboards, products, orders, ledger entries, price rules, and reward rules; and wired in consumption of LLM charge events to complete Credit deduction bookkeeping.
3. Legacy billing path retired and gateway cut over: `/token-store` semantics moved wholesale to `/credit-store`; agent chat drops the old TokenQuotaGuard; userauth's CheckTokenQuota / AdjustTokenUsage are now deprecated; chat-history persistence no longer syncs the old token quota ledger; and course image-parsing requests now carry user_id into the new billing scope.
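
Below is a minimal Go sketch of the billing egress from item 1, with hypothetical names throughout: the BillingContext fields, the Usage shape, and the outbox table/columns are not shown in this commit, and only the `credit.charge.requested` topic name comes from it. The idea is that stream usage is aggregated first, then the charge event is written to the outbox inside the same transaction as the usage record, so the publish and the recorded usage commit or roll back together.

```go
// Hypothetical sketch: every name except "credit.charge.requested" is an assumption.
package billing

import (
	"context"
	"database/sql"
	"encoding/json"
	"time"
)

// BillingContext carries the identity and pricing inputs for one LLM call.
// Field names are illustrative; the real struct is not shown in this commit.
type BillingContext struct {
	UserID    string
	Model     string
	RequestID string
}

// Usage is the token usage aggregated from the stream's usage chunks.
type Usage struct {
	PromptTokens     int64
	CompletionTokens int64
}

// PublishChargeRequested writes a credit.charge.requested event into the
// outbox table inside the caller's transaction, so the charge event and the
// usage record share one commit.
func PublishChargeRequested(ctx context.Context, tx *sql.Tx, bc BillingContext, u Usage) error {
	payload, err := json.Marshal(map[string]any{
		"user_id":           bc.UserID,
		"model":             bc.Model,
		"request_id":        bc.RequestID,
		"prompt_tokens":     u.PromptTokens,
		"completion_tokens": u.CompletionTokens,
		"occurred_at":       time.Now().UTC().Format(time.RFC3339),
	})
	if err != nil {
		return err
	}
	// Table and column names are assumptions for illustration.
	_, err = tx.ExecContext(ctx,
		`INSERT INTO outbox_events (topic, payload) VALUES (?, ?)`,
		"credit.charge.requested", payload)
	return err
}
```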

Frontend:
4. Plan Plaza switched from mock data to real APIs: added forum api/types; the home page now supports a real list, tags, search with debouncing, likes, plan import, and plan publishing; the detail page fills in post details, the comment tree, replies, and comment deletion; and the "at least one tag" constraint is now enforced on both frontend and backend, with a default-tag fallback.
5. Store page moved to the Credit system with a reworked layout: the header now shows the balance plus a Credit/Token consumption dashboard with 24h/7d/30d/all period switching; the plan cards show both the original and the current price; the history section lists the current user's Credit ledger with a "view more" control; visuals and interactions were tidied up to match.

Repo:
6. Config and local startup now cover llm / outbox orchestration: `config.example.yaml` gains llm rpc and unified outbox service config; `dev-common.ps1` adds llm to the multi-service dependency set and auto-creates the Kafka topics; `docker-compose.yml` initializes the full set of outbox topics for agent/task/memory/active-scheduler/notification/taskclass-forum/llm/token-store (a hedged config sketch follows).
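
As a rough illustration of the new `config.example.yaml` section, here is a hedged Go sketch of what the llm rpc service config might deserialize into, assuming go-zero's zrpc (the commit routes callers through "llm zrpc"); the Outbox fields are pure assumptions.

```go
// Hypothetical config sketch; only "llm zrpc" is attested by the commit.
package config

import "github.com/zeromicro/go-zero/zrpc"

// Config is an assumed shape for the llm rpc service configuration.
type Config struct {
	zrpc.RpcServerConf            // listen address, etcd registration, timeouts (go-zero)
	Outbox             OutboxConf // unified outbox service settings (assumed shape)
}

// OutboxConf sketches the outbox publisher settings; field names are assumptions.
type OutboxConf struct {
	Brokers []string // Kafka brokers used by the outbox publisher
	Topic   string   // e.g. "credit.charge.requested"
}
```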
2026-05-06 20:16:53 +08:00


package rpc

import (
	"context"

	llmcontracts "github.com/LoveLosita/smartflow/backend/shared/contracts/llm"
	"google.golang.org/grpc"
	"google.golang.org/grpc/codes"
	"google.golang.org/grpc/status"
)

// Full gRPC method names for the smartflow.llm.LLM service.
const (
	LLM_Ping_FullMethodName                  = "/smartflow.llm.LLM/Ping"
	LLM_GenerateText_FullMethodName          = "/smartflow.llm.LLM/GenerateText"
	LLM_StreamText_FullMethodName            = "/smartflow.llm.LLM/StreamText"
	LLM_GenerateResponsesText_FullMethodName = "/smartflow.llm.LLM/GenerateResponsesText"
)

// LLMClient is the client-side API for the LLM service: three unary calls
// plus the server-streaming StreamText.
type LLMClient interface {
	Ping(ctx context.Context, in *llmcontracts.PingRequest, opts ...grpc.CallOption) (*llmcontracts.PingResponse, error)
	GenerateText(ctx context.Context, in *llmcontracts.TextRequest, opts ...grpc.CallOption) (*llmcontracts.TextResponse, error)
	StreamText(ctx context.Context, in *llmcontracts.StreamTextRequest, opts ...grpc.CallOption) (LLM_StreamTextClient, error)
	GenerateResponsesText(ctx context.Context, in *llmcontracts.ResponsesRequest, opts ...grpc.CallOption) (*llmcontracts.ResponsesResponse, error)
}

type llmClient struct {
	cc grpc.ClientConnInterface
}

// NewLLMClient wraps an existing client connection in the LLM client API.
func NewLLMClient(cc grpc.ClientConnInterface) LLMClient {
	return &llmClient{cc: cc}
}

func (c *llmClient) Ping(ctx context.Context, in *llmcontracts.PingRequest, opts ...grpc.CallOption) (*llmcontracts.PingResponse, error) {
	out := new(llmcontracts.PingResponse)
	err := c.cc.Invoke(ctx, LLM_Ping_FullMethodName, in, out, opts...)
	return out, err
}

func (c *llmClient) GenerateText(ctx context.Context, in *llmcontracts.TextRequest, opts ...grpc.CallOption) (*llmcontracts.TextResponse, error) {
	out := new(llmcontracts.TextResponse)
	err := c.cc.Invoke(ctx, LLM_GenerateText_FullMethodName, in, out, opts...)
	return out, err
}

// StreamText opens the server-streaming call, sends the single request, and
// half-closes the send side so the caller only has to Recv chunks.
func (c *llmClient) StreamText(ctx context.Context, in *llmcontracts.StreamTextRequest, opts ...grpc.CallOption) (LLM_StreamTextClient, error) {
	stream, err := c.cc.NewStream(ctx, &LLM_ServiceDesc.Streams[0], LLM_StreamText_FullMethodName, opts...)
	if err != nil {
		return nil, err
	}
	client := &llmStreamTextClient{ClientStream: stream}
	if err = client.SendMsg(in); err != nil {
		return nil, err
	}
	if err = client.CloseSend(); err != nil {
		return nil, err
	}
	return client, nil
}

func (c *llmClient) GenerateResponsesText(ctx context.Context, in *llmcontracts.ResponsesRequest, opts ...grpc.CallOption) (*llmcontracts.ResponsesResponse, error) {
	out := new(llmcontracts.ResponsesResponse)
	err := c.cc.Invoke(ctx, LLM_GenerateResponsesText_FullMethodName, in, out, opts...)
	return out, err
}

// LLM_StreamTextClient is the receive side of the StreamText call.
type LLM_StreamTextClient interface {
	Recv() (*llmcontracts.StreamChunk, error)
	grpc.ClientStream
}

type llmStreamTextClient struct {
	grpc.ClientStream
}

// Recv blocks for the next StreamChunk; it returns io.EOF once the server
// has closed the stream. (The original special-cased io.EOF only to return
// the identical values, so the redundant branch and the io import are gone.)
func (x *llmStreamTextClient) Recv() (*llmcontracts.StreamChunk, error) {
	m := new(llmcontracts.StreamChunk)
	if err := x.ClientStream.RecvMsg(m); err != nil {
		return nil, err
	}
	return m, nil
}

// LLMServer is the server-side API for the LLM service.
type LLMServer interface {
	Ping(context.Context, *llmcontracts.PingRequest) (*llmcontracts.PingResponse, error)
	GenerateText(context.Context, *llmcontracts.TextRequest) (*llmcontracts.TextResponse, error)
	StreamText(*llmcontracts.StreamTextRequest, LLM_StreamTextServer) error
	GenerateResponsesText(context.Context, *llmcontracts.ResponsesRequest) (*llmcontracts.ResponsesResponse, error)
}

// UnimplementedLLMServer can be embedded to get Unimplemented responses for
// methods a server does not provide.
type UnimplementedLLMServer struct{}

func (UnimplementedLLMServer) Ping(context.Context, *llmcontracts.PingRequest) (*llmcontracts.PingResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method Ping not implemented")
}

func (UnimplementedLLMServer) GenerateText(context.Context, *llmcontracts.TextRequest) (*llmcontracts.TextResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GenerateText not implemented")
}

func (UnimplementedLLMServer) StreamText(*llmcontracts.StreamTextRequest, LLM_StreamTextServer) error {
	return status.Errorf(codes.Unimplemented, "method StreamText not implemented")
}

func (UnimplementedLLMServer) GenerateResponsesText(context.Context, *llmcontracts.ResponsesRequest) (*llmcontracts.ResponsesResponse, error) {
	return nil, status.Errorf(codes.Unimplemented, "method GenerateResponsesText not implemented")
}

// RegisterLLMServer registers srv against the LLM service descriptor.
func RegisterLLMServer(s grpc.ServiceRegistrar, srv LLMServer) {
	s.RegisterService(&LLM_ServiceDesc, srv)
}

// LLM_StreamTextServer is the send side of the StreamText call.
type LLM_StreamTextServer interface {
	Send(*llmcontracts.StreamChunk) error
	grpc.ServerStream
}

type llmStreamTextServer struct {
	grpc.ServerStream
}

func (x *llmStreamTextServer) Send(m *llmcontracts.StreamChunk) error {
	return x.ServerStream.SendMsg(m)
}

func _LLM_Ping_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(llmcontracts.PingRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(LLMServer).Ping(ctx, in)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: LLM_Ping_FullMethodName}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(LLMServer).Ping(ctx, req.(*llmcontracts.PingRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _LLM_GenerateText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(llmcontracts.TextRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(LLMServer).GenerateText(ctx, in)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: LLM_GenerateText_FullMethodName}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(LLMServer).GenerateText(ctx, req.(*llmcontracts.TextRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _LLM_GenerateResponsesText_Handler(srv interface{}, ctx context.Context, dec func(interface{}) error, interceptor grpc.UnaryServerInterceptor) (interface{}, error) {
	in := new(llmcontracts.ResponsesRequest)
	if err := dec(in); err != nil {
		return nil, err
	}
	if interceptor == nil {
		return srv.(LLMServer).GenerateResponsesText(ctx, in)
	}
	info := &grpc.UnaryServerInfo{Server: srv, FullMethod: LLM_GenerateResponsesText_FullMethodName}
	handler := func(ctx context.Context, req interface{}) (interface{}, error) {
		return srv.(LLMServer).GenerateResponsesText(ctx, req.(*llmcontracts.ResponsesRequest))
	}
	return interceptor(ctx, in, info, handler)
}

func _LLM_StreamText_Handler(srv interface{}, stream grpc.ServerStream) error {
	m := new(llmcontracts.StreamTextRequest)
	if err := stream.RecvMsg(m); err != nil {
		return err
	}
	return srv.(LLMServer).StreamText(m, &llmStreamTextServer{ServerStream: stream})
}

// LLM_ServiceDesc wires the handlers above into a grpc.ServiceDesc so the
// service can be registered on any grpc.ServiceRegistrar.
var LLM_ServiceDesc = grpc.ServiceDesc{
	ServiceName: "smartflow.llm.LLM",
	HandlerType: (*LLMServer)(nil),
	Methods: []grpc.MethodDesc{
		{MethodName: "Ping", Handler: _LLM_Ping_Handler},
		{MethodName: "GenerateText", Handler: _LLM_GenerateText_Handler},
		{MethodName: "GenerateResponsesText", Handler: _LLM_GenerateResponsesText_Handler},
	},
	Streams: []grpc.StreamDesc{
		{StreamName: "StreamText", Handler: _LLM_StreamText_Handler, ServerStreams: true},
	},
	Metadata: "services/llm/rpc/llm.proto",
}
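
For reference, a minimal sketch of consuming this transport directly. The import path for this rpc package, the target address, and the empty request are assumptions; per the commit message, callers in the repo go through `client/llm` rather than dialing raw gRPC.

```go
// Hypothetical usage sketch; address and request contents are placeholders.
package main

import (
	"context"
	"io"
	"log"

	"github.com/LoveLosita/smartflow/backend/services/llm/rpc"
	llmcontracts "github.com/LoveLosita/smartflow/backend/shared/contracts/llm"
	"google.golang.org/grpc"
	"google.golang.org/grpc/credentials/insecure"
)

func main() {
	// Dial the llm rpc service; "localhost:8080" is a placeholder address.
	conn, err := grpc.NewClient("localhost:8080",
		grpc.WithTransportCredentials(insecure.NewCredentials()))
	if err != nil {
		log.Fatal(err)
	}
	defer conn.Close()

	client := rpc.NewLLMClient(conn)

	// Open the server-streaming call; request fields are omitted because the
	// StreamTextRequest contract is not shown in this file.
	stream, err := client.StreamText(context.Background(), &llmcontracts.StreamTextRequest{})
	if err != nil {
		log.Fatal(err)
	}
	// Drain chunks until the server closes the stream with io.EOF.
	for {
		chunk, err := stream.Recv()
		if err == io.EOF {
			break
		}
		if err != nil {
			log.Fatal(err)
		}
		_ = chunk // consume the StreamChunk here
	}
}
```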