Version: 0.4.2.dev.260305

feat: 🧠 支持切换模型思考模式

* 新增模型思考开关配置,支持动态控制模型是否启用思考模式
This commit is contained in:
LoveLosita
2026-03-05 21:44:22 +08:00
parent f9d52e0c5e
commit cb5ac07e3c
4 changed files with 33 additions and 10 deletions

View File

@@ -8,6 +8,7 @@ import (
"github.com/cloudwego/eino-ext/components/model/ark"
"github.com/cloudwego/eino/schema"
arkModel "github.com/volcengine/volcengine-go-sdk/service/arkruntime/model"
)
// StreamResponse 专为 Apifox/前端 识别设计的极简结构
@@ -31,10 +32,25 @@ func ToStreamResponseDTO(chunk *schema.Message) StreamResponse {
return dto
}
// ToStreamReasoningResponseDTO builds a StreamResponse whose single choice
// carries the chunk's reasoning ("thinking") text in the delta content field,
// mirroring the shape produced by ToStreamResponseDTO for ordinary content.
func ToStreamReasoningResponseDTO(chunk *schema.Message) StreamResponse {
	var dto StreamResponse
	// Assemble the one choice first, then attach it; the anonymous struct
	// must match the element type of StreamResponse.Choices exactly.
	choice := struct {
		Delta struct {
			Content string `json:"content"`
		} `json:"delta"`
	}{}
	choice.Delta.Content = chunk.ReasoningContent
	dto.Choices = append(dto.Choices, choice)
	return dto
}
// ToOpenAIStream 负责将 Eino 的内部 Chunk 转换为 OpenAI 兼容的 data: {JSON} 字符串
func ToOpenAIStream(chunk *schema.Message) (string, error) {
dto := ToStreamResponseDTO(chunk)
var dto StreamResponse
if chunk.ReasoningContent != "" {
dto = ToStreamReasoningResponseDTO(chunk)
} else {
dto = ToStreamResponseDTO(chunk)
}
jsonBytes, err := json.Marshal(dto)
if err != nil {
return "", err
@@ -43,7 +59,7 @@ func ToOpenAIStream(chunk *schema.Message) (string, error) {
return string(jsonBytes), nil
}
func StreamChat(ctx context.Context, llm *ark.ChatModel, userInput string, chatHistory []*schema.Message, outChan chan<- string) (string, error) {
func StreamChat(ctx context.Context, llm *ark.ChatModel, userInput string, ifThinking bool, chatHistory []*schema.Message, outChan chan<- string) (string, error) {
// 1. 组装消息
messages := make([]*schema.Message, 0)
// A. 塞入 System Message (人设)
@@ -55,7 +71,13 @@ func StreamChat(ctx context.Context, llm *ark.ChatModel, userInput string, chatH
// C. 塞入用户当前的消息 (当前需求)
messages = append(messages, schema.UserMessage(userInput))
// 2. 调用流式接口
reader, err := llm.Stream(ctx, messages)
var thinking *ark.Thinking
if ifThinking {
thinking = &arkModel.Thinking{Type: arkModel.ThinkingTypeEnabled}
} else {
thinking = &arkModel.Thinking{Type: arkModel.ThinkingTypeDisabled}
}
reader, err := llm.Stream(ctx, messages, ark.WithThinking(thinking))
if err != nil {
return "", err
}
@@ -71,9 +93,9 @@ func StreamChat(ctx context.Context, llm *ark.ChatModel, userInput string, chatH
if err != nil {
return "", err
}
if chunk.Content == "" {
/*if chunk.Content == "" { // 过滤掉空内容,避免发送无效消息
continue
}
}*/
fullText.WriteString(chunk.Content)
// 将内容发送到通道中供前端消费
retChuck, err := ToOpenAIStream(chunk)