Files
eino_learn/chat/chat.go
lirui 5632bf88f8 feat: 添加配置文件支持并重构聊天模块
- 新增config.yaml和config.go用于管理配置
- 重构chatmodel.go使用配置初始化模型
- 修改GenerateChatMessage和ChatStream函数签名添加context参数
- 更新main.go加载配置并初始化聊天模型
- 优化错误处理和日志输出
2026-01-11 20:32:41 +08:00

59 lines
1.3 KiB
Go

package chat
import (
	"context"
	"errors"
	"fmt"
	"io"

	"github.com/cloudwego/eino-ext/components/model/openai"
	"github.com/cloudwego/eino/schema"
)
// Chat sends a single non-streaming chat request built from role, style, and
// question (with an empty history), prints the token usage when the provider
// reports it, then prints the full reply to stdout. It returns a non-nil
// error if message construction or generation fails.
//
// NOTE(review): by Go convention ctx should be the first parameter; the
// current order is kept so existing call sites keep compiling.
func Chat(chatModel *openai.ChatModel, ctx context.Context, role, style, question string) error {
	history := []*schema.Message{}
	messages, err := GenerateChatMessage(ctx, role, style, question, history)
	if err != nil {
		return fmt.Errorf("building chat messages: %w", err)
	}
	response, err := chatModel.Generate(ctx, messages)
	if err != nil {
		return fmt.Errorf("generating chat response: %w", err)
	}
	// 获取 Token 使用情况 (report token usage).
	// ResponseMeta itself may be nil depending on the provider; guard it
	// before dereferencing Usage to avoid a nil-pointer panic.
	if meta := response.ResponseMeta; meta != nil && meta.Usage != nil {
		fmt.Printf("提示 Tokens: %d\n", meta.Usage.PromptTokens)
		fmt.Printf("生成 Tokens: %d\n", meta.Usage.CompletionTokens)
		fmt.Printf("总 Tokens: %d\n", meta.Usage.TotalTokens)
	}
	fmt.Println(response.Content)
	return nil
}
// ChatStream sends a streaming chat request built from role, style, and
// question (with an empty history) and prints each reply chunk to stdout as
// it arrives, followed by a trailing newline. It returns a non-nil error if
// message construction, opening the stream, or receiving a chunk fails.
//
// NOTE(review): by Go convention ctx should be the first parameter; the
// current order is kept so existing call sites keep compiling.
func ChatStream(chatModel *openai.ChatModel, ctx context.Context, role, style, question string) error {
	history := []*schema.Message{}
	messages, err := GenerateChatMessage(ctx, role, style, question, history)
	if err != nil {
		return fmt.Errorf("building chat messages: %w", err)
	}
	// 获取流式回复 (open the streaming reply).
	reader, err := chatModel.Stream(ctx, messages)
	if err != nil {
		return fmt.Errorf("starting chat stream: %w", err)
	}
	defer reader.Close() // 注意要关闭 — release the stream even on early return
	// 处理流式内容 (drain the stream chunk by chunk until EOF).
	for {
		chunk, err := reader.Recv()
		// errors.Is also matches a wrapped io.EOF, unlike a bare ==.
		if errors.Is(err, io.EOF) {
			break
		}
		if err != nil {
			return fmt.Errorf("receiving stream chunk: %w", err)
		}
		fmt.Print(chunk.Content)
	}
	fmt.Println()
	return nil
}