package chat

import (
	"context"
	"fmt"
	"io"

	"github.com/cloudwego/eino-ext/components/model/openai"
	"github.com/cloudwego/eino/schema"
)

// Chat sends a single non-streaming request to the chat model and prints
// the reply along with the token usage reported by the provider.
func Chat(chatModel *openai.ChatModel, ctx context.Context, role, style, question string) error {
	// Start with an empty history; GenerateChatMessage builds the prompt messages.
	history := []*schema.Message{}
	messages, err := GenerateChatMessage(ctx, role, style, question, history)
	if err != nil {
		return err
	}

	response, err := chatModel.Generate(ctx, messages)
	if err != nil {
		return err
	}

	// Print token usage if the provider returned it.
	if meta := response.ResponseMeta; meta != nil && meta.Usage != nil {
		fmt.Printf("Prompt tokens: %d\n", meta.Usage.PromptTokens)
		fmt.Printf("Completion tokens: %d\n", meta.Usage.CompletionTokens)
		fmt.Printf("Total tokens: %d\n", meta.Usage.TotalTokens)
	}

	fmt.Println(response.Content)
	return nil
}

// ChatStream sends the same request but consumes the reply as a stream,
// printing each chunk as it arrives.
func ChatStream(chatModel *openai.ChatModel, ctx context.Context, role, style, question string) error {
	history := []*schema.Message{}
	messages, err := GenerateChatMessage(ctx, role, style, question, history)
	if err != nil {
		return err
	}

	// Get the streaming reply.
	reader, err := chatModel.Stream(ctx, messages)
	if err != nil {
		return err
	}
	defer reader.Close() // remember to close the stream reader

	// Drain the stream chunk by chunk until EOF.
	for {
		chunk, err := reader.Recv()
		if err == io.EOF {
			break
		}
		if err != nil {
			return err
		}
		fmt.Print(chunk.Content)
	}
	fmt.Println()
	return nil
}
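
// --- Usage sketch (illustrative addition, not part of the original code) ---
// Shows one way to wire Chat and ChatStream together: build the model once,
// then run a plain call and a streaming call. The openai.NewChatModel
// constructor and the APIKey/Model fields of openai.ChatModelConfig are
// assumptions about the eino-ext openai component; check its documentation
// for your version. The role/style/question strings are placeholders, and
// GenerateChatMessage is the prompt helper defined elsewhere in the tutorial.
func exampleUsage() error {
	ctx := context.Background()

	// Construct the chat model once and reuse it for both calls.
	chatModel, err := openai.NewChatModel(ctx, &openai.ChatModelConfig{
		APIKey: "<your-api-key>", // hypothetical placeholder
		Model:  "gpt-4o-mini",    // hypothetical model name
	})
	if err != nil {
		return err
	}

	// Non-streaming reply, with token usage printed.
	if err := Chat(chatModel, ctx, "encouraging mentor", "friendly", "How should I start learning Go?"); err != nil {
		return err
	}

	// Streaming reply, printed chunk by chunk.
	return ChatStream(chatModel, ctx, "encouraging mentor", "friendly", "How should I start learning Go?")
}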