feat: update quickstart_chat (#11)
meguminnnnnnnnn authored Jan 20, 2025
1 parent 94c219b commit ba14795
Showing 6 changed files with 243 additions and 60 deletions.
41 changes: 41 additions & 0 deletions quickstart/chat/generate.go
@@ -0,0 +1,41 @@
/*
* Copyright 2025 CloudWeGo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package main

import (
"context"
"log"

"github.com/cloudwego/eino/components/model"
"github.com/cloudwego/eino/schema"
)

func generate(ctx context.Context, llm model.ChatModel, in []*schema.Message) *schema.Message {
result, err := llm.Generate(ctx, in)
if err != nil {
log.Fatalf("llm generate failed: %v", err)
}
return result
}

func stream(ctx context.Context, llm model.ChatModel, in []*schema.Message) *schema.StreamReader[*schema.Message] {
result, err := llm.Stream(ctx, in)
if err != nil {
log.Fatalf("llm generate failed: %v", err)
}
return result
}
75 changes: 15 additions & 60 deletions quickstart/chat/main.go
@@ -19,72 +19,27 @@ package main
 import (
 	"context"
 	"log"
-	"os"
-
-	"github.com/cloudwego/eino-ext/components/model/openai"
-	"github.com/cloudwego/eino/components/prompt"
-	"github.com/cloudwego/eino/schema"
-
-	"github.com/cloudwego/eino-examples/internal/logs"
 )
 
 func main() {
-	openAIAPIKey := os.Getenv("OPENAI_API_KEY")
-
 	ctx := context.Background()
 
-	// create the template, using the FString format
-	template := prompt.FromMessages(schema.FString,
-		// system message template
-		schema.SystemMessage("你是一个{role}。你需要用{style}的语气回答问题。你的目标是帮助程序员保持积极乐观的心态,提供技术建议的同时也要关注他们的心理健康,给他们提供足够的情绪价值。"),
-
-		// insert the optional example dialogue
-		schema.MessagesPlaceholder("examples", true),
-
-		// insert the required chat history
-		schema.MessagesPlaceholder("chat_history", false),
-
-		// user message template
-		schema.UserMessage("问题: {question}"),
-	)
-
-	// generate messages from the template
-	messages, err := template.Format(ctx, map[string]any{
-		"role":     "程序员鼓励师",
-		"style":    "积极、温暖且专业",
-		"question": "我的代码一直报错,感觉好沮丧,该怎么办?",
-		// chat history (required)
-		"chat_history": []*schema.Message{
-			schema.UserMessage("你好"),
-			schema.AssistantMessage("嘿!我是你的程序员鼓励师!记住,每个优秀的程序员都是从 Debug 中成长起来的。有什么我可以帮你的吗?", nil),
-		},
-		// example dialogue (optional)
-		"examples": []*schema.Message{
-			schema.UserMessage("我觉得自己写的代码太烂了"),
-			schema.AssistantMessage("每个程序员都经历过这个阶段!重要的是你在不断学习和进步。让我们一起看看代码,我相信通过重构和优化,它会变得更好。记住,Rome wasn't built in a day,代码质量是通过持续改进来提升的。", nil),
-		},
-	})
-	if err != nil {
-		log.Fatal(err)
-	}
+	// create messages from the template
+	log.Printf("===create messages===\n")
+	messages := createMessagesFromTemplate()
+	log.Printf("messages: %+v\n\n", messages)
 
-	// create the OpenAI ChatModel, assuming the official OpenAI service is used
-	chatModel, err := openai.NewChatModel(ctx, &openai.ChatModelConfig{
-		Model:  "gpt-4o",     // model version to use
-		APIKey: openAIAPIKey, // OpenAI API key
-	})
-	if err != nil {
-		logs.Errorf("NewChatModel failed, err=%v", err)
-		return
-	}
+	// create the llm
+	log.Printf("===create llm===\n")
+	cm := createOpenAIChatModel(ctx)
+	// cm := createOllamaChatModel(ctx)
+	log.Printf("create llm success\n\n")
 
-	// use Generate to get the complete reply
-	response, err := chatModel.Generate(ctx, messages)
-	if err != nil {
-		logs.Errorf("chatModel.Generate failed, err=%v", err)
-		return
-	}
+	log.Printf("===llm generate===\n")
+	result := generate(ctx, cm, messages)
+	log.Printf("result: %+v\n\n", result)
 
-	logs.Infof("below is chat model's output:")
-	logs.Tokenf("%v", response.Content)
+	log.Printf("===llm stream generate===\n")
+	streamResult := stream(ctx, cm, messages)
+	reportStream(streamResult)
 }
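
For reference, this is how the new quickstart/chat/main.go reads after the change, reconstructed from the added and unchanged lines of the hunk above (license header omitted):

	package main

	import (
		"context"
		"log"
	)

	func main() {
		ctx := context.Background()

		// create messages from the template
		log.Printf("===create messages===\n")
		messages := createMessagesFromTemplate()
		log.Printf("messages: %+v\n\n", messages)

		// create the llm
		log.Printf("===create llm===\n")
		cm := createOpenAIChatModel(ctx)
		// cm := createOllamaChatModel(ctx)
		log.Printf("create llm success\n\n")

		log.Printf("===llm generate===\n")
		result := generate(ctx, cm, messages)
		log.Printf("result: %+v\n\n", result)

		log.Printf("===llm stream generate===\n")
		streamResult := stream(ctx, cm, messages)
		reportStream(streamResult)
	}
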
36 changes: 36 additions & 0 deletions quickstart/chat/ollama.go
@@ -0,0 +1,36 @@
/*
* Copyright 2025 CloudWeGo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package main

import (
"context"
"log"

"github.com/cloudwego/eino-ext/components/model/ollama"
"github.com/cloudwego/eino/components/model"
)

func createOllamaChatModel(ctx context.Context) model.ChatModel {
chatModel, err := ollama.NewChatModel(ctx, &ollama.ChatModelConfig{
BaseURL: "http://localhost:11434", // Ollama server address
Model: "llama2", // model name
})
if err != nil {
log.Fatalf("create ollama chat model failed: %v", err)
}
return chatModel
}
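
To try the local model instead, the only switch needed in main.go is the constructor call, as the commented-out line there suggests. This assumes an Ollama server is already running at the configured BaseURL with the llama2 model pulled:

	// in main.go: use the local Ollama server instead of OpenAI
	// cm := createOpenAIChatModel(ctx)
	cm := createOllamaChatModel(ctx)
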
38 changes: 38 additions & 0 deletions quickstart/chat/openai.go
@@ -0,0 +1,38 @@
/*
* Copyright 2025 CloudWeGo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package main

import (
"context"
"log"
"os"

"github.com/cloudwego/eino-ext/components/model/openai"
"github.com/cloudwego/eino/components/model"
)

func createOpenAIChatModel(ctx context.Context) model.ChatModel {
key := os.Getenv("OPENAI_API_KEY")
chatModel, err := openai.NewChatModel(ctx, &openai.ChatModelConfig{
Model: "gpt-4o", // model version to use
APIKey: key, // OpenAI API key
})
if err != nil {
log.Fatalf("create openai chat model failed, err=%v", err)
}
return chatModel
}
41 changes: 41 additions & 0 deletions quickstart/chat/stream.go
@@ -0,0 +1,41 @@
/*
* Copyright 2025 CloudWeGo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package main

import (
"io"
"log"

"github.com/cloudwego/eino/schema"
)

func reportStream(sr *schema.StreamReader[*schema.Message]) {
defer sr.Close()

i := 0
for {
message, err := sr.Recv()
if err == io.EOF {
return
}
if err != nil {
log.Fatalf("recv failed: %v", err)
}
log.Printf("message[%d]: %+v\n", i, message)
i++
}
}
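
reportStream logs each chunk separately. A minimal variant that stitches the chunks back into one reply, using the same Recv / io.EOF pattern shown above, might look like this (collectStream is an illustrative helper, not part of this commit, and it additionally requires "strings" in the import list):

	// collectStream drains the stream and concatenates the chunk contents
	// into a single string, reusing the Recv / io.EOF loop from reportStream.
	func collectStream(sr *schema.StreamReader[*schema.Message]) string {
		defer sr.Close()

		var sb strings.Builder
		for {
			message, err := sr.Recv()
			if err == io.EOF {
				return sb.String()
			}
			if err != nil {
				log.Fatalf("recv failed: %v", err)
			}
			sb.WriteString(message.Content)
		}
	}
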
72 changes: 72 additions & 0 deletions quickstart/chat/template.go
@@ -0,0 +1,72 @@
/*
* Copyright 2025 CloudWeGo Authors
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/

package main

import (
"context"
"log"

"github.com/cloudwego/eino/components/prompt"
"github.com/cloudwego/eino/schema"
)

func createTemplate() prompt.ChatTemplate {
// create the template, using the FString format
return prompt.FromMessages(schema.FString,
// system message template
schema.SystemMessage("你是一个{role}。你需要用{style}的语气回答问题。你的目标是帮助程序员保持积极乐观的心态,提供技术建议的同时也要关注他们的心理健康。"),

// insert the chat history if needed (leave it out for a brand-new conversation)
schema.MessagesPlaceholder("chat_history", true),

// user message template
schema.UserMessage("问题: {question}"),
)
}

func createMessagesFromTemplate() []*schema.Message {
template := createTemplate()

// generate messages from the template
messages, err := template.Format(context.Background(), map[string]any{
"role": "程序员鼓励师",
"style": "积极、温暖且专业",
"question": "我的代码一直报错,感觉好沮丧,该怎么办?",
// chat history (this example simulates two earlier rounds of conversation)
"chat_history": []*schema.Message{
schema.UserMessage("你好"),
schema.AssistantMessage("嘿!我是你的程序员鼓励师!记住,每个优秀的程序员都是从 Debug 中成长起来的。有什么我可以帮你的吗?", nil),
schema.UserMessage("我觉得自己写的代码太烂了"),
schema.AssistantMessage("每个程序员都经历过这个阶段!重要的是你在不断学习和进步。让我们一起看看代码,我相信通过重构和优化,它会变得更好。记住,Rome wasn't built in a day,代码质量是通过持续改进来提升的。", nil),
},
})
if err != nil {
log.Fatalf("format template failed: %v\n", err)
}
return messages
}

// sample output
//func main() {
//	messages := createMessagesFromTemplate()
//	fmt.Printf("formatted message: %v", messages)
//}

// formatted message: [system: 你是一个程序员鼓励师。你需要用积极、温暖且专业的语气回答问题。你的目标是帮助程序员保持积极乐观的心态,提供技术建议的同时也要关注他们的心理健康。 user: 你好 assistant: 嘿!我是你的程序员鼓励师!记住,每个优秀的程序员都是从 Debug 中成长起来的。有什么我可以帮你的吗? user: 我觉得自己写的代码太烂了 assistant: 每个程序员都经历过这个阶段!重要的是你在不断学习和进步。让我们一起看看代码,我相信通过重构和优化,它会变得更好。记住,Rome wasn't built in a day,代码质量是通过持续改进来提升的。 user: 问题: 我的代码一直报错,感觉好沮丧,该怎么办?]
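
Because the chat_history placeholder is declared optional (the `true` argument to MessagesPlaceholder), the same template also formats the first turn of a brand-new conversation with no history supplied. A minimal sketch, reusing the variable names from createMessagesFromTemplate above:

	// first turn of a new conversation: no chat_history is supplied
	messages, err := createTemplate().Format(context.Background(), map[string]any{
		"role":     "程序员鼓励师",
		"style":    "积极、温暖且专业",
		"question": "我的代码一直报错,感觉好沮丧,该怎么办?",
	})
	if err != nil {
		log.Fatalf("format template failed: %v", err)
	}
	// messages now holds only the formatted system message and the user question
	_ = messages
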
