From 47e841d3146d37084d31c691f7251169c8367c46 Mon Sep 17 00:00:00 2001
From: Dr Nic Williams
Date: Thu, 25 Apr 2024 19:26:52 +1000
Subject: [PATCH 1/2] Remove debug? helper that's called once

---
 examples/user-chat-streaming.rb | 6 +-----
 examples/user-chat.rb           | 6 +-----
 2 files changed, 2 insertions(+), 10 deletions(-)

diff --git a/examples/user-chat-streaming.rb b/examples/user-chat-streaming.rb
index 10ab0d9..2947b7b 100755
--- a/examples/user-chat-streaming.rb
+++ b/examples/user-chat-streaming.rb
@@ -34,10 +34,6 @@
 raise "Missing --model option" if @options[:model].nil?
 raise "Missing --agent-prompt option" if @options[:agent_prompt_path].nil?
 
-def debug?
-  @options[:debug]
-end
-
 # Read the agent prompt from the file
 agent_prompt = YAML.load_file(@options[:agent_prompt_path])
 user_emoji = agent_prompt["user_emoji"]
@@ -47,7 +43,7 @@ def debug?
 
 # Initialize the Groq client
 @client = Groq::Client.new(model_id: @options[:model], request_timeout: @options[:timeout]) do |f|
-  if debug?
+  if @options[:debug]
     require "logger"
 
     # Create a logger instance
diff --git a/examples/user-chat.rb b/examples/user-chat.rb
index 53e8f78..d2e58f7 100755
--- a/examples/user-chat.rb
+++ b/examples/user-chat.rb
@@ -35,10 +35,6 @@
 raise "Missing --model option" if @options[:model].nil?
 raise "Missing --agent-prompt option" if @options[:agent_prompt_path].nil?
 
-def debug?
-  @options[:debug]
-end
-
 # Read the agent prompt from the file
 agent_prompt = YAML.load_file(@options[:agent_prompt_path])
 user_emoji = agent_prompt["user_emoji"]
@@ -48,7 +44,7 @@ def debug?
 
 # Initialize the Groq client
 @client = Groq::Client.new(model_id: @options[:model], request_timeout: @options[:timeout]) do |f|
-  if debug?
+  if @options[:debug]
     require "logger"
 
     # Create a logger instance

From 5c8ffe51ece77508627b45c404ec239e78b5248d Mon Sep 17 00:00:00 2001
From: Dr Nic Williams
Date: Thu, 25 Apr 2024 19:42:44 +1000
Subject: [PATCH 2/2] Example of streaming JSON objects

---
 examples/README.md                    | 38 ++++++++++++-
 examples/streaming-to-json-objects.rb | 85 +++++++++++++++++++++++++++
 2 files changed, 122 insertions(+), 1 deletion(-)
 create mode 100755 examples/streaming-to-json-objects.rb

diff --git a/examples/README.md b/examples/README.md
index 2906c78..41095b4 100644
--- a/examples/README.md
+++ b/examples/README.md
@@ -20,7 +20,7 @@ At the prompt, either talk to the AI agent, or some special commands:
 - `exit` to exit the conversation
 - `summary` to get a summary of the conversation so far
 
-### Streaming
+### Streaming text chunks
 
 There is also an example of streaming the conversation to terminal as it is received from Groq API.
 
@@ -30,6 +30,42 @@ It defaults to the slower `llama3-70b-8192` model so that the streaming is more
 bundle exec examples/user-chat-streaming.rb --agent-prompt examples/agent-prompts/pizzeria-sales.yml
 ```
 
+### Streaming useful chunks (e.g. JSON)
+
+If the response returns a list of objects, such as a sequence of JSON objects, you can stream the chunks that make up each JSON object and process each object as soon as it is complete.
+
+```bash
+bundle exec examples/streaming-to-json-objects.rb
+```
+
+This will produce a JSON object for each planet in the solar system, one at a time. The API does not return each JSON object as a single chunk; rather, it returns fragments such as `{`, `"`, and `name` as distinct chunks. The example code [`examples/streaming-to-json-objects.rb`](streaming-to-json-objects.rb) shows how you might build up each JSON object from its chunks and process it (e.g. store it to a database) as soon as it is complete.
+
+The system prompt used is:
+
+```plain
+Write out the names of the planets of our solar system, and a brief description of each one.
+
+Return JSON object for each one:
+
+{ "name": "Mercury", "position": 1, "description": "Mercury is ..." }
+
+Between each response, say "NEXT" to clearly delineate each JSON response.
+```
+
+The example code uses the `NEXT` token to know when a complete JSON object has been received and can be processed.
+
+The output will look like the following, with each Ruby Hash pretty-printed as soon as it has been built up from chunks.
+
+```ruby
+{"name"=>"Mercury",
+ "position"=>1,
+ "description"=>"Mercury is the smallest planet in our solar system, with a highly elliptical orbit that takes it extremely close to the sun."}
+{"name"=>"Venus",
+ "position"=>2,
+ "description"=>
+ "Venus is often called Earth's twin due to their similar size and mass, but it has a thick atmosphere that traps heat, making it the hottest planet."}
+```
+
 ### Pizzeria
 
 Run the pizzeria example with the following command:
diff --git a/examples/streaming-to-json-objects.rb b/examples/streaming-to-json-objects.rb
new file mode 100755
index 0000000..4acf89b
--- /dev/null
+++ b/examples/streaming-to-json-objects.rb
@@ -0,0 +1,85 @@
+#!/usr/bin/env ruby
+
+require "optparse"
+require "groq"
+require "yaml"
+
+include Groq::Helpers
+
+@options = {
+  model: "llama3-70b-8192",
+  timeout: 20
+}
+OptionParser.new do |opts|
+  opts.banner = "Usage: ruby script.rb [options]"
+
+  opts.on("-m", "--model MODEL", "Model name") do |v|
+    @options[:model] = v
+  end
+
+  opts.on("-t", "--timeout TIMEOUT", "Timeout in seconds") do |v|
+    @options[:timeout] = v.to_i
+  end
+
+  opts.on("-d", "--debug", "Enable debug mode") do |v|
+    @options[:debug] = v
+  end
+end.parse!
+
+raise "Missing --model option" if @options[:model].nil?
+
+# Initialize the Groq client
+@client = Groq::Client.new(model_id: @options[:model], request_timeout: @options[:timeout]) do |f|
+  if @options[:debug]
+    require "logger"
+
+    # Create a logger instance
+    logger = Logger.new($stdout)
+    logger.level = Logger::DEBUG
+
+    f.response :logger, logger, bodies: true # Log request and response bodies
+  end
+end
+
+prompt = <<~TEXT
+  Write out the names of the planets of our solar system, and a brief description of each one.
+
+  Return JSON object for each one:
+
+  { "name": "Mercury", "position": 1, "description": "Mercury is ..." }
+
+  Between each response, say "NEXT" to clearly delineate each JSON response.
+TEXT
+
+# Handle each JSON object once it has been fully streamed
+
+class PlanetStreamer
+  def initialize
+    @buffer = ""
+  end
+
+  def call(content)
+    if !content || content.include?("NEXT")
+      json = JSON.parse(@buffer)
+
+      # do something with JSON, e.g. save to database
+      pp json
+
+      # reset buffer
+      @buffer = ""
+      return
+    end
+    # If the buffer is empty and this chunk does not start a JSON object, ignore it
+    if @buffer.empty? && !content.start_with?("{")
+      return
+    end
+
+    # build JSON
+    @buffer << content
+  end
+end
+
+streamer = PlanetStreamer.new
+
+@client.chat([S(prompt)], stream: streamer)
+puts
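
Below is a quick offline sanity check of the chunk-buffering approach used in `examples/streaming-to-json-objects.rb`; it is not part of the patch above. It feeds `PlanetStreamer#call` hand-crafted fragments (the chunk boundaries are invented for illustration) and assumes the `PlanetStreamer` class definition from the new example file has already been loaded. A complete object should only be parsed and pretty-printed when a `NEXT` sentinel arrives, or when `nil` is passed, which the example's `call` method treats as end of input.

```ruby
require "json"

# Hypothetical offline exercise of PlanetStreamer#call, with made-up chunk
# boundaries standing in for the fragments the API would stream back.
streamer = PlanetStreamer.new

chunks = [
  '{ "name": "Mer', 'cury", "position": 1, ',
  '"description": "Smallest planet." }',
  " NEXT ", # sentinel: the buffered Mercury JSON is parsed and pretty-printed
  '{ "name": "Venus", "position": 2, "description": "Hottest planet." }',
  nil # end of stream: the remaining buffer (Venus) is parsed
]

chunks.each { |chunk| streamer.call(chunk) }
```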