
Commit 1a09ecd

Add back JSONL parsing
1 parent dc4d1c6 commit 1a09ecd

File tree

lib/openai/http.rb
spec/openai/client/chat_spec.rb
spec/openai/client/http_spec.rb

3 files changed: +23 −4 lines changed

lib/openai/http.rb

Lines changed: 12 additions & 2 deletions
@@ -3,9 +3,9 @@
 module OpenAI
   module HTTP
     def get(path:)
-      conn.get(uri(path: path)) do |req|
+      parse_jsonl(conn.get(uri(path: path)) do |req|
         req.headers = headers
-      end&.body
+      end&.body)
     end

     def json_post(path:, parameters:)
@@ -29,6 +29,16 @@ def delete(path:)

     private

+    def parse_jsonl(response)
+      return unless response
+      return response unless response.is_a?(String)
+
+      # Convert a multiline string of JSON objects to a JSON array.
+      response = response.gsub("}\n{", "},{").prepend("[").concat("]")
+
+      JSON.parse(response)
+    end
+
     # Given a proc, returns an outer proc that can be used to iterate over a JSON stream of chunks.
     # For each chunk, the inner user_proc is called giving it the JSON object. The JSON object could
     # be a data object or an error object as described in the OpenAI API documentation.
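
With this change, `get` runs the response body through the new private `parse_jsonl` helper, so newline-delimited JSON comes back as an array of parsed objects, while `nil` and non-String bodies pass through untouched. A minimal sketch of the same transformation on a standalone string (the `body` value mirrors the one used in the new spec below):

require "json"

# A two-line JSONL body, as in the spec added in spec/openai/client/http_spec.rb.
body = "{\"prompt\":\":)\"}\n{\"prompt\":\":(\"}\n"

# Same idea as parse_jsonl: stitch the objects into one JSON array, then parse once.
json = body.gsub("}\n{", "},{").prepend("[").concat("]")
JSON.parse(json)
# => [{"prompt"=>":)"}, {"prompt"=>":("}]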

spec/openai/client/chat_spec.rb

Lines changed: 1 addition & 1 deletion
@@ -77,7 +77,7 @@ def call(chunk)
       let(:cassette) { "#{model} streamed chat with error response".downcase }

       it "raises an HTTP error" do
-        VCR.use_cassette(cassette) do
+        VCR.use_cassette(cassette, record: :none) do
           response
         rescue Faraday::BadRequestError => e
           expect(e.response).to include(status: 400)

spec/openai/client/http_spec.rb

Lines changed: 10 additions & 1 deletion
@@ -109,7 +109,7 @@
     let(:cassette) { "http get with error response".downcase }

     it "raises an HTTP error" do
-      VCR.use_cassette(cassette) do
+      VCR.use_cassette(cassette, record: :none) do
        OpenAI::Client.new.models.retrieve(id: "text-ada-001")
      rescue Faraday::Error => e
        expect(e.response).to include(status: 400)
@@ -189,6 +189,15 @@
     end
   end

+  describe ".parse_jsonl" do
+    context "with a jsonl string" do
+      let(:body) { "{\"prompt\":\":)\"}\n{\"prompt\":\":(\"}\n" }
+      let(:parsed) { OpenAI::Client.new.send(:parse_jsonl, body) }
+
+      it { expect(parsed).to eq([{ "prompt" => ":)" }, { "prompt" => ":(" }]) }
+    end
+  end
+
   describe ".uri" do
     let(:path) { "/chat" }
     let(:uri) { OpenAI::Client.new.send(:uri, path: path) }
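
The new spec covers only the happy path; the guard clauses in `parse_jsonl` also mean `nil` and already-parsed bodies are returned as-is. A hedged sketch of those extra cases (illustrative only, not part of this commit's specs):

# Illustrative only: cases implied by parse_jsonl's guard clauses.
RSpec.describe "parse_jsonl guard clauses" do
  let(:client) { OpenAI::Client.new }

  it "returns nil for a nil body" do
    expect(client.send(:parse_jsonl, nil)).to be_nil
  end

  it "returns non-String bodies unchanged" do
    body = { "id" => "model-123" } # hypothetical already-parsed response
    expect(client.send(:parse_jsonl, body)).to eq(body)
  end
end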
