Skip to content

Commit

Permalink
Properly implement serialize / deserialize
Browse files Browse the repository at this point in the history
  • Loading branch information
ksylvest committed Aug 16, 2024
1 parent 313e08a commit 560f3ac
Show file tree
Hide file tree
Showing 6 changed files with 80 additions and 34 deletions.
14 changes: 7 additions & 7 deletions Gemfile.lock
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
PATH
remote: .
specs:
omniai-openai (1.7.0)
omniai-openai (1.8.0)
event_stream_parser
omniai
zeitwerk
Expand Down Expand Up @@ -50,20 +50,20 @@ GEM
llhttp-ffi (0.5.0)
ffi-compiler (~> 1.0)
rake (~> 13.0)
omniai (1.7.0)
omniai (1.8.0)
event_stream_parser
http
zeitwerk
parallel (1.25.1)
parser (3.3.4.0)
parallel (1.26.2)
parser (3.3.4.2)
ast (~> 2.4.1)
racc
public_suffix (6.0.1)
racc (1.8.1)
rainbow (3.1.1)
rake (13.2.1)
regexp_parser (2.9.2)
rexml (3.3.4)
rexml (3.3.5)
strscan
rspec (3.13.0)
rspec-core (~> 3.13.0)
Expand Down Expand Up @@ -91,11 +91,11 @@ GEM
rubocop-ast (>= 1.31.1, < 2.0)
ruby-progressbar (~> 1.7)
unicode-display_width (>= 2.4.0, < 3.0)
rubocop-ast (1.31.3)
rubocop-ast (1.32.0)
parser (>= 3.3.1.0)
rubocop-rake (0.6.0)
rubocop (~> 1.0)
rubocop-rspec (3.0.3)
rubocop-rspec (3.0.4)
rubocop (~> 1.61)
ruby-progressbar (1.13.0)
simplecov (0.22.0)
Expand Down
16 changes: 9 additions & 7 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -62,19 +62,21 @@ _For details on installation or running Ollama checkout [the project README](htt

### Chat

A chat completion is generated by passing in prompts using any a variety of formats:
A chat completion is generated by passing in a simple text prompt:

```ruby
completion = client.chat('Tell me a joke!')
completion.choice.message.content # 'Why did the chicken cross the road? To get to the other side.'
completion.content # 'Why did the chicken cross the road? To get to the other side.'
```

A chat completion may also be generated by using a prompt builder:

```ruby
completion = client.chat do |prompt|
  prompt.system('You are an expert in geography.')
prompt.user('What is the capital of Canada?')
end
completion.choice.message.content # 'The capital of Canada is Ottawa.'
completion.content # 'The capital of Canada is Ottawa.'
```

#### Model
Expand All @@ -83,7 +85,7 @@ completion.choice.message.content # 'The capital of Canada is Ottawa.'

```ruby
completion = client.chat('How fast is a cheetah?', model: OmniAI::OpenAI::Chat::Model::GPT_3_5_TURBO)
completion.choice.message.content # 'A cheetah can reach speeds over 100 km/h.'
completion.content # 'A cheetah can reach speeds over 100 km/h.'
```

[OpenAI API Reference `model`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-model)
Expand All @@ -94,7 +96,7 @@ completion.choice.message.content # 'A cheetah can reach speeds over 100 km/h.'

```ruby
completion = client.chat('Pick a number between 1 and 5', temperature: 2.0)
completion.choice.message.content # '3'
completion.content # '3'
```

[OpenAI API Reference `temperature`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-temperature)
Expand All @@ -105,7 +107,7 @@ completion.choice.message.content # '3'

```ruby
stream = proc do |chunk|
print(chunk.choice.delta.content) # 'Better', 'three', 'hours', ...
print(chunk.content) # 'Better', 'three', 'hours', ...
end
client.chat('Be poetic.', stream:)
```
Expand All @@ -121,7 +123,7 @@ completion = client.chat(format: :json) do |prompt|
prompt.system(OmniAI::Chat::JSON_PROMPT)
prompt.user('What is the name of the drummer for the Beatles?')
end
JSON.parse(completion.choice.message.content) # { "name": "Ringo" }
JSON.parse(completion.content) # { "name": "Ringo" }
```

[OpenAI API Reference `response_format`](https://platform.openai.com/docs/api-reference/chat/create#chat-create-stream)
Expand Down
49 changes: 49 additions & 0 deletions examples/chat
Original file line number Diff line number Diff line change
@@ -0,0 +1,49 @@
#!/usr/bin/env ruby
# frozen_string_literal: true

# Demonstrates OmniAI::OpenAI chat usage: plain prompts, a prompt builder
# with streaming, and tool (function) calling.

require 'bundler/setup'
require 'omniai/openai'

CLIENT = OmniAI::OpenAI::Client.new

# A tool the model may call to "look up" the weather for a location.
# The proc fakes a reading with a random temperature in the requested unit.
TOOL = OmniAI::Tool.new(
  proc { |location:, unit: 'celsius'| "#{rand(20..50)}° #{unit} in #{location}" },
  name: 'Weather',
  description: 'Lookup the weather in a location',
  parameters: OmniAI::Tool::Parameters.new(
    properties: {
      location: OmniAI::Tool::Property.string(description: 'e.g. Toronto'),
      # Fixed misspellings ('celcius' / 'farenheit'): the enum must match the
      # proc's 'celsius' default and the units requested in example3, or the
      # model is steered toward values the example never expects.
      unit: OmniAI::Tool::Property.string(enum: %w[celsius fahrenheit]),
    },
    required: %i[location]
  )
)

# Example 1: a one-shot text prompt, answered in full (non-streaming).
def example1(stream: $stdout)
  stream.puts '> [USER] Tell me a joke'
  stream.puts CLIENT.chat('Tell me a joke').text
end

# Example 2: a prompt built via the builder block, streamed to `stream`.
def example2(stream: $stdout)
  [
    '> [SYSTEM] Respond in both English and French.',
    '> [USER] What is the fastest animal?',
  ].each { |line| stream.puts(line) }
  CLIENT.chat(stream:) do |prompt|
    prompt.system('Respond in both English and French.')
    prompt.user('What is the fastest animal?')
  end
end

# Example 3: tool calling — the model may invoke TOOL while answering.
def example3(stream: $stdout)
  stream.puts('> [SYSTEM] You are an expert in weather.')
  stream.puts('> [USER] What is the weather in "London" in celsius and "Madrid" in fahrenheit?')
  completion = CLIENT.chat(tools: [TOOL]) do |prompt|
    prompt.system('You are an expert in weather.')
    prompt.user('What is the weather in "London" in celsius and "Madrid" in fahrenheit?')
  end
  stream.puts(completion.text)
end

# Run each example in order, printing to $stdout.
example1
example2
example3
2 changes: 1 addition & 1 deletion lib/omniai/openai/chat.rb
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,7 @@ def payload
stream: @stream.nil? ? nil : !@stream.nil?,
temperature: @temperature,
response_format: (JSON_RESPONSE_FORMAT if @format.eql?(:json)),
tools: @tools&.map(&:prepare),
tools: @tools&.map(&:serialize),
}).compact
end

Expand Down
2 changes: 1 addition & 1 deletion lib/omniai/openai/version.rb
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,6 @@

module OmniAI
module OpenAI
VERSION = '1.7.0'
VERSION = '1.8.0'
end
end
31 changes: 13 additions & 18 deletions spec/omniai/openai/chat_spec.rb
Original file line number Diff line number Diff line change
Expand Up @@ -14,7 +14,7 @@
before do
stub_request(:post, 'https://api.openai.com/v1/chat/completions')
.with(body: {
messages: [{ role: 'user', content: prompt }],
messages: [{ role: 'user', content: [{ type: 'text', text: 'Tell me a joke!' }] }],
model:,
})
.to_return_json(body: {
Expand All @@ -28,8 +28,7 @@
})
end

it { expect(completion.choice.message.role).to eql('assistant') }
it { expect(completion.choice.message.content).to eql('Two elephants fall off a cliff. Boom! Boom!') }
it { expect(completion.text).to eql('Two elephants fall off a cliff. Boom! Boom!') }
end

context 'with an advanced prompt' do
Expand All @@ -44,8 +43,8 @@
stub_request(:post, 'https://api.openai.com/v1/chat/completions')
.with(body: {
messages: [
{ role: 'system', content: 'You are a helpful assistant.' },
{ role: 'user', content: 'What is the capital of Canada?' },
{ role: 'system', content: [{ type: 'text', text: 'You are a helpful assistant.' }] },
{ role: 'user', content: [{ type: 'text', text: 'What is the capital of Canada?' }] },
],
model:,
})
Expand All @@ -60,8 +59,7 @@
})
end

it { expect(completion.choice.message.role).to eql('assistant') }
it { expect(completion.choice.message.content).to eql('The capital of Canada is Ottawa.') }
it { expect(completion.text).to eql('The capital of Canada is Ottawa.') }
end

context 'with a temperature' do
Expand All @@ -73,7 +71,7 @@
before do
stub_request(:post, 'https://api.openai.com/v1/chat/completions')
.with(body: {
messages: [{ role: 'user', content: prompt }],
messages: [{ role: 'user', content: [{ type: 'text', text: 'Pick a number between 1 and 5.' }] }],
model:,
temperature:,
})
Expand All @@ -88,8 +86,7 @@
})
end

it { expect(completion.choice.message.role).to eql('assistant') }
it { expect(completion.choice.message.content).to eql('3') }
it { expect(completion.text).to eql('3') }
end

context 'when formatting as JSON' do
Expand All @@ -106,8 +103,8 @@
stub_request(:post, 'https://api.openai.com/v1/chat/completions')
.with(body: {
messages: [
{ role: 'system', content: OmniAI::Chat::JSON_PROMPT },
{ role: 'user', content: 'What is the name of the dummer for the Beatles?' },
{ role: 'system', content: [{ type: 'text', text: OmniAI::Chat::JSON_PROMPT }] },
{ role: 'user', content: [{ type: 'text', text: 'What is the name of the dummer for the Beatles?' }] },
],
model:,
response_format: { type: 'json_object' },
Expand All @@ -123,8 +120,7 @@
})
end

it { expect(completion.choice.message.role).to eql('assistant') }
it { expect(completion.choice.message.content).to eql('{ "name": "Ringo" }') }
it { expect(completion.text).to eql('{ "name": "Ringo" }') }
end

context 'when streaming' do
Expand All @@ -137,7 +133,7 @@
stub_request(:post, 'https://api.openai.com/v1/chat/completions')
.with(body: {
messages: [
{ role: 'user', content: 'Tell me a story.' },
{ role: 'user', content: [{ type: 'text', text: 'Tell me a story.' }] },
],
model:,
stream: !stream.nil?,
Expand All @@ -153,7 +149,7 @@
chunks = []
allow(stream).to receive(:call) { |chunk| chunks << chunk }
completion
expect(chunks.map { |chunk| chunk.choice.delta.content }).to eql(%w[A B])
expect(chunks.map(&:text)).to eql(%w[A B])
end
end

Expand Down Expand Up @@ -198,8 +194,7 @@
})
end

it { expect(completion.choice.message.role).to eql('assistant') }
it { expect(completion.choice.message.content).to eql('They are a photo of a cat and a photo of a dog.') }
it { expect(completion.text).to eql('They are a photo of a cat and a photo of a dog.') }
end
end
end

0 comments on commit 560f3ac

Please sign in to comment.