From 63b11c534d57533311e2c3e5f3ed27f1d4f0d190 Mon Sep 17 00:00:00 2001 From: Kevin Sylvestre Date: Tue, 16 Jul 2024 20:51:54 -0700 Subject: [PATCH] Define a prompt builder --- Gemfile.lock | 4 +- README.md | 54 ++++---- lib/omniai/chat.rb | 34 +++-- lib/omniai/chat/content/file.rb | 27 ---- lib/omniai/chat/content/media.rb | 56 -------- lib/omniai/chat/content/text.rb | 17 --- lib/omniai/chat/content/url.rb | 41 ------ lib/omniai/chat/prompt.rb | 125 ++++++++++++++++++ lib/omniai/chat/prompt/message.rb | 111 ++++++++++++++++ lib/omniai/chat/prompt/message/content.rb | 17 +++ lib/omniai/chat/prompt/message/file.rb | 39 ++++++ lib/omniai/chat/prompt/message/media.rb | 78 +++++++++++ lib/omniai/chat/prompt/message/text.rb | 31 +++++ lib/omniai/chat/prompt/message/url.rb | 57 ++++++++ lib/omniai/version.rb | 2 +- spec/omniai/chat/content/text_spec.rb | 9 -- .../chat/prompt/message/content_spec.rb | 9 ++ .../{content => prompt/message}/file_spec.rb | 15 ++- .../{content => prompt/message}/media_spec.rb | 36 ++++- spec/omniai/chat/prompt/message/text_spec.rb | 13 ++ .../{content => prompt/message}/url_spec.rb | 6 +- spec/omniai/chat/prompt/message_spec.rb | 90 +++++++++++++ spec/omniai/chat/prompt_spec.rb | 81 ++++++++++++ spec/omniai/chat_spec.rb | 38 +++++- 24 files changed, 783 insertions(+), 207 deletions(-) delete mode 100644 lib/omniai/chat/content/file.rb delete mode 100644 lib/omniai/chat/content/media.rb delete mode 100644 lib/omniai/chat/content/text.rb delete mode 100644 lib/omniai/chat/content/url.rb create mode 100644 lib/omniai/chat/prompt.rb create mode 100644 lib/omniai/chat/prompt/message.rb create mode 100644 lib/omniai/chat/prompt/message/content.rb create mode 100644 lib/omniai/chat/prompt/message/file.rb create mode 100644 lib/omniai/chat/prompt/message/media.rb create mode 100644 lib/omniai/chat/prompt/message/text.rb create mode 100644 lib/omniai/chat/prompt/message/url.rb delete mode 100644 spec/omniai/chat/content/text_spec.rb create mode 100644 spec/omniai/chat/prompt/message/content_spec.rb rename spec/omniai/chat/{content => prompt/message}/file_spec.rb (66%) rename spec/omniai/chat/{content => prompt/message}/media_spec.rb (64%) create mode 100644 spec/omniai/chat/prompt/message/text_spec.rb rename spec/omniai/chat/{content => prompt/message}/url_spec.rb (79%) create mode 100644 spec/omniai/chat/prompt/message_spec.rb create mode 100644 spec/omniai/chat/prompt_spec.rb diff --git a/Gemfile.lock b/Gemfile.lock index 3150816..dbb87db 100644 --- a/Gemfile.lock +++ b/Gemfile.lock @@ -1,7 +1,7 @@ PATH remote: . specs: - omniai (1.5.2) + omniai (1.6.0) event_stream_parser http zeitwerk @@ -60,7 +60,7 @@ GEM rainbow (3.1.1) rake (13.2.1) regexp_parser (2.9.2) - rexml (3.3.1) + rexml (3.3.2) strscan rspec (3.13.0) rspec-core (~> 3.13.0) diff --git a/README.md b/README.md index 0f98a91..5c16122 100644 --- a/README.md +++ b/README.md @@ -122,25 +122,30 @@ client = OmniAI::OpenAI::Client.new(timeout: { Clients that support chat (e.g. Anthropic w/ "Claude", Google w/ "Gemini", Mistral w/ "LeChat", OpenAI w/ "ChatGPT", etc) generate completions using the following calls: -#### Completions using Single Message +#### Completions using a Simple Prompt + +Generating a completion is as simple as sending in the text: ```ruby completion = client.chat('Tell me a joke.') -completion.choice.message.content # '...' +completion.choice.message.content # 'Why don't scientists trust atoms? They make up everything!' 
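+
+# Chat#initialize still accepts temperature:, stream:, tools: and format:, so keyword
+# options can presumably be passed through as before, e.g.:
+completion = client.chat('Tell me a joke.', temperature: 0.7)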
``` -#### Completions using Multiple Messages +#### Completions using a Complex Prompt + +More complex completions are generated using a block w/ various system / user messages: ```ruby -messages = [ - { - role: OmniAI::Chat::Role::SYSTEM, - content: 'You are a helpful assistant with an expertise in geography.', - }, - 'What is the capital of Canada?' -] -completion = client.chat(messages, model: '...', temperature: 0.7, format: :json) -completion.choice.message.content # '...' +completion = client.chat do |prompt| + prompt.system 'You are a helpful assistant with an expertise in animals.' + prompt.user do |message| + message.text 'What animals are in the attached photos?' + message.url('https://.../cat.jpeg', "image/jpeg") + message.url('https://.../dog.jpeg', "image/jpeg") + message.file('./hamster.jpeg', "image/jpeg") + end +end +completion.choice.message.content # 'They are photos of a cat, a cat, and a hamster.' ``` #### Completions using Streaming via Proc @@ -167,20 +172,19 @@ client.chat('Tell me a story', stream: $stdout) A chat can also be initialized with tools: ```ruby -client.chat('What is the weather in "London, England" and "Madrid, Spain"?', tools: [ - OmniAI::Tool.new( - proc { |location:, unit: 'celsius'| "It is #{rand(20..50)}° #{unit} in #{location}" }, - name: 'Weather', - description: 'Lookup the weather in a location', - parameters: OmniAI::Tool::Parameters.new( - properties: { - location: OmniAI::Tool::Property.string(description: 'The city and country (e.g. Toronto, Canada).'), - unit: OmniAI::Tool::Property.string(enum: %w[celcius farenheit]), - }, - required: %i[location] - ) +tool = OmniAI::Tool.new( + proc { |location:, unit: 'celsius'| "#{rand(20..50)}° #{unit} in #{location}" }, + name: 'Weather', + description: 'Lookup the weather in a location', + parameters: OmniAI::Tool::Parameters.new( + properties: { + location: OmniAI::Tool::Property.string(description: 'e.g. Toronto'), + unit: OmniAI::Tool::Property.string(enum: %w[celcius farenheit]), + }, + required: %i[location] ) -]) +) +client.chat('What is the weather in "London" and "Madrid"?', tools: [tool]) ``` ### Transcribe diff --git a/lib/omniai/chat.rb b/lib/omniai/chat.rb index e786411..6dd6ecb 100644 --- a/lib/omniai/chat.rb +++ b/lib/omniai/chat.rb @@ -50,15 +50,20 @@ def self.process!(...) new(...).process! end - # @param messages [String] required + # @param prompt [OmniAI::Chat::Prompt, String, nil] optional # @param client [OmniAI::Client] the client # @param model [String] required # @param temperature [Float, nil] optional # @param stream [Proc, IO, nil] optional # @param tools [Array] optional # @param format [Symbol, nil] optional - :json - def initialize(messages, client:, model:, temperature: nil, stream: nil, tools: nil, format: nil) - @messages = arrayify(messages) + # @yield [prompt] optional + def initialize(prompt = nil, client:, model:, temperature: nil, stream: nil, tools: nil, format: nil, &block) + raise ArgumentError, 'prompt or block is required' if !prompt && !block + + @prompt = prompt ? Prompt.parse(prompt) : Prompt.new + block&.call(@prompt) + @client = client @model = model @temperature = temperature @@ -79,9 +84,12 @@ def process! protected # Used to spawn another chat with the same configuration using different messages. 
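+  # When tools are configured and the completion requests tool calls, #complete! re-invokes
+  # this with the prepared prompt, the returned messages, and the tool call results (see below).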
- def spawn!(messages) + # + # @param prompt [OmniAI::Chat::Prompt] + # @return [OmniAI::Chat::Prompt] + def spawn!(prompt) self.class.new( - messages, + prompt, client: @client, model: @model, temperature: @temperature, @@ -118,7 +126,7 @@ def complete!(response:) if @tools && completion.tool_call_list.any? spawn!([ - *@messages, + *@prompt.prepare, *completion.choices.map(&:message).map(&:data), *(completion.tool_call_list.map { |tool_call| execute_tool_call(tool_call) }), ]) @@ -148,19 +156,7 @@ def stream!(response:) # @return [Array] def messages - @messages.map do |content| - case content - when String then { role: Role::USER, content: } - when Hash then content - else raise Error, "Unsupported content=#{content.inspect}" - end - end - end - - # @param value [Object, Array] - # @return [Array] - def arrayify(value) - value.is_a?(Array) ? value : [value] + @prompt.prepare end # @return [HTTP::Response] diff --git a/lib/omniai/chat/content/file.rb b/lib/omniai/chat/content/file.rb deleted file mode 100644 index a199f41..0000000 --- a/lib/omniai/chat/content/file.rb +++ /dev/null @@ -1,27 +0,0 @@ -# frozen_string_literal: true - -module OmniAI - class Chat - module Content - # A file that is either audio / image / video. - class File < Media - attr_accessor :io - - # @param io [IO, Pathname, String] - # @param type [Symbol, String] :image, :video, :audio, "audio/flac", "image/jpeg", "video/mpeg", etc. - def initialize(io, type) - super(type) - @io = io - end - - # @return [String] - def fetch! - case @io - when IO then @io.read - else ::File.binread(@io) - end - end - end - end - end -end diff --git a/lib/omniai/chat/content/media.rb b/lib/omniai/chat/content/media.rb deleted file mode 100644 index f55de63..0000000 --- a/lib/omniai/chat/content/media.rb +++ /dev/null @@ -1,56 +0,0 @@ -# frozen_string_literal: true - -module OmniAI - class Chat - module Content - # An abstract class that represents audio / image / video and is used for both files and urls. - class Media - attr_accessor :type - - # @param type [String] "audio/flac", "image/jpeg", "video/mpeg", etc. - def initialize(type) - @type = type - end - - # @return [Boolean] - def text? - @type.match?(%r{^text/}) - end - - # @return [Boolean] - def audio? - @type.match?(%r{^audio/}) - end - - # @return [Boolean] - def image? - @type.match?(%r{^image/}) - end - - # @return [Boolean] - def video? - @type.match?(%r{^video/}) - end - - # @yield [io] - def fetch!(&) - raise NotImplementedError, "#{self.class}#fetch! undefined" - end - - # e.g. "Hello" -> "SGVsbG8h" - # - # @return [String] - def data - Base64.strict_encode64(fetch!) - end - - # e.g. "data:text/html;base64,..." - # - # @return [String] - def data_uri - "data:#{@type};base64,#{data}" - end - end - end - end -end diff --git a/lib/omniai/chat/content/text.rb b/lib/omniai/chat/content/text.rb deleted file mode 100644 index 7300e6e..0000000 --- a/lib/omniai/chat/content/text.rb +++ /dev/null @@ -1,17 +0,0 @@ -# frozen_string_literal: true - -module OmniAI - class Chat - module Content - # Just some text. - class Text - attr_accessor :text - - # @param text [text] - def initialize(text) - @text = text - end - end - end - end -end diff --git a/lib/omniai/chat/content/url.rb b/lib/omniai/chat/content/url.rb deleted file mode 100644 index 9d694c6..0000000 --- a/lib/omniai/chat/content/url.rb +++ /dev/null @@ -1,41 +0,0 @@ -# frozen_string_literal: true - -module OmniAI - class Chat - module Content - # A url that is either audio / image / video. 
- class URL < Media - attr_accessor :url, :type - - class HTTPError < OmniAI::HTTPError; end - - # @param url [URI, String] - # @param type [Symbol, String] "audio/flac", "image/jpeg", "video/mpeg", etc. - def initialize(url, type) - super(type) - @url = url - end - - # @raise [HTTPError] - # - # @return [String] - def fetch! - response = request! - String(response.body) - end - - private - - # @raise [HTTPError] - # - # @return [HTTP::Response] - def request! - response = HTTP.get(@url) - raise HTTPError, response.flush unless response.status.success? - - response - end - end - end - end -end diff --git a/lib/omniai/chat/prompt.rb b/lib/omniai/chat/prompt.rb new file mode 100644 index 0000000..0dbf27e --- /dev/null +++ b/lib/omniai/chat/prompt.rb @@ -0,0 +1,125 @@ +# frozen_string_literal: true + +module OmniAI + class Chat + # Used to standardizes the process of building complex prompts. + # + # Usage: + # + # completion = client.chat do |prompt| + # prompt.system('You are a helpful assistant.') + # prompt.user do |message| + # message.text 'What are these photos of?' + # message.url 'https://example.com/cat.jpg', type: "image/jpeg" + # message.url 'https://example.com/dog.jpg', type: "image/jpeg" + # message.file File.open('hamster.jpg'), type: "image/jpeg" + # end + # end + class Prompt + class MessageError < Error; end + + # @return [Array] + attr_accessor :messages + + # Usage: + # + # OmniAI::Chat::Prompt.parse('What is the capital of Canada?') + # + # @param messages [nil, String] + # + # @return [OmniAI::Chat::Prompt] + def self.parse(prompt) + new if prompt.nil? + return prompt if prompt.is_a?(self) + + new.tap do |instance| + instance.user(prompt) + end + end + + # Usage: + # + # OmniAI::Chat::Prompt.build do |prompt| + # prompt.system('You are an expert in geography.') + # prompt.user('What is the capital of Canada?') + # end + # + # @return [OmniAI::Chat::Prompt] + # @yield [OmniAI::Chat::Prompt] + def self.build(&block) + new.tap do |prompt| + block&.call(prompt) + end + end + + # @param messages [Array] optional + def initialize(messages: []) + @messages = messages + end + + # @return [String] + def inspect + "#<#{self.class.name} messages=#{@messages.inspect}>" + end + + # Usage: + # + # prompt.message('What is the capital of Canada?') + # + # @param content [String, nil] + # @param role [Symbol] + # + # @yield [OmniAI::Chat::Prompt::Message] + # @return [OmniAI::Chat::Prompt::Message] + def message(content = nil, role: :user, &block) + raise ArgumentError, 'content or block is required' if content.nil? && block.nil? + + self.class::Message.new(content:, role:).tap do |message| + block&.call(message) + @messages << message + end + end + + # Usage: + # + # prompt.system('You are a helpful assistant.') + # + # prompt.system do |message| + # message.text 'You are a helpful assistant.' + # end + # + # @param content [String, nil] + # + # @yield [OmniAI::Chat::Prompt::Message] + # @return [OmniAI::Chat::Prompt::Message] + def system(content = nil, &) + message(content, role: Role::SYSTEM, &) + end + + # Usage: + # + # prompt.user('What is the capital of Canada?') + # + # prompt.user do |message| + # message.text 'What is the capital of Canada?' 
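+      #     # media can be attached too (hypothetical URL; Message#url takes the MIME type positionally):
+      #     message.url 'https://example.com/ottawa.jpg', 'image/jpeg'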
+ # end + # + # @param content [String, nil] + # + # @yield [OmniAI::Chat::Prompt::Message] + # @return [OmniAI::Chat::Prompt::Message] + def user(content = nil, &) + message(content, role: Role::USER, &) + end + + # Usage: + # + # prompt.prepare # => [{ content: "What is the capital of Canada?", role: :user }] + # + # @return [Array] + def prepare + @messages.map(&:prepare) + end + end + end +end diff --git a/lib/omniai/chat/prompt/message.rb b/lib/omniai/chat/prompt/message.rb new file mode 100644 index 0000000..891e492 --- /dev/null +++ b/lib/omniai/chat/prompt/message.rb @@ -0,0 +1,111 @@ +# frozen_string_literal: true + +module OmniAI + class Chat + class Prompt + # Used to standardize the process of building message within a prompt: + # + # completion = client.chat do |prompt| + # prompt.user do |message| + # message.text 'What are these photos of?' + # message.url 'https://example.com/cat.jpg', type: "image/jpeg" + # message.url 'https://example.com/dog.jpg', type: "image/jpeg" + # message.file File.open('hamster.jpg'), type: "image/jpeg" + # end + # end + class Message + # @return [Array, String] + attr_accessor :content + + # @return [String] + attr_accessor :role + + # @param content [String, nil] + # @param role [String] + def initialize(content: nil, role: Role::USER) + @content = content || [] + @role = role + end + + # @return [String] + def inspect + "#<#{self.class} role=#{@role.inspect} content=#{@content.inspect}>" + end + + # @return [Boolean] + def role?(role) + String(@role).eql?(String(role)) + end + + # @return [Boolean] + def system? + role?(Role::SYSTEM) + end + + # @return [Boolean] + def user? + role?(Role::USER) + end + + # Usage: + # + # message.text('What are these photos of?') + # + # @param value [String] + # + # @return [Message::Text] + def text(value) + self.class::Text.new(value).tap do |text| + @content << text + end + end + + # Usage: + # + # message.url('https://example.com/hamster.jpg', type: "image/jpeg") + # + # @param uri [String] + # @param type [String] + # + # @return [Message::URL] + def url(uri, type) + self.class::URL.new(uri, type).tap do |url| + @content << url + end + end + + # Usage: + # + # message.file(File.open('hamster.jpg'), type: "image/jpeg") + # + # @param io [IO] + # @param type [String] + # + # @return [Message::File] + def file(io, type) + self.class::File.new(io, type).tap do |file| + @content << file + end + end + + # Prepares a message ahead of sending. This method is intended to be extended or overridden by a subclass. + # + # Usage: + # message.prepare # => { role: :user, content: 'Hello!' } + # message.prepare # => { role: :user, content: [{ type: 'text', text: 'Hello!' }] } + # + # @return [Hash] + def prepare + content = + if @content.is_a?(String) + @content + else + @content.map(&:prepare) + end + + { role: @role, content: } + end + end + end + end +end diff --git a/lib/omniai/chat/prompt/message/content.rb b/lib/omniai/chat/prompt/message/content.rb new file mode 100644 index 0000000..878aa67 --- /dev/null +++ b/lib/omniai/chat/prompt/message/content.rb @@ -0,0 +1,17 @@ +# frozen_string_literal: true + +module OmniAI + class Chat + class Prompt + class Message + # A placeholder for parts of a message. 
Any subclass must implement `prepare`.` + class Content + # @return [Hash] + def prepare + raise NotImplementedError, "#{self.class}#prepare undefined" + end + end + end + end + end +end diff --git a/lib/omniai/chat/prompt/message/file.rb b/lib/omniai/chat/prompt/message/file.rb new file mode 100644 index 0000000..0fe0ef7 --- /dev/null +++ b/lib/omniai/chat/prompt/message/file.rb @@ -0,0 +1,39 @@ +# frozen_string_literal: true + +module OmniAI + class Chat + class Prompt + class Message + # A file that is either audio / image / video. + class File < Media + attr_accessor :io + + # @param io [IO, Pathname, String] + # @param type [Symbol, String] :image, :video, :audio, "audio/flac", "image/jpeg", "video/mpeg", etc. + def initialize(io, type) + super(type) + @io = io + end + + # @return [String] + def inspect + "#<#{self.class} io=#{@io.inspect}>" + end + + # @return [String] + def fetch! + case @io + when IO then @io.read + else ::File.binread(@io) + end + end + + # @return [Hash] + def prepare + { type: "#{kind}_url", "#{kind}_url": { url: data_uri } } + end + end + end + end + end +end diff --git a/lib/omniai/chat/prompt/message/media.rb b/lib/omniai/chat/prompt/message/media.rb new file mode 100644 index 0000000..30d0e1a --- /dev/null +++ b/lib/omniai/chat/prompt/message/media.rb @@ -0,0 +1,78 @@ +# frozen_string_literal: true + +module OmniAI + class Chat + class Prompt + class Message + # An abstract class that represents audio / image / video and is used for both files and urls. + class Media < Content + class TypeError < Error; end + + # @return [Symbol, String] + attr_accessor :type + + # @param type [String] "audio/flac", "image/jpeg", "video/mpeg", etc. + def initialize(type) + super() + @type = type + end + + # @return [Boolean] + def text? + @type.match?(%r{^text/}) + end + + # @return [Boolean] + def audio? + @type.match?(%r{^audio/}) + end + + # @return [Boolean] + def image? + @type.match?(%r{^image/}) + end + + # @return [Boolean] + def video? + @type.match?(%r{^video/}) + end + + # @return [:video, :audio, :image, :text] + def kind + if text? then :text + elsif audio? then :audio + elsif image? then :image + elsif video? then :video + else + raise(TypeError, "unsupported type=#{@type}") + end + end + + # e.g. "Hello" -> "SGVsbG8h" + # + # @return [String] + def data + Base64.strict_encode64(fetch!) + end + + # e.g. "data:text/html;base64,..." + # + # @return [String] + def data_uri + "data:#{@type};base64,#{data}" + end + + # @return [String] + def fetch! + raise NotImplementedError, "#{self.class}#fetch! undefined" + end + + # @return [Hash] + def prepare + raise NotImplementedError, "#{self.class}#prepare undefined" + end + end + end + end + end +end diff --git a/lib/omniai/chat/prompt/message/text.rb b/lib/omniai/chat/prompt/message/text.rb new file mode 100644 index 0000000..c2fa2c1 --- /dev/null +++ b/lib/omniai/chat/prompt/message/text.rb @@ -0,0 +1,31 @@ +# frozen_string_literal: true + +module OmniAI + class Chat + class Prompt + class Message + # Just some text. 
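+        #
+        # Prepared as `{ type: 'text', text: '...' }` so it can be mixed with URL and File
+        # parts within a message's content array.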
+ class Text < Content + # @return [String] + attr_accessor :text + + # @param text [text] + def initialize(text = nil) + super() + @text = text + end + + # @return [String] + def inspect + "#<#{self.class} text=#{@text.inspect}>" + end + + # @return [Hash] + def prepare + { type: 'text', text: @text } + end + end + end + end + end +end diff --git a/lib/omniai/chat/prompt/message/url.rb b/lib/omniai/chat/prompt/message/url.rb new file mode 100644 index 0000000..54a88ba --- /dev/null +++ b/lib/omniai/chat/prompt/message/url.rb @@ -0,0 +1,57 @@ +# frozen_string_literal: true + +module OmniAI + class Chat + class Prompt + class Message + # A url that is either audio / image / video. + class URL < Media + # @return [URI, String] + attr_accessor :uri + + class FetchError < HTTPError; end + + # @param uri [URI, String] "https://example.com/cat.jpg" + # @param type [Symbol, String] "audio/flac", "image/jpeg", "video/mpeg", :audi, :image, :video, etc. + def initialize(uri, type = nil) + super(type) + @uri = uri + end + + # @return [String] + def inspect + "#<#{self.class} uri=#{@uri.inspect}>" + end + + # @return [Hash] + def prepare + { + type: "#{kind}_url", + "#{kind}_url": { url: @uri }, + } + end + + # @raise [FetchError] + # + # @return [String] + def fetch! + response = request! + String(response.body) + end + + protected + + # @raise [FetchError] + # + # @return [HTTP::Response] + def request! + response = HTTP.get(@uri) + raise FetchError, response.flush unless response.status.success? + + response + end + end + end + end + end +end diff --git a/lib/omniai/version.rb b/lib/omniai/version.rb index 059aad2..89f71d5 100644 --- a/lib/omniai/version.rb +++ b/lib/omniai/version.rb @@ -1,5 +1,5 @@ # frozen_string_literal: true module OmniAI - VERSION = '1.5.2' + VERSION = '1.6.0' end diff --git a/spec/omniai/chat/content/text_spec.rb b/spec/omniai/chat/content/text_spec.rb deleted file mode 100644 index 5ae73a9..0000000 --- a/spec/omniai/chat/content/text_spec.rb +++ /dev/null @@ -1,9 +0,0 @@ -# frozen_string_literal: true - -RSpec.describe OmniAI::Chat::Content::Text do - subject(:text) { described_class.new('Hello!') } - - describe '#text' do - it { expect(text.text).to eq('Hello!') } - end -end diff --git a/spec/omniai/chat/prompt/message/content_spec.rb b/spec/omniai/chat/prompt/message/content_spec.rb new file mode 100644 index 0000000..70deb58 --- /dev/null +++ b/spec/omniai/chat/prompt/message/content_spec.rb @@ -0,0 +1,9 @@ +# frozen_string_literal: true + +RSpec.describe OmniAI::Chat::Prompt::Message::Content do + subject(:content) { described_class.new } + + describe '#prepare' do + it { expect { content.prepare }.to raise_error(NotImplementedError) } + end +end diff --git a/spec/omniai/chat/content/file_spec.rb b/spec/omniai/chat/prompt/message/file_spec.rb similarity index 66% rename from spec/omniai/chat/content/file_spec.rb rename to spec/omniai/chat/prompt/message/file_spec.rb index 1d6565f..e142d7f 100644 --- a/spec/omniai/chat/content/file_spec.rb +++ b/spec/omniai/chat/prompt/message/file_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -RSpec.describe OmniAI::Chat::Content::File do +RSpec.describe OmniAI::Chat::Prompt::Message::File do subject(:file) { described_class.new(io, type) } let(:io) do @@ -27,6 +27,10 @@ it { expect(file.io).to eq(io) } end + describe '#inspect' do + it { expect(file.inspect).to eql("#") } + end + describe '#fetch!' 
do it { expect(file.fetch!).to eql('Hello!') } end @@ -38,4 +42,13 @@ describe '#data_uri' do it { expect(file.data_uri).to eq('data:text/plain;base64,SGVsbG8h') } end + + describe '#prepare' do + it do + expect(file.prepare).to eql({ + type: 'text_url', + text_url: { url: 'data:text/plain;base64,SGVsbG8h' }, + }) + end + end end diff --git a/spec/omniai/chat/content/media_spec.rb b/spec/omniai/chat/prompt/message/media_spec.rb similarity index 64% rename from spec/omniai/chat/content/media_spec.rb rename to spec/omniai/chat/prompt/message/media_spec.rb index 0426e9c..73c284b 100644 --- a/spec/omniai/chat/content/media_spec.rb +++ b/spec/omniai/chat/prompt/message/media_spec.rb @@ -1,6 +1,6 @@ # frozen_string_literal: true -RSpec.describe OmniAI::Chat::Content::Media do +RSpec.describe OmniAI::Chat::Prompt::Message::Media do subject(:media) { described_class.new(type) } let(:type) { 'text/plain' } @@ -68,4 +68,38 @@ it { expect(media).not_to be_video } end end + + describe '#kind' do + subject(:kind) { media.kind } + + context 'when type is audio/flac' do + let(:type) { 'audio/flac' } + + it { expect(kind).to eq(:audio) } + end + + context 'when type is image/jpeg' do + let(:type) { 'image/jpeg' } + + it { expect(kind).to eq(:image) } + end + + context 'when type is video/mpeg' do + let(:type) { 'video/mpeg' } + + it { expect(kind).to eq(:video) } + end + + context 'when type is text/plain' do + let(:type) { 'text/plain' } + + it { expect(kind).to eq(:text) } + end + + context 'when type is application/pdf' do + let(:type) { 'application/pdf' } + + it { expect { kind }.to raise_error(described_class::TypeError, 'unsupported type=application/pdf') } + end + end end diff --git a/spec/omniai/chat/prompt/message/text_spec.rb b/spec/omniai/chat/prompt/message/text_spec.rb new file mode 100644 index 0000000..c06dbbd --- /dev/null +++ b/spec/omniai/chat/prompt/message/text_spec.rb @@ -0,0 +1,13 @@ +# frozen_string_literal: true + +RSpec.describe OmniAI::Chat::Prompt::Message::Text do + subject(:text) { described_class.new('Hello!') } + + describe '#text' do + it { expect(text.text).to eq('Hello!') } + end + + describe '#inspect' do + it { expect(text.inspect).to eql('#') } + end +end diff --git a/spec/omniai/chat/content/url_spec.rb b/spec/omniai/chat/prompt/message/url_spec.rb similarity index 79% rename from spec/omniai/chat/content/url_spec.rb rename to spec/omniai/chat/prompt/message/url_spec.rb index 33a8455..5e8ec93 100644 --- a/spec/omniai/chat/content/url_spec.rb +++ b/spec/omniai/chat/prompt/message/url_spec.rb @@ -1,8 +1,12 @@ # frozen_string_literal: true -RSpec.describe OmniAI::Chat::Content::URL do +RSpec.describe OmniAI::Chat::Prompt::Message::URL do subject(:url) { described_class.new('https://localhost/greeting.txt', 'text/plain') } + describe '#url' do + it { expect(url.inspect).to eql('#') } + end + describe '#fetch!' do before do stub_request(:get, 'https://localhost/greeting.txt') diff --git a/spec/omniai/chat/prompt/message_spec.rb b/spec/omniai/chat/prompt/message_spec.rb new file mode 100644 index 0000000..f07f317 --- /dev/null +++ b/spec/omniai/chat/prompt/message_spec.rb @@ -0,0 +1,90 @@ +# frozen_string_literal: true + +RSpec.describe OmniAI::Chat::Prompt::Message do + subject(:message) { described_class.new(role:, content:) } + + let(:role) { OmniAI::Chat::Role::USER } + let(:content) { [] } + + describe '#inspect' do + it { expect(message.inspect).to eql('#') } + end + + describe '#role' do + it { expect(message.role).to eq('user') } + end + + describe '#user?' 
do + context 'when role is user' do + let(:role) { OmniAI::Chat::Role::USER } + + it { expect(message).to be_user } + end + + context 'when role is system' do + let(:role) { OmniAI::Chat::Role::SYSTEM } + + it { expect(message).not_to be_user } + end + end + + describe '#system?' do + context 'when role is user' do + let(:role) { OmniAI::Chat::Role::SYSTEM } + + it { expect(message).to be_system } + end + + context 'when role is system' do + let(:role) { OmniAI::Chat::Role::USER } + + it { expect(message).not_to be_system } + end + end + + describe '#text' do + it { expect { message.text('What is the capital of Canada?') }.to change(message.content, :count) } + end + + describe '#url' do + it { expect { message.url('https://localhost/greeting.txt', 'text/plain') }.to change(message.content, :count) } + end + + describe '#file' do + it { expect { message.file('greeting.txt', 'Hello!') }.to change(message.content, :count) } + end + + describe '#prepare' do + context 'with text content' do + let(:content) { 'What is the capital of Canada?' } + + it do + expect(message.prepare).to eql({ + role: 'user', + content: 'What is the capital of Canada?', + }) + end + end + + context 'with array content' do + let(:content) do + [ + OmniAI::Chat::Prompt::Message::Text.new('What are these photos of?'), + OmniAI::Chat::Prompt::Message::URL.new('https://localhost/cat.jpeg', 'image/jpeg'), + OmniAI::Chat::Prompt::Message::URL.new('https://localhost/dog.jpeg', 'image/jpeg'), + ] + end + + it do + expect(message.prepare).to eql({ + role: 'user', + content: [ + { type: 'text', text: 'What are these photos of?' }, + { type: 'image_url', image_url: { url: 'https://localhost/cat.jpeg' } }, + { type: 'image_url', image_url: { url: 'https://localhost/dog.jpeg' } }, + ], + }) + end + end + end +end diff --git a/spec/omniai/chat/prompt_spec.rb b/spec/omniai/chat/prompt_spec.rb new file mode 100644 index 0000000..7094eeb --- /dev/null +++ b/spec/omniai/chat/prompt_spec.rb @@ -0,0 +1,81 @@ +# frozen_string_literal: true + +RSpec.describe OmniAI::Chat::Prompt do + subject(:prompt) { described_class.new(messages:) } + + let(:messages) { [] } + + describe '.build' do + context 'with a block' do + let(:prompt) { described_class.build { |prompt| prompt.user('How much does the averager elephant eat a day?') } } + + it { expect(prompt).to(be_a(described_class)) } + end + end + + describe '#inspect' do + it { expect(prompt.inspect).to eql('#') } + end + + describe '#message' do + context 'without some text or a block' do + it { expect { prompt.message }.to raise_error(ArgumentError, 'content or block is required') } + end + + context 'with some text' do + let(:message) { prompt.message('What is the capital of Canada?') } + + it { expect { message }.to(change { prompt.messages.size }) } + end + + context 'with a block' do + let(:message) { prompt.message { |message| message.text('What is the capital of Canada?') } } + + it { expect { message }.to(change { prompt.messages.size }) } + end + end + + describe '#system' do + context 'with some text' do + let(:message) { prompt.system('You are a helpful assistant.') } + + it { expect { message }.to(change { prompt.messages.size }) } + end + + context 'with a block' do + let(:message) { prompt.system { |message| message.text('You are a helpful assistant.') } } + + it { expect { message }.to(change { prompt.messages.size }) } + end + end + + describe '#user' do + context 'with some text' do + let(:message) { prompt.user('What is the capital of Canada?') } + + it { expect { message 
}.to(change { prompt.messages.size }) } + end + + context 'with a block' do + let(:message) { prompt.user { |message| message.text('What is the capital of Canada?') } } + + it { expect { message }.to(change { prompt.messages.size }) } + end + end + + describe '#prepare' do + let(:messages) do + [ + OmniAI::Chat::Prompt::Message.new(role: 'system', content: 'You are an expert biologist.'), + OmniAI::Chat::Prompt::Message.new(role: 'system', content: 'Is a cheetah quicker than a human?'), + ] + end + + it do + expect(prompt.prepare).to eql([ + { role: 'system', content: 'You are an expert biologist.' }, + { role: 'system', content: 'Is a cheetah quicker than a human?' }, + ]) + end + end +end diff --git a/spec/omniai/chat_spec.rb b/spec/omniai/chat_spec.rb index ef1131b..42192cc 100644 --- a/spec/omniai/chat_spec.rb +++ b/spec/omniai/chat_spec.rb @@ -21,15 +21,39 @@ def payload end RSpec.describe OmniAI::Chat do - subject(:chat) { described_class.new(messages, model:, client:) } + subject(:chat) { described_class.new(prompt, model:, client:) } let(:model) { '...' } let(:client) { OmniAI::Client.new(api_key: '...') } - let(:messages) do - [ - { role: described_class::Role::SYSTEM, content: 'You are a helpful assistant.' }, - 'What is the name of the dummer for the Beatles?', - ] + + let(:prompt) do + OmniAI::Chat::Prompt.new.tap do |prompt| + prompt.system('You are a helpful assistant.') + prompt.user('What is the name of the dummer for the Beatles?') + end + end + + describe '#initialize' do + context 'with a prompt' do + it 'returns a chat' do + expect(described_class.new('What is the capital of France', model:, client:)) + .to be_a(described_class) + end + end + + context 'with a block' do + it 'returns a chat' do + expect(described_class.new(model:, client:) { |prompt| prompt.user('What is the capital of Spain') }) + .to be_a(described_class) + end + end + + context 'without a prompt or block' do + it 'raises an error' do + expect { described_class.new(model:, client:) } + .to raise_error(ArgumentError, 'prompt or block is required') + end + end end describe '#path' do @@ -41,7 +65,7 @@ def payload end describe '.process!' do - subject(:process!) { FakeChat.process!(messages, model:, client:, stream:) } + subject(:process!) { FakeChat.process!(prompt, model:, client:, stream:) } let(:stream) { nil } let(:client) { FakeClient.new(api_key: '...') }