diff --git a/CHANGES.md b/CHANGES.md
index 8e90667..aef3549 100644
--- a/CHANGES.md
+++ b/CHANGES.md
@@ -1,5 +1,18 @@
 # Changes

+## 2026-02-06 v1.19.0
+
+- Added authentication support to `Ollama::Client`:
+  + New `api_key` parameter for simple Bearer token authentication
+  + New `headers` parameter for custom HTTP headers
+  + Authorization header automatically set when `api_key` is provided
+  + Custom headers merged with default headers (can override defaults)
+  + Support added to both direct client initialization and `Client::Config`
+- Updated `Client::Config` with `headers` and `api_key` attributes
+- Updated `configure_with` method to pass new authentication parameters
+- Added comprehensive tests for authentication features
+- Updated README with authentication examples and usage documentation
+
 ## 2025-12-19 v1.18.0

 - Updated `gem_hadar` development dependency from version **2.8** to **2.9**
diff --git a/README.md b/README.md
index 5e78c07..9332c28 100644
--- a/README.md
+++ b/README.md
@@ -69,6 +69,42 @@ messages = Message.new(role: 'user', content: 'Why is the sky blue?')
 ollama.chat(model: 'llama3.1', stream: true, messages:, &Print)
 ```

+### Authentication
+
+For Ollama Cloud or authenticated Ollama instances, you can provide an API key:
+
+```ruby
+# Using api_key parameter (sets Authorization: Bearer header automatically)
+ollama = Client.new(
+  base_url: 'https://ollama.com/api',
+  api_key: 'your-api-key-here'
+)
+```
+
+You can also provide custom headers for any authentication scheme:
+
+```ruby
+# Using custom headers for flexibility
+ollama = Client.new(
+  base_url: 'https://ollama.com/api',
+  headers: {
+    'Authorization' => 'Basic abc123',
+    'X-Request-ID' => SecureRandom.uuid
+  }
+)
+```
+
+Both options can be combined and used with configuration objects:
+
+```ruby
+config = Client::Config[
+  base_url: 'https://ollama.com/api',
+  api_key: 'your-api-key',
+  headers: { 'X-Custom' => 'value' }
+]
+ollama = Client.configure_with(config)
+```
+
 ## Try out things in ollama\_console

 This is an interactive console where you can try out the different commands
diff --git a/lib/ollama/client.rb b/lib/ollama/client.rb
index 9a33743..c25758b 100644
--- a/lib/ollama/client.rb
+++ b/lib/ollama/client.rb
@@ -35,25 +35,35 @@ class Ollama::Client
   # configuration options, making them available for use in subsequent client operations.
   #
   # @param base_url [ String, nil ] the base URL of the Ollama API endpoint, defaults to nil
+  # @param headers [ Hash, nil ] custom HTTP headers to include in all requests, defaults to nil
+  # @param api_key [ String, nil ] API key for authentication, sets Authorization: Bearer header, defaults to nil
   # @param output [ IO ] the output stream to be used for handling responses, defaults to $stdout
   # @param connect_timeout [ Integer, nil ] the connection timeout value in seconds, defaults to nil
   # @param read_timeout [ Integer, nil ] the read timeout value in seconds, defaults to nil
   # @param write_timeout [ Integer, nil ] the write timeout value in seconds, defaults to nil
   # @param debug [ Boolean, nil ] the debug flag indicating whether debug output is enabled, defaults to nil
   # @param user_agent [ String, nil ] the user agent string to be used for API requests, defaults to nil
-  def initialize(base_url: nil, output: $stdout, connect_timeout: nil, read_timeout: nil, write_timeout: nil, debug: nil, user_agent: nil)
+  def initialize(base_url: nil, headers: nil, api_key: nil, output: $stdout, connect_timeout: nil, read_timeout: nil,
+                 write_timeout: nil, debug: nil, user_agent: nil)
     base_url.nil? and base_url = ENV.fetch('OLLAMA_URL') do
       raise ArgumentError,
-        'missing :base_url parameter or OLLAMA_URL environment variable'
+            'missing :base_url parameter or OLLAMA_URL environment variable'
     end
     base_url.is_a? URI or base_url = URI.parse(base_url)
     base_url.is_a?(URI::HTTP) || base_url.is_a?(URI::HTTPS) or
       raise ArgumentError, "require #{base_url.inspect} to be http/https-URI"
-    @ssl_verify_peer = base_url.query.to_s.split(?&).inject({}) { |h, l|
-      h.merge Hash[*l.split(?=)]
-    }['ssl_verify_peer'] != 'false'
-    @base_url, @output, @connect_timeout, @read_timeout, @write_timeout, @debug, @user_agent =
-      base_url, output, connect_timeout, read_timeout, write_timeout, debug, user_agent
+    @ssl_verify_peer = base_url.query.to_s.split('&').inject({}) do |h, l|
+      h.merge Hash[*l.split('=')]
+    end['ssl_verify_peer'] != 'false'
+    @base_url = base_url
+    @headers = headers
+    @api_key = api_key
+    @output = output
+    @connect_timeout = connect_timeout
+    @read_timeout = read_timeout
+    @write_timeout = write_timeout
+    @debug = debug
+    @user_agent = user_agent
   end

   # The output attribute accessor allows reading and setting the output stream
@@ -135,7 +145,7 @@ def commands
   # It is typically used to provide users with information about which commands
   # are available for execution through the client interface.
   def help
-    @output.puts "Commands: %s" % commands.join(?,)
+    @output.puts 'Commands: %s' % commands.join(',')
   end

   # The request method sends an HTTP request to the Ollama API and processes
@@ -158,7 +168,7 @@ def request(method:, path:, handler:, body: nil, stream: nil)
     url = @base_url + path
     responses = Enumerator.new do |yielder|
       if stream
-        response_block = -> chunk, remaining_bytes, total_bytes do
+        response_block = lambda do |chunk, _remaining_bytes, _total_bytes|
           response_line = parse_json(chunk)
           response_line and yielder.yield response_line
         end
@@ -210,14 +220,18 @@ def inspect
   # This method generates a set of standard HTTP headers required for making
   # requests to the Ollama API, including the User-Agent and Content-Type. It
   # uses the instance's configured user agent or falls back to the class-level
-  # user agent if none is set.
+  # user agent if none is set. Custom headers and api_key authorization are
+  # merged into the result.
   #
   # @return [ Hash ] a hash containing the HTTP headers with keys 'User-Agent' and 'Content-Type'
   def headers
-    {
-      'User-Agent'   => @user_agent || self.class.user_agent,
-      'Content-Type' => 'application/json; charset=utf-8',
+    result = {
+      'User-Agent' => @user_agent || self.class.user_agent,
+      'Content-Type' => 'application/json; charset=utf-8'
     }
+    result['Authorization'] = "Bearer #{@api_key}" if @api_key && !@api_key.empty?
+    result.merge!(@headers) if @headers
+    result
   end

   # The user_agent method generates a formatted user agent string for API requests.
@@ -228,7 +242,7 @@ def headers
   #
   # @return [ String ] a formatted user agent string in the format "Ollama::Client/1.2.3"
   def self.user_agent
-    '%s/%s' % [ self, Ollama::VERSION ]
+    format('%s/%s', self, Ollama::VERSION)
   end

   # The excon method creates and returns a new Excon client instance configured
@@ -246,10 +260,10 @@ def self.user_agent
   def excon(url)
     params = {
       connect_timeout: @connect_timeout,
-      read_timeout:    @read_timeout,
-      write_timeout:   @write_timeout,
+      read_timeout: @read_timeout,
+      write_timeout: @write_timeout,
       ssl_verify_peer: @ssl_verify_peer,
-      debug:           @debug,
+      debug: @debug
     }.compact
     Excon.new(url, params)
   end
@@ -269,6 +283,6 @@ def parse_json(string)
     JSON.parse(string, object_class: Ollama::Response)
   rescue JSON::ParserError => e
     warn "Caught #{e.class}: #{e}"
-    return
+    nil
   end
 end
diff --git a/lib/ollama/client/configuration/config.rb b/lib/ollama/client/configuration/config.rb
index 9211adb..321490e 100644
--- a/lib/ollama/client/configuration/config.rb
+++ b/lib/ollama/client/configuration/config.rb
@@ -77,6 +77,18 @@ def self.[](value)
     # @attr [ URI ] the new base URL to be set for API requests
     attr_accessor :base_url

+    # The headers attribute accessor allows reading and setting custom HTTP
+    # headers to be included in all requests.
+    #
+    # @attr [ Hash, nil ] the custom HTTP headers to be set
+    attr_accessor :headers
+
+    # The api_key attribute accessor allows reading and setting the API key
+    # for authentication. When set, an Authorization: Bearer header is added.
+    #
+    # @attr [ String, nil ] the API key to be used for authentication
+    attr_accessor :api_key
+
     # The output attribute accessor allows reading and setting the output stream
     # used for handling responses and messages.
     #
@@ -132,13 +144,15 @@ def self.[](value)
    #   provided settings
    def configure_with(config)
      new(
-       base_url:        config.base_url,
-       output:          config.output,
+       base_url: config.base_url,
+       headers: config.headers,
+       api_key: config.api_key,
+       output: config.output,
        connect_timeout: config.connect_timeout,
-       read_timeout:    config.read_timeout,
-       write_timeout:   config.write_timeout,
-       debug:           config.debug,
-       user_agent:      config.user_agent
+       read_timeout: config.read_timeout,
+       write_timeout: config.write_timeout,
+       debug: config.debug,
+       user_agent: config.user_agent
      )
    end
  end
diff --git a/spec/ollama/client_spec.rb b/spec/ollama/client_spec.rb
index 04db7fc..1f784b7 100644
--- a/spec/ollama/client_spec.rb
+++ b/spec/ollama/client_spec.rb
@@ -21,6 +21,19 @@
     expect(client.output).to be $stdout
   end

+  it 'can be instantiated with config including headers and api_key' do
+    config = Ollama::Client::Config[
+      base_url: base_url,
+      api_key: 'test_key',
+      headers: { 'X-Custom' => 'value' }
+    ]
+    client = described_class.configure_with(config)
+    expect(client).to be_a described_class
+    expect(client.base_url.to_s).to eq base_url
+    expect(client.send(:headers)).to include('Authorization' => 'Bearer test_key')
+    expect(client.send(:headers)).to include('X-Custom' => 'value')
+  end
+
   it 'can be instantiated with config loaded from JSON' do
     config = Ollama::Client::Config.load_from_json(asset('client.json'))
     config.base_url = base_url
@@ -52,8 +65,49 @@
     expect(client2).not_to be_ssl_verify_peer
   end

-  it 'has a string representation' do
-    expect(ollama.to_s).to eq '#'
+  it 'can be instantiated with api_key parameter' do
+    client = described_class.new(base_url: base_url, api_key: 'test_key')
+    expect(client.send(:headers)).to include('Authorization' => 'Bearer test_key')
+  end
+
+  it 'can be instantiated with custom headers' do
+    client = described_class.new(
+      base_url: base_url,
+      headers: { 'X-Custom' => 'value', 'X-Request-ID' => '123' }
+    )
+    headers = client.send(:headers)
+    expect(headers).to include('X-Custom' => 'value')
+    expect(headers).to include('X-Request-ID' => '123')
+  end
+
+  it 'custom headers can override default headers' do
+    client = described_class.new(
+      base_url: base_url,
+      headers: { 'Content-Type' => 'application/octet-stream' }
+    )
+    headers = client.send(:headers)
+    expect(headers['Content-Type']).to eq('application/octet-stream')
+  end
+
+  it 'can combine api_key and custom headers' do
+    client = described_class.new(
+      base_url: base_url,
+      api_key: 'test_key',
+      headers: { 'X-Custom' => 'value' }
+    )
+    headers = client.send(:headers)
+    expect(headers).to include('Authorization' => 'Bearer test_key')
+    expect(headers).to include('X-Custom' => 'value')
+  end
+
+  it 'does not include Authorization header when api_key is nil' do
+    client = described_class.new(base_url: base_url)
+    expect(client.send(:headers)).not_to have_key('Authorization')
+  end
+
+  it 'does not include Authorization header when api_key is empty string' do
+    client = described_class.new(base_url: base_url, api_key: '')
+    expect(client.send(:headers)).not_to have_key('Authorization')
   end

   let :excon do
@@ -66,49 +120,49 @@

   it 'can raise error based on status code 500' do
     expect(excon).to receive(:send).and_return(double(status: 500, body: '{}'))
-    expect {
+    expect do
       ollama.generate(model: 'llama3.1', prompt: 'Hello World')
-    }.to raise_error(Ollama::Errors::Error)
+    end.to raise_error(Ollama::Errors::Error)
   end

   it 'can raise error based on status code 400' do
     expect(excon).to receive(:send).and_return(double(status: 400, body: '{}'))
-    expect {
+    expect do
       ollama.generate(model: 'llama3.1', prompt: 'Hello World', think: true)
-    }.to raise_error(Ollama::Errors::BadRequestError)
+    end.to raise_error(Ollama::Errors::BadRequestError)
   end

   it 'can raise error based on status code 404' do
     expect(excon).to receive(:send).and_return(double(status: 404, body: '{}'))
-    expect {
+    expect do
       ollama.generate(model: 'llama3.1', prompt: 'Hello World')
-    }.to raise_error(Ollama::Errors::NotFoundError)
+    end.to raise_error(Ollama::Errors::NotFoundError)
   end

   it 'can raise error on connection error' do
     expect(excon).to receive(:post).and_raise Excon::Error::Socket
-    expect {
+    expect do
       ollama.generate(model: 'llama3.1', prompt: 'Hello World')
-    }.to raise_error(Ollama::Errors::SocketError)
+    end.to raise_error(Ollama::Errors::SocketError)
   end

   it 'can raise error on timeout' do
     expect(excon).to receive(:post).and_raise Excon::Errors::Timeout
-    expect {
+    expect do
       ollama.generate(model: 'llama3.1', prompt: 'Hello World')
-    }.to raise_error(Ollama::Errors::TimeoutError)
+    end.to raise_error(Ollama::Errors::TimeoutError)
   end

   it 'can raise a generic error' do
     expect(excon).to receive(:post).and_raise Excon::Errors::Error
-    expect {
+    expect do
       ollama.generate(model: 'llama3.1', prompt: 'Hello World')
-    }.to raise_error(Ollama::Errors::Error)
+    end.to raise_error(Ollama::Errors::Error)
   end

   describe 'handlers' do
     let :body do
-      %{{"models":[{"name":"llama3.1:latest","model":"llama3.1:latest","size":6654289920,"digest":"62757c860e01d552d4e46b09c6b8d5396ef9015210105427e05a8b27d7727ed2","details":{"parent_model":"","format":"gguf","family":"llama","families":["llama"],"parameter_size":"8.0B","quantization_level":"Q4_0"},"expires_at":"2024-08-05T10:56:26.588713988Z","size_vram":6654289920}]}}
+      %({"models":[{"name":"llama3.1:latest","model":"llama3.1:latest","size":6654289920,"digest":"62757c860e01d552d4e46b09c6b8d5396ef9015210105427e05a8b27d7727ed2","details":{"parent_model":"","format":"gguf","family":"llama","families":["llama"],"parameter_size":"8.0B","quantization_level":"Q4_0"},"expires_at":"2024-08-05T10:56:26.588713988Z","size_vram":6654289920}]})
     end

     let :expected_response do
@@ -120,7 +174,7 @@
       :get,
       body: nil,
       headers: hash_including(
-        'Content-Type' => 'application/json; charset=utf-8',
+        'Content-Type' => 'application/json; charset=utf-8'
       )
     ).and_return(double(status: 200, body:))
   end
@@ -148,9 +202,9 @@
   it 'can generate without stream' do
     expect(excon).to receive(:send).with(
       :post,
-      body:    '{"model":"llama3.1","prompt":"Hello World"}',
+      body: '{"model":"llama3.1","prompt":"Hello World"}',
       headers: hash_including(
-        'Content-Type' => 'application/json; charset=utf-8',
+        'Content-Type' => 'application/json; charset=utf-8'
       )
     ).and_return(double(status: 200, body: '{}'))
     ollama.generate(model: 'llama3.1', prompt: 'Hello World')
@@ -159,9 +213,9 @@
   it 'can soldier on with parse errors and output warning' do
     expect(excon).to receive(:send).with(
       :post,
-      body:    '{"model":"llama3.1","prompt":"Hello World"}',
+      body: '{"model":"llama3.1","prompt":"Hello World"}',
       headers: hash_including(
-        'Content-Type' => 'application/json; charset=utf-8',
+        'Content-Type' => 'application/json; charset=utf-8'
       )
     ).and_return(double(status: 200, body: '{i am so broken}'))
     expect(ollama).to receive(:warn).with(
@@ -173,9 +227,9 @@
   it 'can generate with stream' do
     expect(excon).to receive(:send).with(
       :post,
-      body:    '{"model":"llama3.1","prompt":"Hello World","stream":true}',
+      body: '{"model":"llama3.1","prompt":"Hello World","stream":true}',
      headers: hash_including(
-        'Content-Type' => 'application/json; charset=utf-8',
+        'Content-Type' => 'application/json; charset=utf-8'
      ),
      response_block: an_instance_of(Proc)
    ).and_return(double(status: 200, body: '{}'))
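
Note on the header precedence introduced in this change set: defaults come first, then a Bearer `Authorization` derived from `api_key` (skipped when the key is nil or empty), and any user-supplied headers are merged last so they can override the defaults. The standalone sketch below mirrors that merge order outside the gem for quick experimentation; `build_headers` and the hard-coded `Ollama::Client/1.19.0` user agent are illustrative stand-ins, not part of the ollama-ruby API.

```ruby
# Standalone sketch of the merge order implemented by Ollama::Client#headers
# in the diff above. `build_headers` is a hypothetical helper used only for
# illustration; the real client derives its User-Agent from Ollama::VERSION.
def build_headers(api_key: nil, custom: nil, user_agent: 'Ollama::Client/1.19.0')
  result = {
    'User-Agent'   => user_agent,
    'Content-Type' => 'application/json; charset=utf-8'
  }
  # Authorization is only added for a present, non-empty api_key.
  result['Authorization'] = "Bearer #{api_key}" if api_key && !api_key.empty?
  # Custom headers are merged last, so they win over the defaults.
  result.merge!(custom) if custom
  result
end

p build_headers(api_key: 'secret')
# {"User-Agent"=>"Ollama::Client/1.19.0", "Content-Type"=>"application/json; charset=utf-8", "Authorization"=>"Bearer secret"}

p build_headers(api_key: '', custom: { 'Content-Type' => 'application/octet-stream' })
# {"User-Agent"=>"Ollama::Client/1.19.0", "Content-Type"=>"application/octet-stream"}
```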