diff --git a/lib/ruby-openai-swarm/core.rb b/lib/ruby-openai-swarm/core.rb
index d0b5d61..da2053a 100644
--- a/lib/ruby-openai-swarm/core.rb
+++ b/lib/ruby-openai-swarm/core.rb
@@ -28,7 +28,7 @@ def initialize(client = nil)
     #     )
     #   end

-    def get_chat_completion(agent_tracker, history, context_variables, model_override, stream, debug)
+    def get_chat_completion(agent_tracker, history, context_variables, model_override, stream, debug, metadata = nil)
       agent = agent_tracker.current_agent
       context_variables = context_variables.dup
       instructions = agent.instructions.respond_to?(:call) ? agent.instructions.call(context_variables) : agent.instructions
@@ -56,6 +56,11 @@ def get_chat_completion(agent_tracker, history, context_variables, model_overrid
         tools: Util.request_tools_excluded(tools, agent_tracker.tracking_agents_tool_name, agent.strategy.prevent_agent_reentry),
       }

+      # Add metadata if provided
+      # Add support for LiteLLM observability with Langfuse
+      # See: https://docs.litellm.ai/docs/observability/langfuse_integration
+      create_params[:metadata] = metadata if metadata
+
       # TODO: https://platform.openai.com/docs/guides/function-calling/how-do-functions-differ-from-tools
       # create_params[:functions] = tools unless tools.empty?
       # create_params[:function_call] = agent.tool_choice if agent.tool_choice
@@ -168,7 +173,7 @@ def handle_tool_calls(tool_calls, active_agent, context_variables, debug)
       partial_response
     end

-    def run(agent:, messages:, context_variables: {}, model_override: nil, stream: false, debug: false, max_turns: Float::INFINITY, execute_tools: true)
+    def run(agent:, messages:, context_variables: {}, model_override: nil, stream: false, debug: false, max_turns: Float::INFINITY, execute_tools: true, metadata: nil)
       agent_tracker = OpenAISwarm::Agents::ChangeTracker.new(agent)
       if stream
         return run_and_stream(
@@ -178,7 +183,8 @@ def run(agent:, messages:, context_variables: {}, model_override: nil, stream: f
           model_override: model_override,
           debug: debug,
           max_turns: max_turns,
-          execute_tools: execute_tools
+          execute_tools: execute_tools,
+          metadata: metadata
         )
       end

@@ -197,7 +203,8 @@ def run(agent:, messages:, context_variables: {}, model_override: nil, stream: f
         context_variables,
         model_override,
         stream,
-        debug
+        debug,
+        metadata
       )

       message = completion.dig('choices', 0, 'message') || {}
@@ -237,7 +244,7 @@ def run(agent:, messages:, context_variables: {}, model_override: nil, stream: f
     end

     # TODO(Grayson): a lot of copied code here that will be refactored
-    def run_and_stream(agent:, messages:, context_variables: {}, model_override: nil, debug: false, max_turns: Float::INFINITY, execute_tools: true)
+    def run_and_stream(agent:, messages:, context_variables: {}, model_override: nil, debug: false, max_turns: Float::INFINITY, execute_tools: true, metadata: nil)
       agent_tracker = OpenAISwarm::Agents::ChangeTracker.new(agent)
       active_agent = agent
       context_variables = context_variables.dup
@@ -255,7 +262,8 @@ def run_and_stream(agent:, messages:, context_variables: {}, model_override: nil
         context_variables,
         model_override,
         true, # stream
-        debug
+        debug,
+        metadata
       )

       yield({ delim: "start" }) if block_given?
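The hunks above thread an optional `metadata` argument from the public entry points (`run`, `run_and_stream`) down into the chat-completion request. A minimal usage sketch follows; the metadata keys are the ones exercised by the new spec further down, and `client` and `agent` are assumed to be constructed as elsewhere in this gem:

    swarm = OpenAISwarm::Core.new(client)

    # `metadata:` flows run -> get_chat_completion -> create_params[:metadata],
    # so it is sent verbatim as part of the chat request parameters.
    swarm.run(
      agent: agent,
      messages: [{ role: "user", content: "Hello" }],
      metadata: {
        generation_name: "test-generation",
        trace_id: "trace-123",
        tags: ["test", "metadata"]
      }
    )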
diff --git a/lib/ruby-openai-swarm/version.rb b/lib/ruby-openai-swarm/version.rb
index 8052e1c..74b78ac 100644
--- a/lib/ruby-openai-swarm/version.rb
+++ b/lib/ruby-openai-swarm/version.rb
@@ -1,3 +1,3 @@
 module OpenAISwarm
-  VERSION = "0.5.1"
+  VERSION = "0.5.2"
 end
diff --git a/spec/lib/ruby-openai-swarm/core_spec.rb b/spec/lib/ruby-openai-swarm/core_spec.rb
new file mode 100644
index 0000000..192b695
--- /dev/null
+++ b/spec/lib/ruby-openai-swarm/core_spec.rb
@@ -0,0 +1,107 @@
+require 'spec_helper'
+
+RSpec.describe OpenAISwarm::Core do
+  let(:client) { instance_double(OpenAI::Client) }
+  let(:agent) { OpenAISwarm::Agent.new(name: "TestAgent", model: "gpt-3.5-turbo") }
+  let(:messages) { [{ role: "user", content: "Hello" }] }
+  let(:metadata) do
+    {
+      generation_name: "test-generation",
+      generation_id: "gen-123",
+      trace_id: "trace-123",
+      trace_name: "test-trace",
+      trace_metadata: { key: "value" },
+      tags: ["test", "metadata"]
+    }
+  end
+
+  describe '#run' do
+    let(:chat_response) do
+      {
+        'choices' => [
+          {
+            'message' => {
+              'content' => 'Hello! How can I help you?',
+              'role' => 'assistant'
+            }
+          }
+        ]
+      }
+    end
+
+    before do
+      allow(client).to receive(:chat).and_return(chat_response)
+    end
+
+    it 'passes metadata to chat completion' do
+      core = described_class.new(client)
+
+      expect(client).to receive(:chat) do |params|
+        expect(params[:parameters][:metadata]).to eq(metadata)
+        chat_response
+      end
+
+      core.run(
+        agent: agent,
+        messages: messages,
+        metadata: metadata
+      )
+    end
+
+    it 'works without metadata' do
+      core = described_class.new(client)
+
+      expect(client).to receive(:chat) do |params|
+        expect(params[:parameters][:metadata]).to be_nil
+        chat_response
+      end
+
+      core.run(
+        agent: agent,
+        messages: messages
+      )
+    end
+  end
+
+  describe '#run_and_stream' do
+    let(:stream_response) do
+      [
+        { 'choices' => [{ 'delta' => { 'content' => 'Hello', 'role' => 'assistant' } }] },
+        { 'choices' => [{ 'delta' => { 'content' => '!' } }] }
+      ]
+    end
+
+    before do
+      allow(client).to receive(:chat).and_yield(stream_response[0], 1).and_yield(stream_response[1], 1)
+    end
+
+    it 'passes metadata to streaming chat completion' do
+      core = described_class.new(client)
+
+      expect(client).to receive(:chat) do |params|
+        expect(params[:parameters][:metadata]).to eq(metadata)
+        stream_response.each { |chunk| params[:parameters][:stream].call(chunk, 1) }
+      end
+
+      core.run_and_stream(
+        agent: agent,
+        messages: messages,
+        metadata: metadata
+      ) { |_chunk| }
+    end
+
+    it 'works without metadata in streaming mode' do
+      core = described_class.new(client)
+
+      expect(client).to receive(:chat) do |params|
+        expect(params[:parameters][:metadata]).to be_nil
+        stream_response.each { |chunk| params[:parameters][:stream].call(chunk, 1) }
+      end
+
+      core.run_and_stream(
+        agent: agent,
+        messages: messages
+      ) { |_chunk| }
+    end
+  end
+end
\ No newline at end of file
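The metadata keys in the spec fixture follow LiteLLM's Langfuse integration (linked in the core.rb comment): LiteLLM reads fields such as `trace_id`, `trace_name`, `generation_name`, and `tags` from the request's `metadata` and forwards them to Langfuse. A sketch of end-to-end usage, assuming the gem's top-level require and a locally running LiteLLM proxy (the URL and env var name below are illustrative, not part of this change):

    require "ruby-openai-swarm"

    # Point the ruby-openai client at a LiteLLM proxy so the `metadata`
    # hash reaches LiteLLM, which reports the trace to Langfuse.
    client = OpenAI::Client.new(
      access_token: ENV.fetch("LITELLM_API_KEY"), # illustrative env var
      uri_base: "http://localhost:4000"           # illustrative proxy URL
    )

    agent = OpenAISwarm::Agent.new(name: "SupportAgent", model: "gpt-3.5-turbo")

    OpenAISwarm::Core.new(client).run(
      agent: agent,
      messages: [{ role: "user", content: "Hello" }],
      metadata: {
        trace_name: "support-flow",
        trace_id: "trace-123",
        tags: ["production"]
      }
    )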