20 changes: 14 additions & 6 deletions lib/ruby-openai-swarm/core.rb
@@ -28,7 +28,7 @@ def initialize(client = nil)
# )
# end

- def get_chat_completion(agent_tracker, history, context_variables, model_override, stream, debug)
+ def get_chat_completion(agent_tracker, history, context_variables, model_override, stream, debug, metadata = nil)
agent = agent_tracker.current_agent
context_variables = context_variables.dup
instructions = agent.instructions.respond_to?(:call) ? agent.instructions.call(context_variables) : agent.instructions
@@ -56,6 +56,11 @@ def get_chat_completion(agent_tracker, history, context_variables, model_overrid
tools: Util.request_tools_excluded(tools, agent_tracker.tracking_agents_tool_name, agent.strategy.prevent_agent_reentry),
}

+ # Add metadata if provided
+ # Add support for LiteLLM observability with Langfuse
+ # See: https://docs.litellm.ai/docs/observability/langfuse_integration
+ create_params[:metadata] = metadata if metadata

# TODO: https://platform.openai.com/docs/guides/function-calling/how-do-functions-differ-from-tools
# create_params[:functions] = tools unless tools.empty?
# create_params[:function_call] = agent.tool_choice if agent.tool_choice
@@ -168,7 +173,7 @@ def handle_tool_calls(tool_calls, active_agent, context_variables, debug)
partial_response
end

- def run(agent:, messages:, context_variables: {}, model_override: nil, stream: false, debug: false, max_turns: Float::INFINITY, execute_tools: true)
+ def run(agent:, messages:, context_variables: {}, model_override: nil, stream: false, debug: false, max_turns: Float::INFINITY, execute_tools: true, metadata: nil)
agent_tracker = OpenAISwarm::Agents::ChangeTracker.new(agent)
if stream
return run_and_stream(
@@ -178,7 +183,8 @@ def run(agent:, messages:, context_variables: {}, model_override: nil, stream: f
model_override: model_override,
debug: debug,
max_turns: max_turns,
- execute_tools: execute_tools
+ execute_tools: execute_tools,
+ metadata: metadata
)
end

@@ -197,7 +203,8 @@ def run(agent:, messages:, context_variables: {}, model_override: nil, stream: f
context_variables,
model_override,
stream,
- debug
+ debug,
+ metadata
)

message = completion.dig('choices', 0, 'message') || {}
@@ -237,7 +244,7 @@ def run(agent:, messages:, context_variables: {}, model_override: nil, stream: f
end

# TODO(Grayson): a lot of copied code here that will be refactored
- def run_and_stream(agent:, messages:, context_variables: {}, model_override: nil, debug: false, max_turns: Float::INFINITY, execute_tools: true)
+ def run_and_stream(agent:, messages:, context_variables: {}, model_override: nil, debug: false, max_turns: Float::INFINITY, execute_tools: true, metadata: nil)
agent_tracker = OpenAISwarm::Agents::ChangeTracker.new(agent)
active_agent = agent
context_variables = context_variables.dup
@@ -255,7 +262,8 @@ def run_and_stream(agent:, messages:, context_variables: {}, model_override: nil
context_variables,
model_override,
true, # stream
- debug
+ debug,
+ metadata
)

yield({ delim: "start" }) if block_given?
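For illustration, a minimal caller-side sketch (not part of this diff) of how the new metadata option might be used. The require name, the LiteLLM proxy URL, and the metadata keys are assumptions based on the spec below and the LiteLLM/Langfuse docs linked in the diff above.

# Hypothetical usage sketch: forwarding Langfuse-style metadata through Core#run.
# Assumes a ruby-openai client pointed at a LiteLLM proxy, which relays the
# metadata to Langfuse.
require "ruby-openai-swarm"

client = OpenAI::Client.new(
  access_token: ENV["OPENAI_API_KEY"],
  uri_base: "http://localhost:4000" # hypothetical LiteLLM proxy address
)

agent = OpenAISwarm::Agent.new(name: "Assistant", model: "gpt-3.5-turbo")
core  = OpenAISwarm::Core.new(client)

core.run(
  agent: agent,
  messages: [{ role: "user", content: "Hello" }],
  metadata: {
    generation_name: "greeting",
    trace_id: "trace-123",
    trace_name: "swarm-demo",
    trace_metadata: { user_id: "42" },
    tags: ["swarm", "langfuse"]
  }
)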
2 changes: 1 addition & 1 deletion lib/ruby-openai-swarm/version.rb
@@ -1,3 +1,3 @@
module OpenAISwarm
- VERSION = "0.5.1"
+ VERSION = "0.5.2"
end
107 changes: 107 additions & 0 deletions spec/lib/ruby-openai-swarm/core_spec.rb
@@ -0,0 +1,107 @@
require 'spec_helper'

RSpec.describe OpenAISwarm::Core do
let(:client) { instance_double(OpenAI::Client) }
let(:agent) { OpenAISwarm::Agent.new(name: "TestAgent", model: "gpt-3.5-turbo") }
let(:messages) { [{ role: "user", content: "Hello" }] }
let(:metadata) do
{
generation_name: "test-generation",
generation_id: "gen-123",
trace_id: "trace-123",
trace_name: "test-trace",
trace_metadata: { key: "value" },
tags: ["test", "metadata"]
}
end

describe '#run' do
let(:chat_response) do
{
'choices' => [
{
'message' => {
'content' => 'Hello! How can I help you?',
'role' => 'assistant'
}
}
]
}
end

before do
allow(client).to receive(:chat).and_return(chat_response)
end

it 'passes metadata to chat completion' do
core = described_class.new(client)

expect(client).to receive(:chat) do |params|
expect(params[:parameters][:metadata]).to eq(metadata)
chat_response
end

core.run(
agent: agent,
messages: messages,
metadata: metadata
)
end

it 'works without metadata' do
core = described_class.new(client)

expect(client).to receive(:chat) do |params|
expect(params[:parameters][:metadata]).to be_nil
chat_response
end

core.run(
agent: agent,
messages: messages
)
end
end

describe '#run_and_stream' do
let(:stream_response) do
[
{ 'choices' => [{ 'delta' => { 'content' => 'Hello', 'role' => 'assistant' } }] },
{ 'choices' => [{ 'delta' => { 'content' => '!' } }] }
]
end

before do
allow(client).to receive(:chat).and_yield(stream_response[0], 1).and_yield(stream_response[1], 1)
end

it 'passes metadata to streaming chat completion' do
core = described_class.new(client)

expect(client).to receive(:chat) do |params|
expect(params[:parameters][:metadata]).to eq(metadata)
stream_response.each { |chunk| params[:parameters][:stream].call(chunk, 1) }
end

core.run_and_stream(
agent: agent,
messages: messages,
metadata: metadata
) { |_chunk| }
end

it 'works without metadata in streaming mode' do
core = described_class.new(client)

expect(client).to receive(:chat) do |params|
expect(params[:parameters][:metadata]).to be_nil
stream_response.each { |chunk| params[:parameters][:stream].call(chunk, 1) }
end

core.run_and_stream(
agent: agent,
messages: messages
) { |_chunk| }
end
end
end
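The same option applies to streaming. A hypothetical sketch, reusing the core and agent from the earlier example; the chunk handling is based on the streamed delta format exercised in the spec above, and the delimiter marker is the { delim: "start" } yield visible in the core.rb diff.

# Hypothetical streaming sketch: metadata also flows through run_and_stream,
# which yields delta chunks plus delimiter markers such as { delim: "start" }.
core.run_and_stream(
  agent: agent,
  messages: [{ role: "user", content: "Hello" }],
  metadata: { trace_name: "swarm-stream-demo", tags: ["streaming"] }
) do |chunk|
  # Skip non-delta yields (e.g. delimiter markers) and print streamed content.
  next unless chunk.is_a?(Hash) && chunk['choices']
  print chunk.dig('choices', 0, 'delta', 'content')
end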