Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
44 changes: 44 additions & 0 deletions recipes/python/voice-agents/v1/function-calling/README.md
Original file line number Diff line number Diff line change
@@ -0,0 +1,44 @@
# Function Calling (Voice Agents v1)

Inject tool definitions for the LLM to call during a voice agent conversation.

## What it does

Defines functions (tools) in the agent's think settings that the LLM can invoke during a conversation. When the LLM decides to call a function, a `FunctionCallRequest` event arrives with the function name and arguments. Your code executes the function and sends the result back via `send_function_call_response()`, so the LLM can incorporate the result into its spoken response.

## Key parameters

| Parameter | Value | Description |
|-----------|-------|-------------|
| `think.functions` | `list[dict]` | Array of function definitions (name, description, parameters) |
| `FunctionCallRequest.name` | `str` | Name of the function the LLM wants to call |
| `FunctionCallRequest.id` | `str` | Unique ID to match response to request |
| `send_function_call_response()` | method | Send function result back to the agent |

## Example output

```
Agent configured with function calling (get_weather)
Connection opened
Event: SettingsApplied
Function call: get_weather
Connection closed
```

## Prerequisites

- Python 3.10+
- Set `DEEPGRAM_API_KEY` environment variable
- Install: `pip install -r recipes/python/requirements.txt`

## Run

```bash
python example.py
```

## Test

```bash
pytest example_test.py -v
```
84 changes: 84 additions & 0 deletions recipes/python/voice-agents/v1/function-calling/example.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,84 @@
"""
Recipe: Function Calling (Voice Agents v1)
============================================
Demonstrates injecting tool/function definitions for the LLM to call
during a voice agent conversation.

Functions are defined in the think settings and, when the LLM invokes
one, a FunctionCallRequest event arrives. You respond with
send_function_call_response() so the LLM can incorporate the result
into its next spoken response.
"""

from deepgram import DeepgramClient
from deepgram.agent.v1.types import (
AgentV1FunctionCallRequest, AgentV1SendFunctionCallResponse,
AgentV1Settings, AgentV1SettingsAgent,
AgentV1SettingsAgentListen, AgentV1SettingsAgentListenProvider_V1,
AgentV1SettingsAudio, AgentV1SettingsAudioInput,
)
from deepgram.core.events import EventType
from deepgram.types.speak_settings_v1 import SpeakSettingsV1
from deepgram.types.speak_settings_v1provider import SpeakSettingsV1Provider_Deepgram
from deepgram.types.think_settings_v1 import ThinkSettingsV1
from deepgram.types.think_settings_v1provider import ThinkSettingsV1Provider_OpenAi

# Tool definition injected into the agent's think settings.
# Follows the JSON-Schema-style function format: a name, a human-readable
# description the LLM uses to decide when to call it, and a "parameters"
# object schema describing the arguments the LLM must supply.
WEATHER_FUNCTION = {
    "name": "get_weather",
    "description": "Get the current weather for a location",
    "parameters": {
        # Only one argument: the city to look up. Marked required so the
        # LLM always includes it in the function-call arguments.
        "type": "object",
        "properties": {"location": {"type": "string", "description": "City name"}},
        "required": ["location"],
    },
}


def main():
    """Run a voice agent configured with a callable get_weather function.

    Builds agent settings whose think block includes the WEATHER_FUNCTION
    tool definition, registers event handlers, sends the settings, and
    blocks listening for events. When the LLM decides to call the
    function, an AgentV1FunctionCallRequest arrives and we answer it with
    send_function_call_response() so the LLM can speak the result.

    Requires DEEPGRAM_API_KEY in the environment; raises on missing
    credentials or connection failure (propagated from the SDK).
    """
    client = DeepgramClient()  # reads DEEPGRAM_API_KEY from environment

    with client.agent.v1.connect() as agent:
        settings = AgentV1Settings(
            audio=AgentV1SettingsAudio(
                input=AgentV1SettingsAudioInput(encoding="linear16", sample_rate=24000)
            ),
            agent=AgentV1SettingsAgent(
                listen=AgentV1SettingsAgentListen(
                    provider=AgentV1SettingsAgentListenProvider_V1(type="deepgram", model="nova-3")
                ),
                think=ThinkSettingsV1(
                    provider=ThinkSettingsV1Provider_OpenAi(type="open_ai", model="gpt-4o-mini"),
                    prompt="You are a helpful assistant. Use get_weather when asked about weather.",
                    functions=[WEATHER_FUNCTION],  # <-- function definitions for the LLM
                ),
                speak=SpeakSettingsV1(
                    provider=SpeakSettingsV1Provider_Deepgram(type="deepgram", model="aura-2-thalia-en")
                ),
            ),
        )

        def on_message(message) -> None:
            # FunctionCallRequest: the LLM wants a tool result. Echo a
            # canned payload back, matching the request by its id.
            if isinstance(message, AgentV1FunctionCallRequest):
                print(f"Function call: {message.name}")
                agent.send_function_call_response(
                    AgentV1SendFunctionCallResponse(
                        type="FunctionCallResponse", id=message.id,
                        name=message.name, content='{"temp": "72°F", "condition": "sunny"}',
                    )
                )
            elif isinstance(message, bytes):
                # Raw TTS audio frames from the agent.
                print(f"Received {len(message)} bytes of audio")
            else:
                print(f"Event: {getattr(message, 'type', type(message).__name__)}")

        # Register all handlers BEFORE sending settings: events emitted as
        # soon as the settings are applied (e.g. SettingsApplied, Open)
        # would otherwise race with handler registration and be missed.
        agent.on(EventType.OPEN, lambda _: print("Connection opened"))
        agent.on(EventType.MESSAGE, on_message)
        agent.on(EventType.CLOSE, lambda _: print("Connection closed"))

        agent.send_settings(settings)
        print("Agent configured with function calling (get_weather)")

        agent.start_listening()


if __name__ == "__main__":
    main()
16 changes: 16 additions & 0 deletions recipes/python/voice-agents/v1/function-calling/example_test.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,16 @@
import subprocess
import sys
from pathlib import Path

def test_example_runs():
    """Runs the function-calling example and verifies it produces output.

    Invokes the example with sys.executable rather than a bare "python"
    so it runs under the same interpreter (and virtualenv) as the test
    session — "python" on PATH may be a different or missing interpreter,
    notably on Windows and in venvs exposing only "python3".
    """
    example = Path(__file__).parent / "example.py"
    result = subprocess.run(
        [sys.executable, str(example)],
        capture_output=True,
        text=True,
        timeout=60,  # guard against the example hanging on a live connection
    )
    assert result.returncode == 0, (
        f"Example failed\nSTDOUT: {result.stdout}\nSTDERR: {result.stderr}"
    )
    assert result.stdout.strip(), "Example produced no output"
Loading