|
| 1 | +"""LangChain + Tavily MCP Example |
| 2 | +
|
| 3 | +Creates a simple research agent that uses the Tavily MCP server |
| 4 | +to search the web and answer questions with relevant links. |
| 5 | +""" |
| 6 | + |
| 7 | +import asyncio |
| 8 | +import logging |
| 9 | +import os |
| 10 | + |
| 11 | +import azure.identity |
| 12 | +from dotenv import load_dotenv |
| 13 | +from langchain.agents import create_agent |
| 14 | +from langchain_core.messages import HumanMessage |
| 15 | +from langchain_mcp_adapters.client import MultiServerMCPClient |
| 16 | +from langchain_openai import ChatOpenAI |
| 17 | +from pydantic import SecretStr |
| 18 | +from rich.logging import RichHandler |
| 19 | + |
# Logging: keep the root logger quiet (WARNING) but render records through Rich;
# our module-specific logger is opted in to INFO.
rich_handler = RichHandler()
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[rich_handler])
logger = logging.getLogger("langchainv1_tavily")
logger.setLevel(logging.INFO)

# Read .env into the process environment, overriding any pre-set shell values
# so the file is the single source of truth for configuration.
load_dotenv(override=True)
| 27 | + |
# Pick the chat-model backend from API_HOST; defaults to GitHub Models.
api_host = os.getenv("API_HOST", "github")

if api_host == "azure":
    # Keyless auth: exchange an Entra ID credential for a bearer-token provider
    # scoped to the Azure Cognitive Services resource.
    token_provider = azure.identity.get_bearer_token_provider(
        azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
    )
    model = ChatOpenAI(
        # Fail fast with a clear KeyError when the deployment name is unset,
        # instead of silently constructing the client with model=None
        # (os.environ.get would return None here).
        model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
        base_url=os.environ["AZURE_OPENAI_ENDPOINT"] + "/openai/v1/",
        # NOTE(review): passes the token-provider *callable* as api_key —
        # confirm langchain-openai/openai accept a callable here; otherwise
        # this should be SecretStr(token_provider()).
        api_key=token_provider,
    )
elif api_host == "github":
    # GitHub Models: authenticated with the caller's GITHUB_TOKEN (required).
    model = ChatOpenAI(
        model=os.getenv("GITHUB_MODEL", "gpt-4o"),
        base_url="https://models.inference.ai.azure.com",
        api_key=SecretStr(os.environ["GITHUB_TOKEN"]),
    )
elif api_host == "ollama":
    # Local Ollama server exposing an OpenAI-compatible API; the key is a
    # placeholder because Ollama ignores it.
    model = ChatOpenAI(
        model=os.getenv("OLLAMA_MODEL", "llama3.1"),
        base_url=os.getenv("OLLAMA_ENDPOINT", "http://localhost:11434/v1"),
        api_key=SecretStr(os.getenv("OLLAMA_API_KEY", "none")),
    )
else:
    # Plain OpenAI: relies on OPENAI_API_KEY being picked up from the environment.
    model = ChatOpenAI(model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"))
| 53 | + |
| 54 | + |
async def run_agent() -> None:
    """Run a Tavily-backed research agent via MCP tools."""
    # The Tavily MCP server authenticates with a bearer token; a missing
    # TAVILY_API_KEY fails fast here with a KeyError.
    bearer = os.environ["TAVILY_API_KEY"]
    tavily_server = {
        "url": "https://mcp.tavily.com/mcp/",
        "transport": "streamable_http",
        "headers": {"Authorization": f"Bearer {bearer}"},
    }
    mcp_client = MultiServerMCPClient({"tavily": tavily_server})

    # Discover the server's tools, then build an agent that may call them.
    agent = create_agent(
        model,
        await mcp_client.get_tools(),
        prompt="You search the web and include relevant links in answers.",
    )

    question = "What's new in Python 3.14? Include relevant links."
    result = await agent.ainvoke({"messages": [HumanMessage(content=question)]})

    # The last message in the returned state carries the agent's final answer.
    print(result["messages"][-1].content)
| 77 | + |
| 78 | + |
def main() -> None:
    """Synchronous entry point: drive the async agent to completion."""
    asyncio.run(run_agent())


# Standard script guard so importing this module does not trigger the agent run.
if __name__ == "__main__":
    main()
0 commit comments