Merged
15 changes: 4 additions & 11 deletions agents/agentframework_learn.py
@@ -1,5 +1,3 @@
-from __future__ import annotations
-
import asyncio
import logging
import os
@@ -14,17 +12,14 @@

# Configure logging
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("learn_mcp_lang")
logger = logging.getLogger("agentframework_learn")
logger.setLevel(logging.INFO)

# Load environment variables
load_dotenv(override=True)

-# Constants
-LEARN_MCP_URL = "https://learn.microsoft.com/api/mcp"
-
# Configure chat client based on API_HOST
API_HOST = os.getenv("API_HOST", "github")

if API_HOST == "azure":
    client = AzureOpenAIChatClient(
        credential=DefaultAzureCredential(),
@@ -50,15 +45,13 @@
    )


-async def http_mcp_example() -> None:
+async def http_mcp_example():
"""
Demonstrate MCP integration with Microsoft Learn documentation.

Creates an agent that can answer questions about Microsoft documentation
using the Microsoft Learn MCP server.
"""
    async with (
-        MCPStreamableHTTPTool(name="Microsoft Learn MCP", url=LEARN_MCP_URL) as mcp_server,
+        MCPStreamableHTTPTool(name="Microsoft Learn MCP", url="https://learn.microsoft.com/api/mcp") as mcp_server,
        ChatAgent(
            chat_client=client,
            name="DocsAgent",
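The remainder of this file's diff is collapsed. For orientation only, a minimal sketch of how DocsAgent is typically driven inside the async with block, modeled on the run block of agentframework_tavily.py below (the query string is illustrative, not the file's actual code):

        # Sketch, not the collapsed code: drive the agent with one query.
        query = "Does Azure AI Foundry support MCP tools?"  # illustrative query
        result = await agent.run(query, tools=mcp_server)
        print(result)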
69 changes: 69 additions & 0 deletions agents/agentframework_tavily.py
@@ -0,0 +1,69 @@
import asyncio
import logging
import os

from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.azure import AzureOpenAIChatClient
from agent_framework.openai import OpenAIChatClient
from azure.identity import DefaultAzureCredential
from dotenv import load_dotenv
from rich import print
from rich.logging import RichHandler

# Configure logging
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("agentframework_tavily")
logger.setLevel(logging.INFO)

# Load environment variables
load_dotenv(override=True)

# Configure chat client based on API_HOST
API_HOST = os.getenv("API_HOST", "github")
if API_HOST == "azure":
client = AzureOpenAIChatClient(
credential=DefaultAzureCredential(),
deployment_name=os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT"),
endpoint=os.environ.get("AZURE_OPENAI_ENDPOINT"),
api_version=os.environ.get("AZURE_OPENAI_VERSION"),
)
elif API_HOST == "github":
client = OpenAIChatClient(
base_url="https://models.github.ai/inference",
api_key=os.environ["GITHUB_TOKEN"],
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4o"),
)
elif API_HOST == "ollama":
client = OpenAIChatClient(
base_url=os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434/v1"),
api_key="none",
model_id=os.environ.get("OLLAMA_MODEL", "llama3.1:latest"),
)
else:
client = OpenAIChatClient(
api_key=os.environ.get("OPENAI_API_KEY"), model_id=os.environ.get("OPENAI_MODEL", "gpt-4o")
)


async def http_mcp_example():
"""
Creates an agent that can search the web using the Tavily MCP server.
"""

tavily_key = os.environ["TAVILY_API_KEY"]
headers = {"Authorization": f"Bearer {tavily_key}"}
async with (
MCPStreamableHTTPTool(name="Tavily MCP", url="https://mcp.tavily.com/mcp/", headers=headers) as mcp_server,
ChatAgent(
chat_client=client,
name="WebSearchAgent",
instructions="You search the web with Tavily and provide concise answers with links.",
) as agent,
):
query = "What's new in Python 3.14? Include relevant links."
result = await agent.run(query, tools=mcp_server)
print(result)


if __name__ == "__main__":
    asyncio.run(http_mcp_example())
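Since the MCP tool and agent are opened once by the async with block, the same open session can presumably serve several queries before the contexts close; a minimal sketch under that assumption (the follow-up query is illustrative):

        # Still inside the async with block; assumes agent.run may be
        # called repeatedly on one open MCP session (not shown in this PR).
        followup = "Summarize the Python 3.14 release notes in three bullets."
        result = await agent.run(followup, tools=mcp_server)
        print(result)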
4 changes: 2 additions & 2 deletions agents/langchainv1_http.py
@@ -14,7 +14,8 @@

# Configure logging
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("itinerario_lang")
logger = logging.getLogger("langchainv1_http")
logger.setLevel(logging.INFO)

# Load environment variables
load_dotenv(override=True)
@@ -88,5 +89,4 @@ def main() -> None:


if __name__ == "__main__":
-    logger.setLevel(logging.INFO)
    main()
84 changes: 84 additions & 0 deletions agents/langchainv1_tavily.py
@@ -0,0 +1,84 @@
"""LangChain + Tavily MCP Example

Creates a simple research agent that uses the Tavily MCP server
to search the web and answer questions with relevant links.
"""

import asyncio
import logging
import os

import azure.identity
from dotenv import load_dotenv
from langchain.agents import create_agent
from langchain_core.messages import HumanMessage
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_openai import ChatOpenAI
from pydantic import SecretStr
from rich.logging import RichHandler

# Configure logging
logging.basicConfig(level=logging.WARNING, format="%(message)s", datefmt="[%X]", handlers=[RichHandler()])
logger = logging.getLogger("langchainv1_tavily")
logger.setLevel(logging.INFO)

# Load environment variables
load_dotenv(override=True)

api_host = os.getenv("API_HOST", "github")
Copilot AI commented on Dec 18, 2025:

The variable name api_host should follow Python naming conventions for constants that are module-level configuration values. Since this value is determined at module load time and used to configure the model, it should be named API_HOST (all uppercase) to be consistent with other similar files in the codebase (e.g., langchainv1_http.py line 27, agentframework_tavily.py line 22).
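A minimal sketch of the reviewer's suggested rename (illustrative, not part of this PR's committed diff):

API_HOST = os.getenv("API_HOST", "github")  # uppercase, matching the other example files

if API_HOST == "azure":
    ...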

if api_host == "azure":
token_provider = azure.identity.get_bearer_token_provider(
azure.identity.DefaultAzureCredential(), "https://cognitiveservices.azure.com/.default"
)
model = ChatOpenAI(
model=os.environ.get("AZURE_OPENAI_CHAT_DEPLOYMENT"),
base_url=os.environ["AZURE_OPENAI_ENDPOINT"] + "/openai/v1/",
api_key=token_provider,
)
elif api_host == "github":
model = ChatOpenAI(
model=os.getenv("GITHUB_MODEL", "gpt-4o"),
base_url="https://models.inference.ai.azure.com",
api_key=SecretStr(os.environ["GITHUB_TOKEN"]),
)
elif api_host == "ollama":
model = ChatOpenAI(
model=os.environ.get("OLLAMA_MODEL", "llama3.1"),
base_url=os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434/v1"),
api_key=SecretStr(os.environ.get("OLLAMA_API_KEY", "none")),
)
else:
model = ChatOpenAI(model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"))
Copilot AI commented on Dec 18, 2025 (on lines +34 to +52):

The variable name model is inconsistent with similar files in the codebase. For example, langchainv1_http.py uses base_model for the same purpose. Using a more specific name like base_model or llm would improve code clarity and consistency across the codebase.
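And a minimal sketch of this second suggestion (illustrative, not committed):

base_model = ChatOpenAI(model=os.getenv("OPENAI_MODEL", "gpt-4o-mini"))
# ...later, in run_agent():
agent = create_agent(base_model, tools, prompt="You search the web and include relevant links in answers.")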


async def run_agent() -> None:
"""Run a Tavily-backed research agent via MCP tools."""
tavily_key = os.environ["TAVILY_API_KEY"]
client = MultiServerMCPClient(
{
"tavily": {
"url": "https://mcp.tavily.com/mcp/",
"transport": "streamable_http",
"headers": {"Authorization": f"Bearer {tavily_key}"},
}
}
)

# Fetch available tools and create the agent
tools = await client.get_tools()
agent = create_agent(model, tools, prompt="You search the web and include relevant links in answers.")

query = "What's new in Python 3.14? Include relevant links."
response = await agent.ainvoke({"messages": [HumanMessage(content=query)]})

final_response = response["messages"][-1].content
print(final_response)


def main() -> None:
    asyncio.run(run_agent())


if __name__ == "__main__":
    main()
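MultiServerMCPClient takes a mapping of named servers, so a single agent can draw tools from more than one MCP endpoint. A minimal sketch adding the Microsoft Learn MCP server (URL taken from agentframework_learn.py above) alongside Tavily; the "mslearn" name is arbitrary, and whether the Learn endpoint accepts this exact configuration is an assumption:

client = MultiServerMCPClient(
    {
        "tavily": {
            "url": "https://mcp.tavily.com/mcp/",
            "transport": "streamable_http",
            "headers": {"Authorization": f"Bearer {tavily_key}"},
        },
        "mslearn": {  # hypothetical entry; URL from agentframework_learn.py
            "url": "https://learn.microsoft.com/api/mcp",
            "transport": "streamable_http",
        },
    }
)
tools = await client.get_tools()  # tools from both servers, passed to create_agent as before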