Skip to content

Commit 4990cfb

Browse files
committed
Add my examples
1 parent 80b7d0c commit 4990cfb

32 files changed

Lines changed: 3575 additions & 2390 deletions

.env.sample

Lines changed: 1 addition & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -1,11 +1,8 @@
1-
# API_HOST can be either azure, ollama, openai, or github:
1+
# API_HOST can be either azure, openai, or github:
22
API_HOST=azure
33
# Configure for Azure:
44
AZURE_OPENAI_ENDPOINT=https://YOUR-AZURE-OPENAI-SERVICE-NAME.openai.azure.com/openai/v1
55
AZURE_OPENAI_CHAT_DEPLOYMENT=YOUR-AZURE-DEPLOYMENT-NAME
6-
# Configure for Ollama:
7-
OLLAMA_ENDPOINT=http://localhost:11434/v1
8-
OLLAMA_MODEL=llama3.1
96
# Configure for OpenAI.com:
107
OPENAI_API_KEY=YOUR-OPENAI-KEY
118
OPENAI_MODEL=gpt-3.5-turbo

.gitattributes

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,3 @@
1+
* text=auto
2+
*.sh text eol=lf
3+
*.ps1 text eol=crlf

README.md

Lines changed: 11 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -168,13 +168,17 @@ You can run the examples in this repository by executing the scripts in the `exa
168168
169169
| Example | Description |
170170
| ------- | ----------- |
171-
| [basic.py](examples/basic.py) | Uses Agent Framework to build a basic informational agent. |
172-
| [tool.py](examples/tool.py) | Uses Agent Framework to build an agent with a single weather tool. |
173-
| [tools.py](examples/tools.py) | Uses Agent Framework to build a weekend planning agent with multiple tools. |
174-
| [supervisor.py](examples/supervisor.py) | Uses Agent Framework with a supervisor orchestrating activity and recipe sub-agents. |
175-
| [magenticone.py](examples/magenticone.py) | Uses Agent Framework to build a MagenticOne agent. |
176-
| [hitl.py](examples/hitl.py) | Uses Agent Framework with human-in-the-loop (HITL) for tool-enabled agents with human feedback. |
177-
| [workflow.py](examples/workflow.py) | Uses Agent Framework to build a workflow-based agent. |
171+
| [agent_basic.py](examples/agent_basic.py) | A basic informational agent. |
172+
| [agent_tool.py](examples/agent_tool.py) | An agent with a single weather tool. |
173+
| [agent_tools.py](examples/agent_tools.py) | A weekend planning agent with multiple tools. |
174+
| [agent_supervisor.py](examples/agent_supervisor.py) | A supervisor orchestrating activity and recipe sub-agents. |
175+
| [workflow_magenticone.py](examples/workflow_magenticone.py) | A MagenticOne multi-agent workflow. |
176+
| [workflow_hitl.py](examples/workflow_hitl.py) | Human-in-the-loop (HITL) for tool-enabled agents with human feedback. |
177+
| [agent_middleware.py](examples/agent_middleware.py) | Agent, chat, and function middleware for logging, timing, and blocking. |
178+
| [agent_mcp_remote.py](examples/agent_mcp_remote.py) | An agent using a remote MCP server (Microsoft Learn) for documentation search. |
179+
| [agent_mcp_local.py](examples/agent_mcp_local.py) | An agent connected to a local MCP server (e.g. for expense logging). |
180+
| [openai_tool_calling.py](examples/openai_tool_calling.py) | Tool calling with the low-level OpenAI SDK, showing manual tool dispatch. |
181+
| [workflow_basic.py](examples/workflow_basic.py) | A workflow-based agent. |
178182
179183
## Resources
180184
examples/agent_basic.py (presumably renamed from examples/basic.py — filename header lost in extraction; the README table maps basic.py to agent_basic.py)

Lines changed: 44 additions & 50 deletions
Original file line numberDiff line numberDiff line change
@@ -1,50 +1,44 @@
1-
import asyncio
import os

from agent_framework import ChatAgent
from agent_framework.openai import OpenAIChatClient
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
from dotenv import load_dotenv
from rich import print

# Load .env settings and pick the chat backend: azure, github, ollama,
# or (fallback) OpenAI.com.
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")

# The Azure path authenticates with Entra ID and needs an async credential;
# kept at module level so main() can close it when finished.
async_credential = None
if API_HOST == "azure":
    async_credential = DefaultAzureCredential()
    token_provider = get_bearer_token_provider(async_credential, "https://cognitiveservices.azure.com/.default")
    client = OpenAIChatClient(
        base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
        api_key=token_provider,
        model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
    )
elif API_HOST == "github":
    client = OpenAIChatClient(
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-5-mini"),
    )
elif API_HOST == "ollama":
    client = OpenAIChatClient(
        base_url=os.environ.get("OLLAMA_ENDPOINT", "http://localhost:11434/v1"),
        api_key="none",
        model_id=os.environ.get("OLLAMA_MODEL", "llama3.1:latest"),
    )
else:
    client = OpenAIChatClient(api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-5-mini"))

# A simple informational agent with no tools attached.
agent = ChatAgent(chat_client=client, instructions="You're an informational agent. Answer questions cheerfully.")


async def main():
    """Ask the agent one question, print the answer, then release credentials."""
    response = await agent.run("Whats weather today in San Francisco?")
    print(response.text)

    if async_credential:
        await async_credential.close()


if __name__ == "__main__":
    asyncio.run(main())
1+
import asyncio
import os

from agent_framework import ChatAgent
from agent_framework.openai import OpenAIChatClient
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
from dotenv import load_dotenv
from rich import print

# Configure the OpenAI-compatible chat client based on the API_HOST
# environment variable: "azure", "github" (default), or anything else,
# which falls through to OpenAI.com.
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")

# Only the Azure path needs an async credential; kept at module level so
# main() can close it when finished.
async_credential = None
if API_HOST == "azure":
    # Authenticate with Entra ID (keyless) instead of an API key.
    async_credential = DefaultAzureCredential()
    token_provider = get_bearer_token_provider(async_credential, "https://cognitiveservices.azure.com/.default")
    client = OpenAIChatClient(
        base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
        api_key=token_provider,
        model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
    )
elif API_HOST == "github":
    client = OpenAIChatClient(
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-5-mini"),
    )
else:
    client = OpenAIChatClient(api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-5-mini"))

# A basic informational agent with no tools attached.
agent = ChatAgent(chat_client=client, instructions="You're an informational agent. Answer questions cheerfully.")


async def main():
    """Run a single query against the agent and print the response text.

    The Azure credential (when one was created) is closed even if the
    run raises, so its underlying HTTP session does not leak.
    """
    try:
        response = await agent.run("Whats weather today in San Francisco?")
        print(response.text)
    finally:
        if async_credential:
            await async_credential.close()


if __name__ == "__main__":
    asyncio.run(main())

examples/agent_mcp_local.py

Lines changed: 61 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,61 @@
1+
import asyncio
import logging
import os
from datetime import datetime

from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.openai import OpenAIChatClient
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
from dotenv import load_dotenv
from rich import print
from rich.logging import RichHandler

# Setup logging: warnings-and-up globally, INFO for this module only.
handler = RichHandler(show_path=False, rich_tracebacks=True, show_level=False)
logging.basicConfig(level=logging.WARNING, handlers=[handler], force=True, format="%(message)s")
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Configure the OpenAI-compatible chat client based on the API_HOST
# environment variable: "azure", "github" (default), or OpenAI.com.
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")
MCP_SERVER_URL = os.getenv("MCP_SERVER_URL", "http://localhost:8000/mcp/")

# Only the Azure path needs an async credential; kept at module level so
# main() can close it when finished.
async_credential = None
if API_HOST == "azure":
    # Authenticate with Entra ID (keyless) instead of an API key.
    async_credential = DefaultAzureCredential()
    token_provider = get_bearer_token_provider(async_credential, "https://cognitiveservices.azure.com/.default")
    client = OpenAIChatClient(
        base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
        api_key=token_provider,
        model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
    )
elif API_HOST == "github":
    client = OpenAIChatClient(
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-5-mini"),
    )
else:
    client = OpenAIChatClient(api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-5-mini"))


async def main() -> None:
    """Run an agent connected to a local MCP server for expense logging.

    The Azure credential (when one was created) is closed even if the
    MCP connection or agent run raises, so its HTTP session does not leak.
    """
    try:
        async with (
            MCPStreamableHTTPTool(name="Expenses MCP Server", url=MCP_SERVER_URL) as mcp_server,
            ChatAgent(
                chat_client=client,
                instructions=f"You help users with tasks using the available tools. Today's date is {datetime.now().strftime('%Y-%m-%d')}.",
                tools=[mcp_server],
            ) as agent,
        ):
            response = await agent.run("yesterday I bought a laptop for $1200 using my visa.")
            print(response.text)
    finally:
        if async_credential:
            await async_credential.close()


if __name__ == "__main__":
    asyncio.run(main())

examples/agent_mcp_remote.py

Lines changed: 59 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,59 @@
1+
import asyncio
import logging
import os

from agent_framework import ChatAgent, MCPStreamableHTTPTool
from agent_framework.openai import OpenAIChatClient
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
from dotenv import load_dotenv
from rich import print
from rich.logging import RichHandler

# Setup logging: warnings-and-up globally, INFO for this module only.
handler = RichHandler(show_path=False, rich_tracebacks=True, show_level=False)
logging.basicConfig(level=logging.WARNING, handlers=[handler], force=True, format="%(message)s")
logger = logging.getLogger(__name__)
logger.setLevel(logging.INFO)

# Configure the OpenAI-compatible chat client based on the API_HOST
# environment variable: "azure", "github" (default), or OpenAI.com.
load_dotenv(override=True)
API_HOST = os.getenv("API_HOST", "github")

# Only the Azure path needs an async credential; kept at module level so
# main() can close it when finished.
async_credential = None
if API_HOST == "azure":
    # Authenticate with Entra ID (keyless) instead of an API key.
    async_credential = DefaultAzureCredential()
    token_provider = get_bearer_token_provider(async_credential, "https://cognitiveservices.azure.com/.default")
    client = OpenAIChatClient(
        base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
        api_key=token_provider,
        model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
    )
elif API_HOST == "github":
    client = OpenAIChatClient(
        base_url="https://models.github.ai/inference",
        api_key=os.environ["GITHUB_TOKEN"],
        model_id=os.getenv("GITHUB_MODEL", "openai/gpt-5-mini"),
    )
else:
    client = OpenAIChatClient(api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-5-mini"))


async def main() -> None:
    """Run an agent that uses a remote MCP server (Microsoft Learn) to answer documentation questions.

    The Azure credential (when one was created) is closed even if the
    MCP connection or agent run raises, so its HTTP session does not leak.
    """
    try:
        async with (
            MCPStreamableHTTPTool(name="Microsoft Learn MCP", url="https://learn.microsoft.com/api/mcp") as mcp_server,
            ChatAgent(
                chat_client=client,
                instructions="You help with Microsoft documentation questions. Use the available tools to search for relevant docs.",
                tools=[mcp_server],
            ) as agent,
        ):
            response = await agent.run("How do I create an Azure storage account using az cli?")
            print(response.text)
    finally:
        if async_credential:
            await async_credential.close()


if __name__ == "__main__":
    asyncio.run(main())

0 commit comments

Comments (0)