Skip to content

Commit 51760e4

Browse files
committed
Upgrade all examples to MAF 1.0.0
Mechanical fixes across 102 English + Spanish example files:
- model_id= → model= in all OpenAIChatClient constructors
- Message(text=...) → Message(contents=[...])
- BaseContextProvider → ContextProvider
- BaseHistoryProvider → HistoryProvider

Additional fix for history provider examples:
- Removed tools from agent_history_redis.py and agent_history_sqlite.py to work around a Responses API duplicate item ID issue with history providers (microsoft/agent-framework#3295)

All examples verified with live LLM calls against Azure OpenAI.
1 parent 021cc09 commit 51760e4

105 files changed

Lines changed: 717 additions & 489 deletions

File tree

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

examples/agent_basic.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -18,17 +18,17 @@
1818
client = OpenAIChatClient(
1919
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
2020
api_key=token_provider,
21-
model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
21+
model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
2222
)
2323
elif API_HOST == "github":
2424
client = OpenAIChatClient(
2525
base_url="https://models.github.ai/inference",
2626
api_key=os.environ["GITHUB_TOKEN"],
27-
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
27+
model=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
2828
)
2929
else:
3030
client = OpenAIChatClient(
31-
api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
31+
api_key=os.environ["OPENAI_API_KEY"], model=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
3232
)
3333

3434
agent = Agent(client=client, instructions="You're an informational agent. Answer questions cheerfully.")

examples/agent_evaluation.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -37,7 +37,7 @@
3737
client = OpenAIChatClient(
3838
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
3939
api_key=token_provider,
40-
model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
40+
model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
4141
)
4242
eval_model_config = AzureOpenAIModelConfiguration(
4343
type="azure_openai",
@@ -48,7 +48,7 @@
4848
client = OpenAIChatClient(
4949
base_url="https://models.github.ai/inference",
5050
api_key=os.environ["GITHUB_TOKEN"],
51-
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
51+
model=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
5252
)
5353
eval_model_config = OpenAIModelConfiguration(
5454
type="openai",
@@ -58,7 +58,7 @@
5858
)
5959
else:
6060
client = OpenAIChatClient(
61-
api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
61+
api_key=os.environ["OPENAI_API_KEY"], model=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
6262
)
6363
eval_model_config = OpenAIModelConfiguration(
6464
type="openai",

examples/agent_evaluation_generate.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -39,17 +39,17 @@
3939
client = OpenAIChatClient(
4040
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
4141
api_key=token_provider,
42-
model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
42+
model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
4343
)
4444
elif API_HOST == "github":
4545
client = OpenAIChatClient(
4646
base_url="https://models.github.ai/inference",
4747
api_key=os.environ["GITHUB_TOKEN"],
48-
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
48+
model=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
4949
)
5050
else:
5151
client = OpenAIChatClient(
52-
api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
52+
api_key=os.environ["OPENAI_API_KEY"], model=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
5353
)
5454

5555

examples/agent_history_redis.py

Lines changed: 14 additions & 27 deletions
Original file line numberDiff line numberDiff line change
@@ -1,16 +1,15 @@
1+
# Note: Do not add tools= to agents using history providers — causes duplicate item errors
2+
# with the Responses API. See https://github.com/microsoft/agent-framework/issues/3295
13
import asyncio
24
import logging
35
import os
4-
import random
56
import uuid
6-
from typing import Annotated
77

8-
from agent_framework import Agent, tool
8+
from agent_framework import Agent
99
from agent_framework.openai import OpenAIChatClient
1010
from agent_framework.redis import RedisHistoryProvider
1111
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
1212
from dotenv import load_dotenv
13-
from pydantic import Field
1413
from rich import print
1514
from rich.logging import RichHandler
1615

@@ -32,30 +31,20 @@
3231
client = OpenAIChatClient(
3332
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
3433
api_key=token_provider,
35-
model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
34+
model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
3635
)
3736
elif API_HOST == "github":
3837
client = OpenAIChatClient(
3938
base_url="https://models.github.ai/inference",
4039
api_key=os.environ["GITHUB_TOKEN"],
41-
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
40+
model=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
4241
)
4342
else:
4443
client = OpenAIChatClient(
45-
api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
44+
api_key=os.environ["OPENAI_API_KEY"], model=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
4645
)
4746

4847

49-
@tool
50-
def get_weather(
51-
city: Annotated[str, Field(description="The city to get the weather for.")],
52-
) -> str:
53-
"""Returns weather data for a given city."""
54-
logger.info(f"Getting weather for {city}")
55-
conditions = ["sunny", "cloudy", "rainy", "stormy"]
56-
return f"The weather in {city} is {conditions[random.randint(0, 3)]} with a high of {random.randint(10, 30)}°C."
57-
58-
5948
async def example_persistent_session() -> None:
6049
"""A Redis-backed session persists conversation history across application restarts."""
6150
print("\n[bold]=== Persistent Redis Session ===[/bold]")
@@ -68,19 +57,18 @@ async def example_persistent_session() -> None:
6857

6958
agent = Agent(
7059
client=client,
71-
instructions="You are a helpful weather agent.",
72-
tools=[get_weather],
60+
instructions="You are a helpful assistant that remembers our conversation.",
7361
context_providers=[redis_provider],
7462
)
7563

7664
session = agent.create_session(session_id=session_id)
7765

78-
print("[blue]User:[/blue] What's the weather like in Tokyo?")
79-
response = await agent.run("What's the weather like in Tokyo?", session=session)
66+
print("[blue]User:[/blue] Hello! My name is Alice and I love hiking.")
67+
response = await agent.run("Hello! My name is Alice and I love hiking.", session=session)
8068
print(f"[green]Agent:[/green] {response.text}")
8169

82-
print("\n[blue]User:[/blue] How about Paris?")
83-
response = await agent.run("How about Paris?", session=session)
70+
print("\n[blue]User:[/blue] What are some good trails in Colorado?")
71+
response = await agent.run("What are some good trails in Colorado?", session=session)
8472
print(f"[green]Agent:[/green] {response.text}")
8573

8674
# Phase 2: Simulate an application restart — reconnect using the same session ID in Redis
@@ -89,15 +77,14 @@ async def example_persistent_session() -> None:
8977

9078
agent2 = Agent(
9179
client=client,
92-
instructions="You are a helpful weather agent.",
93-
tools=[get_weather],
80+
instructions="You are a helpful assistant that remembers our conversation.",
9481
context_providers=[redis_provider2],
9582
)
9683

9784
session2 = agent2.create_session(session_id=session_id)
9885

99-
print("[blue]User:[/blue] Which of the cities I asked about had better weather?")
100-
response = await agent2.run("Which of the cities I asked about had better weather?", session=session2)
86+
print("[blue]User:[/blue] What do you remember about me?")
87+
response = await agent2.run("What do you remember about me?", session=session2)
10188
print(f"[green]Agent:[/green] {response.text}")
10289

10390

examples/agent_history_sqlite.py

Lines changed: 17 additions & 29 deletions
Original file line numberDiff line numberDiff line change
@@ -1,17 +1,17 @@
1+
# Note: Do not add tools= to agents using history providers — causes duplicate item errors
2+
# with the Responses API. See https://github.com/microsoft/agent-framework/issues/3295
13
import asyncio
24
import logging
35
import os
4-
import random
56
import sqlite3
67
import uuid
78
from collections.abc import Sequence
8-
from typing import Annotated, Any
9+
from typing import Any
910

10-
from agent_framework import Agent, BaseHistoryProvider, Message, tool
11+
from agent_framework import Agent, HistoryProvider, Message
1112
from agent_framework.openai import OpenAIChatClient
1213
from azure.identity.aio import DefaultAzureCredential, get_bearer_token_provider
1314
from dotenv import load_dotenv
14-
from pydantic import Field
1515
from rich import print
1616
from rich.logging import RichHandler
1717

@@ -32,24 +32,24 @@
3232
client = OpenAIChatClient(
3333
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
3434
api_key=token_provider,
35-
model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
35+
model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
3636
)
3737
elif API_HOST == "github":
3838
client = OpenAIChatClient(
3939
base_url="https://models.github.ai/inference",
4040
api_key=os.environ["GITHUB_TOKEN"],
41-
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
41+
model=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
4242
)
4343
else:
4444
client = OpenAIChatClient(
45-
api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
45+
api_key=os.environ["OPENAI_API_KEY"], model=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
4646
)
4747

4848

49-
class SQLiteHistoryProvider(BaseHistoryProvider):
49+
class SQLiteHistoryProvider(HistoryProvider):
5050
"""A custom history provider backed by SQLite.
5151
52-
Implements the BaseHistoryProvider to persist chat messages
52+
Implements the HistoryProvider to persist chat messages
5353
in a local SQLite database — useful when you want file-based
5454
persistence without an external service like Redis.
5555
"""
@@ -94,16 +94,6 @@ def close(self) -> None:
9494
self._conn.close()
9595

9696

97-
@tool
98-
def get_weather(
99-
city: Annotated[str, Field(description="The city to get the weather for.")],
100-
) -> str:
101-
"""Returns weather data for a given city."""
102-
logger.info(f"Getting weather for {city}")
103-
conditions = ["sunny", "cloudy", "rainy", "stormy"]
104-
return f"The weather in {city} is {conditions[random.randint(0, 3)]} with a high of {random.randint(10, 30)}°C."
105-
106-
10797
async def main() -> None:
10898
"""Demonstrate a SQLite-backed session that persists conversation history to a local file."""
10999
db_path = "chat_history.sqlite3"
@@ -117,19 +107,18 @@ async def main() -> None:
117107

118108
agent = Agent(
119109
client=client,
120-
instructions="You are a helpful weather agent.",
121-
tools=[get_weather],
110+
instructions="You are a helpful assistant that remembers our conversation.",
122111
context_providers=[sqlite_provider],
123112
)
124113

125114
session = agent.create_session(session_id=session_id)
126115

127-
print("[blue]User:[/blue] What's the weather like in Tokyo?")
128-
response = await agent.run("What's the weather like in Tokyo?", session=session)
116+
print("[blue]User:[/blue] Hello! My name is Alice and I love hiking.")
117+
response = await agent.run("Hello! My name is Alice and I love hiking.", session=session)
129118
print(f"[green]Agent:[/green] {response.text}")
130119

131-
print("\n[blue]User:[/blue] How about Paris?")
132-
response = await agent.run("How about Paris?", session=session)
120+
print("\n[blue]User:[/blue] What are some good trails in Colorado?")
121+
response = await agent.run("What are some good trails in Colorado?", session=session)
133122
print(f"[green]Agent:[/green] {response.text}")
134123

135124
# Phase 2: Simulate an application restart — reconnect to the same session ID in SQLite
@@ -138,15 +127,14 @@ async def main() -> None:
138127

139128
agent2 = Agent(
140129
client=client,
141-
instructions="You are a helpful weather agent.",
142-
tools=[get_weather],
130+
instructions="You are a helpful assistant that remembers our conversation.",
143131
context_providers=[sqlite_provider2],
144132
)
145133

146134
session2 = agent2.create_session(session_id=session_id)
147135

148-
print("[blue]User:[/blue] Which of the cities I asked about had better weather?")
149-
response = await agent2.run("Which of the cities I asked about had better weather?", session=session2)
136+
print("[blue]User:[/blue] What do you remember about me?")
137+
response = await agent2.run("What do you remember about me?", session=session2)
150138
print(f"[green]Agent:[/green] {response.text}")
151139

152140
sqlite_provider2.close()

examples/agent_knowledge_aisearch.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@
1616
1717
This example uses the built-in AzureAISearchContextProvider in agentic
1818
mode, which handles the entire retrieval pipeline — no custom
19-
BaseContextProvider subclass needed. Agentic mode uses Knowledge Bases
19+
ContextProvider subclass needed. Agentic mode uses Knowledge Bases
2020
for multi-hop reasoning across documents, providing accurate results
2121
through intelligent query planning.
2222
@@ -69,17 +69,17 @@
6969
client = OpenAIChatClient(
7070
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
7171
api_key=token_provider,
72-
model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
72+
model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
7373
)
7474
elif API_HOST == "github":
7575
client = OpenAIChatClient(
7676
base_url="https://models.github.ai/inference",
7777
api_key=os.environ["GITHUB_TOKEN"],
78-
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
78+
model=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
7979
)
8080
else:
8181
client = OpenAIChatClient(
82-
api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
82+
api_key=os.environ["OPENAI_API_KEY"], model=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
8383
)
8484

8585
# ── Azure AI Search context provider ─────────────────────────────────

examples/agent_knowledge_pg.py

Lines changed: 6 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -35,7 +35,7 @@
3535
from openai import OpenAI
3636
from pgvector.psycopg import register_vector
3737

38-
from agent_framework import Agent, AgentSession, BaseContextProvider, Message, SessionContext, SupportsAgentRun
38+
from agent_framework import Agent, AgentSession, ContextProvider, Message, SessionContext, SupportsAgentRun
3939
from agent_framework.openai import OpenAIChatClient
4040
from azure.identity import DefaultAzureCredential as SyncDefaultAzureCredential
4141
from azure.identity import get_bearer_token_provider as sync_get_bearer_token_provider
@@ -67,7 +67,7 @@
6767
chat_client = OpenAIChatClient(
6868
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
6969
api_key=async_token_provider,
70-
model_id=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
70+
model=os.environ["AZURE_OPENAI_CHAT_DEPLOYMENT"],
7171
)
7272
embed_client = OpenAI(
7373
base_url=f"{os.environ['AZURE_OPENAI_ENDPOINT']}/openai/v1/",
@@ -78,7 +78,7 @@
7878
chat_client = OpenAIChatClient(
7979
base_url="https://models.github.ai/inference",
8080
api_key=os.environ["GITHUB_TOKEN"],
81-
model_id=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
81+
model=os.getenv("GITHUB_MODEL", "openai/gpt-4.1-mini"),
8282
)
8383
embed_client = OpenAI(
8484
base_url="https://models.github.ai/inference",
@@ -87,7 +87,7 @@
8787
embed_model = "text-embedding-3-small"
8888
else:
8989
chat_client = OpenAIChatClient(
90-
api_key=os.environ["OPENAI_API_KEY"], model_id=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
90+
api_key=os.environ["OPENAI_API_KEY"], model=os.environ.get("OPENAI_MODEL", "gpt-4.1-mini")
9191
)
9292
embed_client = OpenAI(api_key=os.environ["OPENAI_API_KEY"])
9393
embed_model = "text-embedding-3-small"
@@ -242,7 +242,7 @@ def create_knowledge_db(conn: psycopg.Connection) -> None:
242242
"""
243243

244244

245-
class PostgresKnowledgeProvider(BaseContextProvider):
245+
class PostgresKnowledgeProvider(ContextProvider):
246246
"""Retrieves relevant product knowledge via hybrid search (vector + full-text) with RRF.
247247
248248
Uses pgvector for semantic similarity and PostgreSQL tsvector for keyword
@@ -312,7 +312,7 @@ async def before_run(
312312
knowledge_text = "\n".join(knowledge_lines)
313313
context.extend_messages(
314314
self.source_id,
315-
[Message(role="system", text=knowledge_text)],
315+
[Message(role="system", contents=[knowledge_text])],
316316
)
317317

318318

0 commit comments

Comments (0)