Description
What happened?
This is all running MCP server and I have also validated it via cloude desktop
from typing import Dict
from mcp.server.fastmcp import FastMCP
# Initialize the FastMCP server with a name
mcp = FastMCP("simple_weather")
# Weather data for different topics
WEATHER_DATA = {
"rain": {
"description": "Rain is liquid water in droplets that falls from clouds",
"measurement": "Measured in millimeters or inches",
"effects": "Can cause flooding in heavy amounts",
},
}
# Define the weather info tool using the @mcp.tool decorator
@mcp.tool()
async def get_weather_info(topic: str) -> Dict[str, str]:
"""
A tool to fetch weather-related information based on the topic.
Arguments:
- topic: The weather topic (rain, temperature, wind, etc.)
Returns:
- A dictionary containing the weather description, measurement, and effects
"""
topic = topic.lower() # Normalize topic to lowercase
if topic not in WEATHER_DATA:
return {
"error": f"Unknown weather topic '{topic}'",
"available_topics": list(WEATHER_DATA.keys()),
}
return WEATHER_DATA[topic]
# Run the MCP server when the script is executed
if __name__ == "__main__":
mcp.run()
Now, I want to use it via local client, these things I am doing
running dummy server like : uv run dummy_server.py
running this autogen client
import asyncio
from autogen_ext.tools.mcp import McpWorkbench, StdioServerParams
from autogen_ext.models.openai import OpenAIChatCompletionClient
async def run_chat_with_topic(topic: str):
server_params = StdioServerParams(
command="python",
args=["dummy_server.py"],
read_timeout_seconds=60,
)
workbench = McpWorkbench(server_params=server_params)
all_tools = await workbench.list_tools()
selected_tool = next(
(
tool
for tool in all_tools
if isinstance(tool, dict) and tool.get("name") == "get_weather_info"
),
None,
)
if not selected_tool:
raise ValueError("Tool 'get_weather_info' not found on the MCP server.")
# Call the selected tool's function
# if "function" in selected_tool:
# result = await selected_tool["function"](topic=topic)
# else:
# raise ValueError("The selected tool does not have a callable 'function'.")
result = await workbench.call_tool(selected_tool["name"], {"topic": topic})
model_client = OpenAIChatCompletionClient(model="gpt-3.5-turbo")
from autogen_agentchat.agents import AssistantAgent
from autogen_agentchat.conditions import MaxMessageTermination
from autogen_agentchat.teams import RoundRobinGroupChat
assistant_1 = AssistantAgent(
name="Planner",
model_client=model_client,
system_message="You are an AI weather planner for India.",
tools=[selected_tool],
reflect_on_tool_use=True,
tool_call_summary_format="[Tool]{tool_name} result : {result}",
)
assistant_2 = AssistantAgent(
name="Researcher",
model_client=model_client,
system_message="You are an AI weather researcher for India.",
tools=[selected_tool],
reflect_on_tool_use=True,
tool_call_summary_format="[Tool]{tool_name} result : {result}",
)
agents = [assistant_1, assistant_2]
termination_condition = MaxMessageTermination(max_message_number=len(agents) + 1)
team = RoundRobinGroupChat(
agents=agents,
termination_condition=termination_condition,
)
return await team.run(
task=[{"role": "user", "content": f"Get weather info for '{topic}'"}]
)
if __name__ == "__main__":
topic = "rain"
asyncio.run(run_chat_with_topic(topic))
but I am getting this error
line 36, in run_chat_with_topic
raise ValueError("The selected tool does not have a callable 'function'.")
ValueError: The selected tool does not have a callable 'function'.
this is tool definition but but its all running fine with cloude, cursor
{'name': 'get_weather_info', 'description': '\n A tool to fetch
weather-related information based on the topic.\n\n
Arguments:\n - topic: The weather topic (rain, temperature, wind, etc.)
\n\n Returns:\n - A dictionary containing the weather description,
measurement, and effects\n ',
'parameters': {'type': 'object', 'properties': {'topic':
{'title': 'Topic', 'type': 'string'}},
'required': ['topic'], 'additionalProperties': False}}
seems It don't have any definition of function like : 'function': get_weather_info_function, can you please help me here
these are version I am using here
requires-python = ">=3.11.7"
dependencies = [
"autogen-agentchat>=0.5.5",
"autogen-ext[openai]>=0.5.5",
"httpx>=0.28.1",
"mcp[cli]>=1.7.1",
]
### Which packages was the bug in?
Python AgentChat (autogen-agentchat>=0.4.0)
### AutoGen library version.
Python dev (main branch)
### Other library version.
"autogen-agentchat>=0.5.5", "autogen-ext[openai]>=0.5.5", "httpx>=0.28.1", "mcp[cli]>=1.7.1",
### Model used
all
### Model provider
OpenAI
### Other model provider
_No response_
### Python version
None
### .NET version
None
### Operating system
None