Open
Description
Describe the question
I use a custom model (qwen) with `HostedMCPTool` and get the following error output:
agents.exceptions.UserError: Hosted tools are not supported with the ChatCompletions API. Got tool type: <class 'agents.tool.HostedMCPTool'>, tool: HostedMCPTool(tool_config={'type': 'mcp', 'server_label': 'gitmcp', 'server_url': 'https://gitmcp.io/openai/codex', 'require_approval': 'never'}, on_approval_request=None)
Debug information
- Agents SDK version: openai-agents 0.1.0
- Python 3.11
code
# Non-OpenAI model name served through an OpenAI-compatible endpoint.
MODEL_NAME = "qwen-plus" ## use custom model
# BASE_URL / API_KEY are assumed to be defined earlier in the script — TODO confirm.
client = AsyncOpenAI(base_url=BASE_URL, api_key=API_KEY)
# Tracing is turned off here — presumably because no OpenAI platform key is in use; verify.
set_tracing_disabled(disabled=True)
# An alternate approach that would also work:
# PROVIDER = OpenAIProvider(openai_client=client)
# agent = Agent(..., model="some-custom-model")
# Runner.run(agent, ..., run_config=RunConfig(model_provider=PROVIDER))
async def main(verbose: bool, stream: bool):
    """Ask the agent one question against the gitmcp hosted MCP server.

    Args:
        verbose: In non-streaming mode, also print every newly produced run item.
        stream: Stream events as they arrive instead of awaiting the final result.
    """
    # NOTE(review): combining OpenAIChatCompletionsModel with a hosted tool is
    # what raises the reported UserError — hosted tools are Responses-API only.
    agent = Agent(
        name="Assistant",
        model=OpenAIChatCompletionsModel(model=MODEL_NAME, openai_client=client),
        tools=[
            HostedMCPTool(
                tool_config={
                    "type": "mcp",
                    "server_label": "gitmcp",
                    "server_url": "https://gitmcp.io/openai/codex",
                    "require_approval": "never",
                }
            )
        ],
    )
    question = "Which language is this repo written in?"
    if not stream:
        res = await Runner.run(agent, question)
        print(res.final_output)
        # The repository is primarily written in multiple languages, including Rust and TypeScript...
        if verbose:
            for item in res.new_items:
                print(item)
        return
    result = Runner.run_streamed(agent, question)
    print(result)
    async for event in result.stream_events():
        if event.type == "run_item_stream_event":
            print(f"Got event of type {event.item.__class__.__name__}")
    print(f"Done streaming; final result: {result.final_output}")
Expected behavior
The agent should run successfully, calling the hosted MCP tool through the custom (Qwen) model instead of raising `UserError`.