OAuth MCP servers require user interaction for the initial browser-based authentication. This means they are not suitable for fully automated/headless workflows. If you need headless access, check if the MCP provider offers API key authentication as an alternative.
Here’s an example integrating MCP servers with an agent:
examples/01_standalone_sdk/07_mcp_integration.py
Copy
Ask AI
"""Example: give an agent extra tools from local (stdio) MCP servers.

Launches two MCP servers as subprocesses (`mcp-server-fetch` via uvx and
`repomix` via npx), filters the exposed repomix tools down to
``pack_codebase``, runs a two-turn conversation, then prints the raw LLM
messages and the accumulated cost.
"""

import os

from pydantic import SecretStr

from openhands.sdk import (
    LLM,
    Agent,
    Conversation,
    Event,
    LLMConvertibleEvent,
    get_logger,
)
from openhands.sdk.security.llm_analyzer import LLMSecurityAnalyzer
from openhands.sdk.tool import Tool
from openhands.tools.file_editor import FileEditorTool
from openhands.tools.terminal import TerminalTool

logger = get_logger(__name__)

# Configure LLM
# Model name follows the LiteLLM convention: provider/model_name.
api_key = os.getenv("LLM_API_KEY")
assert api_key is not None, "LLM_API_KEY environment variable is not set."
model = os.getenv("LLM_MODEL", "anthropic/claude-sonnet-4-5-20250929")
base_url = os.getenv("LLM_BASE_URL")
llm = LLM(
    usage_id="agent",
    model=model,
    base_url=base_url,
    api_key=SecretStr(api_key),
)

# Built-in tools; the current directory becomes the agent's workspace below.
cwd = os.getcwd()
tools = [
    Tool(name=TerminalTool.name),
    Tool(name=FileEditorTool.name),
]

# Add MCP Tools
# Each entry spawns a local stdio MCP server as a subprocess
# (requires `uvx` and `npx` on PATH).
mcp_config = {
    "mcpServers": {
        "fetch": {"command": "uvx", "args": ["mcp-server-fetch"]},
        "repomix": {"command": "npx", "args": ["-y", "repomix@1.4.2", "--mcp"]},
    }
}

# Agent
agent = Agent(
    llm=llm,
    tools=tools,
    mcp_config=mcp_config,
    # This regex filters out all repomix tools except pack_codebase
    filter_tools_regex="^(?!repomix)(.*)|^repomix.*pack_codebase.*$",
)

llm_messages = []  # collect raw LLM messages


def conversation_callback(event: Event) -> None:
    """Append every LLM-convertible event to ``llm_messages`` as a raw message."""
    if isinstance(event, LLMConvertibleEvent):
        llm_messages.append(event.to_llm_message())


# Conversation
conversation = Conversation(
    agent=agent,
    callbacks=[conversation_callback],
    workspace=cwd,
)
# Route risky actions through an LLM-based security analyzer before execution.
conversation.set_security_analyzer(LLMSecurityAnalyzer())

logger.info("Starting conversation with MCP integration...")
# Turn 1: exercises the `fetch` MCP server plus the file editor.
conversation.send_message(
    "Read https://github.com/OpenHands/OpenHands and write 3 facts "
    "about the project into FACTS.txt."
)
conversation.run()

# Turn 2: follow-up in the same conversation (cleans up the created file).
conversation.send_message("Great! Now delete that file.")
conversation.run()

print("=" * 100)
print("Conversation finished. Got the following LLM messages:")
for i, message in enumerate(llm_messages):
    # Truncate each message to 200 chars to keep the output readable.
    print(f"Message {i}: {str(message)[:200]}")

# Report cost
cost = llm.metrics.accumulated_cost
print(f"EXAMPLE_COST: {cost}")
You can run the example code as-is.
The model name should follow the LiteLLM convention: provider/model_name (e.g., anthropic/claude-sonnet-4-5-20250929, openai/gpt-4o).
The LLM_API_KEY should be the API key for your chosen provider.
ChatGPT Plus/Pro subscribers: You can use LLM.subscription_login() to authenticate with your ChatGPT account and access Codex models without consuming API credits. See the LLM Subscriptions guide for details.
"""Example: connect an agent to a remote MCP server that uses OAuth.

Configures the hosted Notion MCP server with ``"auth": "oauth"``; the
initial run triggers a browser-based login, so this flow is interactive
(not suitable for headless use).
"""

import os

from pydantic import SecretStr

from openhands.sdk import (
    LLM,
    Agent,
    Conversation,
    Event,
    LLMConvertibleEvent,
    get_logger,
)
from openhands.sdk.tool import Tool
from openhands.tools.file_editor import FileEditorTool
from openhands.tools.terminal import TerminalTool

logger = get_logger(__name__)

# Configure LLM
# Model name follows the LiteLLM convention: provider/model_name.
api_key = os.getenv("LLM_API_KEY")
assert api_key is not None, "LLM_API_KEY environment variable is not set."
model = os.getenv("LLM_MODEL", "anthropic/claude-sonnet-4-5-20250929")
base_url = os.getenv("LLM_BASE_URL")
llm = LLM(
    usage_id="agent",
    model=model,
    base_url=base_url,
    api_key=SecretStr(api_key),
)

# NOTE(review): `cwd` is unused in this variant — the Conversation below is
# created without a `workspace` argument; confirm whether it can be dropped.
cwd = os.getcwd()
tools = [
    Tool(
        name=TerminalTool.name,
    ),
    Tool(name=FileEditorTool.name),
]

# Remote MCP server over HTTP; "auth": "oauth" triggers the browser login
# on first connection.
mcp_config = {
    "mcpServers": {"Notion": {"url": "https://mcp.notion.com/mcp", "auth": "oauth"}}
}

agent = Agent(llm=llm, tools=tools, mcp_config=mcp_config)

llm_messages = []  # collect raw LLM messages


def conversation_callback(event: Event) -> None:
    """Append every LLM-convertible event to ``llm_messages`` as a raw message."""
    if isinstance(event, LLMConvertibleEvent):
        llm_messages.append(event.to_llm_message())


# Conversation
conversation = Conversation(
    agent=agent,
    callbacks=[conversation_callback],
)

logger.info("Starting conversation with MCP integration...")
conversation.send_message("Can you search about OpenHands V1 in my notion workspace?")
conversation.run()

print("=" * 100)
print("Conversation finished. Got the following LLM messages:")
for i, message in enumerate(llm_messages):
    # Truncate each message to 200 chars to keep the output readable.
    print(f"Message {i}: {str(message)[:200]}")
You can run the example code as-is.
The model name should follow the LiteLLM convention: provider/model_name (e.g., anthropic/claude-sonnet-4-5-20250929, openai/gpt-4o).
The LLM_API_KEY should be the API key for your chosen provider.
ChatGPT Plus/Pro subscribers: You can use LLM.subscription_login() to authenticate with your ChatGPT account and access Codex models without consuming API credits. See the LLM Subscriptions guide for details.