LlamaIndex + MCP Usage Guide¶
The llama-index-tools-mcp package provides tools for integrating MCP functionality into LlamaIndex.
%pip install llama-index-tools-mcp
Using Tools from an MCP Server¶
Using the get_tools_from_mcp_url or aget_tools_from_mcp_url function, you can get a list of FunctionTools from an MCP server.
from llama_index.tools.mcp import (
    get_tools_from_mcp_url,
    aget_tools_from_mcp_url,
)

# async
tools = await aget_tools_from_mcp_url("http://127.0.0.1:8000/mcp")
By default, our BasicMCPClient is used; it executes a command or connects to a URL and returns the available tools.
You can also pass in a custom ClientSession to use a different client.
In addition, you can specify a list of allowed tools to filter the returned tool set.
from llama_index.tools.mcp import BasicMCPClient

client = BasicMCPClient("http://127.0.0.1:8000/mcp")

tools = await aget_tools_from_mcp_url(
    "http://127.0.0.1:8000/mcp",
    client=client,
    allowed_tools=["tool1", "tool2"],
)
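The returned FunctionTools can then be used like any other LlamaIndex tools. Below is a minimal sketch (not from the original docs) of handing them to an agent; it assumes llama-index-llms-openai is installed, an OpenAI API key is configured, and the model name is purely illustrative.

from llama_index.core.agent.workflow import FunctionAgent
from llama_index.llms.openai import OpenAI  # assumption: any LlamaIndex LLM works here

# Hand the MCP-derived tools to an agent like any other FunctionTool list.
agent = FunctionAgent(
    tools=tools,
    llm=OpenAI(model="gpt-4o-mini"),
    system_prompt="Use the available MCP tools to answer the user's questions.",
)

response = await agent.run("What can you do for me?")
print(response)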
Converting a Workflow to an MCP Server¶
You can also expose a LlamaIndex Workflow as an MCP server using the workflow_as_mcp utility:
from llama_index.core.workflow import (
    Context,
    Workflow,
    Event,
    StartEvent,
    StopEvent,
    step,
)
from llama_index.tools.mcp.utils import workflow_as_mcp


class RunEvent(StartEvent):
    msg: str


class InfoEvent(Event):
    msg: str


class LoudWorkflow(Workflow):
    """Useful for converting strings to uppercase and making them louder."""

    @step
    def step_one(self, ctx: Context, ev: RunEvent) -> StopEvent:
        ctx.write_event_to_stream(InfoEvent(msg="Hello, world!"))
        return StopEvent(result=ev.msg.upper() + "!")


workflow = LoudWorkflow()

mcp = workflow_as_mcp(workflow)
This code will automatically generate a FastMCP server that will:
- Use the workflow class name as the tool name
- Use our custom RunEvent as the typed input to the tool
- Automatically use the SSE stream for streaming JSON dumps of the workflow event stream
If this code lives in a script called script.py, you can launch the MCP server with:
mcp dev script.py
Or use the other commands documented in the MCP CLI README.
Note that to launch from the CLI, you may need to install the MCP CLI first:
pip install "mcp[cli]"
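Alternatively, since the text above describes the generated object as a FastMCP server, a minimal sketch (an assumption, not from the original text) is to run it directly from the script via FastMCP's run() method:

# Hedged sketch: run the generated FastMCP server directly instead of using the mcp CLI.
# The "stdio" transport is an assumed choice; FastMCP also supports other transports.
if __name__ == "__main__":
    mcp.run(transport="stdio")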
You can further customize the FastMCP server by passing extra arguments to the workflow_as_mcp function (a sketch follows the list below):
- workflow_name: The name of the workflow. Defaults to the class name.
- workflow_description: The description of the workflow. Defaults to the class docstring.
- start_event_model: The event model to use for the start event. You can either use a custom StartEvent class in your workflow, or pass in your own pydantic model here to define the inputs to the workflow.
- **fastmcp_init_kwargs: Any extra arguments to pass to the FastMCP() server constructor.
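For example, here is a minimal sketch of supplying these parameters, reusing the LoudWorkflow and RunEvent defined above (the specific values are illustrative assumptions):

# Sketch: customize the generated FastMCP server using the parameters listed above.
mcp = workflow_as_mcp(
    workflow,
    workflow_name="loud_workflow",  # illustrative; defaults to the class name
    workflow_description="Uppercases a message and adds an exclamation mark.",
    start_event_model=RunEvent,  # the typed input model for the tool
)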
MCP Client Usage¶
The BasicMCPClient provides broader access to MCP servers: it supports several transports and exposes tools, resources, and prompts:
from llama_index.tools.mcp import BasicMCPClient
# Connect to an MCP server using different transports
http_client = BasicMCPClient("https://example.com/mcp") # Streamable HTTP
sse_client = BasicMCPClient("https://example.com/sse") # Server-Sent Events
local_client = BasicMCPClient("python", args=["server.py"]) # stdio
# List available tools
tools = await http_client.list_tools()
# Call a tool
result = await http_client.call_tool("calculate", {"x": 5, "y": 10})
# List available resources
resources = await http_client.list_resources()
# Read a resource
content, mime_type = await http_client.read_resource("config://app")
# List available prompts
prompts = await http_client.list_prompts()
# Get a prompt
prompt_result = await http_client.get_prompt("greet", {"name": "World"})
OAuth Authentication¶
BasicMCPClient also supports OAuth authentication for connecting to protected MCP servers:
from llama_index.tools.mcp import BasicMCPClient

# Simple authentication with in-memory token storage
client = BasicMCPClient.with_oauth(
    "https://api.example.com/mcp",
    client_name="My App",
    redirect_uris=["http://localhost:3000/callback"],
    # Function to handle the redirect URL (e.g., open a browser)
    redirect_handler=lambda url: print(f"Please visit: {url}"),
    # Function to get the authorization code from the user
    callback_handler=lambda: (input("Enter the code: "), None),
)

# Use the authenticated client
tools = await client.list_tools()
By default, if no token_storage argument is provided, the client uses in-memory token storage. You can pass a custom TokenStorage instance to use a different storage scheme.
The example below shows the default in-memory token storage implementation:
from llama_index.tools.mcp import BasicMCPClient
from mcp.client.auth import TokenStorage
from mcp.shared.auth import OAuthToken, OAuthClientInformationFull
from typing import Optional


class DefaultInMemoryTokenStorage(TokenStorage):
    """
    Simple in-memory token storage implementation for OAuth authentication.

    This is the default storage used when none is provided to with_oauth().
    Not suitable for production use across restarts as tokens are only stored
    in memory.
    """

    def __init__(self):
        self._tokens: Optional[OAuthToken] = None
        self._client_info: Optional[OAuthClientInformationFull] = None

    async def get_tokens(self) -> Optional[OAuthToken]:
        """Get the stored OAuth tokens."""
        return self._tokens

    async def set_tokens(self, tokens: OAuthToken) -> None:
        """Store OAuth tokens."""
        self._tokens = tokens

    async def get_client_info(self) -> Optional[OAuthClientInformationFull]:
        """Get the stored client information."""
        return self._client_info

    async def set_client_info(
        self, client_info: OAuthClientInformationFull
    ) -> None:
        """Store client information."""
        self._client_info = client_info


# Use custom storage
client = BasicMCPClient.with_oauth(
    "https://api.example.com/mcp",
    client_name="My App",
    redirect_uris=["http://localhost:3000/callback"],
    redirect_handler=lambda url: print(f"Please visit: {url}"),
    callback_handler=lambda: (input("Enter the code: "), None),
    token_storage=DefaultInMemoryTokenStorage(),
)
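To illustrate a different storage scheme, below is a hedged sketch (not from the original docs) of a file-backed TokenStorage so that tokens survive process restarts. It assumes OAuthToken and OAuthClientInformationFull are pydantic models, as they are in the mcp package.

from pathlib import Path
from typing import Optional

from mcp.client.auth import TokenStorage
from mcp.shared.auth import OAuthToken, OAuthClientInformationFull


class FileTokenStorage(TokenStorage):
    """Persist OAuth tokens and client information as JSON files on disk."""

    def __init__(self, directory: str = ".mcp_auth"):
        self._dir = Path(directory)
        self._dir.mkdir(parents=True, exist_ok=True)

    async def get_tokens(self) -> Optional[OAuthToken]:
        path = self._dir / "tokens.json"
        if not path.exists():
            return None
        return OAuthToken.model_validate_json(path.read_text())

    async def set_tokens(self, tokens: OAuthToken) -> None:
        (self._dir / "tokens.json").write_text(tokens.model_dump_json())

    async def get_client_info(self) -> Optional[OAuthClientInformationFull]:
        path = self._dir / "client_info.json"
        if not path.exists():
            return None
        return OAuthClientInformationFull.model_validate_json(path.read_text())

    async def set_client_info(
        self, client_info: OAuthClientInformationFull
    ) -> None:
        (self._dir / "client_info.json").write_text(client_info.model_dump_json())

Such a class could then be passed as token_storage=FileTokenStorage() to BasicMCPClient.with_oauth(), in the same way the default storage is passed above.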