from helix.client import Client
from helix.mcp import MCPServer
from helix.providers.openai_client import OpenAIProvider
from fastmcp.tools.tool import FunctionTool
# Initialize MCP server
# Local (in-process) Helix client; shared by the query wrapper below.
client = Client(local=True)
# MCP server registered under the name "helix-mcp", backed by the local client.
# NOTE(review): assumes MCPServer takes (name, client) positionally — confirm against helix.mcp docs.
mcp = MCPServer("helix-mcp", client)
# Add your custom tool to the MCP server
def get_user(connection_id: str, user_id: str):
    """Fetch a user record by ID via the Helix MCP query endpoint.

    Args:
        connection_id: The connection ID for the query.
        user_id: The ID of the user to look up.

    Returns:
        The user object returned by the ``mcp/get_userMcp`` query.
    """
    payload = {"connection_id": connection_id, "data": {"user_id": user_id}}
    return client.query("mcp/get_userMcp", payload)
# Register the custom tool with the MCP server.
mcp.add_tool(FunctionTool.from_function(get_user))

# Start the MCP server in the background so it can serve tool calls
# while the script continues.
mcp.run_bg()

# Enable MCP in the LLM provider.
llm = OpenAIProvider(
    name="openai-llm",
    instructions="You are a helpful assistant with access to user data.",
    model="gpt-4o",
    history=True,
)
llm.enable_mcps("helix-mcp")

# BUG FIX: `user_id` was never defined at module scope (it exists only as a
# parameter of get_user), so the f-string below raised NameError. Bind a
# concrete example ID before interpolating it into the prompt.
user_id = "1"

# The AI can now call your MCP queries.
response = llm.generate(f"What is the name of user with ID {user_id}?")
print(response)