import json
import urllib.request
from typing import Any, Dict

from langchain.tools import BaseTool
from langchain.agents import AgentExecutor, create_openai_functions_agent
from langchain_openai import ChatOpenAI
from langchain.prompts import ChatPromptTemplate

class AlloyMCPClient:
    """Simple client to communicate with an Alloy MCP server."""

    def __init__(self, server_url: str, server_id: str, access_token: str):
        self.base_url = f"{server_url}/mcp/{server_id}/{access_token}"

    def call_tool(self, name: str, arguments: Dict[str, Any]) -> Any:
        """Execute an MCP tool and return the result."""
        payload = {
            "jsonrpc": "2.0",
            "method": "tools/call",
            "params": {"name": name, "arguments": arguments},
            "id": 1,
        }
        req = urllib.request.Request(
            self.base_url,
            data=json.dumps(payload).encode("utf-8"),
            headers={
                "Content-Type": "application/json",
                "Accept": "application/json, text/event-stream",
            },
        )
        try:
            with urllib.request.urlopen(req) as response:
                # The server replies as a server-sent event stream,
                # so pull the JSON-RPC payload out of the "data:" line.
                for line in response.read().decode("utf-8").split("\n"):
                    if line.startswith("data: "):
                        return json.loads(line[6:]).get("result", {})
        except Exception as e:
            print(f"Error calling MCP tool: {e}")
        # Fall back to an empty result if the call failed or no data line was found
        return {}
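
# Optional: a minimal sketch of tool discovery, assuming the Alloy server also
# exposes the standard MCP "tools/list" method. This helper is not part of the
# original example; it is only illustrative and reuses the same request pattern
# as AlloyMCPClient.call_tool to look up tool names such as "list_connectors_alloy".
def list_mcp_tools(client: AlloyMCPClient) -> Any:
    payload = {"jsonrpc": "2.0", "method": "tools/list", "params": {}, "id": 1}
    req = urllib.request.Request(
        client.base_url,
        data=json.dumps(payload).encode("utf-8"),
        headers={
            "Content-Type": "application/json",
            "Accept": "application/json, text/event-stream",
        },
    )
    with urllib.request.urlopen(req) as response:
        for line in response.read().decode("utf-8").split("\n"):
            if line.startswith("data: "):
                # The result typically contains a "tools" array of
                # {name, description, inputSchema} entries.
                return json.loads(line[6:]).get("result", {})
    return {}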

class MCPTool(BaseTool):
    """Wrapper to use MCP tools in LangChain."""

    mcp_client: AlloyMCPClient

    class Config:
        arbitrary_types_allowed = True  # allow the non-Pydantic client field

    def __init__(self, name: str, description: str, mcp_client: AlloyMCPClient):
        # BaseTool is a Pydantic model, so all fields must be set through super().__init__
        super().__init__(name=name, description=description, mcp_client=mcp_client)

    def _run(self, **kwargs) -> str:
        result = self.mcp_client.call_tool(self.name, kwargs)
        return json.dumps(result, indent=2)

# Setup
mcp = AlloyMCPClient(
    server_url="https://mcp.runalloy.com",
    server_id="your-server-id",
    access_token="your-token",
)

# Define available tools
tools = [
    MCPTool("list_connectors_alloy", "List all available platform integrations", mcp),
    MCPTool("execute_action_alloy", "Execute an action on a connected platform", mcp),
]

# Create the agent
llm = ChatOpenAI(model="gpt-4", temperature=0)
prompt = ChatPromptTemplate.from_messages([
    ("system", "You are a helpful assistant that manages platform integrations. "
               "You can list available connectors and execute actions on them."),
    ("human", "{input}"),
    ("placeholder", "{agent_scratchpad}"),
])
agent = create_openai_functions_agent(llm, tools, prompt)
executor = AgentExecutor(agent=agent, tools=tools, verbose=True)

# Use it!
response = executor.invoke({
    "input": "What Slack actions are available?"
})
print(response["output"])
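
# Other example prompts you could try; the agent translates the natural-language
# request into calls to the MCP tools above, so results depend on which
# connectors are actually configured in your Alloy workspace:
# executor.invoke({"input": "List all connectors I have available"})
# executor.invoke({"input": "Send a test message to my Slack workspace"})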