Create powerful AI workflows by connecting multiple MCP servers, including Supabase, Firecrawl Deep Research, and Firecrawl Web Search, for enhanced automation capabilities in Klavis AI.
The official Supabase MCP server
A personal research assistant that analyzes sources across the web, based on Firecrawl
Advanced web crawling, scraping, and search capabilities powered by Firecrawl
Follow these steps to connect LangChain to these MCP servers
Sign up for Klavis AI to access our MCP server management platform.
Add your desired MCP servers to LangChain and configure authentication settings.
Verify your connections work correctly and start using your enhanced AI capabilities.
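Before running the example, make sure the required API keys are available in your environment. A minimal sketch of that check (the variable names match the code below; adapt it if you store credentials differently):

import os

# The example below reads both keys from the environment.
for var in ("KLAVIS_API_KEY", "OPENAI_API_KEY"):
    if not os.getenv(var):
        raise RuntimeError(f"Missing required environment variable: {var}")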
import os
import asyncio
from klavis import Klavis
from klavis.types import McpServerName, ConnectionType
from langchain_mcp_adapters.client import MultiServerMCPClient
from langgraph.prebuilt import create_react_agent
from langchain_openai import ChatOpenAI
# Initialize clients
klavis_client = Klavis(api_key=os.getenv("KLAVIS_API_KEY"))
llm = ChatOpenAI(model="gpt-4o-mini", api_key=os.getenv("OPENAI_API_KEY"))
# Create a Supabase MCP server instance for this user
supabase_mcp_instance = klavis_client.mcp_server.create_server_instance(
    server_name=McpServerName.SUPABASE,
    user_id="1234",
    platform_name="Klavis",
    connection_type=ConnectionType.STREAMABLE_HTTP,
)
# Create a Firecrawl Deep Research MCP server instance
firecrawl_deep_research_mcp_instance = klavis_client.mcp_server.create_server_instance(
    server_name=McpServerName.FIRECRAWL_DEEP_RESEARCH,
    user_id="1234",
    platform_name="Klavis",
    connection_type=ConnectionType.STREAMABLE_HTTP,
)
# Create a Firecrawl Web Search MCP server instance
firecrawl_web_search_mcp_instance = klavis_client.mcp_server.create_server_instance(
    server_name=McpServerName.FIRECRAWL_WEB_SEARCH,
    user_id="1234",
    platform_name="Klavis",
    connection_type=ConnectionType.STREAMABLE_HTTP,
)
# Connect LangChain to all three MCP servers over Streamable HTTP
mcp_client = MultiServerMCPClient({
    "supabase": {
        "transport": "streamable_http",
        "url": supabase_mcp_instance.server_url
    },
    "firecrawl_deep_research": {
        "transport": "streamable_http",
        "url": firecrawl_deep_research_mcp_instance.server_url
    },
    "firecrawl_web_search": {
        "transport": "streamable_http",
        "url": firecrawl_web_search_mcp_instance.server_url
    }
})
# Discover the tools exposed by the connected MCP servers
tools = asyncio.run(mcp_client.get_tools())
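# Optional sanity check (a minimal sketch): print each discovered tool name
# to confirm all three servers are connected and exposing tools.
for tool in tools:
    print(tool.name)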
# Build a ReAct agent that can call any of the MCP tools
agent = create_react_agent(
    model=llm,
    tools=tools,
)
# Invoke the agent with a user query
response = asyncio.run(agent.ainvoke({
    "messages": [{"role": "user", "content": "Your query here"}]
}))
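As a usage example, replace the placeholder query with a concrete task. A minimal sketch, assuming the agent defined above (the query text is illustrative only):

# Illustrative query that exercises the web search, deep research, and Supabase tools
example_response = asyncio.run(agent.ainvoke({
    "messages": [{
        "role": "user",
        "content": "Research recent MCP adoption trends and store a short summary in my Supabase project."
    }]
}))

# The final message in the returned state holds the agent's answer
print(example_response["messages"][-1].content)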
Join developers who are already using Klavis AI to power their LangChain applications with these MCP servers.