@pokutuna
Created June 8, 2025 17:57
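
# Variant 1: the prebuilt create_react_agent. MCP tools are loaded from the
# Playwright MCP session, so the agent must be constructed inside that session.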
import asyncio
import logging
from pprint import pprint

from langchain_google_vertexai import ChatVertexAI
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_mcp_adapters.tools import load_mcp_tools
from langgraph.prebuilt import create_react_agent

# Mute gemini unsupported schema warnings
logging.getLogger("langchain_google_vertexai.functions_utils").setLevel(logging.ERROR)

model = ChatVertexAI(
    model_name="gemini-2.5-flash-preview-04-17",
    project="pokutuna-playground",
    location="global",
)

mcp_client = MultiServerMCPClient(
    {
        "playwright": {
            "command": "npx",
            "args": ["@playwright/mcp@latest"],
            "transport": "stdio",
        }
    }
)

prompt = """
1. access https://example.com/
2. click "More information" link
3. what can you see?
"""


async def main():
    async with mcp_client.session("playwright") as session:
        tools = await load_mcp_tools(session)
        agent = create_react_agent(model=model, tools=tools)
        async for chunk in agent.astream(
            {"messages": [("user", prompt)]},
        ):
            pprint(chunk)


if __name__ == "__main__":
    asyncio.run(main())
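
# Variant 2: a hand-built StateGraph equivalent to create_react_agent. The MCP
# session is passed per invocation via RunnableConfig, so the compiled graph
# stays session-independent.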
import asyncio
import logging
from pprint import pprint
from typing import Annotated, TypedDict

from langchain_core.messages import BaseMessage
from langchain_core.runnables import RunnableConfig
from langchain_google_vertexai import ChatVertexAI
from langchain_mcp_adapters.client import MultiServerMCPClient
from langchain_mcp_adapters.tools import load_mcp_tools
from langgraph.graph import END, START
from langgraph.graph.graph import CompiledGraph
from langgraph.graph.message import add_messages
from langgraph.graph.state import StateGraph
from langgraph.prebuilt import ToolNode, tools_condition

# Mute gemini unsupported schema warnings
logging.getLogger("langchain_google_vertexai.functions_utils").setLevel(logging.ERROR)

model = ChatVertexAI(
    model_name="gemini-2.5-flash-preview-04-17",
    project="pokutuna-playground",
    location="global",
)

mcp_client = MultiServerMCPClient(
    {
        "playwright": {
            "command": "npx",
            "args": ["@playwright/mcp@latest"],
            "transport": "stdio",
        }
    }
)


class State(TypedDict):
    messages: Annotated[list[BaseMessage], add_messages]


async def agent(state: State, config: RunnableConfig) -> State:
    # The MCP session arrives via the per-invocation config, so tools are
    # loaded here rather than at graph-construction time.
    configurable = config.get("configurable", {})
    session = configurable.get("session")
    tools = await load_mcp_tools(session)
    messages = state["messages"]
    model_with_tools = model.bind_tools(tools)
    response = await model_with_tools.ainvoke(messages)
    return {"messages": [response]}


async def tools_node(state: State, config: RunnableConfig) -> State:
    # Same pattern: rebuild the ToolNode from the session carried in the config.
    configurable = config.get("configurable", {})
    session = configurable.get("session")
    tools = await load_mcp_tools(session)
    tool_node = ToolNode(tools)
    return await tool_node.ainvoke(state, config)


def build_workflow() -> CompiledGraph:
    workflow = StateGraph(State)
    workflow.add_node("agent", agent)
    workflow.add_node("tools", tools_node)
    workflow.add_edge(START, "agent")
    workflow.add_conditional_edges(
        "agent",
        tools_condition,
        {
            "tools": "tools",
            "__end__": END,
        },
    )
    workflow.add_edge("tools", "agent")
    return workflow.compile()


prompt = """
1. access https://example.com/
2. click "More information" link
3. what can you see?
"""


async def main():
    # Graph construction is independent of any session, so the CompiledGraph is
    # reusable; the trade-off is that we implement a graph equivalent to
    # create_react_agent ourselves instead of using the prebuilt one.
    agent = build_workflow()
    async with mcp_client.session("playwright") as session:
        config = RunnableConfig(configurable={"session": session})
        async for chunk in agent.astream(
            {"messages": [("human", prompt)]}, config=config
        ):
            pprint(chunk)


if __name__ == "__main__":
    asyncio.run(main())
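
A minimal sketch (not part of the gist; it reuses the mcp_client, prompt, and build_workflow defined above) of why passing the session through RunnableConfig matters: one compiled graph can serve any number of independent Playwright MCP sessions.

async def reuse_across_sessions():
    agent = build_workflow()  # compiled once; no session is baked into the graph
    for _ in range(2):
        async with mcp_client.session("playwright") as session:
            config = RunnableConfig(configurable={"session": session})
            result = await agent.ainvoke({"messages": [("human", prompt)]}, config=config)
            pprint(result["messages"][-1].content)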
[project]
name = "08-mcp-ronwuc"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.11"
dependencies = [
    "langchain>=0.3.25",
    "langchain-google-vertexai>=2.0.24",
    "langchain-mcp-adapters>=0.1.7",
    "langgraph>=0.4.8",
    "langgraph-cli[inmem]>=0.2.12",
]