Is it possible to pass variables to the MCP server? #844 · modelcontextprotocol/python-sdk · GitHub

Closed as not planned

@ChellShort

Description


I'm trying to pass my chat history from my LangGraph app to the MCP server, but I don't know if that's even possible, e.g. with HTTP headers. I tried using @mcp.resource to pass some str variables, but I'm still confused.

This is my code; I got it from here:

client_langgraph.py

```python
from typing import Annotated, List
from typing_extensions import TypedDict
from langchain.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_ollama import ChatOllama
from langgraph.prebuilt import tools_condition, ToolNode
from langgraph.graph import StateGraph, START, END
from langgraph.graph.message import AnyMessage, add_messages
from langgraph.checkpoint.memory import MemorySaver
from langchain_mcp_adapters.tools import load_mcp_tools
from langchain_mcp_adapters.resources import load_mcp_resources
from langchain_mcp_adapters.prompts import load_mcp_prompt
from mcp import ClientSession, StdioServerParameters
from mcp.client.stdio import stdio_client
from mcp.client.streamable_http import streamablehttp_client

import asyncio

math_server_url = "http://localhost:8000/mcp"

async def create_graph(session):
    llm = ChatOllama(model="qwen3:4b", temperature=0.5)
    
    tools = await load_mcp_tools(session)
    llm_with_tool = llm.bind_tools(tools)

    system_prompt = await load_mcp_prompt(session, "system_prompt")
    prompt_template = ChatPromptTemplate.from_messages([
        ("system", system_prompt[0].content),
        MessagesPlaceholder("messages")
    ])
    chat_llm = prompt_template | llm_with_tool

    # State Management
    class State(TypedDict):
        messages: Annotated[List[AnyMessage], add_messages]

    # Nodes
    def chat_node(state: State) -> State:
        state["messages"] = chat_llm.invoke({"messages": state["messages"]})
        return state

    # Building the graph
    graph_builder = StateGraph(State)
    graph_builder.add_node("chat_node", chat_node)
    graph_builder.add_node("tool_node", ToolNode(tools=tools))
    graph_builder.add_edge(START, "chat_node")
    graph_builder.add_conditional_edges("chat_node", tools_condition, {"tools": "tool_node", "__end__": END})
    graph_builder.add_edge("tool_node", "chat_node")
    graph = graph_builder.compile(checkpointer=MemorySaver())
    return graph

async def stream_responses(agent, message, config):
    async for stream_mode, chunk in agent.astream(
        {"messages": message}, 
        config=config,
        stream_mode=["messages", "updates"]
    ):
        if stream_mode == "messages":
            yield chunk[0].content
                
async def run_program(agent, message, config):
    async for i in stream_responses(agent, message, config):
        print(i, end="", flush=True)
    
async def main():
    config = {"configurable": {"thread_id": 1234}}
    async with streamablehttp_client(math_server_url) as (read, write, _):
        async with ClientSession(read, write) as session:
            await session.initialize()

            # Check available tools
            tools = await load_mcp_tools(session)
            print("Available tools:", [tool.name for tool in tools])

            # Check available prompts
            prompts = await load_mcp_prompt(session, "example_prompt", arguments={"question": "what is 2+2"})
            print("Available prompts:", [prompt.content for prompt in prompts])
            prompts = await load_mcp_prompt(session, "system_prompt")
            print("Available prompts:", [prompt.content for prompt in prompts])

            # Check available resources
            resources = await load_mcp_resources(session, uris=["greeting://Ricardo", "config://app"])
            print("Available resources:", [resource.data for resource in resources])

            # Use the MCP Server in the graph
            agent = await create_graph(session)
            
            while True:
                message = input("\n\nUser: ")
                await run_program(agent, message, config)

if __name__ == "__main__":
    asyncio.run(main())
```
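
For what it's worth, `streamablehttp_client` does take a `headers` argument, so values can at least be attached to the connection. A minimal sketch, assuming a made-up `X-Chat-History` header (the server still needs a way to read it):

```python
# A minimal sketch: attaching custom headers when opening the streamable
# HTTP connection. The header names below are made up for illustration.
import json

from mcp import ClientSession
from mcp.client.streamable_http import streamablehttp_client

url = "http://localhost:8000/mcp"

async def connect_with_headers(history: list[str]):
    headers = {
        "X-Thread-Id": "1234",
        # Header values must be strings, so structured state has to be serialized.
        "X-Chat-History": json.dumps(history),
    }
    async with streamablehttp_client(url, headers=headers) as (read, write, _):
        async with ClientSession(read, write) as session:
            await session.initialize()
            # ... load tools / build the graph as in main() above
            ...
```

The catch is that the headers are fixed when the connection is opened, so this fits static values like an API key or a thread id better than a chat history that grows on every turn.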






example_mcp.py

```python
import mcp.types as types
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser
from langchain_core.runnables import RunnablePassthrough
from langchain_ollama import ChatOllama
from mcp.server.fastmcp import FastMCP, Context

mcp = FastMCP("chat_tools_test")

# Prompts ----------------------------------------------------------
@mcp.prompt()
def example_prompt(question: str) -> str:
    """Example prompt description"""
    return f"""
    You are a math assistant. Answer the question.
    Question: {question}
    """

@mcp.prompt()
def system_prompt() -> str:
    """System prompt description"""
    return """
    You are an AI assistant; use the tools if needed. If you don't find a tool suitable for the task, just say "The question is out of my capabilities".
    Don't try to generate any type of function or example of a function to help the user find their answer.
    """    

# Resources ----------------------------------------------------------
# These can only access static files and make calls to external APIs; they can't access the agent's memory.
# They're more for things the user needs at the moment, e.g. accessing this file called so-and-so ... and things like that.
# Although I don't see any difference from a normal function I could just write here.
@mcp.resource("greeting://{name}")
def get_greeting(name: str) -> str:
    """Get a personalized greeting"""
    return f"Hello, {name}!"

@mcp.resource("config://app")
def get_config() -> str:
    """Static configuration data"""
    return "App configuration here"

# Tools ----------------------------------------------------------
@mcp.tool()
def add(a: int, b: int) -> int:
    """Add two numbers"""
    return a + b

@mcp.tool()
def multiply(a: int, b: int) -> int:
    """Multiply two numbers"""
    return a * b

if __name__ == "__main__":
    mcp.run(transport="streamable-http")  # Run the server over streamable HTTP
```
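
On the server side I'm less sure of the API: recent python-sdk versions seem to expose the underlying Starlette request on the request context under HTTP transports. Treat this sketch as an assumption to verify, not a confirmed recipe:

```python
# A hedged sketch of reading a client-supplied header inside a tool. It
# assumes ctx.request_context.request is the Starlette Request under the
# streamable-http transport -- verify against your SDK version.
from mcp.server.fastmcp import FastMCP, Context

mcp = FastMCP("chat_tools_test")

@mcp.tool()
async def echo_history(ctx: Context) -> str:
    """Echo whatever the client sent in the made-up X-Chat-History header."""
    request = ctx.request_context.request  # assumption: Starlette Request on HTTP transports
    if request is None:
        return "no HTTP request available (stdio transport?)"
    return request.headers.get("x-chat-history", "no history header received")
```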


At first I thought about using resources to just inject values into the server, but I think that's not their purpose. One solution I'm considering is using an env file to give the server access to some variables, but the problem of passing the history would still remain. Any help?
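
One option that sidesteps the transport entirely: tool arguments are plain JSON, so the client can pass the relevant history (or a summary of it) explicitly on every call. A minimal sketch; the tool name and schema are made up, not part of the server above:

```python
# A minimal sketch of passing chat history as ordinary tool input. The tool
# name answer_with_history is illustrative, not part of the original server.
from mcp.server.fastmcp import FastMCP

mcp = FastMCP("chat_tools_test")

@mcp.tool()
def answer_with_history(question: str, history: list[str]) -> str:
    """Answer a question given the client's recent chat history.

    Any JSON-serializable client-side state (e.g. LangGraph messages
    rendered to strings) can travel this way, per call, with no
    transport tricks needed.
    """
    context = "\n".join(history[-10:])  # keep only the last few turns
    return f"Context:\n{context}\n\nQuestion: {question}"
```

With langchain-mcp-adapters the extra argument just shows up in the tool's input schema, so the model (or a thin wrapper around the tool) has to fill the history in on each call.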
