Skip to main content
Glama

MCP Server Basic Example

by ugundhar
client.py2.1 kB
import asyncio
import os

from dotenv import load_dotenv
from langchain_groq import ChatGroq
from mcp_use import MCPAgent, MCPClient


async def run_memory_chat():
    """Run an interactive chat using MCPAgent's built-in conversation memory.

    Loads API credentials from the environment (or a .env file), connects to
    the MCP servers listed in the config file, and loops on stdin until the
    user types 'exit' or 'quit'. Typing 'clear' resets the agent's
    conversation history.

    Raises:
        RuntimeError: if GROQ_API_KEY is not set after loading .env.
    """
    # Load environment variables for API keys from a .env file, if present.
    load_dotenv()

    # Fail fast with a clear message. The original
    # `os.environ["GROQ_API_KEY"] = os.getenv("GROQ_API_KEY")` raised a
    # TypeError when the variable was unset (os.environ values must be str),
    # and was a redundant self-assignment when it was set.
    if not os.getenv("GROQ_API_KEY"):
        raise RuntimeError(
            "GROQ_API_KEY is not set; add it to your environment or .env file."
        )

    # Config file path - change this to your config file
    config_file = "server/weather.json"

    print("Initializing chat...")

    # Create MCP client and agent with memory enabled
    client = MCPClient.from_config_file(config_file)
    llm = ChatGroq(model="qwen-qwq-32b")

    # Create agent with memory_enabled=True
    agent = MCPAgent(
        llm=llm,
        client=client,
        max_steps=15,
        memory_enabled=True,  # Enable built-in conversation memory
    )

    print("\n===== Interactive MCP Chat =====")
    print("Type 'exit' or 'quit' to end the conversation")
    print("Type 'clear' to clear conversation history")
    print("==================================\n")

    try:
        # Main chat loop
        while True:
            # Get user input
            user_input = input("\nYou: ")

            # Check for exit command
            if user_input.lower() in ["exit", "quit"]:
                print("Ending conversation...")
                break

            # Check for clear history command
            if user_input.lower() == "clear":
                agent.clear_conversation_history()
                print("Conversation history cleared.")
                continue

            # Get response from agent
            print("\nAssistant: ", end="", flush=True)

            try:
                # Run the agent with the user input
                # (memory handling is automatic).
                response = await agent.run(user_input)
                print(response)
            except Exception as e:
                # Keep the chat loop alive on per-turn errors.
                print(f"\nError: {e}")

    finally:
        # Clean up any open MCP sessions before exiting.
        if client and client.sessions:
            await client.close_all_sessions()


if __name__ == "__main__":
    asyncio.run(run_memory_chat())

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/ugundhar/mcp-server'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.