client-example.py • 5.6 kB
#!/usr/bin/env python3
"""
Simple CLI client to interact with the MCP-LLama Bridge Service
This demonstrates how to use the integrated LLama + Kafka Schema Registry MCP system
"""

import argparse
import asyncio
import json
import sys

import httpx


class MCPLlamaClient:
    def __init__(self, bridge_url: str = "http://localhost:8080"):
        self.bridge_url = bridge_url

    async def health_check(self) -> dict:
        """Check if the bridge service is healthy"""
        async with httpx.AsyncClient() as client:
            response = await client.get(f"{self.bridge_url}/health")
            response.raise_for_status()
            return response.json()

    async def chat(self, message: str, model: str = "llama3.2:3b", use_mcp: bool = True) -> dict:
        """Send a chat message to LLama with optional MCP integration"""
        async with httpx.AsyncClient(timeout=120.0) as client:
            response = await client.post(
                f"{self.bridge_url}/chat",
                json={"message": message, "model": model, "use_mcp": use_mcp}
            )
            response.raise_for_status()
            return response.json()

    async def interactive_chat(self, model: str = "llama3.2:3b"):
        """Start an interactive chat session"""
        print(f"🤖 Starting interactive chat with {model}")
        print("💡 Type 'help' for Schema Registry commands, 'quit' to exit")
        print("🔧 MCP tools are enabled - you can ask about schemas, subjects, etc.")
        print("-" * 60)

        while True:
            try:
                user_input = input("\n🧑 You: ").strip()

                if user_input.lower() in ["quit", "exit", "q"]:
                    print("👋 Goodbye!")
                    break

                if user_input.lower() == "help":
                    self.show_help()
                    continue

                if not user_input:
                    continue

                print("🤖 LLama: Thinking...", end="", flush=True)
                response = await self.chat(user_input, model=model)
                print(f"\r🤖 LLama: {response['response']}")

                if response.get("used_tools"):
                    print(f"🔧 Used tools: {', '.join(response['used_tools'])}")

            except KeyboardInterrupt:
                print("\n👋 Goodbye!")
                break
            except Exception as e:
                print(f"\n❌ Error: {e}")

    def show_help(self):
        """Show help information"""
        print(
            """
📚 Schema Registry Commands You Can Try:

📋 Basic Operations:
  • "List all subjects in the schema registry"
  • "Show me the schemas in the user-events subject"
  • "Get the latest version of the order-schema"
  • "Show me all contexts"

🔍 Schema Analysis:
  • "Show me the structure of the user-profile schema"
  • "Compare versions 1 and 2 of the payment-events schema"
  • "Find all schemas that contain a field called 'user_id'"

✅ Compatibility & Validation:
  • "Check if this schema is compatible: {schema_json}"
  • "Validate this schema against AVRO standards"
  • "Show me compatibility requirements for subject X"

📊 Registry Management:
  • "Export all schemas from the production context"
  • "Show me registry statistics and usage"
  • "List all schema versions for subject Y"

🔧 Configuration:
  • "Show global compatibility settings"
  • "What's the current mode of the registry?"
  • "Update compatibility mode for subject Z"

💡 You can ask in natural language - LLama will understand and use the appropriate tools!
"""
        )


async def main():
    parser = argparse.ArgumentParser(description="MCP-LLama Client")
    parser.add_argument("--bridge-url", default="http://localhost:8080", help="Bridge service URL")
    parser.add_argument("--model", default="llama3.2:3b", help="LLama model to use")
    parser.add_argument("--message", help="Single message to send")
    parser.add_argument("--no-mcp", action="store_true", help="Disable MCP tools")
    parser.add_argument("--health", action="store_true", help="Check service health")

    args = parser.parse_args()
    client = MCPLlamaClient(args.bridge_url)

    try:
        if args.health:
            print("🏥 Checking service health...")
            health = await client.health_check()
            print(json.dumps(health, indent=2))
            return

        # Check if service is available
        health = await client.health_check()
        if health["status"] != "healthy":
            print(f"⚠️ Service is {health['status']}")
            if health["status"] == "unhealthy":
                print("❌ Bridge service is not available")
                sys.exit(1)

        if args.message:
            # Single message mode
            print(f"🤖 Sending message to {args.model}...")
            response = await client.chat(args.message, model=args.model, use_mcp=not args.no_mcp)
            print(f"\n🤖 Response: {response['response']}")
            if response.get("used_tools"):
                print(f"🔧 Used tools: {', '.join(response['used_tools'])}")
        else:
            # Interactive mode
            await client.interactive_chat(args.model)

    except httpx.ConnectError:
        print("❌ Cannot connect to bridge service. Make sure it's running at:", args.bridge_url)
        print("💡 Try running: ./run-llama-mcp.sh start")
        sys.exit(1)
    except Exception as e:
        print(f"❌ Error: {e}")
        sys.exit(1)


if __name__ == "__main__":
    asyncio.run(main())


MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/aywengo/kafka-schema-reg-mcp'
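The same lookup can be done from Python. A minimal sketch using httpx; the shape of the JSON the directory API returns is not documented here, so the response is simply pretty-printed:

# Fetch this server's entry from the Glama MCP directory API (sketch; response schema not assumed).
import json

import httpx

resp = httpx.get("https://glama.ai/api/mcp/v1/servers/aywengo/kafka-schema-reg-mcp")
resp.raise_for_status()
print(json.dumps(resp.json(), indent=2))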

If you have feedback or need assistance with the MCP directory API, please join our Discord server.