Skip to main content
Glama
test_ollama_client.pyβ€’1.31 kB
""" Tests for Ollama Client """ import pytest from mcp_server.llm_clients import OllamaClient @pytest.mark.asyncio async def test_ollama_client_init(): """Test Ollama client initialization""" client = OllamaClient(base_url="http://localhost:11434", timeout=30) assert client.base_url == "http://localhost:11434" assert client.timeout == 30 @pytest.mark.asyncio async def test_ollama_parse_json(): """Test JSON parsing from text""" client = OllamaClient() # Test direct JSON text = '{"action": "create_box", "params": {"width": 20}}' result = client._parse_json(text) assert result is not None assert result["action"] == "create_box" # Test JSON embedded in text text = 'Here is the action: {"action": "create_box", "params": {"width": 20}} done' result = client._parse_json(text) assert result is not None assert result["action"] == "create_box" # Test invalid JSON text = 'This is not JSON at all' result = client._parse_json(text) assert result is None @pytest.mark.asyncio async def test_ollama_list_models(): """Test listing Ollama models""" client = OllamaClient() # This will fail if Ollama is not running, which is okay for tests models = await client.list_models() assert isinstance(models, list)

Latest Blog Posts

MCP directory API

We provide all the information about MCP servers via our MCP API.

curl -X GET 'https://glama.ai/api/mcp/v1/servers/jaskirat1616/fusion360-mcp'

If you have feedback or need assistance with the MCP directory API, please join our Discord server.