import { z } from 'zod';
import { MongoClient, Db, Collection, ObjectId } from 'mongodb';
interface ToolResponse {
content: Array<{
type: "text";
text: string;
}>;
isError?: boolean;
}
// Connection pool management
const connectionPool: Map<string, MongoClient> = new Map();
/**
* Get or create a MongoDB client connection
*/
async function getMongoClient(url: string): Promise<MongoClient> {
if (connectionPool.has(url)) {
const client = connectionPool.get(url)!;
// Verify connection is still alive
try {
await client.db('admin').command({ ping: 1 });
return client;
} catch (error) {
// Connection is dead: close it (best effort) and remove it from the pool
await client.close().catch(() => {});
connectionPool.delete(url);
}
}
// Create new connection
const client = new MongoClient(url, {
serverSelectionTimeoutMS: 5000,
connectTimeoutMS: 10000,
});
await client.connect();
connectionPool.set(url, client);
return client;
}
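/**
 * Optional cleanup helper — a minimal sketch, not called anywhere in this module.
 * Closes every pooled client; wire it into your server's shutdown path if you
 * need connections released explicitly.
 */
export async function closeAllConnections(): Promise<void> {
for (const [url, client] of connectionPool.entries()) {
// Best-effort close; ignore errors from already-dead connections
await client.close().catch(() => {});
connectionPool.delete(url);
}
}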
/**
* Get database instance
*/
async function getDatabase(url: string, dbName: string): Promise<Db> {
const client = await getMongoClient(url);
return client.db(dbName);
}
/**
* Get collection instance
*/
async function getCollection(url: string, dbName: string, collectionName: string): Promise<Collection> {
const db = await getDatabase(url, dbName);
return db.collection(collectionName);
}
/**
* Serialize MongoDB documents (handle ObjectId, Date, etc.)
*/
function serializeDocument(doc: any): any {
if (doc === null || doc === undefined) {
return doc;
}
if (doc instanceof ObjectId) {
return { $oid: doc.toHexString() };
}
if (doc instanceof Date) {
return { $date: doc.toISOString() };
}
if (Array.isArray(doc)) {
return doc.map(serializeDocument);
}
if (typeof doc === 'object') {
const serialized: any = {};
for (const [key, value] of Object.entries(doc)) {
serialized[key] = serializeDocument(value);
}
return serialized;
}
return doc;
}
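// Illustrative example (hypothetical values):
//   serializeDocument({ _id: new ObjectId('65a1b2c3d4e5f6a7b8c9d0e1'), createdAt: new Date('2024-01-01') })
//   => { _id: { $oid: '65a1b2c3d4e5f6a7b8c9d0e1' }, createdAt: { $date: '2024-01-01T00:00:00.000Z' } }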
/**
* Deserialize query/filter objects (handle $oid references)
*/
function deserializeQuery(query: any): any {
if (query === null || query === undefined) {
return query;
}
if (typeof query === 'object' && typeof query.$oid === 'string' && ObjectId.isValid(query.$oid)) {
return new ObjectId(query.$oid);
}
if (Array.isArray(query)) {
return query.map(deserializeQuery);
}
if (typeof query === 'object') {
const deserialized: any = {};
for (const [key, value] of Object.entries(query)) {
// Special handling for _id field: convert valid ObjectId hex strings, leave other string ids as-is
if (key === '_id' && typeof value === 'string' && ObjectId.isValid(value)) {
deserialized[key] = new ObjectId(value);
} else {
deserialized[key] = deserializeQuery(value);
}
}
return deserialized;
}
return query;
}
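// Illustrative example (hypothetical values): both a plain-string _id and an
// { $oid: ... } wrapper become ObjectId instances, so callers can send plain JSON filters:
//   deserializeQuery({ _id: '65a1b2c3d4e5f6a7b8c9d0e1', author: { $oid: '65a1b2c3d4e5f6a7b8c9d0e2' } })
//   => { _id: new ObjectId('65a1b2c3d4e5f6a7b8c9d0e1'), author: new ObjectId('65a1b2c3d4e5f6a7b8c9d0e2') }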
// ==================== Schemas ====================
export const mongodbConnectSchema = z.object({
url: z.string().describe('MongoDB connection string (e.g., mongodb://localhost:27017 or mongodb+srv://...)'),
database: z.string().optional().describe('Database name to connect to (optional, for testing connection)'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbFindSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
filter: z.record(z.any()).optional().describe('Query filter (MongoDB query object)'),
projection: z.record(z.any()).optional().describe('Fields to include/exclude'),
sort: z.record(z.any()).optional().describe('Sort order'),
limit: z.number().optional().describe('Maximum number of documents to return'),
skip: z.number().optional().describe('Number of documents to skip'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbFindOneSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
filter: z.record(z.any()).optional().describe('Query filter'),
projection: z.record(z.any()).optional().describe('Fields to include/exclude'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbInsertOneSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
document: z.record(z.any()).describe('Document to insert'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbInsertManySchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
documents: z.array(z.record(z.any())).describe('Array of documents to insert'),
ordered: z.boolean().default(true).describe('Whether to perform ordered or unordered insert'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbUpdateOneSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
filter: z.record(z.any()).describe('Query filter to match documents'),
update: z.record(z.any()).describe('Update operations (e.g., {$set: {field: value}})'),
upsert: z.boolean().default(false).describe('Create document if not found'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbUpdateManySchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
filter: z.record(z.any()).describe('Query filter to match documents'),
update: z.record(z.any()).describe('Update operations'),
upsert: z.boolean().default(false).describe('Create documents if not found'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbDeleteOneSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
filter: z.record(z.any()).describe('Query filter to match document to delete'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbDeleteManySchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
filter: z.record(z.any()).describe('Query filter to match documents to delete'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbAggregateSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
pipeline: z.array(z.record(z.any())).describe('Aggregation pipeline stages'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbCountSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
filter: z.record(z.any()).optional().describe('Query filter'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbCreateCollectionSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name to create'),
validator: z.record(z.any()).optional().describe('JSON Schema validator for document validation'),
validationLevel: z.enum(['off', 'strict', 'moderate']).optional().describe('Validation level'),
validationAction: z.enum(['error', 'warn']).optional().describe('Action when validation fails'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbDropCollectionSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name to drop'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbCreateIndexSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
keys: z.record(z.any()).describe('Index keys (e.g., {email: 1} for ascending, {name: -1} for descending)'),
options: z.object({
unique: z.boolean().optional().describe('Ensure unique values'),
name: z.string().optional().describe('Index name'),
sparse: z.boolean().optional().describe('Sparse index (only index documents with the field)'),
expireAfterSeconds: z.number().optional().describe('TTL in seconds for automatic document deletion')
}).optional().describe('Index options'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbListIndexesSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbDropIndexSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().describe('Collection name'),
indexName: z.string().describe('Name of index to drop'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbListDatabasesSchema = z.object({
url: z.string().describe('MongoDB connection string'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbListCollectionsSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
cwd: z.string().optional().describe('Working directory')
});
export const mongodbStatsSchema = z.object({
url: z.string().describe('MongoDB connection string'),
database: z.string().describe('Database name'),
collection: z.string().optional().describe('Collection name (omit for database stats)'),
cwd: z.string().optional().describe('Working directory')
});
// ==================== Tool Implementations ====================
export async function mongodbConnect(args: z.infer<typeof mongodbConnectSchema>): Promise<ToolResponse> {
try {
const client = await getMongoClient(args.url);
// Test connection
const adminDb = client.db('admin');
await adminDb.command({ ping: 1 });
// Get server info
const serverInfo = await adminDb.admin().serverInfo();
// List databases if no specific database requested
let databases: any[] = [];
if (!args.database) {
const dbList = await adminDb.admin().listDatabases();
databases = dbList.databases;
} else {
// Test specific database
const db = client.db(args.database);
await db.command({ ping: 1 });
}
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
connected: true,
server: {
version: serverInfo.version,
platform: serverInfo.platform
},
database: args.database || null,
databases: databases.length > 0 ? databases.map(db => ({
name: db.name,
sizeOnDisk: db.sizeOnDisk,
empty: db.empty
})) : undefined,
message: args.database
? `Connected to database '${args.database}'`
: 'Connected to MongoDB server'
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
connected: false,
error: error.message,
hint: error.message.includes('ECONNREFUSED')
? 'MongoDB server is not running or not accessible'
: error.message.includes('Authentication')
? 'Invalid credentials - check username/password'
: 'Check connection string format and network connectivity'
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbFind(args: z.infer<typeof mongodbFindSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const filter = args.filter ? deserializeQuery(args.filter) : {};
const options: any = {};
if (args.projection) {
options.projection = args.projection;
}
if (args.sort) {
options.sort = args.sort;
}
if (args.limit) {
options.limit = args.limit;
}
if (args.skip) {
options.skip = args.skip;
}
const documents = await collection.find(filter, options).toArray();
const serializedDocs = documents.map(serializeDocument);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
count: documents.length,
documents: serializedDocs,
filter: args.filter || {},
limit: args.limit,
skip: args.skip
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
filter: args.filter
}, null, 2)
}],
isError: true
};
}
}
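// Illustrative call (hypothetical connection string, database, and fields):
//   await mongodbFind({
//     url: 'mongodb://localhost:27017',
//     database: 'shop',
//     collection: 'orders',
//     filter: { status: 'paid', total: { $gt: 100 } },
//     sort: { createdAt: -1 },
//     limit: 10
//   });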
export async function mongodbFindOne(args: z.infer<typeof mongodbFindOneSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const filter = args.filter ? deserializeQuery(args.filter) : {};
const options: any = {};
if (args.projection) {
options.projection = args.projection;
}
const document = await collection.findOne(filter, options);
const serializedDoc = document ? serializeDocument(document) : null;
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
found: document !== null,
document: serializedDoc,
filter: args.filter || {}
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
filter: args.filter
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbInsertOne(args: z.infer<typeof mongodbInsertOneSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const result = await collection.insertOne(args.document);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
insertedId: result.insertedId instanceof ObjectId ? result.insertedId.toHexString() : result.insertedId,
acknowledged: result.acknowledged,
document: serializeDocument(args.document)
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
hint: error.message.includes('duplicate key')
? 'Document with this unique field already exists'
: error.message.includes('validation')
? 'Document does not match collection schema validation'
: 'Check document structure and try again'
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbInsertMany(args: z.infer<typeof mongodbInsertManySchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const result = await collection.insertMany(args.documents, { ordered: args.ordered });
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
insertedCount: result.insertedCount,
insertedIds: Object.values(result.insertedIds).map(id => id instanceof ObjectId ? id.toHexString() : id),
acknowledged: result.acknowledged,
ordered: args.ordered
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
documentsAttempted: args.documents.length,
hint: error.message.includes('duplicate key')
? 'Some documents have duplicate unique fields'
: 'Check document structures and try again'
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbUpdateOne(args: z.infer<typeof mongodbUpdateOneSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const filter = deserializeQuery(args.filter);
const result = await collection.updateOne(filter, args.update, { upsert: args.upsert });
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
matchedCount: result.matchedCount,
modifiedCount: result.modifiedCount,
upsertedId: result.upsertedId ? (result.upsertedId as ObjectId).toHexString() : null,
upserted: result.upsertedCount > 0,
acknowledged: result.acknowledged,
filter: args.filter,
update: args.update
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
filter: args.filter,
hint: error.message.includes('update operator')
? 'Update must use operators like $set, $inc, $push, etc.'
: 'Check filter and update syntax'
}, null, 2)
}],
isError: true
};
}
}
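// Illustrative call (hypothetical values) — note the update document must use
// operators such as $set / $inc rather than plain field assignments:
//   await mongodbUpdateOne({
//     url: 'mongodb://localhost:27017',
//     database: 'shop',
//     collection: 'orders',
//     filter: { _id: '65a1b2c3d4e5f6a7b8c9d0e1' },
//     update: { $set: { status: 'shipped' } }
//   });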
export async function mongodbUpdateMany(args: z.infer<typeof mongodbUpdateManySchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const filter = deserializeQuery(args.filter);
const result = await collection.updateMany(filter, args.update, { upsert: args.upsert });
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
matchedCount: result.matchedCount,
modifiedCount: result.modifiedCount,
upsertedId: result.upsertedId ? (result.upsertedId as ObjectId).toHexString() : null,
upserted: result.upsertedCount > 0,
acknowledged: result.acknowledged,
filter: args.filter,
update: args.update
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
filter: args.filter,
hint: 'Check filter and update syntax'
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbDeleteOne(args: z.infer<typeof mongodbDeleteOneSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const filter = deserializeQuery(args.filter);
const result = await collection.deleteOne(filter);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
deletedCount: result.deletedCount,
acknowledged: result.acknowledged,
filter: args.filter,
message: result.deletedCount > 0
? 'Document deleted successfully'
: 'No document matched the filter'
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
filter: args.filter
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbDeleteMany(args: z.infer<typeof mongodbDeleteManySchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const filter = deserializeQuery(args.filter);
const result = await collection.deleteMany(filter);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
deletedCount: result.deletedCount,
acknowledged: result.acknowledged,
filter: args.filter,
message: result.deletedCount > 0
? `${result.deletedCount} document(s) deleted successfully`
: 'No documents matched the filter'
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
filter: args.filter
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbAggregate(args: z.infer<typeof mongodbAggregateSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
// Don't deserialize aggregation pipelines - they have different semantics
// Field references like "$city" should not be converted to ObjectIds
const pipeline = args.pipeline;
const results = await collection.aggregate(pipeline).toArray();
const serializedResults = results.map(serializeDocument);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
count: results.length,
results: serializedResults,
pipeline: args.pipeline
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
pipeline: args.pipeline,
hint: 'Check aggregation pipeline syntax - each stage should be like {$match: {...}}, {$group: {...}}, etc.'
}, null, 2)
}],
isError: true
};
}
}
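// Illustrative pipeline (hypothetical fields): count paid orders per city, largest first:
//   await mongodbAggregate({
//     url: 'mongodb://localhost:27017',
//     database: 'shop',
//     collection: 'orders',
//     pipeline: [
//       { $match: { status: 'paid' } },
//       { $group: { _id: '$city', total: { $sum: 1 } } },
//       { $sort: { total: -1 } }
//     ]
//   });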
export async function mongodbCount(args: z.infer<typeof mongodbCountSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const filter = args.filter ? deserializeQuery(args.filter) : {};
const count = await collection.countDocuments(filter);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
count,
filter: args.filter || {}
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
filter: args.filter
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbCreateCollection(args: z.infer<typeof mongodbCreateCollectionSchema>): Promise<ToolResponse> {
try {
const db = await getDatabase(args.url, args.database);
const options: any = {};
if (args.validator) {
// Use validator directly - it should already contain $jsonSchema if needed
options.validator = args.validator;
}
if (args.validationLevel) {
options.validationLevel = args.validationLevel;
}
if (args.validationAction) {
options.validationAction = args.validationAction;
}
await db.createCollection(args.collection, options);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
created: true,
validator: args.validator ? 'Applied' : 'None',
validationLevel: args.validationLevel || 'strict',
validationAction: args.validationAction || 'error',
message: `Collection '${args.collection}' created successfully`
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
hint: error.message.includes('already exists')
? 'Collection already exists in this database'
: error.message.includes('validator')
? 'Check JSON Schema validator syntax'
: 'Check collection name and database permissions'
}, null, 2)
}],
isError: true
};
}
}
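// Illustrative validator (hypothetical schema) — the value passed as `validator`
// should already wrap the JSON Schema in $jsonSchema:
//   await mongodbCreateCollection({
//     url: 'mongodb://localhost:27017',
//     database: 'shop',
//     collection: 'users',
//     validator: {
//       $jsonSchema: {
//         bsonType: 'object',
//         required: ['email'],
//         properties: { email: { bsonType: 'string' } }
//       }
//     },
//     validationLevel: 'strict',
//     validationAction: 'error'
//   });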
export async function mongodbDropCollection(args: z.infer<typeof mongodbDropCollectionSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const result = await collection.drop();
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
dropped: result,
message: `Collection '${args.collection}' dropped successfully`
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
hint: error.message.includes('ns not found')
? 'Collection does not exist'
: 'Check collection name and database permissions'
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbCreateIndex(args: z.infer<typeof mongodbCreateIndexSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const indexName = await collection.createIndex(args.keys, args.options || {});
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
indexName,
keys: args.keys,
options: args.options || {},
message: `Index '${indexName}' created successfully`
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
keys: args.keys,
hint: error.message.includes('already exists')
? 'Index with this name or specification already exists'
: 'Check index keys format (e.g., {field: 1} for ascending, {field: -1} for descending)'
}, null, 2)
}],
isError: true
};
}
}
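// Illustrative calls (hypothetical fields): a unique index and a TTL index
//   await mongodbCreateIndex({ url: 'mongodb://localhost:27017', database: 'shop',
//     collection: 'users', keys: { email: 1 }, options: { unique: true } });
//   await mongodbCreateIndex({ url: 'mongodb://localhost:27017', database: 'shop',
//     collection: 'sessions', keys: { createdAt: 1 }, options: { expireAfterSeconds: 3600 } });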
export async function mongodbListIndexes(args: z.infer<typeof mongodbListIndexesSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
const indexes = await collection.indexes();
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
count: indexes.length,
indexes: indexes.map(idx => ({
name: idx.name,
keys: idx.key,
unique: idx.unique || false,
sparse: idx.sparse || false,
expireAfterSeconds: idx.expireAfterSeconds
}))
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbDropIndex(args: z.infer<typeof mongodbDropIndexSchema>): Promise<ToolResponse> {
try {
const collection = await getCollection(args.url, args.database, args.collection);
await collection.dropIndex(args.indexName);
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
indexName: args.indexName,
message: `Index '${args.indexName}' dropped successfully`
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection,
indexName: args.indexName,
hint: error.message.includes('not found')
? 'Index does not exist'
: error.message.includes('_id_')
? 'Cannot drop the _id index'
: 'Check index name'
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbListDatabases(args: z.infer<typeof mongodbListDatabasesSchema>): Promise<ToolResponse> {
try {
const client = await getMongoClient(args.url);
const adminDb = client.db('admin');
const result = await adminDb.admin().listDatabases();
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
totalSize: result.totalSize,
count: result.databases.length,
databases: result.databases.map(db => ({
name: db.name,
sizeOnDisk: db.sizeOnDisk,
empty: db.empty
}))
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
hint: 'Check connection string and authentication'
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbListCollections(args: z.infer<typeof mongodbListCollectionsSchema>): Promise<ToolResponse> {
try {
const db = await getDatabase(args.url, args.database);
const collections = await db.listCollections().toArray();
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
count: collections.length,
collections: collections.map(col => ({
name: col.name,
type: col.type
}))
}, null, 2)
}]
};
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database
}, null, 2)
}],
isError: true
};
}
}
export async function mongodbStats(args: z.infer<typeof mongodbStatsSchema>): Promise<ToolResponse> {
try {
const db = await getDatabase(args.url, args.database);
if (args.collection) {
// Collection stats using collStats command
const stats = await db.command({ collStats: args.collection });
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
collection: args.collection,
stats: {
count: stats.count,
size: stats.size,
avgObjSize: stats.avgObjSize,
storageSize: stats.storageSize,
totalIndexSize: stats.totalIndexSize,
indexSizes: stats.indexSizes,
nindexes: stats.nindexes
}
}, null, 2)
}]
};
} else {
// Database stats using dbStats command
const stats = await db.command({ dbStats: 1 });
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: true,
database: args.database,
stats: {
collections: stats.collections,
views: stats.views,
objects: stats.objects,
avgObjSize: stats.avgObjSize,
dataSize: stats.dataSize,
storageSize: stats.storageSize,
indexes: stats.indexes,
indexSize: stats.indexSize
}
}, null, 2)
}]
};
}
} catch (error: any) {
return {
content: [{
type: "text" as const,
text: JSON.stringify({
success: false,
error: error.message,
database: args.database,
collection: args.collection
}, null, 2)
}],
isError: true
};
}
}
// ==================== Tool Metadata ====================
export const mongodbTools = [
{
name: 'mongodb_connect',
description: 'Connect to MongoDB server and verify connection. Lists databases if no specific database is provided. Supports both local MongoDB and MongoDB Atlas.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string (e.g., mongodb://localhost:27017 or mongodb+srv://...)' },
database: { type: 'string', description: 'Database name to connect to (optional)' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url']
}
},
{
name: 'mongodb_find',
description: 'Query documents from a collection. Supports filtering, projection, sorting, limit, and skip. Returns array of matching documents.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
filter: { type: 'object', description: 'Query filter (MongoDB query object)' },
projection: { type: 'object', description: 'Fields to include/exclude' },
sort: { type: 'object', description: 'Sort order' },
limit: { type: 'number', description: 'Maximum number of documents' },
skip: { type: 'number', description: 'Number of documents to skip' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection']
}
},
{
name: 'mongodb_find_one',
description: 'Find a single document from a collection. Returns the first document matching the filter, or null if not found.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
filter: { type: 'object', description: 'Query filter' },
projection: { type: 'object', description: 'Fields to include/exclude' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection']
}
},
{
name: 'mongodb_insert_one',
description: 'Insert a single document into a collection. Returns the inserted document ID.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
document: { type: 'object', description: 'Document to insert' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'document']
}
},
{
name: 'mongodb_insert_many',
description: 'Insert multiple documents into a collection. Supports ordered and unordered inserts.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
documents: { type: 'array', items: { type: 'object' }, description: 'Array of documents to insert' },
ordered: { type: 'boolean', default: true, description: 'Ordered or unordered insert' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'documents']
}
},
{
name: 'mongodb_update_one',
description: 'Update a single document in a collection. Supports upsert (create if not found).',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
filter: { type: 'object', description: 'Query filter to match document' },
update: { type: 'object', description: 'Update operations (e.g., {$set: {field: value}})' },
upsert: { type: 'boolean', default: false, description: 'Create document if not found' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'filter', 'update']
}
},
{
name: 'mongodb_update_many',
description: 'Update multiple documents in a collection. Supports upsert.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
filter: { type: 'object', description: 'Query filter to match documents' },
update: { type: 'object', description: 'Update operations' },
upsert: { type: 'boolean', default: false, description: 'Create documents if not found' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'filter', 'update']
}
},
{
name: 'mongodb_delete_one',
description: 'Delete a single document from a collection. Deletes the first document matching the filter.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
filter: { type: 'object', description: 'Query filter to match document' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'filter']
}
},
{
name: 'mongodb_delete_many',
description: 'Delete multiple documents from a collection. Deletes all documents matching the filter.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
filter: { type: 'object', description: 'Query filter to match documents' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'filter']
}
},
{
name: 'mongodb_aggregate',
description: 'Run an aggregation pipeline on a collection. Supports all MongoDB aggregation stages ($match, $group, $sort, $project, etc.).',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
pipeline: { type: 'array', items: { type: 'object' }, description: 'Aggregation pipeline stages' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'pipeline']
}
},
{
name: 'mongodb_count',
description: 'Count documents in a collection. Supports filtering to count specific documents.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
filter: { type: 'object', description: 'Query filter (optional)' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection']
}
},
{
name: 'mongodb_create_collection',
description: 'Create a new collection with optional JSON Schema validation. Supports validation levels (strict/moderate/off) and actions (error/warn).',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name to create' },
validator: { type: 'object', description: 'JSON Schema validator' },
validationLevel: { type: 'string', enum: ['off', 'strict', 'moderate'], description: 'Validation level' },
validationAction: { type: 'string', enum: ['error', 'warn'], description: 'Validation action' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection']
}
},
{
name: 'mongodb_drop_collection',
description: 'Drop (delete) a collection from a database. This operation is permanent and cannot be undone.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name to drop' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection']
}
},
{
name: 'mongodb_create_index',
description: 'Create an index on a collection. Supports unique, sparse, and TTL indexes. Indexes improve query performance.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
keys: { type: 'object', description: 'Index keys (e.g., {email: 1} for ascending)' },
options: {
type: 'object',
properties: {
unique: { type: 'boolean', description: 'Ensure unique values' },
name: { type: 'string', description: 'Index name' },
sparse: { type: 'boolean', description: 'Sparse index' },
expireAfterSeconds: { type: 'number', description: 'TTL in seconds' }
}
},
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'keys']
}
},
{
name: 'mongodb_list_indexes',
description: 'List all indexes on a collection. Shows index names, keys, and options (unique, sparse, TTL).',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection']
}
},
{
name: 'mongodb_drop_index',
description: 'Drop (delete) an index from a collection. Cannot drop the _id index.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name' },
indexName: { type: 'string', description: 'Name of index to drop' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database', 'collection', 'indexName']
}
},
{
name: 'mongodb_list_databases',
description: 'List all databases on the MongoDB server. Shows database names, sizes, and whether they are empty.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url']
}
},
{
name: 'mongodb_list_collections',
description: 'List all collections in a database. Shows collection names and types.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database']
}
},
{
name: 'mongodb_stats',
description: 'Get database or collection statistics. Shows document count, sizes, index information. Omit collection parameter for database-wide stats.',
inputSchema: {
type: 'object',
properties: {
url: { type: 'string', description: 'MongoDB connection string' },
database: { type: 'string', description: 'Database name' },
collection: { type: 'string', description: 'Collection name (optional for db stats)' },
cwd: { type: 'string', description: 'Working directory' }
},
required: ['url', 'database']
}
}
];