diff --git a/README.md b/README.md
index 28c0c755d..44f17c3a2 100644
--- a/README.md
+++ b/README.md
@@ -1,7 +1,11 @@
-# MongoDB MCP Server
+# MongoDB MCP Server (Search Skunkworks 2025 Branch)
 
 A Model Context Protocol server for interacting with MongoDB Databases and MongoDB Atlas.
 
+This is a branch cut for the search team to collaborate on MCP server changes made during
+Skunkworks 2025. Please make your PRs against the `search-skunkworks-2025` branch instead of
+the main branch during skunkworks.
+
 ## 📚 Table of Contents
 
 - [🚀 Getting Started](#getting-started)
@@ -138,19 +142,26 @@ NOTE: atlas tools are only available when you set credentials on [configuration]
 - `insert-one` - Insert a single document into a MongoDB collection
 - `insert-many` - Insert multiple documents into a MongoDB collection
 - `create-index` - Create an index for a MongoDB collection
+- `create-search-index` - Create a search index for a MongoDB collection
+- `create-vector-index` - Create a vector search index for a MongoDB collection
 - `update-one` - Update a single document in a MongoDB collection
 - `update-many` - Update multiple documents in a MongoDB collection
+- `update-search-index` - Update a search index for a MongoDB collection
+- `update-vector-index` - Update a vector search index for a MongoDB collection
 - `rename-collection` - Rename a MongoDB collection
 - `delete-one` - Delete a single document from a MongoDB collection
 - `delete-many` - Delete multiple documents from a MongoDB collection
 - `drop-collection` - Remove a collection from a MongoDB database
 - `drop-database` - Remove a MongoDB database
+- `drop-search-index` - Remove a search or vector search index from a MongoDB collection
 - `list-databases` - List all databases for a MongoDB connection
 - `list-collections` - List all collections for a given database
 - `collection-indexes` - Describe the indexes for a collection
+- `collection-search-indexes` - Describe the search indexes for a collection
 - `collection-schema` - Describe the schema for a collection
 - `collection-storage-size` - Get the size of a collection in MB
 - `db-stats` - Return statistics about a MongoDB database
+- `run-pipeline` - Run an aggregation against JSON documents (Atlas account not required)
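+
+For example, an MCP client could invoke `run-pipeline` with arguments shaped roughly like the following (illustrative only; the exact invocation syntax depends on your MCP client):
+
+```ts
+const runPipelineArgs = {
+    documents: [{ title: "MongoDB Atlas Search" }, { title: "Vector search basics" }],
+    aggregationPipeline: [
+        { $search: { index: "default", text: { query: "atlas", path: { wildcard: "*" } } } },
+    ],
+    // Optional: a dynamic-mapping search index definition is used when omitted.
+    searchIndexDefinition: { mappings: { dynamic: true } },
+};
+```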
 
 ## Configuration
 
diff --git a/src/common/playground/playgroundClient.ts b/src/common/playground/playgroundClient.ts
new file mode 100644
index 000000000..2803f1186
--- /dev/null
+++ b/src/common/playground/playgroundClient.ts
@@ -0,0 +1,88 @@
+const PLAYGROUND_SEARCH_URL = "https://search-playground.mongodb.com/api/tools/code-playground/search";
+
+/**
+ * Payload for the Playground endpoint.
+ */
+export interface PlaygroundRunRequest {
+    documents: string;
+    aggregationPipeline: string;
+    indexDefinition: string;
+    synonyms: string;
+}
+
+/**
+ * Successful response from the Playground server.
+ */
+export interface PlaygroundRunResponse {
+    documents: Array<Record<string, unknown>>;
+}
+
+/**
+ * Error response from the Playground server.
+ */
+interface PlaygroundRunErrorResponse {
+    code: string;
+    message: string;
+}
+
+/**
+ * MCP-specific Playground error, exposed publicly for tools.
+ */
+export class PlaygroundRunError extends Error implements PlaygroundRunErrorResponse {
+    constructor(
+        public message: string,
+        public code: string
+    ) {
+        super(message);
+    }
+}
+
+export enum RunErrorCode {
+    NETWORK_ERROR = "NETWORK_ERROR",
+    UNKNOWN = "UNKNOWN",
+}
+
+/**
+ * Handles Search Playground requests, abstracting low-level details from MCP tools.
+ * https://search-playground.mongodb.com
+ */
+export class PlaygroundClient {
+    async run(request: PlaygroundRunRequest): Promise<PlaygroundRunResponse> {
+        const options: RequestInit = {
+            method: "POST",
+            headers: {
+                "Content-Type": "application/json",
+            },
+            body: JSON.stringify(request),
+        };
+
+        let response: Response;
+        try {
+            response = await fetch(PLAYGROUND_SEARCH_URL, options);
+        } catch {
+            throw new PlaygroundRunError("Cannot run pipeline.", RunErrorCode.NETWORK_ERROR);
+        }
+
+        if (!response.ok) {
+            const runErrorResponse = await this.getRunErrorResponse(response);
+            throw new PlaygroundRunError(runErrorResponse.message, runErrorResponse.code);
+        }
+
+        try {
+            return (await response.json()) as PlaygroundRunResponse;
+        } catch {
+            throw new PlaygroundRunError("Response is not valid JSON.", RunErrorCode.UNKNOWN);
+        }
+    }
+
+    private async getRunErrorResponse(response: Response): Promise<PlaygroundRunErrorResponse> {
+        try {
+            return (await response.json()) as PlaygroundRunErrorResponse;
+        } catch {
+            return {
+                message: `HTTP ${response.status} ${response.statusText}.`,
+                code: RunErrorCode.UNKNOWN,
+            };
+        }
+    }
+}
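+
+// Illustrative usage (a sketch, not part of the tool code): all request fields are
+// JSON-encoded strings, mirroring what the Playground endpoint expects.
+//
+//   const client = new PlaygroundClient();
+//   const response = await client.run({
+//       documents: JSON.stringify([{ title: "MongoDB Atlas Search" }]),
+//       aggregationPipeline: JSON.stringify([{ $search: { text: { query: "atlas", path: { wildcard: "*" } } } }]),
+//       indexDefinition: JSON.stringify({ mappings: { dynamic: true } }),
+//       synonyms: JSON.stringify([]),
+//   });
+//   // response.documents holds the pipeline results as plain objects.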
diff --git a/src/common/search/listSearchIndexesOutput.ts b/src/common/search/listSearchIndexesOutput.ts
new file mode 100644
index 000000000..48c051131
--- /dev/null
+++ b/src/common/search/listSearchIndexesOutput.ts
@@ -0,0 +1,49 @@
+export interface IndexDefinitionVersion {
+    version: number;
+    createdAt: string; // ISO 8601 date string
+}
+
+export interface IndexDefinition {
+    [key: string]: unknown;
+}
+
+export interface SynonymMappingStatusDetails {
+    status: string;
+    queryable: boolean;
+    message?: string;
+}
+
+export interface IndexStatusInfo {
+    status: string;
+    queryable: boolean;
+    synonymMappingStatus?: string;
+    synonymMappingStatusDetails?: SynonymMappingStatusDetails;
+    definitionVersion: IndexDefinitionVersion;
+    definition: IndexDefinition;
+}
+
+export interface SearchIndexStatusDetail {
+    hostname: string;
+    status: string;
+    queryable: boolean;
+    mainIndex: IndexStatusInfo;
+    stagedIndex?: IndexStatusInfo;
+}
+
+export interface SynonymMappingStatusDetail {
+    status: string;
+    queryable: boolean;
+    message?: string;
+}
+
+export interface ListSearchIndexOutput {
+    id: string;
+    name: string;
+    status: string;
+    queryable: boolean;
+    latestDefinitionVersion: IndexDefinitionVersion;
+    latestDefinition: IndexDefinition;
+    statusDetail: SearchIndexStatusDetail[];
+    synonymMappingStatus?: "BUILDING" | "FAILED" | "READY";
+    synonymMappingStatusDetail?: SynonymMappingStatusDetail[];
+}
diff --git a/src/server.ts b/src/server.ts
index b0e8e19c9..2a2cedf35 100644
--- a/src/server.ts
+++ b/src/server.ts
@@ -3,6 +3,7 @@ import { Session } from "./session.js";
 import { Transport } from "@modelcontextprotocol/sdk/shared/transport.js";
 import { AtlasTools } from "./tools/atlas/tools.js";
 import { MongoDbTools } from "./tools/mongodb/tools.js";
+import { PlaygroundTools } from "./tools/playground/tools.js";
 import logger, { initializeLogger, LogId } from "./logger.js";
 import { ObjectId } from "mongodb";
 import { Telemetry } from "./telemetry/telemetry.js";
@@ -134,7 +135,7 @@ export class Server {
     }
 
     private registerTools() {
-        for (const tool of [...AtlasTools, ...MongoDbTools]) {
+        for (const tool of [...AtlasTools, ...MongoDbTools, ...PlaygroundTools]) {
             new tool(this.session, this.userConfig, this.telemetry).register(this.mcpServer);
         }
     }
diff --git a/src/tools/mongodb/create/createSearchIndex.ts b/src/tools/mongodb/create/createSearchIndex.ts
new file mode 100644
index 000000000..8f01c9d1d
--- /dev/null
+++ b/src/tools/mongodb/create/createSearchIndex.ts
@@ -0,0 +1,47 @@
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { DbOperationArgs, MongoDBToolBase, SearchIndexArgs } from "../mongodbTool.js";
+import { OperationType, ToolArgs } from "../../tool.js";
+
+const SEARCH_INDEX_TYPE = "search";
+
+export class CreateSearchIndexTool extends MongoDBToolBase {
+    protected name = "create-search-index";
+    protected description = "Create an Atlas Search index for a collection";
+    protected argsShape = {
+        ...DbOperationArgs,
+        name: SearchIndexArgs.name,
+        analyzer: SearchIndexArgs.analyzer,
+        mappings: SearchIndexArgs.mappings,
+    };
+
+    protected operationType: OperationType = "create";
+
+    protected async execute({
+        database,
+        collection,
+        name,
+        analyzer,
+        mappings,
+    }: ToolArgs<typeof this.argsShape>): Promise<CallToolResult> {
+        const provider = await this.ensureConnected();
+        const indexes = await provider.createSearchIndexes(database, collection, [
+            {
+                name,
+                type: SEARCH_INDEX_TYPE,
+                definition: {
+                    analyzer,
+                    mappings,
+                },
+            },
+        ]);
+
+        return {
+            content: [
+                {
+                    text: `Created the search index "${indexes[0]}" on collection "${collection}" in database "${database}"`,
+                    type: "text",
+                },
+            ],
+        };
+    }
+}
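+
+// Example arguments (illustrative): a static mapping that indexes `title` as a string field.
+// Extra per-field options allowed by Atlas Search pass through via `.passthrough()` on the field schema.
+//
+//   {
+//       database: "inventory",
+//       collection: "products",
+//       name: "default",
+//       analyzer: "lucene.standard",
+//       mappings: { dynamic: false, fields: { title: { type: "string" } } },
+//   }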
diff --git a/src/tools/mongodb/create/createVectorIndex.ts b/src/tools/mongodb/create/createVectorIndex.ts
new file mode 100644
index 000000000..55ac2c8dd
--- /dev/null
+++ b/src/tools/mongodb/create/createVectorIndex.ts
@@ -0,0 +1,44 @@
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { buildVectorFields, DbOperationArgs, MongoDBToolBase, VectorIndexArgs } from "../mongodbTool.js";
+import { OperationType, ToolArgs } from "../../tool.js";
+
+const VECTOR_INDEX_TYPE = "vectorSearch";
+export class CreateVectorIndexTool extends MongoDBToolBase {
+    protected name = "create-vector-index";
+    protected description = "Create an Atlas Vector Search index for a collection.";
+    protected argsShape = {
+        ...DbOperationArgs,
+        name: VectorIndexArgs.name,
+        vectorDefinition: VectorIndexArgs.vectorDefinition,
+        filterFields: VectorIndexArgs.filterFields,
+    };
+
+    protected operationType: OperationType = "create";
+
+    protected async execute({
+        database,
+        collection,
+        name,
+        vectorDefinition,
+        filterFields,
+    }: ToolArgs<typeof this.argsShape>): Promise<CallToolResult> {
+        const provider = await this.ensureConnected();
+
+        const indexes = await provider.createSearchIndexes(database, collection, [
+            {
+                name,
+                type: VECTOR_INDEX_TYPE,
+                definition: { fields: buildVectorFields(vectorDefinition, filterFields) },
+            },
+        ]);
+
+        return {
+            content: [
+                {
+                    text: `Created the vector index "${indexes[0]}" on collection "${collection}" in database "${database}"`,
+                    type: "text",
+                },
+            ],
+        };
+    }
+}
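+
+// Example arguments (illustrative): a 1536-dimension cosine index on `embedding`,
+// with `genre` indexed as a filter field for pre-filtering.
+//
+//   {
+//       database: "media",
+//       collection: "movies",
+//       name: "vector_index",
+//       vectorDefinition: { path: "embedding", numDimensions: 1536, similarity: "cosine" },
+//       filterFields: [{ path: "genre" }],
+//   }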
diff --git a/src/tools/mongodb/delete/dropSearchIndex.ts b/src/tools/mongodb/delete/dropSearchIndex.ts
new file mode 100644
index 000000000..d13566c66
--- /dev/null
+++ b/src/tools/mongodb/delete/dropSearchIndex.ts
@@ -0,0 +1,29 @@
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { SearchIndexOperationArgs, MongoDBToolBase } from "../mongodbTool.js";
+import { ToolArgs, OperationType } from "../../tool.js";
+
+export class DropSearchIndexTool extends MongoDBToolBase {
+    protected name = "drop-search-index";
+    protected description = "Deletes a search or vector search index from a collection.";
+    protected argsShape = {
+        ...SearchIndexOperationArgs,
+    };
+    protected operationType: OperationType = "delete";
+
+    protected async execute({
+        database,
+        collection,
+        searchIndexName,
+    }: ToolArgs<typeof this.argsShape>): Promise<CallToolResult> {
+        const provider = await this.ensureConnected();
+        await provider.dropSearchIndex(database, collection, searchIndexName);
+        return {
+            content: [
+                {
+                    text: `Successfully dropped index "${searchIndexName}" from collection "${collection}" in database "${database}"`,
+                    type: "text",
+                },
+            ],
+        };
+    }
+}
diff --git a/src/tools/mongodb/mongodbTool.ts b/src/tools/mongodb/mongodbTool.ts
index 2ef1aee0e..7ce15daa1 100644
--- a/src/tools/mongodb/mongodbTool.ts
+++ b/src/tools/mongodb/mongodbTool.ts
@@ -1,5 +1,5 @@
 import { z } from "zod";
-import { ToolArgs, ToolBase, ToolCategory, TelemetryToolMetadata } from "../tool.js";
+import { TelemetryToolMetadata, ToolArgs, ToolBase, ToolCategory } from "../tool.js";
 import { NodeDriverServiceProvider } from "@mongosh/service-provider-node-driver";
 import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
 import { ErrorCodes, MongoDBError } from "../../errors.js";
@@ -10,6 +10,121 @@ export const DbOperationArgs = {
     collection: z.string().describe("Collection name"),
 };
 
+export const SearchIndexArgs = {
+    name: z.string().describe("The name of the index"),
+    analyzer: z
+        .string()
+        .optional()
+        .default("lucene.standard")
+        .describe(
+            "The analyzer to use for the index. Can be one of the built-in lucene analyzers (`lucene.standard`, `lucene.simple`, `lucene.whitespace`, `lucene.keyword`), a language-specific analyzer, such as `lucene.cjk` or `lucene.czech`, or a custom analyzer defined in the Atlas UI."
+        ),
+    mappings: z
+        .object({
+            dynamic: z
+                .boolean()
+                .optional()
+                .default(false)
+                .describe(
+                    "Enables or disables dynamic mapping of fields for this index. If set to true, Atlas Search recursively indexes all dynamically indexable fields. If set to false, you must specify individual fields to index using mappings.fields."
+                ),
+            fields: z
+                .record(
+                    z.string().describe("The field name"),
+                    z
+                        .object({
+                            type: z
+                                .enum([
+                                    "autocomplete",
+                                    "boolean",
+                                    "date",
+                                    "document",
+                                    "embeddedDocuments",
+                                    "geo",
+                                    "number",
+                                    "objectId",
+                                    "string",
+                                    "token",
+                                    "uuid",
+                                ])
+                                .describe("The field type"),
+                        })
+                        .passthrough()
+                        .describe(
+                            "The field index definition. It must contain the field type, as well as any additional options for that field type."
+                        )
+                )
+                .optional()
+                .describe("The field mapping definitions. If `dynamic` is set to false, this is required."),
+        })
+        .describe("Document describing the index to create."),
+};
+
+export enum VectorFieldType {
+    VECTOR = "vector",
+    FILTER = "filter",
+}
+export const VectorIndexArgs = {
+    name: z.string().describe("The name of the index"),
+    vectorDefinition: z
+        .object({
+            path: z
+                .string()
+                .min(1)
+                .describe(
+                    "Name of the field to index. For nested fields, use dot notation to specify path to embedded fields."
+                ),
+            numDimensions: z
+                .number()
+                .int()
+                .min(1)
+                .max(8192)
+                .describe("Number of vector dimensions to enforce at index-time and query-time."),
+            similarity: z
+                .enum(["euclidean", "cosine", "dotProduct"])
+                .describe("Vector similarity function to use to search for top K-nearest neighbors."),
+            quantization: z
+                .enum(["none", "scalar", "binary"])
+                .default("none")
+                .optional()
+                .describe(
+                    "Automatic vector quantization. Use this setting only if your embeddings are float or double vectors."
+                ),
+        })
+        .describe("The vector index definition."),
+    filterFields: z
+        .array(
+            z.object({
+                path: z
+                    .string()
+                    .min(1)
+                    .describe(
+                        "Name of the field to filter by. For nested fields, use dot notation to specify path to embedded fields."
+                    ),
+            })
+        )
+        .optional()
+        .describe("Additional indexed fields that pre-filter data."),
+};
+
+type VectorDefinitionType = z.infer<typeof VectorIndexArgs.vectorDefinition>;
+type FilterFieldsType = z.infer<typeof VectorIndexArgs.filterFields>;
+export function buildVectorFields(vectorDefinition: VectorDefinitionType, filterFields: FilterFieldsType): object[] {
+    const typedVectorField = { ...vectorDefinition, type: VectorFieldType.VECTOR };
+    const typedFilterFields = (filterFields ?? []).map((f) => ({
+        ...f,
+        type: VectorFieldType.FILTER,
+    }));
+    return [typedVectorField, ...typedFilterFields];
+}
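+
+// Example (illustrative): buildVectorFields(
+//     { path: "embedding", numDimensions: 1536, similarity: "cosine" },
+//     [{ path: "genre" }]
+// ) returns the fields array handed to createSearchIndexes/updateSearchIndex:
+//   [
+//       { path: "embedding", numDimensions: 1536, similarity: "cosine", type: "vector" },
+//       { path: "genre", type: "filter" },
+//   ]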
+
+export const SearchIndexOperationArgs = {
+    database: z.string().describe("Database name"),
+    collection: z.string().describe("Collection name"),
+    searchIndexName: z.string().describe("Search Index or Vector Search Index name"),
+};
+
 export abstract class MongoDBToolBase extends ToolBase {
     protected category: ToolCategory = "mongodb";
diff --git a/src/tools/mongodb/read/collectionSearchIndexes.ts b/src/tools/mongodb/read/collectionSearchIndexes.ts
new file mode 100644
index 000000000..0432fc310
--- /dev/null
+++ b/src/tools/mongodb/read/collectionSearchIndexes.ts
@@ -0,0 +1,74 @@
+import { DbOperationArgs, MongoDBToolBase } from "../mongodbTool.js";
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { ToolArgs, OperationType } from "../../tool.js";
+import { z } from "zod";
+import { ListSearchIndexOutput } from "../../../common/search/listSearchIndexesOutput.js";
+
+export const ListSearchIndexesArgs = {
+    indexName: z
+        .string()
+        .default("")
+        .optional()
+        .describe(
+            "The name of the index to return information about. Returns all indexes on collection if not provided."
+        ),
+};
+
+export class CollectionSearchIndexesTool extends MongoDBToolBase {
+    protected name = "collection-search-indexes";
+    protected description = "Describe the search indexes for a collection";
+    protected argsShape = {
+        ...DbOperationArgs,
+        ...ListSearchIndexesArgs,
+    };
+
+    protected operationType: OperationType = "read";
+
+    protected async execute({
+        database,
+        collection,
+        indexName,
+    }: ToolArgs<typeof this.argsShape>): Promise<CallToolResult> {
+        const provider = await this.ensureConnected();
+        const indexes: ListSearchIndexOutput[] = (
+            (await provider.getSearchIndexes(database, collection, indexName)) as ListSearchIndexOutput[]
+        ).map((doc) => ({
+            id: doc.id,
+            name: doc.name,
+            status: doc.status,
+            queryable: doc.queryable,
+            latestDefinitionVersion: doc.latestDefinitionVersion,
+            latestDefinition: doc.latestDefinition,
+            statusDetail: doc.statusDetail,
+            synonymMappingStatus: doc.synonymMappingStatus,
+            synonymMappingStatusDetail: doc.synonymMappingStatusDetail,
+        }));
+
+        return {
+            content: [
+                {
+                    text: indexName
+                        ? `Found ${indexes.length} search indexes in the collection "${collection}" with name "${indexName}":`
+                        : `Found ${indexes.length} search indexes in the collection "${collection}":`,
+                    type: "text",
+                },
+                ...(indexes.map((indexDefinition) => {
+                    return {
+                        text: [
+                            `Name: "${indexDefinition.name}"`,
+                            `Definition: ${JSON.stringify(indexDefinition.latestDefinition, null, 2)}`,
+                            `Queryable: ${indexDefinition.queryable}`,
+                            `Status: "${indexDefinition.status}"`,
+                            `Status Detail: ${JSON.stringify(indexDefinition.statusDetail, null, 2)}`,
+                            `Definition Version: ${JSON.stringify(indexDefinition.latestDefinitionVersion, null, 2)}`,
+                            `Synonym Mapping Status: ${indexDefinition.synonymMappingStatus}`,
+                            `Synonym Mapping Status Detail: ${JSON.stringify(indexDefinition.synonymMappingStatusDetail, null, 2)}`,
+                            `ID: ${indexDefinition.id}`,
+                        ].join("\n"),
+                        type: "text",
+                    };
+                }) as { text: string; type: "text" }[]),
+            ],
+        };
+    }
+}
diff --git a/src/tools/mongodb/tools.ts b/src/tools/mongodb/tools.ts
index d64d53ea7..ea8b5d3b4 100644
--- a/src/tools/mongodb/tools.ts
+++ b/src/tools/mongodb/tools.ts
@@ -18,12 +18,19 @@ import { DropCollectionTool } from "./delete/dropCollection.js";
 import { ExplainTool } from "./metadata/explain.js";
 import { CreateCollectionTool } from "./create/createCollection.js";
 import { LogsTool } from "./metadata/logs.js";
+import { CreateSearchIndexTool } from "./create/createSearchIndex.js";
+import { UpdateSearchIndexTool } from "./update/updateSearchIndex.js";
+import { CreateVectorIndexTool } from "./create/createVectorIndex.js";
+import { UpdateVectorIndexTool } from "./update/updateVectorIndex.js";
+import { CollectionSearchIndexesTool } from "./read/collectionSearchIndexes.js";
+import { DropSearchIndexTool } from "./delete/dropSearchIndex.js";
 
 export const MongoDbTools = [
     ConnectTool,
     ListCollectionsTool,
     ListDatabasesTool,
     CollectionIndexesTool,
+    CollectionSearchIndexesTool,
     CreateIndexTool,
     CollectionSchemaTool,
     FindTool,
@@ -40,4 +47,9 @@ export const MongoDbTools = [
     ExplainTool,
     CreateCollectionTool,
     LogsTool,
+    CreateSearchIndexTool,
+    UpdateSearchIndexTool,
+    CreateVectorIndexTool,
+    UpdateVectorIndexTool,
+    DropSearchIndexTool,
 ];
diff --git a/src/tools/mongodb/update/updateSearchIndex.ts b/src/tools/mongodb/update/updateSearchIndex.ts
new file mode 100644
index 000000000..f34cce258
--- /dev/null
+++ b/src/tools/mongodb/update/updateSearchIndex.ts
@@ -0,0 +1,41 @@
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { DbOperationArgs, MongoDBToolBase, SearchIndexArgs } from "../mongodbTool.js";
+import { OperationType, ToolArgs } from "../../tool.js";
+
+export class UpdateSearchIndexTool extends MongoDBToolBase {
+    protected name = "update-search-index";
+    protected description = "Updates an Atlas Search index for a collection";
+    protected argsShape = {
+        ...DbOperationArgs,
+        name: SearchIndexArgs.name,
+        analyzer: SearchIndexArgs.analyzer,
+        mappings: SearchIndexArgs.mappings,
+    };
+
+    protected operationType: OperationType = "update";
+
+    protected async execute({
+        database,
+        collection,
+        name,
+        analyzer,
+        mappings,
+    }: ToolArgs<typeof this.argsShape>): Promise<CallToolResult> {
+        const provider = await this.ensureConnected();
+        // @ts-expect-error: The interface expects a SearchIndexDefinition. However, passing
+        // analyzer/mappings at the root of the definition is necessary for the call to succeed.
+        await provider.updateSearchIndex(database, collection, name, {
+            analyzer,
+            mappings,
+        });
+
+        return {
+            content: [
+                {
+                    text: `Successfully updated search index "${name}" on collection "${collection}" in database "${database}"`,
+                    type: "text",
+                },
+            ],
+        };
+    }
+}
diff --git a/src/tools/mongodb/update/updateVectorIndex.ts b/src/tools/mongodb/update/updateVectorIndex.ts
new file mode 100644
index 000000000..476cfc5e1
--- /dev/null
+++ b/src/tools/mongodb/update/updateVectorIndex.ts
@@ -0,0 +1,41 @@
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { buildVectorFields, DbOperationArgs, MongoDBToolBase, VectorIndexArgs } from "../mongodbTool.js";
+import { OperationType, ToolArgs } from "../../tool.js";
+
+export class UpdateVectorIndexTool extends MongoDBToolBase {
+    protected name = "update-vector-index";
+    protected description = "Updates an Atlas Vector Search index for a collection";
+    protected argsShape = {
+        ...DbOperationArgs,
+        name: VectorIndexArgs.name,
+        vectorDefinition: VectorIndexArgs.vectorDefinition,
+        filterFields: VectorIndexArgs.filterFields,
+    };
+
+    protected operationType: OperationType = "update";
+
+    protected async execute({
+        database,
+        collection,
+        name,
+        vectorDefinition,
+        filterFields,
+    }: ToolArgs<typeof this.argsShape>): Promise<CallToolResult> {
+        const provider = await this.ensureConnected();
+
+        // @ts-expect-error: The interface expects a SearchIndexDefinition ({ definition: { fields } }). However,
+        // passing fields at the root level is necessary for the call to succeed.
+        await provider.updateSearchIndex(database, collection, name, {
+            fields: buildVectorFields(vectorDefinition, filterFields),
+        });
+
+        return {
+            content: [
+                {
+                    text: `Successfully updated vector index "${name}" on collection "${collection}" in database "${database}"`,
+                    type: "text",
+                },
+            ],
+        };
+    }
+}
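+
+// Illustrative payloads passed to provider.updateSearchIndex by the two update tools;
+// both pass the definition at the root level (see the @ts-expect-error comments in each tool):
+//
+//   update-search-index: { analyzer: "lucene.standard", mappings: { dynamic: true } }
+//   update-vector-index: { fields: [{ path: "embedding", numDimensions: 1536, similarity: "cosine", type: "vector" }] }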
diff --git a/src/tools/playground/runPipeline.ts b/src/tools/playground/runPipeline.ts
new file mode 100644
index 000000000..85f9c43d5
--- /dev/null
+++ b/src/tools/playground/runPipeline.ts
@@ -0,0 +1,106 @@
+import { OperationType, TelemetryToolMetadata, ToolArgs, ToolBase, ToolCategory } from "../tool.js";
+import { z } from "zod";
+import { CallToolResult } from "@modelcontextprotocol/sdk/types.js";
+import { EJSON } from "bson";
+import {
+    PlaygroundRunError,
+    PlaygroundRunRequest,
+    PlaygroundRunResponse,
+} from "../../common/playground/playgroundClient.js";
+
+const DEFAULT_SEARCH_INDEX_DEFINITION = {
+    mappings: {
+        dynamic: true,
+    },
+};
+
+const DEFAULT_SYNONYMS: Array<Record<string, unknown>> = [];
+
+export const RunPipelineOperationArgs = {
+    documents: z
+        .array(z.record(z.string(), z.unknown()))
+        .max(500)
+        .describe("Documents to run the pipeline against. 500 is the maximum."),
+    aggregationPipeline: z
+        .array(z.record(z.string(), z.unknown()))
+        .describe("MongoDB aggregation pipeline to run on the provided documents."),
+    searchIndexDefinition: z
+        .record(z.string(), z.unknown())
+        .describe("MongoDB search index definition to create before running the pipeline.")
+        .optional()
+        .default(DEFAULT_SEARCH_INDEX_DEFINITION),
+    synonyms: z
+        .array(z.record(z.any()))
+        .describe("MongoDB synonyms mapping to create before running the pipeline.")
+        .optional()
+        .default(DEFAULT_SYNONYMS),
+};
+
+export class RunPipeline extends ToolBase {
+    protected name = "run-pipeline";
+    protected description =
+        "Run a MongoDB aggregation pipeline against the provided documents without needing an Atlas account, cluster, or collection. The tool can be useful for running ad-hoc pipelines for testing or debugging.";
+    protected category: ToolCategory = "playground";
+    protected operationType: OperationType = "metadata";
+    protected argsShape = RunPipelineOperationArgs;
+
+    protected async execute(toolArgs: ToolArgs<typeof this.argsShape>): Promise<CallToolResult> {
+        const runRequest = this.convertToRunRequest(toolArgs);
+        const runResponse = await this.runPipeline(runRequest);
+        const toolResult = this.convertToToolResult(runResponse);
+        return toolResult;
+    }
+
+    protected resolveTelemetryMetadata(): TelemetryToolMetadata {
+        return {};
+    }
+
+    private async runPipeline(runRequest: PlaygroundRunRequest): Promise<PlaygroundRunResponse> {
+        // Import PlaygroundClient dynamically so we can mock it properly in the tests.
+        const { PlaygroundClient } = await import("../../common/playground/playgroundClient.js");
+        const client = new PlaygroundClient();
+        try {
+            return await client.run(runRequest);
+        } catch (error: unknown) {
+            let message: string | undefined;
+
+            if (error instanceof PlaygroundRunError) {
+                message = `Error code: ${error.code}. Error message: ${error.message}.`;
+            }
+
+            throw new Error(message || "Cannot run pipeline.");
+        }
+    }
+
+    private convertToRunRequest(toolArgs: ToolArgs<typeof this.argsShape>): PlaygroundRunRequest {
+        try {
+            return {
+                documents: JSON.stringify(toolArgs.documents),
+                aggregationPipeline: JSON.stringify(toolArgs.aggregationPipeline),
+                indexDefinition: JSON.stringify(toolArgs.searchIndexDefinition || DEFAULT_SEARCH_INDEX_DEFINITION),
+                synonyms: JSON.stringify(toolArgs.synonyms || DEFAULT_SYNONYMS),
+            };
+        } catch {
+            throw new Error("Invalid arguments type.");
+        }
+    }
+
+    private convertToToolResult(runResponse: PlaygroundRunResponse): CallToolResult {
+        const content: Array<{ text: string; type: "text" }> = [
+            {
+                text: `Found ${runResponse.documents.length} documents:`,
+                type: "text",
+            },
+            ...runResponse.documents.map((doc) => {
+                return {
+                    text: EJSON.stringify(doc),
+                    type: "text",
+                } as { text: string; type: "text" };
+            }),
+        ];
+
+        return {
+            content,
+        };
+    }
+}
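+
+// Illustrative mapping performed by convertToRunRequest: tool arguments arrive as parsed JSON
+// values and are re-serialized into the string fields that PlaygroundRunRequest expects, e.g.
+//
+//   { documents: [{ a: 1 }], aggregationPipeline: [{ $match: { a: 1 } }] }
+// becomes
+//   {
+//       documents: '[{"a":1}]',
+//       aggregationPipeline: '[{"$match":{"a":1}}]',
+//       indexDefinition: '{"mappings":{"dynamic":true}}',
+//       synonyms: "[]",
+//   }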
diff --git a/src/tools/playground/tools.ts b/src/tools/playground/tools.ts
new file mode 100644
index 000000000..25e1290a2
--- /dev/null
+++ b/src/tools/playground/tools.ts
@@ -0,0 +1,3 @@
+import { RunPipeline } from "./runPipeline.js";
+
+export const PlaygroundTools = [RunPipeline];
diff --git a/src/tools/tool.ts b/src/tools/tool.ts
index 5e4fc1a3b..d06973b8b 100644
--- a/src/tools/tool.ts
+++ b/src/tools/tool.ts
@@ -10,7 +10,7 @@ import { UserConfig } from "../config.js";
 export type ToolArgs<Args extends ZodRawShape> = z.objectOutputType<Args, ZodNever>;
 
 export type OperationType = "metadata" | "read" | "create" | "delete" | "update";
-export type ToolCategory = "mongodb" | "atlas";
+export type ToolCategory = "mongodb" | "atlas" | "playground";
 export type TelemetryToolMetadata = {
     projectId?: string;
     orgId?: string;
diff --git a/tests/integration/helpers.ts b/tests/integration/helpers.ts
index 9d529376f..179f75bbb 100644
--- a/tests/integration/helpers.ts
+++ b/tests/integration/helpers.ts
@@ -182,6 +182,17 @@ export const databaseCollectionParameters: ParameterInfo[] = [
     { name: "collection", type: "string", description: "Collection name", required: true },
 ];
 
+export const collectionWithSearchIndexParameters: ParameterInfo[] = [
+    ...databaseCollectionParameters,
+    {
+        name: "indexName",
+        type: "string",
+        description:
+            "The name of the index to return information about. Returns all indexes on collection if not provided.",
+        required: false,
+    },
+];
+
 export const databaseCollectionInvalidArgs = [
     {},
     { database: "test" },
diff --git a/tests/integration/tools/atlas-search/read/collectionSearchIndexes.test.ts b/tests/integration/tools/atlas-search/read/collectionSearchIndexes.test.ts
new file mode 100644
index 000000000..1a422e59c
--- /dev/null
+++ b/tests/integration/tools/atlas-search/read/collectionSearchIndexes.test.ts
@@ -0,0 +1,19 @@
+import { describeWithMongoDB } from "../../mongodb/mongodbHelpers.js";
+import {
+    databaseCollectionInvalidArgs,
+    validateThrowsForInvalidArguments,
+    validateToolMetadata,
+    collectionWithSearchIndexParameters,
+} from "../../../helpers.js";
+
+describeWithMongoDB("collectionSearchIndexes tool", (integration) => {
+    validateToolMetadata(
+        integration,
+        "collection-search-indexes",
+        "Describe the search indexes for a collection",
+        collectionWithSearchIndexParameters
+    );
+    validateThrowsForInvalidArguments(integration, "collection-search-indexes", databaseCollectionInvalidArgs);
+
+    // Real tests to be added once search indexes are supported in the test environment.
+});
diff --git a/tests/integration/tools/playground/runPipeline.test.ts b/tests/integration/tools/playground/runPipeline.test.ts
new file mode 100644
index 000000000..d76aba0eb
--- /dev/null
+++ b/tests/integration/tools/playground/runPipeline.test.ts
@@ -0,0 +1,80 @@
+import { jest } from "@jest/globals";
+import { describeWithMongoDB } from "../mongodb/mongodbHelpers.js";
+import { getResponseElements } from "../../helpers.js";
+import { PlaygroundRunError } from "../../../../src/common/playground/playgroundClient.js";
+
+const setupMockPlaygroundClient = (implementation: unknown) => {
+    // Mock ESM modules: https://jestjs.io/docs/ecmascript-modules#module-mocking-in-esm
+    jest.unstable_mockModule("../../../../src/common/playground/playgroundClient.js", () => ({
+        PlaygroundClient: implementation,
+    }));
+};
+
+describeWithMongoDB("runPipeline tool", (integration) => {
+    beforeEach(() => {
+        jest.resetModules();
+    });
+
+    it("should return results", async () => {
+        class PlaygroundClientMock {
+            run = () => ({
+                documents: [{ name: "First document" }],
+            });
+        }
+        setupMockPlaygroundClient(PlaygroundClientMock);
+
+        const response = await integration.mcpClient().callTool({
+            name: "run-pipeline",
+            arguments: {
+                documents: [{ name: "First document" }, { name: "Second document" }],
+                aggregationPipeline: [
+                    {
+                        $search: {
+                            index: "default",
+                            text: {
+                                query: "first",
+                                path: {
+                                    wildcard: "*",
+                                },
+                            },
+                        },
+                    },
+                ],
+            },
+        });
+        const elements = getResponseElements(response.content);
+        expect(elements).toEqual([
+            {
+                text: "Found 1 documents:",
+                type: "text",
+            },
+            {
+                text: '{"name":"First document"}',
+                type: "text",
+            },
+        ]);
+    });
+
+    it("should return error", async () => {
+        class PlaygroundClientMock {
+            run = () => {
+                throw new PlaygroundRunError("Test error message", "TEST_CODE");
+            };
+        }
+        setupMockPlaygroundClient(PlaygroundClientMock);
+
+        const response = await integration.mcpClient().callTool({
+            name: "run-pipeline",
+            arguments: {
+                documents: [],
+                aggregationPipeline: [],
+            },
+        });
+        expect(response.content).toEqual([
+            {
+                type: "text",
+                text: "Error running run-pipeline: Error code: TEST_CODE. Error message: Test error message.",
+            },
+        ]);
+    });
+});