diff --git a/README.md b/README.md
index ed8d701b..08244282 100644
--- a/README.md
+++ b/README.md
@@ -19,11 +19,11 @@
-📦 Strata
-off-the-shelf intelligent connectors for your AI agent
+Strata
+Intelligent connectors for your AI agent that optimize the context window
-🛠️ MCP Integrations
+MCP Integrations
 100+ prebuilt integrations out-of-the-box, with OAuth support
@@ -45,6 +45,19 @@
+MCP Sandbox
+Scalable MCP environments for LLM training and RL
diff --git a/docs/api-reference/mcp-server/update-a-server-instance.mdx b/docs/api-reference/mcp-server/update-a-server-instance.mdx new file mode 100644 index 00000000..56b9a9aa --- /dev/null +++ b/docs/api-reference/mcp-server/update-a-server-instance.mdx @@ -0,0 +1,4 @@ +--- +openapi: patch /mcp-server/instance/{instanceId} +--- + diff --git a/docs/api-reference/openapi.json b/docs/api-reference/openapi.json index 566233e0..5fa695f5 100644 --- a/docs/api-reference/openapi.json +++ b/docs/api-reference/openapi.json @@ -717,6 +717,65 @@ } } }, + "patch": { + "tags": [ + "mcp-server" + ], + "summary": "Update", + "description": "Updates the settings of a specific server connection instance.\nCurrently supports updating the read-only status of the connection.", + "operationId": "updateServerInstance", + "security": [ + { + "HTTPBearer": [] + } + ], + "parameters": [ + { + "name": "instanceId", + "in": "path", + "required": true, + "schema": { + "type": "string", + "format": "uuid", + "description": "The ID of the connection integration instance to update.", + "title": "Instanceid" + }, + "description": "The ID of the connection integration instance to update." + } + ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateServerInstanceRequest" + } + } + } + }, + "responses": { + "200": { + "description": "Successful Response", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/UpdateServerInstanceResponse" + } + } + } + }, + "422": { + "description": "Validation Error", + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/HTTPValidationError" + } + } + } + } + } + }, "delete": { "tags": [ "mcp-server" @@ -6242,13 +6301,14 @@ } } }, - "/oauth/zoom/refresh_token": { - "post": { + "/oauth/zoho-mail/authorize": { + "get": { "tags": [ - "zoom-oauth" + "zoho-mail-oauth" ], - "summary": "Refresh Token", - "operationId": "refresh_token_oauth_zoom_refresh_token_post", + "summary": "Authorize Zoho Mail", + "description": "Start Zoho Mail OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeZohoMail", "parameters": [ { "name": "instance_id", @@ -6256,10 +6316,64 @@ "required": true, "schema": { "type": "string", - "description": "Instance ID for which to refresh the token", + "description": "Unique identifier for the client instance requesting authorization", "title": "Instance Id" }, - "description": "Instance ID for which to refresh the token" + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated 
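The new `PATCH /mcp-server/instance/{instanceId}` operation (surfaced in the docs by the new MDX page above) authenticates with a bearer token and, per its description, currently supports updating the connection's read-only status. A minimal client sketch follows; the base URL and the `is_read_only` body field are assumptions, since `UpdateServerInstanceRequest` is not expanded in this hunk:

```python
import os

import requests

# Assumptions: the base URL is a placeholder, and "is_read_only" is a
# hypothetical field name (UpdateServerInstanceRequest is not shown here).
BASE_URL = os.environ.get("STRATA_BASE_URL", "https://api.example.com")
API_KEY = os.environ["STRATA_API_KEY"]


def update_server_instance(instance_id: str, read_only: bool) -> dict:
    """Toggle the read-only status of a server connection instance."""
    resp = requests.patch(
        f"{BASE_URL}/mcp-server/instance/{instance_id}",
        headers={"Authorization": f"Bearer {API_KEY}"},
        json={"is_read_only": read_only},  # hypothetical body field
        timeout=30,
    )
    resp.raise_for_status()  # a 422 here signals a validation error
    return resp.json()
```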
string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" } ], "responses": { @@ -6267,22 +6381,10 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/ZoomOAuthSuccessResponse" - } + "schema": {} } } }, - "400": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/ZoomOAuthErrorResponse" - } - } - }, - "description": "Bad Request" - }, "422": { "description": "Validation Error", "content": { @@ -6296,14 +6398,13 @@ } } }, - "/oauth/zoho-mail/authorize": { + "/oauth/sharesight/authorize": { "get": { "tags": [ - "zoho-mail-oauth" + "sharesight-oauth" ], - "summary": "Authorize Zoho Mail", - "description": "Start Zoho Mail OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", - "operationId": "authorizeZohoMail", + "summary": "Authorize Sharesight", + "operationId": "authorizeSharesight", "parameters": [ { "name": "instance_id", @@ -6347,10 +6448,10 @@ "type": "null" } ], - "description": "Optional OAuth scopes to request (comma-separated string)", + "description": "Optional OAuth scopes to request (space-separated string)", "title": "Scope" }, - "description": "Optional OAuth scopes to request (comma-separated string)" + "description": "Optional OAuth scopes to request (space-separated string)" }, { "name": "redirect_url", @@ -6393,13 +6494,13 @@ } } }, - "/oauth/sharesight/authorize": { + "/oauth/instagram/authorize": { "get": { "tags": [ - "sharesight-oauth" + "instagram-oauth" ], - "summary": "Authorize Sharesight", - "operationId": "authorizeSharesight", + "summary": "Authorize Instagram", + "operationId": "authorizeInstagram", "parameters": [ { "name": "instance_id", @@ -6443,10 +6544,10 @@ "type": "null" } ], - "description": "Optional OAuth scopes to request (space-separated string)", + "description": "Optional OAuth scopes to request (comma-separated string)", "title": "Scope" }, - "description": "Optional OAuth scopes to request (space-separated string)" + "description": "Optional OAuth scopes to request (comma-separated string)" }, { "name": "redirect_url", @@ -6489,13 +6590,14 @@ } } }, - "/oauth/sharesight/refresh_token": { - "post": { + "/oauth/youtube/authorize": { + "get": { "tags": [ - "sharesight-oauth" + "youtube-oauth" ], - "summary": "Refresh Token", - "operationId": "refresh_token_oauth_sharesight_refresh_token_post", + "summary": "Authorize Youtube", + "description": "Start YouTube OAuth flow\n\nParameters:\n- instance_id: Identifier for the instance requesting authorization\n- client_id: Optional client ID for white labeling\n- scope: Optional scopes to request (comma-separated)\n- redirect_url: Optional URL to redirect to after authorization completes", + "operationId": "authorizeYouTube", "parameters": [ { "name": "instance_id", @@ -6503,10 +6605,64 @@ "required": true, "schema": { "type": "string", - "description": "Instance ID for which to refresh the token", + "description": "Unique identifier for the client instance requesting authorization", "title": 
"Instance Id" }, - "description": "Instance ID for which to refresh the token" + "description": "Unique identifier for the client instance requesting authorization" + }, + { + "name": "client_id", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Client ID for white labeling, if not provided will use default credentials", + "title": "Client Id" + }, + "description": "Client ID for white labeling, if not provided will use default credentials" + }, + { + "name": "scope", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional OAuth scopes to request (comma-separated string)", + "title": "Scope" + }, + "description": "Optional OAuth scopes to request (comma-separated string)" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization completes", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization completes" } ], "responses": { @@ -6514,22 +6670,10 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/SharesightOAuthSuccessResponse" - } + "schema": {} } } }, - "400": { - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SharesightOAuthErrorResponse" - } - } - }, - "description": "Bad Request" - }, "422": { "description": "Validation Error", "content": { @@ -6616,59 +6760,14 @@ } } }, - "/oauth/intercom/refresh_token": { - "post": { + "/oauth/paypal/authorize": { + "get": { "tags": [ - "intercom-oauth" + "paypal-oauth" ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_intercom_oauth_token", - "parameters": [ - { - "name": "instance_id", - "in": "query", - "required": true, - "schema": { - "type": "string", - "description": "Unique identifier for the MCP connection", - "title": "Instance Id" - }, - "description": "Unique identifier for the MCP connection" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/oauth/paypal/authorize": { - "get": { - "tags": [ - "paypal-oauth" - ], - "summary": "Authorize", - "description": "Start OAuth flow and redirect to authorization page.", - "operationId": "authorizepaypal_oauth", + "summary": "Authorize", + "description": "Start OAuth flow and redirect to authorization page.", + "operationId": "authorizepaypal_oauth", "parameters": [ { "name": "instance_id", @@ -6734,51 +6833,6 @@ } } }, - "/oauth/paypal/refresh_token": { - "post": { - "tags": [ - "paypal-oauth" - ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will 
automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_paypal_oauth_token", - "parameters": [ - { - "name": "instance_id", - "in": "query", - "required": true, - "schema": { - "type": "string", - "description": "Unique identifier for the MCP connection", - "title": "Instance Id" - }, - "description": "Unique identifier for the MCP connection" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, "/oauth/sentry/authorize": { "get": { "tags": [ @@ -6852,51 +6906,6 @@ } } }, - "/oauth/sentry/refresh_token": { - "post": { - "tags": [ - "sentry-oauth" - ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_sentry_oauth_token", - "parameters": [ - { - "name": "instance_id", - "in": "query", - "required": true, - "schema": { - "type": "string", - "description": "Unique identifier for the MCP connection", - "title": "Instance Id" - }, - "description": "Unique identifier for the MCP connection" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, "/oauth/netlify/authorize": { "get": { "tags": [ @@ -6970,14 +6979,14 @@ } } }, - "/oauth/netlify/refresh_token": { - "post": { + "/oauth/huggingface/authorize": { + "get": { "tags": [ - "netlify-oauth" + "huggingface-oauth" ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_netlify_oauth_token", + "summary": "Authorize", + "description": "Start OAuth flow and redirect to authorization page.", + "operationId": "authorizehuggingface_oauth", "parameters": [ { "name": "instance_id", @@ -6989,6 +6998,36 @@ "title": "Instance Id" }, "description": "Unique identifier for the MCP connection" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization" + }, + { + "name": "force_refresh", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "description": "Force re-authorization even if valid tokens exist", + "default": true, + "title": "Force Refresh" + }, + "description": "Force re-authorization even if valid tokens exist" } ], "responses": { @@ -6996,9 +7035,7 @@ "description": "Successful Response", "content": { 
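The `/oauth/{provider}/authorize` operations are plain GET redirects, so a client typically just builds the URL and sends the user's browser there rather than calling it with an HTTP library. Note that the scope delimiter varies by provider in this revision (comma-separated for Zoho Mail, YouTube, and Instagram; space-separated for Sharesight). A sketch, with the API base URL as a placeholder assumption:

```python
from urllib.parse import urlencode

BASE_URL = "https://api.example.com"  # placeholder base URL


def build_authorize_url(provider: str, instance_id: str,
                        redirect_url: str | None = None,
                        force_refresh: bool = True) -> str:
    """Build the OAuth authorize URL the user's browser should visit."""
    params = {
        "instance_id": instance_id,
        # force_refresh defaults to true in the spec; sent explicitly here
        "force_refresh": str(force_refresh).lower(),
    }
    if redirect_url:
        params["redirect_url"] = redirect_url
    return f"{BASE_URL}/oauth/{provider}/authorize?{urlencode(params)}"


url = build_authorize_url("huggingface", "inst-123",
                          redirect_url="https://app.example.com/oauth/done")
```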
"application/json": { - "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" - } + "schema": {} } } }, @@ -7015,14 +7052,14 @@ } } }, - "/oauth/huggingface/authorize": { + "/oauth/square/authorize": { "get": { "tags": [ - "huggingface-oauth" + "square-oauth" ], "summary": "Authorize", "description": "Start OAuth flow and redirect to authorization page.", - "operationId": "authorizehuggingface_oauth", + "operationId": "authorizesquare_oauth", "parameters": [ { "name": "instance_id", @@ -7088,14 +7125,14 @@ } } }, - "/oauth/huggingface/refresh_token": { - "post": { + "/oauth/clockwise/authorize": { + "get": { "tags": [ - "huggingface-oauth" + "clockwise-oauth" ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_huggingface_oauth_token", + "summary": "Authorize", + "description": "Start OAuth flow and redirect to authorization page.", + "operationId": "authorizeclockwise_oauth", "parameters": [ { "name": "instance_id", @@ -7107,6 +7144,36 @@ "title": "Instance Id" }, "description": "Unique identifier for the MCP connection" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after authorization", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization" + }, + { + "name": "force_refresh", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "description": "Force re-authorization even if valid tokens exist", + "default": true, + "title": "Force Refresh" + }, + "description": "Force re-authorization even if valid tokens exist" } ], "responses": { @@ -7114,9 +7181,7 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" - } + "schema": {} } } }, @@ -7133,14 +7198,14 @@ } } }, - "/oauth/square/authorize": { + "/oauth/jotform/authorize": { "get": { "tags": [ - "square-oauth" + "jotform-oauth" ], "summary": "Authorize", "description": "Start OAuth flow and redirect to authorization page.", - "operationId": "authorizesquare_oauth", + "operationId": "authorizejotform_oauth", "parameters": [ { "name": "instance_id", @@ -7206,14 +7271,14 @@ } } }, - "/oauth/square/refresh_token": { - "post": { + "/oauth/honeycomb/authorize": { + "get": { "tags": [ - "square-oauth" + "honeycomb-oauth" ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_square_oauth_token", + "summary": "Authorize", + "description": "Start OAuth flow and redirect to authorization page.", + "operationId": "authorizehoneycomb_oauth", "parameters": [ { "name": "instance_id", @@ -7225,6 +7290,36 @@ "title": "Instance Id" }, "description": "Unique identifier for the MCP connection" + }, + { + "name": "redirect_url", + "in": "query", + "required": false, + "schema": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "description": "Optional URL to redirect to after 
authorization", + "title": "Redirect Url" + }, + "description": "Optional URL to redirect to after authorization" + }, + { + "name": "force_refresh", + "in": "query", + "required": false, + "schema": { + "type": "boolean", + "description": "Force re-authorization even if valid tokens exist", + "default": true, + "title": "Force Refresh" + }, + "description": "Force re-authorization even if valid tokens exist" } ], "responses": { @@ -7232,9 +7327,7 @@ "description": "Successful Response", "content": { "application/json": { - "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" - } + "schema": {} } } }, @@ -7251,14 +7344,14 @@ } } }, - "/oauth/clockwise/authorize": { + "/oauth/amplitude/authorize": { "get": { "tags": [ - "clockwise-oauth" + "amplitude-oauth" ], "summary": "Authorize", "description": "Start OAuth flow and redirect to authorization page.", - "operationId": "authorizeclockwise_oauth", + "operationId": "authorizeamplitude_oauth", "parameters": [ { "name": "instance_id", @@ -7324,34 +7417,47 @@ } } }, - "/oauth/clockwise/refresh_token": { + "/sandbox/{server_name}": { "post": { "tags": [ - "clockwise-oauth" + "sandbox" + ], + "summary": "Acquire a sandbox", + "description": "Acquire an idle sandbox instance for a specific MCP server. The sandbox will be marked as 'occupied'. Optionally specify a test_account_email to acquire a specific test account.", + "operationId": "create_sandbox_sandbox__server_name__post", + "security": [ + { + "HTTPBearer": [] + } ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_clockwise_oauth_token", "parameters": [ { - "name": "instance_id", - "in": "query", + "name": "server_name", + "in": "path", "required": true, "schema": { - "type": "string", - "description": "Unique identifier for the MCP connection", - "title": "Instance Id" + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "The MCP server name" }, - "description": "Unique identifier for the MCP connection" + "description": "The MCP server name" } ], + "requestBody": { + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/AcquireSandboxRequest" + } + } + } + }, "responses": { - "200": { + "201": { "description": "Successful Response", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" + "$ref": "#/components/schemas/CreateSandboxResponse" } } } @@ -7369,55 +7475,40 @@ } } }, - "/oauth/jotform/authorize": { + "/sandbox/{server_name}/{sandbox_id}": { "get": { "tags": [ - "jotform-oauth" + "sandbox" + ], + "summary": "Get sandbox details", + "description": "Retrieve detailed information about a specific sandbox instance.", + "operationId": "get_sandbox_sandbox__server_name___sandbox_id__get", + "security": [ + { + "HTTPBearer": [] + } ], - "summary": "Authorize", - "description": "Start OAuth flow and redirect to authorization page.", - "operationId": "authorizejotform_oauth", "parameters": [ { - "name": "instance_id", - "in": "query", + "name": "server_name", + "in": "path", "required": true, "schema": { - "type": "string", - "description": "Unique identifier for the MCP connection", - "title": "Instance Id" - }, - "description": "Unique identifier for the MCP connection" - }, - { - "name": "redirect_url", - 
"in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "description": "Optional URL to redirect to after authorization", - "title": "Redirect Url" + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "The MCP server name" }, - "description": "Optional URL to redirect to after authorization" + "description": "The MCP server name" }, { - "name": "force_refresh", - "in": "query", - "required": false, + "name": "sandbox_id", + "in": "path", + "required": true, "schema": { - "type": "boolean", - "description": "Force re-authorization even if valid tokens exist", - "default": true, - "title": "Force Refresh" + "type": "string", + "description": "The unique sandbox identifier", + "title": "Sandbox Id" }, - "description": "Force re-authorization even if valid tokens exist" + "description": "The unique sandbox identifier" } ], "responses": { @@ -7425,7 +7516,9 @@ "description": "Successful Response", "content": { "application/json": { - "schema": {} + "schema": { + "$ref": "#/components/schemas/SandboxInfo" + } } } }, @@ -7440,27 +7533,40 @@ } } } - } - }, - "/oauth/jotform/refresh_token": { - "post": { + }, + "delete": { "tags": [ - "jotform-oauth" + "sandbox" + ], + "summary": "Release sandbox", + "description": "Release an occupied sandbox back to idle state and marks the sandbox as available for reuse.", + "operationId": "delete_sandbox_sandbox__server_name___sandbox_id__delete", + "security": [ + { + "HTTPBearer": [] + } ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_jotform_oauth_token", "parameters": [ { - "name": "instance_id", - "in": "query", + "name": "server_name", + "in": "path", + "required": true, + "schema": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "The MCP server name" + }, + "description": "The MCP server name" + }, + { + "name": "sandbox_id", + "in": "path", "required": true, "schema": { "type": "string", - "description": "Unique identifier for the MCP connection", - "title": "Instance Id" + "description": "The unique sandbox identifier", + "title": "Sandbox Id" }, - "description": "Unique identifier for the MCP connection" + "description": "The unique sandbox identifier" } ], "responses": { @@ -7469,7 +7575,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" + "$ref": "#/components/schemas/ReleaseSandboxResponse" } } } @@ -7487,55 +7593,40 @@ } } }, - "/oauth/honeycomb/authorize": { - "get": { + "/sandbox/{server_name}/{sandbox_id}/reset": { + "post": { "tags": [ - "honeycomb-oauth" + "sandbox" + ], + "summary": "Reset sandbox to initial state", + "description": "Reset the sandbox to its initial empty state, clearing all data while maintaining the sandbox instance.", + "operationId": "reset_sandbox_sandbox__server_name___sandbox_id__reset_post", + "security": [ + { + "HTTPBearer": [] + } ], - "summary": "Authorize", - "description": "Start OAuth flow and redirect to authorization page.", - "operationId": "authorizehoneycomb_oauth", "parameters": [ { - "name": "instance_id", - "in": "query", + "name": "server_name", + "in": "path", "required": true, "schema": { - "type": "string", - "description": "Unique identifier for the MCP connection", 
- "title": "Instance Id" - }, - "description": "Unique identifier for the MCP connection" - }, - { - "name": "redirect_url", - "in": "query", - "required": false, - "schema": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "description": "Optional URL to redirect to after authorization", - "title": "Redirect Url" + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "The MCP server name" }, - "description": "Optional URL to redirect to after authorization" + "description": "The MCP server name" }, { - "name": "force_refresh", - "in": "query", - "required": false, + "name": "sandbox_id", + "in": "path", + "required": true, "schema": { - "type": "boolean", - "description": "Force re-authorization even if valid tokens exist", - "default": true, - "title": "Force Refresh" + "type": "string", + "description": "The unique sandbox identifier", + "title": "Sandbox Id" }, - "description": "Force re-authorization even if valid tokens exist" + "description": "The unique sandbox identifier" } ], "responses": { @@ -7543,7 +7634,9 @@ "description": "Successful Response", "content": { "application/json": { - "schema": {} + "schema": { + "$ref": "#/components/schemas/ResetSandboxResponse" + } } } }, @@ -7560,34 +7653,49 @@ } } }, - "/oauth/honeycomb/refresh_token": { + "/sandbox/jira/{sandbox_id}/initialize": { "post": { "tags": [ - "honeycomb-oauth" + "sandbox" + ], + "summary": "Initialize jira sandbox with data", + "description": "Initialize the sandbox with jira-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_jira__sandbox_id__initialize_post", + "security": [ + { + "HTTPBearer": [] + } ], - "summary": "Refresh Token", - "description": "Refresh OAuth token for an MCP connection.\n\nThis endpoint triggers a token refresh by making a list_tools request to the MCP server.\nThe MCP SDK will automatically detect if the token is expired and refresh it if a refresh_token is available.", - "operationId": "refresh_honeycomb_oauth_token", "parameters": [ { - "name": "instance_id", - "in": "query", + "name": "sandbox_id", + "in": "path", "required": true, "schema": { "type": "string", - "description": "Unique identifier for the MCP connection", - "title": "Instance Id" + "description": "The unique sandbox identifier", + "title": "Sandbox Id" }, - "description": "Unique identifier for the MCP connection" + "description": "The unique sandbox identifier" } ], + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/JiraData-Input" + } + } + } + }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/MCPOAuthSuccessResponse" + "$ref": "#/components/schemas/InitializeSandboxResponse" } } } @@ -7605,14 +7713,14 @@ } } }, - "/sandbox/{server_name}": { - "post": { + "/sandbox/jira/{sandbox_id}/dump": { + "get": { "tags": [ "sandbox" ], - "summary": "Acquire a sandbox", - "description": "Acquire an idle sandbox instance for a specific MCP server. 
The sandbox will be marked as 'occupied'.", - "operationId": "create_sandbox_sandbox__server_name__post", + "summary": "Export jira sandbox data", + "description": "Export all data from the sandbox in the same format used for initialization.", + "operationId": "dump_sandbox_sandbox_jira__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -7620,23 +7728,24 @@ ], "parameters": [ { - "name": "server_name", + "name": "sandbox_id", "in": "path", "required": true, "schema": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "The MCP server name" + "type": "string", + "description": "The unique sandbox identifier", + "title": "Sandbox Id" }, - "description": "The MCP server name" + "description": "The unique sandbox identifier" } ], "responses": { - "201": { + "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/CreateSandboxResponse" + "$ref": "#/components/schemas/DumpSandboxResponse_JiraData_" } } } @@ -7654,14 +7763,14 @@ } } }, - "/sandbox/{server_name}/{sandbox_id}": { - "get": { + "/sandbox/google_calendar/{sandbox_id}/initialize": { + "post": { "tags": [ "sandbox" ], - "summary": "Get sandbox details", - "description": "Retrieve detailed information about a specific sandbox instance.", - "operationId": "get_sandbox_sandbox__server_name___sandbox_id__get", + "summary": "Initialize google_calendar sandbox with data", + "description": "Initialize the sandbox with google_calendar-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_google_calendar__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -7669,17 +7778,7 @@ ], "parameters": [ { - "name": "server_name", - "in": "path", - "required": true, - "schema": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "The MCP server name" - }, - "description": "The MCP server name" - }, - { - "name": "sandbox_id", + "name": "sandbox_id", "in": "path", "required": true, "schema": { @@ -7690,71 +7789,23 @@ "description": "The unique sandbox identifier" } ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/SandboxInfo" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } + "requestBody": { + "required": true, + "content": { + "application/json": { + "schema": { + "$ref": "#/components/schemas/GoogleCalendarData-Input" } } } - } - }, - "delete": { - "tags": [ - "sandbox" - ], - "summary": "Release sandbox", - "description": "Release an occupied sandbox back to idle state and marks the sandbox as available for reuse.", - "operationId": "delete_sandbox_sandbox__server_name___sandbox_id__delete", - "security": [ - { - "HTTPBearer": [] - } - ], - "parameters": [ - { - "name": "server_name", - "in": "path", - "required": true, - "schema": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "The MCP server name" - }, - "description": "The MCP server name" - }, - { - "name": "sandbox_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The unique sandbox identifier", - "title": "Sandbox Id" - }, - "description": "The unique sandbox identifier" - } - ], + }, "responses": { "200": { "description": "Successful Response", "content": { "application/json": { "schema": { - "$ref": 
"#/components/schemas/ReleaseSandboxResponse" + "$ref": "#/components/schemas/InitializeSandboxResponse" } } } @@ -7772,30 +7823,20 @@ } } }, - "/sandbox/{server_name}/{sandbox_id}/reset": { - "post": { + "/sandbox/google_calendar/{sandbox_id}/dump": { + "get": { "tags": [ "sandbox" ], - "summary": "Reset sandbox to initial state", - "description": "Reset the sandbox to its initial empty state, clearing all data while maintaining the sandbox instance.", - "operationId": "reset_sandbox_sandbox__server_name___sandbox_id__reset_post", + "summary": "Export google_calendar sandbox data", + "description": "Export all data from the sandbox in the same format used for initialization.", + "operationId": "dump_sandbox_sandbox_google_calendar__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] } ], "parameters": [ - { - "name": "server_name", - "in": "path", - "required": true, - "schema": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "The MCP server name" - }, - "description": "The MCP server name" - }, { "name": "sandbox_id", "in": "path", @@ -7814,7 +7855,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/ResetSandboxResponse" + "$ref": "#/components/schemas/DumpSandboxResponse_GoogleCalendarData_" } } } @@ -7832,14 +7873,14 @@ } } }, - "/sandbox/jira/{sandbox_id}/initialize": { + "/sandbox/gmail/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize jira sandbox with data", - "description": "Initialize the sandbox with jira-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_jira__sandbox_id__initialize_post", + "summary": "Initialize gmail sandbox with data", + "description": "Initialize the sandbox with gmail-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_gmail__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -7863,7 +7904,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/JiraData-Input" + "$ref": "#/components/schemas/GmailData" } } } @@ -7892,14 +7933,14 @@ } } }, - "/sandbox/jira/{sandbox_id}/dump": { + "/sandbox/gmail/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export jira sandbox data", + "summary": "Export gmail sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_jira__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_gmail__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -7924,7 +7965,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_JiraData_" + "$ref": "#/components/schemas/DumpSandboxResponse_GmailData_" } } } @@ -7942,14 +7983,14 @@ } } }, - "/sandbox/google_calendar/{sandbox_id}/initialize": { + "/sandbox/google_docs/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize google_calendar sandbox with data", - "description": "Initialize the sandbox with google_calendar-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_google_calendar__sandbox_id__initialize_post", + "summary": "Initialize google_docs sandbox with data", + "description": "Initialize the sandbox with google_docs-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_google_docs__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -7973,7 +8014,7 @@ "content": { "application/json": { 
"schema": { - "$ref": "#/components/schemas/GoogleCalendarData-Input" + "$ref": "#/components/schemas/GoogleDocsData" } } } @@ -8002,14 +8043,14 @@ } } }, - "/sandbox/google_calendar/{sandbox_id}/dump": { + "/sandbox/google_docs/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export google_calendar sandbox data", + "summary": "Export google_docs sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_google_calendar__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_google_docs__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -8034,7 +8075,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_GoogleCalendarData_" + "$ref": "#/components/schemas/DumpSandboxResponse_GoogleDocsData_" } } } @@ -8052,14 +8093,14 @@ } } }, - "/sandbox/gmail/{sandbox_id}/initialize": { + "/sandbox/google_drive/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize gmail sandbox with data", - "description": "Initialize the sandbox with gmail-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_gmail__sandbox_id__initialize_post", + "summary": "Initialize google_drive sandbox with data", + "description": "Initialize the sandbox with google_drive-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_google_drive__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -8083,7 +8124,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GmailData" + "$ref": "#/components/schemas/GoogleDriveData" } } } @@ -8112,14 +8153,14 @@ } } }, - "/sandbox/gmail/{sandbox_id}/dump": { + "/sandbox/google_drive/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export gmail sandbox data", + "summary": "Export google_drive sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_gmail__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_google_drive__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -8144,7 +8185,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_GmailData_" + "$ref": "#/components/schemas/DumpSandboxResponse_GoogleDriveData_" } } } @@ -8162,14 +8203,14 @@ } } }, - "/sandbox/google_docs/{sandbox_id}/initialize": { + "/sandbox/google_forms/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize google_docs sandbox with data", - "description": "Initialize the sandbox with google_docs-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_google_docs__sandbox_id__initialize_post", + "summary": "Initialize google_forms sandbox with data", + "description": "Initialize the sandbox with google_forms-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_google_forms__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -8193,7 +8234,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GoogleDocsData" + "$ref": "#/components/schemas/GoogleFormsData-Input" } } } @@ -8222,14 +8263,14 @@ } } }, - "/sandbox/google_docs/{sandbox_id}/dump": { + "/sandbox/google_forms/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export google_docs sandbox data", + "summary": 
"Export google_forms sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_google_docs__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_google_forms__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -8254,7 +8295,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_GoogleDocsData_" + "$ref": "#/components/schemas/DumpSandboxResponse_GoogleFormsData_" } } } @@ -8272,14 +8313,14 @@ } } }, - "/sandbox/google_drive/{sandbox_id}/initialize": { + "/sandbox/google_sheets/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize google_drive sandbox with data", - "description": "Initialize the sandbox with google_drive-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_google_drive__sandbox_id__initialize_post", + "summary": "Initialize google_sheets sandbox with data", + "description": "Initialize the sandbox with google_sheets-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_google_sheets__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -8303,7 +8344,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GoogleDriveData" + "$ref": "#/components/schemas/GoogleSheetsData-Input" } } } @@ -8332,14 +8373,14 @@ } } }, - "/sandbox/google_drive/{sandbox_id}/dump": { + "/sandbox/google_sheets/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export google_drive sandbox data", + "summary": "Export google_sheets sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_google_drive__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_google_sheets__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -8364,7 +8405,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_GoogleDriveData_" + "$ref": "#/components/schemas/DumpSandboxResponse_GoogleSheetsData_" } } } @@ -8382,14 +8423,14 @@ } } }, - "/sandbox/google_forms/{sandbox_id}/initialize": { + "/sandbox/salesforce/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize google_forms sandbox with data", - "description": "Initialize the sandbox with google_forms-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_google_forms__sandbox_id__initialize_post", + "summary": "Initialize salesforce sandbox with data", + "description": "Initialize the sandbox with salesforce-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_salesforce__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -8413,7 +8454,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GoogleFormsData-Input" + "$ref": "#/components/schemas/SalesforceData-Input" } } } @@ -8442,14 +8483,14 @@ } } }, - "/sandbox/google_forms/{sandbox_id}/dump": { + "/sandbox/salesforce/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export google_forms sandbox data", + "summary": "Export salesforce sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_google_forms__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_salesforce__sandbox_id__dump_get", 
"security": [ { "HTTPBearer": [] @@ -8474,7 +8515,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_GoogleFormsData_" + "$ref": "#/components/schemas/DumpSandboxResponse_SalesforceData_" } } } @@ -8492,14 +8533,14 @@ } } }, - "/sandbox/google_sheets/{sandbox_id}/initialize": { + "/sandbox/onedrive/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize google_sheets sandbox with data", - "description": "Initialize the sandbox with google_sheets-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_google_sheets__sandbox_id__initialize_post", + "summary": "Initialize onedrive sandbox with data", + "description": "Initialize the sandbox with onedrive-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_onedrive__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -8523,7 +8564,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/GoogleSheetsData-Input" + "$ref": "#/components/schemas/OneDriveData-Input" } } } @@ -8552,14 +8593,14 @@ } } }, - "/sandbox/google_sheets/{sandbox_id}/dump": { + "/sandbox/onedrive/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export google_sheets sandbox data", + "summary": "Export onedrive sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_google_sheets__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_onedrive__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -8584,7 +8625,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_GoogleSheetsData_" + "$ref": "#/components/schemas/DumpSandboxResponse_OneDriveData_" } } } @@ -8602,14 +8643,14 @@ } } }, - "/sandbox/salesforce/{sandbox_id}/initialize": { + "/sandbox/microsoft_teams/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize salesforce sandbox with data", - "description": "Initialize the sandbox with salesforce-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_salesforce__sandbox_id__initialize_post", + "summary": "Initialize microsoft_teams sandbox with data", + "description": "Initialize the sandbox with microsoft_teams-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_microsoft_teams__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -8633,7 +8674,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/SalesforceData" + "$ref": "#/components/schemas/MsTeamsData-Input" } } } @@ -8662,14 +8703,14 @@ } } }, - "/sandbox/salesforce/{sandbox_id}/dump": { + "/sandbox/microsoft_teams/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export salesforce sandbox data", + "summary": "Export microsoft_teams sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_salesforce__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_microsoft_teams__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -8694,7 +8735,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_SalesforceData_" + "$ref": "#/components/schemas/DumpSandboxResponse_MsTeamsData_" } } } @@ -8712,14 +8753,14 @@ } } }, - 
"/sandbox/onedrive/{sandbox_id}/initialize": { + "/sandbox/outlook_mail/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" ], - "summary": "Initialize onedrive sandbox with data", - "description": "Initialize the sandbox with onedrive-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_onedrive__sandbox_id__initialize_post", + "summary": "Initialize outlook_mail sandbox with data", + "description": "Initialize the sandbox with outlook_mail-specific data following the defined schema.", + "operationId": "initialize_sandbox_sandbox_outlook_mail__sandbox_id__initialize_post", "security": [ { "HTTPBearer": [] @@ -8743,7 +8784,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/OneDriveData-Input" + "$ref": "#/components/schemas/OutlookMailData" } } } @@ -8772,14 +8813,14 @@ } } }, - "/sandbox/onedrive/{sandbox_id}/dump": { + "/sandbox/outlook_mail/{sandbox_id}/dump": { "get": { "tags": [ "sandbox" ], - "summary": "Export onedrive sandbox data", + "summary": "Export outlook_mail sandbox data", "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_onedrive__sandbox_id__dump_get", + "operationId": "dump_sandbox_sandbox_outlook_mail__sandbox_id__dump_get", "security": [ { "HTTPBearer": [] @@ -8804,7 +8845,7 @@ "content": { "application/json": { "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_OneDriveData_" + "$ref": "#/components/schemas/DumpSandboxResponse_OutlookMailData_" } } } @@ -8822,227 +8863,7 @@ } } }, - "/sandbox/microsoft_teams/{sandbox_id}/initialize": { - "post": { - "tags": [ - "sandbox" - ], - "summary": "Initialize microsoft_teams sandbox with data", - "description": "Initialize the sandbox with microsoft_teams-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_microsoft_teams__sandbox_id__initialize_post", - "security": [ - { - "HTTPBearer": [] - } - ], - "parameters": [ - { - "name": "sandbox_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The unique sandbox identifier", - "title": "Sandbox Id" - }, - "description": "The unique sandbox identifier" - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/MsTeamsData-Input" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/InitializeSandboxResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/sandbox/microsoft_teams/{sandbox_id}/dump": { - "get": { - "tags": [ - "sandbox" - ], - "summary": "Export microsoft_teams sandbox data", - "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_microsoft_teams__sandbox_id__dump_get", - "security": [ - { - "HTTPBearer": [] - } - ], - "parameters": [ - { - "name": "sandbox_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The unique sandbox identifier", - "title": "Sandbox Id" - }, - "description": "The unique sandbox identifier" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": 
"#/components/schemas/DumpSandboxResponse_MsTeamsData_" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/sandbox/outlook_mail/{sandbox_id}/initialize": { - "post": { - "tags": [ - "sandbox" - ], - "summary": "Initialize outlook_mail sandbox with data", - "description": "Initialize the sandbox with outlook_mail-specific data following the defined schema.", - "operationId": "initialize_sandbox_sandbox_outlook_mail__sandbox_id__initialize_post", - "security": [ - { - "HTTPBearer": [] - } - ], - "parameters": [ - { - "name": "sandbox_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The unique sandbox identifier", - "title": "Sandbox Id" - }, - "description": "The unique sandbox identifier" - } - ], - "requestBody": { - "required": true, - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/OutlookMailData" - } - } - } - }, - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/InitializeSandboxResponse" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/sandbox/outlook_mail/{sandbox_id}/dump": { - "get": { - "tags": [ - "sandbox" - ], - "summary": "Export outlook_mail sandbox data", - "description": "Export all data from the sandbox in the same format used for initialization.", - "operationId": "dump_sandbox_sandbox_outlook_mail__sandbox_id__dump_get", - "security": [ - { - "HTTPBearer": [] - } - ], - "parameters": [ - { - "name": "sandbox_id", - "in": "path", - "required": true, - "schema": { - "type": "string", - "description": "The unique sandbox identifier", - "title": "Sandbox Id" - }, - "description": "The unique sandbox identifier" - } - ], - "responses": { - "200": { - "description": "Successful Response", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/DumpSandboxResponse_OutlookMailData_" - } - } - } - }, - "422": { - "description": "Validation Error", - "content": { - "application/json": { - "schema": { - "$ref": "#/components/schemas/HTTPValidationError" - } - } - } - } - } - } - }, - "/sandbox/Cal.com/{sandbox_id}/initialize": { + "/sandbox/Cal.com/{sandbox_id}/initialize": { "post": { "tags": [ "sandbox" @@ -11795,42 +11616,252 @@ }, "components": { "schemas": { - "AirtableData-Input": { + "Account": { "properties": { - "tables": { + "name": { + "type": "string", + "maxLength": 255, + "minLength": 1, + "title": "Name", + "description": "Company name" + }, + "industry": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/AirtableTable" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Tables", - "description": "List of tables (simplified, assumes single base)" - } - }, - "type": "object", - "title": "AirtableData", - "description": "Complete Airtable sandbox data structure.\n\nHierarchy:\n- Bases contain Tables\n- Tables contain Fields (schema) and Records (data)\n- Records contain field values\n\nNote: For sandbox purposes, we typically work with a single base\nand multiple tables within it." 
- }, - "AirtableData-Output": { - "additionalProperties": true, - "type": "object" - }, - "AirtableField": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Field name" + "title": "Industry", + "description": "e.g., Technology, Healthcare, Finance" }, "type": { - "type": "string", - "title": "Type", + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Type", + "description": "e.g., Customer, Prospect, Partner" + }, + "phone": { + "anyOf": [ + { + "type": "string", + "maxLength": 40 + }, + { + "type": "null" + } + ], + "title": "Phone" + }, + "website": { + "anyOf": [ + { + "type": "string", + "maxLength": 255 + }, + { + "type": "null" + } + ], + "title": "Website" + }, + "revenue": { + "anyOf": [ + { + "type": "number", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Revenue", + "description": "Annual revenue" + }, + "employees": { + "anyOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "type": "null" + } + ], + "title": "Employees" + }, + "address": { + "anyOf": [ + { + "$ref": "#/components/schemas/Address" + }, + { + "type": "null" + } + ] + }, + "description": { + "anyOf": [ + { + "type": "string", + "maxLength": 32000 + }, + { + "type": "null" + } + ], + "title": "Description" + }, + "rating": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Rating", + "description": "e.g., Hot, Warm, Cold" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "Account", + "description": "Company/organization record" + }, + "AcquireSandboxRequest": { + "properties": { + "test_account_email": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Test Account Email", + "description": "Optional email of a specific test account to acquire. If provided, the system will attempt to acquire the sandbox associated with this test account email instead of a random idle sandbox." + } + }, + "type": "object", + "title": "AcquireSandboxRequest", + "description": "Request model for acquiring a sandbox" + }, + "Address": { + "properties": { + "street": { + "anyOf": [ + { + "type": "string", + "maxLength": 255 + }, + { + "type": "null" + } + ], + "title": "Street" + }, + "city": { + "anyOf": [ + { + "type": "string", + "maxLength": 40 + }, + { + "type": "null" + } + ], + "title": "City" + }, + "state": { + "anyOf": [ + { + "type": "string", + "maxLength": 80 + }, + { + "type": "null" + } + ], + "title": "State" + }, + "postal_code": { + "anyOf": [ + { + "type": "string", + "maxLength": 20 + }, + { + "type": "null" + } + ], + "title": "Postal Code" + }, + "country": { + "anyOf": [ + { + "type": "string", + "maxLength": 80 + }, + { + "type": "null" + } + ], + "title": "Country" + } + }, + "type": "object", + "title": "Address", + "description": "Reusable address structure" + }, + "AirtableData-Input": { + "properties": { + "tables": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/AirtableTable" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Tables", + "description": "List of tables (simplified, assumes single base)" + } + }, + "type": "object", + "title": "AirtableData", + "description": "Complete Airtable sandbox data structure.\n\nHierarchy:\n- Bases contain Tables\n- Tables contain Fields (schema) and Records (data)\n- Records contain field values\n\nNote: For sandbox purposes, we typically work with a single base\nand multiple tables within it." 
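The new `Account` and `Address` schemas are the building blocks for the Salesforce sandbox seed data; only `name` is required on `Account`, and `Address` is a reusable sub-object. Below is a payload consistent with the fields shown in this hunk; the top-level `accounts` wrapper is an assumption, since `SalesforceData-Input` itself is not expanded here:

```python
# Hypothetical seed for the salesforce sandbox. Field names follow the
# Account/Address schemas above; the top-level "accounts" key is assumed.
account = {
    "name": "Acme Corp",              # the only required field
    "industry": "Technology",
    "type": "Customer",
    "phone": "+1-555-0100",
    "website": "https://acme.example.com",
    "revenue": 12_500_000,            # annual revenue, must be >= 0
    "employees": 250,
    "address": {
        "street": "1 Main St",
        "city": "Springfield",
        "state": "IL",
        "postal_code": "62701",
        "country": "USA",
    },
    "rating": "Hot",                  # e.g., Hot, Warm, Cold
}
seed = {"accounts": [account]}
```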
+ }, + "AirtableData-Output": { + "additionalProperties": true, + "type": "object" + }, + "AirtableField": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Field name" + }, + "type": { + "type": "string", + "title": "Type", "description": "Field type (singleLineText, multilineText, number, singleSelect, multipleSelects, date, checkbox, url, email, phoneNumber, attachment, etc.)" }, "options": { @@ -12004,7 +12035,7 @@ "title": "Name", "description": "Project name" }, - "notes": { + "description": { "anyOf": [ { "type": "string" @@ -12013,8 +12044,8 @@ "type": "null" } ], - "title": "Notes", - "description": "Project notes/description" + "title": "Description", + "description": "Project description" }, "tasks": { "anyOf": [ @@ -12085,7 +12116,7 @@ "title": "Name", "description": "Task name" }, - "notes": { + "description": { "anyOf": [ { "type": "string" @@ -12094,8 +12125,8 @@ "type": "null" } ], - "title": "Notes", - "description": "Task notes/description" + "title": "Description", + "description": "Task description" }, "completed": { "anyOf": [ @@ -12122,7 +12153,7 @@ "title": "Assignee", "description": "User GID assigned to task" }, - "due_on": { + "due_date": { "anyOf": [ { "type": "string" @@ -12131,10 +12162,10 @@ "type": "null" } ], - "title": "Due On", + "title": "Due Date", "description": "Due date (YYYY-MM-DD)" }, - "due_at": { + "due_datetime": { "anyOf": [ { "type": "string" @@ -12143,10 +12174,10 @@ "type": "null" } ], - "title": "Due At", + "title": "Due Datetime", "description": "Due datetime (ISO 8601)" }, - "start_on": { + "start_date": { "anyOf": [ { "type": "string" @@ -12155,7 +12186,7 @@ "type": "null" } ], - "title": "Start On", + "title": "Start Date", "description": "Start date (YYYY-MM-DD)" }, "stories": { @@ -12242,9 +12273,10 @@ }, "BigQueryDataset": { "properties": { - "datasetReference": { - "$ref": "#/components/schemas/BigQueryDatasetReference", - "description": "Dataset reference" + "id": { + "type": "string", + "title": "Id", + "description": "Dataset ID" }, "description": { "anyOf": [ @@ -12264,7 +12296,7 @@ "description": "Dataset location", "default": "US" }, - "creationTime": { + "created_at": { "anyOf": [ { "type": "string" @@ -12273,10 +12305,10 @@ "type": "null" } ], - "title": "Creationtime", + "title": "Created At", "description": "Creation timestamp" }, - "lastModifiedTime": { + "updated_at": { "anyOf": [ { "type": "string" @@ -12285,50 +12317,30 @@ "type": "null" } ], - "title": "Lastmodifiedtime", + "title": "Updated At", "description": "Last modified timestamp" } }, "type": "object", "required": [ - "datasetReference" + "id" ], "title": "BigQueryDataset", - "description": "BigQuery dataset object - matches API format" + "description": "BigQuery dataset" }, - "BigQueryDatasetReference": { + "BigQueryField-Input": { "properties": { - "datasetId": { + "name": { "type": "string", - "title": "Datasetid", - "description": "Dataset ID" - } - }, - "type": "object", - "required": [ - "datasetId" - ], - "title": "BigQueryDatasetReference", - "description": "Reference to a BigQuery dataset" - }, - "BigQueryTable-Input": { - "properties": { - "tableReference": { - "$ref": "#/components/schemas/BigQueryTableReference", - "description": "Table reference" + "title": "Name", + "description": "Field name" }, - "schema": { - "anyOf": [ - { - "$ref": "#/components/schemas/BigQueryTableSchema-Input" - }, - { - "type": "null" - } - ], - "description": "Table schema" + "type": { + "type": "string", + "title": "Type", + 
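The Asana schema renames in this hunk (`notes` to `description`, `due_on` to `due_date`, `due_at` to `due_datetime`, `start_on` to `start_date`) will break seed data recorded against the old field names. An illustrative migration helper, purely as a sketch:

```python
# Illustrative migration of pre-rename Asana sandbox seeds to the new names.
RENAMES = {
    "notes": "description",
    "due_on": "due_date",
    "due_at": "due_datetime",
    "start_on": "start_date",
}


def migrate_task(task: dict) -> dict:
    """Rename old Asana sandbox fields to the new schema, keeping the rest."""
    return {RENAMES.get(key, key): value for key, value in task.items()}


old = {"name": "Ship v2", "notes": "Final QA pass", "due_on": "2025-02-01"}
new = migrate_task(old)  # {'name': ..., 'description': ..., 'due_date': ...}
```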
"description": "Field type (STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD, etc.)" }, - "description": { + "mode": { "anyOf": [ { "type": "string" @@ -12337,10 +12349,11 @@ "type": "null" } ], - "title": "Description", - "description": "Table description" + "title": "Mode", + "description": "Field mode (NULLABLE, REQUIRED, REPEATED)", + "default": "NULLABLE" }, - "numRows": { + "description": { "anyOf": [ { "type": "string" @@ -12349,47 +12362,46 @@ "type": "null" } ], - "title": "Numrows", - "description": "Number of rows" + "title": "Description", + "description": "Field description" }, - "creationTime": { + "fields": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/BigQueryField-Input" + }, + "type": "array" }, { "type": "null" } ], - "title": "Creationtime", - "description": "Creation timestamp" + "title": "Fields", + "description": "Nested fields for RECORD type" } }, "type": "object", "required": [ - "tableReference" + "name", + "type" ], - "title": "BigQueryTable", - "description": "BigQuery table object - matches API format" + "title": "BigQueryField", + "description": "Schema field for a BigQuery table" }, - "BigQueryTable-Output": { + "BigQueryField-Output": { "properties": { - "tableReference": { - "$ref": "#/components/schemas/BigQueryTableReference", - "description": "Table reference" + "name": { + "type": "string", + "title": "Name", + "description": "Field name" }, - "schema": { - "anyOf": [ - { - "$ref": "#/components/schemas/BigQueryTableSchema-Output" - }, - { - "type": "null" - } - ], - "description": "Table schema" + "type": { + "type": "string", + "title": "Type", + "description": "Field type (STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD, etc.)" }, - "description": { + "mode": { "anyOf": [ { "type": "string" @@ -12398,10 +12410,11 @@ "type": "null" } ], - "title": "Description", - "description": "Table description" + "title": "Mode", + "description": "Field mode (NULLABLE, REQUIRED, REPEATED)", + "default": "NULLABLE" }, - "numRows": { + "description": { "anyOf": [ { "type": "string" @@ -12410,42 +12423,46 @@ "type": "null" } ], - "title": "Numrows", - "description": "Number of rows" + "title": "Description", + "description": "Field description" }, - "creationTime": { + "fields": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/BigQueryField-Output" + }, + "type": "array" }, { "type": "null" } ], - "title": "Creationtime", - "description": "Creation timestamp" + "title": "Fields", + "description": "Nested fields for RECORD type" } }, "type": "object", "required": [ - "tableReference" + "name", + "type" ], - "title": "BigQueryTable", - "description": "BigQuery table object - matches API format" + "title": "BigQueryField", + "description": "Schema field for a BigQuery table" }, - "BigQueryTableFieldSchema-Input": { + "BigQueryTable-Input": { "properties": { - "name": { + "dataset_id": { "type": "string", - "title": "Name", - "description": "Field name" + "title": "Dataset Id", + "description": "Parent dataset ID" }, - "type": { + "id": { "type": "string", - "title": "Type", - "description": "Field type (STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD, etc.)" + "title": "Id", + "description": "Table ID" }, - "mode": { + "description": { "anyOf": [ { "type": "string" @@ -12454,11 +12471,25 @@ "type": "null" } ], - "title": "Mode", - "description": "Field mode (NULLABLE, REQUIRED, REPEATED)", - "default": "NULLABLE" + "title": "Description", + "description": "Table description" }, - "description": { + 
"fields": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/BigQueryField-Input" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Fields", + "description": "Table schema fields" + }, + "row_count": { "anyOf": [ { "type": "string" @@ -12467,46 +12498,43 @@ "type": "null" } ], - "title": "Description", - "description": "Field description" + "title": "Row Count", + "description": "Number of rows" }, - "fields": { + "created_at": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/BigQueryTableFieldSchema-Input" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Fields", - "description": "Nested fields for RECORD type" + "title": "Created At", + "description": "Creation timestamp" } }, "type": "object", "required": [ - "name", - "type" + "dataset_id", + "id" ], - "title": "BigQueryTableFieldSchema", - "description": "Schema for a BigQuery table field" + "title": "BigQueryTable", + "description": "BigQuery table" }, - "BigQueryTableFieldSchema-Output": { + "BigQueryTable-Output": { "properties": { - "name": { + "dataset_id": { "type": "string", - "title": "Name", - "description": "Field name" + "title": "Dataset Id", + "description": "Parent dataset ID" }, - "type": { + "id": { "type": "string", - "title": "Type", - "description": "Field type (STRING, INTEGER, FLOAT, BOOLEAN, TIMESTAMP, RECORD, etc.)" + "title": "Id", + "description": "Table ID" }, - "mode": { + "description": { "anyOf": [ { "type": "string" @@ -12515,96 +12543,56 @@ "type": "null" } ], - "title": "Mode", - "description": "Field mode (NULLABLE, REQUIRED, REPEATED)", - "default": "NULLABLE" + "title": "Description", + "description": "Table description" }, - "description": { + "fields": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/BigQueryField-Output" + }, + "type": "array" }, { "type": "null" } ], - "title": "Description", - "description": "Field description" + "title": "Fields", + "description": "Table schema fields" }, - "fields": { + "row_count": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/BigQueryTableFieldSchema-Output" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Fields", - "description": "Nested fields for RECORD type" - } - }, - "type": "object", - "required": [ - "name", - "type" - ], - "title": "BigQueryTableFieldSchema", - "description": "Schema for a BigQuery table field" - }, - "BigQueryTableReference": { - "properties": { - "datasetId": { - "type": "string", - "title": "Datasetid", - "description": "Dataset ID" + "title": "Row Count", + "description": "Number of rows" }, - "tableId": { - "type": "string", - "title": "Tableid", - "description": "Table ID" + "created_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Created At", + "description": "Creation timestamp" } }, "type": "object", "required": [ - "datasetId", - "tableId" + "dataset_id", + "id" ], - "title": "BigQueryTableReference", - "description": "Reference to a BigQuery table" - }, - "BigQueryTableSchema-Input": { - "properties": { - "fields": { - "items": { - "$ref": "#/components/schemas/BigQueryTableFieldSchema-Input" - }, - "type": "array", - "title": "Fields", - "description": "List of table fields" - } - }, - "type": "object", - "title": "BigQueryTableSchema", - "description": "Schema for a BigQuery table" - }, - "BigQueryTableSchema-Output": { - "properties": { - "fields": { - "items": { - "$ref": "#/components/schemas/BigQueryTableFieldSchema-Output" - }, 
- "type": "array", - "title": "Fields", - "description": "List of table fields" - } - }, - "type": "object", - "title": "BigQueryTableSchema", - "description": "Schema for a BigQuery table" + "title": "BigQueryTable", + "description": "BigQuery table" }, "CalcomData-Input": { "properties": { @@ -12799,85 +12787,39 @@ "title": "CallToolResult", "description": "The server's response to a tool call." }, - "ClickUpComment": { + "Campaign": { "properties": { - "comment_text": { + "name": { "type": "string", - "title": "Comment Text", - "description": "Comment text content" - } - }, - "type": "object", - "required": [ - "comment_text" - ], - "title": "ClickUpComment", - "description": "ClickUp Comment object" - }, - "ClickUpData-Input": { - "properties": { - "spaces": { + "maxLength": 80, + "minLength": 1, + "title": "Name" + }, + "type": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ClickUpSpace" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Spaces", - "description": "List of spaces with nested objects" - } - }, - "type": "object", - "title": "ClickUpData", - "description": "Complete ClickUp sandbox data structure.\n\nNested hierarchy for initialization:\n- Spaces contain Folders and Lists (folderless)\n- Folders contain Lists\n- Lists contain Tasks\n- Tasks contain Comments" - }, - "ClickUpData-Output": { - "additionalProperties": true, - "type": "object" - }, - "ClickUpFolder": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Folder name" + "title": "Type", + "description": "e.g., Email, Webinar, Conference" }, - "lists": { + "status": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ClickUpList" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Lists", - "description": "Lists in this folder" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "ClickUpFolder", - "description": "ClickUp Folder object" - }, - "ClickUpList": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "List name" + "title": "Status", + "default": "Planned" }, - "content": { + "start_date": { "anyOf": [ { "type": "string" @@ -12886,134 +12828,97 @@ "type": "null" } ], - "title": "Content", - "description": "List description/content" + "title": "Start Date", + "description": "YYYY-MM-DD" }, - "tasks": { + "end_date": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ClickUpTask" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Tasks", - "description": "Tasks in this list" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "ClickUpList", - "description": "ClickUp List object" - }, - "ClickUpSpace": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Space name" + "title": "End Date", + "description": "YYYY-MM-DD" }, - "folders": { + "expected_revenue": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ClickUpFolder" - }, - "type": "array" + "type": "number", + "minimum": 0 }, { "type": "null" } ], - "title": "Folders", - "description": "Folders in this space" + "title": "Expected Revenue" }, - "lists": { + "budget": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ClickUpList" - }, - "type": "array" + "type": "number", + "minimum": 0 }, { "type": "null" } ], - "title": "Lists", - "description": "Folderless lists in this space" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "ClickUpSpace", - "description": 
"ClickUp Space object" - }, - "ClickUpTask": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Task name" + "title": "Budget" }, - "description": { + "actual_cost": { "anyOf": [ { - "type": "string" + "type": "number", + "minimum": 0 }, { "type": "null" } ], - "title": "Description", - "description": "Task description" + "title": "Actual Cost" }, - "priority": { + "description": { "anyOf": [ { - "type": "integer" + "type": "string", + "maxLength": 32000 }, { "type": "null" } ], - "title": "Priority", - "description": "Task priority (1-4)" + "title": "Description" }, - "comments": { + "active": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ClickUpComment" - }, - "type": "array" + "type": "boolean" }, { "type": "null" } ], - "title": "Comments", - "description": "Comments on this task" + "title": "Active" } }, "type": "object", "required": [ "name" ], - "title": "ClickUpTask", - "description": "ClickUp Task object" + "title": "Campaign", + "description": "Marketing campaign" }, - "CloseContact": { + "Case": { "properties": { - "name": { + "subject": { + "type": "string", + "maxLength": 255, + "minLength": 1, + "title": "Subject" + }, + "status": { "anyOf": [ { "type": "string" @@ -13022,80 +12927,47 @@ "type": "null" } ], - "title": "Name", - "description": "Contact full name" + "title": "Status", + "default": "New" }, - "emails": { + "priority": { "anyOf": [ { - "items": { - "additionalProperties": true, - "type": "object" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Emails", - "description": "List of email objects" + "title": "Priority", + "description": "High, Medium, or Low", + "default": "Medium" }, - "phones": { + "origin": { "anyOf": [ { - "items": { - "additionalProperties": true, - "type": "object" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Phones", - "description": "List of phone objects" - } - }, - "type": "object", - "title": "CloseContact", - "description": "Close Contact object - child of Lead" - }, - "CloseData-Input": { - "properties": { - "leads": { + "title": "Origin", + "description": "e.g., Phone, Email, Web" + }, + "type": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/CloseLead" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Leads", - "description": "List of leads with nested objects" - } - }, - "type": "object", - "title": "CloseData", - "description": "Complete Close sandbox data structure.\n\nNested hierarchy for initialization:\n- Leads contain Contacts, Opportunities, and Tasks" - }, - "CloseData-Output": { - "additionalProperties": true, - "type": "object" - }, - "CloseLead": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Lead name/company name" + "title": "Type", + "description": "e.g., Problem, Question, Feature Request" }, - "status_label": { + "reason": { "anyOf": [ { "type": "string" @@ -13104,189 +12976,165 @@ "type": "null" } ], - "title": "Status Label", - "description": "Lead status" + "title": "Reason" }, - "contacts": { + "account_id": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/CloseContact" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Contacts", - "description": "Contacts associated with this lead" + "title": "Account Id" }, - "opportunities": { + "contact_id": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/CloseOpportunity" - }, - "type": "array" + "type": "string" }, { "type": 
"null" } ], - "title": "Opportunities", - "description": "Opportunities associated with this lead" + "title": "Contact Id" }, - "tasks": { + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/CloseTask" - }, - "type": "array" + "type": "string", + "maxLength": 32000 }, { "type": "null" } ], - "title": "Tasks", - "description": "Tasks associated with this lead" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "CloseLead", - "description": "Close Lead object - parent object containing contacts, opportunities, and tasks" - }, - "CloseOpportunity": { - "properties": { - "note": { - "type": "string", - "title": "Note", - "description": "Opportunity note/description" + "title": "Description" }, - "value": { + "customer_email": { "anyOf": [ { - "type": "integer" + "type": "string", + "format": "email" }, { "type": "null" } ], - "title": "Value", - "description": "Opportunity value in cents" + "title": "Customer Email" }, - "confidence": { + "customer_name": { "anyOf": [ { - "type": "integer" + "type": "string", + "maxLength": 80 }, { "type": "null" } ], - "title": "Confidence", - "description": "Confidence percentage (0-100)" + "title": "Customer Name" }, - "value_period": { + "customer_phone": { "anyOf": [ { - "type": "string" + "type": "string", + "maxLength": 40 }, { "type": "null" } ], - "title": "Value Period", - "description": "Value period (e.g., 'one_time', 'monthly', 'annual')" + "title": "Customer Phone" } }, "type": "object", "required": [ - "note" + "subject" ], - "title": "CloseOpportunity", - "description": "Close Opportunity object - child of Lead" + "title": "Case", + "description": "Customer support ticket" }, - "CloseTask": { + "ClickUpComment": { "properties": { - "text": { + "body": { "type": "string", - "title": "Text", - "description": "Task description" - }, - "date": { + "title": "Body", + "description": "Comment body content" + } + }, + "type": "object", + "required": [ + "body" + ], + "title": "ClickUpComment", + "description": "ClickUp Comment object" + }, + "ClickUpData-Input": { + "properties": { + "spaces": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/ClickUpSpace" + }, + "type": "array" }, { "type": "null" } ], - "title": "Date", - "description": "The date of a task represents the point in time of when the task is actionable and appears in the inbox" + "title": "Spaces", + "description": "List of spaces with nested objects" + } + }, + "type": "object", + "title": "ClickUpData", + "description": "Complete ClickUp sandbox data structure.\n\nNested hierarchy for initialization:\n- Spaces contain Folders and Lists (folderless)\n- Folders contain Lists\n- Lists contain Tasks\n- Tasks contain Comments" + }, + "ClickUpData-Output": { + "additionalProperties": true, + "type": "object" + }, + "ClickUpFolder": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Folder name" }, - "is_complete": { + "lists": { "anyOf": [ { - "type": "boolean" + "items": { + "$ref": "#/components/schemas/ClickUpList" + }, + "type": "array" }, { "type": "null" } ], - "title": "Is Complete", - "description": "Whether task is completed" - }, - "assigned_to": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Assigned To", - "description": "User ID assigned to" + "title": "Lists", + "description": "Lists in this folder" } }, "type": "object", "required": [ - "text" + "name" ], - "title": "CloseTask", - "description": "Close Task object - child of Lead" 
+ "title": "ClickUpFolder", + "description": "ClickUp Folder object" }, - "ComputeInstance": { + "ClickUpList": { "properties": { "name": { "type": "string", "title": "Name", - "description": "Instance name" - }, - "zone": { - "type": "string", - "title": "Zone", - "description": "Instance zone" - }, - "machineType": { - "type": "string", - "title": "Machinetype", - "description": "Machine type", - "default": "e2-micro" - }, - "status": { - "type": "string", - "title": "Status", - "description": "Instance status", - "default": "RUNNING" + "description": "List name" }, - "creationTimestamp": { + "description": { "anyOf": [ { "type": "string" @@ -13295,32 +13143,83 @@ "type": "null" } ], - "title": "Creationtimestamp", - "description": "Creation timestamp" + "title": "Description", + "description": "List description" }, - "id": { + "tasks": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/ClickUpTask" + }, + "type": "array" }, { "type": "null" } ], - "title": "Id", - "description": "Instance ID" + "title": "Tasks", + "description": "Tasks in this list" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "ClickUpList", + "description": "ClickUp List object" + }, + "ClickUpSpace": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Space name" }, - "selfLink": { + "folders": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/ClickUpFolder" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Folders", + "description": "Folders in this space" + }, + "lists": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/ClickUpList" + }, + "type": "array" }, { "type": "null" } ], - "title": "Selflink", - "description": "Self link URL" + "title": "Lists", + "description": "Folderless lists in this space" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "ClickUpSpace", + "description": "ClickUp Space object" + }, + "ClickUpTask": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Task name" }, "description": { "anyOf": [ @@ -13332,83 +13231,63 @@ } ], "title": "Description", - "description": "Instance description" + "description": "Task description" }, - "labels": { + "priority": { "anyOf": [ { - "additionalProperties": { - "type": "string" + "type": "integer" + }, + { + "type": "null" + } + ], + "title": "Priority", + "description": "Task priority (1-4)" + }, + "comments": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/ClickUpComment" }, - "type": "object" + "type": "array" }, { "type": "null" } ], - "title": "Labels", - "description": "Instance labels" - } - }, - "type": "object", - "required": [ - "name", - "zone" - ], - "title": "ComputeInstance", - "description": "Compute Engine instance - matches API format" - }, - "ConfluenceComment": { - "properties": { - "body": { - "type": "string", - "title": "Body", - "description": "Comment text content (storage format)" + "title": "Comments", + "description": "Comments on this task" } }, "type": "object", "required": [ - "body" + "name" ], - "title": "ConfluenceComment", - "description": "Confluence Comment object" - }, - "ConfluenceData-Input": { - "properties": { - "spaces": { - "items": { - "$ref": "#/components/schemas/ConfluenceSpace" - }, - "type": "array", - "title": "Spaces", - "description": "List of spaces" - } - }, - "type": "object", - "title": "ConfluenceData", - "description": "Complete Confluence sandbox data 
structure.\n\nHierarchy:\n- Spaces contain Pages\n- Pages can have children (nested pages) and Comments" - }, - "ConfluenceData-Output": { - "additionalProperties": true, - "type": "object" + "title": "ClickUpTask", + "description": "ClickUp Task object" }, - "ConfluencePage-Input": { + "CloseContact": { "properties": { - "title": { - "type": "string", - "title": "Title", - "description": "Page title" - }, - "body": { - "type": "string", - "title": "Body", - "description": "Page content (storage format)" + "name": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Name", + "description": "Contact full name" }, - "children": { + "emails": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/ConfluencePage-Input" + "additionalProperties": true, + "type": "object" }, "type": "array" }, @@ -13416,14 +13295,15 @@ "type": "null" } ], - "title": "Children", - "description": "Child pages" + "title": "Emails", + "description": "List of email objects" }, - "comments": { + "phones": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/ConfluenceComment" + "additionalProperties": true, + "type": "object" }, "type": "array" }, @@ -13431,35 +13311,48 @@ "type": "null" } ], - "title": "Comments", - "description": "Comments on this page" + "title": "Phones", + "description": "List of phone objects" } }, "type": "object", - "required": [ - "title", - "body" - ], - "title": "ConfluencePage", - "description": "Confluence Page object" + "title": "CloseContact", + "description": "Close Contact object - child of Lead" }, - "ConfluencePage-Output": { + "CloseData-Input": { + "properties": { + "leads": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/CloseLead" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Leads", + "description": "List of leads with nested objects" + } + }, + "type": "object", + "title": "CloseData", + "description": "Complete Close sandbox data structure.\n\nNested hierarchy for initialization:\n- Leads contain Contacts, Opportunities, and Tasks" + }, + "CloseData-Output": { "additionalProperties": true, "type": "object" }, - "ConfluenceSpace": { + "CloseLead": { "properties": { - "key": { - "type": "string", - "title": "Key", - "description": "Space key (e.g., DS)" - }, "name": { "type": "string", "title": "Name", - "description": "Space name" + "description": "Lead name/company name" }, - "description": { + "status": { "anyOf": [ { "type": "string" @@ -13468,14 +13361,14 @@ "type": "null" } ], - "title": "Description", - "description": "Space description" + "title": "Status", + "description": "Lead status" }, - "pages": { + "contacts": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/ConfluencePage-Input" + "$ref": "#/components/schemas/CloseContact" }, "type": "array" }, @@ -13483,214 +13376,79 @@ "type": "null" } ], - "title": "Pages", - "description": "Root pages in this space" - } - }, - "type": "object", - "required": [ - "key", - "name" - ], - "title": "ConfluenceSpace", - "description": "Confluence Space object" - }, - "ConnectionType": { - "type": "string", - "enum": [ - "SSE", - "StreamableHttp" - ], - "title": "ConnectionType" - }, - "CreateSandboxResponse": { - "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Unique identifier for the acquired sandbox" - }, - "server_url": { - "type": "string", - "title": "Server Url", - "description": "URL to connect to the MCP server" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", 
- "description": "The MCP server name" - }, - "status": { - "$ref": "#/components/schemas/SandboxStatus", - "description": "Current status of the sandbox" - }, - "message": { - "type": "string", - "title": "Message", - "description": "Status message" - } - }, - "type": "object", - "required": [ - "sandbox_id", - "server_url", - "server_name", - "status", - "message" - ], - "title": "CreateSandboxResponse", - "description": "Response model for sandbox acquisition" - }, - "CreateSelfHostedServerRequest": { - "properties": { - "serverName": { - "$ref": "#/components/schemas/McpServerName", - "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)." - }, - "userId": { - "type": "string", - "minLength": 1, - "title": "Userid", - "description": "The unique identifier for the user. The server instance along with the all the authentication data will belong to that specific user only. It can be a UUID from the database, a unique email address from the user, etc." - } - }, - "type": "object", - "required": [ - "serverName", - "userId" - ], - "title": "CreateSelfHostedServerRequest" - }, - "CreateSelfHostedServerResponse": { - "properties": { - "instanceId": { - "type": "string", - "title": "Instanceid", - "description": "The unique identifier for this specific server connection integration instance." + "title": "Contacts", + "description": "Contacts associated with this lead" }, - "oauthUrl": { + "opportunities": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/CloseOpportunity" + }, + "type": "array" }, { "type": "null" } ], - "title": "Oauthurl", - "description": "The OAuth authorization URL for the specified server, if OAuth is configured." - } - }, - "type": "object", - "required": [ - "instanceId" - ], - "title": "CreateSelfHostedServerResponse" - }, - "CreateServerRequest": { - "properties": { - "serverName": { - "$ref": "#/components/schemas/McpServerName", - "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)." - }, - "userId": { - "type": "string", - "minLength": 1, - "title": "Userid", - "description": "The unique identifier for the user. The server instance along with the all the authentication data will belong to that specific user only. It can be a UUID from the database, a unique email address from the user, etc." + "title": "Opportunities", + "description": "Opportunities associated with this lead" }, - "platformName": { + "tasks": { "anyOf": [ { - "type": "string", - "minLength": 1 + "items": { + "$ref": "#/components/schemas/CloseTask" + }, + "type": "array" }, { "type": "null" } ], - "title": "Platformname", - "description": "The name of the platform associated with the user. Optional." - }, - "connectionType": { - "$ref": "#/components/schemas/ConnectionType", - "description": "The connection type to use for the MCP server. Default is STREAMABLE_HTTP.", - "default": "StreamableHttp" - }, - "legacy": { - "type": "boolean", - "title": "Legacy", - "description": "Whether to use the legacy server. 
Default is False.", - "default": false + "title": "Tasks", + "description": "Tasks associated with this lead" } }, "type": "object", "required": [ - "serverName", - "userId" + "name" ], - "title": "CreateServerRequest" + "title": "CloseLead", + "description": "Close Lead object - parent object containing contacts, opportunities, and tasks" }, - "CreateServerResponse": { + "CloseOpportunity": { "properties": { - "serverUrl": { - "type": "string", - "title": "Serverurl", - "description": "The full URL for connecting to the MCP server, including the instance ID." - }, - "instanceId": { + "note": { "type": "string", - "title": "Instanceid", - "description": "The unique identifier for this specific server connection integration instance." + "title": "Note", + "description": "Opportunity note/description" }, - "oauthUrl": { + "value": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "Oauthurl", - "description": "The OAuth URL for authentication if available, supports white label if configured." - } - }, - "type": "object", - "required": [ - "serverUrl", - "instanceId" - ], - "title": "CreateServerResponse" - }, - "CreateWhiteLabelingRequest": { - "properties": { - "client_id": { - "type": "string", - "title": "Client Id", - "description": "OAuth client ID" - }, - "client_secret": { - "type": "string", - "title": "Client Secret", - "description": "OAuth client secret" - }, - "server_name": { - "$ref": "#/components/schemas/OAuthServerName", - "description": "Optional. The name of the server" + "title": "Value", + "description": "Opportunity value in cents" }, - "callback_url": { + "confidence": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "Callback Url", - "description": "Optional. OAuth callback URL" + "title": "Confidence", + "description": "Confidence percentage (0-100)" }, - "account_id": { + "period": { "anyOf": [ { "type": "string" @@ -13699,39 +13457,25 @@ "type": "null" } ], - "title": "Account Id", - "description": "Optional. 
The UUID of the account" + "title": "Period", + "description": "Billing period (e.g., 'one_time', 'monthly', 'annual')" } }, "type": "object", "required": [ - "client_id", - "client_secret", - "server_name" + "note" ], - "title": "CreateWhiteLabelingRequest" + "title": "CloseOpportunity", + "description": "Close Opportunity object - child of Lead" }, - "DeleteUserResponse": { + "CloseTask": { "properties": { - "success": { - "type": "boolean", - "title": "Success" - }, - "message": { + "description": { "type": "string", - "title": "Message" - } - }, - "type": "object", - "required": [ - "success", - "message" - ], - "title": "DeleteUserResponse" - }, - "DiscordChannel": { - "properties": { - "channel_id": { + "title": "Description", + "description": "Task description" + }, + "date": { "anyOf": [ { "type": "string" @@ -13740,107 +13484,164 @@ "type": "null" } ], - "title": "Channel Id", - "description": "Discord channel ID (auto-discovered if not provided)" + "title": "Date", + "description": "The date when the task is actionable" }, - "messages": { + "completed": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/DiscordMessage" - }, - "type": "array" + "type": "boolean" }, { "type": "null" } ], - "title": "Messages", - "description": "Messages in this channel" - } - }, - "type": "object", - "title": "DiscordChannel", - "description": "Discord Channel - uses existing channel by ID" - }, - "DiscordData-Input": { - "properties": { - "channels": { + "title": "Completed", + "description": "Whether task is completed" + }, + "assignee_id": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/DiscordChannel" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Channels", - "description": "List of channels" + "title": "Assignee Id", + "description": "User ID assigned to" } }, "type": "object", - "title": "DiscordData", - "description": "Complete Discord sandbox data structure.\n\nHierarchy:\n- Channels contain Messages\n- Messages can have Reactions" - }, - "DiscordData-Output": { - "additionalProperties": true, - "type": "object" + "required": [ + "description" + ], + "title": "CloseTask", + "description": "Close Task object - child of Lead" }, - "DiscordMessage": { + "ComputeInstance": { "properties": { - "content": { + "name": { "type": "string", - "title": "Content", - "description": "Message text content" + "title": "Name", + "description": "Instance name" }, - "reactions": { + "zone": { + "type": "string", + "title": "Zone", + "description": "Instance zone" + }, + "machine_type": { + "type": "string", + "title": "Machine Type", + "description": "Machine type", + "default": "e2-micro" + }, + "status": { + "type": "string", + "title": "Status", + "description": "Instance status", + "default": "RUNNING" + }, + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/DiscordReaction" + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description", + "description": "Instance description" + }, + "labels": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" }, - "type": "array" + "type": "object" }, { "type": "null" } ], - "title": "Reactions", - "description": "Reactions to this message" + "title": "Labels", + "description": "Instance labels" + }, + "created_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Created At", + "description": "Creation timestamp" } }, "type": "object", "required": [ - "content" + "name", + "zone" ], - "title": "DiscordMessage", - 
"description": "Discord Message object" + "title": "ComputeInstance", + "description": "Compute Engine instance" }, - "DiscordReaction": { + "ConfluenceComment": { "properties": { - "name": { + "content": { "type": "string", - "title": "Name", - "description": "Emoji (Unicode like '👍' or custom 'name:id')" + "title": "Content", + "description": "Comment text content" } }, "type": "object", "required": [ - "name" + "content" ], - "title": "DiscordReaction", - "description": "Discord Reaction object" + "title": "ConfluenceComment", + "description": "Confluence Comment object" }, - "DropboxData": { + "ConfluenceData-Input": { "properties": { - "folders": { + "spaces": { + "items": { + "$ref": "#/components/schemas/ConfluenceSpace" + }, + "type": "array", + "title": "Spaces", + "description": "List of spaces" + } + }, + "type": "object", + "title": "ConfluenceData", + "description": "Complete Confluence sandbox data structure.\n\nHierarchy:\n- Spaces contain Pages\n- Pages can have children (nested pages) and Comments" + }, + "ConfluenceData-Output": { + "additionalProperties": true, + "type": "object" + }, + "ConfluencePage-Input": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "description": "Page title" + }, + "content": { + "type": "string", + "title": "Content", + "description": "Page content" + }, + "children": { "anyOf": [ { "items": { - "type": "string" + "$ref": "#/components/schemas/ConfluencePage-Input" }, "type": "array" }, @@ -13848,14 +13649,14 @@ "type": "null" } ], - "title": "Folders", - "description": "List of folder paths to create (e.g., ['/SandboxTest', '/SandboxTest/Documents'])" + "title": "Children", + "description": "Child pages" }, - "files": { + "comments": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/DropboxFile" + "$ref": "#/components/schemas/ConfluenceComment" }, "type": "array" }, @@ -13863,409 +13664,610 @@ "type": "null" } ], - "title": "Files", - "description": "List of files to create with their content" - } - }, - "type": "object", - "title": "DropboxData", - "description": "Complete Dropbox sandbox data structure\n\nDescribes the folder and file structure to be created in the sandbox.\nFolders are created in the order specified, and files are uploaded after\nall folders have been created.\n\nNote: The root folder /SandboxTest is always created first automatically.\nAdditional folders should be nested within /SandboxTest." 
- }, - "DropboxFile": { - "properties": { - "path": { - "type": "string", - "minLength": 1, - "title": "Path", - "description": "Full path to the file (must start with /SandboxTest/)" - }, - "content": { - "type": "string", - "title": "Content", - "description": "Text content of the file" + "title": "Comments", + "description": "Comments on this page" } }, "type": "object", "required": [ - "path", + "title", "content" ], - "title": "DropboxFile", - "description": "Dropbox file data\n\nAttributes:\n path: Full path to the file (e.g., \"/SandboxTest/document.txt\")\n content: Text content of the file" + "title": "ConfluencePage", + "description": "Confluence Page object" }, - "DumpSandboxResponse_AirtableData_": { + "ConfluencePage-Output": { + "additionalProperties": true, + "type": "object" + }, + "ConfluenceSpace": { "properties": { - "sandbox_id": { + "identifier": { "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" + "title": "Identifier", + "description": "Space identifier" }, - "dumped_at": { + "name": { "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" + "title": "Name", + "description": "Space name" }, - "data": { - "$ref": "#/components/schemas/AirtableData-Output", - "description": "Dumped sandbox data in server-specific format" + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description", + "description": "Space description" + }, + "pages": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/ConfluencePage-Input" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Pages", + "description": "Root pages in this space" } }, "type": "object", "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" + "identifier", + "name" ], - "title": "DumpSandboxResponse[AirtableData]" + "title": "ConfluenceSpace", + "description": "Confluence Space object" }, - "DumpSandboxResponse_AsanaData_": { + "ConnectionType": { + "type": "string", + "enum": [ + "SSE", + "StreamableHttp" + ], + "title": "ConnectionType" + }, + "Contact": { "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" + "first_name": { + "anyOf": [ + { + "type": "string", + "maxLength": 40 + }, + { + "type": "null" + } + ], + "title": "First Name" }, - "dumped_at": { + "last_name": { "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" + "maxLength": 80, + "minLength": 1, + "title": "Last Name" }, - "data": { - "$ref": "#/components/schemas/AsanaData-Output", - "description": "Dumped sandbox data in server-specific format" + "email": { + "anyOf": [ + { + "type": "string", + "format": "email" + }, + { + "type": "null" + } + ], + "title": "Email" + }, + "phone": { + "anyOf": [ + { + "type": "string", + "maxLength": 40 + }, + { + "type": "null" + } + ], + "title": "Phone" + }, + "mobile": { + "anyOf": [ + { + "type": "string", + "maxLength": 40 + }, + { + "type": "null" + } + ], + "title": "Mobile" + }, + "title": { + "anyOf": [ + { + "type": "string", + "maxLength": 128 + }, + { + "type": "null" + } + ], + "title": "Title", + "description": "Job title" + }, + "department": { + "anyOf": [ + { + "type": "string", + "maxLength": 80 
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Department"
+ },
+ "account_id": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Account Id",
+ "description": "Related account ID"
+ },
+ "address": {
+ "anyOf": [
+ {
+ "$ref": "#/components/schemas/Address"
+ },
+ {
+ "type": "null"
+ }
+ ]
+ },
+ "description": {
+ "anyOf": [
+ {
+ "type": "string",
+ "maxLength": 32000
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Description"
+ },
+ "source": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Source",
+ "description": "e.g., Web, Phone, Referral"
+ }
+ },
+ "type": "object",
+ "required": [
+ "last_name"
+ ],
+ "title": "Contact",
+ "description": "Individual person associated with an account"
+ },
+ "CreateSandboxResponse": {
+ "properties": {
+ "sandbox_id": {
+ "type": "string",
+ "title": "Sandbox Id",
+ "description": "Unique identifier for the acquired sandbox"
+ },
+ "server_url": {
+ "type": "string",
+ "title": "Server Url",
+ "description": "URL to connect to the MCP server"
+ },
+ "server_name": {
+ "$ref": "#/components/schemas/SandboxMCPServer",
+ "description": "The MCP server name"
+ },
+ "status": {
+ "$ref": "#/components/schemas/SandboxStatus",
+ "description": "Current status of the sandbox"
+ },
+ "message": {
+ "type": "string",
+ "title": "Message",
+ "description": "Status message"
+ }
+ },
+ "type": "object",
+ "required": [
+ "sandbox_id",
+ "server_url",
+ "server_name",
+ "status",
+ "message"
+ ],
+ "title": "CreateSandboxResponse",
+ "description": "Response model for sandbox acquisition"
+ },
+ "CreateSelfHostedServerRequest": {
+ "properties": {
+ "serverName": {
+ "$ref": "#/components/schemas/McpServerName",
+ "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)."
+ },
+ "userId": {
+ "type": "string",
+ "minLength": 1,
+ "title": "Userid",
+ "description": "The unique identifier for the user. The server instance, along with all the authentication data, will belong to that specific user only. It can be a UUID from the database, a unique email address from the user, etc."
}
 }
 },
 "type": "object",
 "required": [
- "sandbox_id",
- "server_name",
- "dumped_at",
- "data"
+ "serverName",
+ "userId"
 ],
- "title": "DumpSandboxResponse[ClickUpData]"
+ "title": "CreateSelfHostedServerRequest"
 },
- "DumpSandboxResponse_CloseData_": {
+ "CreateSelfHostedServerResponse": {
 "properties": {
- "sandbox_id": {
- "type": "string",
- "title": "Sandbox Id",
- "description": "Sandbox identifier"
- },
- "server_name": {
- "$ref": "#/components/schemas/SandboxMCPServer",
- "description": "MCP server type"
- },
- "dumped_at": {
+ "instanceId": {
 "type": "string",
- "format": "date-time",
- "title": "Dumped At",
- "description": "Timestamp of dump"
+ "title": "Instanceid",
+ "description": "The unique identifier for this specific server connection integration instance."
 },
- "data": {
- "$ref": "#/components/schemas/CloseData-Output",
- "description": "Dumped sandbox data in server-specific format"
+ "oauthUrl": {
+ "anyOf": [
+ {
+ "type": "string"
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Oauthurl",
+ "description": "The OAuth authorization URL for the specified server, if OAuth is configured."
 }
 },
 "type": "object",
 "required": [
- "sandbox_id",
- "server_name",
- "dumped_at",
- "data"
+ "instanceId"
 ],
- "title": "DumpSandboxResponse[CloseData]"
+ "title": "CreateSelfHostedServerResponse"
 },
- "DumpSandboxResponse_ConfluenceData_": {
+ "CreateServerRequest": {
 "properties": {
- "sandbox_id": {
+ "serverName": {
+ "$ref": "#/components/schemas/McpServerName",
+ "description": "The name of the target MCP server. Case-insensitive (e.g., 'google calendar', 'GOOGLE_CALENDAR', 'Google Calendar' are all valid)."
+ },
+ "userId": {
 "type": "string",
- "title": "Sandbox Id",
- "description": "Sandbox identifier"
+ "minLength": 1,
+ "title": "Userid",
+ "description": "The unique identifier for the user. The server instance, along with all the authentication data, will belong to that specific user only. It can be a UUID from the database, a unique email address from the user, etc."
 },
- "server_name": {
- "$ref": "#/components/schemas/SandboxMCPServer",
- "description": "MCP server type"
+ "platformName": {
+ "anyOf": [
+ {
+ "type": "string",
+ "minLength": 1
+ },
+ {
+ "type": "null"
+ }
+ ],
+ "title": "Platformname",
+ "description": "The name of the platform associated with the user. Optional."
 },
- "dumped_at": {
- "type": "string",
- "format": "date-time",
- "title": "Dumped At",
- "description": "Timestamp of dump"
+ "connectionType": {
+ "$ref": "#/components/schemas/ConnectionType",
+ "description": "The connection type to use for the MCP server. Default is StreamableHttp.",
+ "default": "StreamableHttp"
 },
- "data": {
- "$ref": "#/components/schemas/ConfluenceData-Output",
- "description": "Dumped sandbox data in server-specific format"
+ "legacy": {
+ "type": "boolean",
+ "title": "Legacy",
+ "description": "Whether to use the legacy server. Default is False.",
+ "default": false
+ },
+ "isReadOnly": {
+ "type": "boolean",
+ "title": "Isreadonly",
+ "description": "Whether the MCP server connection is read-only. When true, write operations will be restricted. 
Default is False.", + "default": false } }, "type": "object", "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" + "serverName", + "userId" ], - "title": "DumpSandboxResponse[ConfluenceData]" + "title": "CreateServerRequest" }, - "DumpSandboxResponse_DiscordData_": { + "CreateServerResponse": { "properties": { - "sandbox_id": { + "serverUrl": { "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" + "title": "Serverurl", + "description": "The full URL for connecting to the MCP server, including the instance ID." }, - "dumped_at": { + "instanceId": { "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" + "title": "Instanceid", + "description": "The unique identifier for this specific server connection integration instance." }, - "data": { - "$ref": "#/components/schemas/DiscordData-Output", - "description": "Dumped sandbox data in server-specific format" + "oauthUrl": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Oauthurl", + "description": "The OAuth URL for authentication if available, supports white label if configured." } }, "type": "object", "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" + "serverUrl", + "instanceId" ], - "title": "DumpSandboxResponse[DiscordData]" + "title": "CreateServerResponse" }, - "DumpSandboxResponse_DropboxData_": { + "CreateWhiteLabelingRequest": { "properties": { - "sandbox_id": { + "client_id": { "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" + "title": "Client Id", + "description": "OAuth client ID" + }, + "client_secret": { + "type": "string", + "title": "Client Secret", + "description": "OAuth client secret" }, "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" + "$ref": "#/components/schemas/OAuthServerName", + "description": "Optional. The name of the server" }, - "dumped_at": { - "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" + "callback_url": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Callback Url", + "description": "Optional. OAuth callback URL" }, - "data": { - "$ref": "#/components/schemas/DropboxData", - "description": "Dumped sandbox data in server-specific format" + "account_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Account Id", + "description": "Optional. 
The UUID of the account" } }, "type": "object", "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" + "client_id", + "client_secret", + "server_name" ], - "title": "DumpSandboxResponse[DropboxData]" + "title": "CreateWhiteLabelingRequest" }, - "DumpSandboxResponse_GitHubData_": { + "DeleteUserResponse": { "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" + "success": { + "type": "boolean", + "title": "Success" }, - "dumped_at": { + "message": { "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" - }, - "data": { - "$ref": "#/components/schemas/GitHubData-Output", - "description": "Dumped sandbox data in server-specific format" + "title": "Message" } }, "type": "object", "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" + "success", + "message" ], - "title": "DumpSandboxResponse[GitHubData]" + "title": "DeleteUserResponse" }, - "DumpSandboxResponse_GmailData_": { + "DiscordChannel": { "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" - }, - "dumped_at": { - "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" + "channel_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Channel Id", + "description": "Discord channel ID (auto-discovered if not provided)" }, - "data": { - "$ref": "#/components/schemas/GmailData", - "description": "Dumped sandbox data in server-specific format" + "messages": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/DiscordMessage" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Messages", + "description": "Messages in this channel" } }, "type": "object", - "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" - ], - "title": "DumpSandboxResponse[GmailData]" + "title": "DiscordChannel", + "description": "Discord Channel - uses existing channel by ID" }, - "DumpSandboxResponse_GoogleCalendarData_": { + "DiscordData-Input": { "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" - }, - "dumped_at": { - "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" - }, - "data": { - "$ref": "#/components/schemas/GoogleCalendarData-Output", - "description": "Dumped sandbox data in server-specific format" + "channels": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/DiscordChannel" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Channels", + "description": "List of channels" } }, "type": "object", - "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" - ], - "title": "DumpSandboxResponse[GoogleCalendarData]" + "title": "DiscordData", + "description": "Complete Discord sandbox data structure.\n\nHierarchy:\n- Channels contain Messages\n- Messages can have Reactions" }, - "DumpSandboxResponse_GoogleCloudData_": { + "DiscordData-Output": { + "additionalProperties": true, + "type": "object" + }, + "DiscordMessage": { "properties": { - 
"sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "server_name": { - "$ref": "#/components/schemas/SandboxMCPServer", - "description": "MCP server type" - }, - "dumped_at": { + "content": { "type": "string", - "format": "date-time", - "title": "Dumped At", - "description": "Timestamp of dump" + "title": "Content", + "description": "Message text content" }, - "data": { - "$ref": "#/components/schemas/GoogleCloudData-Output", - "description": "Dumped sandbox data in server-specific format" + "reactions": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/DiscordReaction" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Reactions", + "description": "Reactions to this message" } }, "type": "object", "required": [ - "sandbox_id", - "server_name", - "dumped_at", - "data" + "content" ], - "title": "DumpSandboxResponse[GoogleCloudData]" + "title": "DiscordMessage", + "description": "Discord Message object" }, - "DumpSandboxResponse_GoogleDocsData_": { + "DiscordReaction": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Emoji (Unicode like '👍' or custom 'name:id')" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "DiscordReaction", + "description": "Discord Reaction object" + }, + "DropboxData": { + "properties": { + "folders": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Folders", + "description": "List of folder paths to create (e.g., ['/SandboxTest', '/SandboxTest/Documents'])" + }, + "files": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/DropboxFile" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Files", + "description": "List of files to create with their content" + } + }, + "type": "object", + "title": "DropboxData", + "description": "Complete Dropbox sandbox data structure\n\nDescribes the folder and file structure to be created in the sandbox.\nFolders are created in the order specified, and files are uploaded after\nall folders have been created.\n\nNote: The root folder /SandboxTest is always created first automatically.\nAdditional folders should be nested within /SandboxTest." 
+ }, + "DropboxFile": { + "properties": { + "path": { + "type": "string", + "minLength": 1, + "title": "Path", + "description": "Full path to the file (must start with /SandboxTest/)" + }, + "content": { + "type": "string", + "title": "Content", + "description": "Text content of the file" + } + }, + "type": "object", + "required": [ + "path", + "content" + ], + "title": "DropboxFile", + "description": "Dropbox file data\n\nAttributes:\n path: Full path to the file (e.g., \"/SandboxTest/document.txt\")\n content: Text content of the file" + }, + "DumpSandboxResponse_AirtableData_": { "properties": { "sandbox_id": { "type": "string", @@ -14283,7 +14285,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/GoogleDocsData", + "$ref": "#/components/schemas/AirtableData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14294,9 +14296,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[GoogleDocsData]" + "title": "DumpSandboxResponse[AirtableData]" }, - "DumpSandboxResponse_GoogleDriveData_": { + "DumpSandboxResponse_AsanaData_": { "properties": { "sandbox_id": { "type": "string", @@ -14314,7 +14316,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/GoogleDriveData", + "$ref": "#/components/schemas/AsanaData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14325,9 +14327,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[GoogleDriveData]" + "title": "DumpSandboxResponse[AsanaData]" }, - "DumpSandboxResponse_GoogleFormsData_": { + "DumpSandboxResponse_CalcomData_": { "properties": { "sandbox_id": { "type": "string", @@ -14345,7 +14347,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/GoogleFormsData-Output", + "$ref": "#/components/schemas/CalcomData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14356,9 +14358,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[GoogleFormsData]" + "title": "DumpSandboxResponse[CalcomData]" }, - "DumpSandboxResponse_GoogleSheetsData_": { + "DumpSandboxResponse_ClickUpData_": { "properties": { "sandbox_id": { "type": "string", @@ -14376,7 +14378,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/GoogleSheetsData-Output", + "$ref": "#/components/schemas/ClickUpData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14387,9 +14389,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[GoogleSheetsData]" + "title": "DumpSandboxResponse[ClickUpData]" }, - "DumpSandboxResponse_HubSpotData_": { + "DumpSandboxResponse_CloseData_": { "properties": { "sandbox_id": { "type": "string", @@ -14407,7 +14409,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/HubSpotData-Output", + "$ref": "#/components/schemas/CloseData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14418,9 +14420,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[HubSpotData]" + "title": "DumpSandboxResponse[CloseData]" }, - "DumpSandboxResponse_JiraData_": { + "DumpSandboxResponse_ConfluenceData_": { "properties": { "sandbox_id": { "type": "string", @@ -14438,7 +14440,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/JiraData-Output", + "$ref": "#/components/schemas/ConfluenceData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14449,9 +14451,9 @@ "dumped_at", "data" ], - "title": 
"DumpSandboxResponse[JiraData]" + "title": "DumpSandboxResponse[ConfluenceData]" }, - "DumpSandboxResponse_LinearData_": { + "DumpSandboxResponse_DiscordData_": { "properties": { "sandbox_id": { "type": "string", @@ -14469,7 +14471,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/LinearData-Output", + "$ref": "#/components/schemas/DiscordData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14480,9 +14482,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[LinearData]" + "title": "DumpSandboxResponse[DiscordData]" }, - "DumpSandboxResponse_Mem0Data_": { + "DumpSandboxResponse_DropboxData_": { "properties": { "sandbox_id": { "type": "string", @@ -14500,7 +14502,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/Mem0Data-Output", + "$ref": "#/components/schemas/DropboxData", "description": "Dumped sandbox data in server-specific format" } }, @@ -14511,9 +14513,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[Mem0Data]" + "title": "DumpSandboxResponse[DropboxData]" }, - "DumpSandboxResponse_MondayData_": { + "DumpSandboxResponse_GitHubData_": { "properties": { "sandbox_id": { "type": "string", @@ -14531,7 +14533,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/MondayData-Output", + "$ref": "#/components/schemas/GitHubData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14542,9 +14544,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[MondayData]" + "title": "DumpSandboxResponse[GitHubData]" }, - "DumpSandboxResponse_MoneybirdData_": { + "DumpSandboxResponse_GmailData_": { "properties": { "sandbox_id": { "type": "string", @@ -14562,7 +14564,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/MoneybirdData-Output", + "$ref": "#/components/schemas/GmailData", "description": "Dumped sandbox data in server-specific format" } }, @@ -14573,9 +14575,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[MoneybirdData]" + "title": "DumpSandboxResponse[GmailData]" }, - "DumpSandboxResponse_MotionData_": { + "DumpSandboxResponse_GoogleCalendarData_": { "properties": { "sandbox_id": { "type": "string", @@ -14593,7 +14595,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/MotionData-Output", + "$ref": "#/components/schemas/GoogleCalendarData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14604,9 +14606,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[MotionData]" + "title": "DumpSandboxResponse[GoogleCalendarData]" }, - "DumpSandboxResponse_MsTeamsData_": { + "DumpSandboxResponse_GoogleCloudData_": { "properties": { "sandbox_id": { "type": "string", @@ -14624,7 +14626,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/MsTeamsData-Output", + "$ref": "#/components/schemas/GoogleCloudData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14635,9 +14637,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[MsTeamsData]" + "title": "DumpSandboxResponse[GoogleCloudData]" }, - "DumpSandboxResponse_NotionData_": { + "DumpSandboxResponse_GoogleDocsData_": { "properties": { "sandbox_id": { "type": "string", @@ -14655,7 +14657,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/NotionData-Output", + "$ref": "#/components/schemas/GoogleDocsData", "description": "Dumped sandbox data in server-specific 
format" } }, @@ -14666,9 +14668,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[NotionData]" + "title": "DumpSandboxResponse[GoogleDocsData]" }, - "DumpSandboxResponse_OneDriveData_": { + "DumpSandboxResponse_GoogleDriveData_": { "properties": { "sandbox_id": { "type": "string", @@ -14686,7 +14688,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/OneDriveData-Output", + "$ref": "#/components/schemas/GoogleDriveData", "description": "Dumped sandbox data in server-specific format" } }, @@ -14697,9 +14699,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[OneDriveData]" + "title": "DumpSandboxResponse[GoogleDriveData]" }, - "DumpSandboxResponse_OutlookCalendarData_": { + "DumpSandboxResponse_GoogleFormsData_": { "properties": { "sandbox_id": { "type": "string", @@ -14717,7 +14719,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/OutlookCalendarData", + "$ref": "#/components/schemas/GoogleFormsData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14728,9 +14730,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[OutlookCalendarData]" + "title": "DumpSandboxResponse[GoogleFormsData]" }, - "DumpSandboxResponse_OutlookMailData_": { + "DumpSandboxResponse_GoogleSheetsData_": { "properties": { "sandbox_id": { "type": "string", @@ -14748,7 +14750,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/OutlookMailData", + "$ref": "#/components/schemas/GoogleSheetsData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14759,9 +14761,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[OutlookMailData]" + "title": "DumpSandboxResponse[GoogleSheetsData]" }, - "DumpSandboxResponse_QuickBooksData_": { + "DumpSandboxResponse_HubSpotData_": { "properties": { "sandbox_id": { "type": "string", @@ -14779,7 +14781,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/QuickBooksData", + "$ref": "#/components/schemas/HubSpotData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14790,9 +14792,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[QuickBooksData]" + "title": "DumpSandboxResponse[HubSpotData]" }, - "DumpSandboxResponse_ResendData_": { + "DumpSandboxResponse_JiraData_": { "properties": { "sandbox_id": { "type": "string", @@ -14810,7 +14812,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/ResendData-Output", + "$ref": "#/components/schemas/JiraData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14821,9 +14823,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[ResendData]" + "title": "DumpSandboxResponse[JiraData]" }, - "DumpSandboxResponse_SalesforceData_": { + "DumpSandboxResponse_LinearData_": { "properties": { "sandbox_id": { "type": "string", @@ -14841,7 +14843,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/SalesforceData", + "$ref": "#/components/schemas/LinearData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14852,9 +14854,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[SalesforceData]" + "title": "DumpSandboxResponse[LinearData]" }, - "DumpSandboxResponse_ShopifyData_": { + "DumpSandboxResponse_Mem0Data_": { "properties": { "sandbox_id": { "type": "string", @@ -14872,7 +14874,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": 
"#/components/schemas/ShopifyData-Output", + "$ref": "#/components/schemas/Mem0Data-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14883,9 +14885,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[ShopifyData]" + "title": "DumpSandboxResponse[Mem0Data]" }, - "DumpSandboxResponse_SlackData_": { + "DumpSandboxResponse_MondayData_": { "properties": { "sandbox_id": { "type": "string", @@ -14903,7 +14905,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/SlackData-Output", + "$ref": "#/components/schemas/MondayData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14914,9 +14916,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[SlackData]" + "title": "DumpSandboxResponse[MondayData]" }, - "DumpSandboxResponse_SnowflakeData_": { + "DumpSandboxResponse_MoneybirdData_": { "properties": { "sandbox_id": { "type": "string", @@ -14934,7 +14936,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/SnowflakeData-Output", + "$ref": "#/components/schemas/MoneybirdData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14945,9 +14947,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[SnowflakeData]" + "title": "DumpSandboxResponse[MoneybirdData]" }, - "DumpSandboxResponse_SupabaseData_": { + "DumpSandboxResponse_MotionData_": { "properties": { "sandbox_id": { "type": "string", @@ -14965,7 +14967,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/SupabaseData-Output", + "$ref": "#/components/schemas/MotionData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -14976,9 +14978,9 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[SupabaseData]" + "title": "DumpSandboxResponse[MotionData]" }, - "DumpSandboxResponse_WordPressData_": { + "DumpSandboxResponse_MsTeamsData_": { "properties": { "sandbox_id": { "type": "string", @@ -14996,7 +14998,7 @@ "description": "Timestamp of dump" }, "data": { - "$ref": "#/components/schemas/WordPressData", + "$ref": "#/components/schemas/MsTeamsData-Output", "description": "Dumped sandbox data in server-specific format" } }, @@ -15007,1601 +15009,802 @@ "dumped_at", "data" ], - "title": "DumpSandboxResponse[WordPressData]" + "title": "DumpSandboxResponse[MsTeamsData]" }, - "ExternalServerInfo": { + "DumpSandboxResponse_NotionData_": { "properties": { - "name": { + "sandbox_id": { "type": "string", - "title": "Name", - "description": "The name of the external server" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "url": { - "type": "string", - "title": "Url", - "description": "The URL of the external MCP server" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" }, - "headers": { - "anyOf": [ - { - "additionalProperties": { - "type": "string" - }, - "type": "object" - }, - { - "type": "null" - } - ], - "title": "Headers", - "description": "Optional HTTP headers used when connecting to the external server" + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" + }, + "data": { + "$ref": "#/components/schemas/NotionData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "name", - "url" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "ExternalServerInfo" + "title": "DumpSandboxResponse[NotionData]" }, - 
"ExternalServerRequest": { + "DumpSandboxResponse_OneDriveData_": { "properties": { - "name": { + "sandbox_id": { "type": "string", - "minLength": 1, - "title": "Name", - "description": "The name of the external server" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "url": { + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" + }, + "dumped_at": { "type": "string", - "minLength": 1, - "title": "Url", - "description": "The URL of the external MCP server" + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" }, - "headers": { - "anyOf": [ - { - "additionalProperties": { - "type": "string" - }, - "type": "object" - }, - { - "type": "null" - } - ], - "title": "Headers", - "description": "Optional HTTP headers to include when connecting to the external server (e.g., for bearer authentication)" + "data": { + "$ref": "#/components/schemas/OneDriveData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "name", - "url" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "ExternalServerRequest" + "title": "DumpSandboxResponse[OneDriveData]" }, - "GeneralAuth": { + "DumpSandboxResponse_OutlookCalendarData_": { "properties": { + "sandbox_id": { + "type": "string", + "title": "Sandbox Id", + "description": "Sandbox identifier" + }, + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" + }, + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" + }, "data": { - "additionalProperties": true, - "type": "object", - "title": "Data", - "description": "Any other general authentication data to save" + "$ref": "#/components/schemas/OutlookCalendarData", + "description": "Dumped sandbox data in server-specific format" } }, - "additionalProperties": false, "type": "object", - "title": "Option 2: General Auth Data" + "required": [ + "sandbox_id", + "server_name", + "dumped_at", + "data" + ], + "title": "DumpSandboxResponse[OutlookCalendarData]" }, - "GetAllUsersResponse": { + "DumpSandboxResponse_OutlookMailData_": { "properties": { - "users": { - "items": { - "$ref": "#/components/schemas/UserInfo" - }, - "type": "array", - "title": "Users", - "description": "List of users" - }, - "totalCount": { - "type": "integer", - "title": "Totalcount", - "description": "Total number of users across all pages" + "sandbox_id": { + "type": "string", + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "page": { - "type": "integer", - "title": "Page", - "description": "Current page number" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" }, - "pageSize": { - "type": "integer", - "title": "Pagesize", - "description": "Number of results per page" + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" }, - "totalPages": { - "type": "integer", - "title": "Totalpages", - "description": "Total number of pages" + "data": { + "$ref": "#/components/schemas/OutlookMailData", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "users", - "totalCount", - "page", - "pageSize", - "totalPages" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GetAllUsersResponse" + "title": "DumpSandboxResponse[OutlookMailData]" }, - 
"GetAuthDataResponse": { + "DumpSandboxResponse_QuickBooksData_": { "properties": { - "success": { - "type": "boolean", - "title": "Success", - "description": "Whether the request was successful" + "sandbox_id": { + "type": "string", + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "authData": { - "anyOf": [ - { - "additionalProperties": true, - "type": "object" - }, - { - "type": "null" - } - ], - "title": "GetAuthDataResponseAuthData", - "description": "Complete authentication data including access token, refresh token, scope, expiration, and platform-specific data" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" }, - "error": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Error", - "description": "Error message if the request failed" + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" + }, + "data": { + "$ref": "#/components/schemas/QuickBooksData", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "success" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GetAuthDataResponse" + "title": "DumpSandboxResponse[QuickBooksData]" }, - "GetInstanceResponse": { + "DumpSandboxResponse_ResendData_": { "properties": { - "instanceId": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Instanceid", - "description": "The unique identifier of the connection integration instance." - }, - "authNeeded": { - "type": "boolean", - "title": "Authneeded", - "description": "Indicates whether authentication is required for this server instance.", - "default": false - }, - "isAuthenticated": { - "type": "boolean", - "title": "Isauthenticated", - "description": "Indicates whether the instance is authenticated successfully.", - "default": false - }, - "serverName": { + "sandbox_id": { "type": "string", - "title": "Servername", - "description": "The name of the MCP server associated with the instance.", - "default": "" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "platform": { - "type": "string", - "title": "Platform", - "description": "The platform associated with the instance.", - "default": "" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" }, - "externalUserId": { + "dumped_at": { "type": "string", - "title": "Externaluserid", - "description": "The user's identifier on the external platform.", - "default": "" + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" }, - "oauthUrl": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Oauthurl", - "description": "The OAuth URL for authentication if available, supports white label if configured." 
- } - }, - "type": "object", - "title": "GetInstanceResponse" - }, - "GetMcpServersResponse": { - "properties": { - "servers": { - "items": { - "$ref": "#/components/schemas/McpServer" - }, - "type": "array", - "title": "Servers" + "data": { + "$ref": "#/components/schemas/ResendData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "servers" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GetMcpServersResponse" + "title": "DumpSandboxResponse[ResendData]" }, - "GetUserAuthResponse": { + "DumpSandboxResponse_SalesforceData_": { "properties": { - "success": { - "type": "boolean", - "title": "Success" - }, - "userId": { - "type": "string", - "title": "Userid", - "description": "The user identifier" - }, - "serverName": { + "sandbox_id": { "type": "string", - "title": "Servername", - "description": "The name of the server" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "authData": { - "anyOf": [ - { - "additionalProperties": true, - "type": "object" - }, - { - "type": "null" - } - ], - "title": "GetUserAuthResponseAuthData", - "description": "The authentication data if available" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" }, - "isAuthenticated": { - "type": "boolean", - "title": "Isauthenticated", - "description": "Whether the server has authentication data configured" + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" }, - "message": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Message" + "data": { + "$ref": "#/components/schemas/SalesforceData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "success", - "userId", - "serverName", - "isAuthenticated" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GetUserAuthResponse" + "title": "DumpSandboxResponse[SalesforceData]" }, - "GetUserIntegrationsResponse": { + "DumpSandboxResponse_ShopifyData_": { "properties": { - "integrations": { - "items": { - "$ref": "#/components/schemas/IntegrationItem" - }, - "type": "array", - "title": "Integrations" + "sandbox_id": { + "type": "string", + "title": "Sandbox Id", + "description": "Sandbox identifier" + }, + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" + }, + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" + }, + "data": { + "$ref": "#/components/schemas/ShopifyData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "integrations" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GetUserIntegrationsResponse" + "title": "DumpSandboxResponse[ShopifyData]" }, - "GetUserResponse": { + "DumpSandboxResponse_SlackData_": { "properties": { - "userId": { + "sandbox_id": { "type": "string", - "title": "Userid" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "createdAt": { + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" + }, + "dumped_at": { "type": "string", - "title": "Createdat" + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" }, - "lastUsedAt": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": 
"Lastusedat" + "data": { + "$ref": "#/components/schemas/SlackData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "userId", - "createdAt" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GetUserResponse" + "title": "DumpSandboxResponse[SlackData]" }, - "GitHubBranch-Input": { + "DumpSandboxResponse_SnowflakeData_": { "properties": { - "name": { + "sandbox_id": { "type": "string", - "title": "Name", - "description": "Branch name" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "folders": { - "anyOf": [ - { - "$ref": "#/components/schemas/GitHubFolder-Input" - }, - { - "type": "null" - } - ], - "description": "Root folder structure for this branch" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" + }, + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" + }, + "data": { + "$ref": "#/components/schemas/SnowflakeData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "name" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GitHubBranch", - "description": "GitHub Branch object" + "title": "DumpSandboxResponse[SnowflakeData]" }, - "GitHubBranch-Output": { + "DumpSandboxResponse_SupabaseData_": { "properties": { - "name": { + "sandbox_id": { "type": "string", - "title": "Name", - "description": "Branch name" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "folders": { - "anyOf": [ - { - "$ref": "#/components/schemas/GitHubFolder-Output" - }, - { - "type": "null" - } - ], - "description": "Root folder structure for this branch" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" + }, + "dumped_at": { + "type": "string", + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" + }, + "data": { + "$ref": "#/components/schemas/SupabaseData-Output", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "name" + "sandbox_id", + "server_name", + "dumped_at", + "data" ], - "title": "GitHubBranch", - "description": "GitHub Branch object" + "title": "DumpSandboxResponse[SupabaseData]" }, - "GitHubData-Input": { + "DumpSandboxResponse_WordPressData_": { "properties": { - "repos": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubRepo-Input" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Repos", - "description": "List of repositories" - } - }, - "type": "object", - "title": "GitHubData", - "description": "Complete GitHub sandbox data structure" - }, - "GitHubData-Output": { - "properties": { - "repos": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubRepo-Output" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Repos", - "description": "List of repositories" - } - }, - "type": "object", - "title": "GitHubData", - "description": "Complete GitHub sandbox data structure" - }, - "GitHubFile": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "File name" - }, - "path": { - "type": "string", - "title": "Path", - "description": "File path within the repository" - }, - "content": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Content", - "description": "File content (base64 encoded for binary 
files)" - } - }, - "type": "object", - "required": [ - "name", - "path" - ], - "title": "GitHubFile", - "description": "GitHub File object" - }, - "GitHubFolder-Input": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Folder name" - }, - "path": { - "type": "string", - "title": "Path", - "description": "Folder path within the repository" - }, - "files": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubFile" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Files", - "description": "Files within this folder" - }, - "folders": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubFolder-Input" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Folders", - "description": "Subfolders within this folder" - } - }, - "type": "object", - "required": [ - "name", - "path" - ], - "title": "GitHubFolder", - "description": "GitHub Folder object" - }, - "GitHubFolder-Output": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Folder name" - }, - "path": { + "sandbox_id": { "type": "string", - "title": "Path", - "description": "Folder path within the repository" - }, - "files": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubFile" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Files", - "description": "Files within this folder" + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "folders": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubFolder-Output" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Folders", - "description": "Subfolders within this folder" - } - }, - "type": "object", - "required": [ - "name", - "path" - ], - "title": "GitHubFolder", - "description": "GitHub Folder object" - }, - "GitHubIssue": { - "properties": { - "id": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "title": "Id", - "description": "Issue ID (read-only, set by GitHub)" + "server_name": { + "$ref": "#/components/schemas/SandboxMCPServer", + "description": "MCP server type" }, - "title": { + "dumped_at": { "type": "string", - "title": "Title", - "description": "Issue title" - }, - "body": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Body", - "description": "Issue description/body" - }, - "state": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "State", - "description": "Issue state: open or closed", - "default": "open" + "format": "date-time", + "title": "Dumped At", + "description": "Timestamp of dump" }, - "labels": { - "anyOf": [ - { - "items": { - "type": "string" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Labels", - "description": "List of label names" + "data": { + "$ref": "#/components/schemas/WordPressData", + "description": "Dumped sandbox data in server-specific format" } }, "type": "object", "required": [ - "title" - ], - "title": "GitHubIssue", - "description": "GitHub Issue object" - }, - "GitHubPullRequest": { - "properties": { - "id": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "title": "Id", - "description": "PR ID (read-only, set by GitHub)" - }, - "title": { - "type": "string", - "title": "Title", - "description": "Pull request title" - }, - "body": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Body", - "description": "Pull request description/body" - 
}, - "state": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "State", - "description": "PR state: open, closed, or merged", - "default": "open" - }, - "head": { - "type": "string", - "title": "Head", - "description": "Head branch name" - }, - "base": { - "type": "string", - "title": "Base", - "description": "Base branch name", - "default": "main" - }, - "labels": { - "anyOf": [ - { - "items": { - "type": "string" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Labels", - "description": "List of label names" - } - }, - "type": "object", - "required": [ - "title", - "head" - ], - "title": "GitHubPullRequest", - "description": "GitHub Pull Request object" - }, - "GitHubRepo-Input": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Repository name" - }, - "description": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Description", - "description": "Repository description" - }, - "branches": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubBranch-Input" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Branches", - "description": "List of branches with their folder structures" - }, - "prs": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubPullRequest" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Prs", - "description": "List of pull requests" - }, - "issues": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubIssue" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Issues", - "description": "List of issues" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "GitHubRepo", - "description": "GitHub Repository object" - }, - "GitHubRepo-Output": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Repository name" - }, - "description": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Description", - "description": "Repository description" - }, - "branches": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubBranch-Output" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Branches", - "description": "List of branches with their folder structures" - }, - "prs": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubPullRequest" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Prs", - "description": "List of pull requests" - }, - "issues": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GitHubIssue" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Issues", - "description": "List of issues" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "GitHubRepo", - "description": "GitHub Repository object" - }, - "GmailData": { - "properties": { - "messages": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GmailMessage" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Messages", - "description": "List of Gmail messages to send" - }, - "drafts": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GmailDraft" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Drafts", - "description": "List of Gmail drafts to create" - } - }, - "type": "object", - "title": "GmailData", - "description": "Complete Gmail sandbox data structure with all supported objects" - }, - "GmailDraft": { 
- "properties": { - "subject": { - "type": "string", - "maxLength": 255, - "title": "Subject", - "description": "Draft subject (required)" - }, - "to": { - "type": "string", - "title": "To", - "description": "Recipient email address (required)" - }, - "cc": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Cc", - "description": "CC email addresses (comma-separated)" - }, - "bcc": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Bcc", - "description": "BCC email addresses (comma-separated)" - }, - "body": { - "type": "string", - "title": "Body", - "description": "Draft body content (required)" - }, - "from": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "From", - "description": "Sender email address (optional, defaults to authenticated user)" - }, - "reply_to": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Reply To", - "description": "Reply-to email address" - } - }, - "type": "object", - "required": [ - "subject", - "to", - "body" - ], - "title": "GmailDraft", - "description": "Gmail Draft object with essential fields" - }, - "GmailMessage": { - "properties": { - "subject": { - "type": "string", - "maxLength": 255, - "title": "Subject", - "description": "Email subject (required)" - }, - "to": { - "type": "string", - "title": "To", - "description": "Recipient email address (required)" - }, - "cc": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Cc", - "description": "CC email addresses (comma-separated)" - }, - "bcc": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Bcc", - "description": "BCC email addresses (comma-separated)" - }, - "body": { - "type": "string", - "title": "Body", - "description": "Email body content (required)" - }, - "from": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "From", - "description": "Sender email address (optional, defaults to authenticated user)" - }, - "reply_to": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Reply To", - "description": "Reply-to email address" - }, - "labels": { - "anyOf": [ - { - "items": { - "type": "string" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Labels", - "description": "Gmail labels (e.g., INBOX, SENT, IMPORTANT)" - } - }, - "type": "object", - "required": [ - "subject", - "to", - "body" - ], - "title": "GmailMessage", - "description": "Gmail Message object with essential fields" - }, - "GoogleCalendarAttendee": { - "properties": { - "email": { - "type": "string", - "format": "email", - "title": "Email", - "description": "Attendee email address (required)" - }, - "optional": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "title": "Optional", - "description": "Whether attendance is optional" - } - }, - "type": "object", - "required": [ - "email" - ], - "title": "GoogleCalendarAttendee", - "description": "Google Calendar event attendee" - }, - "GoogleCalendarData-Input": { - "properties": { - "events": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GoogleCalendarEvent" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Events", - "description": "List of Google Calendar events" - } - }, - "type": "object", - "title": "GoogleCalendarData", - "description": "Complete Google Calendar sandbox data structure" - }, - "GoogleCalendarData-Output": { - 
"properties": { - "events": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GoogleCalendarEvent" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Events", - "description": "List of Google Calendar events" - } - }, - "type": "object", - "title": "GoogleCalendarData", - "description": "Complete Google Calendar sandbox data structure" - }, - "GoogleCalendarEvent": { - "properties": { - "summary": { - "type": "string", - "minLength": 1, - "title": "Summary", - "description": "Event title/summary (required)" - }, - "start": { - "$ref": "#/components/schemas/GoogleCalendarEventDateTime", - "description": "Event start time (required)" - }, - "end": { - "$ref": "#/components/schemas/GoogleCalendarEventDateTime", - "description": "Event end time (required)" - }, - "description": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Description", - "description": "Event description" - }, - "location": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Location", - "description": "Event location" - }, - "attendees": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/GoogleCalendarAttendee" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Attendees", - "description": "List of event attendees" - }, - "colorId": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Colorid", - "description": "Event color (1-11)" - }, - "visibility": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Visibility", - "description": "Visibility (default, public, private, confidential)" - }, - "transparency": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Transparency", - "description": "Transparency (opaque, transparent)" - } - }, - "type": "object", - "required": [ - "summary", - "start", - "end" - ], - "title": "GoogleCalendarEvent", - "description": "Google Calendar event object" - }, - "GoogleCalendarEventDateTime": { - "properties": { - "dateTime": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Datetime", - "description": "RFC3339 timestamp (e.g., 2025-12-01T09:00:00-07:00)" - }, - "date": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Date", - "description": "All-day event date in YYYY-MM-DD format" - }, - "timeZone": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Timezone", - "description": "IANA timezone (e.g., America/Los_Angeles)" - } - }, - "type": "object", - "title": "GoogleCalendarEventDateTime", - "description": "Event date/time with timezone support" - }, - "GoogleCloudData-Input": { - "properties": { - "datasets": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/BigQueryDataset" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Datasets", - "description": "BigQuery datasets" + "sandbox_id", + "server_name", + "dumped_at", + "data" + ], + "title": "DumpSandboxResponse[WordPressData]" + }, + "ExternalServerInfo": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "The name of the external server" }, - "tables": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/BigQueryTable-Input" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Tables", - "description": "BigQuery tables" + "url": { + "type": "string", + "title": "Url", + "description": "The URL of the 
external MCP server" }, - "buckets": { + "headers": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/StorageBucket-Input" + "additionalProperties": { + "type": "string" }, - "type": "array" + "type": "object" }, { "type": "null" } ], - "title": "Buckets", - "description": "Cloud Storage buckets" + "title": "Headers", + "description": "Optional HTTP headers used when connecting to the external server" + } + }, + "type": "object", + "required": [ + "name", + "url" + ], + "title": "ExternalServerInfo" + }, + "ExternalServerRequest": { + "properties": { + "name": { + "type": "string", + "minLength": 1, + "title": "Name", + "description": "The name of the external server" }, - "objects": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/StorageObject" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Objects", - "description": "Cloud Storage objects" + "url": { + "type": "string", + "minLength": 1, + "title": "Url", + "description": "The URL of the external MCP server" }, - "logEntries": { + "headers": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/LogEntry" + "additionalProperties": { + "type": "string" }, - "type": "array" + "type": "object" }, { "type": "null" } ], - "title": "Logentries", - "description": "Log entries" + "title": "Headers", + "description": "Optional HTTP headers to include when connecting to the external server (e.g., for bearer authentication)" + } + }, + "type": "object", + "required": [ + "name", + "url" + ], + "title": "ExternalServerRequest" + }, + "GeneralAuth": { + "properties": { + "data": { + "additionalProperties": true, + "type": "object", + "title": "Data", + "description": "Any other general authentication data to save" + } + }, + "additionalProperties": false, + "type": "object", + "title": "Option 2: General Auth Data" + }, + "GetAllUsersResponse": { + "properties": { + "users": { + "items": { + "$ref": "#/components/schemas/UserInfo" + }, + "type": "array", + "title": "Users", + "description": "List of users" }, - "logSinks": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/LogSink" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Logsinks", - "description": "Log sinks" + "totalCount": { + "type": "integer", + "title": "Totalcount", + "description": "Total number of users across all pages" + }, + "page": { + "type": "integer", + "title": "Page", + "description": "Current page number" + }, + "pageSize": { + "type": "integer", + "title": "Pagesize", + "description": "Number of results per page" + }, + "totalPages": { + "type": "integer", + "title": "Totalpages", + "description": "Total number of pages" + } + }, + "type": "object", + "required": [ + "users", + "totalCount", + "page", + "pageSize", + "totalPages" + ], + "title": "GetAllUsersResponse" + }, + "GetAuthDataResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success", + "description": "Whether the request was successful" }, - "logBuckets": { + "authData": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/LogBucket" - }, - "type": "array" + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Logbuckets", - "description": "Log buckets" + "title": "GetAuthDataResponseAuthData", + "description": "Complete authentication data including access token, refresh token, scope, expiration, and platform-specific data" }, - "instances": { + "error": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ComputeInstance" - }, - "type": "array" + 
"type": "string" }, { "type": "null" } ], - "title": "Instances", - "description": "Compute Engine instances" + "title": "Error", + "description": "Error message if the request failed" } }, "type": "object", - "title": "GoogleCloudData", - "description": "Complete Google Cloud sandbox data structure" + "required": [ + "success" + ], + "title": "GetAuthDataResponse" }, - "GoogleCloudData-Output": { + "GetInstanceResponse": { "properties": { - "datasets": { + "instanceId": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/BigQueryDataset" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Datasets", - "description": "BigQuery datasets" + "title": "Instanceid", + "description": "The unique identifier of the connection integration instance." }, - "tables": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/BigQueryTable-Output" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Tables", - "description": "BigQuery tables" + "authNeeded": { + "type": "boolean", + "title": "Authneeded", + "description": "Indicates whether authentication is required for this server instance.", + "default": false }, - "buckets": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/StorageBucket-Output" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Buckets", - "description": "Cloud Storage buckets" + "isAuthenticated": { + "type": "boolean", + "title": "Isauthenticated", + "description": "Indicates whether the instance is authenticated successfully.", + "default": false + }, + "serverName": { + "type": "string", + "title": "Servername", + "description": "The name of the MCP server associated with the instance.", + "default": "" + }, + "platform": { + "type": "string", + "title": "Platform", + "description": "The platform associated with the instance.", + "default": "" + }, + "externalUserId": { + "type": "string", + "title": "Externaluserid", + "description": "The user's identifier on the external platform.", + "default": "" }, - "objects": { + "oauthUrl": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/StorageObject" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Objects", - "description": "Cloud Storage objects" + "title": "Oauthurl", + "description": "The OAuth URL for authentication if available, supports white label if configured." 
+ } + }, + "type": "object", + "title": "GetInstanceResponse" + }, + "GetMcpServersResponse": { + "properties": { + "servers": { + "items": { + "$ref": "#/components/schemas/McpServer" + }, + "type": "array", + "title": "Servers" + } + }, + "type": "object", + "required": [ + "servers" + ], + "title": "GetMcpServersResponse" + }, + "GetUserAuthResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success" + }, + "userId": { + "type": "string", + "title": "Userid", + "description": "The user identifier" + }, + "serverName": { + "type": "string", + "title": "Servername", + "description": "The name of the server" }, - "logEntries": { + "authData": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/LogEntry" - }, - "type": "array" + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Logentries", - "description": "Log entries" + "title": "GetUserAuthResponseAuthData", + "description": "The authentication data if available" + }, + "isAuthenticated": { + "type": "boolean", + "title": "Isauthenticated", + "description": "Whether the server has authentication data configured" }, - "logSinks": { + "message": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/LogSink" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Logsinks", - "description": "Log sinks" + "title": "Message" + } + }, + "type": "object", + "required": [ + "success", + "userId", + "serverName", + "isAuthenticated" + ], + "title": "GetUserAuthResponse" + }, + "GetUserIntegrationsResponse": { + "properties": { + "integrations": { + "items": { + "$ref": "#/components/schemas/IntegrationItem" + }, + "type": "array", + "title": "Integrations" + } + }, + "type": "object", + "required": [ + "integrations" + ], + "title": "GetUserIntegrationsResponse" + }, + "GetUserResponse": { + "properties": { + "userId": { + "type": "string", + "title": "Userid" }, - "logBuckets": { + "createdAt": { + "type": "string", + "title": "Createdat" + }, + "lastUsedAt": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/LogBucket" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Logbuckets", - "description": "Log buckets" + "title": "Lastusedat" + } + }, + "type": "object", + "required": [ + "userId", + "createdAt" + ], + "title": "GetUserResponse" + }, + "GitHubBranch-Input": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Branch name" }, - "instances": { + "folders": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ComputeInstance" - }, - "type": "array" + "$ref": "#/components/schemas/GitHubFolder-Input" }, { "type": "null" } ], - "title": "Instances", - "description": "Compute Engine instances" + "description": "Root folder structure for this branch" } }, "type": "object", - "title": "GoogleCloudData", - "description": "Complete Google Cloud sandbox data structure" + "required": [ + "name" + ], + "title": "GitHubBranch", + "description": "GitHub Branch object" }, - "GoogleDocsData": { + "GitHubBranch-Output": { "properties": { - "documents": { + "name": { + "type": "string", + "title": "Name", + "description": "Branch name" + }, + "folders": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/GoogleDocsDocument" - }, - "type": "array" + "$ref": "#/components/schemas/GitHubFolder-Output" }, { "type": "null" } ], - "title": "Documents", - "description": "List of Google Docs documents" + "description": "Root folder structure for this branch" } }, "type": 
"object", - "title": "GoogleDocsData", - "description": "Complete Google Docs sandbox data structure" + "required": [ + "name" + ], + "title": "GitHubBranch", + "description": "GitHub Branch object" }, - "GoogleDocsDocument": { + "GitHubData-Input": { "properties": { - "title": { - "type": "string", - "minLength": 1, - "title": "Title", - "description": "Document title (required)" - }, - "content": { + "repos": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/GitHubRepo-Input" + }, + "type": "array" }, { "type": "null" } ], - "title": "Content", - "description": "Plain text content of the document" + "title": "Repos", + "description": "List of repositories" } }, "type": "object", - "required": [ - "title" - ], - "title": "GoogleDocsDocument", - "description": "Google Docs document object" + "title": "GitHubData", + "description": "Complete GitHub sandbox data structure" }, - "GoogleDriveData": { + "GitHubData-Output": { "properties": { - "files": { + "repos": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleDriveFile" + "$ref": "#/components/schemas/GitHubRepo-Output" }, "type": "array" }, @@ -16609,28 +15812,27 @@ "type": "null" } ], - "title": "Files", - "description": "List of Google Drive files and folders" + "title": "Repos", + "description": "List of repositories" } }, "type": "object", - "title": "GoogleDriveData", - "description": "Complete Google Drive sandbox data structure" + "title": "GitHubData", + "description": "Complete GitHub sandbox data structure" }, - "GoogleDriveFile": { + "GitHubFile": { "properties": { "name": { "type": "string", - "minLength": 1, "title": "Name", - "description": "Name of the file or folder (required)" + "description": "File name" }, - "mimeType": { + "path": { "type": "string", - "title": "Mimetype", - "description": "MIME type of the file (required). 
Use 'application/vnd.google-apps.folder' for folders" + "title": "Path", + "description": "File path within the repository" }, - "description": { + "content": { "anyOf": [ { "type": "string" @@ -16639,14 +15841,35 @@ "type": "null" } ], - "title": "Description", - "description": "Description of the file" + "title": "Content", + "description": "File content" + } + }, + "type": "object", + "required": [ + "name", + "path" + ], + "title": "GitHubFile", + "description": "GitHub File object" + }, + "GitHubFolder-Input": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Folder name" }, - "parents": { + "path": { + "type": "string", + "title": "Path", + "description": "Folder path within the repository" + }, + "files": { "anyOf": [ { "items": { - "type": "string" + "$ref": "#/components/schemas/GitHubFile" }, "type": "array" }, @@ -16654,37 +15877,50 @@ "type": "null" } ], - "title": "Parents", - "description": "List of parent folder IDs" + "title": "Files", + "description": "Files within this folder" }, - "content": { + "folders": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/GitHubFolder-Input" + }, + "type": "array" }, { "type": "null" } ], - "title": "Content", - "description": "Text content for Google Docs documents" + "title": "Folders", + "description": "Subfolders within this folder" } }, "type": "object", "required": [ "name", - "mimeType" + "path" ], - "title": "GoogleDriveFile", - "description": "Google Drive file or folder object" + "title": "GitHubFolder", + "description": "GitHub Folder object" }, - "GoogleFormsData-Input": { + "GitHubFolder-Output": { "properties": { - "forms": { + "name": { + "type": "string", + "title": "Name", + "description": "Folder name" + }, + "path": { + "type": "string", + "title": "Path", + "description": "Folder path within the repository" + }, + "files": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleFormsForm-Input" + "$ref": "#/components/schemas/GitHubFile" }, "type": "array" }, @@ -16692,21 +15928,14 @@ "type": "null" } ], - "title": "Forms", - "description": "List of Google Forms" - } - }, - "type": "object", - "title": "GoogleFormsData", - "description": "Complete Google Forms sandbox data structure" - }, - "GoogleFormsData-Output": { - "properties": { - "forms": { + "title": "Files", + "description": "Files within this folder" + }, + "folders": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleFormsForm-Output" + "$ref": "#/components/schemas/GitHubFolder-Output" }, "type": "array" }, @@ -16714,76 +15943,67 @@ "type": "null" } ], - "title": "Forms", - "description": "List of Google Forms" + "title": "Folders", + "description": "Subfolders within this folder" } }, "type": "object", - "title": "GoogleFormsData", - "description": "Complete Google Forms sandbox data structure" + "required": [ + "name", + "path" + ], + "title": "GitHubFolder", + "description": "GitHub Folder object" }, - "GoogleFormsForm-Input": { + "GitHubIssue": { "properties": { - "info": { - "$ref": "#/components/schemas/GoogleFormsInfo", - "description": "Required. 
The title and description of the form" - }, - "settings": { + "id": { "anyOf": [ { - "$ref": "#/components/schemas/GoogleFormsFormSettings" + "type": "integer" }, { "type": "null" } ], - "description": "The form's settings" + "title": "Id", + "description": "Issue ID (read-only, set by GitHub)" }, - "items": { + "issue_title": { + "type": "string", + "title": "Issue Title", + "description": "Issue title" + }, + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/GoogleFormsItem" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Items", - "description": "A list of the form's items" - } - }, - "type": "object", - "required": [ - "info" - ], - "title": "GoogleFormsForm", - "description": "Google Forms form object - matches API Form resource" - }, - "GoogleFormsForm-Output": { - "properties": { - "info": { - "$ref": "#/components/schemas/GoogleFormsInfo", - "description": "Required. The title and description of the form" + "title": "Description", + "description": "Issue description/body" }, - "settings": { + "status": { "anyOf": [ { - "$ref": "#/components/schemas/GoogleFormsFormSettings" + "type": "string" }, { "type": "null" } ], - "description": "The form's settings" + "title": "Status", + "description": "Issue state: open or closed", + "default": "open" }, - "items": { + "labels": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleFormsItem" + "type": "string" }, "type": "array" }, @@ -16791,41 +16011,35 @@ "type": "null" } ], - "title": "Items", - "description": "A list of the form's items" + "title": "Labels", + "description": "List of label names" } }, "type": "object", "required": [ - "info" + "issue_title" ], - "title": "GoogleFormsForm", - "description": "Google Forms form object - matches API Form resource" + "title": "GitHubIssue", + "description": "GitHub Issue object" }, - "GoogleFormsFormSettings": { + "GitHubPullRequest": { "properties": { - "quizSettings": { + "id": { "anyOf": [ { - "$ref": "#/components/schemas/GoogleFormsQuizSettings" + "type": "integer" }, { "type": "null" } ], - "description": "Settings related to quiz forms" - } - }, - "type": "object", - "title": "GoogleFormsFormSettings", - "description": "Form settings - matches API FormSettings object" - }, - "GoogleFormsInfo": { - "properties": { - "title": { + "title": "Id", + "description": "PR ID (read-only, set by GitHub)" + }, + "pr_title": { "type": "string", - "title": "Title", - "description": "The title of the form visible to responders" + "title": "Pr Title", + "description": "Pull request title" }, "description": { "anyOf": [ @@ -16837,31 +16051,9 @@ } ], "title": "Description", - "description": "The description of the form" + "description": "Pull request description/body" }, - "documentTitle": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Documenttitle", - "description": "The title of the document visible in Drive (output only, can be set on create)" - } - }, - "type": "object", - "required": [ - "title" - ], - "title": "GoogleFormsInfo", - "description": "Form info - matches API Info object" - }, - "GoogleFormsItem": { - "properties": { - "itemId": { + "status": { "anyOf": [ { "type": "string" @@ -16870,20 +16062,51 @@ "type": "null" } ], - "title": "Itemid", - "description": "The item ID" + "title": "Status", + "description": "PR state: open, closed, or merged", + "default": "open" }, - "title": { + "source_branch": { + "type": "string", + "title": "Source Branch", + "description": "Head branch name" 
+ }, + "target_branch": { + "type": "string", + "title": "Target Branch", + "description": "Base branch name", + "default": "main" + }, + "labels": { "anyOf": [ { - "type": "string" + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Title", - "description": "The title of the item" + "title": "Labels", + "description": "List of label names" + } + }, + "type": "object", + "required": [ + "pr_title", + "source_branch" + ], + "title": "GitHubPullRequest", + "description": "GitHub Pull Request object" + }, + "GitHubRepo-Input": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Repository name" }, "description": { "anyOf": [ @@ -16895,137 +16118,155 @@ } ], "title": "Description", - "description": "The description of the item" + "description": "Repository description" }, - "questionItem": { + "branches": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/GitHubBranch-Input" + }, + "type": "array" }, { "type": "null" } ], - "title": "Questionitem", - "description": "Poses a question to the user" + "title": "Branches", + "description": "List of branches with their folder structures" }, - "questionGroupItem": { + "prs": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/GitHubPullRequest" + }, + "type": "array" }, { "type": "null" } ], - "title": "Questiongroupitem", - "description": "Poses one or more questions with a single major prompt" + "title": "Prs", + "description": "List of pull requests" }, - "pageBreakItem": { + "issues": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/GitHubIssue" + }, + "type": "array" }, { "type": "null" } ], - "title": "Pagebreakitem", - "description": "Starts a new page with a title" + "title": "Issues", + "description": "List of issues" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "GitHubRepo", + "description": "GitHub Repository object" + }, + "GitHubRepo-Output": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Repository name" }, - "textItem": { + "description": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Textitem", - "description": "Displays a title and description on the page" + "title": "Description", + "description": "Repository description" }, - "imageItem": { + "branches": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/GitHubBranch-Output" + }, + "type": "array" }, { "type": "null" } ], - "title": "Imageitem", - "description": "Displays an image on the page" + "title": "Branches", + "description": "List of branches with their folder structures" }, - "videoItem": { + "prs": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/GitHubPullRequest" + }, + "type": "array" }, { "type": "null" } ], - "title": "Videoitem", - "description": "Displays a video on the page" - } - }, - "type": "object", - "title": "GoogleFormsItem", - "description": "Form item - matches API Item object. Uses Dict for flexibility to match API structure." 
- }, - "GoogleFormsQuizSettings": { - "properties": { - "isQuiz": { + "title": "Prs", + "description": "List of pull requests" + }, + "issues": { "anyOf": [ { - "type": "boolean" + "items": { + "$ref": "#/components/schemas/GitHubIssue" + }, + "type": "array" }, { "type": "null" } ], - "title": "Isquiz", - "description": "Whether this form is a quiz" + "title": "Issues", + "description": "List of issues" } }, "type": "object", - "title": "GoogleFormsQuizSettings", - "description": "Quiz settings - matches API QuizSettings object" + "required": [ + "name" + ], + "title": "GitHubRepo", + "description": "GitHub Repository object" }, - "GoogleSheetsCellValue": { + "GmailData": { "properties": { - "userEnteredValue": { + "messages": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/GmailMessage" + }, + "type": "array" }, { "type": "null" } ], - "title": "Userenteredvalue", - "description": "The value entered by the user" - } - }, - "type": "object", - "title": "GoogleSheetsCellValue", - "description": "Cell value containing only user-entered data" - }, - "GoogleSheetsData-Input": { - "properties": { - "spreadsheets": { + "title": "Messages", + "description": "List of Gmail messages to send" + }, + "drafts": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleSheetsSpreadsheet-Input" + "$ref": "#/components/schemas/GmailDraft" }, "type": "array" }, @@ -17033,165 +16274,161 @@ "type": "null" } ], - "title": "Spreadsheets", - "description": "List of Google Sheets spreadsheets" + "title": "Drafts", + "description": "List of Gmail drafts to create" } }, "type": "object", - "title": "GoogleSheetsData", - "description": "Complete Google Sheets sandbox data structure" + "title": "GmailData", + "description": "Complete Gmail sandbox data structure with all supported objects" }, - "GoogleSheetsData-Output": { + "GmailDraft": { "properties": { - "spreadsheets": { + "subject": { + "type": "string", + "maxLength": 255, + "title": "Subject", + "description": "Draft subject (required)" + }, + "to": { + "type": "string", + "title": "To", + "description": "Recipient email address (required)" + }, + "cc": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/GoogleSheetsSpreadsheet-Output" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Spreadsheets", - "description": "List of Google Sheets spreadsheets" - } - }, - "type": "object", - "title": "GoogleSheetsData", - "description": "Complete Google Sheets sandbox data structure" - }, - "GoogleSheetsGridData-Input": { - "properties": { - "startRow": { + "title": "Cc", + "description": "CC email addresses (comma-separated)" + }, + "bcc": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Startrow", - "description": "Starting row index", - "default": 0 + "title": "Bcc", + "description": "BCC email addresses (comma-separated)" + }, + "body": { + "type": "string", + "title": "Body", + "description": "Draft body content (required)" }, - "startColumn": { + "from": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Startcolumn", - "description": "Starting column index", - "default": 0 + "title": "From", + "description": "Sender email address (optional, defaults to authenticated user)" }, - "rowData": { + "reply_to": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/GoogleSheetsRowData" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Rowdata", - 
"description": "Row data with values array" + "title": "Reply To", + "description": "Reply-to email address" } }, "type": "object", - "title": "GoogleSheetsGridData", - "description": "Grid data for a sheet" + "required": [ + "subject", + "to", + "body" + ], + "title": "GmailDraft", + "description": "Gmail Draft object with essential fields" }, - "GoogleSheetsGridData-Output": { + "GmailMessage": { "properties": { - "startRow": { + "subject": { + "type": "string", + "maxLength": 255, + "title": "Subject", + "description": "Email subject (required)" + }, + "to": { + "type": "string", + "title": "To", + "description": "Recipient email address (required)" + }, + "cc": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Startrow", - "description": "Starting row index", - "default": 0 + "title": "Cc", + "description": "CC email addresses (comma-separated)" }, - "startColumn": { + "bcc": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Startcolumn", - "description": "Starting column index", - "default": 0 + "title": "Bcc", + "description": "BCC email addresses (comma-separated)" }, - "rowData": { + "body": { + "type": "string", + "title": "Body", + "description": "Email body content (required)" + }, + "from": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/GoogleSheetsRowData" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Rowdata", - "description": "Row data with values array" - } - }, - "type": "object", - "title": "GoogleSheetsGridData", - "description": "Grid data for a sheet" - }, - "GoogleSheetsRowData": { - "properties": { - "values": { + "title": "From", + "description": "Sender email address (optional, defaults to authenticated user)" + }, + "reply_to": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/GoogleSheetsCellValue" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Values", - "description": "Cell values in the row" - } - }, - "type": "object", - "title": "GoogleSheetsRowData", - "description": "Row data containing cell values" - }, - "GoogleSheetsSheet-Input": { - "properties": { - "properties": { - "$ref": "#/components/schemas/GoogleSheetsSheetProperties", - "description": "Sheet properties (required)" + "title": "Reply To", + "description": "Reply-to email address" }, - "data": { + "labels": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleSheetsGridData-Input" + "type": "string" }, "type": "array" }, @@ -17199,98 +16436,78 @@ "type": "null" } ], - "title": "Data", - "description": "Grid data for the sheet" + "title": "Labels", + "description": "Gmail labels (e.g., INBOX, SENT, IMPORTANT)" } }, "type": "object", "required": [ - "properties" + "subject", + "to", + "body" ], - "title": "GoogleSheetsSheet", - "description": "A sheet within a Google Sheets spreadsheet" + "title": "GmailMessage", + "description": "Gmail Message object with essential fields" }, - "GoogleSheetsSheet-Output": { + "GoogleCalendarAttendee": { "properties": { - "properties": { - "$ref": "#/components/schemas/GoogleSheetsSheetProperties", - "description": "Sheet properties (required)" + "email": { + "type": "string", + "format": "email", + "title": "Email", + "description": "Attendee email address (required)" }, - "data": { + "name": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/GoogleSheetsGridData-Output" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Data", - "description": "Grid 
data for the sheet" - } - }, - "type": "object", - "required": [ - "properties" - ], - "title": "GoogleSheetsSheet", - "description": "A sheet within a Google Sheets spreadsheet" - }, - "GoogleSheetsSheetProperties": { - "properties": { - "title": { - "type": "string", - "minLength": 1, - "title": "Title", - "description": "Sheet title (required)" + "title": "Name", + "description": "Attendee display name" }, - "index": { + "isOptional": { "anyOf": [ { - "type": "integer" + "type": "boolean" }, { "type": "null" } ], - "title": "Index", - "description": "Sheet index position" + "title": "Isoptional", + "description": "Whether attendance is optional" }, - "gridProperties": { + "comment": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Gridproperties", - "description": "Grid properties (rowCount, columnCount)" + "title": "Comment", + "description": "Attendee's response comment" } }, "type": "object", "required": [ - "title" + "email" ], - "title": "GoogleSheetsSheetProperties", - "description": "Properties for a sheet within a spreadsheet" + "title": "GoogleCalendarAttendee", + "description": "Google Calendar event attendee" }, - "GoogleSheetsSpreadsheet-Input": { + "GoogleCalendarData-Input": { "properties": { - "properties": { - "$ref": "#/components/schemas/GoogleSheetsSpreadsheetProperties", - "description": "Spreadsheet properties including title (required)" - }, - "sheets": { + "events": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleSheetsSheet-Input" + "$ref": "#/components/schemas/GoogleCalendarEvent" }, "type": "array" }, @@ -17298,28 +16515,21 @@ "type": "null" } ], - "title": "Sheets", - "description": "List of sheets in the spreadsheet" + "title": "Events", + "description": "List of Google Calendar events" } }, "type": "object", - "required": [ - "properties" - ], - "title": "GoogleSheetsSpreadsheet", - "description": "Google Sheets spreadsheet object" + "title": "GoogleCalendarData", + "description": "Complete Google Calendar sandbox data structure" }, - "GoogleSheetsSpreadsheet-Output": { + "GoogleCalendarData-Output": { "properties": { - "properties": { - "$ref": "#/components/schemas/GoogleSheetsSpreadsheetProperties", - "description": "Spreadsheet properties including title (required)" - }, - "sheets": { + "events": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/GoogleSheetsSheet-Output" + "$ref": "#/components/schemas/GoogleCalendarEvent" }, "type": "array" }, @@ -17327,53 +16537,23 @@ "type": "null" } ], - "title": "Sheets", - "description": "List of sheets in the spreadsheet" - } - }, - "type": "object", - "required": [ - "properties" - ], - "title": "GoogleSheetsSpreadsheet", - "description": "Google Sheets spreadsheet object" - }, - "GoogleSheetsSpreadsheetProperties": { - "properties": { - "title": { - "type": "string", - "title": "Title", - "description": "Spreadsheet title (required)" - } - }, - "type": "object", - "required": [ - "title" - ], - "title": "GoogleSheetsSpreadsheetProperties", - "description": "Properties for a spreadsheet - only includes title" - }, - "HTTPValidationError": { - "properties": { - "detail": { - "items": { - "$ref": "#/components/schemas/ValidationError" - }, - "type": "array", - "title": "Detail" + "title": "Events", + "description": "List of Google Calendar events" } }, "type": "object", - "title": "HTTPValidationError" + "title": "GoogleCalendarData", + "description": "Complete Google Calendar sandbox data structure" }, - 
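
Similarly, a minimal `GmailData` payload using the `GmailMessage` and `GmailDraft` schemas above. `subject`, `to`, and `body` are required on both object types; everything else is optional, and the addresses here are placeholders.

```python
import json

# Illustrative GmailData payload; addresses are placeholders.
gmail_data = {
    "messages": [
        {
            "subject": "Kickoff notes",                 # required, max 255 chars
            "to": "alice@example.com",                  # required
            "cc": "bob@example.com,carol@example.com",  # comma-separated
            "body": "Notes from today's kickoff ...",   # required
            "labels": ["INBOX", "IMPORTANT"],           # messages only; drafts have no labels field
        }
    ],
    "drafts": [
        {
            "subject": "Draft reply",
            "to": "alice@example.com",
            "body": "Thanks, following up soon.",
        }
    ],
}

print(json.dumps(gmail_data, indent=2))
```
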
"HubSpotCompany": { + "GoogleCalendarEvent": { "properties": { - "name": { + "title": { "type": "string", - "title": "Name", - "description": "Company name" + "minLength": 1, + "title": "Title", + "description": "Event title/summary (required)" }, - "domain": { + "startTime": { "anyOf": [ { "type": "string" @@ -17382,10 +16562,10 @@ "type": "null" } ], - "title": "Domain", - "description": "Company domain/website" + "title": "Starttime", + "description": "RFC3339 timestamp for timed events (e.g., 2025-12-01T09:00:00-07:00)" }, - "industry": { + "endTime": { "anyOf": [ { "type": "string" @@ -17394,10 +16574,10 @@ "type": "null" } ], - "title": "Industry", - "description": "Company industry" + "title": "Endtime", + "description": "RFC3339 timestamp for timed events" }, - "city": { + "startDate": { "anyOf": [ { "type": "string" @@ -17406,10 +16586,10 @@ "type": "null" } ], - "title": "City", - "description": "Company city" + "title": "Startdate", + "description": "All-day event start date in YYYY-MM-DD format" }, - "state": { + "endDate": { "anyOf": [ { "type": "string" @@ -17418,10 +16598,10 @@ "type": "null" } ], - "title": "State", - "description": "Company state/region" + "title": "Enddate", + "description": "All-day event end date in YYYY-MM-DD format" }, - "country": { + "timezone": { "anyOf": [ { "type": "string" @@ -17430,10 +16610,10 @@ "type": "null" } ], - "title": "Country", - "description": "Company country" + "title": "Timezone", + "description": "IANA timezone (e.g., America/Los_Angeles)" }, - "phone": { + "description": { "anyOf": [ { "type": "string" @@ -17442,10 +16622,10 @@ "type": "null" } ], - "title": "Phone", - "description": "Company phone number" + "title": "Description", + "description": "Event description" }, - "number_of_employees": { + "location": { "anyOf": [ { "type": "string" @@ -17454,10 +16634,10 @@ "type": "null" } ], - "title": "Number Of Employees", - "description": "Number of employees" + "title": "Location", + "description": "Event location" }, - "annual_revenue": { + "visibility": { "anyOf": [ { "type": "string" @@ -17466,75 +16646,54 @@ "type": "null" } ], - "title": "Annual Revenue", - "description": "Annual revenue" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "HubSpotCompany", - "description": "HubSpot Company object.\nBase object, typically doesn't associate upwards in this schema, but contacts/deals associate to it." 
- }, - "HubSpotContact": { - "properties": { - "firstname": { - "type": "string", - "title": "Firstname", - "description": "Contact first name" - }, - "lastname": { - "type": "string", - "title": "Lastname", - "description": "Contact last name" - }, - "email": { - "type": "string", - "title": "Email", - "description": "Contact email address" + "title": "Visibility", + "description": "Visibility (default, public, private, confidential)" }, - "phone": { + "recurrence": { "anyOf": [ { - "type": "string" + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Phone", - "description": "Contact phone number" + "title": "Recurrence", + "description": "Recurrence rules (RRULE format)" }, - "job_title": { + "attendees": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/GoogleCalendarAttendee" + }, + "type": "array" }, { "type": "null" } ], - "title": "Job Title", - "description": "Contact job title" + "title": "Attendees", + "description": "List of event attendees" } }, "type": "object", "required": [ - "firstname", - "lastname", - "email" + "title" ], - "title": "HubSpotContact", - "description": "HubSpot Contact object.\nCan be associated with: Companies." + "title": "GoogleCalendarEvent", + "description": "Google Calendar event object" }, - "HubSpotData-Input": { + "GoogleCloudData-Input": { "properties": { - "companies": { + "datasets": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/HubSpotCompany" + "$ref": "#/components/schemas/BigQueryDataset" }, "type": "array" }, @@ -17542,14 +16701,14 @@ "type": "null" } ], - "title": "Companies", - "description": "List of companies" + "title": "Datasets", + "description": "BigQuery datasets" }, - "contacts": { + "tables": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/HubSpotContact" + "$ref": "#/components/schemas/BigQueryTable-Input" }, "type": "array" }, @@ -17557,14 +16716,14 @@ "type": "null" } ], - "title": "Contacts", - "description": "List of contacts" + "title": "Tables", + "description": "BigQuery tables" }, - "deals": { + "buckets": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/HubSpotDeal" + "$ref": "#/components/schemas/StorageBucket" }, "type": "array" }, @@ -17572,14 +16731,14 @@ "type": "null" } ], - "title": "Deals", - "description": "List of deals" + "title": "Buckets", + "description": "Cloud Storage buckets" }, - "tickets": { + "objects": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/HubSpotTicket" + "$ref": "#/components/schemas/StorageObject" }, "type": "array" }, @@ -17587,14 +16746,14 @@ "type": "null" } ], - "title": "Tickets", - "description": "List of tickets" + "title": "Objects", + "description": "Cloud Storage objects" }, - "tasks": { + "log_entries": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/HubSpotTask" + "$ref": "#/components/schemas/LogEntry" }, "type": "array" }, @@ -17602,280 +16761,272 @@ "type": "null" } ], - "title": "Tasks", - "description": "List of tasks" - } - }, - "type": "object", - "title": "HubSpotData", - "description": "Complete HubSpot sandbox data structure (Flat Schema).\n\nAll object types are top-level lists. Relationships are defined via association fields\nin the respective objects (e.g. associated_company_names in HubSpotContact)." 
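
A sketch of a `GoogleCalendarEvent` as defined above: only `title` is required; timed events carry RFC3339 `startTime`/`endTime`, while all-day events would use `startDate`/`endDate` instead. The concrete values below are invented.

```python
import json

# Illustrative timed event with one attendee and a weekly recurrence.
event = {
    "title": "Sprint planning",                    # required
    "startTime": "2025-12-01T09:00:00-07:00",      # RFC3339 (timed event)
    "endTime": "2025-12-01T10:00:00-07:00",
    "timezone": "America/Los_Angeles",             # IANA timezone
    "location": "Room 4B",
    "visibility": "default",                       # default/public/private/confidential
    "recurrence": ["RRULE:FREQ=WEEKLY;BYDAY=MO"],  # RRULE format
    "attendees": [
        {"email": "alice@example.com", "isOptional": False}  # email is required
    ],
}

print(json.dumps(event, indent=2))
```
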
- }, - "HubSpotData-Output": { - "additionalProperties": true, - "type": "object" - }, - "HubSpotDeal": { - "properties": { - "deal_name": { - "type": "string", - "title": "Deal Name", - "description": "Deal name" + "title": "Log Entries", + "description": "Log entries" }, - "amount": { + "log_sinks": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/LogSink" + }, + "type": "array" }, { "type": "null" } ], - "title": "Amount", - "description": "Deal amount" + "title": "Log Sinks", + "description": "Log sinks" }, - "deal_stage": { + "log_buckets": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/LogBucket" + }, + "type": "array" }, { "type": "null" } ], - "title": "Deal Stage", - "description": "Deal stage" + "title": "Log Buckets", + "description": "Log buckets" }, - "pipeline": { + "instances": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/ComputeInstance" + }, + "type": "array" }, { "type": "null" } ], - "title": "Pipeline", - "description": "Deal pipeline" - }, - "close_date": { + "title": "Instances", + "description": "Compute Engine instances" + } + }, + "type": "object", + "title": "GoogleCloudData", + "description": "Complete Google Cloud sandbox data structure" + }, + "GoogleCloudData-Output": { + "properties": { + "datasets": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/BigQueryDataset" + }, + "type": "array" }, { "type": "null" } ], - "title": "Close Date", - "description": "Expected close date" - } - }, - "type": "object", - "required": [ - "deal_name" - ], - "title": "HubSpotDeal", - "description": "HubSpot Deal object.\nCan be associated with: Contacts, Companies." - }, - "HubSpotTask": { - "properties": { - "subject": { - "type": "string", - "title": "Subject", - "description": "Task subject/title" + "title": "Datasets", + "description": "BigQuery datasets" }, - "body": { + "tables": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/BigQueryTable-Output" + }, + "type": "array" }, { "type": "null" } ], - "title": "Body", - "description": "Task body/description" + "title": "Tables", + "description": "BigQuery tables" }, - "status": { + "buckets": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/StorageBucket" + }, + "type": "array" }, { "type": "null" } ], - "title": "Status", - "description": "Task status (NOT_STARTED, IN_PROGRESS, COMPLETED, WAITING, DEFERRED)" + "title": "Buckets", + "description": "Cloud Storage buckets" }, - "priority": { + "objects": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/StorageObject" + }, + "type": "array" }, { "type": "null" } ], - "title": "Priority", - "description": "Task priority (HIGH, MEDIUM, LOW)" + "title": "Objects", + "description": "Cloud Storage objects" }, - "due_date": { + "log_entries": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/LogEntry" + }, + "type": "array" }, { "type": "null" } - ], - "title": "Due Date", - "description": "Task due date (ISO 8601 format)" - } - }, - "type": "object", - "required": [ - "subject" - ], - "title": "HubSpotTask", - "description": "HubSpot Task object.\nCan be associated with: Contacts, Companies, Deals, Tickets." 
- }, - "HubSpotTicket": { - "properties": { - "subject": { - "type": "string", - "title": "Subject", - "description": "Ticket subject" + ], + "title": "Log Entries", + "description": "Log entries" }, - "content": { + "log_sinks": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/LogSink" + }, + "type": "array" }, { "type": "null" } ], - "title": "Content", - "description": "Ticket content/description" + "title": "Log Sinks", + "description": "Log sinks" }, - "pipeline": { + "log_buckets": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/LogBucket" + }, + "type": "array" }, { "type": "null" } ], - "title": "Pipeline", - "description": "Ticket pipeline" + "title": "Log Buckets", + "description": "Log buckets" }, - "pipeline_stage": { + "instances": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/ComputeInstance" + }, + "type": "array" }, { "type": "null" } ], - "title": "Pipeline Stage", - "description": "Ticket pipeline stage" - }, - "priority": { + "title": "Instances", + "description": "Compute Engine instances" + } + }, + "type": "object", + "title": "GoogleCloudData", + "description": "Complete Google Cloud sandbox data structure" + }, + "GoogleDocsData": { + "properties": { + "documents": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/GoogleDocsDocument" + }, + "type": "array" }, { "type": "null" } ], - "title": "Priority", - "description": "Ticket priority (HIGH, MEDIUM, LOW)" + "title": "Documents", + "description": "List of Google Docs documents" } }, "type": "object", - "required": [ - "subject" - ], - "title": "HubSpotTicket", - "description": "HubSpot Ticket object.\nCan be associated with: Contacts, Companies." + "title": "GoogleDocsData", + "description": "Complete Google Docs sandbox data structure" }, - "InitializeSandboxResponse": { + "GoogleDocsDocument": { "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "status": { - "$ref": "#/components/schemas/SandboxStatus", - "description": "Current status" - }, - "message": { + "title": { "type": "string", - "title": "Message", - "description": "Initialization result message" + "minLength": 1, + "title": "Title", + "description": "Document title (required)" }, - "records_created": { + "content": { "anyOf": [ { - "additionalProperties": { - "type": "integer" - }, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Records Created", - "description": "Count of records created per object type" + "title": "Content", + "description": "Plain text content of the document" } }, "type": "object", "required": [ - "sandbox_id", - "status", - "message" + "title" ], - "title": "InitializeSandboxResponse", - "description": "Response model for sandbox initialization" + "title": "GoogleDocsDocument", + "description": "Google Docs document object" }, - "IntegrationItem": { + "GoogleDriveData": { "properties": { - "name": { - "$ref": "#/components/schemas/McpServerName" - }, - "is_authenticated": { - "type": "boolean", - "title": "Is Authenticated", - "description": "Whether the integration is authenticated" + "files": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/GoogleDriveFile" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Files", + "description": "List of Google Drive files and folders" } }, "type": "object", - "required": [ - "name", - "is_authenticated" - ], - "title": "IntegrationItem" + 
"title": "GoogleDriveData", + "description": "Complete Google Drive sandbox data structure" }, - "JiraBoard": { + "GoogleDriveFile": { "properties": { "name": { "type": "string", + "minLength": 1, "title": "Name", - "description": "Board name" + "description": "Name of the file or folder (required)" }, - "type": { + "mimeType": { "type": "string", - "title": "Type", - "description": "Board type (scrum or kanban)", - "default": "scrum" + "title": "Mimetype", + "description": "MIME type of the file (required). Use 'application/vnd.google-apps.folder' for folders" }, - "project_key": { + "description": { "anyOf": [ { "type": "string" @@ -17884,10 +17035,25 @@ "type": "null" } ], - "title": "Project Key", - "description": "Key of the project this board belongs to (for initialization)" + "title": "Description", + "description": "Description of the file" }, - "filter_jql": { + "parents": { + "anyOf": [ + { + "items": { + "type": "string" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Parents", + "description": "List of parent folder IDs" + }, + "content": { "anyOf": [ { "type": "string" @@ -17896,150 +17062,233 @@ "type": "null" } ], - "title": "Filter Jql", - "description": "JQL for the board filter" + "title": "Content", + "description": "Text content for Google Docs documents" } }, "type": "object", "required": [ - "name" + "name", + "mimeType" ], - "title": "JiraBoard", - "description": "Jira Board object" + "title": "GoogleDriveFile", + "description": "Google Drive file or folder object" }, - "JiraComment": { + "GoogleFormsChoiceOption": { "properties": { - "body": { + "value": { "type": "string", - "title": "Body", - "description": "Comment text content" + "title": "Value", + "description": "The text value of the choice option" } }, "type": "object", "required": [ - "body" + "value" ], - "title": "JiraComment", - "description": "Jira Comment object" + "title": "GoogleFormsChoiceOption", + "description": "A single choice option for choice-based questions" }, - "JiraData-Input": { + "GoogleFormsData-Input": { "properties": { - "projects": { + "forms": { "items": { - "$ref": "#/components/schemas/JiraProject" + "$ref": "#/components/schemas/GoogleFormsForm-Input" }, "type": "array", - "title": "Projects", - "description": "List of projects with their issues" + "title": "Forms", + "description": "List of Google Forms" + } + }, + "type": "object", + "title": "GoogleFormsData", + "description": "Complete Google Forms sandbox data structure" + }, + "GoogleFormsData-Output": { + "properties": { + "forms": { + "items": { + "$ref": "#/components/schemas/GoogleFormsForm-Output" + }, + "type": "array", + "title": "Forms", + "description": "List of Google Forms" + } + }, + "type": "object", + "title": "GoogleFormsData", + "description": "Complete Google Forms sandbox data structure" + }, + "GoogleFormsForm-Input": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "description": "The form title visible to responders" }, - "boards": { + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description", + "description": "The form description" + }, + "is_quiz": { + "type": "boolean", + "title": "Is Quiz", + "description": "Whether this form is a quiz", + "default": false + }, + "items": { "items": { - "$ref": "#/components/schemas/JiraBoard" + "$ref": "#/components/schemas/GoogleFormsItem-Input" }, "type": "array", - "title": "Boards", - "description": "List of boards" + "title": "Items", + "description": "List 
of form items (questions and section headers)" + } + }, + "type": "object", + "required": [ + "title" + ], + "title": "GoogleFormsForm", + "description": "A Google Form with simplified structure" + }, + "GoogleFormsForm-Output": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "description": "The form title visible to responders" + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description", + "description": "The form description" + }, + "is_quiz": { + "type": "boolean", + "title": "Is Quiz", + "description": "Whether this form is a quiz", + "default": false }, - "sprints": { + "items": { "items": { - "$ref": "#/components/schemas/JiraSprint" + "$ref": "#/components/schemas/GoogleFormsItem-Output" }, "type": "array", - "title": "Sprints", - "description": "List of sprints" + "title": "Items", + "description": "List of form items (questions and section headers)" } }, "type": "object", - "title": "JiraData", - "description": "Complete Jira sandbox data structure.\n\nRelational structure for initialization:\n- Projects contain Issues\n- Boards refer to Projects (via key)\n- Sprints refer to Boards (via name)\n- Issues can refer to Sprints (via name)" - }, - "JiraData-Output": { - "additionalProperties": true, - "type": "object" + "required": [ + "title" + ], + "title": "GoogleFormsForm", + "description": "A Google Form with simplified structure" }, - "JiraIssue": { + "GoogleFormsItem-Input": { "properties": { - "summary": { + "item_type": { "type": "string", - "title": "Summary", - "description": "Issue summary/title" + "enum": [ + "question", + "section_header" + ], + "title": "Item Type", + "description": "Type of form item" }, - "description": { + "question": { "anyOf": [ { - "type": "string" + "$ref": "#/components/schemas/GoogleFormsQuestion" }, { "type": "null" } ], - "title": "Description", - "description": "Issue description" - }, - "issue_type": { - "type": "string", - "title": "Issue Type", - "description": "Issue type (Task, Bug, Story, etc.)", - "default": "Task" + "description": "Question details (when item_type is 'question')" }, - "priority": { + "section_header": { "anyOf": [ { - "type": "string" + "$ref": "#/components/schemas/GoogleFormsSectionHeader" }, { "type": "null" } ], - "title": "Priority", - "description": "Issue priority (Highest, High, Medium, Low, Lowest)" + "description": "Section header details (when item_type is 'section_header')" + } + }, + "type": "object", + "required": [ + "item_type" + ], + "title": "GoogleFormsItem", + "description": "A single item in a form - can be a question or section header" + }, + "GoogleFormsItem-Output": { + "properties": { + "item_type": { + "type": "string", + "enum": [ + "question", + "section_header" + ], + "title": "Item Type", + "description": "Type of form item" }, - "sprint_name": { + "question": { "anyOf": [ { - "type": "string" + "$ref": "#/components/schemas/GoogleFormsQuestion" }, { "type": "null" } ], - "title": "Sprint Name", - "description": "Name of the sprint this issue belongs to (for initialization)" + "description": "Question details (when item_type is 'question')" }, - "comments": { + "section_header": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/JiraComment" - }, - "type": "array" + "$ref": "#/components/schemas/GoogleFormsSectionHeader" }, { "type": "null" } ], - "title": "Comments", - "description": "Comments on this issue" + "description": "Section header details (when item_type is 'section_header')" } }, 
"type": "object", "required": [ - "summary" + "item_type" ], - "title": "JiraIssue", - "description": "Jira Issue object" + "title": "GoogleFormsItem", + "description": "A single item in a form - can be a question or section header" }, - "JiraProject": { + "GoogleFormsQuestion": { "properties": { - "key": { - "type": "string", - "title": "Key", - "description": "Project key (e.g., PROJ)" - }, - "name": { + "title": { "type": "string", - "title": "Name", - "description": "Project name" + "title": "Title", + "description": "The question title/prompt" }, "description": { "anyOf": [ @@ -18051,19 +17300,34 @@ } ], "title": "Description", - "description": "Project description" + "description": "Additional description for the question" }, - "project_type": { + "required": { + "type": "boolean", + "title": "Required", + "description": "Whether this question is required", + "default": false + }, + "question_type": { "type": "string", - "title": "Project Type", - "description": "Project type (software, business, etc.)", - "default": "software" + "enum": [ + "short_text", + "paragraph", + "multiple_choice", + "checkbox", + "dropdown", + "scale", + "date", + "time" + ], + "title": "Question Type", + "description": "The type of question" }, - "issues": { + "options": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/JiraIssue" + "$ref": "#/components/schemas/GoogleFormsChoiceOption" }, "type": "array" }, @@ -18071,50 +17335,34 @@ "type": "null" } ], - "title": "Issues", - "description": "Issues in this project" - } - }, - "type": "object", - "required": [ - "key", - "name" - ], - "title": "JiraProject", - "description": "Jira Project object" - }, - "JiraSprint": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Sprint name" + "title": "Options", + "description": "List of options for choice-based questions" }, - "start_date": { + "scale_low": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "Start Date", - "description": "Sprint start date (ISO 8601 format)" + "title": "Scale Low", + "description": "Low end of scale (for scale questions)" }, - "end_date": { + "scale_high": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "End Date", - "description": "Sprint end date (ISO 8601 format)" + "title": "Scale High", + "description": "High end of scale (for scale questions)" }, - "origin_board_name": { + "scale_low_label": { "anyOf": [ { "type": "string" @@ -18123,10 +17371,10 @@ "type": "null" } ], - "title": "Origin Board Name", - "description": "Name of the board this sprint belongs to (for initialization)" + "title": "Scale Low Label", + "description": "Label for low end of scale" }, - "goal": { + "scale_high_label": { "anyOf": [ { "type": "string" @@ -18135,94 +17383,188 @@ "type": "null" } ], - "title": "Goal", - "description": "Sprint goal" + "title": "Scale High Label", + "description": "Label for high end of scale" }, - "state": { + "include_year": { "anyOf": [ { - "type": "string" + "type": "boolean" }, { "type": "null" } ], - "title": "State", - "description": "Sprint state (future, active, closed)" + "title": "Include Year", + "description": "Whether to include year in date questions" + }, + "include_time": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "title": "Include Time", + "description": "Whether to include time in date questions" } }, "type": "object", "required": [ - "name" + "title", + "question_type" ], - "title": "JiraSprint", - 
"description": "Jira Sprint object" + "title": "GoogleFormsQuestion", + "description": "A question item in the form - simplified structure" }, - "LinearComment": { + "GoogleFormsSectionHeader": { "properties": { - "body": { + "title": { "type": "string", - "title": "Body", - "description": "Comment text content" + "title": "Title", + "description": "The section title" + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description", + "description": "Section description" } }, "type": "object", "required": [ - "body" + "title" ], - "title": "LinearComment", - "description": "Linear Comment object" + "title": "GoogleFormsSectionHeader", + "description": "A section header/text item in the form" }, - "LinearData-Input": { + "GoogleSheetsCell": { "properties": { - "projects": { - "items": { - "$ref": "#/components/schemas/LinearProject-Input" - }, - "type": "array", - "title": "Projects", - "description": "List of projects with their issues. At most 50 projects can be included." + "row": { + "type": "integer", + "title": "Row", + "description": "Row index (0-based)" + }, + "col": { + "type": "integer", + "title": "Col", + "description": "Column index (0-based)" + }, + "value": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "integer" + }, + { + "type": "number" + }, + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "title": "Value", + "description": "Cell value (string, number, boolean, or null)" + }, + "formula": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Formula", + "description": "Formula if the cell contains one (e.g., '=A1+B1')" } }, "type": "object", - "title": "LinearData", - "description": "Complete Linear sandbox data structure.\n\nRelational structure for initialization:\n- Projects contain Issues\n- Issues contain Comments" + "required": [ + "row", + "col" + ], + "title": "GoogleSheetsCell", + "description": "A single cell with its position and value" }, - "LinearData-Output": { + "GoogleSheetsData-Input": { "properties": { - "projects": { - "items": { - "$ref": "#/components/schemas/LinearProject-Output" - }, - "type": "array", - "title": "Projects", - "description": "List of projects with their issues. At most 50 projects can be included." 
+ "spreadsheets": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/GoogleSheetsSpreadsheet-Input" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Spreadsheets", + "description": "List of Google Sheets spreadsheets" } }, "type": "object", - "title": "LinearData", - "description": "Complete Linear sandbox data structure.\n\nRelational structure for initialization:\n- Projects contain Issues\n- Issues contain Comments" + "title": "GoogleSheetsData", + "description": "Complete Google Sheets sandbox data structure" }, - "LinearIssue": { + "GoogleSheetsData-Output": { + "properties": { + "spreadsheets": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/GoogleSheetsSpreadsheet-Output" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Spreadsheets", + "description": "List of Google Sheets spreadsheets" + } + }, + "type": "object", + "title": "GoogleSheetsData", + "description": "Complete Google Sheets sandbox data structure" + }, + "GoogleSheetsSheet": { "properties": { "title": { "type": "string", + "minLength": 1, "title": "Title", - "description": "Issue title" + "description": "Sheet title" }, - "description": { + "index": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "Description", - "description": "Issue description" + "title": "Index", + "description": "Sheet index position" }, - "priority": { + "rowCount": { "anyOf": [ { "type": "integer" @@ -18231,26 +17573,26 @@ "type": "null" } ], - "title": "Priority", - "description": "Issue priority (0=No priority, 1=Urgent, 2=High, 3=Medium, 4=Low)" + "title": "Rowcount", + "description": "Number of rows in the sheet" }, - "state_name": { + "columnCount": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "State Name", - "description": "Name of the workflow state (e.g., 'Todo', 'In Progress', 'Done')" + "title": "Columncount", + "description": "Number of columns in the sheet" }, - "comments": { + "cells": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/LinearComment" + "$ref": "#/components/schemas/GoogleSheetsCell" }, "type": "array" }, @@ -18258,53 +17600,59 @@ "type": "null" } ], - "title": "Comments", - "description": "Comments on this issue" + "title": "Cells", + "description": "List of cells with data" } }, "type": "object", "required": [ "title" ], - "title": "LinearIssue", - "description": "Linear Issue object" + "title": "GoogleSheetsSheet", + "description": "A sheet within a spreadsheet" }, - "LinearProject-Input": { + "GoogleSheetsSpreadsheet-Input": { "properties": { - "name": { + "title": { "type": "string", - "title": "Name", - "description": "Project name" - }, - "description": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Description", - "description": "Project description" + "title": "Title", + "description": "Spreadsheet title" }, - "state": { + "sheets": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/GoogleSheetsSheet" + }, + "type": "array" }, { "type": "null" } ], - "title": "State", - "description": "Project state (planned, started, paused, completed, canceled)" + "title": "Sheets", + "description": "List of sheets in the spreadsheet" + } + }, + "type": "object", + "required": [ + "title" + ], + "title": "GoogleSheetsSpreadsheet", + "description": "Google Sheets spreadsheet object" + }, + "GoogleSheetsSpreadsheet-Output": { + "properties": { + "title": { + "type": "string", + "title": "Title", + 
"description": "Spreadsheet title" }, - "issues": { + "sheets": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/LinearIssue" + "$ref": "#/components/schemas/GoogleSheetsSheet" }, "type": "array" }, @@ -18312,25 +17660,38 @@ "type": "null" } ], - "title": "Issues", - "description": "Issues in this project" + "title": "Sheets", + "description": "List of sheets in the spreadsheet" } }, "type": "object", "required": [ - "name" + "title" ], - "title": "LinearProject", - "description": "Linear Project object" + "title": "GoogleSheetsSpreadsheet", + "description": "Google Sheets spreadsheet object" }, - "LinearProject-Output": { + "HTTPValidationError": { "properties": { - "name": { + "detail": { + "items": { + "$ref": "#/components/schemas/ValidationError" + }, + "type": "array", + "title": "Detail" + } + }, + "type": "object", + "title": "HTTPValidationError" + }, + "HubSpotCompany": { + "properties": { + "company_name": { "type": "string", - "title": "Name", - "description": "Project name" + "title": "Company Name", + "description": "Company name" }, - "description": { + "website_domain": { "anyOf": [ { "type": "string" @@ -18339,10 +17700,10 @@ "type": "null" } ], - "title": "Description", - "description": "Project description" + "title": "Website Domain", + "description": "Company website domain" }, - "state": { + "industry": { "anyOf": [ { "type": "string" @@ -18351,96 +17712,70 @@ "type": "null" } ], - "title": "State", - "description": "Project state (planned, started, paused, completed, canceled)" + "title": "Industry", + "description": "Company industry" }, - "issues": { + "city": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/LinearIssue" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Issues", - "description": "Issues in this project" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "LinearProject", - "description": "Linear Project object" - }, - "ListToolsRequest": { - "properties": { - "serverUrl": { - "type": "string", - "title": "Serverurl", - "description": "The full URL for connecting to the MCP server" - }, - "connectionType": { - "$ref": "#/components/schemas/ConnectionType", - "description": "The connection type to use for the MCP server. Default is STREAMABLE_HTTP.", - "default": "StreamableHttp" + "title": "City", + "description": "Company city" }, - "format": { - "$ref": "#/components/schemas/ToolFormat", - "description": "The format to return tools in. 
Default is MCP Native format for maximum compatibility.", - "default": "mcp_native" + "state_region": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "State Region", + "description": "Company state/region" }, - "headers": { + "country": { "anyOf": [ { - "additionalProperties": { - "type": "string" - }, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Headers", - "description": "Optional HTTP headers to include when connecting to the server" - } - }, - "type": "object", - "required": [ - "serverUrl" - ], - "title": "ListToolsRequest" - }, - "ListToolsResponse": { - "properties": { - "success": { - "type": "boolean", - "title": "Success", - "description": "Whether the list tools request was successful" + "title": "Country", + "description": "Company country" }, - "tools": { + "phone_number": { "anyOf": [ { - "items": {}, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Tools", - "description": "List of tools in the requested format" + "title": "Phone Number", + "description": "Company phone number" }, - "format": { - "$ref": "#/components/schemas/ToolFormat", - "description": "The format of the returned tools" + "employee_count": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Employee Count", + "description": "Number of employees" }, - "error": { + "yearly_revenue": { "anyOf": [ { "type": "string" @@ -18449,49 +17784,35 @@ "type": "null" } ], - "title": "Error", - "description": "Error message, if the request failed" + "title": "Yearly Revenue", + "description": "Yearly revenue" } }, "type": "object", "required": [ - "success", - "format" + "company_name" ], - "title": "ListToolsResponse" + "title": "HubSpotCompany", + "description": "HubSpot Company object.\nBase object, typically doesn't associate upwards in this schema, but contacts/deals associate to it." }, - "LogBucket": { + "HubSpotContact": { "properties": { - "name": { + "first_name": { "type": "string", - "title": "Name", - "description": "Bucket name/ID" + "title": "First Name", + "description": "Contact first name" }, - "location": { + "last_name": { "type": "string", - "title": "Location", - "description": "Bucket location", - "default": "global" - }, - "retentionDays": { - "type": "integer", - "title": "Retentiondays", - "description": "Retention period in days", - "default": 30 + "title": "Last Name", + "description": "Contact last name" }, - "description": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Description", - "description": "Bucket description" + "email_address": { + "type": "string", + "title": "Email Address", + "description": "Contact email address" }, - "createTime": { + "phone_number": { "anyOf": [ { "type": "string" @@ -18500,10 +17821,10 @@ "type": "null" } ], - "title": "Createtime", - "description": "Creation timestamp" + "title": "Phone Number", + "description": "Contact phone number" }, - "updateTime": { + "job_title": { "anyOf": [ { "type": "string" @@ -18512,108 +17833,113 @@ "type": "null" } ], - "title": "Updatetime", - "description": "Update timestamp" + "title": "Job Title", + "description": "Contact job title" } }, "type": "object", "required": [ - "name" + "first_name", + "last_name", + "email_address" ], - "title": "LogBucket", - "description": "Cloud Logging bucket - matches API format" + "title": "HubSpotContact", + "description": "HubSpot Contact object.\nCan be associated with: Companies." 
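
Looping back to the cell-based Sheets schemas above: a `GoogleSheetsSpreadsheet-Input` is a title plus sheets, and each sheet lists only the cells that actually hold data (sparse, 0-based `row`/`col`). A sketch with invented values:

```python
import json

# Illustrative sparse spreadsheet: only cells with data are listed.
spreadsheet = {
    "title": "Q4 budget",        # required
    "sheets": [
        {
            "title": "Summary",  # required, min length 1
            "rowCount": 100,
            "columnCount": 26,
            "cells": [
                {"row": 0, "col": 0, "value": "Item"},
                {"row": 0, "col": 1, "value": "Cost"},
                {"row": 1, "col": 0, "value": "Laptops"},
                {"row": 1, "col": 1, "value": 12000},            # numbers allowed
                {"row": 2, "col": 1, "formula": "=SUM(B2:B2)"},  # formula cell
            ],
        }
    ],
}

print(json.dumps(spreadsheet, indent=2))
```
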
}, - "LogEntry": { + "HubSpotData-Input": { "properties": { - "logName": { - "type": "string", - "title": "Logname", - "description": "Log name" - }, - "textPayload": { + "companies": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/HubSpotCompany" + }, + "type": "array" }, { "type": "null" } ], - "title": "Textpayload", - "description": "Text payload" + "title": "Companies", + "description": "List of companies" }, - "jsonPayload": { + "contacts": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/HubSpotContact" + }, + "type": "array" }, { "type": "null" } ], - "title": "Jsonpayload", - "description": "JSON payload" - }, - "severity": { - "type": "string", - "title": "Severity", - "description": "Log severity (DEBUG, INFO, WARNING, ERROR, CRITICAL)", - "default": "INFO" + "title": "Contacts", + "description": "List of contacts" }, - "timestamp": { + "deals": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/HubSpotDeal" + }, + "type": "array" }, { "type": "null" } ], - "title": "Timestamp", - "description": "Entry timestamp" + "title": "Deals", + "description": "List of deals" }, - "insertId": { + "tickets": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/HubSpotTicket" + }, + "type": "array" }, { "type": "null" } ], - "title": "Insertid", - "description": "Unique insert ID" + "title": "Tickets", + "description": "List of tickets" }, - "resource": { + "tasks": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/HubSpotTask" + }, + "type": "array" }, { "type": "null" } ], - "title": "Resource", - "description": "Monitored resource" + "title": "Tasks", + "description": "List of tasks" } }, "type": "object", - "required": [ - "logName" - ], - "title": "LogEntry", - "description": "Cloud Logging log entry - matches API format" + "title": "HubSpotData", + "description": "Complete HubSpot sandbox data structure (Flat Schema).\n\nAll object types are top-level lists. Relationships are defined via association fields\nin the respective objects (e.g. associated_company_names in HubSpotContact)." 
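
And a minimal `HubSpotData-Input` payload in the flat shape described above: every object type is a top-level list. Values are invented; note the association fields the description mentions (e.g. `associated_company_names`) do not appear in the schemas shown here, so this sketch omits them.

```python
import json

# Illustrative flat HubSpotData payload; all object types are top-level lists.
hubspot_data = {
    "companies": [
        {"company_name": "Acme Corp", "industry": "Manufacturing"}  # company_name required
    ],
    "contacts": [
        {
            "first_name": "Jane",               # required
            "last_name": "Doe",                 # required
            "email_address": "jane@acme.test",  # required
            "job_title": "CTO",
        }
    ],
    "deals": [
        {"title": "Acme renewal", "stage": "negotiation", "value": "25000"}  # title required; value is a string
    ],
}

print(json.dumps(hubspot_data, indent=2))
```
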
}, - "LogSink": { + "HubSpotData-Output": { + "additionalProperties": true, + "type": "object" + }, + "HubSpotDeal": { "properties": { - "name": { + "title": { "type": "string", - "title": "Name", - "description": "Sink name" + "title": "Title", + "description": "Deal title" }, - "filter": { + "value": { "anyOf": [ { "type": "string" @@ -18622,10 +17948,10 @@ "type": "null" } ], - "title": "Filter", - "description": "Log filter" + "title": "Value", + "description": "Deal value/amount" }, - "description": { + "stage": { "anyOf": [ { "type": "string" @@ -18634,10 +17960,10 @@ "type": "null" } ], - "title": "Description", - "description": "Sink description" + "title": "Stage", + "description": "Deal stage" }, - "createTime": { + "pipeline": { "anyOf": [ { "type": "string" @@ -18646,10 +17972,10 @@ "type": "null" } ], - "title": "Createtime", - "description": "Creation timestamp" + "title": "Pipeline", + "description": "Deal pipeline" }, - "updateTime": { + "expected_close_date": { "anyOf": [ { "type": "string" @@ -18658,47 +17984,23 @@ "type": "null" } ], - "title": "Updatetime", - "description": "Update timestamp" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "LogSink", - "description": "Cloud Logging sink - matches API format" - }, - "MCPOAuthSuccessResponse": { - "properties": { - "status": { - "type": "string", - "title": "Status", - "description": "Status of the operation", - "default": "success" - }, - "message": { - "type": "string", - "title": "Message", - "description": "Success message" + "title": "Expected Close Date", + "description": "Expected close date" } }, "type": "object", "required": [ - "message" + "title" ], - "title": "MCPOAuthSuccessResponse" + "title": "HubSpotDeal", + "description": "HubSpot Deal object.\nCan be associated with: Contacts, Companies." 
}, - "McpServer": { + "HubSpotTask": { "properties": { - "id": { - "type": "string", - "format": "uuid", - "title": "Id" - }, - "name": { + "title": { "type": "string", - "title": "Name" + "title": "Title", + "description": "Task title" }, "description": { "anyOf": [ @@ -18709,164 +18011,22 @@ "type": "null" } ], - "title": "Description" + "title": "Description", + "description": "Task description/details" }, - "tools": { + "task_status": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ServerTool" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Tools" + "title": "Task Status", + "description": "Task status (NOT_STARTED, IN_PROGRESS, COMPLETED, WAITING, DEFERRED)" }, - "authNeeded": { - "type": "boolean", - "title": "Authneeded", - "default": true - } - }, - "type": "object", - "required": [ - "id", - "name" - ], - "title": "McpServer" - }, - "McpServerName": { - "type": "string", - "enum": [ - "Affinity", - "Airtable", - "Asana", - "Attio", - "Box", - "Brave Search", - "Cal.com", - "Canva", - "ClickUp", - "Close", - "Cloudflare", - "Coinbase", - "Confluence", - "Dialpad", - "Discord", - "Doc2markdown", - "DocuSign", - "Dropbox", - "ElevenLabs", - "Exa", - "Fathom", - "Fetch URL", - "Figma", - "Fireflies", - "Firecrawl Deep Research", - "Firecrawl Web Search", - "Freshdesk", - "GitHub", - "GitLab", - "Gmail", - "Gong", - "Google Calendar", - "Google Docs", - "Google Drive", - "Google Jobs", - "Google Sheets", - "Google Forms", - "Google Cloud", - "Hacker News", - "Heygen", - "HubSpot", - "Intercom", - "Jira", - "Klaviyo", - "Klavis ReportGen", - "Linear", - "LinkedIn", - "Markdown2doc", - "Mem0", - "Microsoft Teams", - "Mixpanel", - "Monday", - "Moneybird", - "Motion", - "Notion", - "OneDrive", - "Openrouter", - "Outlook Mail", - "Outlook Calendar", - "PagerDuty", - "Pipedrive", - "Plai", - "Postgres", - "PostHog", - "Postman", - "QuickBooks", - "Resend", - "Salesforce", - "SendGrid", - "Shopify", - "Slack", - "Snowflake", - "Stripe", - "Supabase", - "Tavily", - "Vercel", - "WhatsApp", - "WordPress", - "YouTube", - "Zendesk", - "ServiceNow", - "PayPal", - "Sentry", - "Netlify", - "Hugging Face", - "Square", - "Clockwise", - "Jotform", - "Honeycomb", - "Zoho Mail" - ], - "title": "McpServerName" - }, - "Mem0Data-Input": { - "properties": { - "memories": { - "items": { - "$ref": "#/components/schemas/Mem0Memory" - }, - "type": "array", - "title": "Memories", - "description": "List of memories" - } - }, - "type": "object", - "title": "Mem0Data", - "description": "Complete Mem0 sandbox data structure" - }, - "Mem0Data-Output": { - "properties": { - "memories": { - "items": { - "$ref": "#/components/schemas/Mem0Memory" - }, - "type": "array", - "title": "Memories", - "description": "List of memories" - } - }, - "type": "object", - "title": "Mem0Data", - "description": "Complete Mem0 sandbox data structure" - }, - "Mem0Memory": { - "properties": { - "id": { + "priority_level": { "anyOf": [ { "type": "string" @@ -18875,10 +18035,10 @@ "type": "null" } ], - "title": "Id", - "description": "Memory ID (read-only, set by Mem0)" + "title": "Priority Level", + "description": "Task priority (HIGH, MEDIUM, LOW)" }, - "memory": { + "deadline": { "anyOf": [ { "type": "string" @@ -18887,25 +18047,37 @@ "type": "null" } ], - "title": "Memory", - "description": "Memory content (returned from API)" + "title": "Deadline", + "description": "Task deadline (ISO 8601 format)" + } + }, + "type": "object", + "required": [ + "title" + ], + "title": "HubSpotTask", + 
"description": "HubSpot Task object.\nCan be associated with: Contacts, Companies, Deals, Tickets." + }, + "HubSpotTicket": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "description": "Ticket title" }, - "messages": { + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/Mem0Message" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Messages", - "description": "Array of message objects representing the content" + "title": "Description", + "description": "Ticket description/details" }, - "agent_id": { + "pipeline": { "anyOf": [ { "type": "string" @@ -18914,10 +18086,10 @@ "type": "null" } ], - "title": "Agent Id", - "description": "Agent ID" + "title": "Pipeline", + "description": "Ticket pipeline" }, - "user_id": { + "workflow_stage": { "anyOf": [ { "type": "string" @@ -18926,10 +18098,10 @@ "type": "null" } ], - "title": "User Id", - "description": "User ID" + "title": "Workflow Stage", + "description": "Ticket workflow stage" }, - "app_id": { + "priority_level": { "anyOf": [ { "type": "string" @@ -18938,35 +18110,102 @@ "type": "null" } ], - "title": "App Id", - "description": "App ID" + "title": "Priority Level", + "description": "Ticket priority (HIGH, MEDIUM, LOW)" + } + }, + "type": "object", + "required": [ + "title" + ], + "title": "HubSpotTicket", + "description": "HubSpot Ticket object.\nCan be associated with: Contacts, Companies." + }, + "InitializeSandboxResponse": { + "properties": { + "sandbox_id": { + "type": "string", + "title": "Sandbox Id", + "description": "Sandbox identifier" }, - "run_id": { + "status": { + "$ref": "#/components/schemas/SandboxStatus", + "description": "Current status" + }, + "message": { + "type": "string", + "title": "Message", + "description": "Initialization result message" + }, + "records_created": { "anyOf": [ { - "type": "string" + "additionalProperties": { + "type": "integer" + }, + "type": "object" }, { "type": "null" } ], - "title": "Run Id", - "description": "Run ID" + "title": "Records Created", + "description": "Count of records created per object type" + } + }, + "type": "object", + "required": [ + "sandbox_id", + "status", + "message" + ], + "title": "InitializeSandboxResponse", + "description": "Response model for sandbox initialization" + }, + "IntegrationItem": { + "properties": { + "name": { + "$ref": "#/components/schemas/McpServerName" }, - "metadata": { + "is_authenticated": { + "type": "boolean", + "title": "Is Authenticated", + "description": "Whether the integration is authenticated" + } + }, + "type": "object", + "required": [ + "name", + "is_authenticated" + ], + "title": "IntegrationItem" + }, + "JiraBoard": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Board name" + }, + "type": { + "type": "string", + "title": "Type", + "description": "Board type (scrum or kanban)", + "default": "scrum" + }, + "project_key": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Metadata", - "description": "Additional metadata" + "title": "Project Key", + "description": "Key of the project this board belongs to (for initialization)" }, - "includes": { + "filter_jql": { "anyOf": [ { "type": "string" @@ -18975,10 +18214,75 @@ "type": "null" } ], - "title": "Includes", - "description": "String to include specific preferences in the memory" + "title": "Filter Jql", + "description": "JQL for the board filter" + } + }, + "type": "object", + "required": [ + 
"name" + ], + "title": "JiraBoard", + "description": "Jira Board object" + }, + "JiraComment": { + "properties": { + "body": { + "type": "string", + "title": "Body", + "description": "Comment text content" + } + }, + "type": "object", + "required": [ + "body" + ], + "title": "JiraComment", + "description": "Jira Comment object" + }, + "JiraData-Input": { + "properties": { + "projects": { + "items": { + "$ref": "#/components/schemas/JiraProject" + }, + "type": "array", + "title": "Projects", + "description": "List of projects with their issues" + }, + "boards": { + "items": { + "$ref": "#/components/schemas/JiraBoard" + }, + "type": "array", + "title": "Boards", + "description": "List of boards" + }, + "sprints": { + "items": { + "$ref": "#/components/schemas/JiraSprint" + }, + "type": "array", + "title": "Sprints", + "description": "List of sprints" + } + }, + "type": "object", + "title": "JiraData", + "description": "Complete Jira sandbox data structure.\n\nRelational structure for initialization:\n- Projects contain Issues\n- Boards refer to Projects (via key)\n- Sprints refer to Boards (via name)\n- Issues can refer to Sprints (via name)" + }, + "JiraData-Output": { + "additionalProperties": true, + "type": "object" + }, + "JiraIssue": { + "properties": { + "summary": { + "type": "string", + "title": "Summary", + "description": "Issue summary/title" }, - "excludes": { + "description": { "anyOf": [ { "type": "string" @@ -18987,16 +18291,16 @@ "type": "null" } ], - "title": "Excludes", - "description": "String to exclude specific preferences in the memory" + "title": "Description", + "description": "Issue description" }, - "infer": { - "type": "boolean", - "title": "Infer", - "description": "Whether to infer memories or directly store messages", - "default": true + "issue_type": { + "type": "string", + "title": "Issue Type", + "description": "Issue type (Task, Bug, Story, etc.)", + "default": "Task" }, - "output_format": { + "priority": { "anyOf": [ { "type": "string" @@ -19005,77 +18309,106 @@ "type": "null" } ], - "title": "Output Format", - "description": "Response format structure (v1.0 or v1.1)", - "default": "v1.1" + "title": "Priority", + "description": "Issue priority (Highest, High, Medium, Low, Lowest)" }, - "custom_categories": { + "sprint_name": { "anyOf": [ { - "items": { - "additionalProperties": { - "type": "string" - }, - "type": "object" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Custom Categories", - "description": "List of custom categories with name and description" + "title": "Sprint Name", + "description": "Name of the sprint this issue belongs to (for initialization)" }, - "custom_instructions": { + "comments": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/JiraComment" + }, + "type": "array" }, { "type": "null" } ], - "title": "Custom Instructions", - "description": "Project-specific guidelines for handling memories" - }, - "immutable": { - "type": "boolean", - "title": "Immutable", - "description": "Whether the memory is immutable", - "default": false - }, - "async_mode": { - "type": "boolean", - "title": "Async Mode", - "description": "Whether to add memory asynchronously", - "default": true + "title": "Comments", + "description": "Comments on this issue" + } + }, + "type": "object", + "required": [ + "summary" + ], + "title": "JiraIssue", + "description": "Jira Issue object" + }, + "JiraProject": { + "properties": { + "key": { + "type": "string", + "title": "Key", + "description": "Project key 
(e.g., PROJ)" }, - "timestamp": { + "name": { + "type": "string", + "title": "Name", + "description": "Project name" + }, + "description": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Timestamp", - "description": "Unix timestamp of the memory" + "title": "Description", + "description": "Project description" + }, + "project_type": { + "type": "string", + "title": "Project Type", + "description": "Project type (software, business, etc.)", + "default": "software" }, - "expiration_date": { + "issues": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/JiraIssue" + }, + "type": "array" }, { "type": "null" } ], - "title": "Expiration Date", - "description": "Expiration date (YYYY-MM-DD)" + "title": "Issues", + "description": "Issues in this project" + } + }, + "type": "object", + "required": [ + "key", + "name" + ], + "title": "JiraProject", + "description": "Jira Project object" + }, + "JiraSprint": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Sprint name" }, - "version": { + "start_date": { "anyOf": [ { "type": "string" @@ -19084,10 +18417,10 @@ "type": "null" } ], - "title": "Version", - "description": "Memory version (v1 deprecated, use v2)" + "title": "Start Date", + "description": "Sprint start date (ISO 8601 format)" }, - "created_at": { + "end_date": { "anyOf": [ { "type": "string" @@ -19096,10 +18429,10 @@ "type": "null" } ], - "title": "Created At", - "description": "Creation timestamp" + "title": "End Date", + "description": "Sprint end date (ISO 8601 format)" }, - "updated_at": { + "origin_board_name": { "anyOf": [ { "type": "string" @@ -19108,10 +18441,10 @@ "type": "null" } ], - "title": "Updated At", - "description": "Last update timestamp" + "title": "Origin Board Name", + "description": "Name of the board this sprint belongs to (for initialization)" }, - "owner": { + "goal": { "anyOf": [ { "type": "string" @@ -19120,10 +18453,10 @@ "type": "null" } ], - "title": "Owner", - "description": "Memory owner" + "title": "Goal", + "description": "Sprint goal" }, - "organization": { + "state": { "anyOf": [ { "type": "string" @@ -19132,286 +18465,127 @@ "type": "null" } ], - "title": "Organization", - "description": "Organization" - } - }, - "type": "object", - "title": "Mem0Memory", - "description": "Mem0 Memory object" - }, - "Mem0Message": { - "properties": { - "role": { - "type": "string", - "title": "Role", - "description": "Role of the message sender (user or assistant)" - }, - "content": { - "type": "string", - "title": "Content", - "description": "Content of the message" + "title": "State", + "description": "Sprint state (future, active, closed)" } }, "type": "object", "required": [ - "role", - "content" + "name" ], - "title": "Mem0Message", - "description": "Message object for memory" + "title": "JiraSprint", + "description": "Jira Sprint object" }, - "MondayBoard": { + "Lead": { "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Board name" - }, - "description": { + "first_name": { "anyOf": [ { - "type": "string" + "type": "string", + "maxLength": 40 }, { "type": "null" } ], - "title": "Description", - "description": "Board description" + "title": "First Name" }, - "board_kind": { + "last_name": { "type": "string", - "title": "Board Kind", - "description": "Board kind (public, private, share)", - "default": "public" + "maxLength": 80, + "minLength": 1, + "title": "Last Name" }, - "workspace_id": { - "anyOf": [ - { - "type": 
"string" - }, - { - "type": "null" - } - ], - "title": "Workspace Id", - "description": "Workspace ID this board belongs to (for initialization)" + "company": { + "type": "string", + "maxLength": 255, + "minLength": 1, + "title": "Company" }, - "groups": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/MondayGroup" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Groups", - "description": "Groups in this board" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "MondayBoard", - "description": "Monday.com Board object - DELETE SUPPORTED" - }, - "MondayData-Input": { - "properties": { - "workspaces": { + "email": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MondayWorkspace" - }, - "type": "array" + "type": "string", + "format": "email" }, { "type": "null" } ], - "title": "Workspaces", - "description": "List of workspaces" - }, - "boards": { - "items": { - "$ref": "#/components/schemas/MondayBoard" - }, - "type": "array", - "title": "Boards", - "description": "List of boards with their groups and items" - } - }, - "type": "object", - "title": "MondayData", - "description": "Complete Monday.com sandbox data structure.\n\nRelational structure for initialization:\n- Workspaces (optional, can be created separately)\n- Boards contain Groups\n- Groups contain Items\n- Items contain Updates and Subitems\n\nAll objects support DELETE operations." - }, - "MondayData-Output": { - "additionalProperties": true, - "type": "object" - }, - "MondayGroup": { - "properties": { - "title": { - "type": "string", - "title": "Title", - "description": "Group title" + "title": "Email" }, - "color": { + "phone": { "anyOf": [ { - "type": "string" + "type": "string", + "maxLength": 40 }, { "type": "null" } ], - "title": "Color", - "description": "Group color (e.g., '#FF0000', 'red')" + "title": "Phone" }, - "items": { + "mobile": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MondayItem" - }, - "type": "array" + "type": "string", + "maxLength": 40 }, { "type": "null" } ], - "title": "Items", - "description": "Items in this group" - } - }, - "type": "object", - "required": [ - "title" - ], - "title": "MondayGroup", - "description": "Monday.com Group (Section) object - DELETE SUPPORTED" - }, - "MondayItem": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Item name" + "title": "Mobile" }, - "column_values": { + "title": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string", + "maxLength": 128 }, { "type": "null" } ], - "title": "Column Values", - "description": "Column values as JSON object" + "title": "Title" }, - "updates": { + "status": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MondayUpdate" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Updates", - "description": "Updates/comments on this item" + "title": "Status", + "default": "Open - Not Contacted" }, - "subitems": { + "source": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MondaySubitem" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Subitems", - "description": "Subitems (nested items)" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "MondayItem", - "description": "Monday.com Item (Task/Row) object - DELETE SUPPORTED" - }, - "MondaySubitem": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Subitem name" + "title": "Source", + "description": "e.g., Web, Phone, 
Referral" }, - "column_values": { + "industry": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Column Values", - "description": "Column values as JSON object" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "MondaySubitem", - "description": "Monday.com Subitem object (nested item) - DELETE SUPPORTED" - }, - "MondayUpdate": { - "properties": { - "body": { - "type": "string", - "title": "Body", - "description": "Update text content" - } - }, - "type": "object", - "required": [ - "body" - ], - "title": "MondayUpdate", - "description": "Monday.com Update (Comment) object - DELETE SUPPORTED" - }, - "MondayWorkspace": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Workspace name" - }, - "kind": { - "type": "string", - "title": "Kind", - "description": "Workspace kind (open, closed)", - "default": "open" + "title": "Industry" }, - "description": { + "rating": { "anyOf": [ { "type": "string" @@ -19420,68 +18594,84 @@ "type": "null" } ], - "title": "Description", - "description": "Workspace description" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "MondayWorkspace", - "description": "Monday.com Workspace object - DELETE SUPPORTED" - }, - "MoneybirdContact": { - "properties": { - "company_name": { + "title": "Rating", + "description": "e.g., Hot, Warm, Cold" + }, + "address": { "anyOf": [ { - "type": "string" + "$ref": "#/components/schemas/Address" + }, + { + "type": "null" + } + ] + }, + "website": { + "anyOf": [ + { + "type": "string", + "maxLength": 255 }, { "type": "null" } ], - "title": "Company Name", - "description": "Company name" + "title": "Website" }, - "firstname": { + "description": { "anyOf": [ { - "type": "string" + "type": "string", + "maxLength": 32000 }, { "type": "null" } ], - "title": "Firstname", - "description": "First name" + "title": "Description" }, - "lastname": { + "employees": { "anyOf": [ { - "type": "string" + "type": "integer", + "minimum": 0 }, { "type": "null" } ], - "title": "Lastname", - "description": "Last name" + "title": "Employees" }, - "phone": { + "revenue": { "anyOf": [ { - "type": "string" + "type": "number", + "minimum": 0 }, { "type": "null" } ], - "title": "Phone", - "description": "Phone number" + "title": "Revenue" + } + }, + "type": "object", + "required": [ + "last_name", + "company" + ], + "title": "Lead", + "description": "Potential customer not yet qualified" + }, + "LifecycleRule": { + "properties": { + "action": { + "type": "string", + "title": "Action", + "description": "Action type (Delete, SetStorageClass)" }, - "address1": { + "action_storage_class": { "anyOf": [ { "type": "string" @@ -19490,22 +18680,22 @@ "type": "null" } ], - "title": "Address1", - "description": "Street address" + "title": "Action Storage Class", + "description": "Target storage class for SetStorageClass action" }, - "city": { + "age_days": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "City", - "description": "City" + "title": "Age Days", + "description": "Age in days to trigger rule" }, - "zipcode": { + "created_before": { "anyOf": [ { "type": "string" @@ -19514,90 +18704,134 @@ "type": "null" } ], - "title": "Zipcode", - "description": "Postal code" + "title": "Created Before", + "description": "Created before date" }, - "country": { + "is_live": { "anyOf": [ { - "type": "string" + "type": "boolean" }, { "type": "null" } ], - "title": "Country", - "description": 
"Country code" + "title": "Is Live", + "description": "Apply to live versions only" }, - "sepa_iban": { + "num_newer_versions": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "Sepa Iban", - "description": "SEPA IBAN" + "title": "Num Newer Versions", + "description": "Number of newer versions to keep" } }, "type": "object", - "title": "MoneybirdContact", - "description": "Moneybird contact (customer/supplier) - API format\n\nAttributes:\n company_name: Company name (optional, for business contacts)\n firstname: First name (optional, for individual contacts)\n lastname: Last name (optional, for individual contacts)\n phone: Phone number\n address1: Street address\n city: City\n zipcode: Postal code\n country: Country code (e.g., 'NL', 'DE', 'FR')\n sepa_iban: SEPA IBAN for direct debit (optional)" + "required": [ + "action" + ], + "title": "LifecycleRule", + "description": "Lifecycle rule for a storage bucket" }, - "MoneybirdData-Input": { + "LinearComment": { "properties": { - "ledger_accounts": { + "body": { + "type": "string", + "title": "Body", + "description": "Comment text content" + } + }, + "type": "object", + "required": [ + "body" + ], + "title": "LinearComment", + "description": "Linear Comment object" + }, + "LinearData-Input": { + "properties": { + "projects": { + "items": { + "$ref": "#/components/schemas/LinearProject-Input" + }, + "type": "array", + "title": "Projects", + "description": "List of projects with their issues. At most 50 projects can be included." + } + }, + "type": "object", + "title": "LinearData", + "description": "Complete Linear sandbox data structure.\n\nRelational structure for initialization:\n- Projects contain Issues\n- Issues contain Comments" + }, + "LinearData-Output": { + "properties": { + "projects": { + "items": { + "$ref": "#/components/schemas/LinearProject-Output" + }, + "type": "array", + "title": "Projects", + "description": "List of projects with their issues. At most 50 projects can be included." 
+ } + }, + "type": "object", + "title": "LinearData", + "description": "Complete Linear sandbox data structure.\n\nRelational structure for initialization:\n- Projects contain Issues\n- Issues contain Comments" + }, + "LinearIssue": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "description": "Issue title" + }, + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdLedgerAccount" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Ledger Accounts", - "description": "List of ledger accounts to create" + "title": "Description", + "description": "Issue description" }, - "contacts": { + "priority": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdContact" - }, - "type": "array" + "type": "integer" }, { "type": "null" } ], - "title": "Contacts", - "description": "List of contacts to create" + "title": "Priority", + "description": "Issue priority (0=No priority, 1=Urgent, 2=High, 3=Medium, 4=Low)" }, - "products": { + "state_name": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdProduct" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Products", - "description": "List of products to create" + "title": "State Name", + "description": "Name of the workflow state (e.g., 'Todo', 'In Progress', 'Done')" }, - "projects": { + "comments": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/MoneybirdProject" + "$ref": "#/components/schemas/LinearComment" }, "type": "array" }, @@ -19605,29 +18839,53 @@ "type": "null" } ], - "title": "Projects", - "description": "List of projects to create" + "title": "Comments", + "description": "Comments on this issue" + } + }, + "type": "object", + "required": [ + "title" + ], + "title": "LinearIssue", + "description": "Linear Issue object" + }, + "LinearProject-Input": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Project name" }, - "time_entries": { + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdTimeEntry" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Time Entries", - "description": "List of time entries to create" + "title": "Description", + "description": "Project description" + }, + "state": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "State", + "description": "Project state (planned, started, paused, completed, canceled)" }, - "sales_invoices": { + "issues": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/MoneybirdSalesInvoice" + "$ref": "#/components/schemas/LinearIssue" }, "type": "array" }, @@ -19635,51 +18893,53 @@ "type": "null" } ], - "title": "Sales Invoices", - "description": "List of sales invoices to create" + "title": "Issues", + "description": "Issues in this project" } }, "type": "object", - "title": "MoneybirdData", - "description": "Complete Moneybird sandbox data structure - API format\n\nNote: Objects are created in dependency order:\n1. Ledger accounts (no dependencies)\n2. Contacts (no dependencies)\n3. Products (no dependencies, but needs tax rates from system)\n4. Projects (no dependencies)\n5. Time entries (linked to projects and contacts)\n6. 
Sales invoices (linked to contacts)" + "required": [ + "name" + ], + "title": "LinearProject", + "description": "Linear Project object" }, - "MoneybirdData-Output": { + "LinearProject-Output": { "properties": { - "ledger_accounts": { + "name": { + "type": "string", + "title": "Name", + "description": "Project name" + }, + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdLedgerAccount" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Ledger Accounts", - "description": "List of ledger accounts to create" + "title": "Description", + "description": "Project description" }, - "contacts": { + "state": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdContact" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Contacts", - "description": "List of contacts to create" + "title": "State", + "description": "Project state (planned, started, paused, completed, canceled)" }, - "products": { + "issues": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/MoneybirdProduct" + "$ref": "#/components/schemas/LinearIssue" }, "type": "array" }, @@ -19687,89 +18947,120 @@ "type": "null" } ], - "title": "Products", - "description": "List of products to create" + "title": "Issues", + "description": "Issues in this project" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "LinearProject", + "description": "Linear Project object" + }, + "ListToolsRequest": { + "properties": { + "serverUrl": { + "type": "string", + "title": "Serverurl", + "description": "The full URL for connecting to the MCP server" }, - "projects": { + "connectionType": { + "$ref": "#/components/schemas/ConnectionType", + "description": "The connection type to use for the MCP server. Default is STREAMABLE_HTTP.", + "default": "StreamableHttp" + }, + "format": { + "$ref": "#/components/schemas/ToolFormat", + "description": "The format to return tools in. 
Default is MCP Native format for maximum compatibility.", + "default": "mcp_native" + }, + "headers": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdProject" + "additionalProperties": { + "type": "string" }, - "type": "array" + "type": "object" }, { "type": "null" } ], - "title": "Projects", - "description": "List of projects to create" + "title": "Headers", + "description": "Optional HTTP headers to include when connecting to the server" + } + }, + "type": "object", + "required": [ + "serverUrl" + ], + "title": "ListToolsRequest" + }, + "ListToolsResponse": { + "properties": { + "success": { + "type": "boolean", + "title": "Success", + "description": "Whether the list tools request was successful" }, - "time_entries": { + "tools": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdTimeEntry" - }, + "items": {}, "type": "array" }, { "type": "null" } ], - "title": "Time Entries", - "description": "List of time entries to create" + "title": "Tools", + "description": "List of tools in the requested format" }, - "sales_invoices": { + "format": { + "$ref": "#/components/schemas/ToolFormat", + "description": "The format of the returned tools" + }, + "error": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/MoneybirdSalesInvoice" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Sales Invoices", - "description": "List of sales invoices to create" + "title": "Error", + "description": "Error message, if the request failed" } }, "type": "object", - "title": "MoneybirdData", - "description": "Complete Moneybird sandbox data structure - API format\n\nNote: Objects are created in dependency order:\n1. Ledger accounts (no dependencies)\n2. Contacts (no dependencies)\n3. Products (no dependencies, but needs tax rates from system)\n4. Projects (no dependencies)\n5. Time entries (linked to projects and contacts)\n6. 
Sales invoices (linked to contacts)" + "required": [ + "success", + "format" + ], + "title": "ListToolsResponse" }, - "MoneybirdInvoiceDetail": { + "LogBucket": { "properties": { - "description": { + "name": { "type": "string", - "title": "Description", - "description": "Line item description" + "title": "Name", + "description": "Bucket name/ID" }, - "price": { + "location": { "type": "string", - "title": "Price", - "description": "Unit price as string" + "title": "Location", + "description": "Bucket location", + "default": "global" }, - "amount": { - "type": "string", - "title": "Amount", - "description": "Quantity as string" - } - }, - "type": "object", - "required": [ - "description", - "price", - "amount" - ], - "title": "MoneybirdInvoiceDetail", - "description": "Invoice line item details" - }, - "MoneybirdLedgerAccount": { - "properties": { - "rgs_code": { + "retention_days": { + "type": "integer", + "title": "Retention Days", + "description": "Retention period in days", + "default": 30 + }, + "description": { "anyOf": [ { "type": "string" @@ -19778,20 +19069,22 @@ "type": "null" } ], - "title": "Rgs Code", - "description": "RGS code - required for creation, not returned by GET API" - }, - "name": { - "type": "string", - "title": "Name", - "description": "Account name" + "title": "Description", + "description": "Bucket description" }, - "account_type": { - "type": "string", - "title": "Account Type", - "description": "Account type (revenue, expense, etc.)" + "created_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Created At", + "description": "Creation timestamp" }, - "account_id": { + "updated_at": { "anyOf": [ { "type": "string" @@ -19800,58 +19093,68 @@ "type": "null" } ], - "title": "Account Id", - "description": "Account identifier (optional for system accounts)" + "title": "Updated At", + "description": "Update timestamp" } }, "type": "object", "required": [ - "name", - "account_type" + "name" ], - "title": "MoneybirdLedgerAccount", - "description": "Moneybird ledger account (chart of accounts) - API format\n\nAttributes:\n rgs_code: Reference Grootboekschema (RGS) code - required for creation\n name: Account name\n account_type: Account type (revenue, expense, etc.)\n account_id: Account identifier (optional, system accounts may not have this)" + "title": "LogBucket", + "description": "Cloud Logging bucket" }, - "MoneybirdProduct": { + "LogEntry": { "properties": { - "description": { - "type": "string", - "title": "Description", - "description": "Product description" - }, - "title": { + "log_name": { "type": "string", - "title": "Title", - "description": "Product title/name" + "title": "Log Name", + "description": "Log name" }, - "price": { - "type": "string", - "title": "Price", - "description": "Product price as string" + "message": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Message", + "description": "Text message payload" }, - "currency": { - "type": "string", - "title": "Currency", - "description": "Currency code (e.g., 'EUR')" + "json_data": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Json Data", + "description": "JSON payload" }, - "identifier": { + "severity": { "type": "string", - "title": "Identifier", - "description": "Product SKU/identifier" + "title": "Severity", + "description": "Log severity (DEBUG, INFO, WARNING, ERROR, CRITICAL)", + "default": "INFO" }, - "frequency": { + "timestamp": { "anyOf": [ { - 
"type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Frequency", - "description": "Billing frequency" + "title": "Timestamp", + "description": "Entry timestamp" }, - "frequency_type": { + "resource_type": { "anyOf": [ { "type": "string" @@ -19860,167 +19163,289 @@ "type": "null" } ], - "title": "Frequency Type", - "description": "Frequency type (month, year)" + "title": "Resource Type", + "description": "Resource type (e.g., gce_instance, cloud_function)" + }, + "resource_labels": { + "anyOf": [ + { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Resource Labels", + "description": "Resource labels" } }, "type": "object", "required": [ - "description", - "title", - "price", - "currency", - "identifier" + "log_name" ], - "title": "MoneybirdProduct", - "description": "Moneybird product/service - API format\n\nAttributes:\n description: Product description\n title: Product title/name\n price: Product price\n currency: Currency code (e.g., 'EUR')\n identifier: Product SKU/identifier\n frequency: Billing frequency (optional, for recurring products)\n frequency_type: Frequency type (e.g., 'month', 'year')" + "title": "LogEntry", + "description": "Cloud Logging log entry" }, - "MoneybirdProject": { + "LogSink": { "properties": { "name": { "type": "string", "title": "Name", - "description": "Project name" + "description": "Sink name" }, - "state": { - "type": "string", - "title": "State", - "description": "Project state (active, archived)", - "default": "active" + "filter": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Filter", + "description": "Log filter expression" }, - "budget": { + "description": { "anyOf": [ { - "type": "number" + "type": "string" }, { "type": "null" } ], - "title": "Budget", - "description": "Project budget" + "title": "Description", + "description": "Sink description" + }, + "created_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Created At", + "description": "Creation timestamp" + }, + "updated_at": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Updated At", + "description": "Update timestamp" } }, "type": "object", "required": [ "name" ], - "title": "MoneybirdProject", - "description": "Moneybird project for time tracking and invoicing - API format\n\nAttributes:\n name: Project name\n state: Project state (active, archived)\n budget: Project budget" + "title": "LogSink", + "description": "Cloud Logging sink" }, - "MoneybirdSalesInvoice": { + "McpServer": { "properties": { - "reference": { - "type": "string", - "title": "Reference", - "description": "Invoice reference number" - }, - "currency": { + "id": { "type": "string", - "title": "Currency", - "description": "Currency code", - "default": "EUR" - }, - "prices_are_incl_tax": { - "type": "boolean", - "title": "Prices Are Incl Tax", - "description": "Whether prices include tax", - "default": false + "format": "uuid", + "title": "Id" }, - "state": { + "name": { "type": "string", - "title": "State", - "description": "Invoice state (draft, open, paid)", - "default": "draft" + "title": "Name" }, - "details": { - "items": { - "$ref": "#/components/schemas/MoneybirdInvoiceDetail" - }, - "type": "array", - "minItems": 1, - "title": "Details", - "description": "Invoice line items" - } - }, - "type": "object", - "required": [ - "reference", - "details" - ], - "title": "MoneybirdSalesInvoice", - "description": "Moneybird 
sales invoice - API format\n\nNote: During initialization, contact_id will be automatically assigned based on\nthe contacts created. Invoice dates will be calculated dynamically.\n\nAttributes:\n reference: Invoice reference number\n currency: Currency code (e.g., 'EUR')\n prices_are_incl_tax: Whether prices include tax\n state: Invoice state (draft, open, paid)\n details: List of invoice line items" - }, - "MoneybirdTimeEntry": { - "properties": { "description": { - "type": "string", - "title": "Description", - "description": "Description of work performed" + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description" }, - "hours": { - "type": "number", - "exclusiveMinimum": 0, - "title": "Hours", - "description": "Number of hours worked" + "tools": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/ServerTool" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Tools" }, - "billable": { + "authNeeded": { "type": "boolean", - "title": "Billable", - "description": "Whether this time is billable", + "title": "Authneeded", "default": true } }, "type": "object", "required": [ - "description", - "hours" + "id", + "name" + ], + "title": "McpServer" + }, + "McpServerName": { + "type": "string", + "enum": [ + "Affinity", + "Airtable", + "Amplitude", + "Asana", + "Attio", + "Box", + "Brave Search", + "Cal.com", + "Canva", + "ClickUp", + "Close", + "Cloudflare", + "Coinbase", + "Confluence", + "Dialpad", + "Discord", + "Doc2markdown", + "DocuSign", + "Dropbox", + "ElevenLabs", + "Exa", + "Fathom", + "Fetch URL", + "Figma", + "Fireflies", + "Firecrawl Deep Research", + "Firecrawl Web Search", + "Freshdesk", + "GitHub", + "GitLab", + "Gmail", + "Gong", + "Google Calendar", + "Google Docs", + "Google Drive", + "Google Jobs", + "Google Sheets", + "Google Forms", + "Google Cloud", + "Hacker News", + "Heygen", + "HubSpot", + "Intercom", + "Jira", + "Klaviyo", + "Klavis ReportGen", + "Linear", + "LinkedIn", + "Markdown2doc", + "Mem0", + "Microsoft Teams", + "Mixpanel", + "Monday", + "Moneybird", + "Motion", + "Notion", + "OneDrive", + "Openrouter", + "Outlook Mail", + "Outlook Calendar", + "PagerDuty", + "Pipedrive", + "Plai", + "Postgres", + "PostHog", + "Postman", + "QuickBooks", + "Resend", + "Salesforce", + "SendGrid", + "Shopify", + "Slack", + "Snowflake", + "Stripe", + "Supabase", + "Tavily", + "Vercel", + "WhatsApp", + "WordPress", + "YouTube", + "Zendesk", + "ServiceNow", + "PayPal", + "Sentry", + "Netlify", + "Hugging Face", + "Square", + "Clockwise", + "Jotform", + "Honeycomb", + "Zoho Mail", + "Sharesight", + "Weights and Biases", + "Instagram" ], - "title": "MoneybirdTimeEntry", - "description": "Moneybird time entry - API format\n\nNote: During initialization, this will be linked to a project and contact automatically.\nThe started_at and ended_at timestamps will be generated based on the hours field.\n\nAttributes:\n description: Description of work performed\n hours: Number of hours worked\n billable: Whether this time is billable" + "title": "McpServerName" }, - "MotionComment": { + "Mem0Data-Input": { "properties": { - "text": { - "type": "string", - "title": "Text", - "description": "Comment text content" + "memory_list": { + "items": { + "$ref": "#/components/schemas/Mem0Memory" + }, + "type": "array", + "title": "Memory List", + "description": "List of memories" } }, "type": "object", - "required": [ - "text" - ], - "title": "MotionComment", - "description": "Motion Comment object - supports DELETE operation" + "title": 
"Mem0Data", + "description": "Complete Mem0 sandbox data structure" }, - "MotionData-Input": { + "Mem0Data-Output": { "properties": { - "workspaces": { + "memory_list": { "items": { - "$ref": "#/components/schemas/MotionWorkspace" + "$ref": "#/components/schemas/Mem0Memory" }, "type": "array", - "title": "Workspaces", - "description": "List of workspaces with their projects and tasks" + "title": "Memory List", + "description": "List of memories" } }, "type": "object", - "title": "MotionData", - "description": "Complete Motion sandbox data structure.\n\nHierarchical structure for initialization:\n- Workspaces contain Projects\n- Projects contain Tasks\n- Tasks contain Comments\n\nDelete operation support:\n- Tasks: YES\n- Comments: YES\n- Projects: YES\n- Workspaces: NO (organization level, typically not deleted)" - }, - "MotionData-Output": { - "additionalProperties": true, - "type": "object" + "title": "Mem0Data", + "description": "Complete Mem0 sandbox data structure" }, - "MotionProject": { + "Mem0Memory": { "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Project name" + "id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Id", + "description": "Memory ID (read-only, set by Mem0)" }, - "description": { + "content_text": { "anyOf": [ { "type": "string" @@ -20029,10 +19454,25 @@ "type": "null" } ], - "title": "Description", - "description": "Project description" + "title": "Content Text", + "description": "Memory content (returned from API)" }, - "due_date": { + "message_list": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/Mem0Message" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Message List", + "description": "Array of message objects representing the content" + }, + "agent_id": { "anyOf": [ { "type": "string" @@ -20041,10 +19481,10 @@ "type": "null" } ], - "title": "Due Date", - "description": "Project due date in ISO 8601 format" + "title": "Agent Id", + "description": "Agent ID" }, - "priority": { + "user_id": { "anyOf": [ { "type": "string" @@ -20053,29 +19493,97 @@ "type": "null" } ], - "title": "Priority", - "description": "Project priority: ASAP, HIGH, MEDIUM, or LOW" + "title": "User Id", + "description": "User ID" }, - "labels": { + "app_id": { "anyOf": [ { - "items": { - "type": "string" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Labels", - "description": "Project labels" + "title": "App Id", + "description": "App ID" }, - "tasks": { + "run_id": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Run Id", + "description": "Run ID" + }, + "metadata": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Metadata", + "description": "Additional metadata" + }, + "include_preferences": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Include Preferences", + "description": "String to include specific preferences in the memory" + }, + "exclude_preferences": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Exclude Preferences", + "description": "String to exclude specific preferences in the memory" + }, + "auto_infer": { + "type": "boolean", + "title": "Auto Infer", + "description": "Whether to infer memories or directly store messages", + "default": true + }, + "response_format": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": 
"Response Format", + "description": "Response format structure (v1.0 or v1.1)", + "default": "v1.1" + }, + "category_definitions": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/MotionTask" + "additionalProperties": { + "type": "string" + }, + "type": "object" }, "type": "array" }, @@ -20083,25 +19591,10 @@ "type": "null" } ], - "title": "Tasks", - "description": "Tasks in this project" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "MotionProject", - "description": "Motion Project object - supports DELETE operation" - }, - "MotionTask": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Task name/title" + "title": "Category Definitions", + "description": "List of custom categories with name and description" }, - "description": { + "processing_instructions": { "anyOf": [ { "type": "string" @@ -20110,22 +19603,34 @@ "type": "null" } ], - "title": "Description", - "description": "Task description (supports Markdown)" + "title": "Processing Instructions", + "description": "Project-specific guidelines for handling memories" }, - "status": { + "read_only": { + "type": "boolean", + "title": "Read Only", + "description": "Whether the memory is immutable", + "default": false + }, + "background_processing": { + "type": "boolean", + "title": "Background Processing", + "description": "Whether to add memory asynchronously", + "default": true + }, + "timestamp": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "Status", - "description": "Task status (e.g., 'TODO', 'IN_PROGRESS', 'COMPLETED')" + "title": "Timestamp", + "description": "Unix timestamp of the memory" }, - "priority": { + "created_at": { "anyOf": [ { "type": "string" @@ -20134,10 +19639,10 @@ "type": "null" } ], - "title": "Priority", - "description": "Task priority: ASAP, HIGH, MEDIUM, or LOW" + "title": "Created At", + "description": "Creation timestamp" }, - "due_date": { + "updated_at": { "anyOf": [ { "type": "string" @@ -20146,41 +19651,77 @@ "type": "null" } ], - "title": "Due Date", - "description": "Due date in ISO 8601 format" + "title": "Updated At", + "description": "Last update timestamp" + } + }, + "type": "object", + "title": "Mem0Memory", + "description": "Mem0 Memory object" + }, + "Mem0Message": { + "properties": { + "role": { + "type": "string", + "title": "Role", + "description": "Role of the message sender (user or assistant)" }, - "duration": { + "content": { + "type": "string", + "title": "Content", + "description": "Content of the message" + } + }, + "type": "object", + "required": [ + "role", + "content" + ], + "title": "Mem0Message", + "description": "Message object for memory" + }, + "MondayBoard": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Board name" + }, + "description": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Duration", - "description": "Estimated duration in minutes" + "title": "Description", + "description": "Board description" }, - "labels": { + "board_kind": { + "type": "string", + "title": "Board Kind", + "description": "Board kind (public, private, share)", + "default": "public" + }, + "workspace_id": { "anyOf": [ { - "items": { - "type": "string" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Labels", - "description": "Task labels for categorization" + "title": "Workspace Id", + "description": "Workspace ID this board belongs to (for initialization)" }, - "comments": 
{ + "groups": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/MotionComment" + "$ref": "#/components/schemas/MondayGroup" }, "type": "array" }, @@ -20188,29 +19729,24 @@ "type": "null" } ], - "title": "Comments", - "description": "Comments on this task" + "title": "Groups", + "description": "Groups in this board" } }, "type": "object", "required": [ "name" ], - "title": "MotionTask", - "description": "Motion Task object - supports DELETE operation" + "title": "MondayBoard", + "description": "Monday.com Board object - DELETE SUPPORTED" }, - "MotionWorkspace": { + "MondayData-Input": { "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Workspace name" - }, - "projects": { + "workspaces": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/MotionProject" + "$ref": "#/components/schemas/MondayWorkspace" }, "type": "array" }, @@ -20218,39 +19754,50 @@ "type": "null" } ], - "title": "Projects", - "description": "Projects in this workspace" + "title": "Workspaces", + "description": "List of workspaces" + }, + "boards": { + "items": { + "$ref": "#/components/schemas/MondayBoard" + }, + "type": "array", + "title": "Boards", + "description": "List of boards with their groups and items" } }, "type": "object", - "required": [ - "name" - ], - "title": "MotionWorkspace", - "description": "Motion Workspace object - typically NOT deleted (organization level)" + "title": "MondayData", + "description": "Complete Monday.com sandbox data structure.\n\nRelational structure for initialization:\n- Workspaces (optional, can be created separately)\n- Boards contain Groups\n- Groups contain Items\n- Items contain Updates and Subitems\n\nAll objects support DELETE operations." }, - "MsTeamsData-Input": { + "MondayData-Output": { + "additionalProperties": true, + "type": "object" + }, + "MondayGroup": { "properties": { - "channels": { + "title": { + "type": "string", + "title": "Title", + "description": "Group title" + }, + "color": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/TeamsChannel" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Channels", - "description": "List of team channels" + "title": "Color", + "description": "Group color (e.g., '#FF0000', 'red')" }, - "chats": { + "items": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/TeamsChat" + "$ref": "#/components/schemas/MondayItem" }, "type": "array" }, @@ -20258,21 +19805,42 @@ "type": "null" } ], - "title": "Chats", - "description": "List of one-on-one chats" + "title": "Items", + "description": "Items in this group" } }, "type": "object", - "title": "MsTeamsData", - "description": "Complete Microsoft Teams sandbox data structure" + "required": [ + "title" + ], + "title": "MondayGroup", + "description": "Monday.com Group (Section) object - DELETE SUPPORTED" }, - "MsTeamsData-Output": { + "MondayItem": { "properties": { - "channels": { + "name": { + "type": "string", + "title": "Name", + "description": "Item name" + }, + "column_values": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Column Values", + "description": "Column values as JSON object" + }, + "updates": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/TeamsChannel" + "$ref": "#/components/schemas/MondayUpdate" }, "type": "array" }, @@ -20280,14 +19848,14 @@ "type": "null" } ], - "title": "Channels", - "description": "List of team channels" + "title": "Updates", + "description": "Updates/comments on this item" }, - 
"chats": { + "subitems": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/TeamsChat" + "$ref": "#/components/schemas/MondaySubitem" }, "type": "array" }, @@ -20295,99 +19863,156 @@ "type": "null" } ], - "title": "Chats", - "description": "List of one-on-one chats" + "title": "Subitems", + "description": "Subitems (nested items)" } }, "type": "object", - "title": "MsTeamsData", - "description": "Complete Microsoft Teams sandbox data structure" + "required": [ + "name" + ], + "title": "MondayItem", + "description": "Monday.com Item (Task/Row) object - DELETE SUPPORTED" }, - "NotionBlock": { + "MondaySubitem": { "properties": { - "type": { + "name": { "type": "string", - "title": "Type", - "description": "Block type (paragraph, heading_1, heading_2, etc.)" + "title": "Name", + "description": "Subitem name" }, - "content": { + "column_values": { + "anyOf": [ + { + "additionalProperties": true, + "type": "object" + }, + { + "type": "null" + } + ], + "title": "Column Values", + "description": "Column values as JSON object" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "MondaySubitem", + "description": "Monday.com Subitem object (nested item) - DELETE SUPPORTED" + }, + "MondayUpdate": { + "properties": { + "body": { "type": "string", - "title": "Content", - "description": "Block text content" + "title": "Body", + "description": "Update text content" } }, "type": "object", "required": [ - "type", - "content" + "body" ], - "title": "NotionBlock", - "description": "Notion Block object - fundamental content unit.\n\nBlocks can be: paragraph, heading_1, heading_2, heading_3, bulleted_list_item,\nnumbered_list_item, to_do, toggle, code, quote, callout, etc." + "title": "MondayUpdate", + "description": "Monday.com Update (Comment) object - DELETE SUPPORTED" }, - "NotionComment": { + "MondayWorkspace": { "properties": { - "text": { + "name": { "type": "string", - "title": "Text", - "description": "Comment text content (rich text)" + "title": "Name", + "description": "Workspace name" + }, + "kind": { + "type": "string", + "title": "Kind", + "description": "Workspace kind (open, closed)", + "default": "open" + }, + "description": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Description", + "description": "Workspace description" } }, "type": "object", "required": [ - "text" + "name" ], - "title": "NotionComment", - "description": "Notion Comment object - can be attached to pages or blocks" + "title": "MondayWorkspace", + "description": "Monday.com Workspace object - DELETE SUPPORTED" }, - "NotionData-Input": { + "MoneybirdContact": { "properties": { - "databases": { + "company_name": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/NotionDatabase" - }, - "type": "array" + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Company Name", + "description": "Company name" + }, + "firstname": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Firstname", + "description": "First name" + }, + "lastname": { + "anyOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ], + "title": "Lastname", + "description": "Last name" + }, + "phone": { + "anyOf": [ + { + "type": "string" }, { "type": "null" } ], - "title": "Databases", - "description": "List of databases with their data sources and pages" + "title": "Phone", + "description": "Phone number" }, - "pages": { + "address1": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/NotionPage" - }, - "type": "array" + "type": 
"string" }, { "type": "null" } ], - "title": "Pages", - "description": "List of standalone pages (not in databases)" - } - }, - "type": "object", - "title": "NotionData", - "description": "Complete Notion sandbox data structure.\n\nHierarchical structure for initialization:\n- Databases contain Data Sources\n- Data Sources contain Pages (with properties matching the schema)\n- Standalone Pages exist at top level (not in databases)\n- Pages contain Blocks (content) and Comments\n\nRelational structure:\n- Pages refer to Data Sources (via data_source_name)\n- Data Sources are nested in Databases" - }, - "NotionData-Output": { - "additionalProperties": true, - "type": "object" - }, - "NotionDataSource": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Data source name" + "title": "Address1", + "description": "Street address" }, - "description": { + "city": { "anyOf": [ { "type": "string" @@ -20396,53 +20021,34 @@ "type": "null" } ], - "title": "Description", - "description": "Data source description" + "title": "City", + "description": "City" }, - "schema": { + "zipcode": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Schema", - "description": "Schema definition (property types and configurations)" + "title": "Zipcode", + "description": "Postal code" }, - "pages": { + "country": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/NotionPage" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Pages", - "description": "Pages (items/rows) in this data source" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "NotionDataSource", - "description": "Notion Data Source object - represents a table within a database.\n\nNew in API version 2025-09-03:\n- A database can contain multiple data sources\n- Each data source has its own schema (properties)\n- Pages within a data source follow that schema" - }, - "NotionDatabase": { - "properties": { - "title": { - "type": "string", - "title": "Title", - "description": "Database title" + "title": "Country", + "description": "Country code" }, - "description": { + "sepa_iban": { "anyOf": [ { "type": "string" @@ -20451,26 +20057,36 @@ "type": "null" } ], - "title": "Description", - "description": "Database description" - }, - "icon": { + "title": "Sepa Iban", + "description": "SEPA IBAN" + } + }, + "type": "object", + "title": "MoneybirdContact", + "description": "Moneybird contact (customer/supplier) - API format\n\nAttributes:\n company_name: Company name (optional, for business contacts)\n firstname: First name (optional, for individual contacts)\n lastname: Last name (optional, for individual contacts)\n phone: Phone number\n address1: Street address\n city: City\n zipcode: Postal code\n country: Country code (e.g., 'NL', 'DE', 'FR')\n sepa_iban: SEPA IBAN for direct debit (optional)" + }, + "MoneybirdData-Input": { + "properties": { + "ledger_accounts": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/MoneybirdLedgerAccount" + }, + "type": "array" }, { "type": "null" } ], - "title": "Icon", - "description": "Database icon (emoji or external URL)" + "title": "Ledger Accounts", + "description": "List of ledger accounts to create" }, - "data_sources": { + "contacts": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/NotionDataSource" + "$ref": "#/components/schemas/MoneybirdContact" }, "type": "array" }, @@ -20478,42 +20094,29 @@ "type": "null" } ], - 
"title": "Data Sources", - "description": "Data sources within this database" - } - }, - "type": "object", - "required": [ - "title" - ], - "title": "NotionDatabase", - "description": "Notion Database object - container for data sources.\n\nDatabases organize structured data and can contain multiple data sources.\nEach data source represents a table with its own schema." - }, - "NotionPage": { - "properties": { - "title": { - "type": "string", - "title": "Title", - "description": "Page title" + "title": "Contacts", + "description": "List of contacts to create" }, - "properties": { + "products": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "$ref": "#/components/schemas/MoneybirdProduct" + }, + "type": "array" }, { "type": "null" } ], - "title": "Properties", - "description": "Page properties (for database pages)" + "title": "Products", + "description": "List of products to create" }, - "blocks": { + "projects": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/NotionBlock" + "$ref": "#/components/schemas/MoneybirdProject" }, "type": "array" }, @@ -20521,14 +20124,14 @@ "type": "null" } ], - "title": "Blocks", - "description": "Content blocks in this page" + "title": "Projects", + "description": "List of projects to create" }, - "comments": { + "time_entries": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/NotionComment" + "$ref": "#/components/schemas/MoneybirdTimeEntry" }, "type": "array" }, @@ -20536,121 +20139,156 @@ "type": "null" } ], - "title": "Comments", - "description": "Comments on this page" + "title": "Time Entries", + "description": "List of time entries to create" }, - "data_source_name": { + "sales_invoices": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/MoneybirdSalesInvoice" + }, + "type": "array" }, { "type": "null" } ], - "title": "Data Source Name", - "description": "Name of the data source this page belongs to (for initialization)" + "title": "Sales Invoices", + "description": "List of sales invoices to create" } }, "type": "object", - "required": [ - "title" - ], - "title": "NotionPage", - "description": "Notion Page object.\n\nPages can be:\n1. Standalone pages (top-level in workspace)\n2. Items/rows within a database data source\n\nFor pages in databases, properties should match the data source schema." - }, - "OAuthServerName": { - "type": "string", - "enum": [ - "Airtable", - "Asana", - "Attio", - "Box", - "Cal.com", - "Canva", - "ClickUp", - "Close", - "Confluence", - "Dialpad", - "DocuSign", - "Dropbox", - "Fathom", - "Figma", - "GitHub", - "GitLab", - "Gmail", - "Google Calendar", - "Google Docs", - "Google Drive", - "Google Sheets", - "Google Forms", - "Google Cloud", - "HubSpot", - "Jira", - "Klaviyo", - "Linear", - "LinkedIn", - "Microsoft Teams", - "Monday", - "Moneybird", - "Notion", - "Onedrive", - "Outlook", - "MsCalendar", - "PagerDuty", - "Pipedrive", - "QuickBooks", - "Salesforce", - "Slack", - "Stripe", - "Supabase", - "Vercel", - "WordPress", - "Xero", - "Zendesk", - "PayPal", - "Sentry", - "Netlify", - "Hugging Face", - "Square", - "Clockwise", - "Jotform", - "Honeycomb", - "Zoho Mail" - ], - "title": "OAuthServerName" + "title": "MoneybirdData", + "description": "Complete Moneybird sandbox data structure - API format\n\nNote: Objects are created in dependency order:\n1. Ledger accounts (no dependencies)\n2. Contacts (no dependencies)\n3. Products (no dependencies, but needs tax rates from system)\n4. Projects (no dependencies)\n5. 
Time entries (linked to projects and contacts)\n6. Sales invoices (linked to contacts)" }, - "OneDriveData-Input": { + "MoneybirdData-Output": { "properties": { - "root": { - "$ref": "#/components/schemas/OneDriveFolder-Input", - "description": "Root folder containing all subfolders and files" + "ledger_accounts": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/MoneybirdLedgerAccount" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Ledger Accounts", + "description": "List of ledger accounts to create" + }, + "contacts": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/MoneybirdContact" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Contacts", + "description": "List of contacts to create" + }, + "products": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/MoneybirdProduct" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Products", + "description": "List of products to create" + }, + "projects": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/MoneybirdProject" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Projects", + "description": "List of projects to create" + }, + "time_entries": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/MoneybirdTimeEntry" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Time Entries", + "description": "List of time entries to create" + }, + "sales_invoices": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/MoneybirdSalesInvoice" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Sales Invoices", + "description": "List of sales invoices to create" } }, "type": "object", - "required": [ - "root" - ], - "title": "OneDriveData", - "description": "Complete OneDrive sandbox data structure" + "title": "MoneybirdData", + "description": "Complete Moneybird sandbox data structure - API format\n\nNote: Objects are created in dependency order:\n1. Ledger accounts (no dependencies)\n2. Contacts (no dependencies)\n3. Products (no dependencies, but needs tax rates from system)\n4. Projects (no dependencies)\n5. Time entries (linked to projects and contacts)\n6. 
Sales invoices (linked to contacts)" }, - "OneDriveData-Output": { + "MoneybirdInvoiceDetail": { "properties": { - "root": { - "$ref": "#/components/schemas/OneDriveFolder-Output", - "description": "Root folder containing all subfolders and files" + "description": { + "type": "string", + "title": "Description", + "description": "Line item description" + }, + "price": { + "type": "string", + "title": "Price", + "description": "Unit price as string" + }, + "amount": { + "type": "string", + "title": "Amount", + "description": "Quantity as string" } }, "type": "object", "required": [ - "root" + "description", + "price", + "amount" ], - "title": "OneDriveData", - "description": "Complete OneDrive sandbox data structure" + "title": "MoneybirdInvoiceDetail", + "description": "Invoice line item details" }, - "OneDriveFile": { + "MoneybirdLedgerAccount": { "properties": { - "id": { + "rgs_code": { "anyOf": [ { "type": "string" @@ -20659,15 +20297,20 @@ "type": "null" } ], - "title": "Id", - "description": "File ID (read-only, set by OneDrive)" + "title": "Rgs Code", + "description": "RGS code - required for creation, not returned by GET API" }, "name": { "type": "string", "title": "Name", - "description": "File name" + "description": "Account name" }, - "content": { + "account_type": { + "type": "string", + "title": "Account Type", + "description": "Account type (revenue, expense, etc.)" + }, + "account_id": { "anyOf": [ { "type": "string" @@ -20676,156 +20319,227 @@ "type": "null" } ], - "title": "Content", - "description": "File content (text files only)" + "title": "Account Id", + "description": "Account identifier (optional for system accounts)" } }, "type": "object", "required": [ - "name" + "name", + "account_type" ], - "title": "OneDriveFile", - "description": "OneDrive File object" + "title": "MoneybirdLedgerAccount", + "description": "Moneybird ledger account (chart of accounts) - API format\n\nAttributes:\n rgs_code: Reference Grootboekschema (RGS) code - required for creation\n name: Account name\n account_type: Account type (revenue, expense, etc.)\n account_id: Account identifier (optional, system accounts may not have this)" }, - "OneDriveFolder-Input": { + "MoneybirdProduct": { "properties": { - "id": { + "description": { + "type": "string", + "title": "Description", + "description": "Product description" + }, + "title": { + "type": "string", + "title": "Title", + "description": "Product title/name" + }, + "price": { + "type": "string", + "title": "Price", + "description": "Product price as string" + }, + "currency": { + "type": "string", + "title": "Currency", + "description": "Currency code (e.g., 'EUR')" + }, + "identifier": { + "type": "string", + "title": "Identifier", + "description": "Product SKU/identifier" + }, + "frequency": { "anyOf": [ { - "type": "string" + "type": "integer" }, { "type": "null" } ], - "title": "Id", - "description": "Folder ID (read-only, set by OneDrive)" - }, - "name": { - "type": "string", - "title": "Name", - "description": "Folder name" + "title": "Frequency", + "description": "Billing frequency" }, - "folders": { + "frequency_type": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/OneDriveFolder-Input" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Folders", - "description": "List of subfolders" + "title": "Frequency Type", + "description": "Frequency type (month, year)" + } + }, + "type": "object", + "required": [ + "description", + "title", + "price", + "currency", + "identifier" + ], + "title": 
"MoneybirdProduct", + "description": "Moneybird product/service - API format\n\nAttributes:\n description: Product description\n title: Product title/name\n price: Product price\n currency: Currency code (e.g., 'EUR')\n identifier: Product SKU/identifier\n frequency: Billing frequency (optional, for recurring products)\n frequency_type: Frequency type (e.g., 'month', 'year')" + }, + "MoneybirdProject": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Project name" }, - "files": { + "state": { + "type": "string", + "title": "State", + "description": "Project state (active, archived)", + "default": "active" + }, + "budget": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/OneDriveFile" - }, - "type": "array" + "type": "number" }, { "type": "null" } ], - "title": "Files", - "description": "List of files in the folder" + "title": "Budget", + "description": "Project budget" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "MoneybirdProject", + "description": "Moneybird project for time tracking and invoicing - API format\n\nAttributes:\n name: Project name\n state: Project state (active, archived)\n budget: Project budget" + }, + "MoneybirdSalesInvoice": { + "properties": { + "reference": { + "type": "string", + "title": "Reference", + "description": "Invoice reference number" + }, + "currency": { + "type": "string", + "title": "Currency", + "description": "Currency code", + "default": "EUR" + }, + "prices_are_incl_tax": { + "type": "boolean", + "title": "Prices Are Incl Tax", + "description": "Whether prices include tax", + "default": false + }, + "state": { + "type": "string", + "title": "State", + "description": "Invoice state (draft, open, paid)", + "default": "draft" + }, + "details": { + "items": { + "$ref": "#/components/schemas/MoneybirdInvoiceDetail" + }, + "type": "array", + "minItems": 1, + "title": "Details", + "description": "Invoice line items" } }, "type": "object", "required": [ - "name" + "reference", + "details" ], - "title": "OneDriveFolder", - "description": "OneDrive Folder object" + "title": "MoneybirdSalesInvoice", + "description": "Moneybird sales invoice - API format\n\nNote: During initialization, contact_id will be automatically assigned based on\nthe contacts created. 
Invoice dates will be calculated dynamically.\n\nAttributes:\n reference: Invoice reference number\n currency: Currency code (e.g., 'EUR')\n prices_are_incl_tax: Whether prices include tax\n state: Invoice state (draft, open, paid)\n details: List of invoice line items" }, - "OneDriveFolder-Output": { + "MoneybirdTimeEntry": { "properties": { - "id": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Id", - "description": "Folder ID (read-only, set by OneDrive)" - }, - "name": { + "description": { "type": "string", - "title": "Name", - "description": "Folder name" + "title": "Description", + "description": "Description of work performed" }, - "folders": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/OneDriveFolder-Output" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Folders", - "description": "List of subfolders" + "hours": { + "type": "number", + "exclusiveMinimum": 0, + "title": "Hours", + "description": "Number of hours worked" }, - "files": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/OneDriveFile" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Files", - "description": "List of files in the folder" + "billable": { + "type": "boolean", + "title": "Billable", + "description": "Whether this time is billable", + "default": true } }, "type": "object", "required": [ - "name" + "description", + "hours" ], - "title": "OneDriveFolder", - "description": "OneDrive Folder object" + "title": "MoneybirdTimeEntry", + "description": "Moneybird time entry - API format\n\nNote: During initialization, this will be linked to a project and contact automatically.\nThe started_at and ended_at timestamps will be generated based on the hours field.\n\nAttributes:\n description: Description of work performed\n hours: Number of hours worked\n billable: Whether this time is billable" }, - "OutlookCalendarData": { + "MotionComment": { "properties": { - "events": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/OutlookCalendarEvent" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Events", - "description": "List of calendar events" + "text": { + "type": "string", + "title": "Text", + "description": "Comment text content" } }, "type": "object", - "title": "OutlookCalendarData", - "description": "Complete Outlook Calendar sandbox data structure" + "required": [ + "text" + ], + "title": "MotionComment", + "description": "Motion Comment object - supports DELETE operation" }, - "OutlookCalendarEvent": { + "MotionData-Input": { "properties": { - "id": { + "workspaces": { + "items": { + "$ref": "#/components/schemas/MotionWorkspace" + }, + "type": "array", + "title": "Workspaces", + "description": "List of workspaces with their projects and tasks" + } + }, + "type": "object", + "title": "MotionData", + "description": "Complete Motion sandbox data structure.\n\nHierarchical structure for initialization:\n- Workspaces contain Projects\n- Projects contain Tasks\n- Tasks contain Comments\n\nDelete operation support:\n- Tasks: YES\n- Comments: YES\n- Projects: YES\n- Workspaces: NO (organization level, typically not deleted)" + }, + "MotionData-Output": { + "additionalProperties": true, + "type": "object" + }, + "MotionProject": { + "properties": { + "name": { + "type": "string", + "title": "Name", + "description": "Project name" + }, + "description": { "anyOf": [ { "type": "string" @@ -20834,58 +20548,53 @@ "type": "null" } ], - "title": "Id", - "description": "Event ID 
(read-only, set by Outlook)" - }, - "subject": { - "type": "string", - "title": "Subject", - "description": "Event subject/title" + "title": "Description", + "description": "Project description" }, - "start": { + "due_date": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Start", - "description": "Start time with dateTime and timeZone" + "title": "Due Date", + "description": "Project due date in ISO 8601 format" }, - "end": { + "priority": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "End", - "description": "End time with dateTime and timeZone" + "title": "Priority", + "description": "Project priority: ASAP, HIGH, MEDIUM, or LOW" }, - "location": { + "labels": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Location", - "description": "Location object with displayName" + "title": "Labels", + "description": "Project labels" }, - "attendees": { + "tasks": { "anyOf": [ { "items": { - "type": "string" + "$ref": "#/components/schemas/MotionTask" }, "type": "array" }, @@ -20893,42 +20602,37 @@ "type": "null" } ], - "title": "Attendees", - "description": "List of attendee email addresses (e.g., ['user@example.com'])" + "title": "Tasks", + "description": "Tasks in this project" } }, "type": "object", "required": [ - "subject" + "name" ], - "title": "OutlookCalendarEvent", - "description": "Outlook Calendar Event object" + "title": "MotionProject", + "description": "Motion Project object - supports DELETE operation" }, - "OutlookMailData": { + "MotionTask": { "properties": { - "messages": { + "name": { + "type": "string", + "title": "Name", + "description": "Task name/title" + }, + "description": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/OutlookMailMessage" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Messages", - "description": "List of mail messages" - } - }, - "type": "object", - "title": "OutlookMailData", - "description": "Complete Outlook Mail sandbox data structure" - }, - "OutlookMailMessage": { - "properties": { - "id": { + "title": "Description", + "description": "Task description (supports Markdown)" + }, + "status": { "anyOf": [ { "type": "string" @@ -20937,56 +20641,46 @@ "type": "null" } ], - "title": "Id", - "description": "Message ID (read-only, set by Outlook)" - }, - "subject": { - "type": "string", - "title": "Subject", - "description": "Email subject" + "title": "Status", + "description": "Task status (e.g., 'TODO', 'IN_PROGRESS', 'COMPLETED')" }, - "body": { + "priority": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "Body", - "description": "Email body with content and contentType" + "title": "Priority", + "description": "Task priority: ASAP, HIGH, MEDIUM, or LOW" }, - "from": { + "due_date": { "anyOf": [ { - "additionalProperties": true, - "type": "object" + "type": "string" }, { "type": "null" } ], - "title": "From", - "description": "From address with emailAddress object" + "title": "Due Date", + "description": "Due date in ISO 8601 format" }, - "to_recipients": { + "duration": { "anyOf": [ { - "items": { - "type": "string" - }, - "type": "array" + "type": "integer" }, { "type": "null" } ], - "title": "To Recipients", - "description": "List of recipient email addresses" + "title": "Duration", + 
"description": "Estimated duration in minutes" }, - "cc_recipients": { + "labels": { "anyOf": [ { "items": { @@ -20997,78 +20691,45 @@ { "type": "null" } - ], - "title": "Cc Recipients", - "description": "List of CC recipient email addresses" - }, - "is_read": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "title": "Is Read", - "description": "Whether the message has been read", - "default": false + ], + "title": "Labels", + "description": "Task labels for categorization" }, - "received_date_time": { + "comments": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/MotionComment" + }, + "type": "array" }, { "type": "null" } ], - "title": "Received Date Time", - "description": "Date and time the message was received" + "title": "Comments", + "description": "Comments on this task" } }, "type": "object", "required": [ - "subject" + "name" ], - "title": "OutlookMailMessage", - "description": "Outlook Mail Message object" + "title": "MotionTask", + "description": "Motion Task object - supports DELETE operation" }, - "Override": { + "MotionWorkspace": { "properties": { - "date": { - "type": "string", - "pattern": "^\\d{4}-\\d{2}-\\d{2}$", - "title": "Date" - }, - "startTime": { + "name": { "type": "string", - "pattern": "^\\d{2}:\\d{2}$", - "title": "Starttime" + "title": "Name", + "description": "Workspace name" }, - "endTime": { - "type": "string", - "pattern": "^\\d{2}:\\d{2}$", - "title": "Endtime" - } - }, - "type": "object", - "required": [ - "date", - "startTime", - "endTime" - ], - "title": "Override", - "description": "Represents a date-specific override to the regular schedule.\n\nAttributes:\n date: Date in YYYY-MM-DD format\n startTime: Start time in HH:MM format (24-hour)\n endTime: End time in HH:MM format (24-hour)" - }, - "QuickBooksData": { - "properties": { - "accounts": { + "projects": { "anyOf": [ { "items": { - "additionalProperties": true, - "type": "object" + "$ref": "#/components/schemas/MotionProject" }, "type": "array" }, @@ -21076,15 +20737,24 @@ "type": "null" } ], - "title": "Accounts", - "description": "List of accounts (input: minimal, output: full API objects)" - }, - "customers": { + "title": "Projects", + "description": "Projects in this workspace" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "MotionWorkspace", + "description": "Motion Workspace object - typically NOT deleted (organization level)" + }, + "MsTeamsData-Input": { + "properties": { + "team_channels": { "anyOf": [ { "items": { - "additionalProperties": true, - "type": "object" + "$ref": "#/components/schemas/TeamsChannel" }, "type": "array" }, @@ -21092,15 +20762,14 @@ "type": "null" } ], - "title": "Customers", - "description": "List of customers (input: minimal, output: full API objects)" + "title": "Team Channels", + "description": "List of team channels" }, - "vendors": { + "team_chats": { "anyOf": [ { "items": { - "additionalProperties": true, - "type": "object" + "$ref": "#/components/schemas/TeamsChat" }, "type": "array" }, @@ -21108,15 +20777,21 @@ "type": "null" } ], - "title": "Vendors", - "description": "List of vendors (input: minimal, output: full API objects)" - }, - "invoices": { + "title": "Team Chats", + "description": "List of one-on-one chats" + } + }, + "type": "object", + "title": "MsTeamsData", + "description": "Complete Microsoft Teams sandbox data structure" + }, + "MsTeamsData-Output": { + "properties": { + "team_channels": { "anyOf": [ { "items": { - "additionalProperties": true, - "type": 
"object" + "$ref": "#/components/schemas/TeamsChannel" }, "type": "array" }, @@ -21124,15 +20799,14 @@ "type": "null" } ], - "title": "Invoices", - "description": "List of invoices (input: minimal, output: full API objects)" + "title": "Team Channels", + "description": "List of team channels" }, - "payments": { + "team_chats": { "anyOf": [ { "items": { - "additionalProperties": true, - "type": "object" + "$ref": "#/components/schemas/TeamsChat" }, "type": "array" }, @@ -21140,92 +20814,99 @@ "type": "null" } ], - "title": "Payments", - "description": "List of payments (input: minimal, output: full API objects)" + "title": "Team Chats", + "description": "List of one-on-one chats" } }, - "additionalProperties": true, "type": "object", - "title": "QuickBooksData", - "description": "Complete QuickBooks sandbox data structure\n\nNote: For dump operations, accounts/customers/etc. will contain full API response objects\nwith additional fields beyond the minimal input schema. This is expected behavior." + "title": "MsTeamsData", + "description": "Complete Microsoft Teams sandbox data structure" }, - "RawActionsResponse": { + "NotionBlock": { "properties": { - "instanceId": { + "type": { "type": "string", - "title": "Instanceid", - "description": "The instance ID" + "title": "Type", + "description": "Block type (paragraph, heading_1, heading_2, etc.)" }, - "serverName": { + "content": { "type": "string", - "title": "Servername", - "description": "The requested server name" - }, - "result": { - "additionalProperties": true, - "type": "object", - "title": "Result", - "description": "Map of categories to raw actions" + "title": "Content", + "description": "Block text content" } }, "type": "object", "required": [ - "instanceId", - "serverName" + "type", + "content" ], - "title": "RawActionsResponse" + "title": "NotionBlock", + "description": "Notion Block object - fundamental content unit.\n\nBlocks can be: paragraph, heading_1, heading_2, heading_3, bulleted_list_item,\nnumbered_list_item, to_do, toggle, code, quote, callout, etc." 
}, - "ReleaseSandboxResponse": { + "NotionComment": { "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Released sandbox identifier" - }, - "status": { - "$ref": "#/components/schemas/SandboxStatus", - "description": "Current status after release (should be idle)" - }, - "message": { + "text": { "type": "string", - "title": "Message", - "description": "Release confirmation message" + "title": "Text", + "description": "Comment text content (rich text)" } }, "type": "object", "required": [ - "sandbox_id", - "status", - "message" + "text" ], - "title": "ReleaseSandboxResponse", - "description": "Response model for sandbox release (delete)" + "title": "NotionComment", + "description": "Notion Comment object - can be attached to pages or blocks" }, - "ResendBroadcast": { + "NotionData-Input": { "properties": { - "id": { + "databases": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/NotionDatabase" + }, + "type": "array" }, { "type": "null" } ], - "title": "Id", - "description": "Broadcast ID (read-only, set by Resend)" - }, - "from": { - "type": "string", - "title": "From", - "description": "Sender email with optional display name" + "title": "Databases", + "description": "List of databases with their data sources and pages" }, - "subject": { + "pages": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/NotionPage" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Pages", + "description": "List of standalone pages (not in databases)" + } + }, + "type": "object", + "title": "NotionData", + "description": "Complete Notion sandbox data structure.\n\nHierarchical structure for initialization:\n- Databases contain Data Sources\n- Data Sources contain Pages (with properties matching the schema)\n- Standalone Pages exist at top level (not in databases)\n- Pages contain Blocks (content) and Comments\n\nRelational structure:\n- Pages refer to Data Sources (via data_source_name)\n- Data Sources are nested in Databases" + }, + "NotionData-Output": { + "additionalProperties": true, + "type": "object" + }, + "NotionDataSource": { + "properties": { + "name": { "type": "string", - "title": "Subject", - "description": "Email subject" + "title": "Name", + "description": "Data source name" }, - "name": { + "description": { "anyOf": [ { "type": "string" @@ -21234,22 +20915,53 @@ "type": "null" } ], - "title": "Name", - "description": "Broadcast name for internal reference" + "title": "Description", + "description": "Data source description" }, - "html": { + "schema": { "anyOf": [ { - "type": "string" + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Html", - "description": "HTML content" + "title": "Schema", + "description": "Schema definition (property types and configurations)" + }, + "pages": { + "anyOf": [ + { + "items": { + "$ref": "#/components/schemas/NotionPage" + }, + "type": "array" + }, + { + "type": "null" + } + ], + "title": "Pages", + "description": "Pages (items/rows) in this data source" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "NotionDataSource", + "description": "Notion Data Source object - represents a table within a database.\n\nNew in API version 2025-09-03:\n- A database can contain multiple data sources\n- Each data source has its own schema (properties)\n- Pages within a data source follow that schema" + }, + "NotionDatabase": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "description": 
"Database title" }, - "text": { + "description": { "anyOf": [ { "type": "string" @@ -21258,10 +20970,10 @@ "type": "null" } ], - "title": "Text", - "description": "Plain text content" + "title": "Description", + "description": "Database description" }, - "reply_to": { + "icon": { "anyOf": [ { "type": "string" @@ -21270,58 +20982,83 @@ "type": "null" } ], - "title": "Reply To", - "description": "Reply-to email address" + "title": "Icon", + "description": "Database icon (emoji or external URL)" }, - "status": { + "data_sources": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/NotionDataSource" + }, + "type": "array" }, { "type": "null" } ], - "title": "Status", - "description": "Broadcast status (draft, sent, etc.)" + "title": "Data Sources", + "description": "Data sources within this database" + } + }, + "type": "object", + "required": [ + "title" + ], + "title": "NotionDatabase", + "description": "Notion Database object - container for data sources.\n\nDatabases organize structured data and can contain multiple data sources.\nEach data source represents a table with its own schema." + }, + "NotionPage": { + "properties": { + "title": { + "type": "string", + "title": "Title", + "description": "Page title" }, - "created_at": { + "properties": { "anyOf": [ { - "type": "string" + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Created At", - "description": "Creation timestamp" + "title": "Properties", + "description": "Page properties (for database pages)" }, - "scheduled_at": { + "blocks": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/NotionBlock" + }, + "type": "array" }, { "type": "null" } ], - "title": "Scheduled At", - "description": "Scheduled send timestamp" + "title": "Blocks", + "description": "Content blocks in this page" }, - "sent_at": { + "comments": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/NotionComment" + }, + "type": "array" }, { "type": "null" } ], - "title": "Sent At", - "description": "Actual send timestamp" + "title": "Comments", + "description": "Comments on this page" }, - "topic_id": { + "data_source_name": { "anyOf": [ { "type": "string" @@ -21330,19 +21067,117 @@ "type": "null" } ], - "title": "Topic Id", - "description": "Topic ID for scoping" + "title": "Data Source Name", + "description": "Name of the data source this page belongs to (for initialization)" } }, "type": "object", "required": [ - "from", - "subject" + "title" ], - "title": "ResendBroadcast", - "description": "Resend Broadcast object - child of Segment, requires parent segment" + "title": "NotionPage", + "description": "Notion Page object.\n\nPages can be:\n1. Standalone pages (top-level in workspace)\n2. Items/rows within a database data source\n\nFor pages in databases, properties should match the data source schema." 
}, - "ResendContact": { + "OAuthServerName": { + "type": "string", + "enum": [ + "Airtable", + "Asana", + "Attio", + "Box", + "Cal.com", + "Canva", + "ClickUp", + "Close", + "Confluence", + "Dialpad", + "DocuSign", + "Dropbox", + "Fathom", + "Figma", + "GitHub", + "GitLab", + "Gmail", + "Google Calendar", + "Google Docs", + "Google Drive", + "Google Sheets", + "Google Forms", + "Google Cloud", + "HubSpot", + "Jira", + "Klaviyo", + "Linear", + "LinkedIn", + "Microsoft Teams", + "Monday", + "Moneybird", + "Notion", + "Onedrive", + "Outlook", + "MsCalendar", + "PagerDuty", + "Pipedrive", + "QuickBooks", + "Salesforce", + "Slack", + "Stripe", + "Supabase", + "Vercel", + "WordPress", + "Xero", + "Zendesk", + "PayPal", + "Sentry", + "Netlify", + "Hugging Face", + "Square", + "Clockwise", + "Jotform", + "Honeycomb", + "Zoho Mail", + "Sharesight", + "Instagram" + ], + "title": "OAuthServerName" + }, + "OneDriveData-Input": { + "properties": { + "root": { + "items": { + "$ref": "#/components/schemas/OneDriveFolder-Input" + }, + "type": "array", + "title": "Root", + "description": "List containing root folder (should contain only one element)" + } + }, + "type": "object", + "required": [ + "root" + ], + "title": "OneDriveData", + "description": "Complete OneDrive sandbox data structure" + }, + "OneDriveData-Output": { + "properties": { + "root": { + "items": { + "$ref": "#/components/schemas/OneDriveFolder-Output" + }, + "type": "array", + "title": "Root", + "description": "List containing root folder (should contain only one element)" + } + }, + "type": "object", + "required": [ + "root" + ], + "title": "OneDriveData", + "description": "Complete OneDrive sandbox data structure" + }, + "OneDriveFile": { "properties": { "id": { "anyOf": [ @@ -21354,14 +21189,14 @@ } ], "title": "Id", - "description": "Contact ID (read-only, set by Resend)" + "description": "File ID (read-only, set by OneDrive)" }, - "email": { + "name": { "type": "string", - "title": "Email", - "description": "The email address of the contact." 
+ "title": "Name", + "description": "File name" }, - "first_name": { + "content": { "anyOf": [ { "type": "string" @@ -21370,10 +21205,20 @@ "type": "null" } ], - "title": "First Name", - "description": "Contact first name" - }, - "last_name": { + "title": "Content", + "description": "File content (text files only)" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "OneDriveFile", + "description": "OneDrive File object" + }, + "OneDriveFolder-Input": { + "properties": { + "id": { "anyOf": [ { "type": "string" @@ -21382,64 +21227,76 @@ "type": "null" } ], - "title": "Last Name", - "description": "Contact last name" + "title": "Id", + "description": "Folder ID (read-only, set by OneDrive)" }, - "unsubscribed": { + "name": { + "type": "string", + "title": "Name", + "description": "Folder name" + }, + "folders": { "anyOf": [ { - "type": "boolean" + "items": { + "$ref": "#/components/schemas/OneDriveFolder-Input" + }, + "type": "array" }, { "type": "null" } ], - "title": "Unsubscribed", - "description": "Whether contact is unsubscribed from all Broadcasts", - "default": false + "title": "Folders", + "description": "List of subfolders" }, - "created_at": { + "files": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/OneDriveFile" + }, + "type": "array" }, { "type": "null" } ], - "title": "Created At", - "description": "Creation timestamp" + "title": "Files", + "description": "List of files in the folder" } }, "type": "object", "required": [ - "email" + "name" ], - "title": "ResendContact", - "description": "Resend Contact object - standalone, independent of segments" + "title": "OneDriveFolder", + "description": "OneDrive Folder object" }, - "ResendData-Input": { + "OneDriveFolder-Output": { "properties": { - "emails": { + "id": { "anyOf": [ { - "items": { - "$ref": "#/components/schemas/ResendEmail" - }, - "type": "array" + "type": "string" }, { "type": "null" } - ], - "title": "Emails", - "description": "List of standalone transactional emails" + ], + "title": "Id", + "description": "Folder ID (read-only, set by OneDrive)" }, - "contacts": { + "name": { + "type": "string", + "title": "Name", + "description": "Folder name" + }, + "folders": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/ResendContact" + "$ref": "#/components/schemas/OneDriveFolder-Output" }, "type": "array" }, @@ -21447,14 +21304,14 @@ "type": "null" } ], - "title": "Contacts", - "description": "List of standalone contacts" + "title": "Folders", + "description": "List of subfolders" }, - "segments": { + "files": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/ResendSegment" + "$ref": "#/components/schemas/OneDriveFile" }, "type": "array" }, @@ -21462,75 +21319,61 @@ "type": "null" } ], - "title": "Segments", - "description": "List of segments with nested broadcasts" + "title": "Files", + "description": "List of files in the folder" } }, "type": "object", - "title": "ResendData", - "description": "Complete Resend sandbox data structure.\n\nMixed structure:\n- Segments contain nested Broadcasts (broadcasts MUST have a segment)\n- Emails are standalone (transactional)\n- Contacts are standalone (can be optionally linked to segments)" - }, - "ResendData-Output": { - "additionalProperties": true, - "type": "object" + "required": [ + "name" + ], + "title": "OneDriveFolder", + "description": "OneDrive Folder object" }, - "ResendEmail": { + "Opportunity": { "properties": { - "id": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": 
"Id", - "description": "Email ID (read-only, set by Resend)" - }, - "from": { + "name": { "type": "string", - "title": "From", - "description": "Sender email address" + "maxLength": 120, + "minLength": 1, + "title": "Name" }, - "to": { - "items": { - "type": "string" - }, - "type": "array", - "title": "To", - "description": "Recipient email addresses" + "stage": { + "type": "string", + "title": "Stage", + "description": "e.g., Prospecting, Qualification, Closed Won" }, - "subject": { + "close_date": { "type": "string", - "title": "Subject", - "description": "Email subject" + "title": "Close Date", + "description": "Expected close date (YYYY-MM-DD)" }, - "html": { + "amount": { "anyOf": [ { - "type": "string" + "type": "number", + "minimum": 0 }, { "type": "null" } ], - "title": "Html", - "description": "HTML content of the email" + "title": "Amount" }, - "text": { + "probability": { "anyOf": [ { - "type": "string" + "type": "integer", + "maximum": 100, + "minimum": 0 }, { "type": "null" } ], - "title": "Text", - "description": "Plain text content of the email" + "title": "Probability" }, - "reply_to": { + "account_id": { "anyOf": [ { "type": "string" @@ -21539,115 +21382,74 @@ "type": "null" } ], - "title": "Reply To", - "description": "Reply-to email address" + "title": "Account Id", + "description": "Related account ID" }, - "cc": { + "type": { "anyOf": [ { - "items": { - "type": "string" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Cc", - "description": "CC email addresses" + "title": "Type", + "description": "e.g., New Business, Renewal" }, - "bcc": { + "source": { "anyOf": [ { - "items": { - "type": "string" - }, - "type": "array" + "type": "string" }, { "type": "null" } ], - "title": "Bcc", - "description": "BCC email addresses" + "title": "Source", + "description": "Lead source" }, - "tags": { + "next_step": { "anyOf": [ { - "items": { - "additionalProperties": { - "type": "string" - }, - "type": "object" - }, - "type": "array" + "type": "string", + "maxLength": 255 }, { "type": "null" } ], - "title": "Tags", - "description": "Email tags for categorization" + "title": "Next Step" }, - "created_at": { + "description": { "anyOf": [ { - "type": "string" + "type": "string", + "maxLength": 32000 }, { "type": "null" } ], - "title": "Created At", - "description": "Creation timestamp" + "title": "Description" } }, "type": "object", "required": [ - "from", - "to", - "subject" + "name", + "stage", + "close_date" ], - "title": "ResendEmail", - "description": "Resend Email object - standalone transactional email" + "title": "Opportunity", + "description": "Sales deal/potential revenue" }, - "ResendSegment": { + "OutlookCalendarData": { "properties": { - "id": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Id", - "description": "Segment ID (read-only, set by Resend)" - }, - "name": { - "type": "string", - "title": "Name", - "description": "Segment name" - }, - "created_at": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Created At", - "description": "Creation timestamp" - }, - "broadcasts": { + "calendar_events": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/ResendBroadcast" + "$ref": "#/components/schemas/OutlookCalendarEvent" }, "type": "array" }, @@ -21655,53 +21457,17 @@ "type": "null" } ], - "title": "Broadcasts", - "description": "Broadcasts sent to this segment" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "ResendSegment", - "description": 
"Resend Segment object - parent containing broadcasts that target this segment" - }, - "ResetSandboxResponse": { - "properties": { - "sandbox_id": { - "type": "string", - "title": "Sandbox Id", - "description": "Sandbox identifier" - }, - "status": { - "$ref": "#/components/schemas/SandboxStatus", - "description": "Current status after reset" - }, - "message": { - "type": "string", - "title": "Message", - "description": "Reset result message" + "title": "Calendar Events", + "description": "List of calendar events" } }, "type": "object", - "required": [ - "sandbox_id", - "status", - "message" - ], - "title": "ResetSandboxResponse", - "description": "Response model for sandbox reset" + "title": "OutlookCalendarData", + "description": "Complete Outlook Calendar sandbox data structure" }, - "SalesforceAccount": { + "OutlookCalendarEvent": { "properties": { - "Name": { - "type": "string", - "maxLength": 255, - "minLength": 1, - "title": "Name", - "description": "Account name (required)" - }, - "Industry": { + "event_id": { "anyOf": [ { "type": "string" @@ -21710,152 +21476,186 @@ "type": "null" } ], - "title": "Industry", - "description": "Industry (e.g., Technology, Healthcare, Finance)" + "title": "Event Id", + "description": "Event ID (read-only, set by Outlook)" + }, + "title": { + "type": "string", + "title": "Title", + "description": "Event subject/title" }, - "Type": { + "start_time": { "anyOf": [ { - "type": "string" + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Type", - "description": "Account type (e.g., Customer, Prospect, Partner)" + "title": "Start Time", + "description": "Start time with dateTime and timeZone" }, - "Phone": { + "end_time": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Phone", - "description": "Phone number" + "title": "End Time", + "description": "End time with dateTime and timeZone" }, - "Website": { + "event_location": { "anyOf": [ { - "type": "string", - "maxLength": 255 + "type": "string" }, { "type": "null" } ], - "title": "Website", - "description": "Website URL" + "title": "Event Location", + "description": "Location name" }, - "AnnualRevenue": { + "event_attendees": { "anyOf": [ { - "type": "number", - "minimum": 0 + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Annualrevenue", - "description": "Annual revenue" - }, - "NumberOfEmployees": { + "title": "Event Attendees", + "description": "List of attendee email addresses (e.g., ['user@example.com'])" + } + }, + "type": "object", + "required": [ + "title" + ], + "title": "OutlookCalendarEvent", + "description": "Outlook Calendar Event object" + }, + "OutlookMailData": { + "properties": { + "messages": { "anyOf": [ { - "type": "integer", - "minimum": 0 + "items": { + "$ref": "#/components/schemas/OutlookMailMessage" + }, + "type": "array" }, { "type": "null" } ], - "title": "Numberofemployees", - "description": "Number of employees" - }, - "BillingStreet": { + "title": "Messages", + "description": "List of mail messages" + } + }, + "type": "object", + "title": "OutlookMailData", + "description": "Complete Outlook Mail sandbox data structure" + }, + "OutlookMailMessage": { + "properties": { + "id": { "anyOf": [ { - "type": "string", - "maxLength": 255 + "type": "string" }, { "type": "null" } ], - "title": "Billingstreet", - "description": "Billing street address" + "title": "Id", + "description": "Message ID (read-only, set by 
Outlook)" + }, + "title": { + "type": "string", + "title": "Title", + "description": "Email subject" }, - "BillingCity": { + "content": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Billingcity", - "description": "Billing city" + "title": "Content", + "description": "Email body with content and contentType" }, - "BillingState": { + "from": { "anyOf": [ { - "type": "string", - "maxLength": 80 + "additionalProperties": true, + "type": "object" }, { "type": "null" } ], - "title": "Billingstate", - "description": "Billing state/province" + "title": "From", + "description": "From address with emailAddress object" }, - "BillingPostalCode": { + "to_addresses": { "anyOf": [ { - "type": "string", - "maxLength": 20 + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Billingpostalcode", - "description": "Billing postal code" + "title": "To Addresses", + "description": "List of recipient email addresses" }, - "BillingCountry": { + "cc_addresses": { "anyOf": [ { - "type": "string", - "maxLength": 80 + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Billingcountry", - "description": "Billing country" + "title": "Cc Addresses", + "description": "List of CC recipient email addresses" }, - "Description": { + "is_read": { "anyOf": [ { - "type": "string", - "maxLength": 32000 + "type": "boolean" }, { "type": "null" } ], - "title": "Description", - "description": "Account description" + "title": "Is Read", + "description": "Whether the message has been read", + "default": false }, - "Rating": { + "received_date_time": { "anyOf": [ { "type": "string" @@ -21864,170 +21664,187 @@ "type": "null" } ], - "title": "Rating", - "description": "Account rating (e.g., Hot, Warm, Cold)" + "title": "Received Date Time", + "description": "Date and time the message was received" } }, "type": "object", "required": [ - "Name" + "title" ], - "title": "SalesforceAccount", - "description": "Salesforce Account object with comprehensive field support" + "title": "OutlookMailMessage", + "description": "Outlook Mail Message object" }, - "SalesforceCampaign": { + "Override": { "properties": { - "Name": { + "date": { "type": "string", - "maxLength": 80, - "minLength": 1, - "title": "Name", - "description": "Campaign name (required)" - }, - "Type": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Type", - "description": "Campaign type (e.g., Email, Webinar, Conference)" + "pattern": "^\\d{4}-\\d{2}-\\d{2}$", + "title": "Date" }, - "Status": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Status", - "description": "Campaign status (e.g., Planned, In Progress, Completed)", - "default": "Planned" + "startTime": { + "type": "string", + "pattern": "^\\d{2}:\\d{2}$", + "title": "Starttime" }, - "StartDate": { + "endTime": { + "type": "string", + "pattern": "^\\d{2}:\\d{2}$", + "title": "Endtime" + } + }, + "type": "object", + "required": [ + "date", + "startTime", + "endTime" + ], + "title": "Override", + "description": "Represents a date-specific override to the regular schedule.\n\nAttributes:\n date: Date in YYYY-MM-DD format\n startTime: Start time in HH:MM format (24-hour)\n endTime: End time in HH:MM format (24-hour)" + }, + "QuickBooksData": { + "properties": { + "accounts": { "anyOf": [ { - "type": "string" + "items": { + "additionalProperties": true, + "type": "object" + }, + "type": 
"array" }, { "type": "null" } ], - "title": "Startdate", - "description": "Start date (format: YYYY-MM-DD)" + "title": "Accounts", + "description": "List of accounts (input: minimal, output: full API objects)" }, - "EndDate": { + "customers": { "anyOf": [ { - "type": "string" + "items": { + "additionalProperties": true, + "type": "object" + }, + "type": "array" }, { "type": "null" } ], - "title": "Enddate", - "description": "End date (format: YYYY-MM-DD)" + "title": "Customers", + "description": "List of customers (input: minimal, output: full API objects)" }, - "ExpectedRevenue": { + "vendors": { "anyOf": [ { - "type": "number", - "minimum": 0 + "items": { + "additionalProperties": true, + "type": "object" + }, + "type": "array" }, { "type": "null" } ], - "title": "Expectedrevenue", - "description": "Expected revenue" + "title": "Vendors", + "description": "List of vendors (input: minimal, output: full API objects)" }, - "BudgetedCost": { + "invoices": { "anyOf": [ { - "type": "number", - "minimum": 0 + "items": { + "additionalProperties": true, + "type": "object" + }, + "type": "array" }, { "type": "null" } ], - "title": "Budgetedcost", - "description": "Budgeted cost" + "title": "Invoices", + "description": "List of invoices (input: minimal, output: full API objects)" }, - "ActualCost": { + "payments": { "anyOf": [ { - "type": "number", - "minimum": 0 + "items": { + "additionalProperties": true, + "type": "object" + }, + "type": "array" }, { "type": "null" } ], - "title": "Actualcost", - "description": "Actual cost" + "title": "Payments", + "description": "List of payments (input: minimal, output: full API objects)" + } + }, + "additionalProperties": true, + "type": "object", + "title": "QuickBooksData", + "description": "Complete QuickBooks sandbox data structure\n\nNote: For dump operations, accounts/customers/etc. will contain full API response objects\nwith additional fields beyond the minimal input schema. This is expected behavior." 
+ }, + "RawActionsResponse": { + "properties": { + "instanceId": { + "type": "string", + "title": "Instanceid", + "description": "The instance ID" }, - "Description": { - "anyOf": [ - { - "type": "string", - "maxLength": 32000 - }, - { - "type": "null" - } - ], - "title": "Description", - "description": "Campaign description" + "serverName": { + "type": "string", + "title": "Servername", + "description": "The requested server name" }, - "IsActive": { - "anyOf": [ - { - "type": "boolean" - }, - { - "type": "null" - } - ], - "title": "Isactive", - "description": "Is campaign active" + "result": { + "additionalProperties": true, + "type": "object", + "title": "Result", + "description": "Map of categories to raw actions" } }, "type": "object", "required": [ - "Name" + "instanceId", + "serverName" ], - "title": "SalesforceCampaign", - "description": "Salesforce Campaign object" + "title": "RawActionsResponse" }, - "SalesforceCase": { + "ReleaseSandboxResponse": { "properties": { - "Subject": { + "sandbox_id": { "type": "string", - "maxLength": 255, - "minLength": 1, - "title": "Subject", - "description": "Case subject (required)" + "title": "Sandbox Id", + "description": "Released sandbox identifier" }, - "Status": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Status", - "description": "Case status (e.g., New, Working, Closed)", - "default": "New" + "status": { + "$ref": "#/components/schemas/SandboxStatus", + "description": "Current status after release (should be idle)" }, - "Priority": { + "message": { + "type": "string", + "title": "Message", + "description": "Release confirmation message" + } + }, + "type": "object", + "required": [ + "sandbox_id", + "status", + "message" + ], + "title": "ReleaseSandboxResponse", + "description": "Response model for sandbox release (delete)" + }, + "ResendBroadcast": { + "properties": { + "id": { "anyOf": [ { "type": "string" @@ -22036,11 +21853,20 @@ "type": "null" } ], - "title": "Priority", - "description": "Priority (e.g., High, Medium, Low)", - "default": "Medium" + "title": "Id", + "description": "Broadcast ID (read-only, set by Resend)" + }, + "from": { + "type": "string", + "title": "From", + "description": "Sender email with optional display name" + }, + "subject": { + "type": "string", + "title": "Subject", + "description": "Email subject" }, - "Origin": { + "name": { "anyOf": [ { "type": "string" @@ -22049,10 +21875,10 @@ "type": "null" } ], - "title": "Origin", - "description": "Case origin (e.g., Phone, Email, Web)" + "title": "Name", + "description": "Broadcast name for internal reference" }, - "Type": { + "html": { "anyOf": [ { "type": "string" @@ -22061,10 +21887,10 @@ "type": "null" } ], - "title": "Type", - "description": "Case type (e.g., Problem, Question, Feature Request)" + "title": "Html", + "description": "HTML content" }, - "Reason": { + "text": { "anyOf": [ { "type": "string" @@ -22073,10 +21899,10 @@ "type": "null" } ], - "title": "Reason", - "description": "Case reason" + "title": "Text", + "description": "Plain text content" }, - "AccountId": { + "reply_to": { "anyOf": [ { "type": "string" @@ -22085,10 +21911,10 @@ "type": "null" } ], - "title": "Accountid", - "description": "Related account ID (18-char Salesforce ID)" + "title": "Reply To", + "description": "Reply-to email address" }, - "ContactId": { + "status": { "anyOf": [ { "type": "string" @@ -22097,157 +21923,86 @@ "type": "null" } ], - "title": "Contactid", - "description": "Related contact ID (18-char Salesforce ID)" + "title": 
"Status", + "description": "Broadcast status (draft, sent, etc.)" }, - "Description": { + "created_at": { "anyOf": [ { - "type": "string", - "maxLength": 32000 + "type": "string" }, { "type": "null" } ], - "title": "Description", - "description": "Case description" + "title": "Created At", + "description": "Creation timestamp" }, - "SuppliedEmail": { + "scheduled_at": { "anyOf": [ { - "type": "string", - "format": "email" + "type": "string" }, { "type": "null" } ], - "title": "Suppliedemail", - "description": "Supplied email address" + "title": "Scheduled At", + "description": "Scheduled send timestamp" }, - "SuppliedName": { + "sent_at": { "anyOf": [ { - "type": "string", - "maxLength": 80 + "type": "string" }, { "type": "null" } ], - "title": "Suppliedname", - "description": "Supplied name" + "title": "Sent At", + "description": "Actual send timestamp" }, - "SuppliedPhone": { + "topic_id": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "type": "string" }, { "type": "null" } ], - "title": "Suppliedphone", - "description": "Supplied phone number" + "title": "Topic Id", + "description": "Topic ID for scoping" } }, "type": "object", "required": [ - "Subject" + "from", + "subject" ], - "title": "SalesforceCase", - "description": "Salesforce Case object with comprehensive field support" + "title": "ResendBroadcast", + "description": "Resend Broadcast object - child of Segment, requires parent segment" }, - "SalesforceContact": { + "ResendContact": { "properties": { - "FirstName": { + "id": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "type": "string" }, { "type": "null" } ], - "title": "Firstname", - "description": "First name" + "title": "Id", + "description": "Contact ID (read-only, set by Resend)" }, - "LastName": { + "email": { "type": "string", - "maxLength": 80, - "minLength": 1, - "title": "Lastname", - "description": "Last name (required)" - }, - "Email": { - "anyOf": [ - { - "type": "string", - "format": "email" - }, - { - "type": "null" - } - ], "title": "Email", - "description": "Email address" - }, - "Phone": { - "anyOf": [ - { - "type": "string", - "maxLength": 40 - }, - { - "type": "null" - } - ], - "title": "Phone", - "description": "Phone number" - }, - "MobilePhone": { - "anyOf": [ - { - "type": "string", - "maxLength": 40 - }, - { - "type": "null" - } - ], - "title": "Mobilephone", - "description": "Mobile phone number" - }, - "Title": { - "anyOf": [ - { - "type": "string", - "maxLength": 128 - }, - { - "type": "null" - } - ], - "title": "Title", - "description": "Job title" - }, - "Department": { - "anyOf": [ - { - "type": "string", - "maxLength": 80 - }, - { - "type": "null" - } - ], - "title": "Department", - "description": "Department" + "description": "The email address of the contact." 
}, - "AccountId": { + "first_name": { "anyOf": [ { "type": "string" @@ -22256,88 +22011,35 @@ "type": "null" } ], - "title": "Accountid", - "description": "Related account ID (18-char Salesforce ID)" - }, - "MailingStreet": { - "anyOf": [ - { - "type": "string", - "maxLength": 255 - }, - { - "type": "null" - } - ], - "title": "Mailingstreet", - "description": "Mailing street address" - }, - "MailingCity": { - "anyOf": [ - { - "type": "string", - "maxLength": 40 - }, - { - "type": "null" - } - ], - "title": "Mailingcity", - "description": "Mailing city" - }, - "MailingState": { - "anyOf": [ - { - "type": "string", - "maxLength": 80 - }, - { - "type": "null" - } - ], - "title": "Mailingstate", - "description": "Mailing state/province" - }, - "MailingPostalCode": { - "anyOf": [ - { - "type": "string", - "maxLength": 20 - }, - { - "type": "null" - } - ], - "title": "Mailingpostalcode", - "description": "Mailing postal code" + "title": "First Name", + "description": "Contact first name" }, - "MailingCountry": { + "last_name": { "anyOf": [ { - "type": "string", - "maxLength": 80 + "type": "string" }, { "type": "null" } ], - "title": "Mailingcountry", - "description": "Mailing country" + "title": "Last Name", + "description": "Contact last name" }, - "Description": { + "unsubscribed": { "anyOf": [ { - "type": "string", - "maxLength": 32000 + "type": "boolean" }, { "type": "null" } ], - "title": "Description", - "description": "Contact description" + "title": "Unsubscribed", + "description": "Whether contact is unsubscribed from all Broadcasts", + "default": false }, - "LeadSource": { + "created_at": { "anyOf": [ { "type": "string" @@ -22346,24 +22048,24 @@ "type": "null" } ], - "title": "Leadsource", - "description": "Lead source (e.g., Web, Phone Inquiry, Partner Referral)" + "title": "Created At", + "description": "Creation timestamp" } }, "type": "object", "required": [ - "LastName" + "email" ], - "title": "SalesforceContact", - "description": "Salesforce Contact object with comprehensive field support" + "title": "ResendContact", + "description": "Resend Contact object - standalone, independent of segments" }, - "SalesforceData": { + "ResendData-Input": { "properties": { - "accounts": { + "emails": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/SalesforceAccount" + "$ref": "#/components/schemas/ResendEmail" }, "type": "array" }, @@ -22371,14 +22073,14 @@ "type": "null" } ], - "title": "Accounts", - "description": "List of Salesforce accounts" + "title": "Emails", + "description": "List of standalone transactional emails" }, "contacts": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/SalesforceContact" + "$ref": "#/components/schemas/ResendContact" }, "type": "array" }, @@ -22387,58 +22089,13 @@ } ], "title": "Contacts", - "description": "List of Salesforce contacts" - }, - "opportunities": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/SalesforceOpportunity" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Opportunities", - "description": "List of Salesforce opportunities" - }, - "leads": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/SalesforceLead" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Leads", - "description": "List of Salesforce leads" - }, - "cases": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/SalesforceCase" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Cases", - "description": "List of Salesforce cases" + "description": "List of 
standalone contacts" }, - "campaigns": { + "segments": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/SalesforceCampaign" + "$ref": "#/components/schemas/ResendSegment" }, "type": "array" }, @@ -22446,121 +22103,135 @@ "type": "null" } ], - "title": "Campaigns", - "description": "List of Salesforce campaigns" + "title": "Segments", + "description": "List of segments with nested broadcasts" } }, "type": "object", - "title": "SalesforceData", - "description": "Complete Salesforce sandbox data structure with all supported objects" + "title": "ResendData", + "description": "Complete Resend sandbox data structure.\n\nMixed structure:\n- Segments contain nested Broadcasts (broadcasts MUST have a segment)\n- Emails are standalone (transactional)\n- Contacts are standalone (can be optionally linked to segments)" + }, + "ResendData-Output": { + "additionalProperties": true, + "type": "object" }, - "SalesforceLead": { + "ResendEmail": { "properties": { - "FirstName": { + "id": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "type": "string" }, { "type": "null" } ], - "title": "Firstname", - "description": "First name" + "title": "Id", + "description": "Email ID (read-only, set by Resend)" }, - "LastName": { + "from": { "type": "string", - "maxLength": 80, - "minLength": 1, - "title": "Lastname", - "description": "Last name (required)" + "title": "From", + "description": "Sender email address" + }, + "to": { + "items": { + "type": "string" + }, + "type": "array", + "title": "To", + "description": "Recipient email addresses" }, - "Company": { + "subject": { "type": "string", - "maxLength": 255, - "minLength": 1, - "title": "Company", - "description": "Company name (required)" + "title": "Subject", + "description": "Email subject" }, - "Email": { + "html": { "anyOf": [ { - "type": "string", - "format": "email" + "type": "string" }, { "type": "null" } ], - "title": "Email", - "description": "Email address" + "title": "Html", + "description": "HTML content of the email" }, - "Phone": { + "text": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "type": "string" }, { "type": "null" } ], - "title": "Phone", - "description": "Phone number" + "title": "Text", + "description": "Plain text content of the email" }, - "MobilePhone": { + "reply_to": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "type": "string" }, { "type": "null" } ], - "title": "Mobilephone", - "description": "Mobile phone number" + "title": "Reply To", + "description": "Reply-to email address" }, - "Title": { + "cc": { "anyOf": [ { - "type": "string", - "maxLength": 128 + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Title", - "description": "Job title" + "title": "Cc", + "description": "CC email addresses" }, - "Status": { + "bcc": { "anyOf": [ { - "type": "string" + "items": { + "type": "string" + }, + "type": "array" }, { "type": "null" } ], - "title": "Status", - "description": "Lead status (e.g., Open, Contacted, Qualified)", - "default": "Open - Not Contacted" + "title": "Bcc", + "description": "BCC email addresses" }, - "LeadSource": { + "tags": { "anyOf": [ { - "type": "string" + "items": { + "additionalProperties": { + "type": "string" + }, + "type": "object" + }, + "type": "array" }, { "type": "null" } ], - "title": "Leadsource", - "description": "Lead source (e.g., Web, Phone Inquiry, Partner Referral)" + "title": "Tags", + "description": "Email tags for categorization" }, - "Industry": { + "created_at": { "anyOf": [ { "type": "string" @@ -22569,10 +22240,22 
@@ "type": "null" } ], - "title": "Industry", - "description": "Industry (e.g., Technology, Healthcare, Finance)" - }, - "Rating": { + "title": "Created At", + "description": "Creation timestamp" + } + }, + "type": "object", + "required": [ + "from", + "to", + "subject" + ], + "title": "ResendEmail", + "description": "Resend Email object - standalone transactional email" + }, + "ResendSegment": { + "properties": { + "id": { "anyOf": [ { "type": "string" @@ -22581,252 +22264,256 @@ "type": "null" } ], - "title": "Rating", - "description": "Lead rating (e.g., Hot, Warm, Cold)" + "title": "Id", + "description": "Segment ID (read-only, set by Resend)" }, - "Street": { - "anyOf": [ - { - "type": "string", - "maxLength": 255 - }, - { - "type": "null" - } - ], - "title": "Street", - "description": "Street address" + "name": { + "type": "string", + "title": "Name", + "description": "Segment name" }, - "City": { + "created_at": { "anyOf": [ { - "type": "string", - "maxLength": 40 + "type": "string" }, { "type": "null" } ], - "title": "City", - "description": "City" + "title": "Created At", + "description": "Creation timestamp" }, - "State": { + "broadcasts": { "anyOf": [ { - "type": "string", - "maxLength": 80 + "items": { + "$ref": "#/components/schemas/ResendBroadcast" + }, + "type": "array" }, { "type": "null" } ], - "title": "State", - "description": "State/province" + "title": "Broadcasts", + "description": "Broadcasts sent to this segment" + } + }, + "type": "object", + "required": [ + "name" + ], + "title": "ResendSegment", + "description": "Resend Segment object - parent containing broadcasts that target this segment" + }, + "ResetSandboxResponse": { + "properties": { + "sandbox_id": { + "type": "string", + "title": "Sandbox Id", + "description": "Sandbox identifier" + }, + "status": { + "$ref": "#/components/schemas/SandboxStatus", + "description": "Current status after reset" }, - "PostalCode": { + "message": { + "type": "string", + "title": "Message", + "description": "Reset result message" + } + }, + "type": "object", + "required": [ + "sandbox_id", + "status", + "message" + ], + "title": "ResetSandboxResponse", + "description": "Response model for sandbox reset" + }, + "SalesforceData-Input": { + "properties": { + "accounts": { "anyOf": [ { - "type": "string", - "maxLength": 20 + "items": { + "$ref": "#/components/schemas/Account" + }, + "type": "array" }, { "type": "null" } ], - "title": "Postalcode", - "description": "Postal code" + "title": "Accounts" }, - "Country": { + "contacts": { "anyOf": [ { - "type": "string", - "maxLength": 80 + "items": { + "$ref": "#/components/schemas/Contact" + }, + "type": "array" }, { "type": "null" } ], - "title": "Country", - "description": "Country" + "title": "Contacts" }, - "Website": { + "opportunities": { "anyOf": [ { - "type": "string", - "maxLength": 255 + "items": { + "$ref": "#/components/schemas/Opportunity" + }, + "type": "array" }, { "type": "null" } ], - "title": "Website", - "description": "Website URL" + "title": "Opportunities" }, - "Description": { + "leads": { "anyOf": [ { - "type": "string", - "maxLength": 32000 + "items": { + "$ref": "#/components/schemas/Lead" + }, + "type": "array" }, { "type": "null" } ], - "title": "Description", - "description": "Lead description" + "title": "Leads" }, - "NumberOfEmployees": { + "cases": { "anyOf": [ { - "type": "integer", - "minimum": 0 + "items": { + "$ref": "#/components/schemas/Case" + }, + "type": "array" }, { "type": "null" } ], - "title": "Numberofemployees", - "description": "Number 
of employees" + "title": "Cases" }, - "AnnualRevenue": { + "campaigns": { "anyOf": [ { - "type": "number", - "minimum": 0 + "items": { + "$ref": "#/components/schemas/Campaign" + }, + "type": "array" }, { "type": "null" } ], - "title": "Annualrevenue", - "description": "Annual revenue" + "title": "Campaigns" } }, "type": "object", - "required": [ - "LastName", - "Company" - ], - "title": "SalesforceLead", - "description": "Salesforce Lead object with comprehensive field support" + "title": "SalesforceData", + "description": "Complete Salesforce sandbox data" }, - "SalesforceOpportunity": { + "SalesforceData-Output": { "properties": { - "Name": { - "type": "string", - "maxLength": 120, - "minLength": 1, - "title": "Name", - "description": "Opportunity name (required)" - }, - "StageName": { - "type": "string", - "title": "Stagename", - "description": "Sales stage (required, e.g., Prospecting, Qualification, Closed Won)" - }, - "CloseDate": { - "type": "string", - "title": "Closedate", - "description": "Close date (required, format: YYYY-MM-DD)" - }, - "Amount": { - "anyOf": [ - { - "type": "number", - "minimum": 0 - }, - { - "type": "null" - } - ], - "title": "Amount", - "description": "Opportunity amount" - }, - "Probability": { + "accounts": { "anyOf": [ { - "type": "integer", - "maximum": 100, - "minimum": 0 + "items": { + "$ref": "#/components/schemas/Account" + }, + "type": "array" }, { "type": "null" } ], - "title": "Probability", - "description": "Probability of closing (0-100)" + "title": "Accounts" }, - "AccountId": { + "contacts": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/Contact" + }, + "type": "array" }, { "type": "null" } ], - "title": "Accountid", - "description": "Related account ID (18-char Salesforce ID)" + "title": "Contacts" }, - "Type": { + "opportunities": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/Opportunity" + }, + "type": "array" }, { "type": "null" } ], - "title": "Type", - "description": "Opportunity type (e.g., New Business, Existing Business)" + "title": "Opportunities" }, - "LeadSource": { + "leads": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/Lead" + }, + "type": "array" }, { "type": "null" } ], - "title": "Leadsource", - "description": "Lead source (e.g., Web, Phone Inquiry, Partner Referral)" + "title": "Leads" }, - "NextStep": { + "cases": { "anyOf": [ { - "type": "string", - "maxLength": 255 + "items": { + "$ref": "#/components/schemas/Case" + }, + "type": "array" }, { "type": "null" } ], - "title": "Nextstep", - "description": "Next step in the sales process" + "title": "Cases" }, - "Description": { + "campaigns": { "anyOf": [ { - "type": "string", - "maxLength": 32000 + "items": { + "$ref": "#/components/schemas/Campaign" + }, + "type": "array" }, { "type": "null" } ], - "title": "Description", - "description": "Opportunity description" + "title": "Campaigns" } }, "type": "object", - "required": [ - "Name", - "StageName", - "CloseDate" - ], - "title": "SalesforceOpportunity", - "description": "Salesforce Opportunity object with comprehensive field support" + "title": "SalesforceData", + "description": "Complete Salesforce sandbox data" }, "SandboxInfo": { "properties": { @@ -23008,65 +22695,6 @@ ], "title": "SetUserAuthRequest" }, - "SharesightOAuthErrorResponse": { - "properties": { - "error": { - "type": "string", - "title": "Error", - "description": "Error message from the OAuth process" - }, - "message": { - "anyOf": [ - { - "type": "string" - 
}, - { - "type": "null" - } - ], - "title": "Message", - "description": "Additional error message" - } - }, - "type": "object", - "required": [ - "error" - ], - "title": "SharesightOAuthErrorResponse" - }, - "SharesightOAuthSuccessResponse": { - "properties": { - "status": { - "type": "string", - "title": "Status", - "description": "Status of the OAuth process", - "default": "success" - }, - "message": { - "type": "string", - "title": "Message", - "description": "Success message" - }, - "data": { - "anyOf": [ - { - "additionalProperties": true, - "type": "object" - }, - { - "type": "null" - } - ], - "title": "Data", - "description": "Additional data related to the response" - } - }, - "type": "object", - "required": [ - "message" - ], - "title": "SharesightOAuthSuccessResponse" - }, "ShopifyAddress": { "properties": { "address1": { @@ -23507,7 +23135,7 @@ "description": "Whether the column can be NULL", "default": true }, - "default": { + "default_value": { "anyOf": [ { "type": "string" @@ -23516,7 +23144,7 @@ "type": "null" } ], - "title": "Default", + "title": "Default Value", "description": "Default value for the column" } }, @@ -23554,7 +23182,7 @@ "title": "Name", "description": "Database name, cannot be 'SNOWFLAKE' or 'USER$...' since they are default databases" }, - "comment": { + "description": { "anyOf": [ { "type": "string" @@ -23563,8 +23191,8 @@ "type": "null" } ], - "title": "Comment", - "description": "Database comment/description" + "title": "Description", + "description": "Database description" }, "schemas": { "anyOf": [ @@ -23637,242 +23265,24 @@ }, "SnowflakeTable": { "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Table name" - }, - "columns": { - "items": { - "$ref": "#/components/schemas/SnowflakeColumn" - }, - "type": "array", - "title": "Columns", - "description": "List of columns in the table" - }, - "rows": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/SnowflakeRow" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Rows", - "description": "Initial data rows for the table" - } - }, - "type": "object", - "required": [ - "name", - "columns" - ], - "title": "SnowflakeTable", - "description": "Snowflake Table object" - }, - "StatusResponse": { - "properties": { - "success": { - "type": "boolean", - "title": "Success" - }, - "message": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Message" - } - }, - "type": "object", - "required": [ - "success" - ], - "title": "StatusResponse" - }, - "StorageBucket-Input": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Bucket name" - }, - "location": { - "type": "string", - "title": "Location", - "description": "Bucket location", - "default": "US" - }, - "storageClass": { - "type": "string", - "title": "Storageclass", - "description": "Storage class", - "default": "STANDARD" - }, - "versioning": { - "anyOf": [ - { - "$ref": "#/components/schemas/StorageBucketVersioning" - }, - { - "type": "null" - } - ], - "description": "Versioning configuration" - }, - "lifecycle": { - "anyOf": [ - { - "$ref": "#/components/schemas/StorageBucketLifecycle-Input" - }, - { - "type": "null" - } - ], - "description": "Lifecycle configuration" - }, - "timeCreated": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Timecreated", - "description": "Creation timestamp" - }, - "updated": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": 
"Updated", - "description": "Last update timestamp" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "StorageBucket", - "description": "Cloud Storage bucket object - matches API format" - }, - "StorageBucket-Output": { - "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Bucket name" - }, - "location": { - "type": "string", - "title": "Location", - "description": "Bucket location", - "default": "US" - }, - "storageClass": { - "type": "string", - "title": "Storageclass", - "description": "Storage class", - "default": "STANDARD" - }, - "versioning": { - "anyOf": [ - { - "$ref": "#/components/schemas/StorageBucketVersioning" - }, - { - "type": "null" - } - ], - "description": "Versioning configuration" - }, - "lifecycle": { - "anyOf": [ - { - "$ref": "#/components/schemas/StorageBucketLifecycle-Output" - }, - { - "type": "null" - } - ], - "description": "Lifecycle configuration" - }, - "timeCreated": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Timecreated", - "description": "Creation timestamp" - }, - "updated": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Updated", - "description": "Last update timestamp" - } - }, - "type": "object", - "required": [ - "name" - ], - "title": "StorageBucket", - "description": "Cloud Storage bucket object - matches API format" - }, - "StorageBucketLifecycle-Input": { - "properties": { - "rule": { - "anyOf": [ - { - "items": { - "$ref": "#/components/schemas/StorageBucketLifecycleRule" - }, - "type": "array" - }, - { - "type": "null" - } - ], - "title": "Rule", - "description": "Lifecycle rules" - } - }, - "type": "object", - "title": "StorageBucketLifecycle", - "description": "Lifecycle configuration for a bucket" - }, - "StorageBucketLifecycle-Output": { - "properties": { - "rule": { + "name": { + "type": "string", + "title": "Name", + "description": "Table name" + }, + "columns": { + "items": { + "$ref": "#/components/schemas/SnowflakeColumn" + }, + "type": "array", + "title": "Columns", + "description": "List of columns in the table" + }, + "rows": { "anyOf": [ { "items": { - "$ref": "#/components/schemas/StorageBucketLifecycleRule" + "$ref": "#/components/schemas/SnowflakeRow" }, "type": "array" }, @@ -23880,41 +23290,25 @@ "type": "null" } ], - "title": "Rule", - "description": "Lifecycle rules" - } - }, - "type": "object", - "title": "StorageBucketLifecycle", - "description": "Lifecycle configuration for a bucket" - }, - "StorageBucketLifecycleRule": { - "properties": { - "action": { - "$ref": "#/components/schemas/StorageBucketLifecycleRuleAction", - "description": "Rule action" - }, - "condition": { - "$ref": "#/components/schemas/StorageBucketLifecycleRuleCondition", - "description": "Rule condition" + "title": "Rows", + "description": "Initial data rows for the table" } }, "type": "object", "required": [ - "action", - "condition" + "name", + "columns" ], - "title": "StorageBucketLifecycleRule", - "description": "A lifecycle rule for a bucket" + "title": "SnowflakeTable", + "description": "Snowflake Table object" }, - "StorageBucketLifecycleRuleAction": { + "StatusResponse": { "properties": { - "type": { - "type": "string", - "title": "Type", - "description": "Action type (Delete, SetStorageClass)" + "success": { + "type": "boolean", + "title": "Success" }, - "storageClass": { + "message": { "anyOf": [ { "type": "string" @@ -23923,110 +23317,100 @@ "type": "null" } ], - "title": "Storageclass", - "description": 
"Storage class for SetStorageClass action" + "title": "Message" } }, "type": "object", "required": [ - "type" + "success" ], - "title": "StorageBucketLifecycleRuleAction", - "description": "Action for a lifecycle rule" + "title": "StatusResponse" }, - "StorageBucketLifecycleRuleCondition": { + "StorageBucket": { "properties": { - "age": { - "anyOf": [ - { - "type": "integer" - }, - { - "type": "null" - } - ], - "title": "Age", - "description": "Age in days" + "name": { + "type": "string", + "title": "Name", + "description": "Bucket name" + }, + "location": { + "type": "string", + "title": "Location", + "description": "Bucket location", + "default": "US" + }, + "storage_class": { + "type": "string", + "title": "Storage Class", + "description": "Storage class", + "default": "STANDARD" }, - "createdBefore": { + "versioning_enabled": { + "type": "boolean", + "title": "Versioning Enabled", + "description": "Whether versioning is enabled", + "default": false + }, + "lifecycle_rules": { "anyOf": [ { - "type": "string" + "items": { + "$ref": "#/components/schemas/LifecycleRule" + }, + "type": "array" }, { "type": "null" } ], - "title": "Createdbefore", - "description": "Created before date" + "title": "Lifecycle Rules", + "description": "Lifecycle rules" }, - "isLive": { + "created_at": { "anyOf": [ { - "type": "boolean" + "type": "string" }, { "type": "null" } ], - "title": "Islive", - "description": "Is live version" + "title": "Created At", + "description": "Creation timestamp" }, - "numNewerVersions": { + "updated_at": { "anyOf": [ { - "type": "integer" + "type": "string" }, { "type": "null" } ], - "title": "Numnewerversions", - "description": "Number of newer versions" - } - }, - "type": "object", - "title": "StorageBucketLifecycleRuleCondition", - "description": "Condition for a lifecycle rule" - }, - "StorageBucketVersioning": { - "properties": { - "enabled": { - "type": "boolean", - "title": "Enabled", - "description": "Whether versioning is enabled", - "default": false + "title": "Updated At", + "description": "Last update timestamp" } }, "type": "object", - "title": "StorageBucketVersioning", - "description": "Versioning configuration for a bucket" + "required": [ + "name" + ], + "title": "StorageBucket", + "description": "Cloud Storage bucket" }, "StorageObject": { "properties": { - "name": { - "type": "string", - "title": "Name", - "description": "Object name" - }, "bucket": { "type": "string", "title": "Bucket", "description": "Bucket name" }, - "size": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Size", - "description": "Object size in bytes" + "name": { + "type": "string", + "title": "Name", + "description": "Object name/path" }, - "contentType": { + "content_type": { "anyOf": [ { "type": "string" @@ -24035,22 +23419,10 @@ "type": "null" } ], - "title": "Contenttype", + "title": "Content Type", "description": "Content type" }, - "timeCreated": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Timecreated", - "description": "Creation timestamp" - }, - "updated": { + "size": { "anyOf": [ { "type": "string" @@ -24059,10 +23431,10 @@ "type": "null" } ], - "title": "Updated", - "description": "Last update timestamp" + "title": "Size", + "description": "Object size in bytes" }, - "generation": { + "content": { "anyOf": [ { "type": "string" @@ -24071,10 +23443,10 @@ "type": "null" } ], - "title": "Generation", - "description": "Object generation" + "title": "Content", + "description": "Object content (for 
initialization)" }, - "metageneration": { + "md5_hash": { "anyOf": [ { "type": "string" @@ -24083,10 +23455,10 @@ "type": "null" } ], - "title": "Metageneration", - "description": "Metadata generation" + "title": "Md5 Hash", + "description": "MD5 hash" }, - "md5Hash": { + "created_at": { "anyOf": [ { "type": "string" @@ -24095,10 +23467,10 @@ "type": "null" } ], - "title": "Md5Hash", - "description": "MD5 hash" + "title": "Created At", + "description": "Creation timestamp" }, - "content": { + "updated_at": { "anyOf": [ { "type": "string" @@ -24107,17 +23479,17 @@ "type": "null" } ], - "title": "Content", - "description": "Object content (text or base64 encoded)" + "title": "Updated At", + "description": "Last update timestamp" } }, "type": "object", "required": [ - "name", - "bucket" + "bucket", + "name" ], "title": "StorageObject", - "description": "Cloud Storage object (blob) - matches API format" + "description": "Cloud Storage object (blob)" }, "StrataAddServersRequest": { "properties": { @@ -24618,7 +23990,7 @@ }, "TeamsChannel": { "properties": { - "id": { + "channel_id": { "anyOf": [ { "type": "string" @@ -24627,15 +23999,15 @@ "type": "null" } ], - "title": "Id", + "title": "Channel Id", "description": "Channel ID (read-only, set by Teams)" }, - "displayName": { + "name": { "type": "string", - "title": "Displayname", + "title": "Name", "description": "Channel display name" }, - "description": { + "channel_description": { "anyOf": [ { "type": "string" @@ -24644,10 +24016,10 @@ "type": "null" } ], - "title": "Description", + "title": "Channel Description", "description": "Channel description" }, - "messages": { + "channel_messages": { "anyOf": [ { "items": { @@ -24659,20 +24031,20 @@ "type": "null" } ], - "title": "Messages", + "title": "Channel Messages", "description": "List of messages in the channel" } }, "type": "object", "required": [ - "displayName" + "name" ], "title": "TeamsChannel", "description": "Teams Channel object" }, "TeamsChat": { "properties": { - "id": { + "chat_id": { "anyOf": [ { "type": "string" @@ -24681,10 +24053,10 @@ "type": "null" } ], - "title": "Id", + "title": "Chat Id", "description": "Chat ID (read-only, set by Teams)" }, - "members": { + "chat_members": { "anyOf": [ { "items": { @@ -24696,10 +24068,10 @@ "type": "null" } ], - "title": "Members", + "title": "Chat Members", "description": "List of chat members" }, - "messages": { + "chat_messages": { "anyOf": [ { "items": { @@ -24711,7 +24083,7 @@ "type": "null" } ], - "title": "Messages", + "title": "Chat Messages", "description": "List of messages in the chat" } }, @@ -24721,7 +24093,7 @@ }, "TeamsChatMember": { "properties": { - "id": { + "member_id": { "anyOf": [ { "type": "string" @@ -24730,10 +24102,10 @@ "type": "null" } ], - "title": "Id", + "title": "Member Id", "description": "Member ID (read-only, set by Teams)" }, - "displayName": { + "member_name": { "anyOf": [ { "type": "string" @@ -24742,10 +24114,10 @@ "type": "null" } ], - "title": "Displayname", + "title": "Member Name", "description": "Member display name" }, - "email": { + "member_email": { "anyOf": [ { "type": "string" @@ -24754,7 +24126,7 @@ "type": "null" } ], - "title": "Email", + "title": "Member Email", "description": "User email for creating chats" } }, @@ -24764,7 +24136,7 @@ }, "TeamsMessage": { "properties": { - "id": { + "message_id": { "anyOf": [ { "type": "string" @@ -24773,18 +24145,18 @@ "type": "null" } ], - "title": "Id", + "title": "Message Id", "description": "Message ID (read-only, set by Teams)" }, - "body": { + 
"content": { "type": "string", - "title": "Body", + "title": "Content", "description": "Message content" } }, "type": "object", "required": [ - "body" + "content" ], "title": "TeamsMessage", "description": "Teams Message object" @@ -24828,6 +24200,51 @@ ], "title": "ToolFormat" }, + "UpdateServerInstanceRequest": { + "properties": { + "isReadOnly": { + "anyOf": [ + { + "type": "boolean" + }, + { + "type": "null" + } + ], + "title": "Isreadonly", + "description": "Whether the MCP server connection is read-only. When true, write operations will be restricted." + } + }, + "additionalProperties": false, + "type": "object", + "title": "UpdateServerInstanceRequest" + }, + "UpdateServerInstanceResponse": { + "properties": { + "instanceId": { + "type": "string", + "title": "Instanceid", + "description": "The unique identifier of the updated connection integration instance." + }, + "isReadOnly": { + "type": "boolean", + "title": "Isreadonly", + "description": "The current read-only status of the connection." + }, + "message": { + "type": "string", + "title": "Message", + "description": "A message indicating the result of the update operation." + } + }, + "type": "object", + "required": [ + "instanceId", + "isReadOnly", + "message" + ], + "title": "UpdateServerInstanceResponse" + }, "UserInfo": { "properties": { "userId": { @@ -25048,65 +24465,6 @@ ], "title": "WordPressPost", "description": "WordPress post data for creation/update\n\nAttributes:\n title: Post title\n content: Post content (HTML or plain text)\n status: Post status (draft, publish, private, pending)\n excerpt: Post excerpt/summary (optional)\n categories: List of category names (optional, created if not exist)\n tags: List of tag names (optional, created if not exist)\n featured_image: URL to featured image (optional)\n date: Post publish date in ISO format (optional)\n format: Post format (standard, aside, gallery, etc.) 
(optional)" - }, - "ZoomOAuthErrorResponse": { - "properties": { - "error": { - "type": "string", - "title": "Error", - "description": "Error message from the OAuth process" - }, - "message": { - "anyOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ], - "title": "Message", - "description": "Additional error message" - } - }, - "type": "object", - "required": [ - "error" - ], - "title": "ZoomOAuthErrorResponse" - }, - "ZoomOAuthSuccessResponse": { - "properties": { - "status": { - "type": "string", - "title": "Status", - "description": "Status of the OAuth process", - "default": "success" - }, - "message": { - "type": "string", - "title": "Message", - "description": "Success message" - }, - "data": { - "anyOf": [ - { - "additionalProperties": true, - "type": "object" - }, - { - "type": "null" - } - ], - "title": "Data", - "description": "Additional data related to the response" - } - }, - "type": "object", - "required": [ - "message" - ], - "title": "ZoomOAuthSuccessResponse" - } }, "securitySchemes": { diff --git a/docs/concepts/sandbox.mdx b/docs/concepts/sandbox.mdx new file mode 100644 index 00000000..9035b9df --- /dev/null +++ b/docs/concepts/sandbox.mdx @@ -0,0 +1,80 @@ +--- +title: "Sandbox" +description: "Scalable, isolated training and RL environments for real-world tool use" +--- + +## The Problem + +For LLM researchers, setting up LLM training or reinforcement learning environments for real-world tool use is complex and painful: + +- Managing different environments or test accounts +- Implementing MCP Servers and handling various authentication issues +- Initializing realistic data +- Resetting state between runs +- Ensuring isolation across concurrent sessions + +## The Solution + +Klavis MCP Sandbox as a Service solves these challenges. In addition to letting your model interact with our comprehensive MCP server ecosystem, you can use our sandbox infrastructure to easily **dump and reset data** in any concurrent run (see the minimal loop sketch at the end of this page). + + +Our sandbox infrastructure is **horizontally scalable**, so it can handle as many concurrent sessions as you need. + + +## Lifecycle + + + + Request a sandbox based on the external services you need (Snowflake, Gmail, CRM, ERP, etc.) and get an MCP server URL for that isolated instance. + + + Load a deterministic "world state" in JSON format. We handle everything: creating databases, setting up CRM data, ERP systems, and more. + + + Let your LLM / AI agent use MCP tools against the sandbox as if it were the real app. You can use multiple MCP servers with many tools simultaneously. + + + Snapshot the full sandbox state and compare it programmatically against your ground truth to check whether your LLM completed the task correctly. + + + Wipe the sandbox back to a clean slate and kick off the next run. + + + +## Video + + + +## Resources + + + + Create sandboxes, seed data, run an agent, then dump and clean up. + + + Manage isolated sandbox environments for training/eval: pooling, init, export, teardown. + + + Use Klavis MCP Sandbox with Eval Protocol for model training and RL at scale.
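To make the lifecycle concrete, here is the minimal loop sketch referenced above. It is a self-contained, in-memory simulation of the create / initialize / interact / dump / reset contract rather than the Klavis SDK; every helper name below is an illustrative assumption, so see the cookbook above for real usage.

```python
import copy

def create_sandbox(services):
    # 1. Create: an isolated instance scoped to the services you need
    # (a real sandbox would also hand back an MCP server URL)
    return {"services": services, "state": {}}

def initialize(sandbox, world_state):
    # 2. Initialize: load a deterministic JSON-style world state
    sandbox["state"] = copy.deepcopy(world_state)

def dump(sandbox):
    # 4. Dump: snapshot the full state for comparison against ground truth
    return copy.deepcopy(sandbox["state"])

def reset(sandbox):
    # 5. Reset: wipe back to a clean slate before the next run
    sandbox["state"] = {}

sandbox = create_sandbox(["snowflake", "gmail"])
world_state = {"crm": {"contacts": []}}
ground_truth = {"crm": {"contacts": [{"email": "a@example.com"}]}}

for episode in range(3):
    initialize(sandbox, world_state)
    # 3. Interact: your agent would call MCP tools here; we fake one write
    sandbox["state"]["crm"]["contacts"].append({"email": "a@example.com"})
    print(f"episode {episode} passed: {dump(sandbox) == ground_truth}")
    reset(sandbox)
```

The design point worth copying is that `initialize` deep-copies the world state, so every episode starts from an identical, deterministic snapshot regardless of what the previous run mutated.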
+ + + + diff --git a/docs/docs.json b/docs/docs.json index 500a4cf4..268dc760 100644 --- a/docs/docs.json +++ b/docs/docs.json @@ -50,7 +50,8 @@ "group": "Core Concepts", "pages": [ "concepts/mcp", - "concepts/strata" + "concepts/strata", + "concepts/sandbox" ] }, { @@ -92,6 +93,7 @@ "mcp-server/overview", "mcp-server/affinity", "mcp-server/airtable", + "mcp-server/amplitude", "mcp-server/asana", "mcp-server/attio", "mcp-server/box", @@ -100,6 +102,7 @@ "mcp-server/calendly", "mcp-server/canva", "mcp-server/clickup", + "mcp-server/clockwise", "mcp-server/close", "mcp-server/cloudflare", "mcp-server/coinbase", @@ -136,8 +139,10 @@ "mcp-server/honeycomb", "mcp-server/hubspot", "mcp-server/huggingface", + "mcp-server/instagram", "mcp-server/intercom", "mcp-server/jira", + "mcp-server/jotform", "mcp-server/klavis-reportgen", "mcp-server/klaviyo", "mcp-server/linear", @@ -171,12 +176,16 @@ "mcp-server/sendgrid", "mcp-server/sentry", "mcp-server/servicenow", + "mcp-server/sharesight", "mcp-server/shopify", "mcp-server/slack", + "mcp-server/snowflake", + "mcp-server/square", "mcp-server/stripe", "mcp-server/supabase", "mcp-server/tavily", "mcp-server/vercel", + "mcp-server/wandb", "mcp-server/whatsapp", "mcp-server/wordpress", "mcp-server/youtube", @@ -228,6 +237,7 @@ "pages": [ "api-reference/mcp-server/create-a-server-instance", "api-reference/mcp-server/get-server-instance", + "api-reference/mcp-server/update-a-server-instance", "api-reference/mcp-server/delete-a-server-instance", "api-reference/mcp-server/list-raw-actions", { diff --git a/docs/enterprise-security/klavis-security.mdx b/docs/enterprise-security/klavis-security.mdx index 07c48e93..eed1e32b 100644 --- a/docs/enterprise-security/klavis-security.mdx +++ b/docs/enterprise-security/klavis-security.mdx @@ -33,7 +33,3 @@ Klavis Guardrails operates as a security proxy that intercepts, analyzes, and en **Privilege Escalation Monitoring**: Enforces granular access controls ensuring MCP servers operate under least privilege principles. **Command Injection Mitigation**: Performs deep inspection of tool invocations with strict allowlists and input sanitization. - -## Get Started - -**Ready to secure your MCP infrastructure?** Join our beta by [scheduling a 15-minute call](https://cal.com/zihao-lin-u35ykt/15min) with us, or reach out directly at security@klavis.ai. 
\ No newline at end of file diff --git a/docs/images/amplitude.svg b/docs/images/amplitude.svg new file mode 100644 index 00000000..e67e1aa3 --- /dev/null +++ b/docs/images/amplitude.svg @@ -0,0 +1,25 @@ + + + + + + + + + diff --git a/docs/images/clockwise.svg b/docs/images/clockwise.svg new file mode 100644 index 00000000..b1860f93 --- /dev/null +++ b/docs/images/clockwise.svg @@ -0,0 +1,3 @@ + + + diff --git a/docs/images/instagram.svg b/docs/images/instagram.svg new file mode 100644 index 00000000..4ce33d4d --- /dev/null +++ b/docs/images/instagram.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/docs/images/jotform.svg b/docs/images/jotform.svg new file mode 100644 index 00000000..26946e27 --- /dev/null +++ b/docs/images/jotform.svg @@ -0,0 +1,6 @@ + + + + + + diff --git a/docs/images/snowflake.svg b/docs/images/snowflake.svg new file mode 100644 index 00000000..e88dcad6 --- /dev/null +++ b/docs/images/snowflake.svg @@ -0,0 +1,9 @@ + + + + + + + \ No newline at end of file diff --git a/docs/images/square.svg b/docs/images/square.svg new file mode 100644 index 00000000..0597bdf4 --- /dev/null +++ b/docs/images/square.svg @@ -0,0 +1,7 @@ + + + diff --git a/docs/images/wandb.webp b/docs/images/wandb.webp new file mode 100644 index 00000000..245d1de5 Binary files /dev/null and b/docs/images/wandb.webp differ diff --git a/docs/knowledge-base/oauth_app/oauth-scopes.mdx b/docs/knowledge-base/oauth_app/oauth-scopes.mdx index 7aa06d3a..6e95ca00 100644 --- a/docs/knowledge-base/oauth_app/oauth-scopes.mdx +++ b/docs/knowledge-base/oauth_app/oauth-scopes.mdx @@ -33,10 +33,13 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser projects:write projects:delete stories:read + stories:write task_templates:read tasks:read tasks:write tasks:delete + tags:read + tags:write teams:read users:read workspaces:read @@ -50,6 +53,20 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser + + ``` + root_readonly + root_readwrite + manage_managed_users + manage_app_users + manage_groups + manage_webhook + manage_enterprise_properties + manage_data_retention + manage_triggers + ``` + + ``` app:read @@ -73,6 +90,12 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser ``` + + + Cal.com OAuth scopes are configured via the Cal.com app settings rather than an explicit scope string. + + + ClickUp uses OAuth 2.0 with client credentials. The specific scopes are managed through the ClickUp app configuration rather than explicit scope parameters. 
@@ -114,6 +137,30 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser read:account report:personal-data offline_access + read:content.metadata:confluence + delete:space:confluence + write:comment:confluence + delete:comment:confluence + delete:page:confluence + read:comment:confluence + ``` + + + + ``` + recordings_export + message_content_export + screen_pop + calls:list + fax_message + offline_access + ``` + + + + ``` + signature + impersonation ``` @@ -133,12 +180,60 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser ``` + + ``` + public_api + ``` + + + + ``` + current_user:read + file_comments:read + file_comments:write + file_content:read + file_metadata:read + file_versions:read + library_assets:read + library_content:read + team_library_content:read + webhooks:read + webhooks:write + ``` + + ``` repo read:user read:org security_events + delete_repo + ``` + + + + ``` + api + read_api + read_user + create_runner + manage_runner + k8s_proxy + read_repository + write_repository + read_registry + write_registry + read_virtual_registry + write_virtual_registry + read_observability + write_observability + ai_features + admin_mode + sudo + openid + profile + email ``` @@ -148,6 +243,9 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser https://www.googleapis.com/auth/gmail.send https://www.googleapis.com/auth/gmail.compose https://www.googleapis.com/auth/gmail.modify + https://www.googleapis.com/auth/contacts.other.readonly + https://www.googleapis.com/auth/contacts.readonly + https://www.googleapis.com/auth/directory.readonly ``` @@ -155,6 +253,15 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser ``` https://www.googleapis.com/auth/calendar.readonly https://www.googleapis.com/auth/calendar.events + https://www.googleapis.com/auth/contacts.other.readonly + https://www.googleapis.com/auth/contacts.readonly + https://www.googleapis.com/auth/directory.readonly + ``` + + + + ``` + https://www.googleapis.com/auth/cloud-platform ``` @@ -170,6 +277,12 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser ``` + + ``` + https://www.googleapis.com/auth/drive + ``` + + ``` https://www.googleapis.com/auth/drive @@ -214,13 +327,83 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser ``` + + ``` + instagram_business_basic + instagram_business_manage_insights + instagram_business_manage_comments + ``` + + ``` read:jira-user read:jira-work write:jira-work + manage:jira-project manage:jira-configuration + manage:jira-webhook + manage:jira-data-provider + read:servicedesk-request + manage:servicedesk-customer + write:servicedesk-request + read:servicemanagement-insight-objects offline_access + read:sprint:jira-software + write:sprint:jira-software + read:board-scope:jira-software + write:board-scope:jira-software + read:project:jira + delete:sprint:jira-software + delete:comment:jira + delete:comment.property:jira + delete:issue:jira + ``` + + + + ``` + accounts:read + campaigns:read + campaigns:write + conversations:read + conversations:write + catalogs:read + catalogs:write + coupons:read + coupons:write + coupon-codes:read + coupon-codes:write + data-privacy:read + data-privacy:write + events:read + events:write + flows:read + flows:write + forms:read + images:read + images:write + lists:read + lists:write + metrics:read + metrics:write + profiles:read + profiles:write + push-tokens:read + push-tokens:write + 
segments:read + segments:write + reviews:read + subscriptions:read + subscriptions:write + tags:read + tags:write + templates:read + templates:write + tracking-settings:read + tracking-settings:write + web-feeds:read + web-feeds:write ``` @@ -231,6 +414,7 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser issues:create comments:create timeSchedule:write + admin ``` @@ -243,12 +427,81 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser ``` + + ``` + openid + profile + email + User.Read + Calendars.ReadWrite + MailboxSettings.Read + offline_access + ``` + + + + ``` + openid + profile + email + User.Read + Mail.Read + Mail.ReadWrite + MailboxSettings.Read + MailboxSettings.ReadWrite + Mail.Send + offline_access + ``` + + + + ``` + openid + profile + email + offline_access + User.Read + Channel.Create + Channel.Delete.All + Channel.ReadBasic.All + ChannelMessage.Read.All + ChannelMessage.Send + Chat.Read + Chat.ReadBasic + Chat.ReadWrite + ChatMessage.Send + Chat.Create + Team.ReadBasic.All + Team.Create + TeamMember.Read.All + TeamMember.ReadWriteNonOwnerRole.All + TeamMember.ReadWrite.All + User.ReadBasic.All + Group.ReadWrite.All + ``` + + ``` - users:read + account:read + assets:read boards:read boards:write + docs:read + docs:write + me:read + notifications:write + tags:read + teams:read + teams:write + updates:read updates:write + users:read + users:write + webhooks:read + webhooks:write + workspaces:read + workspaces:write ``` @@ -281,10 +534,81 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser email offline_access Files.ReadWrite.All + Files.Read.All User.Read ``` + + ``` + abilities.read + addons.read + addons.write + analytics.read + audit_records.read + change_events.read + change_events.write + custom_fields.read + custom_fields.write + escalation_policies.read + escalation_policies.write + event_orchestrations.read + event_orchestrations.write + event_rules.read + event_rules.write + extension_schemas.read + extensions.read + extensions.write + incident_types.read + incident_types.write + incident_workflows.read + incident_workflows.write + incident_workflows:instances.write + incidents.read + incidents.write + jira_cloud_accounts.read + jira_cloud_rules.read + jira_cloud_rules.write + licenses.read + notifications.read + oauth_delegations.read + oauth_delegations.write + oncalls.read + priorities.read + response_plays.read + response_plays.write + schedules.read + schedules.write + services.read + services.write + standards.read + standards.write + status_dashboards.read + status_pages.read + status_pages.write + subscribers.read + subscribers.write + tags.read + tags.write + teams.read + teams.write + templates.read + templates.write + users.read + users.write + users:contact_methods.read + users:contact_methods.write + users:sessions.read + users:sessions.write + vendors.read + webhook_subscriptions.read + webhook_subscriptions.write + workflow_integrations.read + workflow_integrations:connections.read + workflow_integrations:connections.write + ``` + + ``` com.intuit.quickbooks.accounting @@ -301,25 +625,37 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser ``` + + + Shopify OAuth scopes are configured via the Shopify app settings rather than an explicit scope string. 
+ + + + + ``` + read_write + ``` + + **Bot Scopes:** ``` app_mentions:read channels:history - channels:read chat:write - chat:write.customize - commands - files:read + channels:join + channels:manage + channels:read + groups:history groups:read groups:write im:history - im:read + im:write + mpim:history mpim:read + mpim:write reactions:read reactions:write - team:read - users:read ``` **User Scopes:** @@ -327,18 +663,22 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser channels:history channels:read channels:write + channels:write.invites chat:write groups:history groups:read groups:write + groups:write.invites im:history im:read im:write mpim:history mpim:read + mpim:write users:read - users:write search:read + users:read.email + reactions:write ``` @@ -348,12 +688,24 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser + + + Vercel OAuth scopes are configured via the Vercel app settings rather than an explicit scope string. + + + ``` global ``` + + + X (Twitter) OAuth scopes are configured via the X app settings rather than an explicit scope string. + + + ``` accounting.transactions.read @@ -361,6 +713,20 @@ These are the minimum OAuth scopes required for all tools in each Klavis MCP ser offline_access ``` + + + ``` + https://www.googleapis.com/auth/youtube + ``` + + + + ``` + read + write + impersonate + ``` + ## Need Help? diff --git a/docs/mcp-server/amplitude.mdx b/docs/mcp-server/amplitude.mdx new file mode 100644 index 00000000..0bf8d77a --- /dev/null +++ b/docs/mcp-server/amplitude.mdx @@ -0,0 +1,210 @@ +--- +title: 'Amplitude' +description: 'Connect AI agents to Amplitude to analyze product usage data, track user behavior, and optimize product decisions' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Amplitude to analyze product usage data, track user behavior, and optimize product decisions through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Amplitude + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.AMPLITUDE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Amplitude + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Amplitude], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Amplitude"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. 
+ + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.AMPLITUDE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Amplitude]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/amplitude/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Amplitude API key from your [Amplitude Developer Console](https://amplitude.com/docs). + + + + 🎉 **Your Amplitude MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Amplitude from the list of available integrations. + + + + Complete the OAuth flow to connect your Amplitude account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/amplitude + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/amplitude-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/amplitude-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_amplitude_api_key"}' \ + ghcr.io/klavis-ai/amplitude-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "amplitude": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Amplitude. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/clockwise.mdx b/docs/mcp-server/clockwise.mdx new file mode 100644 index 00000000..c9008dfa --- /dev/null +++ b/docs/mcp-server/clockwise.mdx @@ -0,0 +1,210 @@ +--- +title: 'Clockwise' +description: 'Connect AI agents to Clockwise to intelligently optimize calendars, schedule meetings, and improve team productivity.' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Clockwise to intelligently optimize calendars, schedule meetings, and improve team productivity through AI agents. 
+ + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Clockwise + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.CLOCKWISE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Clockwise + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Clockwise], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Clockwise"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.CLOCKWISE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Clockwise]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/clockwise/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Clockwise API key from your [Clockwise Developer Console](https://www.getclockwise.com/mcp). + + + + 🎉 **Your Clockwise MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Clockwise from the list of available integrations. + + + + Complete the OAuth flow to connect your Clockwise account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/clockwise + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/clockwise-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/clockwise-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_clockwise_api_key"}' \ + ghcr.io/klavis-ai/clockwise-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "clockwise": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Clockwise. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. 
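As a starting point, you can inspect the discovered tools programmatically. A minimal sketch, assuming the Python SDK surfaces the documented get_tools endpoint as an `mcp_server.get_tools` method (the method name, parameter, and response shape here are assumptions; confirm them against the API reference linked above):

```python
from klavis import Klavis

klavis_client = Klavis(api_key="YOUR_API_KEY")

# Assumed SDK method mirroring the get_tools REST endpoint; verify the
# exact name and parameters against the API reference before relying on it.
response = klavis_client.mcp_server.get_tools(server_name="Clockwise")

for tool in response.tools:
    print(tool.name, "-", tool.description)
```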
+ + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/instagram.mdx b/docs/mcp-server/instagram.mdx new file mode 100644 index 00000000..2b2e0412 --- /dev/null +++ b/docs/mcp-server/instagram.mdx @@ -0,0 +1,210 @@ +--- +title: 'Instagram' +description: 'Connect AI agents to Instagram to manage content, analyze engagement, and optimize social media growth and campaigns.' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Instagram to manage content, analyze engagement, and optimize social media growth and campaigns through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Instagram + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.INSTAGRAM], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Instagram + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Instagram], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Instagram"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.INSTAGRAM]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Instagram]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/instagram/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Instagram API key from your [Instagram Developer Console](https://developers.facebook.com/docs/instagram-platform/). + + + + 🎉 **Your Instagram MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Instagram from the list of available integrations. + + + + Complete the OAuth flow to connect your Instagram account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). 
+ + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/instagram + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/instagram-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/instagram-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_instagram_api_key"}' \ + ghcr.io/klavis-ai/instagram-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "instagram": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Instagram. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/jotform.mdx b/docs/mcp-server/jotform.mdx new file mode 100644 index 00000000..abad87d1 --- /dev/null +++ b/docs/mcp-server/jotform.mdx @@ -0,0 +1,210 @@ +--- +title: 'Jotform' +description: 'Connect AI agents to Jotform to create, manage, and analyze forms, automate data collection, and integrate responses into workflows.' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Jotform to create, manage, and analyze forms, automate data collection, and integrate responses into workflows through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Jotform + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.JOTFORM], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Jotform + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Jotform], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Jotform"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. 
+ + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.JOTFORM]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Jotform]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/jotform/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Jotform API key from your [Jotform Developer Console](https://www.jotform.com/developers/). + + + + 🎉 **Your Jotform MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Jotform from the list of available integrations. + + + + Complete the OAuth flow to connect your Jotform account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/jotform + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/jotform-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/jotform-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_jotform_api_key"}' \ + ghcr.io/klavis-ai/jotform-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "jotform": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Jotform. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. 
+ + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/overview.mdx b/docs/mcp-server/overview.mdx index e3a3afdf..8339571c 100644 --- a/docs/mcp-server/overview.mdx +++ b/docs/mcp-server/overview.mdx @@ -91,6 +91,17 @@ import ZohoDeskCard from '/snippets/mcp-server-card/zoho-desk-card.mdx'; import ZohoMailCard from '/snippets/mcp-server-card/zoho-mail-card.mdx'; import CanvaCard from '/snippets/mcp-server-card/canva-card.mdx'; import OutlookCalendarCard from '/snippets/mcp-server-card/outlook-calendar-card.mdx'; +import AmplitudeCard from '/snippets/mcp-server-card/amplitude-card.mdx'; +import ClockwiseCard from '/snippets/mcp-server-card/clockwise-card.mdx'; +import InstagramCard from '/snippets/mcp-server-card/instagram-card.mdx'; +import JotformCard from '/snippets/mcp-server-card/jotform-card.mdx'; +import SnowflakeCard from '/snippets/mcp-server-card/snowflake-card.mdx'; +import SquareCard from '/snippets/mcp-server-card/square-card.mdx'; +import WandbCard from '/snippets/mcp-server-card/wandb-card.mdx'; +import HuggingfaceCard from '/snippets/mcp-server-card/huggingface-card.mdx'; +import NetlifyCard from '/snippets/mcp-server-card/netlify-card.mdx'; +import PaypalCard from '/snippets/mcp-server-card/paypal-card.mdx'; +import HoneycombCard from '/snippets/mcp-server-card/honeycomb-card.mdx'; Learn how to easily integrate with Klavis remote hosted MCP Servers. @@ -99,6 +110,7 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + @@ -107,6 +119,7 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + @@ -139,8 +152,12 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + + + + @@ -154,12 +171,14 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + + @@ -173,11 +192,14 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + + + @@ -190,6 +212,7 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + @@ -245,14 +268,17 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + + + @@ -263,6 +289,7 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + @@ -271,6 +298,7 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + @@ -292,6 +320,7 @@ Learn how to easily integrate with Klavis remote hosted MCP Servers. + diff --git a/docs/mcp-server/sharesight.mdx b/docs/mcp-server/sharesight.mdx new file mode 100644 index 00000000..937d7a31 --- /dev/null +++ b/docs/mcp-server/sharesight.mdx @@ -0,0 +1,167 @@ +--- +title: 'Sharesight' +description: 'Connect AI agents to Sharesight for portfolio tracking, investment management, and financial reporting automation' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Sharesight to manage investment portfolios, track performance, and automate financial reporting through AI agents. 
+ + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Sharesight + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SHARESIGHT], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Sharesight + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Sharesight], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Sharesight"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.SHARESIGHT]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Sharesight]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/sharesight/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + 🎉 **Your Sharesight MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Sharesight from the list of available integrations. + + + + Complete the OAuth flow to connect your Sharesight account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Sharesight. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Customize OAuth flows with your own branding + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + diff --git a/docs/mcp-server/snowflake.mdx b/docs/mcp-server/snowflake.mdx new file mode 100644 index 00000000..317b1ade --- /dev/null +++ b/docs/mcp-server/snowflake.mdx @@ -0,0 +1,210 @@ +--- +title: 'Snowflake' +description: 'Connect AI agents to Snowflake to query and analyze large-scale data, automate data workflows, and generate insights.' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Snowflake to query and analyze large-scale data, automate data workflows, and generate insights through AI agents.
+ + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Snowflake + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SNOWFLAKE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Snowflake + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Snowflake], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Snowflake"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.SNOWFLAKE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Snowflake]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/snowflake/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Snowflake API key from your [Snowflake Developer Console](https://www.snowflake.com/en/developers/). + + + + 🎉 **Your Snowflake MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Snowflake from the list of available integrations. + + + + Complete the OAuth flow to connect your Snowflake account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/snowflake + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/snowflake-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/snowflake-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_snowflake_api_key"}' \ + ghcr.io/klavis-ai/snowflake-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "snowflake": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Snowflake. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. 
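+
+Because Strata servers speak standard MCP, you can also inspect the Snowflake tool catalog from code with any MCP client. The sketch below is illustrative rather than official Klavis documentation: it uses the open-source `mcp` Python SDK over streamable HTTP, and it assumes the create-server response from Step 1 exposes the endpoint as `strata_server_url`.
+
+```python Python
+import asyncio
+
+from mcp import ClientSession
+from mcp.client.streamable_http import streamablehttp_client
+
+
+async def list_snowflake_tools(server_url: str) -> None:
+    # Connect to the Strata MCP endpoint returned in Step 1
+    async with streamablehttp_client(server_url) as (read, write, _):
+        async with ClientSession(read, write) as session:
+            await session.initialize()
+            tools = await session.list_tools()
+            for tool in tools.tools:
+                print(tool.name, "-", tool.description)
+
+
+# `strata_server_url` is an assumed field name on the create-server response
+asyncio.run(list_snowflake_tools(response.strata_server_url))
+```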
+ + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/square.mdx b/docs/mcp-server/square.mdx new file mode 100644 index 00000000..d1d39ab1 --- /dev/null +++ b/docs/mcp-server/square.mdx @@ -0,0 +1,210 @@ +--- +title: 'Square' +description: 'Connect AI agents to Square to manage payments, track sales and inventory, analyze customer data, and automate reporting.' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Square to manage payments and transactions through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Square + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.SQUARE], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Square + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.Square], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["Square"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. + + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.SQUARE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.Square]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/square/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Square API key from your [Square Developer Console](https://developer.squareup.com/us/en). + + + + 🎉 **Your Square MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Square from the list of available integrations. + + + + Complete the OAuth flow to connect your Square account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). 
+ + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/square + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/square-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/square-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_square_api_key"}' \ + ghcr.io/klavis-ai/square-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "square": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Square. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. + + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/wandb.mdx b/docs/mcp-server/wandb.mdx new file mode 100644 index 00000000..00850a18 --- /dev/null +++ b/docs/mcp-server/wandb.mdx @@ -0,0 +1,210 @@ +--- +title: 'Weights and Biases' +description: 'Connect AI agents to Weights & Biases to track experiments, monitor model training, and collaborate on workflows from development to production.' +--- + + + **Prerequisites** + Before you begin, [create an account](https://www.klavis.ai/home/api-keys) and get your API Key. + + +## Getting started + +Connect to Weights and Biases to track experiments, monitor model training, and collaborate on workflows from development to production through AI agents. + + + + + + + ```bash pip + pip install klavis + ``` + ```bash npm + npm install klavis + ``` + + + + + + ```python Python + from klavis import Klavis + from klavis.types import McpServerName + + klavis_client = Klavis(api_key="YOUR_API_KEY") + + # Create a Strata MCP server with Weights and Biases + response = klavis_client.mcp_server.create_strata_server( + servers=[McpServerName.WEIGHTS_AND_BIASES], + user_id="user123" + ) + ``` + + ```typescript TypeScript + import { KlavisClient, Klavis } from 'klavis'; + + const klavis = new KlavisClient({ apiKey: process.env.KLAVIS_API_KEY! }); + + // Create a Strata MCP server with Weights and Biases + const response = await klavis.mcpServer.createStrataServer({ + servers: [Klavis.McpServerName.WeightsandBiases], + userId: "user123" + }); + ``` + + ```bash cURL + curl -X POST "https://api.klavis.ai/mcp-server/strata/create" \ + -H "Authorization: Bearer YOUR_API_KEY" \ + -H "Content-Type: application/json" \ + -d '{ + "servers": ["WeightsandBiases"], + "userId": "user123" + }' + ``` + + + + `userId` specifies whose connected accounts and data you are accessing in Klavis. It should be a unique id for yourself, your team, or your organization. 
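+
+  For example, two different `userId` values produce two isolated servers, so each user's OAuth connections and data stay separate. A minimal sketch reusing the client from Step 1 (the email-style ids are placeholders):
+
+  ```python Python
+  # Separate userIds -> separate connections and data
+  alice = klavis_client.mcp_server.create_strata_server(
+      servers=[McpServerName.WEIGHTS_AND_BIASES],
+      user_id="alice@example.com",
+  )
+  bob = klavis_client.mcp_server.create_strata_server(
+      servers=[McpServerName.WEIGHTS_AND_BIASES],
+      user_id="bob@example.com",
+  )
+  ```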
+ + + + Full Strata API endpoints + + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.WEIGHTS_AND_BIASES]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.WeightsandBiases]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/weights-and-biases/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + + + + Get your Weights and Biases API key from your [Weights and Biases Developer Console](https://wandb.ai/site/). + + + + 🎉 **Your Weights and Biases MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. + + + + + + + + + + Go to your [Dashboard](https://www.klavis.ai/home/mcp-servers). + + + + Select Weights and Biases from the list of available integrations. + + + + Complete the OAuth flow to connect your Weights and Biases account. + + + + Copy the MCP endpoint URL and add it to your MCP-supported client (Claude Desktop, Cursor, VS Code, etc.). + + + + + + + + ```bash + git clone https://github.com/klavis-ai/klavis + cd klavis/mcp_servers/weights-and-biases + ``` + + + + ```bash + # Pull the Docker image + docker pull ghcr.io/klavis-ai/weights-and-biases-mcp-server:latest + + # Run with OAuth support (requires Klavis API key) + docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ + ghcr.io/klavis-ai/weights-and-biases-mcp-server:latest + + # Or run with manual API key + docker run -p 5000:5000 \ + -e AUTH_DATA='{"access_token":"your_weights_and_biases_api_key"}' \ + ghcr.io/klavis-ai/weights-and-biases-mcp-server:latest + ``` + + + + ```json + { + "mcpServers": { + "weights-and-biases": { + "url": "http://localhost:5000/mcp/" + } + } + } + ``` + + + + + +## Available Tools + + +With our progressive discovery approach, Klavis System is capable of enabling all tools for Weights and Biases. Please use the [get_tools](https://www.klavis.ai/docs/api-reference/mcp-server/get-tools) API for more details. If you find any tool that is missing, please reach out to contact@klavis.ai. 
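+
+Tool discovery and invocation both go through the same MCP session. The following minimal sketch uses the open-source `mcp` Python SDK; the tool name `wandb_list_projects` and its arguments are hypothetical placeholders (use the get_tools output for the real names), and `strata_server_url` is an assumed field on the create-server response from Step 1.
+
+```python Python
+import asyncio
+
+from mcp import ClientSession
+from mcp.client.streamable_http import streamablehttp_client
+
+
+async def call_wandb_tool(server_url: str) -> None:
+    async with streamablehttp_client(server_url) as (read, write, _):
+        async with ClientSession(read, write) as session:
+            await session.initialize()
+            # Hypothetical tool name and arguments -- discover the real
+            # ones via list_tools() or the get_tools API first
+            result = await session.call_tool(
+                "wandb_list_projects",
+                arguments={"entity": "my-team"},
+            )
+            print(result.content)
+
+
+asyncio.run(call_wandb_tool(response.strata_server_url))
+```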
+ + +## Next Steps + + + + Integrate Klavis MCP Servers with leading AI platforms + + + + Explore available MCP servers + + + + REST endpoints and schemas + + + Customize OAuth flows with your own branding + + \ No newline at end of file diff --git a/docs/mcp-server/youtube.mdx b/docs/mcp-server/youtube.mdx index 1b4a511f..4c3ce759 100644 --- a/docs/mcp-server/youtube.mdx +++ b/docs/mcp-server/youtube.mdx @@ -34,7 +34,6 @@ Connect to YouTube to extract video transcripts, analyze content, and automate v klavis_client = Klavis(api_key="YOUR_API_KEY") - # Create a Strata MCP server with YouTube response = klavis_client.mcp_server.create_strata_server( servers=[McpServerName.YOUTUBE], user_id="user123" @@ -71,9 +70,32 @@ Connect to YouTube to extract video transcripts, analyze content, and automate v Full Strata API endpoints + + + + + ```python Python + import webbrowser + + # Open OAuth authorization page + webbrowser.open(response.oauth_urls[McpServerName.YOUTUBE]) + ``` + + ```typescript TypeScript + import open from 'open'; + + // Open OAuth authorization page + await open(response.oauthUrls[Klavis.McpServerName.YouTube]); + ``` + + ```bash cURL + # Copy and paste the OAuth URL into your browser + echo "Visit this URL to authorize: https://api.klavis.ai/oauth/youtube/authorize?instance_id=YOUR_INSTANCE_ID" + ``` + - 🎉 **Your YouTube MCP Server is ready!** You can now use your MCP server URL with any MCP-compatible client. + 🎉 **Your YouTube MCP Server is ready!** Once authentication is complete, you can use your MCP server URL with any MCP-compatible client. @@ -91,8 +113,8 @@ Connect to YouTube to extract video transcripts, analyze content, and automate v Select YouTube from the list of available integrations. - - No authentication required - your server is immediately ready. + + Complete the OAuth flow to connect your YouTube account. 
@@ -115,8 +137,9 @@ Connect to YouTube to extract video transcripts, analyze content, and automate v # Pull the Docker image docker pull ghcr.io/klavis-ai/youtube-mcp-server:latest - # Run the server + # Run with OAuth support (requires Klavis API key) docker run -p 5000:5000 \ + -e KLAVIS_API_KEY=$KLAVIS_API_KEY \ ghcr.io/klavis-ai/youtube-mcp-server:latest ``` @@ -145,6 +168,14 @@ With our progressive discovery approach, Klavis System is capable of enabling al ## Next Steps + + Customize OAuth flows with your own branding + + REST endpoints and schemas - - Customize OAuth flows with your own branding - diff --git a/docs/snippets/mcp-server-card/amplitude-card.mdx b/docs/snippets/mcp-server-card/amplitude-card.mdx new file mode 100644 index 00000000..29716b84 --- /dev/null +++ b/docs/snippets/mcp-server-card/amplitude-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/clockwise-card.mdx b/docs/snippets/mcp-server-card/clockwise-card.mdx new file mode 100644 index 00000000..02d5ffb2 --- /dev/null +++ b/docs/snippets/mcp-server-card/clockwise-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/instagram-card.mdx b/docs/snippets/mcp-server-card/instagram-card.mdx new file mode 100644 index 00000000..d9d3b173 --- /dev/null +++ b/docs/snippets/mcp-server-card/instagram-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/jotform-card.mdx b/docs/snippets/mcp-server-card/jotform-card.mdx new file mode 100644 index 00000000..9f3d8bfe --- /dev/null +++ b/docs/snippets/mcp-server-card/jotform-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/snowflake-card.mdx b/docs/snippets/mcp-server-card/snowflake-card.mdx new file mode 100644 index 00000000..fb952c93 --- /dev/null +++ b/docs/snippets/mcp-server-card/snowflake-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/square-card.mdx b/docs/snippets/mcp-server-card/square-card.mdx new file mode 100644 index 00000000..24211eed --- /dev/null +++ b/docs/snippets/mcp-server-card/square-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/docs/snippets/mcp-server-card/wandb-card.mdx b/docs/snippets/mcp-server-card/wandb-card.mdx new file mode 100644 index 00000000..558f1c9c --- /dev/null +++ b/docs/snippets/mcp-server-card/wandb-card.mdx @@ -0,0 +1,5 @@ + \ No newline at end of file diff --git a/examples/langchain-klavis/typescript/package-lock.json b/examples/langchain-klavis/typescript/package-lock.json index 9699e4ca..9f652dfd 100644 --- a/examples/langchain-klavis/typescript/package-lock.json +++ b/examples/langchain-klavis/typescript/package-lock.json @@ -12,7 +12,7 @@ "@langchain/openai": "^1.1.1", "dotenv": "^16.4.7", "klavis": "^2.12.2", - "langchain": "^1.0.4", + "langchain": "^1.2.3", "open": "^10.1.0" }, "devDependencies": { @@ -25,8 +25,7 @@ "version": "4.1.1", "resolved": "https://registry.npmjs.org/@cfworker/json-schema/-/json-schema-4.1.1.tgz", "integrity": "sha512-gAmrUZSGtKc3AiBL71iNWxDsyUC5uMaKKGdvzYsBoTW/xi42JQHl7eKV2OYzCUqvc+D2RCcf7EXY2iCyFIk6og==", - "license": "MIT", - "peer": true + "license": "MIT" }, "node_modules/@esbuild/aix-ppc64": { "version": "0.25.12", @@ -471,9 +470,9 @@ } }, "node_modules/@langchain/core": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.0.5.tgz", - "integrity": 
"sha512-9Hy/b9+j+mm0Bhnm8xD9B0KpBYTidroLrDHdbrHoMC2DqXoY2umvi1M3M/9D744qsMSaIMP0ZwFcy5YbqI/dGw==", + "version": "1.1.8", + "resolved": "https://registry.npmjs.org/@langchain/core/-/core-1.1.8.tgz", + "integrity": "sha512-kIUidOgc0ZdyXo4Ahn9Zas+OayqOfk4ZoKPi7XaDipNSWSApc2+QK5BVcjvwtzxstsNOrmXJiJWEN6WPF/MvAw==", "license": "MIT", "peer": true, "dependencies": { @@ -482,10 +481,9 @@ "camelcase": "6", "decamelize": "1.2.0", "js-tiktoken": "^1.0.12", - "langsmith": "^0.3.64", + "langsmith": ">=0.4.0 <1.0.0", "mustache": "^4.2.0", "p-queue": "^6.6.2", - "p-retry": "4", "uuid": "^10.0.0", "zod": "^3.25.76 || ^4" }, @@ -493,11 +491,46 @@ "node": ">=20" } }, + "node_modules/@langchain/core/node_modules/langsmith": { + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.2.tgz", + "integrity": "sha512-BvBeFgSmR9esl8x5wsiDlALiHKKPybw2wE2Hh6x1tgSZki46H9c9KI9/06LARbPhyyDu/TZU7exfg6fnhdj1Qg==", + "license": "MIT", + "dependencies": { + "@types/uuid": "^10.0.0", + "chalk": "^4.1.2", + "console-table-printer": "^2.12.1", + "p-queue": "^6.6.2", + "semver": "^7.6.3", + "uuid": "^10.0.0" + }, + "peerDependencies": { + "@opentelemetry/api": "*", + "@opentelemetry/exporter-trace-otlp-proto": "*", + "@opentelemetry/sdk-trace-base": "*", + "openai": "*" + }, + "peerDependenciesMeta": { + "@opentelemetry/api": { + "optional": true + }, + "@opentelemetry/exporter-trace-otlp-proto": { + "optional": true + }, + "@opentelemetry/sdk-trace-base": { + "optional": true + }, + "openai": { + "optional": true + } + } + }, "node_modules/@langchain/langgraph": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/@langchain/langgraph/-/langgraph-1.0.2.tgz", "integrity": "sha512-syxzzWTnmpCL+RhUEvalUeOXFoZy/KkzHa2Da2gKf18zsf9Dkbh3rfnRDrTyUGS1XSTejq07s4rg1qntdEDs2A==", "license": "MIT", + "peer": true, "dependencies": { "@langchain/langgraph-checkpoint": "^1.0.0", "@langchain/langgraph-sdk": "~1.0.0", @@ -756,7 +789,6 @@ "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-5.2.0.tgz", "integrity": "sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==", "license": "MIT", - "peer": true, "engines": { "node": ">=10" }, @@ -896,7 +928,6 @@ "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-6.3.0.tgz", "integrity": "sha512-Gmy6FhYlCY7uOElZUSbxo2UCDH8owEk996gkbrpsgGtrJLM3J7jGxl9Ic7Qwwj4ivOE5AWZWRMecDdF7hqGjFA==", "license": "MIT", - "peer": true, "engines": { "node": ">=10" }, @@ -1062,7 +1093,6 @@ "resolved": "https://registry.npmjs.org/decamelize/-/decamelize-1.2.0.tgz", "integrity": "sha512-z2S+W9X73hAUUki+N+9Za2lBlun89zigOyGrsax+KUQ6wKW4ZoWpEYBkGhQjwAjjDCkWxhY0VKEhk8wzY7F5cA==", "license": "MIT", - "peer": true, "engines": { "node": ">=0.10.0" } @@ -1787,14 +1817,14 @@ } }, "node_modules/langchain": { - "version": "1.0.4", - "resolved": "https://registry.npmjs.org/langchain/-/langchain-1.0.4.tgz", - "integrity": "sha512-g7z2kKvnXOecybbVGHfI2ZmdmP309mxC1FYlq6WC/7RsKgX5MwY9gBjwK16mpKOaozOD9QCo1Ia7o2UcUBRb9Q==", + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/langchain/-/langchain-1.2.3.tgz", + "integrity": "sha512-3k986xJuqg4az53JxV5LnGlOzIXF1d9Kq6Y9s7XjitvzhpsbFuTDV5/kiF4cx3pkNGyw0mUXC4tLz9RxucO0hw==", "license": "MIT", "dependencies": { "@langchain/langgraph": "^1.0.0", "@langchain/langgraph-checkpoint": "^1.0.0", - "langsmith": "~0.3.74", + "langsmith": ">=0.4.0 <1.0.0", "uuid": "^10.0.0", "zod": "^3.25.76 || ^4" }, @@ -1802,20 +1832,19 @@ "node": ">=20" }, "peerDependencies": { - "@langchain/core": 
"^1.0.3" + "@langchain/core": "1.1.8" } }, "node_modules/langsmith": { - "version": "0.3.79", - "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.3.79.tgz", - "integrity": "sha512-j5uiAsyy90zxlxaMuGjb7EdcL51Yx61SpKfDOI1nMPBbemGju+lf47he4e59Hp5K63CY8XWgFP42WeZ+zuIU4Q==", + "version": "0.4.2", + "resolved": "https://registry.npmjs.org/langsmith/-/langsmith-0.4.2.tgz", + "integrity": "sha512-BvBeFgSmR9esl8x5wsiDlALiHKKPybw2wE2Hh6x1tgSZki46H9c9KI9/06LARbPhyyDu/TZU7exfg6fnhdj1Qg==", "license": "MIT", "dependencies": { "@types/uuid": "^10.0.0", "chalk": "^4.1.2", "console-table-printer": "^2.12.1", "p-queue": "^6.6.2", - "p-retry": "4", "semver": "^7.6.3", "uuid": "^10.0.0" }, @@ -1902,7 +1931,6 @@ "resolved": "https://registry.npmjs.org/mustache/-/mustache-4.2.0.tgz", "integrity": "sha512-71ippSywq5Yb7/tVYyGbkBggbU8H3u5Rz56fH60jGFgr8uHwxs+aSKeqmluIVzM0m0kB7xQjKS6qPfd0b2ZoqQ==", "license": "MIT", - "peer": true, "bin": { "mustache": "bin/mustache" } @@ -2653,6 +2681,7 @@ "resolved": "https://registry.npmjs.org/zod/-/zod-3.25.76.tgz", "integrity": "sha512-gzUt/qt81nXsFGKIFcC3YnfEAx5NkunCfnDlvuBSSFS02bcXu4Lmea0AFIUwbLWxWPx3d9p8S5QoaujKcNQxcQ==", "license": "MIT", + "peer": true, "funding": { "url": "https://github.com/sponsors/colinhacks" } @@ -2662,6 +2691,7 @@ "resolved": "https://registry.npmjs.org/zod-to-json-schema/-/zod-to-json-schema-3.24.6.tgz", "integrity": "sha512-h/z3PKvcTcTetyjl1fkj79MHNEjm+HpD6NXheWjzOekY7kV+lwDYnHw+ivHkijnCSMz1yJaWBD9vu/Fcmk+vEg==", "license": "ISC", + "peer": true, "peerDependencies": { "zod": "^3.24.1" } diff --git a/examples/langchain-klavis/typescript/package.json b/examples/langchain-klavis/typescript/package.json index c96b1a16..931f2f93 100644 --- a/examples/langchain-klavis/typescript/package.json +++ b/examples/langchain-klavis/typescript/package.json @@ -10,7 +10,7 @@ }, "dependencies": { "@langchain/openai": "^1.1.1", - "langchain": "^1.0.4", + "langchain": "^1.2.3", "@langchain/mcp-adapters": "^1.0.0", "klavis": "^2.12.2", "dotenv": "^16.4.7", diff --git a/fern/overrides.yml b/fern/overrides.yml index 4c91d3bc..39a94e81 100644 --- a/fern/overrides.yml +++ b/fern/overrides.yml @@ -93,6 +93,10 @@ components: x-fern-type-name: ResendDataInput ResendData-Output: x-fern-type-name: ResendDataOutput + SalesforceData-Input: + x-fern-type-name: SalesforceDataInput + SalesforceData-Output: + x-fern-type-name: SalesforceDataOutput ShopifyData-Input: x-fern-type-name: ShopifyDataInput ShopifyData-Output: diff --git a/mcp_servers/airtable/tools/__init__.py b/mcp_servers/airtable/tools/__init__.py index 32529c8a..064e4234 100644 --- a/mcp_servers/airtable/tools/__init__.py +++ b/mcp_servers/airtable/tools/__init__.py @@ -29,6 +29,6 @@ "create_records", "update_records", "delete_records", - # Base + # Base utilities "auth_token_context", ] diff --git a/mcp_servers/airtable/tools/base.py b/mcp_servers/airtable/tools/base.py index b2cc308c..4d62da1e 100644 --- a/mcp_servers/airtable/tools/base.py +++ b/mcp_servers/airtable/tools/base.py @@ -1,6 +1,6 @@ import logging import os -from typing import Optional +from typing import Any, Dict, Optional from contextvars import ContextVar import aiohttp @@ -10,6 +10,112 @@ logger = logging.getLogger(__name__) +# ============================================================ +# Response Normalization Utilities +# ============================================================ + +def get_path(data: Dict, path: str) -> Any: + """Safe dot-notation access. 
Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: Dict, mapping: Dict[str, Any]) -> Dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# ============================================================ +# Mapping Rules for Airtable Entities +# ============================================================ + +BASE_RULES = { + "id": "id", + "name": "name", + "accessLevel": "permissionLevel", +} + +TABLE_RULES = { + "id": "id", + "name": "name", + "description": "description", + "primaryFieldId": "primaryFieldId", +} + +FIELD_RULES = { + "id": "id", + "name": "name", + "type": "type", + "description": "description", + "options": "options", +} + +VIEW_RULES = { + "id": "id", + "name": "name", + "type": "type", +} + +RECORD_RULES = { + "id": "id", + "createdAt": "createdTime", + "data": "fields", + "commentCount": "commentCount", +} + + +def normalize_field(raw_field: Dict) -> Dict: + """Normalize a single field.""" + return normalize(raw_field, FIELD_RULES) + + +def normalize_view(raw_view: Dict) -> Dict: + """Normalize a single view.""" + return normalize(raw_view, VIEW_RULES) + + +def normalize_table(raw_table: Dict) -> Dict: + """Normalize a single table with nested fields and views.""" + table = normalize(raw_table, TABLE_RULES) + if raw_table.get('fields'): + table['fields'] = [normalize_field(f) for f in raw_table['fields']] + if raw_table.get('views'): + table['views'] = [normalize_view(v) for v in raw_table['views']] + return table + + +def normalize_record(raw_record: Dict) -> Dict: + """Normalize a single record.""" + return normalize(raw_record, RECORD_RULES) + + +def normalize_base(raw_base: Dict) -> Dict: + """Normalize a single base.""" + return normalize(raw_base, BASE_RULES) + + class AirtableValidationError(Exception): """Custom exception for Airtable 422 validation errors.""" diff --git a/mcp_servers/airtable/tools/bases.py b/mcp_servers/airtable/tools/bases.py index 6dd0784e..1f9fb824 100644 --- a/mcp_servers/airtable/tools/bases.py +++ b/mcp_servers/airtable/tools/bases.py @@ -1,7 +1,7 @@ import logging from typing import Any, Dict -from .base import make_airtable_request +from .base import make_airtable_request, normalize_base # Configure logging logger = logging.getLogger("airtable_tools") @@ -11,4 +11,11 @@ async def get_bases_info() -> Dict[str, Any]: """Get information about all bases.""" endpoint = "meta/bases" logger.info("Executing tool: get_bases_info") - return await make_airtable_request("GET", endpoint) + raw_response = await make_airtable_request("GET", endpoint) + + bases = [normalize_base(b) for b in raw_response.get("bases", [])] + return { + "nextPageToken": raw_response.get("offset"), + "count": len(bases), + "bases": bases, + } diff --git a/mcp_servers/airtable/tools/fields.py b/mcp_servers/airtable/tools/fields.py index 7c43a098..f2635df6 100644 --- a/mcp_servers/airtable/tools/fields.py +++ b/mcp_servers/airtable/tools/fields.py @@ -1,7 +1,7 @@ import logging from typing import Any, 
Dict -from .base import make_airtable_request +from .base import make_airtable_request, normalize_field # Configure logging logger = logging.getLogger("airtable_tools") @@ -32,7 +32,8 @@ async def create_field( logger.info( f"Executing tool: create_field '{name}' of type '{type}' in table {table_id}, base {base_id}" ) - return await make_airtable_request("POST", endpoint, json_data=payload) + raw_response = await make_airtable_request("POST", endpoint, json_data=payload) + return normalize_field(raw_response) async def update_field( @@ -56,4 +57,5 @@ async def update_field( logger.info( f"Executing tool: update_field '{field_id}' in table {table_id}, base {base_id}" ) - return await make_airtable_request("PATCH", endpoint, json_data=payload) + raw_response = await make_airtable_request("PATCH", endpoint, json_data=payload) + return normalize_field(raw_response) diff --git a/mcp_servers/airtable/tools/records.py b/mcp_servers/airtable/tools/records.py index 6896dff4..56cc860d 100644 --- a/mcp_servers/airtable/tools/records.py +++ b/mcp_servers/airtable/tools/records.py @@ -1,7 +1,7 @@ import logging from typing import Any, Dict -from .base import make_airtable_request +from .base import make_airtable_request, normalize_record # Configure logging logger = logging.getLogger("airtable_tools") @@ -65,7 +65,14 @@ async def list_records( endpoint = f"{endpoint}?{'&'.join(query_parts)}" logger.info(f"Executing tool: list_records for table {table_id} in base {base_id}") - return await make_airtable_request("GET", endpoint) + raw_response = await make_airtable_request("GET", endpoint) + + records = [normalize_record(r) for r in raw_response.get("records", [])] + return { + "nextPageToken": raw_response.get("offset"), + "count": len(records), + "records": records, + } async def get_record(base_id: str, table_id: str, record_id: str) -> Dict[str, Any]: @@ -74,7 +81,8 @@ async def get_record(base_id: str, table_id: str, record_id: str) -> Dict[str, A logger.info( f"Executing tool: get_record for record {record_id} in table {table_id}, base {base_id}" ) - return await make_airtable_request("GET", endpoint) + raw_response = await make_airtable_request("GET", endpoint) + return normalize_record(raw_response) async def create_records( @@ -96,7 +104,13 @@ async def create_records( logger.info( f"Executing tool: create_records for table {table_id} in base {base_id}" ) - return await make_airtable_request("POST", endpoint, json_data=payload) + raw_response = await make_airtable_request("POST", endpoint, json_data=payload) + + records = [normalize_record(r) for r in raw_response.get("records", [])] + return { + "count": len(records), + "records": records, + } async def update_records( @@ -126,7 +140,19 @@ async def update_records( logger.info( f"Executing tool: update_records for table {table_id} in base {base_id}" ) - return await make_airtable_request("PATCH", endpoint, json_data=payload) + raw_response = await make_airtable_request("PATCH", endpoint, json_data=payload) + + normalized_records = [normalize_record(r) for r in raw_response.get("records", [])] + result = { + "count": len(normalized_records), + "records": normalized_records, + } + # Include upsert-specific fields if present + if raw_response.get("createdRecords"): + result["createdRecordIds"] = raw_response["createdRecords"] + if raw_response.get("updatedRecords"): + result["updatedRecordIds"] = raw_response["updatedRecords"] + return result async def delete_records( @@ -144,4 +170,14 @@ async def delete_records( logger.info( f"Executing tool: 
delete_records for table {table_id} in base {base_id}" ) - return await make_airtable_request("DELETE", endpoint) + raw_response = await make_airtable_request("DELETE", endpoint) + + # Normalize delete response + deleted = [] + for item in raw_response.get("records", []): + if item.get("deleted"): + deleted.append(item.get("id")) + return { + "deletedCount": len(deleted), + "deletedRecordIds": deleted, + } diff --git a/mcp_servers/airtable/tools/tables.py b/mcp_servers/airtable/tools/tables.py index 1bd5987d..3f6bdea2 100644 --- a/mcp_servers/airtable/tools/tables.py +++ b/mcp_servers/airtable/tools/tables.py @@ -1,7 +1,7 @@ import logging from typing import Any, Dict -from .base import make_airtable_request +from .base import make_airtable_request, normalize_table # Configure logging logger = logging.getLogger("airtable_tools") @@ -11,7 +11,13 @@ async def get_tables_info(base_id: str) -> Dict[str, Any]: """Get information about all tables in a base.""" endpoint = f"meta/bases/{base_id}/tables" logger.info(f"Executing tool: get_tables_info for base_id: {base_id}") - return await make_airtable_request("GET", endpoint) + raw_response = await make_airtable_request("GET", endpoint) + + tables = [normalize_table(t) for t in raw_response.get("tables", [])] + return { + "count": len(tables), + "tables": tables, + } async def create_table( @@ -31,7 +37,8 @@ async def create_table( payload["description"] = description logger.info(f"Executing tool: create_table for base_id: {base_id}") - return await make_airtable_request("POST", endpoint, json_data=payload) + raw_response = await make_airtable_request("POST", endpoint, json_data=payload) + return normalize_table(raw_response) async def update_table( @@ -50,4 +57,5 @@ async def update_table( payload["description"] = description logger.info(f"Executing tool: update_table for table {table_id} in base {base_id}") - return await make_airtable_request("PATCH", endpoint, json_data=payload) + raw_response = await make_airtable_request("PATCH", endpoint, json_data=payload) + return normalize_table(raw_response) diff --git a/mcp_servers/asana/tools/base.py b/mcp_servers/asana/tools/base.py index 087a4578..d6f2f9af 100644 --- a/mcp_servers/asana/tools/base.py +++ b/mcp_servers/asana/tools/base.py @@ -13,6 +13,220 @@ # Configure logging logger = logging.getLogger(__name__) + +# ============================================================================ +# Response Normalization Utilities +# ============================================================================ + +def get_path(data: Dict, path: str) -> Any: + """Safe dot-notation access. Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: Dict, mapping: Dict[str, Any]) -> Dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + Args: + source: Raw vendor JSON. 
+ mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# ============================================================================ +# Normalization Mapping Rules +# ============================================================================ + +# User normalization rules +USER_RULES = { + "id": "id", + "name": "name", + "email": "email", + "photo": "photo", + "workspaces": lambda x: [ + normalize(ws, WORKSPACE_COMPACT_RULES) for ws in x.get("workspaces", []) + ] if x.get("workspaces") else None, +} + +USER_COMPACT_RULES = { + "id": "id", + "name": "name", +} + +# Workspace normalization rules +WORKSPACE_RULES = { + "id": "id", + "name": "name", + "emailDomains": "email_domains", + "isOrganization": "is_organization", +} + +WORKSPACE_COMPACT_RULES = { + "id": "id", + "name": "name", +} + +# Team normalization rules +TEAM_RULES = { + "id": "id", + "name": "name", + "description": "description", + "organization": lambda x: normalize(x.get("organization", {}), WORKSPACE_COMPACT_RULES) if x.get("organization") else None, + "url": "permalink_url", +} + +TEAM_COMPACT_RULES = { + "id": "id", + "name": "name", +} + +# Tag normalization rules +TAG_RULES = { + "id": "id", + "name": "name", + "color": "color", + "description": "notes", + "workspace": lambda x: normalize(x.get("workspace", {}), WORKSPACE_COMPACT_RULES) if x.get("workspace") else None, +} + +TAG_COMPACT_RULES = { + "id": "id", + "name": "name", +} + +# Project normalization rules +PROJECT_RULES = { + "id": "id", + "name": "name", + "description": "notes", + "color": "color", + "isCompleted": "completed", + "completedAt": "completed_at", + "completedBy": lambda x: normalize(x.get("completed_by", {}), USER_COMPACT_RULES) if x.get("completed_by") else None, + "createdAt": "created_at", + "modifiedAt": "modified_at", + "dueDate": "due_on", + "owner": lambda x: normalize(x.get("owner", {}), USER_COMPACT_RULES) if x.get("owner") else None, + "team": lambda x: normalize(x.get("team", {}), TEAM_COMPACT_RULES) if x.get("team") else None, + "workspace": lambda x: normalize(x.get("workspace", {}), WORKSPACE_COMPACT_RULES) if x.get("workspace") else None, + "url": "permalink_url", + "statusUpdate": "current_status_update", + "members": lambda x: [ + normalize(m, USER_COMPACT_RULES) for m in x.get("members", []) + ] if x.get("members") else None, +} + +PROJECT_COMPACT_RULES = { + "id": "id", + "name": "name", +} + +# Membership normalization rules +MEMBERSHIP_RULES = { + "project": lambda x: normalize(x.get("project", {}), PROJECT_COMPACT_RULES) if x.get("project") else None, + "section": lambda x: {"id": x.get("section", {}).get("id"), "name": x.get("section", {}).get("name")} if x.get("section") else None, +} + +# Task normalization rules +TASK_RULES = { + "id": "id", + "name": "name", + "description": "notes", + "isCompleted": "completed", + "completedAt": "completed_at", + "completedBy": lambda x: normalize(x.get("completed_by", {}), USER_COMPACT_RULES) if x.get("completed_by") else None, + "createdAt": "created_at", + "createdBy": lambda x: normalize(x.get("created_by", {}), USER_COMPACT_RULES) if x.get("created_by") else None, + "dueDate": "due_on", + "startDate": "start_on", + "assignee": lambda x: 
normalize(x.get("assignee", {}), USER_COMPACT_RULES) if x.get("assignee") else None, + "assigneeStatus": "assignee_status", + "approvalStatus": "approval_status", + "subtaskCount": "num_subtasks", + "parent": lambda x: {"id": x.get("parent", {}).get("id"), "name": x.get("parent", {}).get("name")} if x.get("parent") else None, + "workspace": lambda x: normalize(x.get("workspace", {}), WORKSPACE_COMPACT_RULES) if x.get("workspace") else None, + "url": "permalink_url", + "tags": lambda x: [ + normalize(t, TAG_COMPACT_RULES) for t in x.get("tags", []) + ] if x.get("tags") else None, + "memberships": lambda x: [ + normalize(m, MEMBERSHIP_RULES) for m in x.get("memberships", []) + ] if x.get("memberships") else None, + "dependencies": lambda x: [ + {"id": d.get("id")} for d in x.get("dependencies", []) + ] if x.get("dependencies") else None, + "dependents": lambda x: [ + {"id": d.get("id")} for d in x.get("dependents", []) + ] if x.get("dependents") else None, +} + +# Attachment normalization rules +ATTACHMENT_RULES = { + "id": "id", + "name": "name", + "url": "download_url", + "host": "host", + "viewUrl": "view_url", + "createdAt": "created_at", + "parent": lambda x: {"id": x.get("parent", {}).get("id")} if x.get("parent") else None, +} + + +def normalize_task(raw_task: Dict) -> Dict: + """Normalize a single task.""" + return normalize(raw_task, TASK_RULES) + + +def normalize_project(raw_project: Dict) -> Dict: + """Normalize a single project.""" + return normalize(raw_project, PROJECT_RULES) + + +def normalize_workspace(raw_workspace: Dict) -> Dict: + """Normalize a single workspace.""" + return normalize(raw_workspace, WORKSPACE_RULES) + + +def normalize_user(raw_user: Dict) -> Dict: + """Normalize a single user.""" + return normalize(raw_user, USER_RULES) + + +def normalize_team(raw_team: Dict) -> Dict: + """Normalize a single team.""" + return normalize(raw_team, TEAM_RULES) + + +def normalize_tag(raw_tag: Dict) -> Dict: + """Normalize a single tag.""" + return normalize(raw_tag, TAG_RULES) + + +def normalize_attachment(raw_attachment: Dict) -> Dict: + """Normalize a single attachment.""" + return normalize(raw_attachment, ATTACHMENT_RULES) + # Context variable to store the access token for each request auth_token_context: ContextVar[str] = ContextVar('auth_token') diff --git a/mcp_servers/asana/tools/projects.py b/mcp_servers/asana/tools/projects.py index 48f87f0f..7bb086a2 100644 --- a/mcp_servers/asana/tools/projects.py +++ b/mcp_servers/asana/tools/projects.py @@ -9,6 +9,7 @@ get_unique_workspace_id_or_raise_error, remove_none_values, AsanaToolExecutionError, + normalize_project, ) logger = logging.getLogger(__name__) @@ -110,7 +111,7 @@ async def get_project_by_id( f"/projects/{project_id}", params={"opt_fields": ",".join(PROJECT_OPT_FIELDS)}, ) - return {"project": response["data"]} + return {"project": normalize_project(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -174,6 +175,9 @@ async def list_projects( # Trim to requested limit after filtering projects = projects[:limit] + # Normalize projects + projects = [normalize_project(p) for p in projects] + return { "projects": projects, "count": len(projects), diff --git a/mcp_servers/asana/tools/tags.py b/mcp_servers/asana/tools/tags.py index cfd60ada..cbbd5a99 100644 --- a/mcp_servers/asana/tools/tags.py +++ b/mcp_servers/asana/tools/tags.py @@ -8,6 +8,7 @@ get_unique_workspace_id_or_raise_error, remove_none_values, AsanaToolExecutionError, + normalize_tag, ) logger = 
logging.getLogger(__name__) @@ -20,7 +21,7 @@ async def get_tag_by_id( try: client = get_asana_client() response = await client.get(f"/tags/{tag_id}") - return {"tag": response["data"]} + return {"tag": normalize_tag(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -52,7 +53,7 @@ async def create_tag( client = get_asana_client() response = await client.post("/tags", json_data={"data": data}) - return {"tag": response["data"]} + return {"tag": normalize_tag(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -84,9 +85,10 @@ async def list_tags( }), ) + tags = [normalize_tag(t) for t in response["data"]] return { - "tags": response["data"], - "count": len(response["data"]), + "tags": tags, + "count": len(tags), "next_page": get_next_page(response), } diff --git a/mcp_servers/asana/tools/tasks.py b/mcp_servers/asana/tools/tasks.py index 30f401fe..21c2b818 100644 --- a/mcp_servers/asana/tools/tasks.py +++ b/mcp_servers/asana/tools/tasks.py @@ -12,6 +12,8 @@ get_unique_workspace_id_or_raise_error, AsanaToolExecutionError, RetryableToolError, + normalize_task, + normalize_attachment, ) logger = logging.getLogger(__name__) @@ -445,7 +447,7 @@ async def search_tasks( ) tasks_by_id = {task["id"]: task for task in response["data"]} - tasks = list(tasks_by_id.values()) + tasks = [normalize_task(task) for task in tasks_by_id.values()] return {"tasks": tasks, "count": len(tasks)} @@ -468,11 +470,12 @@ async def get_task_by_id( f"/tasks/{task_id}", params={"opt_fields": ",".join(TASK_OPT_FIELDS)}, ) + task = normalize_task(response["data"]) if max_subtasks > 0: max_subtasks = min(max_subtasks, 100) subtasks = await get_subtasks_from_a_task(task_id=task_id, limit=max_subtasks) - response["data"]["subtasks"] = subtasks["subtasks"] - return {"task": response["data"]} + task["subtasks"] = subtasks["subtasks"] + return {"task": task} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -500,9 +503,10 @@ async def get_subtasks_from_a_task( }), ) + subtasks = [normalize_task(subtask) for subtask in response["data"]] return { - "subtasks": response["data"], - "count": len(response["data"]), + "subtasks": subtasks, + "count": len(subtasks), "next_page": get_next_page(response), } @@ -540,7 +544,7 @@ async def update_task( }) response = await client.put(f"/tasks/{task_id}", json_data={"data": update_data}) - return {"task": response["data"]} + return {"task": normalize_task(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -594,7 +598,7 @@ async def create_task( }) response = await client.post("/tasks", json_data={"data": task_data}) - return {"task": response["data"]} + return {"task": normalize_task(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -641,7 +645,7 @@ async def attach_file_to_task( response = await client.post("/attachments", data=data, files=files) - return {"attachment": response["data"]} + return {"attachment": normalize_attachment(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") diff --git a/mcp_servers/asana/tools/teams.py b/mcp_servers/asana/tools/teams.py index d7b27227..52434d4e 100644 --- a/mcp_servers/asana/tools/teams.py +++ b/mcp_servers/asana/tools/teams.py @@ -8,6 +8,7 @@ get_unique_workspace_id_or_raise_error, remove_none_values, AsanaToolExecutionError, + normalize_team, ) logger = logging.getLogger(__name__) @@ -23,7 +24,7 @@ 
async def get_team_by_id( f"/teams/{team_id}", params=remove_none_values({"opt_fields": ",".join(TEAM_OPT_FIELDS)}), ) - return {"team": response["data"]} + return {"team": normalize_team(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -55,9 +56,10 @@ async def list_teams_the_current_user_is_a_member_of( }), ) + teams = [normalize_team(t) for t in response["data"]] return { - "teams": response["data"], - "count": len(response["data"]), + "teams": teams, + "count": len(teams), "next_page": get_next_page(response), } @@ -90,9 +92,10 @@ async def list_teams( }), ) + teams = [normalize_team(t) for t in response["data"]] return { - "teams": response["data"], - "count": len(response["data"]), + "teams": teams, + "count": len(teams), "next_page": get_next_page(response), } diff --git a/mcp_servers/asana/tools/users.py b/mcp_servers/asana/tools/users.py index 8fa4cbef..73b9a451 100644 --- a/mcp_servers/asana/tools/users.py +++ b/mcp_servers/asana/tools/users.py @@ -8,6 +8,7 @@ get_unique_workspace_id_or_raise_error, remove_none_values, AsanaToolExecutionError, + normalize_user, ) logger = logging.getLogger(__name__) @@ -36,9 +37,10 @@ async def list_users( }), ) + users = [normalize_user(u) for u in response["data"]] return { - "users": response["data"], - "count": len(response["data"]), + "users": users, + "count": len(users), "next_page": get_next_page(response), } @@ -57,7 +59,7 @@ async def get_user_by_id( try: client = get_asana_client() response = await client.get(f"/users/{user_id}", params={"opt_fields": ",".join(USER_OPT_FIELDS)}) - return {"user": response["data"]} + return {"user": normalize_user(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") diff --git a/mcp_servers/asana/tools/workspaces.py b/mcp_servers/asana/tools/workspaces.py index 2d6bdf06..2148ccf9 100644 --- a/mcp_servers/asana/tools/workspaces.py +++ b/mcp_servers/asana/tools/workspaces.py @@ -7,6 +7,7 @@ get_next_page, remove_none_values, AsanaToolExecutionError, + normalize_workspace, ) logger = logging.getLogger(__name__) @@ -19,7 +20,7 @@ async def get_workspace_by_id( try: client = get_asana_client() response = await client.get(f"/workspaces/{workspace_id}") - return {"workspace": response["data"]} + return {"workspace": normalize_workspace(response["data"])} except AsanaToolExecutionError as e: logger.error(f"Asana API error: {e}") @@ -47,9 +48,10 @@ async def list_workspaces( }), ) + workspaces = [normalize_workspace(ws) for ws in response["data"]] return { - "workspaces": response["data"], - "count": len(response["data"]), + "workspaces": workspaces, + "count": len(workspaces), "next_page": get_next_page(response), } diff --git a/mcp_servers/attio/index.ts b/mcp_servers/attio/index.ts index 8d6d6146..40d1de02 100644 --- a/mcp_servers/attio/index.ts +++ b/mcp_servers/attio/index.ts @@ -155,9 +155,7 @@ class AttioClient { if (data.name) { recordData.name = data.name; } if (data.email_addresses) { recordData.email_addresses = data.email_addresses; } if (data.phone_numbers) { - for (const phoneNumber of data.phone_numbers) { - recordData.phone_numbers.push({ original_phone_number: phoneNumber }); - } + recordData.phone_numbers = data.phone_numbers.map(phoneNumber => ({ original_phone_number: phoneNumber })); } if (data.job_title) { recordData.job_title = data.job_title; } if (data.description) { recordData.description = data.description; } @@ -206,9 +204,7 @@ class AttioClient { if (data.name) { recordData.name = data.name; } if 
(data.email_addresses) { recordData.email_addresses = data.email_addresses; } if (data.phone_numbers) { - for (const phoneNumber of data.phone_numbers) { - recordData.phone_numbers.push({ original_phone_number: phoneNumber }); - } + recordData.phone_numbers = data.phone_numbers.map(phoneNumber => ({ original_phone_number: phoneNumber })); } if (data.job_title) { recordData.job_title = data.job_title; } if (data.description) { recordData.description = data.description; } diff --git a/mcp_servers/clickup/server.py b/mcp_servers/clickup/server.py index 70df4596..6e983ec7 100644 --- a/mcp_servers/clickup/server.py +++ b/mcp_servers/clickup/server.py @@ -65,7 +65,13 @@ def extract_access_token(request_or_scope) -> str: logger.warning(f"Failed to parse auth data JSON: {e}") return "" - return "" + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" @click.command() @click.option("--port", default=CLICKUP_MCP_SERVER_PORT, help="Port to listen on for HTTP") diff --git a/mcp_servers/clickup/tools/comments.py b/mcp_servers/clickup/tools/comments.py index 64396e45..e6dec249 100644 --- a/mcp_servers/clickup/tools/comments.py +++ b/mcp_servers/clickup/tools/comments.py @@ -1,6 +1,7 @@ import logging from typing import Any, Dict, Optional from .base import make_clickup_request +from .normalize import normalize_comments, normalize_comment # Configure logging logger = logging.getLogger(__name__) @@ -16,7 +17,7 @@ async def get_comments(task_id: str, custom_task_ids: bool = False, team_id: Opt params["team_id"] = team_id result = await make_clickup_request(f"task/{task_id}/comment", params=params) - return result + return normalize_comments(result) except Exception as e: logger.exception(f"Error executing tool get_comments: {e}") raise e @@ -46,7 +47,7 @@ async def create_comment( params["team_id"] = team_id result = await make_clickup_request(f"task/{task_id}/comment", "POST", data, params) - return result + return normalize_comment(result) except Exception as e: logger.exception(f"Error executing tool create_comment: {e}") raise e @@ -68,7 +69,7 @@ async def update_comment( data["resolved"] = resolved result = await make_clickup_request(f"comment/{comment_id}", "PUT", data) - return result + return normalize_comment(result) except Exception as e: logger.exception(f"Error executing tool update_comment: {e}") raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/folders.py b/mcp_servers/clickup/tools/folders.py index 36a032ed..b8752b14 100644 --- a/mcp_servers/clickup/tools/folders.py +++ b/mcp_servers/clickup/tools/folders.py @@ -1,6 +1,7 @@ import logging from typing import Any, Dict, Optional from .base import make_clickup_request +from .normalize import normalize_folders, normalize_folder # Configure logging logger = logging.getLogger(__name__) @@ -10,7 +11,7 @@ async def get_folders(space_id: str) -> Dict[str, Any]: logger.info(f"Executing tool: get_folders with space_id: {space_id}") try: result = await make_clickup_request(f"space/{space_id}/folder") - return result + return normalize_folders(result) except Exception as e: logger.exception(f"Error executing tool get_folders: {e}") raise e @@ -21,7 +22,7 @@ async def create_folder(space_id: str, name: str) -> Dict[str, Any]: try: data = {"name": name} result = await make_clickup_request(f"space/{space_id}/folder", "POST", data) - 
return result + return normalize_folder(result) except Exception as e: logger.exception(f"Error executing tool create_folder: {e}") raise e @@ -32,7 +33,7 @@ async def update_folder(folder_id: str, name: str) -> Dict[str, Any]: try: data = {"name": name} result = await make_clickup_request(f"folder/{folder_id}", "PUT", data) - return result + return normalize_folder(result) except Exception as e: logger.exception(f"Error executing tool update_folder: {e}") raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/lists.py b/mcp_servers/clickup/tools/lists.py index a6cefcdf..305f52fe 100644 --- a/mcp_servers/clickup/tools/lists.py +++ b/mcp_servers/clickup/tools/lists.py @@ -1,6 +1,7 @@ import logging from typing import Any, Dict, Optional from .base import make_clickup_request +from .normalize import normalize_lists, normalize_list # Configure logging logger = logging.getLogger(__name__) @@ -15,7 +16,7 @@ async def get_lists(folder_id: Optional[str] = None, space_id: Optional[str] = N result = await make_clickup_request(f"space/{space_id}/list") else: raise ValueError("Either folder_id or space_id must be provided") - return result + return normalize_lists(result) except Exception as e: logger.exception(f"Error executing tool get_lists: {e}") raise e @@ -51,7 +52,7 @@ async def create_list( result = await make_clickup_request(f"space/{space_id}/list", "POST", data) else: raise ValueError("Either folder_id or space_id must be provided") - return result + return normalize_list(result) except Exception as e: logger.exception(f"Error executing tool create_list: {e}") raise e @@ -83,7 +84,7 @@ async def update_list( data["unset_status"] = unset_status result = await make_clickup_request(f"list/{list_id}", "PUT", data) - return result + return normalize_list(result) except Exception as e: logger.exception(f"Error executing tool update_list: {e}") raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/normalize.py b/mcp_servers/clickup/tools/normalize.py new file mode 100644 index 00000000..bf24d425 --- /dev/null +++ b/mcp_servers/clickup/tools/normalize.py @@ -0,0 +1,301 @@ +""" +Response normalization utilities for ClickUp MCP Server. +Transforms raw vendor responses into Klavis-defined schemas. +""" + +from typing import Any, Dict, List, Optional + + +def get_path(data: Dict, path: str) -> Any: + """Safe dot-notation access. Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: Dict, mapping: Dict[str, Any]) -> Dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + Args: + source: Raw vendor JSON. 
+ mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# ==================== +# Mapping Rules +# ==================== + +USER_RULES = { + "id": "id", + "username": "username", + "email": "email", + "displayName": lambda x: x.get("username") or x.get("email"), + "avatarUrl": "profilePicture", + "initials": "initials", + "color": "color", +} + +MEMBER_RULES = { + "id": "user.id", + "username": "user.username", + "email": "user.email", + "displayName": lambda x: get_path(x, "user.username") or get_path(x, "user.email"), + "avatarUrl": "user.profilePicture", + "initials": "user.initials", + "role": "role", +} + +TEAM_RULES = { + "id": "id", + "name": "name", + "color": "color", + "avatarUrl": "avatar", + "members": lambda x: [ + normalize(m, MEMBER_RULES) for m in x.get("members", []) + ] if x.get("members") else None, +} + +SPACE_RULES = { + "id": "id", + "name": "name", + "isPrivate": "private", + "color": "color", + "isArchived": "archived", +} + +FOLDER_RULES = { + "id": "id", + "name": "name", + "isHidden": "hidden", + "isArchived": "archived", + "spaceId": "space.id", + "taskCount": "task_count", +} + +STATUS_RULES = { + "id": "id", + "name": "status", + "color": "color", + "type": "type", + "orderIndex": "orderindex", +} + +LIST_RULES = { + "id": "id", + "name": "name", + "description": "content", + "isArchived": "archived", + "folderId": "folder.id", + "spaceId": "space.id", + "taskCount": "task_count", + "dueDate": "due_date", + "startDate": "start_date", + "priority": "priority.priority", + "statuses": lambda x: [ + normalize(s, STATUS_RULES) for s in x.get("statuses", []) + ] if x.get("statuses") else None, +} + +ASSIGNEE_RULES = { + "id": "id", + "username": "username", + "email": "email", + "displayName": lambda x: x.get("username") or x.get("email"), + "avatarUrl": "profilePicture", + "initials": "initials", +} + +TAG_RULES = { + "name": "name", + "color": "tag_bg", + "textColor": "tag_fg", +} + +PRIORITY_RULES = { + "level": "priority", + "color": "color", +} + +TASK_RULES = { + "id": "id", + "customId": "custom_id", + "name": "name", + "description": "description", + "textContent": "text_content", + "status": "status.status", + "statusColor": "status.color", + "createdAt": "date_created", + "updatedAt": "date_updated", + "closedAt": "date_closed", + "doneAt": "date_done", + "dueDate": "due_date", + "startDate": "start_date", + "timeEstimate": "time_estimate", + "timeSpent": lambda x: get_path(x, "time_spent.time") or x.get("time_spent"), + "url": "url", + "isArchived": "archived", + "listId": "list.id", + "listName": "list.name", + "folderId": "folder.id", + "folderName": "folder.name", + "spaceId": "space.id", + "parentTaskId": "parent", + "priority": lambda x: x.get("priority", {}).get("priority") if isinstance(x.get("priority"), dict) else x.get("priority"), + "priorityColor": "priority.color", + "creator": lambda x: normalize(x.get("creator", {}), ASSIGNEE_RULES) if x.get("creator") else None, + "assignees": lambda x: [ + normalize(a, ASSIGNEE_RULES) for a in x.get("assignees", []) + ] if x.get("assignees") else None, + "tags": lambda x: [ + normalize(t, TAG_RULES) for t in x.get("tags", []) + ] if x.get("tags") else None, + "subtasks": lambda x: 
[ + normalize_task(s) for s in x.get("subtasks", []) + ] if x.get("subtasks") else None, +} + +COMMENT_RULES = { + "id": "id", + "text": lambda x: x.get("comment_text") or x.get("text_content") or ( + x.get("comment") if isinstance(x.get("comment"), str) else None + ), + "createdAt": "date", + "isResolved": "resolved", + "author": lambda x: normalize(x.get("user", {}), ASSIGNEE_RULES) if x.get("user") else None, + "assignee": lambda x: normalize(x.get("assignee", {}), ASSIGNEE_RULES) if x.get("assignee") else None, +} + + +# ==================== +# Normalize Functions +# ==================== + +def normalize_user(raw: Dict) -> Dict: + """Normalize a user response.""" + if not raw: + return {} + # Handle nested user object from /user endpoint + user_data = raw.get("user", raw) + return normalize(user_data, USER_RULES) + + +def normalize_team(raw: Dict) -> Dict: + """Normalize a single team/workspace.""" + return normalize(raw, TEAM_RULES) + + +def normalize_teams(raw: Dict) -> Dict: + """Normalize teams list response.""" + if not raw: + return {"workspaces": []} + teams = raw.get("teams", []) + return { + "workspaces": [normalize_team(t) for t in teams] + } + + +def normalize_members(raw: Dict) -> Dict: + """Normalize team members response.""" + if not raw: + return {"members": []} + members = raw.get("members", []) + return { + "members": [normalize(m, MEMBER_RULES) for m in members] + } + + +def normalize_space(raw: Dict) -> Dict: + """Normalize a single space.""" + return normalize(raw, SPACE_RULES) + + +def normalize_spaces(raw: Dict) -> Dict: + """Normalize spaces list response.""" + if not raw: + return {"spaces": []} + spaces = raw.get("spaces", []) + return { + "spaces": [normalize_space(s) for s in spaces] + } + + +def normalize_folder(raw: Dict) -> Dict: + """Normalize a single folder.""" + return normalize(raw, FOLDER_RULES) + + +def normalize_folders(raw: Dict) -> Dict: + """Normalize folders list response.""" + if not raw: + return {"folders": []} + folders = raw.get("folders", []) + return { + "folders": [normalize_folder(f) for f in folders] + } + + +def normalize_list(raw: Dict) -> Dict: + """Normalize a single list.""" + return normalize(raw, LIST_RULES) + + +def normalize_lists(raw: Dict) -> Dict: + """Normalize lists response.""" + if not raw: + return {"lists": []} + lists = raw.get("lists", []) + return { + "lists": [normalize_list(lst) for lst in lists] + } + + +def normalize_task(raw: Dict) -> Dict: + """Normalize a single task.""" + return normalize(raw, TASK_RULES) + + +def normalize_tasks(raw: Dict) -> Dict: + """Normalize tasks list response.""" + if not raw: + return {"tasks": []} + tasks = raw.get("tasks", []) + return { + "tasks": [normalize_task(t) for t in tasks] + } + + +def normalize_comment(raw: Dict) -> Dict: + """Normalize a single comment.""" + return normalize(raw, COMMENT_RULES) + + +def normalize_comments(raw: Dict) -> Dict: + """Normalize comments list response.""" + if not raw: + return {"comments": []} + comments = raw.get("comments", []) + return { + "comments": [normalize_comment(c) for c in comments] + } + diff --git a/mcp_servers/clickup/tools/spaces.py b/mcp_servers/clickup/tools/spaces.py index 97b0c16e..d6ba7ec9 100644 --- a/mcp_servers/clickup/tools/spaces.py +++ b/mcp_servers/clickup/tools/spaces.py @@ -1,6 +1,7 @@ import logging from typing import Any, Dict, Optional from .base import make_clickup_request +from .normalize import normalize_spaces, normalize_space # Configure logging logger = logging.getLogger(__name__) @@ -10,7 +11,7 @@ 
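For context, a minimal sketch of how these mapping rules behave, run against a hypothetical raw ClickUp payload (field values invented for illustration; normalize_task and the rules are defined above):

# Hypothetical raw payload; key names mirror the ClickUp API, values are invented.
raw_task = {
    "id": "abc123",
    "name": "Write docs",
    "status": {"status": "in progress", "color": "#4194f6"},
    "priority": {"priority": "high", "color": "#f50000"},
    "assignees": [{"id": 42, "username": "ada", "profilePicture": None}],
    "archived": False,
}

# String rules resolve dot-paths ("status.status" -> "in progress"), lambdas run
# against the whole source dict, and None-valued fields are dropped, so the
# assignee below carries no "avatarUrl" key:
normalize_task(raw_task)
# -> {"id": "abc123", "name": "Write docs", "status": "in progress",
#     "statusColor": "#4194f6", "isArchived": False, "priority": "high",
#     "priorityColor": "#f50000",
#     "assignees": [{"id": 42, "username": "ada", "displayName": "ada"}]}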
async def get_spaces(team_id: str) -> Dict[str, Any]: logger.info(f"Executing tool: get_spaces with team_id: {team_id}") try: result = await make_clickup_request(f"team/{team_id}/space") - return result + return normalize_spaces(result) except Exception as e: logger.exception(f"Error executing tool get_spaces: {e}") raise e @@ -27,7 +28,7 @@ async def create_space(team_id: str, name: str, color: Optional[str] = None, pri data["color"] = color result = await make_clickup_request(f"team/{team_id}/space", "POST", data) - return result + return normalize_space(result) except Exception as e: logger.exception(f"Error executing tool create_space: {e}") raise e @@ -45,7 +46,7 @@ async def update_space(space_id: str, name: Optional[str] = None, color: Optiona data["private"] = private result = await make_clickup_request(f"space/{space_id}", "PUT", data) - return result + return normalize_space(result) except Exception as e: logger.exception(f"Error executing tool update_space: {e}") raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/tasks.py b/mcp_servers/clickup/tools/tasks.py index 115d5fc7..3bdb6c44 100644 --- a/mcp_servers/clickup/tools/tasks.py +++ b/mcp_servers/clickup/tools/tasks.py @@ -1,6 +1,7 @@ import logging from typing import Any, Dict, Optional, List from .base import make_clickup_request +from .normalize import normalize_tasks, normalize_task # Configure logging logger = logging.getLogger(__name__) @@ -57,7 +58,7 @@ async def get_tasks( params["date_updated_lt"] = date_updated_lt result = await make_clickup_request(f"list/{list_id}/task", params=params) - return result + return normalize_tasks(result) except Exception as e: logger.exception(f"Error executing tool get_tasks: {e}") raise e @@ -74,7 +75,7 @@ async def get_task_by_id(task_id: str, custom_task_ids: bool = False, team_id: O params["team_id"] = team_id result = await make_clickup_request(f"task/{task_id}", params=params) - return result + return normalize_task(result) except Exception as e: logger.exception(f"Error executing tool get_task_by_id: {e}") raise e @@ -140,7 +141,7 @@ async def create_task( params["team_id"] = team_id result = await make_clickup_request(f"list/{list_id}/task", "POST", data, params) - return result + return normalize_task(result) except Exception as e: logger.exception(f"Error executing tool create_task: {e}") raise e @@ -199,7 +200,7 @@ async def update_task( params["team_id"] = team_id result = await make_clickup_request(f"task/{task_id}", "PUT", data, params) - return result + return normalize_task(result) except Exception as e: logger.exception(f"Error executing tool update_task: {e}") raise e @@ -220,7 +221,7 @@ async def search_tasks( } result = await make_clickup_request(f"team/{team_id}/task", params=params) - return result + return normalize_tasks(result) except Exception as e: logger.exception(f"Error executing tool search_tasks: {e}") raise e \ No newline at end of file diff --git a/mcp_servers/clickup/tools/teams.py b/mcp_servers/clickup/tools/teams.py index 8736af88..7ba9d8bc 100644 --- a/mcp_servers/clickup/tools/teams.py +++ b/mcp_servers/clickup/tools/teams.py @@ -1,6 +1,7 @@ import logging from typing import Any, Dict from .base import make_clickup_request +from .normalize import normalize_teams # Configure logging logger = logging.getLogger(__name__) @@ -10,7 +11,7 @@ async def get_teams() -> Dict[str, Any]: logger.info("Executing tool: get_teams") try: result = await make_clickup_request("team") - return result + return normalize_teams(result) except 
Exception as e: logger.exception(f"Error executing tool get_teams: {e}") raise e diff --git a/mcp_servers/clickup/tools/users.py b/mcp_servers/clickup/tools/users.py index c9855cbe..f152517e 100644 --- a/mcp_servers/clickup/tools/users.py +++ b/mcp_servers/clickup/tools/users.py @@ -1,6 +1,7 @@ import logging from typing import Any, Dict from .base import make_clickup_request +from .normalize import normalize_user, normalize_members # Configure logging logger = logging.getLogger(__name__) @@ -10,7 +11,7 @@ async def get_user() -> Dict[str, Any]: logger.info("Executing tool: get_user") try: result = await make_clickup_request("user") - return result + return normalize_user(result) except Exception as e: logger.exception(f"Error executing tool get_user: {e}") raise e @@ -20,7 +21,7 @@ async def get_team_members(team_id: str) -> Dict[str, Any]: logger.info(f"Executing tool: get_team_members with team_id: {team_id}") try: result = await make_clickup_request(f"team/{team_id}/member") - return result + return normalize_members(result) except Exception as e: logger.exception(f"Error executing tool get_team_members: {e}") raise e \ No newline at end of file diff --git a/mcp_servers/close/tools/activities.py b/mcp_servers/close/tools/activities.py index 395825ba..a7653e92 100644 --- a/mcp_servers/close/tools/activities.py +++ b/mcp_servers/close/tools/activities.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_activity, ) from .constants import CLOSE_MAX_LIMIT @@ -55,9 +56,9 @@ async def list_activities( response = await client.get("/activity/", params=params) return { - "activities": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "activities": [normalize_activity(a) for a in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -84,8 +85,7 @@ async def search_activities( response = await client.get("/activity/", params=params) return { - "activities": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "activities": [normalize_activity(a) for a in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - diff --git a/mcp_servers/close/tools/activities_calls.py b/mcp_servers/close/tools/activities_calls.py index 0382067b..bca89523 100644 --- a/mcp_servers/close/tools/activities_calls.py +++ b/mcp_servers/close/tools/activities_calls.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_call_activity, ) from .constants import CLOSE_MAX_LIMIT @@ -56,9 +57,9 @@ async def list_calls( response = await client.get("/activity/call/", params=params) return { - "calls": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "calls": [normalize_call_activity(c) for c in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -74,7 +75,7 @@ async def get_call(call_id: str) -> ToolResponse: response = await client.get(f"/activity/call/{call_id}/") - return response + return {"call": normalize_call_activity(response)} async def create_call( @@ -129,7 +130,7 @@ async def create_call( response = await client.post("/activity/call/", json_data=call_data) - return response + return {"call": normalize_call_activity(response)} async def 
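As a side note, the rewritten Close tools share two small conventions; a schematic sketch with invented ids (normalize_call_activity is the helper added to base.py further down):

# 1) List/search tools re-wrap the vendor pagination envelope in camelCase keys:
response = {"data": [{"id": "acti_1"}], "has_more": False, "total_results": 1}
page = {
    "calls": [normalize_call_activity(c) for c in response.get("data", [])],
    "hasMore": response.get("has_more", False),
    "totalCount": response.get("total_results"),
}

# 2) Delete tools no longer echo the raw API body; they acknowledge with the id:
ack = {"success": True, "callId": "acti_1"}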
update_call( @@ -171,7 +172,7 @@ async def update_call( response = await client.put(f"/activity/call/{call_id}/", json_data=call_data) - return response + return {"call": normalize_call_activity(response)} async def delete_call(call_id: str) -> ToolResponse: @@ -184,9 +185,9 @@ async def delete_call(call_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/activity/call/{call_id}/") + await client.delete(f"/activity/call/{call_id}/") - return {"success": True, "call_id": call_id} + return {"success": True, "callId": call_id} async def search_calls( @@ -212,8 +213,7 @@ async def search_calls( response = await client.get("/activity/call/", params=params) return { - "calls": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "calls": [normalize_call_activity(c) for c in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - diff --git a/mcp_servers/close/tools/activities_emails.py b/mcp_servers/close/tools/activities_emails.py index ef9b0abb..b1a91b1e 100644 --- a/mcp_servers/close/tools/activities_emails.py +++ b/mcp_servers/close/tools/activities_emails.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_email_activity, ) from .constants import CLOSE_MAX_LIMIT @@ -53,9 +54,9 @@ async def list_emails( response = await client.get("/activity/email/", params=params) return { - "emails": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "emails": [normalize_email_activity(e) for e in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -71,7 +72,7 @@ async def get_email(email_id: str) -> ToolResponse: response = await client.get(f"/activity/email/{email_id}/") - return response + return {"email": normalize_email_activity(response)} async def create_email( @@ -123,7 +124,7 @@ async def create_email( response = await client.post("/activity/email/", json_data=email_data) - return response + return {"email": normalize_email_activity(response)} async def update_email( @@ -168,7 +169,7 @@ async def update_email( response = await client.put(f"/activity/email/{email_id}/", json_data=email_data) - return response + return {"email": normalize_email_activity(response)} async def delete_email(email_id: str) -> ToolResponse: @@ -181,9 +182,9 @@ async def delete_email(email_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/activity/email/{email_id}/") + await client.delete(f"/activity/email/{email_id}/") - return {"success": True, "email_id": email_id} + return {"success": True, "emailId": email_id} async def send_email( @@ -201,7 +202,7 @@ async def send_email( response = await client.post(f"/activity/email/{email_id}/send/", json_data={}) - return response + return {"email": normalize_email_activity(response)} async def search_emails( @@ -227,8 +228,8 @@ async def search_emails( response = await client.get("/activity/email/", params=params) return { - "emails": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "emails": [normalize_email_activity(e) for e in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } diff --git a/mcp_servers/close/tools/activities_meeting.py 
b/mcp_servers/close/tools/activities_meeting.py index ecd3c821..86b262e5 100644 --- a/mcp_servers/close/tools/activities_meeting.py +++ b/mcp_servers/close/tools/activities_meeting.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_meeting_activity, ) from .constants import CLOSE_MAX_LIMIT @@ -59,9 +60,9 @@ async def list_meetings( response = await client.get("/activity/meeting/", params=params) return { - "meetings": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "meetings": [normalize_meeting_activity(m) for m in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -77,7 +78,7 @@ async def get_meeting(meeting_id: str) -> ToolResponse: response = await client.get(f"/activity/meeting/{meeting_id}/") - return response + return {"meeting": normalize_meeting_activity(response)} async def create_meeting( @@ -137,7 +138,7 @@ async def create_meeting( response = await client.post("/activity/meeting/", json_data=meeting_data) - return response + return {"meeting": normalize_meeting_activity(response)} async def update_meeting( @@ -185,7 +186,7 @@ async def update_meeting( response = await client.put(f"/activity/meeting/{meeting_id}/", json_data=meeting_data) - return response + return {"meeting": normalize_meeting_activity(response)} async def delete_meeting(meeting_id: str) -> ToolResponse: @@ -198,9 +199,9 @@ async def delete_meeting(meeting_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/activity/meeting/{meeting_id}/") + await client.delete(f"/activity/meeting/{meeting_id}/") - return {"success": True, "meeting_id": meeting_id} + return {"success": True, "meetingId": meeting_id} async def search_meetings( @@ -226,8 +227,7 @@ async def search_meetings( response = await client.get("/activity/meeting/", params=params) return { - "meetings": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "meetings": [normalize_meeting_activity(m) for m in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - diff --git a/mcp_servers/close/tools/activities_notes.py b/mcp_servers/close/tools/activities_notes.py index a1ea34fe..6c84a673 100644 --- a/mcp_servers/close/tools/activities_notes.py +++ b/mcp_servers/close/tools/activities_notes.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_note_activity, ) from .constants import CLOSE_MAX_LIMIT @@ -53,9 +54,9 @@ async def list_notes( response = await client.get("/activity/note/", params=params) return { - "notes": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "notes": [normalize_note_activity(n) for n in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -71,7 +72,7 @@ async def get_note(note_id: str) -> ToolResponse: response = await client.get(f"/activity/note/{note_id}/") - return response + return {"note": normalize_note_activity(response)} async def create_note( @@ -102,7 +103,7 @@ async def create_note( response = await client.post("/activity/note/", json_data=note_data) - return response + return {"note": normalize_note_activity(response)} async def update_note( @@ -129,7 +130,7 @@ async def update_note( response 
= await client.put(f"/activity/note/{note_id}/", json_data=note_data) - return response + return {"note": normalize_note_activity(response)} async def delete_note(note_id: str) -> ToolResponse: @@ -142,9 +143,9 @@ async def delete_note(note_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/activity/note/{note_id}/") + await client.delete(f"/activity/note/{note_id}/") - return {"success": True, "note_id": note_id} + return {"success": True, "noteId": note_id} async def search_notes( @@ -170,8 +171,7 @@ async def search_notes( response = await client.get("/activity/note/", params=params) return { - "notes": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "notes": [normalize_note_activity(n) for n in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - diff --git a/mcp_servers/close/tools/activities_sms.py b/mcp_servers/close/tools/activities_sms.py index d7fa329f..8ec33fbd 100644 --- a/mcp_servers/close/tools/activities_sms.py +++ b/mcp_servers/close/tools/activities_sms.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_sms_activity, ) from .constants import CLOSE_MAX_LIMIT @@ -53,9 +54,9 @@ async def list_sms( response = await client.get("/activity/sms/", params=params) return { - "sms": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "messages": [normalize_sms_activity(s) for s in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -71,7 +72,7 @@ async def get_sms(sms_id: str) -> ToolResponse: response = await client.get(f"/activity/sms/{sms_id}/") - return response + return {"message": normalize_sms_activity(response)} async def create_sms( @@ -120,7 +121,7 @@ async def create_sms( response = await client.post("/activity/sms/", json_data=sms_data) - return response + return {"message": normalize_sms_activity(response)} async def update_sms( @@ -156,7 +157,7 @@ async def update_sms( response = await client.put(f"/activity/sms/{sms_id}/", json_data=sms_data) - return response + return {"message": normalize_sms_activity(response)} async def delete_sms(sms_id: str) -> ToolResponse: @@ -169,9 +170,9 @@ async def delete_sms(sms_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/activity/sms/{sms_id}/") + await client.delete(f"/activity/sms/{sms_id}/") - return {"success": True, "sms_id": sms_id} + return {"success": True, "messageId": sms_id} async def send_sms( @@ -189,7 +190,7 @@ async def send_sms( response = await client.post(f"/activity/sms/{sms_id}/send/", json_data={}) - return response + return {"message": normalize_sms_activity(response)} async def search_sms( @@ -215,8 +216,7 @@ async def search_sms( response = await client.get("/activity/sms/", params=params) return { - "sms": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "messages": [normalize_sms_activity(s) for s in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - diff --git a/mcp_servers/close/tools/activities_whatsapp.py b/mcp_servers/close/tools/activities_whatsapp.py index 7600e77b..2b57649c 100644 --- a/mcp_servers/close/tools/activities_whatsapp.py +++ 
b/mcp_servers/close/tools/activities_whatsapp.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_whatsapp_activity, ) from .constants import CLOSE_MAX_LIMIT @@ -53,9 +54,9 @@ async def list_whatsapp( response = await client.get("/activity/whatsapp_message/", params=params) return { - "whatsapp_messages": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "messages": [normalize_whatsapp_activity(m) for m in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -71,7 +72,7 @@ async def get_whatsapp(whatsapp_id: str) -> ToolResponse: response = await client.get(f"/activity/whatsapp_message/{whatsapp_id}/") - return response + return {"message": normalize_whatsapp_activity(response)} async def create_whatsapp( @@ -140,7 +141,7 @@ async def create_whatsapp( response = await client.post("/activity/whatsapp_message/", json_data=whatsapp_data, params=params if params else None) - return response + return {"message": normalize_whatsapp_activity(response)} async def update_whatsapp( @@ -181,7 +182,7 @@ async def update_whatsapp( response = await client.put(f"/activity/whatsapp_message/{whatsapp_id}/", json_data=whatsapp_data) - return response + return {"message": normalize_whatsapp_activity(response)} async def delete_whatsapp(whatsapp_id: str) -> ToolResponse: @@ -194,9 +195,9 @@ async def delete_whatsapp(whatsapp_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/activity/whatsapp_message/{whatsapp_id}/") + await client.delete(f"/activity/whatsapp_message/{whatsapp_id}/") - return {"success": True, "whatsapp_id": whatsapp_id} + return {"success": True, "messageId": whatsapp_id} async def search_whatsapp( @@ -222,8 +223,7 @@ async def search_whatsapp( response = await client.get("/activity/whatsapp_message/", params=params) return { - "whatsapp_messages": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "messages": [normalize_whatsapp_activity(m) for m in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - diff --git a/mcp_servers/close/tools/base.py b/mcp_servers/close/tools/base.py index bd44db25..c0fb3157 100644 --- a/mcp_servers/close/tools/base.py +++ b/mcp_servers/close/tools/base.py @@ -2,7 +2,7 @@ import json from dataclasses import dataclass import logging -from typing import Any, Dict, Optional, cast +from typing import Any, Dict, List, Optional, Callable, Union, cast from contextvars import ContextVar from functools import wraps @@ -19,7 +19,359 @@ # Type definitions ToolResponse = dict[str, Any] + +# ============================================================================ +# Response Normalization Utilities (Klavis Interface Layer) +# ============================================================================ +# These utilities transform raw API responses into Klavis-defined schemas, +# mapping vendor field names to stable camelCase keys and dropping null values. + +def get_path(data: Dict, path: str) -> Any: + """Safe dot-notation access. Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: Dict, mapping: Dict[str, Any]) -> Dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. 
+ Excludes fields with None/null values from the output. + Args: + source: Raw API JSON. + mapping: Dict of { "targetFieldName": "source.path" OR lambda_function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +def normalize_list(items: List[Dict], mapping: Dict[str, Any]) -> List[Dict]: + """Normalize a list of items using the provided mapping.""" + return [normalize(item, mapping) for item in items if item] + + +# ============================================================================ +# Klavis Interface Mapping Rules +# ============================================================================ + +# Email/Phone nested object rules +EMAIL_RULES = { + "address": "email", + "type": "type", +} + +PHONE_RULES = { + "number": "phone", + "type": "type", +} + +URL_RULES = { + "url": "url", + "type": "type", +} + +ADDRESS_RULES = { + "line1": "address_1", + "line2": "address_2", + "city": "city", + "state": "state", + "postalCode": "zipcode", + "country": "country", +} + +# Contact rules +CONTACT_RULES = { + "id": "id", + "leadId": "lead_id", + "name": "name", + "firstName": "first_name", + "lastName": "last_name", + "title": "title", + "emails": lambda x: normalize_list(x.get("emails", []), EMAIL_RULES) if x.get("emails") else None, + "phones": lambda x: normalize_list(x.get("phones", []), PHONE_RULES) if x.get("phones") else None, + "urls": lambda x: normalize_list(x.get("urls", []), URL_RULES) if x.get("urls") else None, + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# Lead rules +LEAD_RULES = { + "id": "id", + "name": "name", + "description": "description", + "statusId": "status_id", + "statusLabel": "status_label", + "url": "url", + "addresses": lambda x: normalize_list(x.get("addresses", []), ADDRESS_RULES) if x.get("addresses") else None, + "contacts": lambda x: normalize_list(x.get("contacts", []), CONTACT_RULES) if x.get("contacts") else None, + "createdAt": "date_created", + "updatedAt": "date_updated", + "createdBy": "created_by", + "updatedBy": "updated_by", +} + +# Opportunity rules +OPPORTUNITY_RULES = { + "id": "id", + "leadId": "lead_id", + "leadName": "lead_name", + "contactId": "contact_id", + "contactName": "contact_name", + "userId": "user_id", + "userName": "user_name", + "statusId": "status_id", + "statusLabel": "status_label", + "statusType": "status_type", + "note": "note", + "confidence": "confidence", + "value": "value", + "valueCurrency": "value_currency", + "valuePeriod": "value_period", + "valueFormatted": "value_formatted", + "expectedValue": "expected_value", + "annualizedValue": "annualized_value", + "annualizedExpectedValue": "annualized_expected_value", + "expectedCloseDate": "date_won", + "dateLost": "date_lost", + "createdAt": "date_created", + "updatedAt": "date_updated", + "createdBy": "created_by", + "updatedBy": "updated_by", +} + +# Task rules +TASK_RULES = { + "id": "id", + "leadId": "lead_id", + "leadName": "lead_name", + "assignedTo": "assigned_to", + "assignedToName": "assigned_to_name", + "text": "text", + "dueDate": "date", + "isComplete": "is_complete", + "isDateless": "is_dateless", + "type": "_type", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# User rules +USER_RULES = { + "id": "id", + "email": "email", + "firstName": "first_name", + 
"lastName": "last_name", + "displayName": lambda x: f"{x.get('first_name', '')} {x.get('last_name', '')}".strip() or x.get('email'), + "image": "image", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# Email Activity rules +EMAIL_ACTIVITY_RULES = { + "id": "id", + "leadId": "lead_id", + "contactId": "contact_id", + "userId": "user_id", + "direction": "direction", + "status": "status", + "subject": "subject", + "bodyText": "body_text", + "bodyHtml": "body_html", + "sender": "sender", + "to": "to", + "cc": "cc", + "bcc": "bcc", + "templateId": "template_id", + "createdAt": "date_created", + "updatedAt": "date_updated", + "sentAt": "date_sent", +} + +# Call Activity rules +CALL_ACTIVITY_RULES = { + "id": "id", + "leadId": "lead_id", + "contactId": "contact_id", + "userId": "user_id", + "direction": "direction", + "disposition": "disposition", + "durationSeconds": "duration", + "phone": "phone", + "note": "note", + "recordingUrl": "recording_url", + "voicemailUrl": "voicemail_url", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# SMS Activity rules +SMS_ACTIVITY_RULES = { + "id": "id", + "leadId": "lead_id", + "contactId": "contact_id", + "userId": "user_id", + "direction": "direction", + "status": "status", + "text": "text", + "remotePhone": "remote_phone", + "localPhone": "local_phone", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# Note Activity rules +NOTE_ACTIVITY_RULES = { + "id": "id", + "leadId": "lead_id", + "userId": "user_id", + "note": "note", + "noteHtml": "note_html", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# Meeting Activity rules +MEETING_ATTENDEE_RULES = { + "contactId": "contact_id", + "status": "status", +} + +MEETING_ACTIVITY_RULES = { + "id": "id", + "leadId": "lead_id", + "userId": "user_id", + "status": "status", + "startsAt": "starts_at", + "endsAt": "ends_at", + "attendees": lambda x: normalize_list(x.get("attendees", []), MEETING_ATTENDEE_RULES) if x.get("attendees") else None, + "noteHtml": "user_note_html", + "outcomeId": "outcome_id", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# WhatsApp Activity rules +WHATSAPP_ATTACHMENT_RULES = { + "url": "url", + "filename": "filename", + "contentType": "content_type", +} + +WHATSAPP_ACTIVITY_RULES = { + "id": "id", + "leadId": "lead_id", + "contactId": "contact_id", + "userId": "user_id", + "direction": "direction", + "externalMessageId": "external_whatsapp_message_id", + "messageMarkdown": "message_markdown", + "attachments": lambda x: normalize_list(x.get("attachments", []), WHATSAPP_ATTACHMENT_RULES) if x.get("attachments") else None, + "integrationLink": "integration_link", + "replyToId": "response_to_id", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + +# Generic Activity rules (for list_activities) +ACTIVITY_RULES = { + "id": "id", + "type": "_type", + "leadId": "lead_id", + "userId": "user_id", + "createdAt": "date_created", + "updatedAt": "date_updated", +} + + +# ============================================================================ +# Normalization Functions +# ============================================================================ + +def normalize_lead(raw: Dict) -> Dict: + """Normalize a single lead response.""" + lead = normalize(raw, LEAD_RULES) + # Handle nested opportunities + if raw.get("opportunities"): + lead["opportunities"] = normalize_list(raw["opportunities"], OPPORTUNITY_RULES) + return lead + + +def normalize_contact(raw: Dict) -> Dict: + """Normalize a 
single contact response.""" + return normalize(raw, CONTACT_RULES) + + +def normalize_opportunity(raw: Dict) -> Dict: + """Normalize a single opportunity response.""" + return normalize(raw, OPPORTUNITY_RULES) + + +def normalize_task(raw: Dict) -> Dict: + """Normalize a single task response.""" + return normalize(raw, TASK_RULES) + + +def normalize_user(raw: Dict) -> Dict: + """Normalize a single user response.""" + return normalize(raw, USER_RULES) + + +def normalize_email_activity(raw: Dict) -> Dict: + """Normalize a single email activity response.""" + return normalize(raw, EMAIL_ACTIVITY_RULES) + + +def normalize_call_activity(raw: Dict) -> Dict: + """Normalize a single call activity response.""" + return normalize(raw, CALL_ACTIVITY_RULES) + + +def normalize_sms_activity(raw: Dict) -> Dict: + """Normalize a single SMS activity response.""" + return normalize(raw, SMS_ACTIVITY_RULES) + + +def normalize_note_activity(raw: Dict) -> Dict: + """Normalize a single note activity response.""" + return normalize(raw, NOTE_ACTIVITY_RULES) + + +def normalize_meeting_activity(raw: Dict) -> Dict: + """Normalize a single meeting activity response.""" + return normalize(raw, MEETING_ACTIVITY_RULES) + + +def normalize_whatsapp_activity(raw: Dict) -> Dict: + """Normalize a single WhatsApp activity response.""" + return normalize(raw, WHATSAPP_ACTIVITY_RULES) + + +def normalize_activity(raw: Dict) -> Dict: + """Normalize a generic activity response.""" + return normalize(raw, ACTIVITY_RULES) + + +# ============================================================================ # Exception classes +# ============================================================================ + class ToolExecutionError(Exception): def __init__(self, message: str, developer_message: str = ""): super().__init__(message) @@ -47,68 +399,14 @@ def __init__(self, message: str, additional_prompt_content: str = "", retry_afte self.developer_message = developer_message +# ============================================================================ # Utility functions +# ============================================================================ + def remove_none_values(data: dict[str, Any]) -> dict[str, Any]: return {k: v for k, v in data.items() if v is not None} -def format_currency_from_cents(amount_cents: Optional[int], currency: str = "USD") -> Optional[str]: - """Convert cents to formatted currency string.""" - if amount_cents is None: - return None - amount_dollars = amount_cents / 100 - if currency == "USD": - return f"${amount_dollars:,.2f}" - return f"{amount_dollars:,.2f} {currency}" - - -def format_opportunity_values(opportunity: dict[str, Any]) -> dict[str, Any]: - """Format opportunity monetary values from cents to readable currency strings.""" - formatted_opp = opportunity.copy() - - # List of fields that contain monetary values in cents - money_fields = ['value', 'expected_value', 'annualized_value', 'annualized_expected_value'] - - for field in money_fields: - if field in formatted_opp and formatted_opp[field] is not None: - # Store original value with _cents suffix for reference - formatted_opp[f"{field}_cents"] = formatted_opp[field] - # Replace with formatted dollar amount - currency = formatted_opp.get('value_currency', 'USD') - formatted_opp[field] = format_currency_from_cents(formatted_opp[field], currency) - - return formatted_opp - - -def format_leads_response(response: dict[str, Any]) -> dict[str, Any]: - """Format lead response to convert opportunity values from cents to dollars.""" - formatted_response 
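A quick sketch of the lead normalizer on a hypothetical raw Close payload (ids and values invented; normalize_lead and the rules are defined above):

raw_lead = {
    "id": "lead_1",
    "name": "Acme",
    "status_id": "stat_1",
    "status_label": "Qualified",
    "date_created": "2024-01-01T00:00:00Z",
    "contacts": [{
        "id": "cont_1",
        "lead_id": "lead_1",
        "name": "Ada Lovelace",
        "emails": [{"email": "ada@acme.test", "type": "office"}],
    }],
    "opportunities": [{"id": "oppo_1", "value": 150000, "value_currency": "USD"}],
}

# Nested contacts go through CONTACT_RULES (and emails through EMAIL_RULES),
# while opportunities are attached separately via OPPORTUNITY_RULES:
normalize_lead(raw_lead)
# -> {"id": "lead_1", "name": "Acme", "statusId": "stat_1", "statusLabel": "Qualified",
#     "contacts": [{"id": "cont_1", "leadId": "lead_1", "name": "Ada Lovelace",
#                   "emails": [{"address": "ada@acme.test", "type": "office"}]}],
#     "createdAt": "2024-01-01T00:00:00Z",
#     "opportunities": [{"id": "oppo_1", "value": 150000, "valueCurrency": "USD"}]}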
= response.copy() - - if 'leads' in formatted_response: - formatted_leads = [] - for lead in formatted_response['leads']: - formatted_lead = lead.copy() - if 'opportunities' in formatted_lead: - formatted_opportunities = [] - for opp in formatted_lead['opportunities']: - formatted_opportunities.append(format_opportunity_values(opp)) - formatted_lead['opportunities'] = formatted_opportunities - formatted_leads.append(formatted_lead) - formatted_response['leads'] = formatted_leads - - return formatted_response - - -def get_next_page(response: dict[str, Any]) -> dict[str, Any]: - """Extract next page information from response.""" - has_more = response.get("has_more", False) - next_cursor = response.get("next_cursor") - return { - "has_more": has_more, - "next_cursor": next_cursor - } - - # Decorator function to clean Close response def clean_close_response(func): def response_cleaner(data: dict[str, Any]) -> dict[str, Any]: @@ -234,6 +532,7 @@ async def post( json_data: Optional[dict] = None, files: Optional[dict] = None, headers: Optional[dict] = None, + params: Optional[dict] = None, api_version: str | None = None, ) -> dict: default_headers = { @@ -252,6 +551,9 @@ async def post( "timeout": CLOSE_MAX_TIMEOUT_SECONDS, } + if params: + kwargs["params"] = params + if files is not None: kwargs["files"] = files if data is not None: @@ -324,7 +626,6 @@ async def get_current_user(self) -> dict: def get_close_client() -> CloseClient: access_token = get_auth_token() - logger.info(f"Access Token: {access_token}") return CloseClient(access_token=access_token) diff --git a/mcp_servers/close/tools/contacts.py b/mcp_servers/close/tools/contacts.py index 3c2733d1..4c21eef2 100644 --- a/mcp_servers/close/tools/contacts.py +++ b/mcp_servers/close/tools/contacts.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_contact, ) from .constants import CLOSE_MAX_LIMIT @@ -33,9 +34,9 @@ async def list_contacts( response = await client.get("/contact/", params=params) return { - "contacts": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "contacts": [normalize_contact(c) for c in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -46,7 +47,7 @@ async def get_contact(contact_id: str) -> ToolResponse: response = await client.get(f"/contact/{contact_id}/") - return response + return {"contact": normalize_contact(response)} async def create_contact( @@ -74,7 +75,7 @@ async def create_contact( response = await client.post("/contact/", json_data=contact_data) - return response + return {"contact": normalize_contact(response)} async def update_contact( @@ -104,7 +105,7 @@ async def update_contact( response = await client.put(f"/contact/{contact_id}/", json_data=contact_data) - return response + return {"contact": normalize_contact(response)} async def delete_contact(contact_id: str) -> ToolResponse: @@ -112,9 +113,9 @@ async def delete_contact(contact_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/contact/{contact_id}/") + await client.delete(f"/contact/{contact_id}/") - return {"success": True, "contact_id": contact_id} + return {"success": True, "contactId": contact_id} async def search_contacts( @@ -134,7 +135,7 @@ async def search_contacts( response = await client.get("/contact/", params=params) return { - "contacts": response.get("data", []), - "has_more": response.get("has_more", False), - 
"total_results": response.get("total_results"), + "contacts": [normalize_contact(c) for c in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } \ No newline at end of file diff --git a/mcp_servers/close/tools/leads.py b/mcp_servers/close/tools/leads.py index 6acc0d82..64a1a847 100644 --- a/mcp_servers/close/tools/leads.py +++ b/mcp_servers/close/tools/leads.py @@ -6,7 +6,7 @@ ToolResponse, get_close_client, remove_none_values, - format_leads_response, + normalize_lead, ) from .constants import CLOSE_MAX_LIMIT @@ -33,13 +33,11 @@ async def list_leads( response = await client.get("/lead/", params=params) - result = { - "leads": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + return { + "leads": [normalize_lead(lead) for lead in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - - return format_leads_response(result) async def get_lead(lead_id: str, fields: Optional[str] = None) -> ToolResponse: @@ -53,13 +51,7 @@ async def get_lead(lead_id: str, fields: Optional[str] = None) -> ToolResponse: response = await client.get(f"/lead/{lead_id}/", params=params) - # Format opportunities if they exist in the lead - if 'opportunities' in response: - result = {"leads": [response]} - formatted = format_leads_response(result) - return formatted["leads"][0] - - return response + return {"lead": normalize_lead(response)} async def create_lead( @@ -87,7 +79,7 @@ async def create_lead( response = await client.post("/lead/", json_data=lead_data) - return response + return {"lead": normalize_lead(response)} async def update_lead( @@ -115,7 +107,7 @@ async def update_lead( response = await client.put(f"/lead/{lead_id}/", json_data=lead_data) - return response + return {"lead": normalize_lead(response)} async def delete_lead(lead_id: str) -> ToolResponse: @@ -123,9 +115,9 @@ async def delete_lead(lead_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/lead/{lead_id}/") + await client.delete(f"/lead/{lead_id}/") - return {"success": True, "lead_id": lead_id} + return {"success": True, "leadId": lead_id} async def search_leads( @@ -146,13 +138,11 @@ async def search_leads( response = await client.get("/lead/", params=params) - result = { - "leads": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + return { + "leads": [normalize_lead(lead) for lead in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } - - return format_leads_response(result) async def merge_leads( @@ -170,4 +160,4 @@ async def merge_leads( response = await client.post("/lead/merge/", json_data=merge_data) - return response \ No newline at end of file + return {"lead": normalize_lead(response)} \ No newline at end of file diff --git a/mcp_servers/close/tools/opportunities.py b/mcp_servers/close/tools/opportunities.py index 8208bb59..25acb677 100644 --- a/mcp_servers/close/tools/opportunities.py +++ b/mcp_servers/close/tools/opportunities.py @@ -6,7 +6,7 @@ ToolResponse, get_close_client, remove_none_values, - format_opportunity_values, + normalize_opportunity, ) from .constants import CLOSE_MAX_LIMIT @@ -33,15 +33,10 @@ async def list_opportunities( response = await client.get("/opportunity/", params=params) - # Format monetary values in opportunities 
- formatted_opportunities = [] - for opp in response.get("data", []): - formatted_opportunities.append(format_opportunity_values(opp)) - return { - "opportunities": formatted_opportunities, - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "opportunities": [normalize_opportunity(opp) for opp in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -52,7 +47,7 @@ async def get_opportunity(opportunity_id: str) -> ToolResponse: response = await client.get(f"/opportunity/{opportunity_id}/") - return format_opportunity_values(response) + return {"opportunity": normalize_opportunity(response)} async def create_opportunity( @@ -82,7 +77,7 @@ async def create_opportunity( response = await client.post("/opportunity/", json_data=opportunity_data) - return format_opportunity_values(response) + return {"opportunity": normalize_opportunity(response)} async def update_opportunity( @@ -114,7 +109,7 @@ async def update_opportunity( response = await client.put(f"/opportunity/{opportunity_id}/", json_data=opportunity_data) - return format_opportunity_values(response) + return {"opportunity": normalize_opportunity(response)} async def delete_opportunity(opportunity_id: str) -> ToolResponse: @@ -122,9 +117,9 @@ async def delete_opportunity(opportunity_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/opportunity/{opportunity_id}/") + await client.delete(f"/opportunity/{opportunity_id}/") - return {"success": True, "opportunity_id": opportunity_id} + return {"success": True, "opportunityId": opportunity_id} async def search_opportunities( @@ -143,13 +138,8 @@ async def search_opportunities( response = await client.get("/opportunity/", params=params) - # Format monetary values in opportunities - formatted_opportunities = [] - for opp in response.get("data", []): - formatted_opportunities.append(format_opportunity_values(opp)) - return { - "opportunities": formatted_opportunities, - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "opportunities": [normalize_opportunity(opp) for opp in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } \ No newline at end of file diff --git a/mcp_servers/close/tools/tasks.py b/mcp_servers/close/tools/tasks.py index 038f3fbc..ad1ed324 100644 --- a/mcp_servers/close/tools/tasks.py +++ b/mcp_servers/close/tools/tasks.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_task, ) from .constants import CLOSE_MAX_LIMIT, TaskType @@ -39,9 +40,9 @@ async def list_tasks( response = await client.get("/task/", params=params) return { - "tasks": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "tasks": [normalize_task(t) for t in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -52,7 +53,7 @@ async def get_task(task_id: str) -> ToolResponse: response = await client.get(f"/task/{task_id}/") - return response + return {"task": normalize_task(response)} async def create_task( @@ -77,7 +78,7 @@ async def create_task( response = await client.post("/task/", json_data=task_data) - return response + return {"task": normalize_task(response)} async def update_task( @@ -104,7 +105,7 @@ async def update_task( response = await 
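One consequence worth flagging: with format_opportunity_values removed, monetary fields now pass through as the raw integer cents that Close stores (as the deleted helper's conversion implies). A caller that wants display strings can format client-side; a minimal sketch, assuming the same cents convention:

def format_cents(amount_cents: int | None, currency: str = "USD") -> str | None:
    # Mirrors the removed format_currency_from_cents helper, on the caller's side.
    if amount_cents is None:
        return None
    amount = amount_cents / 100
    return f"${amount:,.2f}" if currency == "USD" else f"{amount:,.2f} {currency}"

opp = {"value": 150000, "valueCurrency": "USD"}  # invented example
print(format_cents(opp["value"], opp["valueCurrency"]))  # $1,500.00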
client.put(f"/task/{task_id}/", json_data=task_data) - return response + return {"task": normalize_task(response)} async def delete_task(task_id: str) -> ToolResponse: @@ -112,9 +113,9 @@ async def delete_task(task_id: str) -> ToolResponse: client = get_close_client() - response = await client.delete(f"/task/{task_id}/") + await client.delete(f"/task/{task_id}/") - return {"success": True, "task_id": task_id} + return {"success": True, "taskId": task_id} async def bulk_update_tasks( @@ -144,7 +145,7 @@ async def bulk_update_tasks( response = await client.put("/task/", params=params, json_data=task_data) - return response + return {"tasks": [normalize_task(t) for t in response.get("data", [])]} async def search_tasks( @@ -166,7 +167,7 @@ async def search_tasks( response = await client.get("/task/", params=params) return { - "tasks": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "tasks": [normalize_task(t) for t in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } \ No newline at end of file diff --git a/mcp_servers/close/tools/users.py b/mcp_servers/close/tools/users.py index 64e56b0c..32f0c99c 100644 --- a/mcp_servers/close/tools/users.py +++ b/mcp_servers/close/tools/users.py @@ -6,6 +6,7 @@ ToolResponse, get_close_client, remove_none_values, + normalize_user, ) from .constants import CLOSE_MAX_LIMIT @@ -19,7 +20,7 @@ async def get_current_user() -> ToolResponse: response = await client.get_current_user() - return response + return {"user": normalize_user(response)} async def list_users( @@ -39,9 +40,9 @@ async def list_users( response = await client.get("/user/", params=params) return { - "users": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "users": [normalize_user(u) for u in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } @@ -52,7 +53,7 @@ async def get_user(user_id: str) -> ToolResponse: response = await client.get(f"/user/{user_id}/") - return response + return {"user": normalize_user(response)} async def search_users( @@ -72,7 +73,7 @@ async def search_users( response = await client.get("/user/", params=params) return { - "users": response.get("data", []), - "has_more": response.get("has_more", False), - "total_results": response.get("total_results"), + "users": [normalize_user(u) for u in response.get("data", [])], + "hasMore": response.get("has_more", False), + "totalCount": response.get("total_results"), } \ No newline at end of file diff --git a/mcp_servers/confluence/client.py b/mcp_servers/confluence/client.py index a8bfd0d0..8cfa23f1 100644 --- a/mcp_servers/confluence/client.py +++ b/mcp_servers/confluence/client.py @@ -17,6 +17,13 @@ build_hierarchy, remove_none_values, ) +from normalizers import ( + normalize_page, + normalize_space, + normalize_attachment, + normalize_search_result, + normalize_tree_node, +) # Set up logging logger = logging.getLogger(__name__) @@ -322,15 +329,8 @@ def transform_search_content_response( base_url = response.get("_links", {}).get("base", "") transformed_results = [] for result in response.get("results", []): - content = result.get("content", {}) - transformed_result = { - "id": content.get("id"), - "title": content.get("title"), - "type": content.get("type"), - "status": content.get("status"), - "excerpt": result.get("excerpt"), - "url": 
f"{base_url}{result.get('url')}", - } + url = f"{base_url}{result.get('url')}" + transformed_result = normalize_search_result(result, url=url) transformed_results.append(transformed_result) return {"results": transformed_results} @@ -343,33 +343,28 @@ def __init__(self): logger.warning("No auth token found in context") super().__init__(auth_token, api_version=ConfluenceAPIVersion.V2) - def _transform_links( - self, response: dict[str, Any], base_url: str | None = None - ) -> dict[str, Any]: + def _build_url(self, response: dict[str, Any], base_url: str | None = None) -> str | None: """ - Transform the links in a page response by converting relative URLs to absolute URLs. - + Build absolute URL from response _links. + Args: - response: A page object from the API - base_url: The base URL to use for the transformation - + response: A raw response object from the API + base_url: Optional base URL override + Returns: - The transformed response + The absolute URL or None if not available """ - result = response.copy() - if "_links" in result: - base_url = base_url or result["_links"].get("base", "") - webui_path = result["_links"].get("webui", "") - result["url"] = f"{base_url}{webui_path}" - del result["_links"] - return result + if "_links" not in response: + return None + base = base_url or response["_links"].get("base", "") + webui_path = response["_links"].get("webui", "") + return f"{base}{webui_path}" if webui_path else None def transform_get_spaces_response( self, response: dict[str, Any] ) -> dict[str, list[dict[str, Any]]]: """ - Transform the response from the GET /spaces endpoint by converting relative webui paths - to absolute URLs using the base URL from the response. + Transform the response from the GET /spaces endpoint using Klavis normalizers. 
""" pagination_token = parse_qs(urlparse(response.get("_links", {}).get("next", "")).query).get( "cursor", @@ -381,48 +376,53 @@ def transform_get_spaces_response( transformed_results = [] for space in results: - space_copy = space.copy() - if "_links" in space_copy and "webui" in space_copy["_links"]: - webui_path = space_copy["_links"]["webui"] - space_copy["url"] = base_url + webui_path - del space_copy["_links"] - transformed_results.append(space_copy) - - results = {"spaces": transformed_results, "pagination_token": pagination_token} + url = self._build_url(space, base_url) + normalized = normalize_space(space, url=url) + transformed_results.append(normalized) + + results = {"spaces": transformed_results, "paginationToken": pagination_token} return remove_none_values(results) def transform_list_pages_response(self, response: dict[str, Any]) -> dict[str, Any]: - """Transform the response from the GET /pages endpoint.""" + """Transform the response from the GET /pages endpoint using Klavis normalizers.""" pagination_token = parse_qs(urlparse(response.get("_links", {}).get("next", "")).query).get( "cursor", [None], # type: ignore[list-item] )[0] base_url = response.get("_links", {}).get("base", "") - pages = [self._transform_links(page, base_url) for page in response["results"]] - results = {"pages": pages, "pagination_token": pagination_token} + pages = [] + for page in response["results"]: + url = self._build_url(page, base_url) + pages.append(normalize_page(page, url=url)) + results = {"pages": pages, "paginationToken": pagination_token} return remove_none_values(results) def transform_get_multiple_pages_response( self, response: dict[str, Any] ) -> dict[str, list[dict[str, Any]]]: - """Transform the response from the GET /pages endpoint.""" + """Transform the response from the GET /pages endpoint using Klavis normalizers.""" base_url = response.get("_links", {}).get("base", "") - pages = [self._transform_links(page, base_url) for page in response["results"]] + pages = [] + for page in response["results"]: + url = self._build_url(page, base_url) + pages.append(normalize_page(page, url=url)) return {"pages": pages} def transform_space_response( self, response: dict[str, Any], base_url: str | None = None ) -> dict[str, dict[str, Any]]: - """Transform API responses that return a space object.""" - return {"space": self._transform_links(response, base_url)} + """Transform API responses that return a space object using Klavis normalizers.""" + url = self._build_url(response, base_url) + return {"space": normalize_space(response, url=url)} def transform_page_response(self, response: dict[str, Any]) -> dict[str, dict[str, Any]]: - """Transform API responses that return a page object.""" - return {"page": self._transform_links(response)} + """Transform API responses that return a page object using Klavis normalizers.""" + url = self._build_url(response) + return {"page": normalize_page(response, url=url)} def transform_get_attachments_response(self, response: dict[str, Any]) -> dict[str, Any]: - """Transform the response from the GET /pages/{id}/attachments endpoint.""" + """Transform the response from the GET /pages/{id}/attachments endpoint using Klavis normalizers.""" pagination_token = parse_qs(urlparse(response.get("_links", {}).get("next", "")).query).get( "cursor", [None], # type: ignore[list-item] @@ -431,19 +431,30 @@ def transform_get_attachments_response(self, response: dict[str, Any]) -> dict[s base_url = response.get("_links", {}).get("base", "") attachments = [] for attachment 
in response["results"]: - result = attachment.copy() - if "_links" in result: - webui_path = result["_links"].get("webui", "") - download_path = result["_links"].get("download", "") - result["url"] = f"{base_url}{webui_path}" - result["download_link"] = f"{base_url}{download_path}" - del result["_links"] - del result["webuiLink"] - del result["downloadLink"] - del result["version"] - attachments.append(result) - - return {"attachments": attachments, "pagination_token": pagination_token} + url = None + download_link = None + if "_links" in attachment: + webui_path = attachment["_links"].get("webui", "") + download_path = attachment["_links"].get("download", "") + url = f"{base_url}{webui_path}" if webui_path else None + download_link = f"{base_url}{download_path}" if download_path else None + normalized = normalize_attachment(attachment, url=url, download_link=download_link) + attachments.append(normalized) + + result = {"attachments": attachments, "paginationToken": pagination_token} + return remove_none_values(result) + + def transform_attachment_response(self, response: dict[str, Any]) -> dict[str, Any]: + """Transform a single attachment response using Klavis normalizers.""" + base_url = response.get("_links", {}).get("base", "") + url = None + download_link = None + if "_links" in response: + webui_path = response["_links"].get("webui", "") + download_path = response["_links"].get("download", "") + url = f"{base_url}{webui_path}" if webui_path else None + download_link = f"{base_url}{download_path}" if download_path else None + return {"attachment": normalize_attachment(response, url=url, download_link=download_link)} def prepare_update_page_payload( self, @@ -530,7 +541,11 @@ async def get_root_pages_in_space(self, space_id: str) -> dict[str, Any]: } pages = await self.get(f"spaces/{space_id}/pages", params=params) base_url = pages.get("_links", {}).get("base", "") - return {"pages": [self._transform_links(page, base_url) for page in pages["results"]]} + normalized_pages = [] + for page in pages["results"]: + url = self._build_url(page, base_url) + normalized_pages.append(normalize_page(page, url=url)) + return {"pages": normalized_pages} async def get_space_homepage(self, space_id: str) -> dict[str, Any]: """ @@ -541,7 +556,8 @@ async def get_space_homepage(self, space_id: str) -> dict[str, Any]: root_pages = await self.get_root_pages_in_space(space_id) for page in root_pages["pages"]: if page.get("url", "").endswith("overview"): - return self._transform_links(page) + # Page is already normalized from get_root_pages_in_space + return page raise ToolExecutionError(message="No homepage found for space.") async def get_page_by_id( @@ -679,7 +695,7 @@ async def get_space_id(self, space_identifier: str) -> str: return space["space"]["id"] def create_space_tree(self, space: dict) -> dict: - """Create a space tree structure from space data.""" + """Create a space tree structure from normalized space data.""" space_data = space.get("space", {}) return { @@ -688,23 +704,17 @@ def create_space_tree(self, space: dict) -> dict: "name": space_data.get("name"), "type": "space", "url": space_data.get("url", ""), - "description": space_data.get("description", {}).get("plain", ""), + "description": space_data.get("description", ""), "children": [] } def convert_root_pages_to_tree_nodes(self, pages: list) -> list: - """Convert root pages to tree nodes.""" + """Convert normalized root pages to tree nodes.""" tree_nodes = [] for page in pages: - node = { - "id": page.get("id"), - "title": 
page.get("title"), - "type": page.get("type", "page"), - "status": page.get("status", "current"), - "url": page.get("url", ""), - "children": [] - } + # Pages are already normalized, extract tree node fields + node = normalize_tree_node(page, url=page.get("url")) tree_nodes.append(node) return tree_nodes @@ -726,22 +736,16 @@ async def process_page_descendants(self, root_children: list, base_url: str) -> if descendants: transformed_children = [] for desc in descendants: - child_node = { - "id": desc.get("id"), - "title": desc.get("title"), - "type": desc.get("type", "page"), - "status": desc.get("status", "current"), - "parent_id": None, - "children": [] - } + # Build URL first + url = build_child_url(base_url, desc) or "" + + # Use normalizer for consistent field names + child_node = normalize_tree_node(desc, url=url) # Determine parent ID from ancestors ancestors = desc.get("ancestors", []) if ancestors: - child_node["parent_id"] = ancestors[-1].get("id") - - # Build URL - child_node["url"] = build_child_url(base_url, child_node) or "" + child_node["parentId"] = ancestors[-1].get("id") transformed_children.append(child_node) diff --git a/mcp_servers/confluence/normalizers.py b/mcp_servers/confluence/normalizers.py new file mode 100644 index 00000000..3b4d9f66 --- /dev/null +++ b/mcp_servers/confluence/normalizers.py @@ -0,0 +1,177 @@ +""" +Normalizers for Confluence API responses. + +This module provides Klavis-defined abstractions for transforming raw vendor API +responses into normalized schemas. These normalizers ensure that: +- Raw vendor responses are never exposed directly +- Vendor-specific field names are mapped to Klavis-defined names +- The output follows Klavis Interface conventions +""" + +from typing import Any, Dict + + +def get_path(data: Dict, path: str) -> Any: + """Safe dot-notation access. Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: Dict, mapping: Dict[str, Any]) -> Dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + + Args: + source: Raw vendor JSON. 
+ mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# ============================================================================= +# Klavis Normalization Rules for Confluence Entities +# ============================================================================= + +# Version information rules +VERSION_RULES = { + "number": "number", + "message": "message", + "createdAt": "createdAt", + "authorId": "authorId", + "isMinorEdit": "minorEdit", +} + +# Page entity rules - maps Confluence API fields to Klavis fields +PAGE_RULES = { + "id": "id", + "title": "title", + "type": "type", + "status": "status", + "spaceId": "spaceId", + "parentId": "parentId", + "parentType": "parentType", + "position": "position", + "authorId": "authorId", + "ownerId": "ownerId", + "createdAt": "createdAt", + # Nested body content - handled specially + "body": lambda x: x.get("body") if x.get("body") else None, + # Version info - normalized + "version": lambda x: normalize(x.get("version", {}), VERSION_RULES) if x.get("version") else None, +} + +# Space entity rules +SPACE_RULES = { + "id": "id", + "key": "key", + "name": "name", + "type": "type", + "status": "status", + "authorId": "authorId", + "createdAt": "createdAt", + "homepageId": "homepageId", + # Description - extract plain text value + "description": lambda x: get_path(x, "description.plain.value"), + # Icon path + "iconPath": lambda x: get_path(x, "icon.path"), +} + +# Attachment entity rules +ATTACHMENT_RULES = { + "id": "id", + "title": "title", + "status": "status", + "mediaType": "mediaType", + "mediaTypeDescription": "mediaTypeDescription", + "comment": "comment", + "fileId": "fileId", + "fileSize": "fileSize", + "pageId": "pageId", + "createdAt": "createdAt", + # Version info - normalized + "version": lambda x: normalize(x.get("version", {}), VERSION_RULES) if x.get("version") else None, +} + +# Search result rules (v1 API) +SEARCH_RESULT_RULES = { + "id": "content.id", + "title": "content.title", + "type": "content.type", + "status": "content.status", + "excerpt": "excerpt", +} + +# Tree node rules for hierarchy +TREE_NODE_RULES = { + "id": "id", + "title": "title", + "type": "type", + "status": "status", +} + + +def normalize_page(raw_page: Dict, url: str | None = None) -> Dict: + """Normalize a single page response.""" + page = normalize(raw_page, PAGE_RULES) + # Default type to "page" if not present (v2 API doesn't include type for pages) + if "type" not in page: + page["type"] = "page" + if url: + page["url"] = url + return page + + +def normalize_space(raw_space: Dict, url: str | None = None) -> Dict: + """Normalize a single space response.""" + space = normalize(raw_space, SPACE_RULES) + if url: + space["url"] = url + return space + + +def normalize_attachment(raw_attachment: Dict, url: str | None = None, download_link: str | None = None) -> Dict: + """Normalize a single attachment response.""" + attachment = normalize(raw_attachment, ATTACHMENT_RULES) + if url: + attachment["url"] = url + if download_link: + attachment["downloadLink"] = download_link + return attachment + + +def normalize_search_result(raw_result: Dict, url: str | None = None) -> Dict: + """Normalize a single search result.""" + result = 
normalize(raw_result, SEARCH_RESULT_RULES) + if url: + result["url"] = url + return result + + +def normalize_tree_node(raw_node: Dict, url: str | None = None) -> Dict: + """Normalize a tree node for hierarchy responses.""" + node = normalize(raw_node, TREE_NODE_RULES) + if url: + node["url"] = url + node["children"] = [] + return node + diff --git a/mcp_servers/confluence/utils.py b/mcp_servers/confluence/utils.py index 05815dd7..56ce97bc 100644 --- a/mcp_servers/confluence/utils.py +++ b/mcp_servers/confluence/utils.py @@ -60,11 +60,11 @@ def build_child_url(base_url: str, child: dict) -> str | None: def build_hierarchy(transformed_children: list, parent_id: str, parent_node: dict) -> None: """Build parent-child hierarchy from a flat list of descendants. - This function takes a flat list of items that have parent_id references and + This function takes a flat list of items that have parentId references and builds a hierarchical tree structure. It modifies the parent_node in place. Args: - transformed_children: List of child nodes with parent_id fields + transformed_children: List of child nodes with parentId fields parent_id: The ID of the parent node parent_node: The parent node to attach direct children to @@ -77,11 +77,11 @@ def build_hierarchy(transformed_children: list, parent_id: str, parent_node: dic # Find all direct children of the given parent_id direct_children = [] for child in transformed_children: - if child.get("parent_id") == parent_id: + if child.get("parentId") == parent_id: direct_children.append(child) - elif child.get("parent_id") in child_map: + elif child.get("parentId") in child_map: # Add child to its parent's children list - parent = child_map[child.get("parent_id")] + parent = child_map[child.get("parentId")] if "children" not in parent: parent["children"] = [] parent["children"].append(child) diff --git a/mcp_servers/fetch_url/src/index.ts b/mcp_servers/fetch_url/src/index.ts index 08f31e1e..86364eb8 100644 --- a/mcp_servers/fetch_url/src/index.ts +++ b/mcp_servers/fetch_url/src/index.ts @@ -37,6 +37,7 @@ server.registerTool( { description: "Fetch a website and return the content as HTML", inputSchema: fetchInputSchema, + annotations: { category: "FETCH_URL" }, }, async (args) => { return await Fetcher.html(args as RequestPayload); @@ -49,6 +50,7 @@ server.registerTool( { description: "Fetch a website and return the content as Markdown", inputSchema: fetchInputSchema, + annotations: { category: "FETCH_URL" }, }, async (args) => { return await Fetcher.markdown(args as RequestPayload); @@ -61,6 +63,7 @@ server.registerTool( { description: "Fetch a website, return the content as plain text (no HTML)", inputSchema: fetchInputSchema, + annotations: { category: "FETCH_URL" }, }, async (args) => { return await Fetcher.txt(args as RequestPayload); @@ -73,6 +76,7 @@ server.registerTool( { description: "Fetch a JSON file from a URL", inputSchema: fetchInputSchema, + annotations: { category: "FETCH_URL" }, }, async (args) => { return await Fetcher.json(args as RequestPayload); diff --git a/mcp_servers/gmail/src/index.ts b/mcp_servers/gmail/src/index.ts index 657e4746..5080c648 100644 --- a/mcp_servers/gmail/src/index.ts +++ b/mcp_servers/gmail/src/index.ts @@ -158,13 +158,13 @@ function extractEmailContent(messagePart: GmailMessagePart): EmailContent { // Schema definitions const SendEmailSchema = z.object({ - to: z.array(z.string()).describe("List of recipient email addresses. You can use gmail_search_contacts tool to find contact emails. 
You MUST NOT assume the emails unless they are explicitly provided."), + to: z.array(z.string()).describe("List of recipient email addresses. You MUST NOT assume the emails unless they are explicitly provided. You may use gmail_search_contacts tool to find contact emails."), subject: z.string().describe("Email subject"), body: z.string().describe("Email body content (used for text/plain or when htmlBody not provided)"), htmlBody: z.string().optional().describe("HTML version of the email body"), mimeType: z.enum(['text/plain', 'text/html', 'multipart/alternative']).optional().default('text/plain').describe("Email content type"), - cc: z.array(z.string()).optional().describe("List of CC recipients. You can use gmail_search_contacts tool to find contact emails. You MUST NOT assume the emails unless they are explicitly provided."), - bcc: z.array(z.string()).optional().describe("List of BCC recipients. You can use gmail_search_contacts tool to find contact emails. You MUST NOT assume the emails unless they are explicitly provided."), + cc: z.array(z.string()).optional().describe("List of CC recipients. You MUST NOT assume the emails unless they are explicitly provided. You may use gmail_search_contacts tool to find contact emails."), + bcc: z.array(z.string()).optional().describe("List of BCC recipients. You MUST NOT assume the emails unless they are explicitly provided. You may use gmail_search_contacts tool to find contact emails."), threadId: z.string().optional().describe("Thread ID to reply to"), inReplyTo: z.string().optional().describe("Message ID being replied to"), }); @@ -233,7 +233,7 @@ const getGmailMcpServer = () => { tools: [ { name: "gmail_send_email", - description: "Sends a new email. You can use gmail_search_contacts tool to find contact emails. You MUST NOT assume the emails unless they are explicitly provided.", + description: "Sends a new email. You MUST NOT assume the emails unless they are explicitly provided. You may use gmail_search_contacts tool to find contact emails.", inputSchema: zodToJsonSchema(SendEmailSchema), annotations: { category: "GMAIL_EMAIL" }, }, diff --git a/mcp_servers/google_calendar/server.py b/mcp_servers/google_calendar/server.py index 386cf4c2..51574035 100644 --- a/mcp_servers/google_calendar/server.py +++ b/mcp_servers/google_calendar/server.py @@ -25,6 +25,115 @@ from googleapiclient.discovery import build from googleapiclient.errors import HttpError + +def get_path(data: Dict, path: str) -> Any: + """Safe dot-notation access. Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: Dict, mapping: Dict[str, Any]) -> Dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + Args: + source: Raw vendor JSON. 
+ mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# Mapping Rules + +ATTENDEE_RULES = { + "email": "email", + "name": "displayName", + "responseStatus": "responseStatus", + "isOrganizer": "organizer", + "isSelf": "self", + "isOptional": "optional", + "comment": "comment", +} + +CALENDAR_RULES = { + "id": "id", + "name": "summary", + "description": "description", + "location": "location", + "timezone": "timeZone", +} + +ATTACHMENT_RULES = { + "url": "fileUrl", + "title": "title", + "mimeType": "mimeType", + "fileId": "fileId", +} + +EVENT_RULES = { + "id": "id", + "title": "summary", + "description": "description", + "location": "location", + "status": "status", + "type": "eventType", + "visibility": "visibility", + "link": "htmlLink", + "meetingLink": "hangoutLink", + "created": "created", + "updated": "updated", + # Creator/Organizer + "creatorEmail": "creator.email", + "creatorName": "creator.displayName", + "organizerEmail": "organizer.email", + "organizerName": "organizer.displayName", + # Recurrence + "recurrenceId": "recurringEventId", + "recurrence": "recurrence", + # Date/time handling + "startTime": lambda x: x.get('start', {}).get('dateTime'), + "endTime": lambda x: x.get('end', {}).get('dateTime'), + "startDate": lambda x: x.get('start', {}).get('date'), + "endDate": lambda x: x.get('end', {}).get('date'), + "timezone": lambda x: x.get('start', {}).get('timeZone') or x.get('end', {}).get('timeZone'), + # Nested objects + "attendees": lambda x: [ + normalize(a, ATTENDEE_RULES) for a in x.get('attendees', []) + ] if x.get('attendees') else None, + "attachments": lambda x: [ + normalize(a, ATTACHMENT_RULES) for a in x.get('attachments', []) + ] if x.get('attachments') else None, +} + + +def normalize_event(raw_event: Dict) -> Dict: + """Normalize a single event and add computed fields.""" + event = normalize(raw_event, EVENT_RULES) + # Add day of week (computed field) + datetime_str = event.get('startTime') or event.get('startDate') + if datetime_str: + event['dayOfWeek'] = get_day_of_week(datetime_str) + return event + # Configure logging logger = logging.getLogger(__name__) @@ -158,7 +267,7 @@ async def list_calendars( service = get_calendar_service(access_token) max_results = max(1, min(max_results, 250)) - calendars = ( + raw_response = ( service.calendarList() .list( pageToken=next_page_token, @@ -169,13 +278,14 @@ async def list_calendars( .execute() ) - items = calendars.get("items", []) - keys = ["description", "id", "summary", "timeZone"] - relevant_items = [{k: i.get(k) for k in keys if i.get(k)} for i in items] + calendars = [ + normalize(item, CALENDAR_RULES) + for item in raw_response.get("items", []) + ] return { - "next_page_token": calendars.get("nextPageToken"), - "num_calendars": len(relevant_items), - "calendars": relevant_items, + "nextPageToken": raw_response.get("nextPageToken"), + "count": len(calendars), + "calendars": calendars, } except HttpError as e: logger.error(f"Google Calendar API error: {e}") @@ -242,21 +352,14 @@ async def create_event( # Set conferenceDataVersion to 1 when creating conferences conference_data_version = 1 if add_google_meet else 0 - created_event = service.events().insert( + raw_event = 
service.events().insert( calendarId=calendar_id, body=event, sendUpdates=send_updates, conferenceDataVersion=conference_data_version ).execute() - # Add day of the week to the created event - start_time = created_event.get("start", {}) - datetime_str = start_time.get("dateTime") or start_time.get("date") - day_of_week = get_day_of_week(datetime_str) - if day_of_week: - created_event["dayOfWeek"] = day_of_week - - return {"event": created_event} + return {"event": normalize_event(raw_event)} except HttpError as e: logger.error(f"Google Calendar API error: {e}") error_detail = json.loads(e.content.decode('utf-8')) @@ -288,7 +391,7 @@ async def list_events( if min_end_dt > max_start_dt: min_end_dt, max_start_dt = max_start_dt, min_end_dt - events_result = ( + raw_response = ( service.events() .list( calendarId=calendar_id, @@ -301,38 +404,12 @@ async def list_events( .execute() ) - items_keys = [ - "attachments", - "attendees", - "creator", - "description", - "end", - "eventType", - "htmlLink", - "id", - "location", - "organizer", - "recurrence", - "recurringEventId", - "start", - "summary", - "visibility", - ] - events = [ - {key: event[key] for key in items_keys if key in event} - for event in events_result.get("items", []) + normalize_event(item) + for item in raw_response.get("items", []) ] - - # Add day of the week to each event - for event in events: - start_time = event.get("start", {}) - datetime_str = start_time.get("dateTime") or start_time.get("date") - day_of_week = get_day_of_week(datetime_str) - if day_of_week: - event["dayOfWeek"] = day_of_week - - return {"events_count": len(events), "events": events} + + return {"count": len(events), "events": events} except HttpError as e: logger.error(f"Google Calendar API error: {e}") error_detail = json.loads(e.content.decode('utf-8')) diff --git a/mcp_servers/google_docs/server.py b/mcp_servers/google_docs/server.py index fac93d56..46d4ae94 100644 --- a/mcp_servers/google_docs/server.py +++ b/mcp_servers/google_docs/server.py @@ -77,17 +77,180 @@ def get_auth_token() -> str: except LookupError: raise RuntimeError("Authentication token not found in request context") + +def normalize_document_response(raw_response: Dict[str, Any]) -> Dict[str, Any]: + """ + Normalize the Google Docs API response to a simplified structure. + Reduces complexity while preserving important information. 
+ """ + + def extract_text_from_paragraph(paragraph: Dict) -> Dict[str, Any]: + """Extract text content and styling from a paragraph.""" + elements = paragraph.get('elements', []) + text_parts = [] + + for element in elements: + if 'textRun' in element: + text_run = element['textRun'] + content = text_run.get('content', '') + text_style = text_run.get('textStyle', {}) + + part = {'text': content} + if text_style.get('bold'): + part['bold'] = True + if text_style.get('italic'): + part['italic'] = True + if text_style.get('underline'): + part['underline'] = True + + text_parts.append(part) + + # Combine text for simple display + full_text = ''.join(p['text'] for p in text_parts).strip() + + result = {'text': full_text} + + # Add paragraph style info + para_style = paragraph.get('paragraphStyle', {}) + named_style = para_style.get('namedStyleType') + if named_style and named_style != 'NORMAL_TEXT': + result['style'] = named_style + + heading_id = para_style.get('headingId') + if heading_id: + result['headingId'] = heading_id + + # Add bullet info if present + if 'bullet' in paragraph: + bullet = paragraph['bullet'] + result['isBullet'] = True + result['listId'] = bullet.get('listId') + if bullet.get('nestingLevel', 0) > 0: + result['nestingLevel'] = bullet['nestingLevel'] + + # Include rich text parts if there's formatting + has_formatting = any( + p.get('bold') or p.get('italic') or p.get('underline') + for p in text_parts + ) + if has_formatting: + result['formattedParts'] = [p for p in text_parts if p['text'].strip()] + + return result + + def extract_table(table: Dict) -> Dict[str, Any]: + """Extract table content in a simplified format.""" + rows = table.get('rows', 0) + columns = table.get('columns', 0) + table_rows = table.get('tableRows', []) + + extracted_rows = [] + for table_row in table_rows: + cells = [] + for cell in table_row.get('tableCells', []): + cell_content = [] + for content_item in cell.get('content', []): + if 'paragraph' in content_item: + para_data = extract_text_from_paragraph(content_item['paragraph']) + if para_data['text']: + cell_content.append(para_data['text']) + cells.append(' '.join(cell_content)) + extracted_rows.append(cells) + + return { + 'type': 'table', + 'rows': rows, + 'columns': columns, + 'data': extracted_rows + } + + def process_content(content_list: list) -> list: + """Process the document content into a simplified structure.""" + processed = [] + + for item in content_list: + # Skip section breaks + if 'sectionBreak' in item: + continue + + # Process paragraphs + if 'paragraph' in item: + para_data = extract_text_from_paragraph(item['paragraph']) + if para_data['text']: # Only include non-empty paragraphs + processed.append({ + 'type': 'paragraph', + **para_data + }) + + # Process tables + elif 'table' in item: + table_data = extract_table(item['table']) + processed.append(table_data) + + return processed + + # Build the normalized response + normalized = { + 'documentId': raw_response.get('documentId'), + 'title': raw_response.get('title'), + 'revisionId': raw_response.get('revisionId'), + } + + # Process body content + body = raw_response.get('body', {}) + content = body.get('content', []) + normalized['content'] = process_content(content) + + # Extract document metadata + doc_style = raw_response.get('documentStyle', {}) + if doc_style: + page_size = doc_style.get('pageSize', {}) + normalized['pageInfo'] = { + 'width': page_size.get('width', {}).get('magnitude'), + 'height': page_size.get('height', {}).get('magnitude'), + 'unit': 
page_size.get('width', {}).get('unit', 'PT'), + 'margins': { + 'top': doc_style.get('marginTop', {}).get('magnitude'), + 'bottom': doc_style.get('marginBottom', {}).get('magnitude'), + 'left': doc_style.get('marginLeft', {}).get('magnitude'), + 'right': doc_style.get('marginRight', {}).get('magnitude'), + } + } + + # Include list definitions (simplified) + lists = raw_response.get('lists', {}) + if lists: + normalized['lists'] = { + list_id: { + 'type': 'bullet' if props.get('listProperties', {}).get('nestingLevels', [{}])[0].get('glyphSymbol') else 'numbered' + } + for list_id, props in lists.items() + } + + return normalized + +async def _get_document_raw(document_id: str) -> Dict[str, Any]: + """Internal function to get raw Google Docs API response.""" + access_token = get_auth_token() + service = get_docs_service(access_token) + request = service.documents().get(documentId=document_id) + response = request.execute() + return dict(response) + + async def get_document_by_id(document_id: str) -> Dict[str, Any]: - """Get the latest version of the specified Google Docs document.""" + """Get the latest version of the specified Google Docs document. + + Args: + document_id: The ID of the Google Docs document to retrieve. + + Returns: + Normalized document response with simplified structure. + """ logger.info(f"Executing tool: get_document_by_id with document_id: {document_id}") try: - access_token = get_auth_token() - service = get_docs_service(access_token) - - request = service.documents().get(documentId=document_id) - response = request.execute() - - return dict(response) + raw_response = await _get_document_raw(document_id) + return normalize_document_response(raw_response) except HttpError as e: logger.error(f"Google Docs API error: {e}") error_detail = json.loads(e.content.decode('utf-8')) @@ -103,7 +266,8 @@ async def insert_text_at_end(document_id: str, text: str) -> Dict[str, Any]: access_token = get_auth_token() service = get_docs_service(access_token) - document = await get_document_by_id(document_id) + # Need raw response to get endIndex + document = await _get_document_raw(document_id) end_index = document["body"]["content"][-1]["endIndex"] @@ -125,7 +289,10 @@ async def insert_text_at_end(document_id: str, text: str) -> Dict[str, Any]: .execute() ) - return dict(response) + return { + "id": document_id, + "status": "success", + } except HttpError as e: logger.error(f"Google Docs API error: {e}") error_detail = json.loads(e.content.decode('utf-8')) @@ -148,8 +315,8 @@ async def create_blank_document(title: str) -> Dict[str, Any]: return { "title": response["title"], - "document_id": response["documentId"], - "document_url": f"https://docs.google.com/document/d/{response['documentId']}/edit", + "id": response["documentId"], + "url": f"https://docs.google.com/document/d/{response['documentId']}/edit", } except HttpError as e: logger.error(f"Google Docs API error: {e}") @@ -188,8 +355,8 @@ async def create_document_from_text(title: str, text_content: str) -> Dict[str, return { "title": document["title"], - "documentId": document["document_id"], - "documentUrl": f"https://docs.google.com/document/d/{document['document_id']}/edit", + "id": document["document_id"], + "url": f"https://docs.google.com/document/d/{document['document_id']}/edit", } except HttpError as e: logger.error(f"Google Docs API error: {e}") @@ -221,9 +388,9 @@ async def get_all_documents() -> Dict[str, Any]: documents.append({ 'id': file['id'], 'name': file['name'], - 'createdTime': file.get('createdTime'), - 
'modifiedTime': file.get('modifiedTime'), - 'webViewLink': file.get('webViewLink') + 'createdAt': file.get('createdTime'), + 'modifiedAt': file.get('modifiedTime'), + 'url': file.get('webViewLink') }) return { diff --git a/mcp_servers/google_drive/server.py b/mcp_servers/google_drive/server.py index ed0677d1..de72a1a2 100644 --- a/mcp_servers/google_drive/server.py +++ b/mcp_servers/google_drive/server.py @@ -294,7 +294,7 @@ async def search_documents( include_shared_drives: bool = False, include_organization_domain_documents: bool = False, order_by: list[str] | None = None, - limit: int = 50, + limit: int = 10, pagination_token: str | None = None, ) -> Dict[str, Any]: """Search for documents in the user's Google Drive.""" @@ -337,6 +337,8 @@ async def search_documents( results = service.files().list(**params).execute() batch = results.get("files", []) + # Extract only id, name, and mimeType fields from each file + batch = [{"id": f.get("id"), "name": f.get("name"), "mimeType": f.get("mimeType")} for f in batch] files.extend(batch[: limit - len(files)]) pagination_token = results.get("nextPageToken") @@ -360,7 +362,7 @@ async def search_and_retrieve_documents( include_shared_drives: bool = False, include_organization_domain_documents: bool = False, order_by: list[str] | None = None, - limit: int = 50, + limit: int = 10, pagination_token: str | None = None, ) -> Dict[str, Any]: """Search and retrieve the contents of Google documents in the user's Google Drive.""" @@ -612,7 +614,7 @@ async def list_tools() -> list[types.Tool]: "limit": { "type": "integer", "description": "The number of documents to list.", - "default": 50, + "default": 10, }, "pagination_token": { "type": "string", @@ -671,7 +673,7 @@ async def list_tools() -> list[types.Tool]: "limit": { "type": "integer", "description": "The number of documents to list.", - "default": 50, + "default": 10, }, "pagination_token": { "type": "string", @@ -796,7 +798,7 @@ async def call_tool( include_shared_drives=arguments.get("include_shared_drives", False), include_organization_domain_documents=arguments.get("include_organization_domain_documents", False), order_by=arguments.get("order_by"), - limit=arguments.get("limit", 50), + limit=arguments.get("limit", 10), pagination_token=arguments.get("pagination_token"), ) return [ @@ -824,7 +826,7 @@ async def call_tool( include_shared_drives=arguments.get("include_shared_drives", False), include_organization_domain_documents=arguments.get("include_organization_domain_documents", False), order_by=arguments.get("order_by"), - limit=arguments.get("limit", 50), + limit=arguments.get("limit", 10), pagination_token=arguments.get("pagination_token"), ) return [ diff --git a/mcp_servers/google_drive/utils.py b/mcp_servers/google_drive/utils.py index 312cd0f8..e735a7be 100644 --- a/mcp_servers/google_drive/utils.py +++ b/mcp_servers/google_drive/utils.py @@ -7,18 +7,19 @@ def convert_document_to_html(document: dict) -> str: "" ) for element in document["body"]["content"]: - html += convert_structural_element(element) + html += _convert_structural_element_html(element) html += "" return html -def convert_structural_element(element: dict, wrap_paragraphs: bool = True) -> str: + +def _convert_structural_element_html(element: dict, wrap_paragraphs: bool = True) -> str: if "sectionBreak" in element or "tableOfContents" in element: return "" elif "paragraph" in element: paragraph_content = "" - prepend, append = get_paragraph_style_tags( + prepend, append = _get_paragraph_style_tags_html( 
style=element["paragraph"]["paragraphStyle"], wrap_paragraphs=wrap_paragraphs, ) @@ -26,7 +27,7 @@ def convert_structural_element(element: dict, wrap_paragraphs: bool = True) -> s for item in element["paragraph"]["elements"]: if "textRun" not in item: continue - paragraph_content += extract_paragraph_content(item["textRun"]) + paragraph_content += _extract_paragraph_content_html(item["textRun"]) if not paragraph_content: return "" @@ -37,26 +38,26 @@ def convert_structural_element(element: dict, wrap_paragraphs: bool = True) -> s table = [ [ "".join([ - convert_structural_element(element=cell_element, wrap_paragraphs=False) + _convert_structural_element_html(element=cell_element, wrap_paragraphs=False) for cell_element in cell["content"] ]) for cell in row["tableCells"] ] for row in element["table"]["tableRows"] ] - return table_list_to_html(table) + return _table_list_to_html(table) else: raise ValueError(f"Unknown document body element type: {element}") -def extract_paragraph_content(text_run: dict) -> str: +def _extract_paragraph_content_html(text_run: dict) -> str: content = text_run["content"] style = text_run["textStyle"] - return apply_text_style(content, style) + return _apply_text_style_html(content, style) -def apply_text_style(content: str, style: dict) -> str: +def _apply_text_style_html(content: str, style: dict) -> str: content = content.rstrip("\n") content = content.replace("\n", "<br>")
italic = style.get("italic", False) @@ -68,7 +69,7 @@ def apply_text_style(content: str, style: dict) -> str: return content -def get_paragraph_style_tags(style: dict, wrap_paragraphs: bool = True) -> tuple[str, str]: +def _get_paragraph_style_tags_html(style: dict, wrap_paragraphs: bool = True) -> tuple[str, str]: named_style = style["namedStyleType"] if named_style == "NORMAL_TEXT": return ("<p>", "</p>") if wrap_paragraphs else ("", "")
@@ -86,7 +87,7 @@ def get_paragraph_style_tags(style: dict, wrap_paragraphs: bool = True) -> tuple return ("<p>", "</p>") if wrap_paragraphs else ("", "") -def table_list_to_html(table: list[list[str]]) -> str: +def _table_list_to_html(table: list[list[str]]) -> str: html = "<table>" for row in table: html += "<tr>" @@ -98,44 +99,48 @@ def table_list_to_html(table: list[list[str]]) -> str: html += "</table>" return html + # doc to markdown def convert_document_to_markdown(document: dict) -> str: md = f"---\ntitle: {document['title']}\ndocumentId: {document['documentId']}\n---\n" for element in document["body"]["content"]: - md += convert_structural_element(element) + md += _convert_structural_element_md(element) return md -def convert_structural_element(element: dict) -> str: +def _convert_structural_element_md(element: dict, in_table: bool = False) -> str: if "sectionBreak" in element or "tableOfContents" in element: return "" elif "paragraph" in element: md = "" - prepend = get_paragraph_style_prepend_str(element["paragraph"]["paragraphStyle"]) + prepend = _get_paragraph_style_prepend_str_md(element["paragraph"]["paragraphStyle"]) if not in_table else "" for item in element["paragraph"]["elements"]: if "textRun" not in item: continue - content = extract_paragraph_content(item["textRun"]) + content = _extract_paragraph_content_md(item["textRun"], in_table=in_table) md += f"{prepend}{content}" return md elif "table" in element: - return convert_structural_element(element) + return _table_to_markdown(element["table"]) else: raise ValueError(f"Unknown document body element type: {element}") -def extract_paragraph_content(text_run: dict) -> str: +def _extract_paragraph_content_md(text_run: dict, in_table: bool = False) -> str: content = text_run["content"] style = text_run["textStyle"] - return apply_text_style(content, style) + return _apply_text_style_md(content, style, in_table=in_table) -def apply_text_style(content: str, style: dict) -> str: - append = "\n" if content.endswith("\n") else "" +def _apply_text_style_md(content: str, style: dict, in_table: bool = False) -> str: + append = "\n" if content.endswith("\n") and not in_table else "" content = content.rstrip("\n") + if in_table: + # Replace newlines with spaces in table cells, and escape pipes + content = content.replace("\n", " ").replace("|", "\\|") italic = style.get("italic", False) bold = style.get("bold", False) if italic: @@ -145,7 +150,7 @@ def apply_text_style(content: str, style: dict) -> str: return f"{content}{append}" -def get_paragraph_style_prepend_str(style: dict) -> str: +def _get_paragraph_style_prepend_str_md(style: dict) -> str: named_style = style["namedStyleType"] if named_style == "NORMAL_TEXT": return "" @@ -160,3 +165,41 @@ def get_paragraph_style_prepend_str(style: dict) -> str: except ValueError: return "" return "" + + +def _table_to_markdown(table: dict) -> str: + """Convert a Google Docs table to Markdown format.""" + rows = table.get("tableRows", []) + if not rows: + return "" + + md_rows = [] + for row in rows: + cells = [] + for cell in row.get("tableCells", []): + cell_content = "".join([ + _convert_structural_element_md(element, in_table=True) + for element in cell.get("content", []) + ]) + # Clean up cell content for markdown table + cell_content = cell_content.strip() + cells.append(cell_content) + md_rows.append(cells) + + if not md_rows: + return "" + + # Build markdown table + md = "\n" + # Header row + md += "| " + " | ".join(md_rows[0]) + " |\n" + # Separator row + md += "| " + " | ".join(["---"] * len(md_rows[0])) + " |\n" + # Data rows + for row in md_rows[1:]: + # Ensure row has same number of columns as header + while len(row) < len(md_rows[0]): + row.append("") + md += "| " + " | ".join(row) + " |\n" + md += "\n" + return md diff --git a/mcp_servers/google_sheets/server.py b/mcp_servers/google_sheets/server.py index c83e0d80..2adfabe8 100644 --- a/mcp_servers/google_sheets/server.py +++ 
b/mcp_servers/google_sheets/server.py @@ -282,9 +282,9 @@ async def list_spreadsheets_tool() -> Dict[str, Any]: spreadsheet_info = { "id": file.get('id'), "name": file.get('name'), - "createdTime": file.get('createdTime'), - "modifiedTime": file.get('modifiedTime'), - "webViewLink": file.get('webViewLink'), + "createdAt": file.get('createdTime'), + "modifiedAt": file.get('modifiedTime'), + "link": file.get('webViewLink'), "owners": [owner.get('displayName', owner.get('emailAddress', 'Unknown')) for owner in file.get('owners', [])] } @@ -333,7 +333,7 @@ async def list_sheets_tool(spreadsheet_id: str) -> Dict[str, Any]: return { "spreadsheetId": response.get('spreadsheetId', ''), - "spreadsheetUrl": response.get('spreadsheetUrl', ''), + "url": response.get('spreadsheetUrl', ''), "title": response.get('properties', {}).get('title', ''), "sheets": sheets, } diff --git a/mcp_servers/hubspot/server.py b/mcp_servers/hubspot/server.py index 0fd9f4bb..1f50a5c5 100644 --- a/mcp_servers/hubspot/server.py +++ b/mcp_servers/hubspot/server.py @@ -1096,7 +1096,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1122,7 +1122,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1216,7 +1216,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1242,7 +1242,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1336,7 +1336,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1362,7 +1362,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1380,7 +1380,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1456,7 +1456,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1482,7 +1482,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1500,7 +1500,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1610,7 +1610,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1636,7 +1636,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: @@ -1654,7 +1654,7 @@ async def call_tool( return [ types.TextContent( type="text", - text=str(result), + text=json.dumps(result, indent=2), ) ] except Exception as e: diff --git a/mcp_servers/hubspot/tools/base.py b/mcp_servers/hubspot/tools/base.py index a6a90346..1de09bd1 100644 --- a/mcp_servers/hubspot/tools/base.py +++ b/mcp_servers/hubspot/tools/base.py @@ -2,12 +2,226 @@ import os from contextvars import ContextVar from hubspot import HubSpot -from typing import Optional +from 
typing import Optional, Any, Dict from dotenv import load_dotenv # Configure logging logger = logging.getLogger(__name__) + +# ============================================================================ +# Normalization Utilities (Klavis Interface Layer) +# ============================================================================ + +def get_path(data: Dict, path: str) -> Any: + """Safe dot-notation access. Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: Dict, mapping: Dict[str, Any]) -> Dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + Args: + source: Raw vendor JSON/dict. + mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# ============================================================================ +# Mapping Rules for HubSpot Objects (Klavis-defined schemas) +# ============================================================================ + +# Contact mapping rules +CONTACT_RULES = { + "id": "id", + "email": "email", + "firstName": "firstname", + "lastName": "lastname", + "phone": "phone", + "mobilePhone": "mobilephone", + "company": "company", + "jobTitle": "jobtitle", + "website": "website", + "address": "address", + "city": "city", + "state": "state", + "postalCode": "zip", + "country": "country", + "lifecycleStage": "lifecyclestage", + "leadStatus": "hs_lead_status", + "ownerId": "hubspot_owner_id", + "createdAt": "createdate", + "updatedAt": "lastmodifieddate", +} + +# Company mapping rules +COMPANY_RULES = { + "id": "id", + "name": "name", + "domain": "domain", + "website": "website", + "phone": "phone", + "address": "address", + "city": "city", + "state": "state", + "postalCode": "zip", + "country": "country", + "industry": "industry", + "employeeCount": "numberofemployees", + "annualRevenue": "annualrevenue", + "description": "description", + "type": "type", + "lifecycleStage": "lifecyclestage", + "leadStatus": "hs_lead_status", + "ownerId": "hubspot_owner_id", + "createdAt": "createdate", + "updatedAt": "hs_lastmodifieddate", +} + +# Deal mapping rules +DEAL_RULES = { + "id": "id", + "name": "dealname", + "amount": "amount", + "stage": "dealstage", + "stageLabel": "dealstage_label", + "pipeline": "pipeline", + "closeDate": "closedate", + "dealType": "dealtype", + "description": "description", + "ownerId": "hubspot_owner_id", + "createdAt": "createdate", + "updatedAt": "hs_lastmodifieddate", +} + +# Ticket mapping rules +TICKET_RULES = { + "id": "id", + "subject": "subject", + "content": "content", + "status": "hs_ticket_status", + "priority": "hs_ticket_priority", + "category": "hs_ticket_category", + "pipeline": "hs_pipeline", + "pipelineStage": "hs_pipeline_stage", + "ownerId": "hubspot_owner_id", + "createdAt": "createdate", + "updatedAt": "hs_lastmodifieddate", +} + +# Task mapping rules +TASK_RULES = { + "id": "id", + "subject": "hs_task_subject", + "body": "hs_task_body", + "status": "hs_task_status", + "priority": "hs_task_priority", + "type": "hs_task_type", + 
"timestamp": "hs_timestamp", + "ownerId": "hubspot_owner_id", +} + +# Property mapping rules +PROPERTY_RULES = { + "name": "name", + "label": "label", + "type": "type", + "fieldType": "field_type", +} + + +def normalize_crm_object(obj: Any, rules: Dict[str, Any]) -> Dict: + """ + Normalize a HubSpot CRM object (SimplePublicObject) to Klavis schema. + Handles both raw dicts and HubSpot SDK objects. + """ + if obj is None: + return {} + + # Extract properties from SDK object or dict + if hasattr(obj, 'properties'): + props = dict(obj.properties) if obj.properties else {} + obj_id = getattr(obj, 'id', None) + elif isinstance(obj, dict): + props = obj.get('properties', obj) + obj_id = obj.get('id') + else: + return {} + + # Add id to props for normalization + if obj_id: + props['id'] = obj_id + + return normalize(props, rules) + + +def normalize_contact(obj: Any) -> Dict: + """Normalize a contact object.""" + return normalize_crm_object(obj, CONTACT_RULES) + + +def normalize_company(obj: Any) -> Dict: + """Normalize a company object.""" + return normalize_crm_object(obj, COMPANY_RULES) + + +def normalize_deal(obj: Any) -> Dict: + """Normalize a deal object.""" + return normalize_crm_object(obj, DEAL_RULES) + + +def normalize_ticket(obj: Any) -> Dict: + """Normalize a ticket object.""" + return normalize_crm_object(obj, TICKET_RULES) + + +def normalize_task(obj: Any) -> Dict: + """Normalize a task object.""" + return normalize_crm_object(obj, TASK_RULES) + + +def normalize_property(prop: Any) -> Dict: + """Normalize a property metadata object.""" + if prop is None: + return {} + + if hasattr(prop, 'name'): + # SDK object + data = { + 'name': getattr(prop, 'name', None), + 'label': getattr(prop, 'label', None), + 'type': getattr(prop, 'type', None), + 'field_type': getattr(prop, 'field_type', None), + } + elif isinstance(prop, dict): + data = prop + else: + return {} + + return normalize(data, PROPERTY_RULES) + load_dotenv() # Context variable to store the access token for each request diff --git a/mcp_servers/hubspot/tools/companies.py b/mcp_servers/hubspot/tools/companies.py index 97de650a..c88c9068 100644 --- a/mcp_servers/hubspot/tools/companies.py +++ b/mcp_servers/hubspot/tools/companies.py @@ -1,7 +1,8 @@ import logging import json +from typing import Dict, Any from hubspot.crm.companies import SimplePublicObjectInputForCreate, SimplePublicObjectInput -from .base import get_hubspot_client +from .base import get_hubspot_client, normalize_company # Configure logging logger = logging.getLogger(__name__) @@ -61,7 +62,7 @@ async def hubspot_create_companies(properties: str) -> str: logger.error(f"Error creating company: {e}") return f"Error occurred: {e}" -async def hubspot_get_companies(limit: int = 10): +async def hubspot_get_companies(limit: int = 10) -> Dict[str, Any]: """ Fetch a list of companies from HubSpot. 
@@ -69,7 +70,7 @@ async def hubspot_get_companies(limit: int = 10): - limit: Number of companies to retrieve Returns: - - Paginated companies response + - Normalized companies response """ client = get_hubspot_client() if not client: @@ -78,13 +79,22 @@ async def hubspot_get_companies(limit: int = 10): try: logger.info(f"Fetching up to {limit} companies...") result = client.crm.companies.basic_api.get_page(limit=limit) - logger.info(f"Fetched {len(result.results)} companies successfully.") - return result + + # Normalize response + companies = [normalize_company(obj) for obj in (result.results or [])] + + logger.info(f"Fetched {len(companies)} companies successfully.") + return { + "count": len(companies), + "companies": companies, + "hasMore": result.paging.next.after is not None if result.paging and result.paging.next else False, + } except Exception as e: logger.error(f"Error fetching companies: {e}") - return None + raise e -async def hubspot_get_company_by_id(company_id: str): + +async def hubspot_get_company_by_id(company_id: str) -> Dict[str, Any]: """ Get a company by ID. @@ -92,7 +102,7 @@ async def hubspot_get_company_by_id(company_id: str): - company_id: ID of the company Returns: - - Company object + - Normalized company object """ client = get_hubspot_client() if not client: @@ -101,11 +111,15 @@ async def hubspot_get_company_by_id(company_id: str): try: logger.info(f"Fetching company with ID: {company_id}...") result = client.crm.companies.basic_api.get_by_id(company_id) + + # Normalize response + company = normalize_company(result) + logger.info(f"Fetched company ID: {company_id} successfully.") - return result + return {"company": company} except Exception as e: logger.error(f"Error fetching company by ID: {e}") - return None + raise e async def hubspot_update_company_by_id(company_id: str, updates: str) -> str: """ diff --git a/mcp_servers/hubspot/tools/contacts.py b/mcp_servers/hubspot/tools/contacts.py index 6fa00b27..00365cd5 100644 --- a/mcp_servers/hubspot/tools/contacts.py +++ b/mcp_servers/hubspot/tools/contacts.py @@ -1,12 +1,14 @@ import logging import json +from typing import Dict, Any from hubspot.crm.contacts import SimplePublicObjectInputForCreate, SimplePublicObjectInput -from .base import get_hubspot_client +from .base import get_hubspot_client, normalize_contact # Configure logging logger = logging.getLogger(__name__) -async def hubspot_get_contacts(limit: int = 10): + +async def hubspot_get_contacts(limit: int = 10) -> Dict[str, Any]: """ Fetch a list of contacts from HubSpot. @@ -14,7 +16,7 @@ async def hubspot_get_contacts(limit: int = 10): - limit: Number of contacts to retrieve Returns: - - Paginated contacts response + - Normalized contacts response """ client = get_hubspot_client() if not client: @@ -23,13 +25,22 @@ async def hubspot_get_contacts(limit: int = 10): try: logger.info(f"Fetching up to {limit} contacts from HubSpot") result = client.crm.contacts.basic_api.get_page(limit=limit) + + # Normalize response + contacts = [normalize_contact(obj) for obj in (result.results or [])] + logger.info("Successfully fetched contacts") - return result + return { + "count": len(contacts), + "contacts": contacts, + "hasMore": result.paging.next.after is not None if result.paging and result.paging.next else False, + } except Exception as e: logger.error(f"Error fetching contacts: {e}") raise e -async def hubspot_get_contact_by_id(contact_id: str): + +async def hubspot_get_contact_by_id(contact_id: str) -> Dict[str, Any]: """ Get a specific contact by ID. 
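
The hasMore flag introduced in these list tools mirrors the HubSpot SDK paging object, where paging.next.after is only populated when another page exists. An isolated sketch of that check, using SimpleNamespace stand-ins for the SDK response (values hypothetical):

    from types import SimpleNamespace

    def has_more(result) -> bool:
        # Same expression as in the list tools above.
        return result.paging.next.after is not None if result.paging and result.paging.next else False

    page = SimpleNamespace(paging=SimpleNamespace(next=SimpleNamespace(after="25")))
    last_page = SimpleNamespace(paging=None)
    assert has_more(page) is True
    assert has_more(last_page) is False
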
@@ -37,7 +48,7 @@ async def hubspot_get_contact_by_id(contact_id: str): - contact_id: ID of the contact to retrieve Returns: - - Contact object + - Normalized contact object """ client = get_hubspot_client() if not client: @@ -46,8 +57,12 @@ async def hubspot_get_contact_by_id(contact_id: str): try: logger.info(f"Fetching contact with ID: {contact_id}") result = client.crm.contacts.basic_api.get_by_id(contact_id) + + # Normalize response + contact = normalize_contact(result) + logger.info("Successfully fetched contact") - return result + return {"contact": contact} except Exception as e: logger.error(f"Error fetching contact by ID: {e}") raise e diff --git a/mcp_servers/hubspot/tools/deals.py b/mcp_servers/hubspot/tools/deals.py index 4254c7fb..9b26bbff 100644 --- a/mcp_servers/hubspot/tools/deals.py +++ b/mcp_servers/hubspot/tools/deals.py @@ -1,7 +1,8 @@ import logging import json +from typing import Dict, Any from hubspot.crm.deals import SimplePublicObjectInputForCreate, SimplePublicObjectInput -from .base import get_hubspot_client +from .base import get_hubspot_client, normalize_deal # Configure logging logger = logging.getLogger(__name__) @@ -28,7 +29,7 @@ def _build_dealstage_label_map(client) -> dict: logger.debug(f"Failed to fetch pipelines for deals: {exc}") return stage_id_to_label -async def hubspot_get_deals(limit: int = 10): +async def hubspot_get_deals(limit: int = 10) -> Dict[str, Any]: """ Fetch a list of deals from HubSpot. @@ -36,7 +37,7 @@ async def hubspot_get_deals(limit: int = 10): - limit: Number of deals to return Returns: - - List of deal records + - Normalized list of deal records """ client = get_hubspot_client() if not client: @@ -45,6 +46,7 @@ async def hubspot_get_deals(limit: int = 10): try: logger.info(f"Fetching up to {limit} deals...") result = client.crm.deals.basic_api.get_page(limit=limit) + # Enrich with human-readable dealstage label stage_label_map = _build_dealstage_label_map(client) for obj in getattr(result, "results", []) or []: @@ -53,13 +55,22 @@ async def hubspot_get_deals(limit: int = 10): if stage_id and stage_id in stage_label_map: props["dealstage_label"] = stage_label_map[stage_id] obj.properties = props - logger.info(f"Fetched {len(result.results)} deals successfully.") - return result + + # Normalize response + deals = [normalize_deal(obj) for obj in (result.results or [])] + + logger.info(f"Fetched {len(deals)} deals successfully.") + return { + "count": len(deals), + "deals": deals, + "hasMore": result.paging.next.after is not None if result.paging and result.paging.next else False, + } except Exception as e: logger.error(f"Error fetching deals: {e}") - return None + raise e + -async def hubspot_get_deal_by_id(deal_id: str): +async def hubspot_get_deal_by_id(deal_id: str) -> Dict[str, Any]: """ Fetch a deal by its ID. 
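
hubspot_get_deals and hubspot_get_deal_by_id above enrich the raw properties with a human-readable dealstage_label before normalizing; DEAL_RULES then surfaces it as stageLabel. A self-contained sketch of that enrichment step (the stage map and deal values are hypothetical):

    # Hypothetical output of _build_dealstage_label_map(client).
    stage_label_map = {"presentationscheduled": "Presentation Scheduled"}

    props = {"id": "9001", "dealname": "Renewal Q3", "dealstage": "presentationscheduled"}
    stage_id = props.get("dealstage")
    if stage_id and stage_id in stage_label_map:
        props["dealstage_label"] = stage_label_map[stage_id]
    # normalize(props, DEAL_RULES) would then yield:
    # {"id": "9001", "name": "Renewal Q3", "stage": "presentationscheduled",
    #  "stageLabel": "Presentation Scheduled"}
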
@@ -67,7 +78,7 @@ async def hubspot_get_deal_by_id(deal_id: str): - deal_id: HubSpot deal ID Returns: - - Deal object + - Normalized deal object """ client = get_hubspot_client() if not client: @@ -76,6 +87,7 @@ async def hubspot_get_deal_by_id(deal_id: str): try: logger.info(f"Fetching deal ID: {deal_id}...") result = client.crm.deals.basic_api.get_by_id(deal_id) + # Enrich with human-readable dealstage label stage_label_map = _build_dealstage_label_map(client) props = getattr(result, "properties", {}) or {} @@ -83,13 +95,17 @@ async def hubspot_get_deal_by_id(deal_id: str): if stage_id and stage_id in stage_label_map: props["dealstage_label"] = stage_label_map[stage_id] result.properties = props + + # Normalize response + deal = normalize_deal(result) + logger.info(f"Fetched deal ID: {deal_id} successfully.") - return result + return {"deal": deal} except Exception as e: logger.error(f"Error fetching deal by ID: {e}") - return None + raise e -async def hubspot_create_deal(properties: str): +async def hubspot_create_deal(properties: str) -> Dict[str, Any]: """ Create a new deal. @@ -97,7 +113,7 @@ async def hubspot_create_deal(properties: str): - properties: JSON string of deal properties Returns: - - Newly created deal + - Normalized newly created deal """ client = get_hubspot_client() if not client: @@ -131,15 +147,19 @@ async def hubspot_create_deal(properties: str): error_msg = "Invalid property names detected:\n" + "\n".join(suggestions) error_msg += "\n\nTip: Call 'hubspot_list_properties' with object_type='deals' to see all valid property names." logger.warning(error_msg) - return f"Error: {error_msg}" + return {"error": error_msg} data = SimplePublicObjectInputForCreate(properties=props) result = client.crm.deals.basic_api.create(simple_public_object_input_for_create=data) + + # Normalize response + deal = normalize_deal(result) + logger.info("Deal created successfully.") - return result + return {"deal": deal, "status": "created"} except Exception as e: logger.error(f"Error creating deal: {e}") - return f"Error occurred: {e}" + raise e async def hubspot_update_deal_by_id(deal_id: str, updates: str): """ diff --git a/mcp_servers/hubspot/tools/properties.py b/mcp_servers/hubspot/tools/properties.py index 608356e8..e7ac0d7b 100644 --- a/mcp_servers/hubspot/tools/properties.py +++ b/mcp_servers/hubspot/tools/properties.py @@ -1,15 +1,24 @@ import logging import json import ast +from typing import Dict, Any, List from hubspot.crm.objects import Filter, FilterGroup, PublicObjectSearchRequest from hubspot.crm.properties import PropertyCreate -from .base import get_hubspot_client +from .base import ( + get_hubspot_client, + normalize_property, + normalize_contact, + normalize_company, + normalize_deal, + normalize_ticket, +) from .deals import _build_dealstage_label_map # Configure logging logger = logging.getLogger(__name__) -async def hubspot_list_properties(object_type: str) -> list[dict]: + +async def hubspot_list_properties(object_type: str) -> Dict[str, Any]: """ List all properties for a given object type. 
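
normalize_property in tools/base.py accepts either a HubSpot SDK Property object or a plain dict, so the properties listing below stays uniform regardless of source. A dict-input sketch (values hypothetical):

    prop = normalize_property({
        "name": "dealstage",
        "label": "Deal Stage",
        "type": "enumeration",
        "field_type": "select",
    })
    # -> {"name": "dealstage", "label": "Deal Stage",
    #     "type": "enumeration", "fieldType": "select"}
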
@@ -17,7 +26,7 @@ async def hubspot_list_properties(object_type: str) -> list[dict]: - object_type: One of "contacts", "companies", "deals", or "tickets" Returns: - - List of property metadata + - Normalized list of property metadata """ client = get_hubspot_client() if not client: @@ -26,16 +35,16 @@ async def hubspot_list_properties(object_type: str) -> list[dict]: logger.info(f"Executing hubspot_list_properties for object_type: {object_type}") try: props = client.crm.properties.core_api.get_all(object_type) + + # Normalize response + properties = [normalize_property(p) for p in props.results] + logger.info(f"Successfully Executed hubspot_list_properties for object_type: {object_type}") - return [ - { - "name": p.name, - "label": p.label, - "type": p.type, - "field_type": p.field_type - } - for p in props.results - ] + return { + "objectType": object_type, + "count": len(properties), + "properties": properties, + } except Exception as e: logger.exception(f"Error executing hubspot_list_properties: {e}") raise e @@ -45,9 +54,9 @@ async def hubspot_search_by_property( property_name: str, operator: str, value: str, - properties: list[str], + properties: List[str], limit: int = 10 -) -> list[dict]: +) -> Dict[str, Any]: """ Search HubSpot objects by property. @@ -200,23 +209,38 @@ async def hubspot_search_by_property( raise ValueError(f"Unsupported object type: {object_type}") logger.info(f"hubspot_search_by_property: Found {len(results.results)} result(s)") - # Enrich deals with human-readable dealstage label + + # Normalize results based on object type + normalized_results = [] if object_type == "deals": + # Enrich deals with human-readable dealstage label stage_label_map = _build_dealstage_label_map(client) - enriched: list[dict] = [] for obj in results.results: props = (getattr(obj, "properties", {}) or {}).copy() stage_id = props.get("dealstage") if stage_id and stage_id in stage_label_map: props["dealstage_label"] = stage_label_map[stage_id] - enriched.append(props) - return enriched - # For other objects, return properties as-is - return [obj.properties for obj in results.results] + obj.properties = props + normalized_results.append(normalize_deal(obj)) + elif object_type == "contacts": + normalized_results = [normalize_contact(obj) for obj in results.results] + elif object_type == "companies": + normalized_results = [normalize_company(obj) for obj in results.results] + elif object_type == "tickets": + normalized_results = [normalize_ticket(obj) for obj in results.results] + else: + # Fallback for other object types + normalized_results = [obj.properties for obj in results.results] + + return { + "objectType": object_type, + "count": len(normalized_results), + "results": normalized_results, + } except Exception as e: logger.exception(f"Error executing hubspot_search_by_property: {e}") - return (f"Error executing hubspot_search_by_property: {e}") + raise e async def hubspot_create_property(name: str, label: str, description: str, object_type: str) -> str: """ diff --git a/mcp_servers/hubspot/tools/tasks.py b/mcp_servers/hubspot/tools/tasks.py index 60ceeb81..231dce34 100644 --- a/mcp_servers/hubspot/tools/tasks.py +++ b/mcp_servers/hubspot/tools/tasks.py @@ -1,20 +1,20 @@ import logging import json -from typing import Optional +from typing import Optional, Dict, Any from hubspot.crm.objects import ( SimplePublicObjectInputForCreate, SimplePublicObjectInput, ) -from .base import get_hubspot_client +from .base import get_hubspot_client, normalize_task # Configure logging logger = 
logging.getLogger(__name__) -async def hubspot_get_tasks(limit: int = 10): +async def hubspot_get_tasks(limit: int = 10) -> Dict[str, Any]: """ Fetch a list of tasks from HubSpot. @@ -22,7 +22,7 @@ async def hubspot_get_tasks(limit: int = 10): - limit: Number of tasks to return Returns: - - List of task records + - Normalized list of task records """ client = get_hubspot_client() if not client: @@ -42,14 +42,22 @@ async def hubspot_get_tasks(limit: int = 10): limit=limit, properties=common_properties, ) - logger.info(f"Fetched {len(result.results)} tasks successfully.") - return result + + # Normalize response + tasks = [normalize_task(obj) for obj in (result.results or [])] + + logger.info(f"Fetched {len(tasks)} tasks successfully.") + return { + "count": len(tasks), + "tasks": tasks, + "hasMore": result.paging.next.after is not None if result.paging and result.paging.next else False, + } except Exception as e: logger.error(f"Error fetching tasks: {e}") - return None + raise e -async def hubspot_get_task_by_id(task_id: str): +async def hubspot_get_task_by_id(task_id: str) -> Dict[str, Any]: """ Fetch a task by its ID. @@ -57,7 +65,7 @@ async def hubspot_get_task_by_id(task_id: str): - task_id: HubSpot task ID Returns: - - Task object + - Normalized task object """ client = get_hubspot_client() if not client: @@ -77,15 +85,18 @@ async def hubspot_get_task_by_id(task_id: str): task_id, properties=common_properties, ) - print(f"---Result: {result}") + + # Normalize response + task = normalize_task(result) + logger.info(f"Fetched task ID: {task_id} successfully.") - return result + return {"task": task} except Exception as e: logger.error(f"Error fetching task by ID: {e}") - return None + raise e -async def hubspot_create_task(properties: str): +async def hubspot_create_task(properties: str) -> Dict[str, Any]: """ Create a new task. @@ -93,7 +104,7 @@ async def hubspot_create_task(properties: str): - properties: JSON string of task properties (see HubSpot docs) Returns: - - Newly created task + - Normalized newly created task """ client = get_hubspot_client() if not client: @@ -106,11 +117,15 @@ async def hubspot_create_task(properties: str): result = client.crm.objects.tasks.basic_api.create( simple_public_object_input_for_create=data ) + + # Normalize response + task = normalize_task(result) + logger.info("Task created successfully.") - return result + return {"task": task, "status": "created"} except Exception as e: logger.error(f"Error creating task: {e}") - return f"Error occurred: {e}" + raise e async def hubspot_update_task_by_id(task_id: str, updates: str): diff --git a/mcp_servers/hubspot/tools/tickets.py b/mcp_servers/hubspot/tools/tickets.py index c781b51f..115d6963 100644 --- a/mcp_servers/hubspot/tools/tickets.py +++ b/mcp_servers/hubspot/tools/tickets.py @@ -1,12 +1,14 @@ import logging import json +from typing import Dict, Any from hubspot.crm.tickets import SimplePublicObjectInputForCreate, SimplePublicObjectInput -from .base import get_hubspot_client +from .base import get_hubspot_client, normalize_ticket # Configure logging logger = logging.getLogger(__name__) -async def hubspot_get_tickets(limit: int = 10): + +async def hubspot_get_tickets(limit: int = 10) -> Dict[str, Any]: """ Fetch a list of tickets from HubSpot. 
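The hasMore expression above has to guard every attribute hop because the HubSpot SDK returns paging=None (and paging.next=None) on the last page. The same guard can be factored into a small helper; a sketch only, _has_more is not part of this diff:

def _has_more(result) -> bool:
    # Each level can be None on the final page, so walk down defensively
    # rather than assuming result.paging.next.after exists.
    paging = getattr(result, "paging", None)
    nxt = getattr(paging, "next", None) if paging else None
    return bool(nxt) and nxt.after is not None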
@@ -14,7 +16,7 @@ async def hubspot_get_tickets(limit: int = 10): - limit: Number of tickets to return Returns: - - List of ticket records + - Normalized list of ticket records """ client = get_hubspot_client() if not client: @@ -23,13 +25,22 @@ try: logger.info(f"Fetching up to {limit} tickets...") result = client.crm.tickets.basic_api.get_page(limit=limit) - logger.info(f"Fetched {len(result.results)} tickets successfully.") - return result + + # Normalize response + tickets = [normalize_ticket(obj) for obj in (result.results or [])] + + logger.info(f"Fetched {len(tickets)} tickets successfully.") + return { + "count": len(tickets), + "tickets": tickets, + "hasMore": result.paging.next.after is not None if result.paging and result.paging.next else False, + } except Exception as e: logger.error(f"Error fetching tickets: {e}") - return None + raise e + -async def hubspot_get_ticket_by_id(ticket_id: str): +async def hubspot_get_ticket_by_id(ticket_id: str) -> Dict[str, Any]: """ Fetch a ticket by its ID. @@ -37,7 +48,7 @@ - ticket_id: HubSpot ticket ID Returns: - - Ticket object + - Normalized ticket object """ client = get_hubspot_client() if not client: @@ -46,13 +57,18 @@ try: logger.info(f"Fetching ticket ID: {ticket_id}...") result = client.crm.tickets.basic_api.get_by_id(ticket_id) + + # Normalize response + ticket = normalize_ticket(result) + logger.info(f"Fetched ticket ID: {ticket_id} successfully.") - return result + return {"ticket": ticket} except Exception as e: logger.error(f"Error fetching ticket by ID: {e}") - return None + raise e -async def hubspot_create_ticket(properties: str): + +async def hubspot_create_ticket(properties: str) -> Dict[str, Any]: """ Create a new ticket. @@ -60,7 +76,7 @@ - properties: JSON string of ticket properties Returns: - - Newly created ticket + - Normalized newly created ticket """ client = get_hubspot_client() if not client: @@ -71,11 +87,15 @@ try: props = json.loads(properties) data = SimplePublicObjectInputForCreate(properties=props) result = client.crm.tickets.basic_api.create(simple_public_object_input_for_create=data) + + # Normalize response + ticket = normalize_ticket(result) + logger.info("Ticket created successfully.") - return result + return {"ticket": ticket, "status": "created"} except Exception as e: logger.error(f"Error creating ticket: {e}") - return f"Error occurred: {e}" + raise e async def hubspot_update_ticket_by_id(ticket_id: str, updates: str): """ diff --git a/mcp_servers/linear/server.py b/mcp_servers/linear/server.py index e9d83abd..f8abb4ff 100644 --- a/mcp_servers/linear/server.py +++ b/mcp_servers/linear/server.py @@ -4,8 +4,7 @@ import json import base64 from collections.abc import AsyncIterator -from typing import Any, Dict -from contextvars import ContextVar +from typing import Any import click import mcp.types as types @@ -18,6 +17,224 @@ from starlette.types import Receive, Scope, Send from dotenv import load_dotenv + +def get_path(data: dict, path: str) -> Any: + """Safe dot-notation access. 
Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: dict, mapping: dict[str, Any]) -> dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + Args: + source: Raw vendor JSON. + mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# Mapping Rules for Linear Objects + +USER_RULES = { + "userId": "id", + "displayName": "name", + "userEmail": "email", + "nickname": "displayName", +} + +STATE_RULES = { + "stateId": "id", + "stateName": "name", + "category": "type", + "stateColor": "color", +} + +TEAM_RULES = { + "teamId": "id", + "teamName": "name", + "teamKey": "key", + "summary": "description", + "isPrivate": "private", + "dateCreated": "createdAt", + "dateModified": "updatedAt", + "workflowStates": lambda x: [ + normalize(s, STATE_RULES) for s in get_path(x, 'states.nodes') or [] + ] if get_path(x, 'states.nodes') else None, + "teamMembers": lambda x: [ + normalize(m, USER_RULES) for m in get_path(x, 'members.nodes') or [] + ] if get_path(x, 'members.nodes') else None, +} + +ISSUE_RULES = { + "itemId": "id", + "ticketNumber": "identifier", + "subject": "title", + "details": "description", + "priorityLevel": "priority", + "priorityName": "priorityLabel", + "deadline": "dueDate", + "status": lambda x: normalize(get_path(x, 'state') or {}, STATE_RULES), + "assignedTo": lambda x: normalize(get_path(x, 'assignee') or {}, USER_RULES), + "reportedBy": lambda x: normalize(get_path(x, 'creator') or {}, USER_RULES), + "owningTeam": lambda x: normalize(get_path(x, 'team') or {}, {"teamId": "id", "teamName": "name", "teamKey": "key"}), + "parentProject": lambda x: normalize(get_path(x, 'project') or {}, {"projectId": "id", "projectName": "name"}), + "dateCreated": "createdAt", + "dateModified": "updatedAt", + "externalLink": "url", + "responses": lambda x: [ + normalize(c, { + "responseId": "id", + "content": "body", + "author": lambda c: normalize(get_path(c, 'user') or {}, USER_RULES), + "dateCreated": "createdAt", + "dateModified": "updatedAt" + }) for c in get_path(x, 'comments.nodes') or [] + ] if get_path(x, 'comments.nodes') else None, +} + +PROJECT_RULES = { + "projectId": "id", + "projectName": "name", + "summary": "description", + "currentState": "state", + "completion": "progress", + "targetCompletion": "targetDate", + "projectLead": lambda x: normalize(get_path(x, 'lead') or {}, USER_RULES), + "contributors": lambda x: [ + normalize(m, USER_RULES) for m in get_path(x, 'members.nodes') or [] + ] if get_path(x, 'members.nodes') else None, + "associatedTeams": lambda x: [ + normalize(t, {"teamId": "id", "teamName": "name", "teamKey": "key"}) + for t in get_path(x, 'teams.nodes') or [] + ] if get_path(x, 'teams.nodes') else None, + "dateCreated": "createdAt", + "dateModified": "updatedAt", + "externalLink": "url", +} + +COMMENT_RULES = { + "responseId": "id", + "content": "body", + "author": lambda x: normalize(get_path(x, 'user') or {}, USER_RULES), + "relatedIssue": lambda x: normalize(get_path(x, 
'issue') or {}, { + "itemId": "id", + "ticketNumber": "identifier", + "subject": "title" + }), + "dateCreated": "createdAt", + "dateModified": "updatedAt", + "externalLink": "url", +} + + +def normalize_team(raw_team: dict) -> dict: + """Normalize a single team.""" + return normalize(raw_team, TEAM_RULES) + + +def normalize_issue(raw_issue: dict) -> dict: + """Normalize a single issue.""" + return normalize(raw_issue, ISSUE_RULES) + + +def normalize_project(raw_project: dict) -> dict: + """Normalize a single project.""" + return normalize(raw_project, PROJECT_RULES) + + +def normalize_comment(raw_comment: dict) -> dict: + """Normalize a single comment.""" + return normalize(raw_comment, COMMENT_RULES) + + +def normalize_linear_response(data: dict, data_type: str) -> dict: + """Normalize a Linear API response into vendor-neutral field names to avoid IP conflicts.""" + if not data or 'data' not in data: + return data + + normalized_data = {"data": {}} + + # Copy errors if they exist + if 'errors' in data: + normalized_data['errors'] = data['errors'] + + original_data = data['data'] + + if data_type == 'teams': + if 'teams' in original_data and 'nodes' in original_data['teams']: + normalized_data['data']['workspaceTeams'] = { + "items": [normalize_team(team) for team in original_data['teams']['nodes']] + } + + elif data_type == 'issues': + if 'issues' in original_data and 'nodes' in original_data['issues']: + normalized_data['data']['workItems'] = { + "items": [normalize_issue(issue) for issue in original_data['issues']['nodes']] + } + + elif data_type == 'issue': + if 'issue' in original_data: + normalized_data['data']['workItem'] = normalize_issue(original_data['issue']) + + elif data_type == 'projects': + if 'projects' in original_data and 'nodes' in original_data['projects']: + normalized_data['data']['initiatives'] = { + "items": [normalize_project(project) for project in original_data['projects']['nodes']] + } + + elif data_type == 'comments': + if 'issue' in original_data and 'comments' in original_data['issue']: + issue_data = normalize_issue(original_data['issue']) + normalized_data['data']['workItemResponses'] = { + "parentItem": { + "itemId": issue_data.get('itemId'), + "ticketNumber": issue_data.get('ticketNumber'), + "subject": issue_data.get('subject') + }, + "items": issue_data.get('responses', []) + } + + elif data_type in ['issueCreate', 'issueUpdate', 'projectCreate', 'projectUpdate', 'commentCreate', 'commentUpdate']: + # Handle mutation responses + for key, value in original_data.items(): + if key.endswith('Create') or key.endswith('Update'): + normalized_key = key.replace('issue', 'workItem').replace('comment', 'response').replace('project', 'initiative') + normalized_data['data'][normalized_key] = {} + + if 'success' in value: + normalized_data['data'][normalized_key]['success'] = value['success'] + + if 'issue' in value: + normalized_data['data'][normalized_key]['workItem'] = normalize_issue(value['issue']) + elif 'comment' in value: + normalized_data['data'][normalized_key]['response'] = normalize_comment(value['comment']) + elif 'project' in value: + normalized_data['data'][normalized_key]['initiative'] = normalize_project(value['project']) + + return normalized_data + + from tools import ( auth_token_context, get_teams, @@ -478,10 +694,11 @@ async def call_tool( if name == "linear_get_teams": try: result = await get_teams() + normalized_result = normalize_linear_response(result, 'teams') return [ 
types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -499,10 +716,11 @@ async def call_tool( filter_param = arguments.get("filter") try: result = await get_issues(team_id, limit, filter_param) + normalized_result = normalize_linear_response(result, 'issues') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -525,10 +743,11 @@ async def call_tool( ] try: result = await get_issue_by_id(issue_id) + normalized_result = normalize_linear_response(result, 'issue') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -560,10 +779,11 @@ async def call_tool( try: result = await create_issue(team_id, title, description, assignee_id, priority, state_id, project_id, due_date) + normalized_result = normalize_linear_response(result, 'issueCreate') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -595,10 +815,11 @@ async def call_tool( try: result = await update_issue(issue_id, title, description, assignee_id, priority, state_id, project_id, due_date) + normalized_result = normalize_linear_response(result, 'issueUpdate') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -616,10 +837,11 @@ async def call_tool( filter_param = arguments.get("filter") try: result = await get_projects(team_id, limit, filter_param) + normalized_result = normalize_linear_response(result, 'projects') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -648,10 +870,11 @@ async def call_tool( try: result = await create_project(name, description, team_ids, lead_id, target_date) + normalized_result = normalize_linear_response(result, 'projectCreate') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -681,10 +904,11 @@ async def call_tool( try: result = await update_project(project_id, name, description, state, target_date, lead_id) + normalized_result = normalize_linear_response(result, 'projectUpdate') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -707,10 +931,11 @@ async def call_tool( ] try: result = await get_comments(issue_id) + normalized_result = normalize_linear_response(result, 'comments') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -734,10 +959,11 @@ async def call_tool( ] try: result = await create_comment(issue_id, body) + normalized_result = normalize_linear_response(result, 'commentCreate') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -761,10 +987,11 @@ async def call_tool( ] try: result = await update_comment(comment_id, body) + normalized_result = normalize_linear_response(result, 'commentUpdate') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + 
text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -791,10 +1018,11 @@ async def call_tool( try: result = await search_issues(query_text, team_id, limit) + normalized_result = normalize_linear_response(result, 'issues') return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: diff --git a/mcp_servers/mem0/requirements.txt b/mcp_servers/mem0/requirements.txt index 61322502..30bbc162 100644 --- a/mcp_servers/mem0/requirements.txt +++ b/mcp_servers/mem0/requirements.txt @@ -7,4 +7,4 @@ httpx>=0.27.0 python-dotenv>=1.0.0 typing-extensions starlette>=0.49.1 -mem0ai>=0.1.115 \ No newline at end of file +mem0ai>=1.0.1 \ No newline at end of file diff --git a/mcp_servers/mem0/server.py b/mcp_servers/mem0/server.py index f57567f7..a2084854 100644 --- a/mcp_servers/mem0/server.py +++ b/mcp_servers/mem0/server.py @@ -4,6 +4,7 @@ import contextlib import json from collections.abc import AsyncIterator +from typing import Any, Dict, Optional import click from dotenv import load_dotenv @@ -18,51 +19,220 @@ from tools import ( mem0_api_key_context, + mem0_user_id_context, + DEFAULT_MCP_USER_ID, + get_mem0_user_id, add_memory, - get_all_memories, + get_memories, search_memories, + get_memory, update_memory, delete_memory, + delete_all_memories, + list_entities, + delete_entities, ) load_dotenv() + +def get_path(data: dict, path: str) -> Any: + """Safe dot-notation access. Returns None if path fails.""" + if not data: + return None + current = data + for key in path.split('.'): + if isinstance(current, dict): + current = current.get(key) + else: + return None + return current + + +def normalize(source: dict, mapping: dict[str, Any]) -> dict: + """ + Creates a new clean dictionary based strictly on the mapping rules. + Excludes fields with None/null values from the output. + Args: + source: Raw vendor JSON. 
+ mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function } + """ + clean_data = {} + for target_key, rule in mapping.items(): + value = None + if isinstance(rule, str): + value = get_path(source, rule) + elif callable(rule): + try: + value = rule(source) + except Exception: + value = None + if value is not None: + clean_data[target_key] = value + return clean_data + + +# Mapping Rules for Mem0 Objects + +MEMORY_RULES = { + "memoryId": "id", + "content": "memory", + "created": "created_at", + "updated": "updated_at", + "userId": "user_id", + "agentId": "agent_id", + "appId": "app_id", + "runId": "run_id", + "score": "score", + "metadata": "metadata", + "categories": "categories", + "expirationDate": "expiration_date", + "structuredData": "structured_attributes", +} + +MEMORY_LIST_RULES = { + "items": lambda x: [ + normalize(memory, MEMORY_RULES) for memory in x.get('result', {}).get('results', []) + ] if x.get('success') and x.get('result', {}).get('results') else None, + "totalCount": lambda x: len(x.get('result', {}).get('results', [])) if x.get('success') and x.get('result', {}).get('results') else None, + "success": "success", + "error": "error", +} + +MEMORY_SEARCH_RULES = { + "results": lambda x: [ + normalize(memory, MEMORY_RULES) for memory in (x.get('result', {}).get('results', []) if x.get('result', {}).get('results') else x.get('result', [])) + ] if x.get('success') and x.get('result') else None, + "query": "query", + "totalFound": lambda x: len(x.get('result', {}).get('results', []) if x.get('result', {}).get('results') else x.get('result', [])) if x.get('success') and x.get('result') else None, + "success": "success", + "error": "error", +} + +MEMORY_OPERATION_RULES = { + "success": "success", + "memoryId": "result.id", + "operation": lambda x: x.get('operation', 'unknown'), + "message": lambda x: x.get('result', {}).get('message') or x.get('error', 'Operation completed'), + "error": "error", + "status": "status", +} + +ENTITY_LIST_RULES = { + "entities": lambda x: [ + { + "name": entity.get('name'), # 'name' field contains the actual user_id + "entityType": entity.get('type', 'user'), + "memoryCount": entity.get('total_memories', 0), + "created": entity.get('created_at'), + "updated": entity.get('updated_at'), + "owner": entity.get('owner'), + "metadata": entity.get('metadata'), + } for entity in x.get('result', {}).get('results', []) + ] if x.get('success') and x.get('result', {}).get('results') else None, + "totalEntities": lambda x: x.get('result', {}).get('count', 0) if x.get('success') and x.get('result') else None, + "totalUsers": lambda x: x.get('result', {}).get('total_users', 0) if x.get('success') and x.get('result') else None, + "totalAgents": lambda x: x.get('result', {}).get('total_agents', 0) if x.get('success') and x.get('result') else None, + "totalApps": lambda x: x.get('result', {}).get('total_apps', 0) if x.get('success') and x.get('result') else None, + "totalRuns": lambda x: x.get('result', {}).get('total_runs', 0) if x.get('success') and x.get('result') else None, + "nextPage": lambda x: x.get('result', {}).get('next') if x.get('success') and x.get('result') else None, + "previousPage": lambda x: x.get('result', {}).get('previous') if x.get('success') and x.get('result') else None, + "success": "success", + "error": "error", +} + + +def normalize_memory(raw_memory: dict) -> dict: + """Normalize a single memory.""" + memory = normalize(raw_memory, MEMORY_RULES) 
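+    # NOTE: `normalize` both renames vendor keys (id -> memoryId, memory -> content, ...) and drops keys whose value resolved to None.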
+ return memory + + +def normalize_memory_list(raw_response: dict) -> dict: + """Normalize a memory list response.""" + response = normalize(raw_response, MEMORY_LIST_RULES) + return response + + +def normalize_memory_search(raw_response: dict) -> dict: + """Normalize a memory search response.""" + response = normalize(raw_response, MEMORY_SEARCH_RULES) + return response + + +def normalize_memory_operation(raw_response: dict) -> dict: + """Normalize a memory operation response (add/update/delete).""" + response = normalize(raw_response, MEMORY_OPERATION_RULES) + return response + + +def normalize_entity_list(raw_response: dict) -> dict: + """Normalize an entity list response.""" + response = normalize(raw_response, ENTITY_LIST_RULES) + return response + # Configure logging logging.basicConfig(level=logging.INFO) logger = logging.getLogger("mem0-mcp-server") MEM0_MCP_SERVER_PORT = int(os.getenv("MEM0_MCP_SERVER_PORT", "5000")) -def extract_api_key(request_or_scope) -> str: - """Extract API key from headers or environment.""" - api_key = os.getenv("API_KEY") +def extract_auth_info(request_or_scope) -> tuple[str, str | None]: + """Extract API key and User ID from headers or environment.""" + api_key = os.getenv("MEM0_API_KEY") or os.getenv("API_KEY") + user_id = os.getenv("MEM0_DEFAULT_USER_ID", DEFAULT_MCP_USER_ID) - if not api_key: - # Handle different input types (request object for SSE, scope dict for StreamableHTTP) - if hasattr(request_or_scope, 'headers'): - # SSE request object - auth_data = request_or_scope.headers.get(b'x-auth-data') - if auth_data and isinstance(auth_data, bytes): - auth_data = base64.b64decode(auth_data).decode('utf-8') - elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: - # StreamableHTTP scope object - headers = dict(request_or_scope.get("headers", [])) - auth_data = headers.get(b'x-auth-data') - if auth_data: - auth_data = base64.b64decode(auth_data).decode('utf-8') - else: - auth_data = None - + auth_data = None + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data and isinstance(auth_data, bytes): + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') if auth_data: - try: - # Parse the JSON auth data to extract token - auth_json = json.loads(auth_data) - api_key = auth_json.get('token') or auth_json.get('api_key') or '' - except (json.JSONDecodeError, TypeError) as e: - logger.warning(f"Failed to parse auth data JSON: {e}") - api_key = "" + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if auth_data: + try: + # Parse the JSON auth data to extract token and user_id + auth_json = json.loads(auth_data) + + # Keep the original precedence: an env-configured API key wins and the request token is only a fallback, + # while a user_id supplied in the request overrides the default. 
+ extracted_key = auth_json.get('token') or auth_json.get('api_key') + if extracted_key and not api_key: + api_key = extracted_key + + # Extract user_id + extracted_user = auth_json.get('user_id') + if extracted_user: + user_id = extracted_user + + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") - return api_key or "" + return api_key or "", user_id + +def _with_default_filters( + default_user_id: str, filters: Optional[Dict[str, Any]] = None +) -> Dict[str, Any]: + """Ensure filters exist and include the default user_id at the top level.""" + if not filters: + return {"AND": [{"user_id": default_user_id}]} + if not any(key in filters for key in ("AND", "OR", "NOT")): + filters = {"AND": [filters]} + has_user = '"user_id"' in json.dumps(filters, sort_keys=True) + if not has_user: + and_list = filters.setdefault("AND", []) + if not isinstance(and_list, list): + raise ValueError("filters['AND'] must be a list when present.") + and_list.insert(0, {"user_id": default_user_id}) + return filters @click.command() @click.option("--port", default=MEM0_MCP_SERVER_PORT, help="Port to listen on for HTTP") @@ -96,117 +266,215 @@ async def list_tools() -> list[types.Tool]: return [ types.Tool( name="mem0_add_memory", - description="Add a new memory to mem0 for long-term storage. This tool stores code snippets, implementation details, and programming knowledge for future reference. When storing information, you should include: complete code with all necessary imports and dependencies, language/framework version information, full implementation context and any required setup/configuration, detailed comments explaining the logic, example usage or test cases, any known limitations or performance considerations, related patterns or alternative approaches, links to relevant documentation or resources, environment setup requirements, and error handling tips. The memory will be indexed for semantic search and can be retrieved later using natural language queries.", + description="Store a new preference, fact, or conversation snippet. Requires at least one: user_id, agent_id, or run_id.", inputSchema={ "type": "object", - "required": ["content"], "properties": { + "text": { + "type": "string", + "description": "Plain sentence summarizing what to store. Provide this (or `content`) unless `messages` is supplied.", + }, "content": { "type": "string", - "description": "The content to store in memory, including code, documentation, and context." + "description": "Legacy alias of `text`.", + }, + "messages": { + "type": "array", + "description": "Structured conversation history with `role`/`content`. Use when you have multiple turns.", + "items": { + "type": "object", + "required": ["role", "content"], + "properties": { + "role": {"type": "string"}, + "content": {"type": "string"}, + }, + }, }, "user_id": { "type": "string", - "description": "Optional user ID. If not provided, uses the default user ID." - } - } + "description": "Unique identifier for the user. If not specified, the system uses the default user. Do NOT fill with a random value."
+ }, + "agent_id": {"type": "string", "description": "Optional agent identifier."}, + "app_id": {"type": "string", "description": "Optional app identifier."}, + "run_id": {"type": "string", "description": "Optional run identifier."}, + "metadata": { + "type": "object", + "description": "Attach arbitrary metadata JSON to the memory.", + "additionalProperties": True, + }, + "enable_graph": { + "type": "boolean", + "description": "Set true only if the caller explicitly wants Mem0 graph memory.", + }, + }, + "anyOf": [{"required": ["text"]}, {"required": ["content"]}, {"required": ["messages"]}], }, annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}) ), types.Tool( - name="mem0_get_all_memories", - description="Retrieve all stored memories for the user. Call this tool when you need complete context of all previously stored information. This is useful when you need to analyze all available code patterns and knowledge, check all stored implementation examples, review the full history of stored solutions, or ensure no relevant information is missed. Returns a comprehensive list of code snippets and implementation patterns, programming knowledge and best practices, technical documentation and examples, and setup and configuration guides. Results are returned in JSON format with metadata.", + name="mem0_get_memories", + description="""Page through memories using filters instead of search. + + Use filters to list specific memories. Common filter patterns: + - Single user: {"AND": [{"user_id": "john"}]} + - Agent memories: {"AND": [{"agent_id": "agent_name"}]} + - Recent memories: {"AND": [{"user_id": "john"}, {"created_at": {"gte": "2024-01-01"}}]} + - Multiple users: {"AND": [{"user_id": {"in": ["john", "jane"]}}]} + + Pagination: Use page (1-indexed) and page_size for browsing results. + user_id is automatically added to filters if not provided.""", inputSchema={ "type": "object", "properties": { - "user_id": { - "type": "string", - "description": "Optional user ID. If not provided, uses the default user ID." + "filters": { + "type": "object", + "description": "Structured filters; user_id injected automatically.", + "additionalProperties": True, }, "page": { "type": "integer", - "description": "Page number for pagination (default: 1).", - "default": 1 + "description": "1-indexed page number when paginating.", }, "page_size": { "type": "integer", - "description": "Number of memories per page (default: 50).", - "default": 50 - } - } + "description": "Number of memories per page (default 10).", + }, + "enable_graph": { + "type": "boolean", + "description": "Set true only if the caller explicitly wants graph-derived memories.", + }, + "user_id": { + "type": "string", + "description": "Unique identifier for the user. If not specified, the system uses the default user. Do NOT fill with a random value.", + }, + }, }, - annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY", "readOnlyHint": True}) + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY", "readOnlyHint": True}), ), types.Tool( name="mem0_search_memories", - description="Search through stored memories using semantic search. This tool should be called for user queries to find relevant code and implementation details. It helps find specific code implementations or patterns, solutions to programming problems, best practices and coding standards, setup and configuration guides, and technical documentation and examples. 
The search uses natural language understanding to find relevant matches, so you can describe what you're looking for in plain English. Search the memories to leverage existing knowledge before providing answers.", + description="""Run a semantic search over existing memories. + + Use filters to narrow results. Common filter patterns: + - Single user: {"AND": [{"user_id": "john"}]} + - Agent memories: {"AND": [{"agent_id": "agent_name"}]} + - Recent memories: {"AND": [{"user_id": "john"}, {"created_at": {"gte": "2024-01-01"}}]} + - Multiple users: {"AND": [{"user_id": {"in": ["john", "jane"]}}]} + - Cross-entity: {"OR": [{"user_id": "john"}, {"agent_id": "agent_name"}]} + + user_id is automatically added to filters if not provided.""", inputSchema={ "type": "object", "required": ["query"], "properties": { "query": { "type": "string", - "description": "Search query string describing what you're looking for. Can be natural language or specific technical terms." + "description": "Natural language description of what to find." }, "user_id": { "type": "string", - "description": "Optional user ID. If not provided, uses the default user ID." + "description": "Unique identifier for the user. If not specified, the system uses the default user. Do NOT fill with a random value." + }, + "filters": { + "type": "object", + "description": "Additional filter clauses (user_id injected automatically).", + "additionalProperties": True, }, "limit": { "type": "integer", - "description": "Maximum number of results to return (default: 20).", + "description": "Maximum number of results to return.", "default": 20 - } + }, + "enable_graph": { + "type": "boolean", + "description": "Set true only when the user explicitly wants graph-derived memories.", + }, } }, annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY", "readOnlyHint": True}) ), + types.Tool( + name="mem0_get_memory", + description="Fetch a single memory once you know its memory_id.", + inputSchema={ + "type": "object", + "required": ["memory_id"], + "properties": {"memory_id": {"type": "string", "description": "Exact memory_id to fetch."}}, + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY", "readOnlyHint": True}), + ), + types.Tool( + name="mem0_list_entities", + description="List which users/agents/apps/runs currently hold memories.", + inputSchema={"type": "object", "properties": {}}, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY", "readOnlyHint": True}), + ), types.Tool( name="mem0_update_memory", - description="Update an existing memory with new data. This tool allows you to modify the content of a previously stored memory while maintaining its unique identifier. Use this when you need to correct, enhance, or completely replace the content of an existing memory entry.", + description="Overwrite an existing memory's text.", inputSchema={ "type": "object", - "required": ["memory_id", "data"], + "required": ["memory_id"], "properties": { "memory_id": { "type": "string", - "description": "The unique identifier of the memory to update." + "description": "Exact memory_id to overwrite." }, - "data": { + "text": { "type": "string", - "description": "The new content to replace the existing memory data." + "description": "Replacement text for the memory." }, - "user_id": { + "data": { "type": "string", - "description": "Optional user ID. If not provided, uses the default user ID." 
- } + "description": "Legacy alias of `text`.", + }, } }, annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}) ), types.Tool( name="mem0_delete_memory", - description="Delete a specific memory by ID or delete all memories for a user. This tool provides options to remove individual memories or clear all stored memories for a user. Use with caution as deleted memories cannot be recovered.", + description="Delete one memory after the user confirms its memory_id.", inputSchema={ "type": "object", + "required": ["memory_id"], "properties": { "memory_id": { "type": "string", - "description": "The unique identifier of the memory to delete. Required if delete_all is false." - }, - "user_id": { - "type": "string", - "description": "Optional user ID. If not provided, uses the default user ID." - }, - "delete_all": { - "type": "boolean", - "description": "If true, deletes all memories for the user. If false, deletes specific memory by ID.", - "default": False + "description": "Exact memory_id to delete." } } }, annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}) ), + types.Tool( + name="mem0_delete_all_memories", + description="Delete every memory in the given user/agent/app/run but keep the entity.", + inputSchema={ + "type": "object", + "properties": { + "user_id": {"type": "string", "description": "Unique identifier for the user. If not specified, the system uses the default user. Do NOT fill with a random value."}, + "agent_id": {"type": "string", "description": "Optional agent scope to delete."}, + "app_id": {"type": "string", "description": "Optional app scope to delete."}, + "run_id": {"type": "string", "description": "Optional run scope to delete."}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}), + ), + types.Tool( + name="mem0_delete_entities", + description="Remove a user/agent/app/run record entirely (and cascade-delete its memories).", + inputSchema={ + "type": "object", + "properties": { + "user_id": {"type": "string", "description": "Unique identifier for the user to delete. If not specified, the system uses the default user. 
Do NOT fill with a random value."}, + "agent_id": {"type": "string", "description": "Delete this agent and its memories."}, + "app_id": {"type": "string", "description": "Delete this app and its memories."}, + "run_id": {"type": "string", "description": "Delete this run and its memories."}, + }, + }, + annotations=types.ToolAnnotations(**{"category": "MEM0_MEMORY"}), + ), ] @app.call_tool() @@ -215,21 +483,45 @@ async def call_tool( ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: if name == "mem0_add_memory": - content = arguments.get("content") + text = arguments.get("text") or arguments.get("content") + messages = arguments.get("messages") user_id = arguments.get("user_id") - if not content: + agent_id = arguments.get("agent_id") + app_id = arguments.get("app_id") + run_id = arguments.get("run_id") + metadata = arguments.get("metadata") + enable_graph = arguments.get("enable_graph") + if not text and not messages: return [ types.TextContent( type="text", - text="Error: content parameter is required", + text="Error: provide at least one of text/content or messages", + ) + ] + if not any([user_id, agent_id, run_id]) and not get_mem0_user_id(): + return [ + types.TextContent( + type="text", + text="Error: provide at least one of user_id, agent_id, or run_id (or configure a default user_id)", ) ] try: - result = await add_memory(content, user_id) + result = await add_memory( + text=text or "", + messages=messages, + user_id=user_id, + agent_id=agent_id, + app_id=app_id, + run_id=run_id, + metadata=metadata, + enable_graph=enable_graph, + ) + # Normalize the response + normalized_result = normalize_memory_operation(result) return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -241,16 +533,33 @@ async def call_tool( ) ] - elif name == "mem0_get_all_memories": + elif name == "mem0_get_memories": + filters = arguments.get("filters") + page = arguments.get("page") + page_size = arguments.get("page_size") + enable_graph = arguments.get("enable_graph") user_id = arguments.get("user_id") - page = arguments.get("page", 1) - page_size = arguments.get("page_size", 50) + + default_user = user_id or get_mem0_user_id() + + if not filters and not default_user: + return [ + types.TextContent( + type="text", + text="Error: provide filters (or user_id) to scope the listing; no default user_id is configured", + ) + ] try: - result = await get_all_memories(user_id, page, page_size) + if default_user: + filters = _with_default_filters(default_user, filters) + + result = await get_memories(filters=filters, page=page, page_size=page_size, enable_graph=enable_graph) + # Normalize the response + normalized_result = normalize_memory_list(result) return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -261,11 +570,13 @@ async def call_tool( text=f"Error: {str(e)}", ) ] - + elif name == "mem0_search_memories": query = arguments.get("query") - user_id = arguments.get("user_id") limit = arguments.get("limit", 20) + filters = arguments.get("filters") + enable_graph = arguments.get("enable_graph") + user_id = arguments.get("user_id") if not query: return [ types.TextContent( @@ -273,12 +584,33 @@ async def call_tool( text="Error: query parameter is required", ) ] + + default_user = user_id or get_mem0_user_id() + + if not filters and not default_user: + return [ + types.TextContent( + type="text", 
+ text="Error: provide filters or user_id to scope the search; no default user_id is configured", + ) + ] try: - result = await search_memories(query, user_id, limit) + if default_user: + filters = _with_default_filters(default_user, filters) + + result = await search_memories( + query=query, + user_id=None, + filters=filters, + limit=limit, + enable_graph=enable_graph, + ) + # Normalize the response + normalized_result = normalize_memory_search(result) return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -290,23 +622,50 @@ async def call_tool( ) ] + elif name == "mem0_get_memory": + memory_id = arguments.get("memory_id") + if not memory_id: + return [types.TextContent(type="text", text="Error: memory_id parameter is required")] + try: + result = await get_memory(memory_id) + # Normalize the response - single memory from result field + if result.get('success') and result.get('result'): + normalized_result = normalize_memory(result['result']) + else: + normalized_result = normalize_memory_operation(result) + return [types.TextContent(type="text", text=json.dumps(normalized_result, indent=2))] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "mem0_list_entities": + try: + result = await list_entities() + # Normalize the response + normalized_result = normalize_entity_list(result) + return [types.TextContent(type="text", text=json.dumps(normalized_result, indent=2))] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + elif name == "mem0_update_memory": memory_id = arguments.get("memory_id") - data = arguments.get("data") - user_id = arguments.get("user_id") - if not memory_id or not data: + text = arguments.get("text") or arguments.get("data") + if not memory_id or not text: return [ types.TextContent( type="text", - text="Error: memory_id and data parameters are required", + text="Error: memory_id and text/data parameters are required", ) ] try: - result = await update_memory(memory_id, data, user_id) + result = await update_memory(memory_id, text) + # Normalize the response + normalized_result = normalize_memory_operation(result) return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -320,21 +679,16 @@ async def call_tool( elif name == "mem0_delete_memory": memory_id = arguments.get("memory_id") - user_id = arguments.get("user_id") - delete_all = arguments.get("delete_all", False) - if not delete_all and not memory_id: - return [ - types.TextContent( - type="text", - text="Error: memory_id parameter is required when delete_all is false", - ) - ] + if not memory_id: + return [types.TextContent(type="text", text="Error: memory_id parameter is required")] try: - result = await delete_memory(memory_id, user_id, delete_all) + result = await delete_memory(memory_id) + # Normalize the response + normalized_result = normalize_memory_operation(result) return [ types.TextContent( type="text", - text=json.dumps(result, indent=2), + text=json.dumps(normalized_result, indent=2), ) ] except Exception as e: @@ -345,6 +699,36 @@ async def call_tool( text=f"Error: {str(e)}", ) ] + + elif name == "mem0_delete_all_memories": + try: + result = await delete_all_memories( + 
user_id=arguments.get("user_id"), + agent_id=arguments.get("agent_id"), + app_id=arguments.get("app_id"), + run_id=arguments.get("run_id"), + ) + # Normalize the response + normalized_result = normalize_memory_operation(result) + return [types.TextContent(type="text", text=json.dumps(normalized_result, indent=2))] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] + + elif name == "mem0_delete_entities": + try: + result = await delete_entities( + user_id=arguments.get("user_id"), + agent_id=arguments.get("agent_id"), + app_id=arguments.get("app_id"), + run_id=arguments.get("run_id"), + ) + # Normalize the response + normalized_result = normalize_memory_operation(result) + return [types.TextContent(type="text", text=json.dumps(normalized_result, indent=2))] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [types.TextContent(type="text", text=f"Error: {str(e)}")] else: return [ @@ -360,11 +744,12 @@ async def call_tool( async def handle_sse(request): logger.info("Handling SSE connection") - # Extract API key from headers - api_key = extract_api_key(request) + # Extract API key and User ID + api_key, user_id = extract_auth_info(request) - # Set the API key in context for this request - token = mem0_api_key_context.set(api_key or "") + # Set context + token_key = mem0_api_key_context.set(api_key or "") + token_user = mem0_user_id_context.set(user_id) try: async with sse.connect_sse( request.scope, request.receive, request._send @@ -373,7 +758,8 @@ async def handle_sse(request): streams[0], streams[1], app.create_initialization_options() ) finally: - mem0_api_key_context.reset(token) + mem0_api_key_context.reset(token_key) + mem0_user_id_context.reset(token_user) return Response() @@ -390,15 +776,17 @@ async def handle_streamable_http( ) -> None: logger.info("Handling StreamableHTTP request") - # Extract API key from headers - api_key = extract_api_key(scope) + # Extract API key and User ID + api_key, user_id = extract_auth_info(scope) - # Set the API key in context for this request - token = mem0_api_key_context.set(api_key or "") + # Set context + token_key = mem0_api_key_context.set(api_key or "") + token_user = mem0_user_id_context.set(user_id) try: await session_manager.handle_request(scope, receive, send) finally: - mem0_api_key_context.reset(token) + mem0_api_key_context.reset(token_key) + mem0_user_id_context.reset(token_user) @contextlib.asynccontextmanager async def lifespan(app: Starlette) -> AsyncIterator[None]: diff --git a/mcp_servers/mem0/tools/__init__.py b/mcp_servers/mem0/tools/__init__.py index a19ba425..01da3131 100644 --- a/mcp_servers/mem0/tools/__init__.py +++ b/mcp_servers/mem0/tools/__init__.py @@ -3,22 +3,32 @@ from .memories import ( add_memory, - get_all_memories, + get_memories, search_memories, + get_memory, update_memory, - delete_memory + delete_memory, + delete_all_memories, + list_entities, + delete_entities, ) -from .base import get_user_id, mem0_api_key_context +from .base import mem0_api_key_context, mem0_user_id_context, DEFAULT_MCP_USER_ID, get_mem0_user_id __all__ = [ # Memories "add_memory", - "get_all_memories", + "get_memories", "search_memories", + "get_memory", "update_memory", "delete_memory", + "delete_all_memories", + "list_entities", + "delete_entities", # Base - "get_user_id", "mem0_api_key_context", + "mem0_user_id_context", + "DEFAULT_MCP_USER_ID", + "get_mem0_user_id", ] diff --git 
a/mcp_servers/mem0/tools/base.py b/mcp_servers/mem0/tools/base.py index 38709c9f..21719685 100644 --- a/mcp_servers/mem0/tools/base.py +++ b/mcp_servers/mem0/tools/base.py @@ -1,47 +1,113 @@ +import json import logging import os from contextvars import ContextVar +from typing import Any, Dict, Optional + from mem0 import MemoryClient -from dotenv import load_dotenv +try: + # Official mem0-mcp uses this import path; aliased so the builtin MemoryError is not shadowed. + from mem0.exceptions import MemoryError as Mem0Error # type: ignore +except Exception: # pragma: no cover + Mem0Error = Exception # type: ignore[misc,assignment] + +try: + from dotenv import load_dotenv +except Exception: # pragma: no cover + def load_dotenv(*args, **kwargs): # type: ignore[no-redef] + return False logger = logging.getLogger(__name__) load_dotenv() -DEFAULT_USER_ID = os.getenv("DEFAULT_USER_ID", "mem0_mcp") -CUSTOM_INSTRUCTIONS = """ -Extract the Following Information: +DEFAULT_MCP_USER_ID = "user" # "user" is mem0's default user id, visible in the dashboard -- Code Snippets: Save the actual code for future reference and analysis. -- Explanation: Document a clear description of what the code does, its purpose, and implementation details. -- Technical Context: Include information about programming languages, frameworks, libraries, dependencies, and system requirements. -- Key Features: Highlight main functionalities, important methods, design patterns, and notable implementation aspects. -- Usage Context: Document how and when the code should be used, including any prerequisites or constraints. -""" +ENABLE_GRAPH_DEFAULT = ( + os.getenv("MEM0_ENABLE_GRAPH_DEFAULT", "false").lower() in {"1", "true", "yes"} +) mem0_api_key_context: ContextVar[str] = ContextVar('mem0_api_key') +mem0_user_id_context: ContextVar[Optional[str]] = ContextVar('mem0_user_id', default=None) def get_mem0_api_key() -> str: """Get the mem0 API key from context or environment.""" try: return mem0_api_key_context.get() except LookupError: - api_key = os.getenv("MEM0_API_KEY") + api_key = os.getenv("MEM0_API_KEY") or os.getenv("API_KEY") if not api_key: raise RuntimeError("mem0 API key not found in request context or environment") return api_key -def get_user_id() -> str: - """Get the current user identifier for memory operations.""" - logger.debug(f"DEFAULT_USER_ID: {DEFAULT_USER_ID}") - return DEFAULT_USER_ID +def get_mem0_user_id() -> Optional[str]: + """Get the mem0 user ID from context or environment.""" + try: + return mem0_user_id_context.get() + except LookupError: + return os.getenv("MEM0_DEFAULT_USER_ID", DEFAULT_MCP_USER_ID) + +def default_enable_graph(enable_graph: Optional[bool]) -> bool: + """Use caller override when provided, else use server default.""" + if enable_graph is None: + return ENABLE_GRAPH_DEFAULT + return bool(enable_graph) + +def with_default_filters( + filters: Optional[Dict[str, Any]] = None, + user_id: Optional[str] = None, +) -> Dict[str, Any]: + """ + Normalize filter shape: + - Ensure there is a boolean root (AND/OR/NOT); wrap plain dict into AND + - If user_id is provided and not present in filters, inject it. 
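+    Example: with_default_filters({"agent_id": "planner"}, user_id="user") -> {"AND": [{"user_id": "user"}, {"agent_id": "planner"}]}; an existing user_id clause anywhere in the tree suppresses injection.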
+ """ + if not filters: + if user_id: + return {"AND": [{"user_id": user_id}]} + return {} + + if not any(key in filters for key in ("AND", "OR", "NOT")): + filters = {"AND": [filters]} + + if user_id: + # Check if user_id is already in the filters + has_user = json.dumps(filters, sort_keys=True).find('"user_id"') != -1 + if not has_user: + and_list = filters.setdefault("AND", []) + if not isinstance(and_list, list): + # Should not happen if we respect the structure, but safe guard + # If AND is not a list, we can't easily append. + # Fallback: wrap the whole thing? + # For now assume standard usage. + logger.warning("filters['AND'] is not a list, skipping user_id injection") + else: + and_list.insert(0, {"user_id": user_id}) + + return filters + +def mem0_call(func, *args, **kwargs) -> Dict[str, Any]: + """Call Mem0 client methods and surface structured errors (official-style).""" + try: + result = func(*args, **kwargs) + return {"success": True, "result": result} + except MemoryError as exc: + logger.error("Mem0 call failed: %s", exc) + return { + "success": False, + "error": str(exc), + "status": getattr(exc, "status", None), + "payload": getattr(exc, "payload", None), + } + except Exception as exc: + logger.exception("Unexpected Mem0 call error: %s", exc) + return {"success": False, "error": str(exc)} def get_mem0_client() -> MemoryClient: """Get a configured mem0 client with current API key from context.""" try: api_key = get_mem0_api_key() client = MemoryClient(api_key=api_key) - client.update_project(custom_instructions=CUSTOM_INSTRUCTIONS) logger.debug("mem0 client initialized successfully") return client except Exception as e: diff --git a/mcp_servers/mem0/tools/memories.py b/mcp_servers/mem0/tools/memories.py index c0d34f85..92846382 100644 --- a/mcp_servers/mem0/tools/memories.py +++ b/mcp_servers/mem0/tools/memories.py @@ -1,141 +1,235 @@ import logging -from typing import Any, Dict -from .base import get_mem0_client, get_user_id +from typing import Any, Dict, Optional + +from .base import ( + default_enable_graph, + get_mem0_client, + mem0_call, + with_default_filters, + get_mem0_user_id +) # Configure logging logger = logging.getLogger(__name__) -async def add_memory(content: str, user_id: str = None) -> Dict[str, Any]: - """Add a new memory to mem0.""" - if not user_id: - user_id = get_user_id() +async def add_memory( + text: str, + messages: Optional[list[Dict[str, str]]] = None, + user_id: Optional[str] = None, + agent_id: Optional[str] = None, + app_id: Optional[str] = None, + run_id: Optional[str] = None, + metadata: Optional[Dict[str, Any]] = None, + enable_graph: Optional[bool] = None, +) -> Dict[str, Any]: + """ + Official-style add_memory: + - Accept either `messages` (role/content list) or derive from `text` + - Scope can be by user_id OR agent_id/run_id (if those are provided, user_id can be omitted) + """ + if not any([user_id, agent_id, run_id]) and not get_mem0_user_id(): + return { + "success": False, + "error": "scope_missing", + "detail": "Provide at least one of user_id, agent_id, or run_id.", + } + resolved_user_id = user_id or get_mem0_user_id() + + logger.info( + "Adding memory (user_id=%s agent_id=%s app_id=%s run_id=%s)", + resolved_user_id, + agent_id, + app_id, + run_id, + ) + + if not messages and not text: + return { + "success": False, + "error": "messages_missing", + "detail": "Provide either `text` or `messages` so Mem0 knows what to store.", + } + conversation = messages if messages else [{"role": "user", "content": text}] + payload: Dict[str, 
Any] = { + "user_id": resolved_user_id, + "agent_id": agent_id, + "app_id": app_id, + "run_id": run_id, + "metadata": metadata, + "enable_graph": default_enable_graph(enable_graph), + } + payload = {k: v for k, v in payload.items() if v is not None} + + mem0_client = get_mem0_client() + result = mem0_call(mem0_client.add, conversation, **payload) + if result.get("success"): + result["scope"] = { + "user_id": resolved_user_id, + "agent_id": agent_id, + "app_id": app_id, + "run_id": run_id, + } + return result + +async def get_memories( + filters: Optional[Dict[str, Any]] = None, + page: Optional[int] = None, + page_size: Optional[int] = None, + enable_graph: Optional[bool] = None, +) -> Dict[str, Any]: + """List memories with filters + pagination; the default user_id is injected when the filters omit one.""" + if not filters and not get_mem0_user_id(): + return { + "success": False, + "error": "filters_missing", + "detail": "Provide `filters` (for example: {\"AND\": [{\"user_id\": \"...\"}]}) to list memories.", + } + resolved_filters = with_default_filters(filters, user_id=get_mem0_user_id()) + + logger.info("Getting memories (page=%s page_size=%s)", page, page_size) + mem0_client = get_mem0_client() + payload: Dict[str, Any] = { + "filters": resolved_filters, + "page": page, + "page_size": page_size, + "enable_graph": default_enable_graph(enable_graph), + } + payload = {k: v for k, v in payload.items() if v is not None} + return mem0_call(mem0_client.get_all, **payload) + +async def search_memories( + query: str, + filters: Optional[Dict[str, Any]] = None, + limit: Optional[int] = None, + enable_graph: Optional[bool] = None, + # Back-compat: allow passing user_id directly (converted into filters) + user_id: Optional[str] = None, +) -> Dict[str, Any]: + """Search memories; the default user_id is injected when neither filters nor user_id name one.""" + resolved_user_id = user_id or get_mem0_user_id() - logger.info(f"Adding memory for user: {user_id}") - try: - mem0_client = get_mem0_client() - messages = [{"role": "user", "content": content}] - result = mem0_client.add(messages, user_id=user_id, output_format="v1.1") - logger.info(f"Successfully added memory for user {user_id}") + if not filters and not resolved_user_id: return { - "success": True, - "message": f"Successfully added memory: {content[:100]}{'...' 
-            "user_id": user_id
+            "success": False,
+            "error": "scope_missing",
+            "detail": "Provide either `filters` or `user_id` to scope the search.",
         }
-
-    except Exception as e:
-        logger.exception(f"Error adding memory: {e}")
-        raise e
-
-async def get_all_memories(user_id: str = None, page: int = 1, page_size: int = 50) -> Dict[str, Any]:
-    """Get all memories for a user."""
-    if not user_id:
-        user_id = get_user_id()
-
-    logger.info(f"Getting all memories for user: {user_id}")
-    try:
-        mem0_client = get_mem0_client()
-        memories = mem0_client.get_all(user_id=user_id, page=page, page_size=page_size)
-        formatted_memories = []
-        for memory in memories["results"]:
-            formatted_memories.append({
-                "id": memory["id"],
-                "memory": memory["memory"],
-                "created_at": memory.get("created_at"),
-                "updated_at": memory.get("updated_at")
-            })
-
-        logger.info(f"Retrieved {len(formatted_memories)} memories for user {user_id}")
+    resolved_filters = filters if filters else {"AND": [{"user_id": resolved_user_id}]}
+    resolved_filters = with_default_filters(resolved_filters, user_id=resolved_user_id)
+
+    logger.info("Searching memories (query=%s limit=%s)", query, limit)
+    mem0_client = get_mem0_client()
+    payload: Dict[str, Any] = {
+        "query": query,
+        "filters": resolved_filters,
+        "limit": limit,
+        "enable_graph": default_enable_graph(enable_graph),
+    }
+    payload = {k: v for k, v in payload.items() if v is not None}
+    return mem0_call(mem0_client.search, **payload)
+
+async def get_memory(memory_id: str) -> Dict[str, Any]:
+    """Retrieve a single memory once the user has picked an exact ID."""
+    logger.info("Getting memory %s", memory_id)
+    mem0_client = get_mem0_client()
+    if not hasattr(mem0_client, "get"):
         return {
-            "success": True,
-            "memories": formatted_memories,
-            "user_id": user_id,
-            "total_results": len(formatted_memories),
-            "page": page,
-            "page_size": page_size
+            "success": False,
+            "error": "unsupported_client_method",
+            "detail": "Mem0 client does not support `get(memory_id)`. Please upgrade mem0ai.",
         }
-    except Exception as e:
-        logger.exception(f"Error getting memories: {e}")
-        raise e
-
-async def search_memories(query: str, user_id: str = None, limit: int = 20) -> Dict[str, Any]:
-    """Search memories using semantic search."""
-    if not user_id:
-        user_id = get_user_id()
-
-    logger.info(f"Searching memories for user {user_id} with query: {query}")
-    try:
-        mem0_client = get_mem0_client()
-        memories = mem0_client.search(query, user_id=user_id, output_format="v1.1")
-
-        formatted_memories = []
-        for memory in memories["results"][:limit]:
-            formatted_memories.append({
-                "id": memory["id"],
-                "memory": memory["memory"],
-                "score": memory.get("score", 0),
-                "created_at": memory.get("created_at"),
-                "updated_at": memory.get("updated_at")
-            })
-
-        logger.info(f"Found {len(formatted_memories)} matching memories for user {user_id}")
+    return mem0_call(mem0_client.get, memory_id)
+
+async def update_memory(memory_id: str, text: str) -> Dict[str, Any]:
+    """Overwrite an existing memory's text after the user confirms the exact memory_id."""
+    logger.info("Updating memory %s", memory_id)
+    mem0_client = get_mem0_client()
+    return mem0_call(mem0_client.update, memory_id=memory_id, text=text)
+
+async def delete_memory(
+    memory_id: str,
+) -> Dict[str, Any]:
+    """Delete a memory once the user explicitly confirms the memory_id to remove."""
+    logger.info("Deleting memory %s", memory_id)
+    mem0_client = get_mem0_client()
+    return mem0_call(mem0_client.delete, memory_id)
+
+async def delete_all_memories(
+    user_id: Optional[str] = None,
+    agent_id: Optional[str] = None,
+    app_id: Optional[str] = None,
+    run_id: Optional[str] = None,
+) -> Dict[str, Any]:
+    """Bulk-delete every memory in the confirmed scope."""
+    resolved_user_id = user_id or get_mem0_user_id()
+
+    if not any([resolved_user_id, agent_id, app_id, run_id]):
+        return {
+            "success": False,
+            "error": "scope_missing",
+            "detail": "Provide at least one of user_id, agent_id, app_id, or run_id.",
+        }
+    logger.info(
+        "Deleting all memories (user_id=%s agent_id=%s app_id=%s run_id=%s)",
+        resolved_user_id,
+        agent_id,
+        app_id,
+        run_id,
+    )
+    mem0_client = get_mem0_client()
+    payload: Dict[str, Any] = {
+        "user_id": resolved_user_id,
+        "agent_id": agent_id,
+        "app_id": app_id,
+        "run_id": run_id,
+    }
+    payload = {k: v for k, v in payload.items() if v is not None}
+    return mem0_call(mem0_client.delete_all, **payload)
+
+async def list_entities() -> Dict[str, Any]:
+    """List users/agents/apps/runs with stored memories."""
+    logger.info("Listing entities")
+    mem0_client = get_mem0_client()
+    if not hasattr(mem0_client, "users"):
+        return {
+            "success": False,
+            "error": "unsupported_client_method",
+            "detail": "Mem0 client does not support `users()`. Please upgrade mem0ai.",
+        }
+    return mem0_call(mem0_client.users)
+
+async def delete_entities(
+    user_id: Optional[str] = None,
+    agent_id: Optional[str] = None,
+    app_id: Optional[str] = None,
+    run_id: Optional[str] = None,
+) -> Dict[str, Any]:
+    """Delete a user/agent/app/run (and its memories) once the user confirms the scope."""
+    logger.info(
+        "Deleting entities (user_id=%s agent_id=%s app_id=%s run_id=%s)",
+        user_id,
+        agent_id,
+        app_id,
+        run_id,
+    )
+    if not any([user_id, agent_id, app_id, run_id]):
         return {
-            "success": True,
-            "memories": formatted_memories,
-            "user_id": user_id,
-            "query": query,
-            "total_results": len(formatted_memories)
+            "success": False,
+            "error": "scope_missing",
+            "detail": "Provide user_id, agent_id, app_id, or run_id before calling delete_entities.",
         }
-    except Exception as e:
-        logger.exception(f"Error searching memories: {e}")
-        raise e
-
-async def update_memory(memory_id: str, data: str, user_id: str = None) -> Dict[str, Any]:
-    """Update an existing memory."""
-    if not user_id:
-        user_id = get_user_id()
-
-    logger.info(f"Updating memory {memory_id} for user: {user_id}")
-    try:
-        mem0_client = get_mem0_client()
-        result = mem0_client.update(memory_id=memory_id, text=data)
-        logger.info(f"Successfully updated memory {memory_id} for user {user_id}")
+    mem0_client = get_mem0_client()
+    if not hasattr(mem0_client, "delete_users"):
         return {
-            "success": True,
-            "message": f"Successfully updated memory: {memory_id}",
-            "user_id": user_id,
-            "memory_id": memory_id,
-            "result": result
+            "success": False,
+            "error": "unsupported_client_method",
+            "detail": "Mem0 client does not support `delete_users(...)`. Please upgrade mem0ai.",
         }
-
-    except Exception as e:
-        logger.exception(f"Error updating memory: {e}")
-        raise e
-
-async def delete_memory(memory_id: str = None, user_id: str = None, delete_all: bool = False) -> Dict[str, Any]:
-    """Delete a specific memory or all memories for a user."""
-    if not user_id:
-        user_id = get_user_id()
-
-    logger.info(f"Deleting memory for user: {user_id}")
-    try:
-        mem0_client = get_mem0_client()
-        if delete_all:
-            result = mem0_client.delete_all(user_id=user_id)
-            logger.info(f"Successfully deleted all memories for user {user_id}")
-            return {
-                "success": True,
-                "message": f"Successfully deleted all memories for user: {user_id}",
-                "user_id": user_id
-            }
-        else:
-            result = mem0_client.delete(memory_id=memory_id)
-            logger.info(f"Successfully deleted memory {memory_id}")
-            return {
-                "success": True,
-                "message": f"Successfully deleted memory: {memory_id}",
-                "memory_id": memory_id
-            }
-
-    except Exception as e:
-        logger.exception(f"Error deleting memory: {e}")
-        raise e
\ No newline at end of file
+    payload: Dict[str, Any] = {
+        "user_id": user_id,
+        "agent_id": agent_id,
+        "app_id": app_id,
+        "run_id": run_id,
+    }
+    payload = {k: v for k, v in payload.items() if v is not None}
+    return mem0_call(mem0_client.delete_users, **payload)
\ No newline at end of file
diff --git a/mcp_servers/onedrive/server.py b/mcp_servers/onedrive/server.py
index 85d19300..4078fb52 100644
--- a/mcp_servers/onedrive/server.py
+++ b/mcp_servers/onedrive/server.py
@@ -4,7 +4,7 @@
 import os
 import json
 from collections.abc import AsyncIterator
-from typing import Any, Dict, List
+from typing import Any, List
 
 import click
 import mcp.types as types
@@ -17,6 +17,160 @@
 from starlette.types import Receive, Scope, Send
 
 from dotenv import load_dotenv
 
+
+def get_path(data: dict, path: str) -> Any:
+    """Safe dot-notation access. Returns None if path fails."""
+    if not data:
+        return None
+    current = data
+    for key in path.split('.'):
+        if isinstance(current, dict):
+            current = current.get(key)
+        else:
+            return None
+    return current
+
+
+def normalize(source: dict, mapping: dict[str, Any]) -> dict:
+    """
+    Creates a new clean dictionary based strictly on the mapping rules.
+    Excludes fields with None/null values from the output.
+    Args:
+        source: Raw vendor JSON.
+        mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function }
+    """
+    clean_data = {}
+    for target_key, rule in mapping.items():
+        value = None
+        if isinstance(rule, str):
+            value = get_path(source, rule)
+        elif callable(rule):
+            try:
+                value = rule(source)
+            except Exception:
+                value = None
+        if value is not None:
+            clean_data[target_key] = value
+    return clean_data
+
+
+# Mapping Rules for OneDrive Objects
+
+ITEM_RULES = {
+    "itemId": "id",
+    "name": "name",
+    "size": "size",
+    "lastModified": "lastModifiedDateTime",
+    "created": "createdDateTime",
+    "webUrl": "webUrl",
+    "downloadUrl": lambda x: x.get("@microsoft.graph.downloadUrl"),
+    "isFolder": lambda x: bool(x.get('folder')),
+    "isFile": lambda x: bool(x.get('file')),
+    "parentId": "parentReference.id",
+    "parentPath": "parentReference.path",
+    "parentName": "parentReference.name",
+    "driveId": "parentReference.driveId",
+    "driveType": "parentReference.driveType",
+    "siteId": "parentReference.siteId",
+    "mimeType": "file.mimeType",
+    "hashSha1": "file.hashes.sha1Hash",
+    "hashSha256": "file.hashes.sha256Hash",
+    "hashQuickXor": "file.hashes.quickXorHash",
+    "folderChildCount": "folder.childCount",
+    "createdByName": "createdBy.user.displayName",
+    "createdByEmail": "createdBy.user.email",
+    "modifiedByName": "lastModifiedBy.user.displayName",
+    "modifiedByEmail": "lastModifiedBy.user.email",
+    "shared": lambda x: bool(x.get('shared'))
+}
+
+FOLDER_RULES = {
+    "folderId": "id",
+    "name": "name",
+    "childCount": "folder.childCount",
+    "lastModified": "lastModifiedDateTime",
+    "created": "createdDateTime",
+    "webUrl": "webUrl",
+    "parentId": "parentReference.id",
+    "parentPath": "parentReference.path",
+    "parentName": "parentReference.name",
+    "driveId": "parentReference.driveId",
+    "driveType": "parentReference.driveType",
+    "siteId": "parentReference.siteId",
+    "size": "size",
+    "createdByName": "createdBy.user.displayName",
+    "createdByEmail": "createdBy.user.email",
+    "modifiedByName": "lastModifiedBy.user.displayName",
+    "modifiedByEmail": "lastModifiedBy.user.email",
+    "shared": lambda x: bool(x.get('shared'))
+}
+
+SHARED_ITEM_RULES = {
+    "sharedItemId": "remoteItem.id",
+    "name": "remoteItem.name",
+    "size": "remoteItem.size",
+    "lastModified": "remoteItem.lastModifiedDateTime",
+    "created": "remoteItem.createdDateTime",
+    "webUrl": "remoteItem.webUrl",
+    "isFolder": lambda x: bool(get_path(x, 'remoteItem.folder')),
+    "isFile": lambda x: bool(get_path(x, 'remoteItem.file')),
+    "mimeType": "remoteItem.file.mimeType",
+    "parentId": "remoteItem.parentReference.id",
+    "driveId": "remoteItem.parentReference.driveId",
+    "sharedBy": "remoteItem.shared.sharedBy.user.displayName",
+    "sharedDateTime": "remoteItem.shared.sharedDateTime",
+    "permissions": "remoteItem.shared.scope"
+}
+
+
+def normalize_item(raw_item: dict) -> dict:
+    """Normalize a single item (file or folder) and add computed fields."""
+    item = normalize(raw_item, ITEM_RULES)
+    return item
+
+
+def normalize_folder(raw_folder: dict) -> dict:
+    """Normalize a single folder and add computed fields."""
+    folder = normalize(raw_folder, FOLDER_RULES)
+    return folder
+
+
+def normalize_file(raw_file: dict) -> dict:
+    """Normalize a single file and add computed fields."""
+    file_item = normalize(raw_file, ITEM_RULES)
+    return file_item
+
+
+def normalize_shared_item(raw_shared_item: dict) -> dict:
+    """Normalize a single shared item and add computed fields."""
+    shared_item = normalize(raw_shared_item, SHARED_ITEM_RULES)
+    return shared_item
+
+
+def normalize_items_response(response_data: dict) -> dict:
+    """Normalize a response containing multiple items (like list operations)."""
+    if not isinstance(response_data, dict):
+        return response_data
+
+    normalized_response = {}
+
+    # Handle pagination info
+    if "@odata.nextLink" in response_data:
+        normalized_response["nextPageToken"] = response_data["@odata.nextLink"]
+
+    if "@odata.count" in response_data:
+        normalized_response["totalCount"] = response_data["@odata.count"]
+
+    # Normalize items
+    if "value" in response_data and isinstance(response_data["value"], list):
+        normalized_response["items"] = [
+            normalize_item(item) for item in response_data["value"]
+        ]
+        normalized_response["count"] = len(normalized_response["items"])
+
+    return normalized_response
+
+
 from tools import (
     # Base
     auth_token_context,
@@ -296,10 +450,18 @@ async def call_tool(
                 file_id=arguments["file_id"],
                 new_name=arguments["new_name"]
             )
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
+                _, raw_data = result
+                normalized_data = normalize_item(raw_data)
+                response = {"status": "success", "item": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -317,10 +479,18 @@ async def call_tool(
                 item_id=arguments["item_id"],
                 new_parent_id=arguments["new_parent_id"]
             )
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
+                _, raw_data = result
+                normalized_data = normalize_item(raw_data)
+                response = {"status": "success", "item": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -337,10 +507,20 @@ async def call_tool(
             result = await onedrive_delete_item(
                 item_id=arguments["item_id"]
             )
+            # For delete operations, wrap in a normalized structure
+            if isinstance(result, tuple) and len(result) == 1:
+                response = {
+                    "status": "success",
+                    "itemId": arguments["item_id"],
+                    "message": result[0]
+                }
+            else:
+                response = {"status": "error", "details": result}
+
             return [
                 types.TextContent(
                     type="text",
-                    text=result,
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -382,10 +562,18 @@ async def call_tool(
                 data=arguments.get("data"),
                 if_exists=arguments.get("if_exists", "error")
             )
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
+                _, raw_data = result
+                normalized_data = normalize_file(raw_data)
+                response = {"status": "success", "file": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -405,10 +593,18 @@ async def call_tool(
                 new_folder_name=arguments["new_folder_name"],
                 behavior=arguments.get("behavior", "fail")
             )
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
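+                # The tool returned a (status, raw_json) tuple; unpack it and normalize the folder payload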
+                _, raw_data = result
+                normalized_data = normalize_folder(raw_data)
+                response = {"status": "success", "folder": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                )
             ]
         except Exception as e:
@@ -424,10 +620,18 @@ async def call_tool(
     elif name == "onedrive_list_root_files_folders":
         try:
             result = await onedrive_list_root_files_folders()
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
+                _, raw_data = result
+                normalized_data = normalize_items_response(raw_data)
+                response = {"status": "success", "data": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -444,10 +648,18 @@ async def call_tool(
             result = await onedrive_list_inside_folder(
                 folder_id=arguments["folder_id"]
             )
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
+                _, raw_data = result
+                normalized_data = normalize_items_response(raw_data)
+                response = {"status": "success", "data": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -464,10 +676,18 @@ async def call_tool(
             result = await onedrive_search_item_by_name(
                 itemname=arguments["itemname"]
             )
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
+                _, raw_data = result
+                normalized_data = normalize_items_response(raw_data)
+                response = {"status": "success", "data": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -484,10 +704,18 @@ async def call_tool(
             result = await onedrive_search_folder_by_name(
                 folder_name=arguments["folder_name"]
             )
+            # Normalize the response for folder search results
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], list):
+                _, raw_folders = result
+                normalized_folders = [normalize_folder(folder) for folder in raw_folders]
+                response = {"status": "success", "folders": normalized_folders, "count": len(normalized_folders)}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -504,10 +732,17 @@ async def call_tool(
             result = await onedrive_get_item_by_id(
                 item_id=arguments["item_id"]
            )
+            # Normalize the response
+            if isinstance(result, dict) and "id" in result:
+                normalized_data = normalize_item(result)
+                response = {"status": "success", "item": normalized_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
@@ -523,10 +758,25 @@ async def call_tool(
     elif name == "onedrive_list_shared_items":
         try:
             result = await onedrive_list_shared_items()
+            # Normalize the response
+            if isinstance(result, tuple) and len(result) == 2 and isinstance(result[1], dict):
+                _, raw_data = result
+                if "value" in raw_data and isinstance(raw_data["value"], list):
+                    normalized_items = [normalize_shared_item(item) for item in raw_data["value"]]
+                    response = {
+                        "status": "success",
+                        "sharedItems": normalized_items,
+                        "count": len(normalized_items)
+                    }
+                else:
+                    response = {"status": "success", "data": raw_data}
+            else:
+                response = result
+
             return [
                 types.TextContent(
                     type="text",
-                    text=json.dumps(result, indent=2),
+                    text=json.dumps(response, indent=2),
                 )
             ]
         except Exception as e:
diff --git a/mcp_servers/outlook/server.py b/mcp_servers/outlook/server.py
index af65b3d6..4bd52444 100644
--- a/mcp_servers/outlook/server.py
+++ b/mcp_servers/outlook/server.py
@@ -5,7 +5,6 @@
 import json
 from collections.abc import AsyncIterator
-from typing import List
-from contextvars import ContextVar
+from typing import Any, List
 
 import click
 import mcp.types as types
@@ -18,6 +17,136 @@
 from starlette.types import Receive, Scope, Send
 
 from dotenv import load_dotenv
 
+
+def get_path(data: dict, path: str) -> Any:
+    """Safe dot-notation access. Returns None if path fails."""
+    if not data:
+        return None
+    current = data
+    for key in path.split('.'):
+        if isinstance(current, dict):
+            current = current.get(key)
+        else:
+            return None
+    return current
+
+
+def normalize(source: dict, mapping: dict[str, Any]) -> dict:
+    """
+    Creates a new clean dictionary based strictly on the mapping rules.
+    Excludes fields with None/null values from the output.
+    Args:
+        source: Raw vendor JSON.
+        mapping: Dict of { "TargetFieldName": "Source.Path" OR Lambda_Function }
+    """
+    clean_data = {}
+    for target_key, rule in mapping.items():
+        value = None
+        if isinstance(rule, str):
+            value = get_path(source, rule)
+        elif callable(rule):
+            try:
+                value = rule(source)
+            except Exception:
+                value = None
+        if value is not None:
+            clean_data[target_key] = value
+    return clean_data
+
+
+# Mapping Rules for Outlook Mail Objects
+
+RECIPIENT_RULES = {
+    "email": "emailAddress.address",
+    "name": "emailAddress.name",
+}
+
+FOLDER_RULES = {
+    "itemId": "id",
+    "name": "displayName",
+    "messageCount": "totalItemCount",
+    "unreadCount": "unreadItemCount",
+    "parentId": "parentFolderId",
+    "childCount": "childFolderCount",
+    "size": "sizeInBytes",
+    "hidden": "isHidden",
+    "wellKnownName": "wellKnownName",
+}
+
+ATTACHMENT_RULES = {
+    "attachmentId": "id",
+    "name": "name",
+    "size": "size",
+    "type": "contentType",
+    "inline": "isInline",
+    "lastModified": "lastModifiedDateTime",
+}
+
+MESSAGE_RULES = {
+    "id": "id",
+    "title": "subject",
+    "preview": "bodyPreview",
+    "content": "body.content",
+    "contentType": "body.contentType",
+    "importance": "importance",
+    "priority": "priority",
+    "isRead": "isRead",
+    "isDraft": "isDraft",
+    "hasAttachments": "hasAttachments",
+    "conversationId": "conversationId",
+    "conversationIndex": "conversationIndex",
+    "internetMessageId": "internetMessageId",
+    "webLink": "webLink",
+    "created": "createdDateTime",
+    "lastModified": "lastModifiedDateTime",
+    "received": "receivedDateTime",
+    "sent": "sentDateTime",
+    "changeKey": "changeKey",
+    "categories": "categories",
+    "isDeliveryReceiptRequested": "isDeliveryReceiptRequested",
+    "isReadReceiptRequested": "isReadReceiptRequested",
+    "inferenceClassification": "inferenceClassification",
+    "flagStatus": "flag.flagStatus",
+    # Sender/From
+    "senderEmail": "sender.emailAddress.address",
+    "senderName": "sender.emailAddress.name",
+    "fromEmail": "from.emailAddress.address",
+    "fromName": "from.emailAddress.name",
+    # Recipients
+    "toRecipients": lambda x: [
+        normalize(r, RECIPIENT_RULES) for r in x.get('toRecipients', [])
+    ] if x.get('toRecipients') else None,
+    "ccRecipients": lambda x: [
+        normalize(r, RECIPIENT_RULES) for r in x.get('ccRecipients', [])
+    ] if x.get('ccRecipients') else None,
+    "bccRecipients": lambda x: [
+        normalize(r, RECIPIENT_RULES) for r in x.get('bccRecipients', [])
+    ] if x.get('bccRecipients') else None,
+    "replyTo": lambda x: [
+        normalize(r, RECIPIENT_RULES) for r in x.get('replyTo', [])
+    ] if x.get('replyTo') else None,
+    # Attachments
+    "attachments": lambda x: [
+        normalize(a, ATTACHMENT_RULES) for a in x.get('attachments', [])
+    ] if x.get('attachments') else None,
+    # Folder
+    "folderId": "parentFolderId",
+}
+
+
+def normalize_message(raw_message: dict) -> dict:
+    """Normalize a single message and add computed fields."""
+    message = normalize(raw_message, MESSAGE_RULES)
+    # Add computed fields if needed
+    return message
+
+
+def normalize_folder(raw_folder: dict) -> dict:
+    """Normalize a single folder and add computed fields."""
+    folder = normalize(raw_folder, FOLDER_RULES)
+    return folder
+
+
 from tools import (
     auth_token_context,
@@ -511,6 +640,7 @@ async def call_tool(
             result = await outlookMail_delete_folder(
                 folder_id=arguments["folder_id"]
             )
+            # No normalization needed for delete operations
             return [
                 types.TextContent(
                     type="text",
@@ -532,6 +662,9 @@ async def call_tool(
                 display_name=arguments["display_name"],
                 is_hidden=arguments.get("is_hidden", False)
             )
+            # Normalize the created folder if successful
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_folder(result)
             return [
                 types.TextContent(
                     type="text",
@@ -552,6 +685,13 @@ async def call_tool(
             result = await outlookMail_list_folders(
                 include_hidden=arguments.get("include_hidden", True)
             )
+            # Normalize the folder list from raw API response
+            if isinstance(result, dict) and 'value' in result and 'error' not in result:
+                normalized_folders = [normalize_folder(folder) for folder in result['value']]
+                result = {
+                    "count": len(normalized_folders),
+                    "folders": normalized_folders
+                }
             return [
                 types.TextContent(
                     type="text",
@@ -568,10 +708,20 @@ async def call_tool(
             ]
 
     elif name == "outlookMail_get_mail_folder_details":
+        if not arguments.get("folder_id"):
+            return [
+                types.TextContent(
+                    type="text",
+                    text="Error: 'folder_id' argument is required."
+                )
+            ]
         try:
             result = await outlookMail_get_mail_folder_details(
                 folder_id=arguments["folder_id"]
             )
+            # Normalize the single folder object from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_folder(result)
             return [
                 types.TextContent(
                     type="text",
@@ -593,6 +743,9 @@ async def call_tool(
                 folder_id=arguments["folder_id"],
                 display_name=arguments["display_name"]
             )
+            # Normalize the updated folder from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_folder(result)
             return [
                 types.TextContent(
                     type="text",
@@ -610,10 +763,20 @@ async def call_tool(
     # Message Operations
     elif name == "outlookMail_read_message":
+        if not arguments.get("message_id"):
+            return [
+                types.TextContent(
+                    type="text",
+                    text="Error: 'message_id' argument is required."
+                )
+            ]
         try:
             result = await outlookMail_read_message(
                 message_id=arguments["message_id"]
             )
+            # Normalize the single message object from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_message(result)
             return [
                 types.TextContent(
                     type="text",
@@ -636,6 +799,13 @@ async def call_tool(
                 orderby=arguments.get("orderby"),
                 select=arguments.get("select")
             )
+            # Normalize the message list from raw API response
+            if isinstance(result, dict) and 'value' in result and 'error' not in result:
+                normalized_messages = [normalize_message(message) for message in result['value']]
+                result = {
+                    "count": len(normalized_messages),
+                    "messages": normalized_messages
+                }
             return [
                 types.TextContent(
                     type="text",
@@ -660,6 +830,13 @@ async def call_tool(
                 orderby=arguments.get("orderby"),
                 select=arguments.get("select")
             )
+            # Normalize the message list from raw API response
+            if isinstance(result, dict) and 'value' in result and 'error' not in result:
+                normalized_messages = [normalize_message(message) for message in result['value']]
+                result = {
+                    "count": len(normalized_messages),
+                    "messages": normalized_messages
+                }
             return [
                 types.TextContent(
                     type="text",
@@ -685,6 +862,9 @@ async def call_tool(
                 cc_recipients=arguments.get("cc_recipients"),
                 bcc_recipients=arguments.get("bcc_recipients"),
             )
+            # Normalize the updated message from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_message(result)
             return [
                 types.TextContent(
                     type="text",
@@ -705,6 +885,7 @@ async def call_tool(
             result = await outlookMail_delete_draft(
                 message_id=arguments["message_id"]
             )
+            # No normalization needed for delete operations
             return [
                 types.TextContent(
                     type="text",
@@ -727,6 +908,9 @@ async def call_tool(
                 comment=arguments["comment"],
                 to_recipients=arguments["to_recipients"]
             )
+            # Normalize the created draft message from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_message(result)
             return [
                 types.TextContent(
                     type="text",
@@ -748,6 +932,9 @@ async def call_tool(
                 message_id=arguments["message_id"],
                 comment=arguments["comment"]
             )
+            # Normalize the created draft message from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_message(result)
             return [
                 types.TextContent(
                     type="text",
@@ -769,6 +956,9 @@ async def call_tool(
                 message_id=arguments["message_id"],
                 comment=arguments.get("comment", "")
             )
+            # Normalize the created draft message from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_message(result)
             return [
                 types.TextContent(
                     type="text",
@@ -789,6 +979,7 @@ async def call_tool(
             result = await outlookMail_send_draft(
                 message_id=arguments["message_id"]
             )
+            # No normalization needed for send operations (typically returns success status)
             return [
                 types.TextContent(
                     type="text",
@@ -813,6 +1004,9 @@ async def call_tool(
                 cc_recipients=arguments.get("cc_recipients"),
                 bcc_recipients=arguments.get("bcc_recipients"),
             )
+            # Normalize the created draft message from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_message(result)
             return [
                 types.TextContent(
                     type="text",
@@ -833,6 +1027,9 @@ async def call_tool(
                 message_id=arguments["message_id"],
                 destination_folder_id=arguments["destination_folder_id"]
             )
+            # Normalize the moved message from raw API response
+            if isinstance(result, dict) and 'error' not in result and 'id' in result:
+                result = normalize_message(result)
             return [
                 types.TextContent(
                     type="text",
diff --git a/mcp_servers/outlook/tools/mailFolder.py b/mcp_servers/outlook/tools/mailFolder.py
index 65a225bf..8603e1eb 100644
--- a/mcp_servers/outlook/tools/mailFolder.py
+++ b/mcp_servers/outlook/tools/mailFolder.py
@@ -34,50 +34,6 @@ async def outlookMail_list_folders(include_hidden: bool = True) -> dict:
         logging.error(f"Could not get mail folders from {url}: {e}")
         return {"error": f"Could not get mail folders from {url}: {e}"}
 
-async def outlookMail_get_messages_from_folder(
-    folder_id: str,
-    top: int = 10,
-    filter_query: str = None,
-    orderby: str = None,
-    select: str = None
-)-> dict:
-    """
-    Retrieve messages from a specific Outlook mail folder.
-
-    Args:
-        folder_id (str): The unique ID of the mail folder.
-        top (int, optional): Max number of messages to return (default: 10).
-        filter_query (str, optional): OData $filter expression (e.g., "contains(subject, 'weekly digest')").
-        orderby (str, optional): OData $orderby expression (e.g., "receivedDateTime desc").
-        select (str, optional): Comma-separated list of properties to include.
-
-    Returns:
-        dict: JSON response with list of messages, or error info.
-    """
-    client = get_outlookMail_client()
-    if not client:
-        logging.error("Could not get Outlook client")
-        return {"error": "Could not get Outlook client"}
-
-    url = f"{client['base_url']}/me/mailFolders/{folder_id}/messages"
-    params = {'$top': top}
-
-    if filter_query:
-        params['$filter'] = filter_query
-    if orderby:
-        params['$orderby'] = orderby
-    if select:
-        params['$select'] = select
-
-    try:
-        async with httpx.AsyncClient() as httpx_client:
-            response = await httpx_client.get(url, headers=client['headers'], params=params)
-            response.raise_for_status()
-            return response.json()
-    except Exception as e:
-        logging.error(f"Could not get messages from {url}: {e}")
-        return {"error": f"Could not get messages from {url}"}
-
 async def outlookMail_get_mail_folder_details(folder_id: str) -> dict:
     """
     Get details of a specific mail folder by its ID.
diff --git a/mcp_servers/slack/server.py b/mcp_servers/slack/server.py
index 3dd46d96..787fab10 100644
--- a/mcp_servers/slack/server.py
+++ b/mcp_servers/slack/server.py
@@ -22,8 +22,8 @@
     bot_token_context
 )
 from bot_tools.bot_messages import (
-    bot_post_message, 
-    bot_reply_to_thread, 
+    bot_post_message,
+    bot_reply_to_thread,
     bot_add_reaction
 )
@@ -55,7 +55,7 @@ def extract_access_tokens(request_or_scope) -> tuple[str, str]:
     Returns (bot_token, user_token)
     """
     auth_data = None
-    
+
     ## ---- for Klavis Cloud ---- ##
     # Handle different input types (request object for SSE, scope dict for StreamableHTTP)
     if hasattr(request_or_scope, 'headers'):
@@ -69,14 +69,14 @@ def extract_access_tokens(request_or_scope) -> tuple[str, str]:
             auth_data = headers.get(b'x-auth-data')
             if auth_data:
                 auth_data = base64.b64decode(auth_data).decode('utf-8')
-    
+
     ## ---- for local development ---- ##
     if not auth_data:
         # Fall back to environment variables
         bot_token = os.getenv("SLACK_BOT_TOKEN", "")
         user_token = os.getenv("SLACK_USER_TOKEN", "")
         return bot_token, user_token
-    
+
     try:
         # Parse the JSON auth data to extract both tokens
         auth_json = json.loads(auth_data)
@@ -122,24 +136,38 @@ async def list_tools() -> list[types.Tool]:
         # User Channels
         types.Tool(
             name="slack_user_list_channels",
-            description="List all channels the authenticated user has access to. This includes public channels, private channels the user is a member of, direct messages, and multi-party direct messages.",
+            description="List all channels the authenticated user has access to. Supports filtering by channel name or DM user ID, and flexible response formats.",
             inputSchema={
                 "type": "object",
                 "properties": {
                     "limit": {
                         "type": "number",
-                        "description": "Maximum number of channels to return (default 100, max 200)",
+                        "description": "Maximum number of channels to return from API (default 100, max 200)",
                         "default": 100,
                     },
                     "cursor": {
                         "type": "string",
-                        "description": "Pagination cursor for next page of results",
+                        "description": "Pagination cursor for next page of results. Use the value from response_metadata.next_cursor in the previous response.",
                     },
                     "types": {
                         "type": "string",
                         "description": "Mix and match channel types by providing a comma-separated list of any combination of public_channel, private_channel, mpim, im",
                         "default": "public_channel",
                    },
+                    "channel_name": {
+                        "type": "string",
+                        "description": "Filter channels by name (case-insensitive partial match). Applies to public_channel, private_channel, and mpim types. Ignored for im (DM) type.",
+                    },
+                    "user_id": {
+                        "type": "string",
+                        "description": "Filter DMs by user ID (exact match). Only applies when types includes 'im'. Ignored for other channel types.",
+                    },
+                    "response_format": {
+                        "type": "string",
+                        "enum": ["concise", "detailed"],
+                        "description": "Response format. 'concise' (default) returns only id and name/user fields. 'detailed' returns complete channel objects (not recommended).",
+                        "default": "concise",
+                    },
                 },
             },
             annotations=types.ToolAnnotations(
@@ -148,7 +162,7 @@ async def list_tools() -> list[types.Tool]:
         ),
         types.Tool(
             name="slack_get_channel_history",
-            description="Get recent messages from a channel",
+            description="Get recent messages from a channel. Can also retrieve a specific message by its timestamp using oldest/latest/inclusive parameters.",
             inputSchema={
                 "type": "object",
                 "properties": {
@@ -161,6 +175,28 @@ async def list_tools() -> list[types.Tool]:
                         "description": "Number of messages to retrieve (default 10)",
                         "default": 10,
                     },
+                    "cursor": {
+                        "type": "string",
+                        "description": "Pagination cursor for next page of results",
+                    },
+                    "oldest": {
+                        "type": "string",
+                        "description": "Only messages after this Unix timestamp (e.g., '1234567890.123456')",
+                    },
+                    "latest": {
+                        "type": "string",
+                        "description": "Only messages before this Unix timestamp (e.g., '1234567890.123456')",
+                    },
+                    "inclusive": {
+                        "type": "boolean",
+                        "description": "Include messages with oldest or latest timestamps in results. Set to true when fetching a specific message.",
+                    },
+                    "response_format": {
+                        "type": "string",
+                        "enum": ["concise", "detailed"],
+                        "description": "Response format. 'concise' (default) returns essential fields (user_id, ts, text, thread_ts, reply_count, reactions). In most cases, 'concise' is sufficient. 'detailed' returns complete API response (not recommended).",
+                        "default": "concise",
+                    },
                 },
                 "required": ["channel_id"],
             },
@@ -203,6 +239,12 @@ async def list_tools() -> list[types.Tool]:
                         "type": "boolean",
                         "description": "Include messages with oldest or latest timestamps in results",
                    },
+                    "response_format": {
+                        "type": "string",
+                        "enum": ["concise", "detailed"],
+                        "description": "Response format. 'concise' (default) returns only essential fields (user_id, ts, text, thread_ts, is_parent, reply_count/parent_user_id). 'detailed' returns complete API response (not recommended).",
+                        "default": "concise",
+                    },
                 },
                 "required": ["channel_id", "thread_ts"],
             },
@@ -234,21 +276,22 @@ async def list_tools() -> list[types.Tool]:
                 **{"category": "SLACK_CHANNEL"}
             ),
         ),
-        
+
         # User Info
         types.Tool(
             name="slack_list_users",
-            description="Lists all users in a Slack team using user token",
+            description="Lists all users in a Slack team. Supports filtering by user ID or name, and flexible response formats.",
             inputSchema={
                 "type": "object",
                 "properties": {
                     "cursor": {
                         "type": "string",
-                        "description": "Pagination cursor for getting more results",
+                        "description": "Pagination cursor for next page of results. Use the value from response_metadata.next_cursor in the previous response.",
                    },
                     "limit": {
                         "type": "integer",
-                        "description": "Maximum number of users to return (default 100, max 200)",
+                        "description": "Maximum number of users to return from API (default 100, max 200)",
+                        "default": 100,
                     },
                     "team_id": {
                         "type": "string",
@@ -258,6 +301,20 @@ async def list_tools() -> list[types.Tool]:
                         "type": "boolean",
                         "description": "Whether to include locale information for each user",
                     },
+                    "user_id": {
+                        "type": "string",
+                        "description": "Filter by user ID (exact match). When both user_id and name are provided, results matching either condition are returned (OR search).",
+                    },
+                    "name": {
+                        "type": "string",
+                        "description": "Filter by name (case-insensitive partial match against name or real_name fields). When both user_id and name are provided, results matching either condition are returned (OR search).",
+                    },
+                    "response_format": {
+                        "type": "string",
+                        "enum": ["concise", "detailed"],
+                        "description": "Response format. 'concise' (default) returns only id, name, and real_name fields. 'detailed' returns complete user objects (not recommended).",
+                        "default": "concise",
+                    },
                 },
                 "required": [],
             },
@@ -286,11 +343,11 @@ async def list_tools() -> list[types.Tool]:
                 **{"category": "SLACK_USER", "readOnlyHint": True}
             ),
         ),
-        
+
         # User Search
         types.Tool(
             name="slack_user_search_messages",
-            description="Searches for messages matching a query.",
+            description="Searches for messages matching a query. Supports filtering by channel and searching for messages mentioning the authenticated user.",
             inputSchema={
                 "type": "object",
                 "properties": {
@@ -303,7 +360,12 @@ async def list_tools() -> list[types.Tool]:
                         "items": {
                             "type": "string",
                         },
-                        "description": "Optional list of channel IDs to search within. If not provided, searches across all accessible channels.",
+                        "description": "Optional list of channel IDs to search within.",
+                    },
+                    "to_me": {
+                        "type": "boolean",
+                        "description": "If true, searches for messages that mention the authenticated user. Automatically adds 'to:@' to the query.",
+                        "default": False,
                     },
                     "sort": {
                         "type": "string",
@@ -331,6 +393,12 @@ async def list_tools() -> list[types.Tool]:
                         "description": "Whether to include highlighting of matched terms",
                         "default": True,
                    },
+                    "response_format": {
+                        "type": "string",
+                        "enum": ["concise", "detailed"],
+                        "description": "Response format. 'concise' (default) returns only essential fields (channel_id, channel_name, user_id, username, ts, text, permalink, thread_ts). 'detailed' returns complete API response (not recommended).",
+                        "default": "concise",
+                    },
                 },
                 "required": ["query"],
             },
@@ -338,7 +406,7 @@ async def list_tools() -> list[types.Tool]:
                 **{"category": "SLACK_MESSAGE", "readOnlyHint": True}
             ),
         ),
-        
+
         # User Messages
         types.Tool(
             name="slack_user_post_message",
@@ -411,9 +479,9 @@ async def list_tools() -> list[types.Tool]:
                 **{"category": "SLACK_REACTION"}
             ),
         ),
-        
+
         # ============= BOT TOOLS (using bot token) =============
-        
+
         # Bot Messages
         types.Tool(
             name="slack_bot_post_message",
@@ -492,17 +560,27 @@ async def list_tools() -> list[types.Tool]:
 async def call_tool(
     name: str, arguments: dict
 ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]:
-    
+
     # ============= USER TOOLS (using user token) =============
-    
+
     # User Channels
     if name == "slack_user_list_channels":
         limit = arguments.get("limit")
         cursor = arguments.get("cursor")
         types_param = arguments.get("types")
-        
+        channel_name = arguments.get("channel_name")
+        user_id = arguments.get("user_id")
+        response_format = arguments.get("response_format")
+
         try:
-            result = await user_list_channels(limit, cursor, types_param)
+            result = await user_list_channels(
+                limit=limit,
+                cursor=cursor,
+                types=types_param,
+                channel_name=channel_name,
+                user_id=user_id,
+                response_format=response_format,
+            )
             return [
                 types.TextContent(
                     type="text",
@@ -517,7 +595,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_get_channel_history":
         channel_id = arguments.get("channel_id")
         if not channel_id:
@@ -527,11 +605,18 @@ async def call_tool(
                     text="Error: channel_id parameter is required",
                )
             ]
-        
+
         limit = arguments.get("limit")
-        
+        cursor = arguments.get("cursor")
+        oldest = arguments.get("oldest")
+        latest = arguments.get("latest")
+        inclusive = arguments.get("inclusive")
+        response_format = arguments.get("response_format")
+
         try:
-            result = await user_get_channel_history(channel_id, limit)
+            result = await user_get_channel_history(
+                channel_id, limit, cursor, oldest, latest, inclusive, response_format
+            )
             return [
                 types.TextContent(
                     type="text",
@@ -546,11 +631,11 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_get_thread_replies":
         channel_id = arguments.get("channel_id")
         thread_ts = arguments.get("thread_ts")
-        
+
         if not channel_id:
             return [
                 types.TextContent(
@@ -558,7 +643,7 @@ async def call_tool(
                     text="Error: channel_id parameter is required",
                 )
             ]
-        
+
         if not thread_ts:
             return [
                 types.TextContent(
@@ -566,22 +651,24 @@ async def call_tool(
                     text="Error: thread_ts parameter is required",
                 )
             ]
-        
+
         limit = arguments.get("limit")
         cursor = arguments.get("cursor")
         oldest = arguments.get("oldest")
         latest = arguments.get("latest")
         inclusive = arguments.get("inclusive")
-        
+        response_format = arguments.get("response_format")
+
         try:
             result = await get_thread_replies(
-                channel_id, 
-                thread_ts, 
-                limit, 
+                channel_id,
+                thread_ts,
+                limit,
                 cursor,
                 oldest,
                 latest,
-                inclusive
+                inclusive,
+                response_format,
             )
             return [
                 types.TextContent(
@@ -597,11 +684,11 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_invite_users_to_channel":
         channel_id = arguments.get("channel_id")
         user_ids = arguments.get("user_ids")
-        
+
         if not channel_id:
             return [
                 types.TextContent(
@@ -609,7 +696,7 @@ async def call_tool(
                     text="Error: channel_id parameter is required",
                 )
             ]
-        
+
         if not user_ids or not isinstance(user_ids, list) or len(user_ids) == 0:
             return [
                 types.TextContent(
@@ -617,7 +704,7 @@ async def call_tool(
                    text="Error: user_ids parameter is required and must be a non-empty list",
                )
             ]
-        
+
         try:
             result = await invite_users_to_channel(channel_id, user_ids)
             return [
@@ -634,16 +721,27 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     # User Info
     elif name == "slack_list_users":
         cursor = arguments.get("cursor")
         limit = arguments.get("limit")
         team_id = arguments.get("team_id")
         include_locale = arguments.get("include_locale")
-        
+        user_id = arguments.get("user_id")
+        user_name = arguments.get("name")
+        response_format = arguments.get("response_format")
+
         try:
-            result = await list_users(cursor, limit, team_id, include_locale)
+            result = await list_users(
+                cursor=cursor,
+                limit=limit,
+                team_id=team_id,
+                include_locale=include_locale,
+                user_id=user_id,
+                name=user_name,
+                response_format=response_format,
+            )
             return [
                 types.TextContent(
                     type="text",
@@ -658,7 +756,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_user_get_info":
         user_id = arguments.get("user_id")
         if not user_id:
@@ -668,9 +766,9 @@ async def call_tool(
                     text="Error: user_id parameter is required",
                 )
             ]
-        
+
         include_locale = arguments.get("include_locale")
-        
+
         try:
             result = await user_get_info(user_id, include_locale)
             return [
@@ -687,7 +785,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     # User Search
     elif name == "slack_user_search_messages":
         query = arguments.get("query")
@@ -698,16 +796,20 @@ async def call_tool(
                     text="Error: query parameter is required",
                 )
             ]
-        
+
         channel_ids = arguments.get("channel_ids")
+        to_me = arguments.get("to_me", False)
         sort = arguments.get("sort")
         sort_dir = arguments.get("sort_dir")
         count = arguments.get("count")
         cursor = arguments.get("cursor")
         highlight = arguments.get("highlight")
-        
+        response_format = arguments.get("response_format")
+
         try:
-            result = await user_search_messages(query, channel_ids, sort, sort_dir, count, cursor, highlight)
+            result = await user_search_messages(
+                query, channel_ids, to_me, sort, sort_dir, count, cursor, highlight, response_format
+            )
             return [
                 types.TextContent(
                     type="text",
@@ -722,7 +824,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     # User Messages
     elif name == "slack_user_post_message":
         channel_id = arguments.get("channel_id")
@@ -734,7 +836,7 @@ async def call_tool(
                     text="Error: channel_id and text parameters are required",
                 )
             ]
-        
+
         try:
             result = await user_post_message(channel_id, text)
             return [
@@ -751,7 +853,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_user_reply_to_thread":
         channel_id = arguments.get("channel_id")
         thread_ts = arguments.get("thread_ts")
@@ -763,7 +865,7 @@ async def call_tool(
                     text="Error: channel_id, thread_ts, and text parameters are required",
                 )
             ]
-        
+
         try:
             result = await user_reply_to_thread(channel_id, thread_ts, text)
             return [
@@ -780,7 +882,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_user_add_reaction":
         channel_id = arguments.get("channel_id")
         timestamp = arguments.get("timestamp")
@@ -792,7 +894,7 @@ async def call_tool(
                     text="Error: channel_id, timestamp, and reaction parameters are required",
                 )
             ]
-        
+
         try:
             result = await user_add_reaction(channel_id, timestamp, reaction)
             return [
@@ -809,9 +911,9 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     # ============= BOT TOOLS (using bot token) =============
-    
+
     # Bot Messages
     elif name == "slack_bot_post_message":
         channel_id = arguments.get("channel_id")
@@ -823,7 +925,7 @@ async def call_tool(
                     text="Error: channel_id and text parameters are required",
                 )
             ]
-        
+
         try:
            result = await bot_post_message(channel_id, text)
             return [
@@ -840,7 +942,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_bot_reply_to_thread":
         channel_id = arguments.get("channel_id")
         thread_ts = arguments.get("thread_ts")
@@ -852,7 +954,7 @@ async def call_tool(
                     text="Error: channel_id, thread_ts, and text parameters are required",
                 )
             ]
-    
+
         try:
             result = await bot_reply_to_thread(channel_id, thread_ts, text)
             return [
@@ -869,7 +971,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     elif name == "slack_bot_add_reaction":
         channel_id = arguments.get("channel_id")
         timestamp = arguments.get("timestamp")
@@ -881,7 +983,7 @@ async def call_tool(
                     text="Error: channel_id, timestamp, and reaction parameters are required",
                 )
             ]
-    
+
         try:
             result = await bot_add_reaction(channel_id, timestamp, reaction)
             return [
@@ -898,7 +1000,7 @@ async def call_tool(
                     text=f"Error: {str(e)}",
                 )
             ]
-    
+
     else:
         return [
             types.TextContent(
@@ -912,10 +1014,10 @@ async def call_tool(
 
 async def handle_sse(request):
     logger.info("Handling SSE connection")
-    
+
     # Extract both bot and user tokens from headers
     bot_token, user_token = extract_access_tokens(request)
-    
+
     # Set both tokens in context for this request
     bot_token_ctx = bot_token_context.set(bot_token)
     user_token_ctx = user_token_context.set(user_token)
@@ -929,7 +1031,7 @@ async def handle_sse(request):
     finally:
         bot_token_context.reset(bot_token_ctx)
         user_token_context.reset(user_token_ctx)
-    
+
     return Response()
 
 # Set up StreamableHTTP transport
@@ -944,10 +1046,10 @@ async def handle_streamable_http(
     scope: Scope, receive: Receive, send: Send
 ) -> None:
     logger.info("Handling StreamableHTTP request")
-    
+
     # Extract both bot and user tokens from headers
     bot_token, user_token = extract_access_tokens(scope)
-    
+
     # Set both tokens in context for this request
     bot_token_ctx = bot_token_context.set(bot_token)
     user_token_ctx = user_token_context.set(user_token)
@@ -974,7 +1076,7 @@ async def lifespan(app: Starlette) -> AsyncIterator[None]:
             # SSE routes
             Route("/sse", endpoint=handle_sse, methods=["GET"]),
             Mount("/messages/", app=sse.handle_post_message),
-            
+
             # StreamableHTTP route
             Mount("/mcp", app=handle_streamable_http),
         ],
diff --git a/mcp_servers/slack/user_tools/base.py b/mcp_servers/slack/user_tools/base.py
index 93408b15..ebba7617 100644
--- a/mcp_servers/slack/user_tools/base.py
+++ b/mcp_servers/slack/user_tools/base.py
@@ -90,4 +90,19 @@ class SlackAPIError(Exception):
     """Custom exception for Slack API errors."""
     def __init__(self, message: str, response: Optional[Dict[str, Any]] = None):
         super().__init__(message)
-        self.response = response
\ No newline at end of file
+        self.response = response
+
+
+def format_reactions(reactions: list[Dict[str, Any]]) -> str:
+    """Format reactions list to concise string format.
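+
+    Example (illustrative):
+        format_reactions([{"name": "thumbsup", "count": 2}, {"name": "heart", "count": 1}])
+        -> "thumbsup * 2, heart * 1"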
+
+    Args:
+        reactions: List of reaction objects from Slack API
+                   Each has 'name', 'count', and 'users' fields
+
+    Returns:
+        Formatted string like "thumbsup * 2, heart * 1"
+    """
+    if not reactions:
+        return ""
+    return ", ".join(f"{r['name']} * {r['count']}" for r in reactions)
diff --git a/mcp_servers/slack/user_tools/channels.py b/mcp_servers/slack/user_tools/channels.py
index b3444129..a2d8d7dc 100644
--- a/mcp_servers/slack/user_tools/channels.py
+++ b/mcp_servers/slack/user_tools/channels.py
@@ -1,78 +1,332 @@
 import logging
 from typing import Any, Dict, Optional
-from .base import make_slack_user_request
+
+from .base import make_slack_user_request, format_reactions
 
 # Configure logging
 logger = logging.getLogger(__name__)
 
+
+def filter_channels(
+    channels: list[dict[str, Any]],
+    channel_name: Optional[str] = None,
+    user_id: Optional[str] = None,
+) -> list[Dict[str, Any]]:
+    """Client-side filtering of channels.
+
+    Args:
+        channels: Raw channel list from Slack API
+        channel_name: Optional channel name filter (case-insensitive partial match)
+        user_id: Optional user ID filter for DMs (exact match)
+
+    Returns:
+        Filtered list of channels
+    """
+    if not channel_name and not user_id:
+        return channels
+
+    filtered = []
+
+    if channel_name:
+        channel_name_lower = channel_name.lower()
+        filtered.extend(
+            ch
+            for ch in channels
+            if not ch.get("is_im", False)
+            and (
+                channel_name_lower in ch.get("name", "").lower()
+                or channel_name_lower in ch.get("name_normalized", "").lower()
+            )
+        )
+
+    if user_id:
+        filtered.extend(
+            ch
+            for ch in channels
+            if ch.get("is_im", False) and ch.get("user") == user_id
+        )
+
+    return filtered
+
+
+def format_channel_response(
+    channel: dict[str, Any],
+    response_format: str = "concise",
+) -> dict[str, Any]:
+    """Format a single channel object based on response_format.
+
+    Args:
+        channel: Raw channel object from Slack API
+        response_format: "concise" or "detailed"
+
+    Returns:
+        Formatted channel object
+    """
+    if response_format == "detailed":
+        return channel
+
+    if channel.get("is_im", False):
+        return {
+            "id": channel["id"],
+            "user": channel.get("user"),
+        }
+    else:
+        return {
+            "id": channel["id"],
+            "name": channel.get("name"),
+        }
+
+
+def generate_summary(total_returned: int, has_next_cursor: bool) -> str:
+    """Generate summary message for the response.
+
+    Args:
+        total_returned: Number of channels returned
+        has_next_cursor: Whether there are more results available
+
+    Returns:
+        Summary message string
+    """
+    if has_next_cursor:
+        return f"Found {total_returned} channels. More results available - please specify the cursor parameter to continue."
+    return f"Found {total_returned} channels."
+
 # list_channels returns all channels that the user has access to
 # User tokens: channels:read, groups:read, im:read, mpim:read
 async def list_channels(
     limit: Optional[int] = None,
     cursor: Optional[str] = None,
-    types: Optional[str] = None
+    types: Optional[str] = None,
+    channel_name: Optional[str] = None,
+    user_id: Optional[str] = None,
+    response_format: Optional[str] = None,
 ) -> Dict[str, Any]:
-    """List all channels the authenticated user has access to.
-    
+    """List all channels the authenticated user has access to with optional filtering.
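+
+    Example (illustrative):
+        # Find the DM channel with a specific user, returning only ids
+        await list_channels(types="im", user_id="U1234567890")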
+
     This uses the user token to list channels, which means it can access:
     - Public channels in the workspace
     - Private channels the user is a member of
     - Direct messages (DMs)
     - Multi-party direct messages (group DMs)
-    
+
     Args:
-        limit: Maximum number of channels to return (default 100, max 200)
+        limit: Maximum number of channels to return from API (default 100, max 200)
         cursor: Pagination cursor for next page of results
         types: Channel types to include (public_channel, private_channel, mpim, im)
-    
+        channel_name: Filter by channel name (case-insensitive partial match)
+        user_id: Filter DMs by user ID (exact match, only for im type)
+        response_format: Response format - "concise" (default) or "detailed"
+
     Returns:
-        Dictionary containing the list of channels and pagination metadata
+        Dictionary containing:
+        - ok: boolean
+        - channels: filtered and formatted channel list
+        - response_metadata: pagination info (if available)
+        - summary: result summary with total count and helpful message
     """
     logger.info("Executing tool: slack_user_list_channels")
-    
+
     params = {
         "exclude_archived": "true",
     }
-    
+
     if limit:
         params["limit"] = str(min(limit, 200))
     else:
         params["limit"] = "100"
-    
+
     if cursor:
         params["cursor"] = cursor
-    
+
     if types:
         params["types"] = types
     else:
         params["types"] = "public_channel"
-    
+
+    if response_format is None:
+        response_format = "concise"
+
     try:
-        return await make_slack_user_request("GET", "users.conversations", params=params)
+        response = await make_slack_user_request("GET", "users.conversations", params=params)
+
+        if not response.get("ok", False):
+            return response
+
+        channels = response.get("channels", [])
+        filtered_channels = filter_channels(channels, channel_name, user_id)
+        formatted_channels = [
+            format_channel_response(ch, response_format) for ch in filtered_channels
+        ]
+
+        response_metadata = response.get("response_metadata", {})
+        next_cursor = response_metadata.get("next_cursor", "")
+        has_next_cursor = bool(next_cursor)
+
+        result = {
+            "ok": True,
+            "channels": formatted_channels,
+            "summary": {
+                "total_returned": len(formatted_channels),
+                "message": generate_summary(len(formatted_channels), has_next_cursor),
+            },
+        }
+
+        if response_metadata:
+            result["response_metadata"] = response_metadata
+
+        return result
     except Exception as e:
         logger.exception(f"Error executing tool slack_user_list_channels: {e}")
         raise e
 
+
+def format_history_message(
+    message: dict[str, Any],
+    response_format: str = "concise",
+) -> dict[str, Any]:
+    """Format a single message object based on response_format.
+
+    Args:
+        message: Raw message object from Slack API
+        response_format: "concise" or "detailed"
+
+    Returns:
+        Formatted message object
+    """
+    if response_format == "detailed":
+        return message
+
+    formatted = {
+        "user_id": message.get("user"),
+        "ts": message.get("ts"),
+        "text": message.get("text"),
+    }
+
+    # Add thread info if this is a thread parent
+    thread_ts = message.get("thread_ts")
+    if thread_ts:
+        formatted["thread_ts"] = thread_ts
+        # Check if this is a thread parent (ts == thread_ts)
+        if thread_ts == message.get("ts"):
+            formatted["reply_count"] = message.get("reply_count", 0)
+            formatted["is_thread_parent"] = True
+        else:
+            formatted["is_thread_parent"] = False
+
+    # Add reactions if present
+    reactions = message.get("reactions")
+    if reactions:
+        formatted["reactions"] = format_reactions(reactions)
+
+    return formatted
+
+
+def generate_history_summary(returned: int, has_more: bool) -> str:
+    """Generate summary string for history response.
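+
+    Example (illustrative):
+        generate_history_summary(10, True)
+        -> "Found 10 messages. Use next_cursor for more results."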
+
+    Args:
+        returned: Number of messages returned
+        has_more: Whether there are more messages available
+
+    Returns:
+        Summary string
+    """
+    summary = f"Found {returned} messages."
+
+    if has_more:
+        summary += " Use next_cursor for more results."
+
+    return summary
+
+
 # get_channel_history returns the most recent messages from a channel
 # User tokens: channels:history, groups:history, im:history, mpim:history
 async def get_channel_history(
     channel_id: str,
-    limit: Optional[int] = None
+    limit: Optional[int] = None,
+    cursor: Optional[str] = None,
+    oldest: Optional[str] = None,
+    latest: Optional[str] = None,
+    inclusive: Optional[bool] = None,
+    response_format: Optional[str] = None
 ) -> Dict[str, Any]:
-    """Get recent messages from a channel."""
+    """Get recent messages from a channel.
+
+    Args:
+        channel_id: The ID of the channel to get history from
+        limit: Maximum number of messages to return (default 10)
+        cursor: Pagination cursor for next page of results
+        oldest: Only messages after this Unix timestamp
+        latest: Only messages before this Unix timestamp
+        inclusive: Include messages with oldest or latest timestamps in results
+        response_format: "concise" (default) or "detailed"
+
+    Returns:
+        Dictionary containing messages and pagination info
+
+    Examples:
+        # Get a specific message by its timestamp
+        result = await get_channel_history(
+            channel_id='C123456',
+            latest='1234567890.123456',
+            oldest='1234567890.123456',
+            inclusive=True,
+            limit=1
+        )
+    """
     logger.info(f"Executing tool: slack_get_channel_history for channel {channel_id}")
-    
+
     params = {
         "channel": channel_id,
     }
-    
+
     if limit:
         params["limit"] = str(limit)
     else:
         params["limit"] = "10"
-    
+
+    if cursor:
+        params["cursor"] = cursor
+
+    if oldest:
+        params["oldest"] = oldest
+
+    if latest:
+        params["latest"] = latest
+
+    if inclusive is not None:
+        params["inclusive"] = "true" if inclusive else "false"
+
+    if response_format is None:
+        response_format = "concise"
+
     try:
-        return await make_slack_user_request("GET", "conversations.history", params=params)
+        response = await make_slack_user_request("GET", "conversations.history", params=params)
+
+        if not response.get("ok", False):
+            return response
+
+        messages = response.get("messages", [])
+        has_more = response.get("has_more", False)
+
+        # Format messages based on response_format
+        formatted_messages = [
+            format_history_message(msg, response_format) for msg in messages
+        ]
+
+        result = {
+            "ok": True,
+            "messages": formatted_messages,
+            "has_more": has_more,
+            "summary": generate_history_summary(len(formatted_messages), has_more),
+        }
+
+        response_metadata = response.get("response_metadata", {})
+        if response_metadata:
+            result["response_metadata"] = response_metadata
+
+        return result
+
     except Exception as e:
         logger.exception(f"Error executing tool slack_get_channel_history: {e}")
         raise e
@@ -84,29 +338,29 @@ async def invite_users_to_channel(
     user_ids: list[str]
 ) -> Dict[str, Any]:
     """Invite one or more users (including bot users) to a channel.
-    
+
     This uses the user token to invite users to a channel. The authenticated user
     must have permission to invite users to the specified channel. Both regular
     users and bot users can be invited using their respective user IDs.
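+
+    Example (illustrative):
+        await invite_users_to_channel("C1234567890", ["U1234567890", "U9876543210"])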
- + Args: channel_id: The ID of the channel to invite users to (e.g., 'C1234567890') user_ids: A list of user IDs to invite (e.g., ['U1234567890', 'U9876543210']) - + Returns: Dictionary containing the updated channel information """ logger.info(f"Executing tool: slack_invite_users_to_channel for channel {channel_id}") - + if not user_ids: raise ValueError("At least one user ID must be provided") - + # Slack API expects comma-separated user IDs data = { "channel": channel_id, "users": ",".join(user_ids) } - + try: return await make_slack_user_request("POST", "conversations.invite", data=data) except Exception as e: diff --git a/mcp_servers/slack/user_tools/search.py b/mcp_servers/slack/user_tools/search.py index 8fcac3b4..b3b13795 100644 --- a/mcp_servers/slack/user_tools/search.py +++ b/mcp_servers/slack/user_tools/search.py @@ -5,27 +5,126 @@ # Configure logging logger = logging.getLogger(__name__) -# user_search_messages searches for messages in the workspace using user token (includes private channels and DMs) + +async def get_current_user_id() -> str: + """Get the user ID of the authenticated user using auth.test. + + This API does not require any additional scopes for user tokens. + + Returns: + The user ID of the authenticated user. + + Raises: + Exception: If the auth.test API call fails. + """ + response = await make_slack_user_request("GET", "auth.test") + if not response.get("ok"): + error = response.get("error", "Unknown error") + raise Exception(f"Failed to get current user info: {error}") + return response.get("user_id") + + +def format_message_response( + match: dict[str, Any], + response_format: str = "concise", +) -> dict[str, Any]: + """Format a single message match based on response_format. + + Args: + match: Raw message match object from Slack API + response_format: "concise" or "detailed" + + Returns: + Formatted message object + """ + if response_format == "detailed": + return match + + channel = match.get("channel", {}) + + formatted: dict[str, Any] = { + "channel_id": channel.get("id"), + "channel_name": channel.get("name"), # None for DMs + "user_id": match.get("user"), + "username": match.get("username"), + "ts": match.get("ts"), + "text": match.get("text"), + "permalink": match.get("permalink"), + } + + # Add thread_ts if this is a threaded message + if match.get("thread_ts"): + formatted["thread_ts"] = match.get("thread_ts") + + return formatted + + +def generate_summary( + total: int, + returned: int, + has_more: bool, + include_hint: bool = True, +) -> str: + """Generate summary string for the response. + + Args: + total: Total number of messages found + returned: Number of messages returned in this response + has_more: Whether there are more results available + include_hint: Whether to include hint for getting full message details + + Returns: + Summary string + """ + parts = [f"Found {total} messages"] + + if total > returned: + parts.append(f"showing {returned}") + + summary = ", ".join(parts) + "." + + if has_more: + summary += " Use next_cursor for more results." + + if include_hint: + summary += ( + " To get full message details, use slack_get_channel_history with " + "channel_id and latest=ts, inclusive=true, limit=1. " + "For threaded messages, use slack_get_thread_replies." 
+ ) + + return summary + + # User tokens: search:read async def user_search_messages( query: str, channel_ids: Optional[List[str]] = None, + to_me: bool = False, sort: Optional[str] = None, sort_dir: Optional[str] = None, count: Optional[int] = None, cursor: Optional[str] = None, - highlight: Optional[bool] = None + highlight: Optional[bool] = None, + response_format: Optional[str] = None ) -> Dict[str, Any]: """Search for messages in the workspace using user token (includes private channels and DMs).""" logger.info(f"Executing tool: user_search_messages with query: {query}") - - # Construct the query with channel filters if provided + + # Build the search query search_query = query + + # Add to_me filter if requested + if to_me: + user_id = await get_current_user_id() + search_query = f"{search_query} to:<@{user_id}>" + + # Add channel filters if provided if channel_ids and len(channel_ids) > 0: # Add channel filters to the query channels_filter = " ".join([f"in:{channel_id}" for channel_id in channel_ids]) - search_query = f"{query} {channels_filter}" - + search_query = f"{search_query} {channels_filter}" + params = { "query": search_query, } @@ -52,9 +151,49 @@ async def user_search_messages( if cursor: params["cursor"] = cursor - + + if response_format is None: + response_format = "concise" + try: - return await make_slack_user_request("GET", "search.messages", params=params) + response = await make_slack_user_request("GET", "search.messages", params=params) + + if not response.get("ok", False): + return response + + messages = response.get("messages", {}) + matches = messages.get("matches", []) + total = messages.get("total", 0) + + # Format matches based on response_format + formatted_matches = [ + format_message_response(match, response_format) for match in matches + ] + + # Check for pagination + response_metadata = response.get("response_metadata", {}) + next_cursor = response_metadata.get("next_cursor", "") + has_more = bool(next_cursor) + + # Generate summary (include hint only for concise format) + include_hint = response_format == "concise" + summary = generate_summary(total, len(formatted_matches), has_more, include_hint) + + result = { + "ok": True, + "query": search_query, + "messages": { + "total": total, + "matches": formatted_matches, + }, + "summary": summary, + } + + if response_metadata: + result["response_metadata"] = response_metadata + + return result + except Exception as e: logger.exception(f"Error executing tool user_search_messages: {e}") raise e diff --git a/mcp_servers/slack/user_tools/threads.py b/mcp_servers/slack/user_tools/threads.py index 2fe6060b..244db31e 100644 --- a/mcp_servers/slack/user_tools/threads.py +++ b/mcp_servers/slack/user_tools/threads.py @@ -1,9 +1,73 @@ import logging from typing import Any, Dict, Optional -from .base import make_slack_user_request +from .base import make_slack_user_request, format_reactions logger = logging.getLogger(__name__) + +def format_thread_message( + message: dict[str, Any], + thread_ts: str, + response_format: str = "concise", +) -> dict[str, Any]: + """Format a single thread message based on response_format. 
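+    A message is treated as the thread parent when its ts equals thread_ts.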
+ + Args: + message: Raw message object from Slack API + thread_ts: The thread timestamp to determine if message is parent + response_format: "concise" or "detailed" + + Returns: + Formatted message object + """ + if response_format == "detailed": + return message + + ts = message.get("ts") + is_parent = ts == thread_ts + + formatted: dict[str, Any] = { + "user_id": message.get("user"), + "ts": ts, + "text": message.get("text"), + "thread_ts": message.get("thread_ts"), + "is_parent": is_parent, + } + + if is_parent: + formatted["reply_count"] = message.get("reply_count", 0) + else: + formatted["parent_user_id"] = message.get("parent_user_id") + + # Add reactions if present + reactions = message.get("reactions") + if reactions: + formatted["reactions"] = format_reactions(reactions) + + return formatted + + +def generate_thread_summary( + returned: int, + has_more: bool, +) -> str: + """Generate summary string for thread response. + + Args: + returned: Number of messages returned + has_more: Whether there are more messages available + + Returns: + Summary string + """ + summary = f"Found {returned} messages in thread." + + if has_more: + summary += " Use next_cursor for more results." + + return summary + + # get_thread_replies returns all replies in a message thread # User tokens: channels:history, groups:history, im:history, mpim:history async def get_thread_replies( @@ -13,7 +77,8 @@ async def get_thread_replies( cursor: Optional[str] = None, oldest: Optional[str] = None, latest: Optional[str] = None, - inclusive: Optional[bool] = None + inclusive: Optional[bool] = None, + response_format: Optional[str] = None ) -> Dict[str, Any]: """Get all replies in a message thread. @@ -29,7 +94,8 @@ async def get_thread_replies( oldest: Only messages after this Unix timestamp (inclusive) latest: Only messages before this Unix timestamp (exclusive) inclusive: Include messages with oldest or latest timestamps in results - + response_format: "concise" (default) or "detailed" + Returns: Dictionary containing: - messages: List of messages in the thread (includes parent as first message) @@ -70,9 +136,37 @@ async def get_thread_replies( if inclusive is not None: params["inclusive"] = "true" if inclusive else "false" - + + if response_format is None: + response_format = "concise" + try: - return await make_slack_user_request("GET", "conversations.replies", params=params) + response = await make_slack_user_request("GET", "conversations.replies", params=params) + + if not response.get("ok", False): + return response + + messages = response.get("messages", []) + has_more = response.get("has_more", False) + + # Format messages based on response_format + formatted_messages = [ + format_thread_message(msg, thread_ts, response_format) for msg in messages + ] + + result = { + "ok": True, + "messages": formatted_messages, + "has_more": has_more, + "summary": generate_thread_summary(len(formatted_messages), has_more), + } + + response_metadata = response.get("response_metadata", {}) + if response_metadata: + result["response_metadata"] = response_metadata + + return result + except Exception as e: logger.exception(f"Error executing tool get_thread_replies: {e}") raise e diff --git a/mcp_servers/slack/user_tools/users.py b/mcp_servers/slack/user_tools/users.py index dc6ce410..f7603956 100644 --- a/mcp_servers/slack/user_tools/users.py +++ b/mcp_servers/slack/user_tools/users.py @@ -1,43 +1,178 @@ import logging from typing import Any, Dict, Optional + from .base import make_slack_user_request # Configure logging 
logger = logging.getLogger(__name__) + +def filter_users( + users: list[dict[str, Any]], + user_id: Optional[str] = None, + name: Optional[str] = None, +) -> list[Dict[str, Any]]: + """Client-side filtering of users. + + Args: + users: Raw user list from Slack API + user_id: Optional user ID filter (exact match) + name: Optional name filter (case-insensitive partial match against name or real_name) + + Returns: + Filtered list of users (duplicates removed) + """ + if not user_id and not name: + return users + + filtered = [] + seen_ids = set() + + if user_id: + for user in users: + if user.get("id") == user_id: + user_id_val = user.get("id") + if user_id_val and user_id_val not in seen_ids: + filtered.append(user) + seen_ids.add(user_id_val) + + if name: + name_lower = name.lower() + for user in users: + user_id_val = user.get("id") + if ( + user_id_val + and user_id_val not in seen_ids + and ( + name_lower in user.get("name", "").lower() + or name_lower in user.get("real_name", "").lower() + ) + ): + filtered.append(user) + seen_ids.add(user_id_val) + + return filtered + + +def format_user_response( + user: Dict[str, Any], + response_format: str = "concise", +) -> Dict[str, Any]: + """Format a single user object based on response_format. + + Args: + user: Raw user object from Slack API + response_format: "concise" or "detailed" + + Returns: + Formatted user object + """ + if response_format == "detailed": + return user + + return { + "id": user["id"], + "name": user.get("name"), + "real_name": user.get("real_name"), + } + + +def generate_summary(total_returned: int, has_next_cursor: bool) -> str: + """Generate summary message for the response. + + Args: + total_returned: Number of users returned + has_next_cursor: Whether there are more results available + + Returns: + Summary message string + """ + if has_next_cursor: + return f"Found {total_returned} users. More results available - please specify the cursor parameter to continue." + return f"Found {total_returned} users." + # Lists all users in a Slack team. # User tokens: users:read async def list_users( cursor: Optional[str] = None, limit: Optional[int] = None, team_id: Optional[str] = None, - include_locale: Optional[bool] = None + include_locale: Optional[bool] = None, + user_id: Optional[str] = None, + name: Optional[str] = None, + response_format: Optional[str] = None, ) -> Dict[str, Any]: - """Lists all users in a Slack team using users.list API.""" + """Lists all users in a Slack team with optional filtering. 
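+    The user_id and name filters are applied client-side to the fetched page,
+    so finding a match may require paginating with cursor.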
+ + This uses the user token to list users, which means it can access: + - All users in the workspace (including deleted and bot users) + + Args: + cursor: Pagination cursor for next page of results + limit: Maximum number of users to return from API (default 100, max 200) + team_id: Team ID to list users from (for Enterprise Grid) + include_locale: Whether to include locale information for each user + user_id: Filter by user ID (exact match) + name: Filter by name (case-insensitive partial match against name or real_name) + response_format: Response format - "concise" (default) or "detailed" + + Returns: + Dictionary containing: + - ok: boolean + - members: filtered and formatted user list + - response_metadata: pagination info (if available) + - summary: result summary with total count and helpful message + """ logger.info("Executing tool: list_users") - + params = {} - - # Set limit (max 200 per page according to Slack API) + if limit: params["limit"] = str(min(limit, 200)) else: params["limit"] = "100" - - # Add cursor for pagination + if cursor: params["cursor"] = cursor - - # Add team_id if provided (for Enterprise Grid) + if team_id: params["team_id"] = team_id - - # Include locale information + if include_locale is not None: params["include_locale"] = str(include_locale).lower() - + + if response_format is None: + response_format = "concise" + try: - return await make_slack_user_request("GET", "users.list", params=params) + response = await make_slack_user_request("GET", "users.list", params=params) + + if not response.get("ok", False): + return response + + members = response.get("members", []) + filtered_members = filter_users(members, user_id, name) + formatted_members = [ + format_user_response(user, response_format) for user in filtered_members + ] + + response_metadata = response.get("response_metadata", {}) + next_cursor = response_metadata.get("next_cursor", "") + has_next_cursor = bool(next_cursor) + + result = { + "ok": True, + "members": formatted_members, + "summary": { + "total_returned": len(formatted_members), + "message": generate_summary(len(formatted_members), has_next_cursor), + }, + } + + if response_metadata: + result["response_metadata"] = response_metadata + + return result except Exception as e: logger.exception(f"Error executing tool list_users: {e}") raise e @@ -50,15 +185,15 @@ async def user_get_info( ) -> Dict[str, Any]: """Gets information about a user using users.info API.""" logger.info(f"Executing tool: user_get_info for user {user_id}") - + params = { "user": user_id } - + # Include locale information if include_locale is not None: params["include_locale"] = str(include_locale).lower() - + try: return await make_slack_user_request("GET", "users.info", params=params) except Exception as e: diff --git a/mcp_servers/youtube/requirements.txt b/mcp_servers/youtube/requirements.txt index abb85a46..933b2bbf 100644 --- a/mcp_servers/youtube/requirements.txt +++ b/mcp_servers/youtube/requirements.txt @@ -5,4 +5,8 @@ uvicorn[standard] aiohttp python-dotenv typing-extensions -youtube-transcript-api>=1.2.3 \ No newline at end of file +youtube-transcript-api>=1.2.3 +google-api-python-client>=2.100.0 +google-auth>=2.23.0 +google-auth-oauthlib>=1.1.0 +google-auth-httplib2>=0.1.0 \ No newline at end of file diff --git a/mcp_servers/youtube/server.py b/mcp_servers/youtube/server.py index 2a6b89f0..24419d25 100644 --- a/mcp_servers/youtube/server.py +++ b/mcp_servers/youtube/server.py @@ -1,10 +1,14 @@ import contextlib +import base64 import logging import os import 
re +import json from collections.abc import AsyncIterator -from typing import Any, Dict +from typing import Any, Dict, List, Optional from urllib.parse import urlparse, parse_qs +from contextvars import ContextVar +from datetime import datetime, timedelta import click import mcp.types as types @@ -21,24 +25,77 @@ import asyncio from youtube_transcript_api import YouTubeTranscriptApi from youtube_transcript_api.proxies import WebshareProxyConfig +from google.oauth2.credentials import Credentials +from googleapiclient.discovery import build +from googleapiclient.errors import HttpError # Configure logging logger = logging.getLogger(__name__) load_dotenv() +# Context variable to store the access token for each request +auth_token_context: ContextVar[str] = ContextVar('auth_token') + # YouTube API constants and configuration -YOUTUBE_API_KEY = os.getenv("YOUTUBE_API_KEY") -if not YOUTUBE_API_KEY: - raise ValueError("YOUTUBE_API_KEY environment variable is required") +YOUTUBE_API_BASE = "https://www.googleapis.com/youtube/v3" +YOUTUBE_MCP_SERVER_PORT = int(os.getenv("YOUTUBE_MCP_SERVER_PORT", "5000")) +TRANSCRIPT_LANGUAGES = [lang.strip() for lang in os.getenv("TRANSCRIPT_LANGUAGE", "en").split(',')] -# Proxy configuration +# Proxy configuration for transcript API WEBSHARE_PROXY_USERNAME = os.getenv("WEBSHARE_PROXY_USERNAME") WEBSHARE_PROXY_PASSWORD = os.getenv("WEBSHARE_PROXY_PASSWORD") -YOUTUBE_API_BASE = "https://www.googleapis.com/youtube/v3" -YOUTUBE_MCP_SERVER_PORT = int(os.getenv("YOUTUBE_MCP_SERVER_PORT", "5000")) -TRANSCRIPT_LANGUAGES = [lang.strip() for lang in os.getenv("TRANSCRIPT_LANGUAGE", "en").split(',')] + +def extract_access_token(request_or_scope) -> str: + """Extract access token from x-auth-data header.""" + auth_data = os.getenv("AUTH_DATA") + + if not auth_data: + # Handle different input types (request object for SSE, scope dict for StreamableHTTP) + if hasattr(request_or_scope, 'headers'): + # SSE request object + auth_data = request_or_scope.headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + elif isinstance(request_or_scope, dict) and 'headers' in request_or_scope: + # StreamableHTTP scope object + headers = dict(request_or_scope.get("headers", [])) + auth_data = headers.get(b'x-auth-data') + if auth_data: + auth_data = base64.b64decode(auth_data).decode('utf-8') + + if not auth_data: + return "" + + try: + # Parse the JSON auth data to extract access_token + auth_json = json.loads(auth_data) + return auth_json.get('access_token', '') + except (json.JSONDecodeError, TypeError) as e: + logger.warning(f"Failed to parse auth data JSON: {e}") + return "" + + +def get_auth_token() -> str: + """Get the authentication token from context.""" + try: + return auth_token_context.get() + except LookupError: + raise RuntimeError("Authentication token not found in request context") + + +def get_youtube_service(access_token: str): + """Create YouTube Data API service with OAuth access token.""" + credentials = Credentials(token=access_token) + return build('youtube', 'v3', credentials=credentials) + + +def get_youtube_analytics_service(access_token: str): + """Create YouTube Analytics API service with OAuth access token.""" + credentials = Credentials(token=access_token) + return build('youtubeAnalytics', 'v2', credentials=credentials) + # Initialize YouTube Transcript API with proxy if credentials are available if WEBSHARE_PROXY_USERNAME and WEBSHARE_PROXY_PASSWORD: @@ -112,13 +169,21 @@ def _extract_video_id(url: str) -> str: 
raise ValueError(f"Could not extract video ID from URL: {url}") -async def _make_youtube_request(endpoint: str, params: Dict[str, Any], headers: Dict[str, Any] = None) -> Any: +async def _make_youtube_request(endpoint: str, params: Dict[str, Any], access_token: Optional[str] = None) -> Any: """ - Makes an HTTP request to the YouTube Data API. + Makes an HTTP request to the YouTube Data API using OAuth access token. """ - params["key"] = YOUTUBE_API_KEY url = f"{YOUTUBE_API_BASE}/{endpoint}" + # Use provided access token or get from context + if not access_token: + access_token = get_auth_token() + + headers = { + "Authorization": f"Bearer {access_token}", + "Accept": "application/json" + } + async with aiohttp.ClientSession() as session: try: async with session.get(url, params=params, headers=headers) as response: @@ -148,7 +213,7 @@ async def get_video_details(video_id: str) -> Dict[str, Any]: "id": video_id } - result = await _make_youtube_request("videos", params) + result = await _make_youtube_request("captions", params) if not result.get("items"): return {"error": f"No video found with ID: {video_id}"} @@ -179,6 +244,792 @@ async def get_video_details(video_id: str) -> Dict[str, Any]: raise e +# ============================================================================ +# OAuth-based YouTube API Tools (require user authentication) +# ============================================================================ + +async def get_liked_videos(max_results: int = 25) -> Dict[str, Any]: + """Get the user's liked/favorite videos from their YouTube account.""" + logger.info(f"Executing tool: get_liked_videos with max_results: {max_results}") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + # Get the user's liked videos playlist + request = service.videos().list( + part="snippet,contentDetails,statistics", + myRating="like", + maxResults=min(max_results, 50) + ) + response = request.execute() + + videos = [] + for item in response.get("items", []): + snippet = item.get("snippet", {}) + statistics = item.get("statistics", {}) + videos.append({ + "id": item.get("id"), + "title": snippet.get("title"), + "description": snippet.get("description", "")[:200] + "..." 
if len(snippet.get("description", "")) > 200 else snippet.get("description", ""), + "channelTitle": snippet.get("channelTitle"), + "publishedAt": snippet.get("publishedAt"), + "thumbnailUrl": snippet.get("thumbnails", {}).get("medium", {}).get("url"), + "viewCount": statistics.get("viewCount"), + "likeCount": statistics.get("likeCount"), + "url": f"https://www.youtube.com/watch?v={item.get('id')}" + }) + + return { + "liked_videos": videos, + "total_count": len(videos), + "next_page_token": response.get("nextPageToken") + } + except HttpError as e: + logger.error(f"YouTube API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_liked_videos: {e}") + raise e + + +async def get_user_subscriptions(max_results: int = 25) -> Dict[str, Any]: + """Get the user's channel subscriptions.""" + logger.info(f"Executing tool: get_user_subscriptions with max_results: {max_results}") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + request = service.subscriptions().list( + part="snippet,contentDetails", + mine=True, + maxResults=min(max_results, 50), + order="relevance" + ) + response = request.execute() + + subscriptions = [] + for item in response.get("items", []): + snippet = item.get("snippet", {}) + resource_id = snippet.get("resourceId", {}) + subscriptions.append({ + "subscriptionId": item.get("id"), + "channelId": resource_id.get("channelId"), + "channelTitle": snippet.get("title"), + "description": snippet.get("description", "")[:200] + "..." if len(snippet.get("description", "")) > 200 else snippet.get("description", ""), + "thumbnailUrl": snippet.get("thumbnails", {}).get("medium", {}).get("url"), + "channelUrl": f"https://www.youtube.com/channel/{resource_id.get('channelId')}" + }) + + return { + "subscriptions": subscriptions, + "total_count": len(subscriptions), + "next_page_token": response.get("nextPageToken") + } + except HttpError as e: + logger.error(f"YouTube API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_user_subscriptions: {e}") + raise e + + +async def get_my_channel_info() -> Dict[str, Any]: + """Get information about the authenticated user's YouTube channel.""" + logger.info("Executing tool: get_my_channel_info") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + request = service.channels().list( + part="snippet,contentDetails,statistics,brandingSettings", + mine=True + ) + response = request.execute() + + if not response.get("items"): + return {"error": "No channel found for this user"} + + channel = response["items"][0] + snippet = channel.get("snippet", {}) + statistics = channel.get("statistics", {}) + content_details = channel.get("contentDetails", {}) + + return { + "channelId": channel.get("id"), + "title": snippet.get("title"), + "description": snippet.get("description"), + "customUrl": snippet.get("customUrl"), + "publishedAt": snippet.get("publishedAt"), + "thumbnailUrl": snippet.get("thumbnails", {}).get("high", {}).get("url"), + "subscriberCount": statistics.get("subscriberCount"), + "videoCount": statistics.get("videoCount"), + "viewCount": 
statistics.get("viewCount"), + "uploadsPlaylistId": content_details.get("relatedPlaylists", {}).get("uploads"), + "channelUrl": f"https://www.youtube.com/channel/{channel.get('id')}" + } + except HttpError as e: + logger.error(f"YouTube API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_my_channel_info: {e}") + raise e + + +async def get_my_videos(max_results: int = 25) -> Dict[str, Any]: + """Get the authenticated user's uploaded videos.""" + logger.info(f"Executing tool: get_my_videos with max_results: {max_results}") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + # First, get the uploads playlist ID + channel_info = await get_my_channel_info() + uploads_playlist_id = channel_info.get("uploadsPlaylistId") + + if not uploads_playlist_id: + return {"error": "Could not find uploads playlist for this channel"} + + # Get videos from the uploads playlist + request = service.playlistItems().list( + part="snippet,contentDetails", + playlistId=uploads_playlist_id, + maxResults=min(max_results, 50) + ) + response = request.execute() + + video_ids = [item.get("contentDetails", {}).get("videoId") for item in response.get("items", [])] + + # Get detailed statistics for each video + if video_ids: + videos_request = service.videos().list( + part="snippet,statistics,contentDetails", + id=",".join(video_ids) + ) + videos_response = videos_request.execute() + + videos = [] + for video in videos_response.get("items", []): + snippet = video.get("snippet", {}) + statistics = video.get("statistics", {}) + videos.append({ + "id": video.get("id"), + "title": snippet.get("title"), + "description": snippet.get("description", "")[:200] + "..." if len(snippet.get("description", "")) > 200 else snippet.get("description", ""), + "publishedAt": snippet.get("publishedAt"), + "thumbnailUrl": snippet.get("thumbnails", {}).get("medium", {}).get("url"), + "viewCount": statistics.get("viewCount"), + "likeCount": statistics.get("likeCount"), + "commentCount": statistics.get("commentCount"), + "url": f"https://www.youtube.com/watch?v={video.get('id')}" + }) + + return { + "videos": videos, + "total_count": len(videos), + "next_page_token": response.get("nextPageToken") + } + + return {"videos": [], "total_count": 0} + except HttpError as e: + logger.error(f"YouTube API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_my_videos: {e}") + raise e + + +async def search_videos(query: str, max_results: int = 10, channel_id: Optional[str] = None, + published_after: Optional[str] = None, published_before: Optional[str] = None, + order: str = "relevance") -> Dict[str, Any]: + """ + Search for YouTube videos by query, optionally filtered by channel or date range. 
+ + Args: + query: Search query string + max_results: Maximum number of results to return (default 10, max 50) + channel_id: Optional channel ID to search within + published_after: Optional ISO 8601 date string (e.g., "2024-01-01T00:00:00Z") + published_before: Optional ISO 8601 date string + order: Sort order - "relevance", "date", "viewCount", "rating" + """ + logger.info(f"Executing tool: search_videos with query: {query}") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + search_params = { + "part": "snippet", + "q": query, + "type": "video", + "maxResults": min(max_results, 50), + "order": order + } + + if channel_id: + search_params["channelId"] = channel_id + if published_after: + search_params["publishedAfter"] = published_after + if published_before: + search_params["publishedBefore"] = published_before + + request = service.search().list(**search_params) + response = request.execute() + + # Get video IDs to fetch additional details + video_ids = [item.get("id", {}).get("videoId") for item in response.get("items", []) if item.get("id", {}).get("videoId")] + + videos = [] + if video_ids: + # Get detailed statistics + videos_request = service.videos().list( + part="snippet,statistics,contentDetails", + id=",".join(video_ids) + ) + videos_response = videos_request.execute() + + for video in videos_response.get("items", []): + snippet = video.get("snippet", {}) + statistics = video.get("statistics", {}) + videos.append({ + "id": video.get("id"), + "title": snippet.get("title"), + "description": snippet.get("description", "")[:200] + "..." if len(snippet.get("description", "")) > 200 else snippet.get("description", ""), + "channelId": snippet.get("channelId"), + "channelTitle": snippet.get("channelTitle"), + "publishedAt": snippet.get("publishedAt"), + "thumbnailUrl": snippet.get("thumbnails", {}).get("medium", {}).get("url"), + "viewCount": statistics.get("viewCount"), + "likeCount": statistics.get("likeCount"), + "commentCount": statistics.get("commentCount"), + "url": f"https://www.youtube.com/watch?v={video.get('id')}" + }) + + return { + "query": query, + "videos": videos, + "total_count": len(videos), + "next_page_token": response.get("nextPageToken") + } + except HttpError as e: + logger.error(f"YouTube API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool search_videos: {e}") + raise e + + +async def get_channel_videos(channel_id: str, max_results: int = 25) -> Dict[str, Any]: + """Get videos from a specific YouTube channel.""" + logger.info(f"Executing tool: get_channel_videos with channel_id: {channel_id}") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + # First, get channel info to find the uploads playlist + channel_request = service.channels().list( + part="contentDetails,snippet", + id=channel_id + ) + channel_response = channel_request.execute() + + if not channel_response.get("items"): + return {"error": f"No channel found with ID: {channel_id}"} + + channel = channel_response["items"][0] + channel_title = channel.get("snippet", {}).get("title") + uploads_playlist_id = channel.get("contentDetails", {}).get("relatedPlaylists", {}).get("uploads") + + if not uploads_playlist_id: + return {"error": "Could not find uploads playlist for this channel"} + + # Get videos from the uploads 
playlist + request = service.playlistItems().list( + part="snippet,contentDetails", + playlistId=uploads_playlist_id, + maxResults=min(max_results, 50) + ) + response = request.execute() + + video_ids = [item.get("contentDetails", {}).get("videoId") for item in response.get("items", [])] + + videos = [] + if video_ids: + videos_request = service.videos().list( + part="snippet,statistics,contentDetails", + id=",".join(video_ids) + ) + videos_response = videos_request.execute() + + for video in videos_response.get("items", []): + snippet = video.get("snippet", {}) + statistics = video.get("statistics", {}) + videos.append({ + "id": video.get("id"), + "title": snippet.get("title"), + "description": snippet.get("description", "")[:200] + "..." if len(snippet.get("description", "")) > 200 else snippet.get("description", ""), + "publishedAt": snippet.get("publishedAt"), + "thumbnailUrl": snippet.get("thumbnails", {}).get("medium", {}).get("url"), + "viewCount": statistics.get("viewCount"), + "likeCount": statistics.get("likeCount"), + "commentCount": statistics.get("commentCount"), + "url": f"https://www.youtube.com/watch?v={video.get('id')}" + }) + + return { + "channelId": channel_id, + "channelTitle": channel_title, + "videos": videos, + "total_count": len(videos), + "next_page_token": response.get("nextPageToken") + } + except HttpError as e: + logger.error(f"YouTube API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_channel_videos: {e}") + raise e + + +async def search_channels(query: str, max_results: int = 10) -> Dict[str, Any]: + """Search for YouTube channels by name or keywords.""" + logger.info(f"Executing tool: search_channels with query: {query}") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + request = service.search().list( + part="snippet", + q=query, + type="channel", + maxResults=min(max_results, 50) + ) + response = request.execute() + + channel_ids = [item.get("id", {}).get("channelId") for item in response.get("items", []) if item.get("id", {}).get("channelId")] + + channels = [] + if channel_ids: + # Get detailed channel info + channels_request = service.channels().list( + part="snippet,statistics", + id=",".join(channel_ids) + ) + channels_response = channels_request.execute() + + for channel in channels_response.get("items", []): + snippet = channel.get("snippet", {}) + statistics = channel.get("statistics", {}) + channels.append({ + "channelId": channel.get("id"), + "title": snippet.get("title"), + "description": snippet.get("description", "")[:200] + "..." 
if len(snippet.get("description", "")) > 200 else snippet.get("description", ""), + "customUrl": snippet.get("customUrl"), + "thumbnailUrl": snippet.get("thumbnails", {}).get("medium", {}).get("url"), + "subscriberCount": statistics.get("subscriberCount"), + "videoCount": statistics.get("videoCount"), + "viewCount": statistics.get("viewCount"), + "channelUrl": f"https://www.youtube.com/channel/{channel.get('id')}" + }) + + return { + "query": query, + "channels": channels, + "total_count": len(channels) + } + except HttpError as e: + logger.error(f"YouTube API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool search_channels: {e}") + raise e + + +async def get_my_channel_analytics(start_date: Optional[str] = None, end_date: Optional[str] = None) -> Dict[str, Any]: + """ + Get analytics for the authenticated user's YouTube channel. + + Args: + start_date: Start date in YYYY-MM-DD format (default: 30 days ago) + end_date: End date in YYYY-MM-DD format (default: today) + """ + logger.info(f"Executing tool: get_my_channel_analytics") + try: + access_token = get_auth_token() + analytics_service = get_youtube_analytics_service(access_token) + youtube_service = get_youtube_service(access_token) + + # Get channel ID first + channel_request = youtube_service.channels().list( + part="id", + mine=True + ) + channel_response = channel_request.execute() + + if not channel_response.get("items"): + return {"error": "No channel found for this user"} + + channel_id = channel_response["items"][0]["id"] + + # Set default date range + if not end_date: + end_date = datetime.now().strftime("%Y-%m-%d") + if not start_date: + start_date = (datetime.now() - timedelta(days=30)).strftime("%Y-%m-%d") + + # Get channel analytics + request = analytics_service.reports().query( + ids=f"channel=={channel_id}", + startDate=start_date, + endDate=end_date, + metrics="views,estimatedMinutesWatched,averageViewDuration,likes,dislikes,comments,shares,subscribersGained,subscribersLost", + dimensions="day", + sort="day" + ) + response = request.execute() + + # Process the response + column_headers = [header["name"] for header in response.get("columnHeaders", [])] + rows = response.get("rows", []) + + daily_data = [] + for row in rows: + day_data = dict(zip(column_headers, row)) + daily_data.append(day_data) + + # Calculate totals + totals = {} + if rows: + for i, header in enumerate(column_headers): + if header != "day": + totals[header] = sum(row[i] for row in rows if isinstance(row[i], (int, float))) + + return { + "channelId": channel_id, + "dateRange": { + "startDate": start_date, + "endDate": end_date + }, + "totals": totals, + "dailyData": daily_data + } + except HttpError as e: + logger.error(f"YouTube Analytics API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube Analytics API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_my_channel_analytics: {e}") + raise e + + +async def get_my_video_analytics(video_id: str, start_date: Optional[str] = None, end_date: Optional[str] = None) -> Dict[str, Any]: + """ + Get analytics for a specific video on the authenticated user's channel. 
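+    Queries the Analytics API per day with ids="channel==<channelId>" and
+    filters="video==<videoId>".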
+ + Args: + video_id: The YouTube video ID + start_date: Start date in YYYY-MM-DD format (default: 30 days ago) + end_date: End date in YYYY-MM-DD format (default: today) + """ + logger.info(f"Executing tool: get_my_video_analytics with video_id: {video_id}") + try: + access_token = get_auth_token() + analytics_service = get_youtube_analytics_service(access_token) + youtube_service = get_youtube_service(access_token) + + # Get channel ID first + channel_request = youtube_service.channels().list( + part="id", + mine=True + ) + channel_response = channel_request.execute() + + if not channel_response.get("items"): + return {"error": "No channel found for this user"} + + channel_id = channel_response["items"][0]["id"] + + # Set default date range + if not end_date: + end_date = datetime.now().strftime("%Y-%m-%d") + if not start_date: + start_date = (datetime.now() - timedelta(days=30)).strftime("%Y-%m-%d") + + # Get video analytics + request = analytics_service.reports().query( + ids=f"channel=={channel_id}", + startDate=start_date, + endDate=end_date, + metrics="views,estimatedMinutesWatched,averageViewDuration,likes,dislikes,comments,shares", + dimensions="day", + filters=f"video=={video_id}", + sort="day" + ) + response = request.execute() + + # Process the response + column_headers = [header["name"] for header in response.get("columnHeaders", [])] + rows = response.get("rows", []) + + daily_data = [] + for row in rows: + day_data = dict(zip(column_headers, row)) + daily_data.append(day_data) + + # Calculate totals + totals = {} + if rows: + for i, header in enumerate(column_headers): + if header != "day": + totals[header] = sum(row[i] for row in rows if isinstance(row[i], (int, float))) + + return { + "videoId": video_id, + "dateRange": { + "startDate": start_date, + "endDate": end_date + }, + "totals": totals, + "dailyData": daily_data + } + except HttpError as e: + logger.error(f"YouTube Analytics API error: {e}") + error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube Analytics API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_my_video_analytics: {e}") + raise e + + +async def rate_video(video_id: str, rating: str) -> Dict[str, Any]: + """ + Rate a video (like, dislike, or remove rating). + + Args: + video_id: The YouTube video ID to rate + rating: The rating to apply ('like', 'dislike', or 'none' to remove rating) + """ + logger.info(f"Executing tool: rate_video with video_id: {video_id}, rating: {rating}") + try: + access_token = get_auth_token() + service = get_youtube_service(access_token) + + # Validate rating + valid_ratings = ['like', 'dislike', 'none'] + if rating not in valid_ratings: + raise ValueError(f"Invalid rating '{rating}'. 
Must be one of: {valid_ratings}")
+
+        # Execute the rating
+        service.videos().rate(
+            id=video_id,
+            rating=rating
+        ).execute()
+
+        return {
+            "success": True,
+            "video_id": video_id,
+            "rating": rating,
+            "message": f"Successfully {'removed rating from video' if rating == 'none' else f'rated video as {rating}'}"
+        }
+    except HttpError as e:
+        logger.error(f"YouTube API error: {e}")
+        error_detail = json.loads(e.content.decode('utf-8'))
+        raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}")
+    except Exception as e:
+        logger.exception(f"Error executing tool rate_video: {e}")
+        raise e
+
+
+async def create_playlist(title: str, description: str = "", privacy_status: str = "private") -> Dict[str, Any]:
+    """
+    Create a new YouTube playlist.
+
+    Args:
+        title: The title of the playlist
+        description: The description of the playlist (optional)
+        privacy_status: Privacy status ('public', 'private', or 'unlisted')
+    """
+    logger.info(f"Executing tool: create_playlist with title: {title}")
+    try:
+        access_token = get_auth_token()
+        service = get_youtube_service(access_token)
+
+        # Validate privacy status
+        valid_statuses = ['public', 'private', 'unlisted']
+        if privacy_status not in valid_statuses:
+            raise ValueError(f"Invalid privacy_status '{privacy_status}'. Must be one of: {valid_statuses}")
+
+        # Create the playlist
+        request_body = {
+            "snippet": {
+                "title": title,
+                "description": description
+            },
+            "status": {
+                "privacyStatus": privacy_status
+            }
+        }
+
+        response = service.playlists().insert(
+            part="snippet,status",
+            body=request_body
+        ).execute()
+
+        playlist_id = response.get("id")
+        return {
+            "success": True,
+            "playlist_id": playlist_id,
+            "title": title,
+            "description": description,
+            "privacy_status": privacy_status,
+            "url": f"https://www.youtube.com/playlist?list={playlist_id}",
+            "message": f"Successfully created playlist '{title}'"
+        }
+    except HttpError as e:
+        logger.error(f"YouTube API error: {e}")
+        error_detail = json.loads(e.content.decode('utf-8'))
+        raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}")
+    except Exception as e:
+        logger.exception(f"Error executing tool create_playlist: {e}")
+        raise e
+
+
+async def add_video_to_playlist(playlist_id: str, video_id: str, position: Optional[int] = None) -> Dict[str, Any]:
+    """
+    Add a video to a playlist.
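+    Wraps playlistItems().insert with a resourceId of kind "youtube#video".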
+
+    Args:
+        playlist_id: The ID of the playlist to add the video to
+        video_id: The YouTube video ID to add
+        position: The position in the playlist (0-indexed, optional - adds to end if not specified)
+    """
+    logger.info(f"Executing tool: add_video_to_playlist with playlist_id: {playlist_id}, video_id: {video_id}")
+    try:
+        access_token = get_auth_token()
+        service = get_youtube_service(access_token)
+
+        # Build the request body
+        request_body = {
+            "snippet": {
+                "playlistId": playlist_id,
+                "resourceId": {
+                    "kind": "youtube#video",
+                    "videoId": video_id
+                }
+            }
+        }
+
+        # Add position if specified
+        if position is not None:
+            request_body["snippet"]["position"] = position
+
+        response = service.playlistItems().insert(
+            part="snippet",
+            body=request_body
+        ).execute()
+
+        return {
+            "success": True,
+            "playlist_item_id": response.get("id"),
+            "playlist_id": playlist_id,
+            "video_id": video_id,
+            "position": response.get("snippet", {}).get("position"),
+            "message": "Successfully added video to playlist"
+        }
+    except HttpError as e:
+        logger.error(f"YouTube API error: {e}")
+        error_detail = json.loads(e.content.decode('utf-8'))
+        raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}")
+    except Exception as e:
+        logger.exception(f"Error executing tool add_video_to_playlist: {e}")
+        raise e
+
+
+async def get_recent_uploads(days: int = 14, max_results: int = 25) -> Dict[str, Any]:
+    """
+    Get videos uploaded within the specified number of days from subscribed channels.
+
+    Args:
+        days: Number of days to look back (default: 14)
+        max_results: Maximum number of results to return (default: 25)
+    """
+    logger.info(f"Executing tool: get_recent_uploads with days: {days}")
+    try:
+        access_token = get_auth_token()
+        service = get_youtube_service(access_token)
+
+        # Calculate the date threshold (UTC, RFC 3339)
+        published_after = (datetime.utcnow() - timedelta(days=days)).isoformat() + "Z"
+
+        # Get user's subscriptions first
+        subs_request = service.subscriptions().list(
+            part="snippet",
+            mine=True,
+            maxResults=50
+        )
+        subs_response = subs_request.execute()
+
+        # Get channel IDs from subscriptions
+        channel_ids = [
+            item.get("snippet", {}).get("resourceId", {}).get("channelId")
+            for item in subs_response.get("items", [])
+        ]
+
+        all_videos = []
+
+        # Search for recent videos from each subscribed channel
+        for channel_id in channel_ids[:10]:  # Limit to first 10 channels to avoid rate limits
+            if not channel_id:
+                continue
+
+            search_request = service.search().list(
+                part="snippet",
+                channelId=channel_id,
+                type="video",
+                publishedAfter=published_after,
+                order="date",
+                maxResults=5
+            )
+            search_response = search_request.execute()
+
+            for item in search_response.get("items", []):
+                snippet = item.get("snippet", {})
+                video_id = item.get("id", {}).get("videoId")
+                if video_id:
+                    all_videos.append({
+                        "id": video_id,
+                        "title": snippet.get("title"),
+                        "channelTitle": snippet.get("channelTitle"),
+                        "channelId": snippet.get("channelId"),
+                        "publishedAt": snippet.get("publishedAt"),
+                        "thumbnailUrl": snippet.get("thumbnails", {}).get("medium", {}).get("url"),
+                        "url": f"https://www.youtube.com/watch?v={video_id}"
+                    })
+
+        # Sort by published date (newest first) and limit results
+        all_videos.sort(key=lambda x: x.get("publishedAt", ""), reverse=True)
+        all_videos = all_videos[:max_results]
+
+        return {
+            "days": days,
+            "videos": all_videos,
+            "total_count": len(all_videos)
+        }
+    except HttpError as e:
+        logger.error(f"YouTube API error: {e}")
+
error_detail = json.loads(e.content.decode('utf-8')) + raise RuntimeError(f"YouTube API Error ({e.resp.status}): {error_detail.get('error', {}).get('message', 'Unknown error')}") + except Exception as e: + logger.exception(f"Error executing tool get_recent_uploads: {e}") + raise e + + @click.command() @click.option("--port", default=YOUTUBE_MCP_SERVER_PORT, help="Port to listen on for HTTP") @click.option( @@ -223,15 +1074,278 @@ async def list_tools() -> list[types.Tool]: }, }, annotations=types.ToolAnnotations(**{"category": "YOUTUBE_TRANSCRIPT", "readOnlyHint": True}), - ) - ] - - @app.call_tool() - async def call_tool( - name: str, arguments: dict - ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: - ctx = app.request_context - + ), + types.Tool( + name="youtube_get_liked_videos", + description="Get the user's liked/favorite videos from their YouTube account. Requires OAuth authentication.", + inputSchema={ + "type": "object", + "properties": { + "max_results": { + "type": "integer", + "description": "Maximum number of videos to return (default: 25, max: 50)", + "default": 25 + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ACCOUNT", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_get_subscriptions", + description="Get the user's channel subscriptions from their YouTube account. Requires OAuth authentication.", + inputSchema={ + "type": "object", + "properties": { + "max_results": { + "type": "integer", + "description": "Maximum number of subscriptions to return (default: 25, max: 50)", + "default": 25 + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ACCOUNT", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_get_my_channel", + description="Get information about the authenticated user's YouTube channel including subscriber count, video count, and total views. Requires OAuth authentication.", + inputSchema={ + "type": "object", + "properties": {}, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ACCOUNT", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_get_my_videos", + description="Get the authenticated user's uploaded videos with statistics. Requires OAuth authentication.", + inputSchema={ + "type": "object", + "properties": { + "max_results": { + "type": "integer", + "description": "Maximum number of videos to return (default: 25, max: 50)", + "default": 25 + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ACCOUNT", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_search_videos", + description="Search for YouTube videos by query, optionally filtered by channel or date range. 
Use this to find videos about specific topics from any YouTuber.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "Search query string (e.g., 'machine learning tutorial')", + }, + "max_results": { + "type": "integer", + "description": "Maximum number of results to return (default: 10, max: 50)", + "default": 10 + }, + "channel_id": { + "type": "string", + "description": "Optional: Filter results to a specific channel ID", + }, + "published_after": { + "type": "string", + "description": "Optional: Only return videos published after this date (ISO 8601 format, e.g., '2024-01-01T00:00:00Z')", + }, + "published_before": { + "type": "string", + "description": "Optional: Only return videos published before this date (ISO 8601 format)", + }, + "order": { + "type": "string", + "enum": ["relevance", "date", "viewCount", "rating"], + "description": "Sort order for results (default: 'relevance')", + "default": "relevance" + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_SEARCH", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_get_channel_videos", + description="Get videos from a specific YouTube channel. Use this to browse a YouTuber's uploaded videos.", + inputSchema={ + "type": "object", + "required": ["channel_id"], + "properties": { + "channel_id": { + "type": "string", + "description": "The YouTube channel ID (e.g., 'UC_x5XG1OV2P6uZZ5FSM9Ttw')", + }, + "max_results": { + "type": "integer", + "description": "Maximum number of videos to return (default: 25, max: 50)", + "default": 25 + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_CHANNEL", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_search_channels", + description="Search for YouTube channels by name or keywords. Use this to find a YouTuber's channel ID.", + inputSchema={ + "type": "object", + "required": ["query"], + "properties": { + "query": { + "type": "string", + "description": "Search query for channel name or keywords", + }, + "max_results": { + "type": "integer", + "description": "Maximum number of results to return (default: 10, max: 50)", + "default": 10 + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_SEARCH", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_get_my_analytics", + description="Get analytics for the authenticated user's YouTube channel including views, watch time, likes, comments, and subscriber changes. Requires OAuth authentication with YouTube Analytics scope.", + inputSchema={ + "type": "object", + "properties": { + "start_date": { + "type": "string", + "description": "Start date in YYYY-MM-DD format (default: 30 days ago)", + }, + "end_date": { + "type": "string", + "description": "End date in YYYY-MM-DD format (default: today)", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ANALYTICS", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_get_video_analytics", + description="Get analytics for a specific video on the authenticated user's channel. 
Requires OAuth authentication with YouTube Analytics scope.", + inputSchema={ + "type": "object", + "required": ["video_id"], + "properties": { + "video_id": { + "type": "string", + "description": "The YouTube video ID", + }, + "start_date": { + "type": "string", + "description": "Start date in YYYY-MM-DD format (default: 30 days ago)", + }, + "end_date": { + "type": "string", + "description": "End date in YYYY-MM-DD format (default: today)", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ANALYTICS", "readOnlyHint": True}), + ), + types.Tool( + name="youtube_get_recent_uploads", + description="Get videos uploaded within the specified number of days from your subscribed channels. Great for seeing what's new from channels you follow. Requires OAuth authentication.", + inputSchema={ + "type": "object", + "properties": { + "days": { + "type": "integer", + "description": "Number of days to look back (default: 14)", + "default": 14 + }, + "max_results": { + "type": "integer", + "description": "Maximum number of results to return (default: 25)", + "default": 25 + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ACCOUNT", "readOnlyHint": True}), + ), + # Write tools + types.Tool( + name="youtube_rate_video", + description="Rate a YouTube video (like, dislike, or remove rating). Requires OAuth authentication.", + inputSchema={ + "type": "object", + "required": ["video_id", "rating"], + "properties": { + "video_id": { + "type": "string", + "description": "The YouTube video ID to rate", + }, + "rating": { + "type": "string", + "enum": ["like", "dislike", "none"], + "description": "The rating to apply: 'like' to like the video, 'dislike' to dislike it, or 'none' to remove your rating", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_ACCOUNT", "readOnlyHint": False}), + ), + types.Tool( + name="youtube_create_playlist", + description="Create a new YouTube playlist on the authenticated user's channel. Requires OAuth authentication.", + inputSchema={ + "type": "object", + "required": ["title"], + "properties": { + "title": { + "type": "string", + "description": "The title of the playlist", + }, + "description": { + "type": "string", + "description": "The description of the playlist (optional)", + "default": "" + }, + "privacy_status": { + "type": "string", + "enum": ["public", "private", "unlisted"], + "description": "Privacy status of the playlist (default: 'private')", + "default": "private" + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_PLAYLISTS", "readOnlyHint": False}), + ), + types.Tool( + name="youtube_add_video_to_playlist", + description="Add a video to an existing YouTube playlist. Requires OAuth authentication and ownership of the playlist.", + inputSchema={ + "type": "object", + "required": ["playlist_id", "video_id"], + "properties": { + "playlist_id": { + "type": "string", + "description": "The ID of the playlist to add the video to", + }, + "video_id": { + "type": "string", + "description": "The YouTube video ID to add to the playlist", + }, + "position": { + "type": "integer", + "description": "The position in the playlist to insert the video (0-indexed). 
If not specified, the video is added to the end.", + }, + }, + }, + annotations=types.ToolAnnotations(**{"category": "YOUTUBE_PLAYLISTS", "readOnlyHint": False}), + ), + ] + + @app.call_tool() + async def call_tool( + name: str, arguments: dict + ) -> list[types.TextContent | types.ImageContent | types.EmbeddedResource]: + ctx = app.request_context + if name == "get_youtube_video_transcript": url = arguments.get("url") if not url: @@ -259,6 +1373,356 @@ async def call_tool( ) ] + elif name == "youtube_get_liked_videos": + try: + max_results = arguments.get("max_results", 25) + result = await get_liked_videos(max_results) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_get_subscriptions": + try: + max_results = arguments.get("max_results", 25) + result = await get_user_subscriptions(max_results) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_get_my_channel": + try: + result = await get_my_channel_info() + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_get_my_videos": + try: + max_results = arguments.get("max_results", 25) + result = await get_my_videos(max_results) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_search_videos": + query = arguments.get("query") + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + + try: + result = await search_videos( + query=query, + max_results=arguments.get("max_results", 10), + channel_id=arguments.get("channel_id"), + published_after=arguments.get("published_after"), + published_before=arguments.get("published_before"), + order=arguments.get("order", "relevance") + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_get_channel_videos": + channel_id = arguments.get("channel_id") + if not channel_id: + return [ + types.TextContent( + type="text", + text="Error: channel_id parameter is required", + ) + ] + + try: + result = await get_channel_videos( + channel_id=channel_id, + max_results=arguments.get("max_results", 25) + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_search_channels": + query = arguments.get("query") + if not query: + return [ + types.TextContent( + type="text", + text="Error: query parameter is required", + ) + ] + + try: + result = await search_channels( + query=query, + 
max_results=arguments.get("max_results", 10) + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_get_my_analytics": + try: + result = await get_my_channel_analytics( + start_date=arguments.get("start_date"), + end_date=arguments.get("end_date") + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_get_video_analytics": + video_id = arguments.get("video_id") + if not video_id: + return [ + types.TextContent( + type="text", + text="Error: video_id parameter is required", + ) + ] + + try: + result = await get_my_video_analytics( + video_id=video_id, + start_date=arguments.get("start_date"), + end_date=arguments.get("end_date") + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_get_recent_uploads": + try: + result = await get_recent_uploads( + days=arguments.get("days", 14), + max_results=arguments.get("max_results", 25) + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_rate_video": + video_id = arguments.get("video_id") + rating = arguments.get("rating") + if not video_id: + return [ + types.TextContent( + type="text", + text="Error: video_id parameter is required", + ) + ] + if not rating: + return [ + types.TextContent( + type="text", + text="Error: rating parameter is required", + ) + ] + + try: + result = await rate_video( + video_id=video_id, + rating=rating + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_create_playlist": + title = arguments.get("title") + if not title: + return [ + types.TextContent( + type="text", + text="Error: title parameter is required", + ) + ] + + try: + result = await create_playlist( + title=title, + description=arguments.get("description", ""), + privacy_status=arguments.get("privacy_status", "private") + ) + return [ + types.TextContent( + type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + + elif name == "youtube_add_video_to_playlist": + playlist_id = arguments.get("playlist_id") + video_id = arguments.get("video_id") + if not playlist_id: + return [ + types.TextContent( + type="text", + text="Error: playlist_id parameter is required", + ) + ] + if not video_id: + return [ + types.TextContent( + type="text", + text="Error: video_id parameter is required", + ) + ] + + try: + result = await add_video_to_playlist( + playlist_id=playlist_id, + video_id=video_id, + position=arguments.get("position") + ) + return [ + types.TextContent( + 
type="text", + text=str(result), + ) + ] + except Exception as e: + logger.exception(f"Error executing tool {name}: {e}") + return [ + types.TextContent( + type="text", + text=f"Error: {str(e)}", + ) + ] + return [ types.TextContent( type="text", @@ -314,12 +1778,22 @@ async def get_youtube_video_transcript(url: str) -> Dict[str, Any]: async def handle_sse(request): logger.info("Handling SSE connection") - async with sse.connect_sse( - request.scope, request.receive, request._send - ) as streams: - await app.run( - streams[0], streams[1], app.create_initialization_options() - ) + + # Extract auth token from headers + auth_token = extract_access_token(request) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + async with sse.connect_sse( + request.scope, request.receive, request._send + ) as streams: + await app.run( + streams[0], streams[1], app.create_initialization_options() + ) + finally: + auth_token_context.reset(token) + return Response() # Set up StreamableHTTP transport @@ -334,7 +1808,16 @@ async def handle_streamable_http( scope: Scope, receive: Receive, send: Send ) -> None: logger.info("Handling StreamableHTTP request") - await session_manager.handle_request(scope, receive, send) + + # Extract auth token from headers + auth_token = extract_access_token(scope) + + # Set the auth token in context for this request + token = auth_token_context.set(auth_token) + try: + await session_manager.handle_request(scope, receive, send) + finally: + auth_token_context.reset(token) @contextlib.asynccontextmanager async def lifespan(app: Starlette) -> AsyncIterator[None]: